This guide uses the API client from the Guides index. Copy it to your project first.
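If you don't have it handy, here is a minimal sketch of the shape this guide assumes: a small fetch wrapper exposing getPerformance, getCompetitors, and getSources. The base URL and endpoint paths below are illustrative assumptions, not the official API surface; use the client from the Guides index for the real definitions.
// Minimal sketch of the assumed client — base URL and paths are hypothetical.
// Uses the global fetch available in Node 18+.
class QwairyClient {
  constructor(token, baseUrl = 'https://api.example.com/v1') { // hypothetical base URL
    this.token = token;
    this.baseUrl = baseUrl;
  }

  async request(path, params = {}) {
    const url = new URL(`${this.baseUrl}${path}`);
    Object.entries(params).forEach(([k, v]) => url.searchParams.set(k, v));
    const res = await fetch(url, { headers: { Authorization: `Bearer ${this.token}` } });
    if (!res.ok) throw new Error(`API error ${res.status}`);
    return res.json();
  }

  getPerformance(brandId, params) { return this.request(`/brands/${brandId}/performance`, params); } // hypothetical path
  getCompetitors(brandId, params) { return this.request(`/brands/${brandId}/competitors`, params); } // hypothetical path
  getSources(brandId, params) { return this.request(`/brands/${brandId}/sources`, params); } // hypothetical path
}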
What You’ll Build
Export pipelines for:
- CSV files: For Excel, Google Sheets
- JSON files: For data pipelines
- BigQuery: For Google Cloud analytics
- Snowflake: For enterprise data warehousing
Fetch All Data
First, gather data from all endpoints.
async function fetchAllData(client, brandId, period = 30) {
  // Fetch performance, competitor, and source data in parallel
  const [performance, competitors, sources] = await Promise.all([
    client.getPerformance(brandId, { period }),
    client.getCompetitors(brandId, { period, limit: 100 }),
    client.getSources(brandId, { period, limit: 100 }),
  ]);

  return {
    exportedAt: new Date().toISOString(),
    brandId,
    period,
    performance: {
      scores: performance.scores,
      methodology: performance.methodology,
    },
    competitors: competitors.competitors.map(c => ({
      id: c.id,
      name: c.name,
      relationship: c.relationship,
      shareOfVoice: c.shareOfVoice,
      totalMentions: c.totalMentions,
      avgPosition: c.avgPosition,
      avgSentiment: c.avgSentiment,
    })),
    sources: sources.sources.map(s => ({
      id: s.id,
      domain: s.domain,
      type: s.type,
      isSelf: s.isSelf,
      totalMentions: s.totalMentions,
      rate: s.rate,
      avgPosition: s.avgPosition,
    })),
  };
}
Export to CSV
const fs = require('fs');

// Quote any value containing commas, quotes, or newlines (RFC 4180),
// so fields like competitor names can't break the row structure
function csvEscape(value) {
  const s = String(value ?? '');
  return /[",\n]/.test(s) ? `"${s.replace(/"/g, '""')}"` : s;
}

function exportToCSV(data, outputDir = './exports') {
  if (!fs.existsSync(outputDir)) {
    fs.mkdirSync(outputDir, { recursive: true });
  }
  const timestamp = data.exportedAt.split('T')[0];

  // Competitors CSV
  const competitorHeaders = ['id', 'name', 'relationship', 'shareOfVoice', 'totalMentions', 'avgPosition', 'avgSentiment'];
  const competitorRows = data.competitors.map(c => competitorHeaders.map(h => csvEscape(c[h])).join(','));
  fs.writeFileSync(
    `${outputDir}/competitors_${timestamp}.csv`,
    [competitorHeaders.join(','), ...competitorRows].join('\n')
  );

  // Sources CSV
  const sourceHeaders = ['id', 'domain', 'type', 'isSelf', 'totalMentions', 'rate', 'avgPosition'];
  const sourceRows = data.sources.map(s => sourceHeaders.map(h => csvEscape(s[h])).join(','));
  fs.writeFileSync(
    `${outputDir}/sources_${timestamp}.csv`,
    [sourceHeaders.join(','), ...sourceRows].join('\n')
  );

  // Performance CSV (single row of numeric scores)
  const perfHeaders = ['geoScore', 'mentionRate', 'sourceRate', 'shareOfVoice', 'sentiment'];
  const perfValues = [
    data.performance.scores.global,
    data.performance.scores.mentionRate,
    data.performance.scores.sourceRate,
    data.performance.scores.shareOfVoice,
    data.performance.scores.sentiment,
  ];
  fs.writeFileSync(
    `${outputDir}/performance_${timestamp}.csv`,
    [perfHeaders.join(','), perfValues.join(',')].join('\n')
  );

  return {
    files: [
      `${outputDir}/competitors_${timestamp}.csv`,
      `${outputDir}/sources_${timestamp}.csv`,
      `${outputDir}/performance_${timestamp}.csv`,
    ],
  };
}
Export to JSON
const fs = require('fs');

function exportToJSON(data, outputDir = './exports') {
  if (!fs.existsSync(outputDir)) {
    fs.mkdirSync(outputDir, { recursive: true });
  }
  const timestamp = data.exportedAt.split('T')[0];
  const filepath = `${outputDir}/qwairy_export_${timestamp}.json`;
  fs.writeFileSync(filepath, JSON.stringify(data, null, 2));
  return { file: filepath, size: fs.statSync(filepath).size };
}
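If the JSON feeds a warehouse rather than a human, newline-delimited JSON (one record per line) is often the better shape: BigQuery load jobs, for instance, ingest NDJSON files directly. A minimal variant for the competitor data (the exportToNDJSON name is ours; sources would follow the same pattern):
const fs = require('fs');

// Optional variant: one competitor record per line, directly loadable
// by warehouse bulk-load jobs (e.g. BigQuery's NDJSON source format)
function exportToNDJSON(data, outputDir = './exports') {
  if (!fs.existsSync(outputDir)) {
    fs.mkdirSync(outputDir, { recursive: true });
  }
  const timestamp = data.exportedAt.split('T')[0];
  const filepath = `${outputDir}/competitors_${timestamp}.ndjson`;
  const lines = data.competitors.map(c =>
    JSON.stringify({ ...c, exportedAt: data.exportedAt, brandId: data.brandId })
  );
  fs.writeFileSync(filepath, lines.join('\n') + '\n');
  return { file: filepath };
}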
Database Schemas
Create these tables before exporting to BigQuery or Snowflake. The DDL below uses BigQuery types (STRING, FLOAT64, INT64, BOOL); a Snowflake-flavored variant is sketched after it.
-- Create dataset
CREATE SCHEMA IF NOT EXISTS qwairy;

-- Competitors table
CREATE TABLE IF NOT EXISTS qwairy.competitors (
  id STRING NOT NULL,
  name STRING NOT NULL,
  relationship STRING,
  share_of_voice FLOAT64,
  total_mentions INT64,
  avg_position FLOAT64,
  avg_sentiment FLOAT64,
  exported_at TIMESTAMP NOT NULL,
  brand_id STRING NOT NULL
);

-- Sources table
CREATE TABLE IF NOT EXISTS qwairy.sources (
  id STRING NOT NULL,
  domain STRING NOT NULL,
  type STRING,
  is_self BOOL,
  total_mentions INT64,
  rate FLOAT64,
  avg_position FLOAT64,
  exported_at TIMESTAMP NOT NULL,
  brand_id STRING NOT NULL
);

-- Performance table (snake_case column names, matching the other tables)
CREATE TABLE IF NOT EXISTS qwairy.performance (
  geo_score FLOAT64,
  mention_rate FLOAT64,
  source_rate FLOAT64,
  share_of_voice FLOAT64,
  sentiment FLOAT64,
  methodology STRING,
  exported_at TIMESTAMP NOT NULL,
  brand_id STRING NOT NULL
);
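Snowflake does not use BigQuery's type names. An equivalent competitors table there would look roughly like this; the sources and performance tables follow the same type mapping:
-- Snowflake variant (BigQuery types mapped to Snowflake equivalents)
CREATE TABLE IF NOT EXISTS competitors (
  id VARCHAR NOT NULL,
  name VARCHAR NOT NULL,
  relationship VARCHAR,
  share_of_voice FLOAT,
  total_mentions NUMBER,
  avg_position FLOAT,
  avg_sentiment FLOAT,
  exported_at TIMESTAMP_NTZ NOT NULL,
  brand_id VARCHAR NOT NULL
);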
Export to BigQuery
const { BigQuery } = require('@google-cloud/bigquery');

async function exportToBigQuery(data, datasetId = 'qwairy', projectId = process.env.GCP_PROJECT_ID) {
  const bigquery = new BigQuery({ projectId });

  // Create dataset if not exists
  const [datasets] = await bigquery.getDatasets();
  if (!datasets.find(d => d.id === datasetId)) {
    await bigquery.createDataset(datasetId);
  }
  const dataset = bigquery.dataset(datasetId);

  // Insert competitors — row keys must match the snake_case column names,
  // so map the camelCase API fields explicitly
  await dataset.table('competitors').insert(
    data.competitors.map(c => ({
      id: c.id,
      name: c.name,
      relationship: c.relationship,
      share_of_voice: c.shareOfVoice,
      total_mentions: c.totalMentions,
      avg_position: c.avgPosition,
      avg_sentiment: c.avgSentiment,
      exported_at: data.exportedAt,
      brand_id: data.brandId,
    }))
  );

  // Insert sources
  await dataset.table('sources').insert(
    data.sources.map(s => ({
      id: s.id,
      domain: s.domain,
      type: s.type,
      is_self: s.isSelf,
      total_mentions: s.totalMentions,
      rate: s.rate,
      avg_position: s.avgPosition,
      exported_at: data.exportedAt,
      brand_id: data.brandId,
    }))
  );

  // Insert performance (single row)
  await dataset.table('performance').insert([{
    geo_score: data.performance.scores.global,
    mention_rate: data.performance.scores.mentionRate,
    source_rate: data.performance.scores.sourceRate,
    share_of_voice: data.performance.scores.shareOfVoice,
    sentiment: data.performance.scores.sentiment,
    methodology: JSON.stringify(data.performance.methodology),
    exported_at: data.exportedAt,
    brand_id: data.brandId,
  }]);

  return { dataset: datasetId, tables: ['competitors', 'sources', 'performance'] };
}
Export to Snowflake
const snowflake = require('snowflake-sdk');

async function exportToSnowflake(data, config) {
  const connection = snowflake.createConnection({
    account: config.account,
    username: config.username,
    password: config.password,
    warehouse: config.warehouse,
    database: config.database,
    schema: config.schema || 'QWAIRY',
  });

  await new Promise((resolve, reject) => {
    connection.connect((err) => (err ? reject(err) : resolve()));
  });

  // Promisified wrapper around the driver's callback-style execute
  const execute = (sql, binds = []) =>
    new Promise((resolve, reject) => {
      connection.execute({
        sqlText: sql,
        binds,
        complete: (err, stmt, rows) => (err ? reject(err) : resolve(rows)),
      });
    });

  // Insert competitors (row by row; fine for small result sets)
  for (const c of data.competitors) {
    await execute(
      `INSERT INTO competitors (id, name, relationship, share_of_voice, total_mentions, avg_position, avg_sentiment, exported_at, brand_id)
       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
      [c.id, c.name, c.relationship, c.shareOfVoice, c.totalMentions, c.avgPosition, c.avgSentiment, data.exportedAt, data.brandId]
    );
  }

  // Insert sources
  for (const s of data.sources) {
    await execute(
      `INSERT INTO sources (id, domain, type, is_self, total_mentions, rate, avg_position, exported_at, brand_id)
       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
      [s.id, s.domain, s.type, s.isSelf, s.totalMentions, s.rate, s.avgPosition, data.exportedAt, data.brandId]
    );
  }

  // Close the connection cleanly before returning
  await new Promise((resolve, reject) => {
    connection.destroy((err) => (err ? reject(err) : resolve()));
  });

  return { schema: config.schema || 'QWAIRY', tables: ['competitors', 'sources'] };
}
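For larger exports, the per-row loops can be collapsed into bulk inserts: the Node driver accepts an array of row arrays as binds (array binding; verify support in your snowflake-sdk version). Inside exportToSnowflake, the competitors loop would become something like:
// Bulk variant (sketch): bind an array of rows so the driver
// batches them into a single multi-row insert
const competitorRows = data.competitors.map(c => [
  c.id, c.name, c.relationship, c.shareOfVoice,
  c.totalMentions, c.avgPosition, c.avgSentiment,
  data.exportedAt, data.brandId,
]);
await execute(
  `INSERT INTO competitors (id, name, relationship, share_of_voice, total_mentions, avg_position, avg_sentiment, exported_at, brand_id)
   VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
  competitorRows
);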
Usage
const client = new QwairyClient(process.env.QWAIRY_API_TOKEN);

// Wrap in an async function: top-level await is not available in CommonJS
async function main() {
  // Fetch data
  const data = await fetchAllData(client, 'your-brand-id', 30);

  // Export to files
  const csvResult = exportToCSV(data);
  console.log('CSV files:', csvResult.files);

  const jsonResult = exportToJSON(data);
  console.log('JSON file:', jsonResult.file, `(${jsonResult.size} bytes)`);

  // Export to BigQuery (requires @google-cloud/bigquery)
  // const bqResult = await exportToBigQuery(data);
  // console.log('BigQuery tables:', bqResult.tables);
}

main().catch(console.error);
Scheduling Exports
Automate daily or weekly exports; a cron-friendly entry script is sketched below the table.

| Platform | Configuration |
|---|---|
| Cron | 0 1 * * * (daily at 1am) |
| GitHub Actions | schedule: cron: '0 1 * * *' |
| AWS Lambda | EventBridge scheduled rule |
| Google Cloud | Cloud Scheduler + Cloud Functions |
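For cron-style schedulers, the simplest setup is a small entry script that runs the pipeline and exits non-zero on failure, so the scheduler can surface broken runs. A minimal sketch using the functions from this guide (the export.js filename and the QWAIRY_BRAND_ID environment variable are our assumptions):
// export.js — e.g. from cron: 0 1 * * * node /path/to/export.js
async function main() {
  const client = new QwairyClient(process.env.QWAIRY_API_TOKEN);
  const data = await fetchAllData(client, process.env.QWAIRY_BRAND_ID, 30);
  exportToCSV(data);
  exportToJSON(data);
}

main().catch((err) => {
  console.error('Export failed:', err);
  process.exit(1); // non-zero exit lets cron/CI flag the failed run
});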
Next Steps
- Create a custom dashboard for real-time monitoring
- Set up weekly reports for stakeholders
- Add competitive analysis to your exports

