bulltrackers-module 1.0.715 → 1.0.717
This diff shows the changes between publicly released versions of the package as they appear in the supported public registries; it is provided for informational purposes only.
@@ -185,6 +185,12 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
 
     runMetrics.io.writes += 1;
 
+    // 4. Write to BigQuery (for analytics) - same structure as other computations
+    // Page computations store the full result object { cid1: {...}, cid2: {...}, ... } in result_data
+    await writeToBigQuery(result, name, dStr, calc.manifest.category, logger, false).catch(err => {
+        logger.log('WARN', `[BigQuery] Failed to write page computation ${name} for ${dStr}: ${err.message}`);
+    });
+
     if (isFinalFlush && calc.manifest.hash) {
         successUpdates[name] = {
             hash: calc.manifest.hash, simHash: simHash, resultHash: resultHash,
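The added write is deliberately best-effort: the promise is awaited so the write happens inline, but the `.catch()` converts any rejection into a WARN log, so a BigQuery outage cannot fail the surrounding Firestore commit. A minimal sketch of the same pattern, with hypothetical names (writeAnalytics, commitWithBestEffortAnalytics are not part of this package):

// Sketch only; names are hypothetical.
async function commitWithBestEffortAnalytics(result, logger) {
    // .catch() resolves the chain, so this await never throws.
    await writeAnalytics(result).catch(err => {
        logger.log('WARN', `analytics write failed: ${err.message}`); // swallowed
    });
    return 'committed'; // the commit path continues regardless of the analytics outcome
}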
@@ -555,15 +561,15 @@ async function writeToBigQuery(result, name, dateContext, category, logger, isAl
 
         const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
 
-        // Use
-        //
-
+        // Use MERGE operation to overwrite existing results (by date + computation_name + category)
+        // This ensures re-running a computation overwrites the old result
+        // Key fields: date, computation_name, category (ignoring created_at)
+        const { insertRowsWithMerge } = require('../../core/utils/bigquery_utils');
+        const keyFields = ['date', 'computation_name', 'category'];
 
-
-
-
-        await insertRowsLoadJob(datasetId, 'computation_results', [row], logger);
-        }
+        // For alert computations, we still want to use MERGE but it will use load jobs (free)
+        // This ensures overwrites work correctly for both alert and non-alert computations
+        await insertRowsWithMerge(datasetId, 'computation_results', [row], keyFields, logger);
 
     } catch (error) {
         // Log but don't throw - BigQuery write failure shouldn't break Firestore writes
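Switching from insertRowsLoadJob to insertRowsWithMerge changes append semantics to upsert semantics: rows are keyed on (date, computation_name, category), so re-running a computation for the same date replaces its earlier row rather than appending a duplicate. A sketch, assuming result_data as a non-key column and illustrative field values:

// Illustrative only; result_data and the values are assumptions.
const keyFields = ['date', 'computation_name', 'category'];
const first = { date: '2024-01-01', computation_name: 'page_stats', category: 'pages', result_data: '{"v":1}' };
const second = { ...first, result_data: '{"v":2}' };
await insertRowsWithMerge(datasetId, 'computation_results', [first], keyFields, logger);  // row inserted
await insertRowsWithMerge(datasetId, 'computation_results', [second], keyFields, logger); // same row updated in place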
@@ -663,8 +669,29 @@ function calculateFirestoreBytes(value) {
 }
 
 function calculateExpirationDate(dateStr, ttlDays) {
+    // Validate inputs
+    if (!dateStr || typeof dateStr !== 'string') {
+        return null; // Invalid date string
+    }
+
+    if (ttlDays === undefined || ttlDays === null || isNaN(Number(ttlDays))) {
+        return null; // Invalid TTL days
+    }
+
     const base = new Date(dateStr);
-
+
+    // Check if date is valid (invalid dates have NaN getTime())
+    if (isNaN(base.getTime())) {
+        return null; // Invalid date
+    }
+
+    base.setDate(base.getDate() + Number(ttlDays));
+
+    // Double-check the result is still valid
+    if (isNaN(base.getTime())) {
+        return null; // Resulting date is invalid
+    }
+
     return base;
 }
 
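The hardened calculateExpirationDate now returns null rather than an Invalid Date for bad input, matching its inline comments. Illustrative calls (the first assumes an environment where local time matches UTC, since setDate/getDate operate in local time):

calculateExpirationDate('2024-01-01', 30);  // Date for 2024-01-31
calculateExpirationDate('not-a-date', 30);  // null - base date parses to NaN
calculateExpirationDate('2024-01-01', 'x'); // null - TTL is not numeric
calculateExpirationDate(null, 30);          // null - missing date string

The hunks that follow modify the module's BigQuery utilities, required above as '../../core/utils/bigquery_utils'.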
@@ -196,27 +196,17 @@ async function insertRowsWithMerge(datasetId, tableId, rows, keyFields, logger =
         logger.log('INFO', `[BigQuery] Loaded ${validRows.length} rows into temp table ${tempTableId} using LOAD JOB (free)`);
     }
 
-    // Use MERGE to insert
+    // Use MERGE to insert new rows or update existing rows (SQL-native deduplication/overwrite)
     // This is more efficient than checking in JavaScript
     const mergeConditions = keyFields.map(f => `target.${f} = source.${f}`).join(' AND ');
-    const mergeQuery = `
-        MERGE \`${tablePath}\` AS target
-        USING \`${tempTablePath}\` AS source
-        ON ${mergeConditions}
-        WHEN NOT MATCHED THEN
-            INSERT ROW
-    `;
-
-    await query(mergeQuery, {}, logger);
 
-    //
-
-
-    const
-    const countBefore = existingBefore[0]?.cnt || 0;
+    // Build UPDATE clause - update all non-key fields
+    const allFields = schema.map(f => f.name);
+    const nonKeyFields = allFields.filter(f => !keyFields.includes(f));
+    const updateClause = nonKeyFields.map(f => `${f} = source.${f}`).join(', ');
 
-    //
-    //
+    // Count rows that will be inserted (don't exist in target) vs updated (already exist)
+    // Query BEFORE the MERGE to get accurate counts
     const [insertedCountResult] = await query(`
         SELECT COUNT(*) as inserted
         FROM \`${tempTablePath}\` AS source
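With an assumed five-column computation_results schema, the generated clauses expand as follows:

// Worked expansion of the clause-building above; the schema fields are assumptions.
const schema = [
    { name: 'date' }, { name: 'computation_name' }, { name: 'category' },
    { name: 'result_data' }, { name: 'created_at' }
];
const keyFields = ['date', 'computation_name', 'category'];
const mergeConditions = keyFields.map(f => `target.${f} = source.${f}`).join(' AND ');
// -> "target.date = source.date AND target.computation_name = source.computation_name AND target.category = source.category"
const updateClause = schema.map(f => f.name)
    .filter(f => !keyFields.includes(f))
    .map(f => `${f} = source.${f}`).join(', ');
// -> "result_data = source.result_data, created_at = source.created_at"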
@@ -227,12 +217,30 @@ async function insertRowsWithMerge(datasetId, tableId, rows, keyFields, logger =
     `, {}, logger);
 
     const rowsInserted = insertedCountResult[0]?.inserted || 0;
+    const rowsUpdated = validRows.length - rowsInserted;
+
+    // Now perform the MERGE (inserts new rows, updates existing rows)
+    const mergeQuery = `
+        MERGE \`${tablePath}\` AS target
+        USING \`${tempTablePath}\` AS source
+        ON ${mergeConditions}
+        WHEN MATCHED THEN
+            UPDATE SET ${updateClause}
+        WHEN NOT MATCHED THEN
+            INSERT ROW
+    `;
+
+    await query(mergeQuery, {}, logger);
 
     // Drop temp table
     await tempTable.delete();
 
     if (logger) {
-
+        if (rowsUpdated > 0) {
+            logger.log('INFO', `[BigQuery] MERGE completed: ${rowsInserted} new rows inserted, ${rowsUpdated} existing rows updated in ${tablePath}`);
+        } else {
+            logger.log('INFO', `[BigQuery] MERGE completed: ${rowsInserted} new rows inserted into ${tablePath} (${validRows.length - rowsInserted} duplicates skipped via SQL)`);
+        }
     }
 
     return rowsInserted;
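Because the insert count comes from an anti-join run before the MERGE, the reported numbers reflect the table state the statement actually sees: staging 10 rows of which 3 already match on the key fields yields rowsInserted = 7 and rowsUpdated = 10 - 7 = 3. With the assumed columns above, mergeQuery expands to roughly:

// Hypothetical expansion; dataset, table, and temp-table names are illustrative.
const expandedMerge = `
    MERGE \`bulltrackers_data.computation_results\` AS target
    USING \`bulltrackers_data.computation_results_temp\` AS source
    ON target.date = source.date AND target.computation_name = source.computation_name AND target.category = source.category
    WHEN MATCHED THEN
        UPDATE SET result_data = source.result_data, created_at = source.created_at
    WHEN NOT MATCHED THEN
        INSERT ROW
`;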
@@ -617,6 +625,11 @@ const SCHEMAS = {
         { name: 'instrument_id', type: 'INT64', mode: 'REQUIRED' },
         { name: 'insights_data', type: 'JSON', mode: 'REQUIRED' },
         { name: 'fetched_at', type: 'TIMESTAMP', mode: 'REQUIRED' }
+    ],
+    ticker_mappings: [
+        { name: 'instrument_id', type: 'INT64', mode: 'REQUIRED' },
+        { name: 'ticker', type: 'STRING', mode: 'REQUIRED' },
+        { name: 'last_updated', type: 'TIMESTAMP', mode: 'REQUIRED' }
     ]
 };
 
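A row conforming to the new ticker_mappings schema would look like this (values illustrative):

const row = {
    instrument_id: 1001,                    // INT64, REQUIRED
    ticker: 'AAPL',                         // STRING, REQUIRED
    last_updated: new Date().toISOString()  // TIMESTAMP, REQUIRED
};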
@@ -804,6 +817,27 @@ async function ensureInstrumentInsightsTable(logger = null) {
     );
 }
 
+/**
+ * Ensure ticker_mappings table exists
+ * @param {object} logger - Logger instance
+ * @returns {Promise<Table>}
+ */
+async function ensureTickerMappingsTable(logger = null) {
+    const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+    const tableId = 'ticker_mappings';
+    const schema = getSchema(tableId);
+
+    return await ensureTableExists(
+        datasetId,
+        tableId,
+        schema,
+        {
+            clusterFields: ['instrument_id']
+        },
+        logger
+    );
+}
+
 /**
  * Query portfolio data from BigQuery
  * @param {string} dateStr - Date string (YYYY-MM-DD)
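Like the other ensure* helpers, this is idempotent: it creates ticker_mappings (clustered on instrument_id) when absent and returns the existing table otherwise, so it can safely run before every write. A sketch of a hypothetical write path; keying the merge on instrument_id is an assumption:

await ensureTickerMappingsTable(logger);
await insertRowsWithMerge(datasetId, 'ticker_mappings', rowsToWrite, ['instrument_id'], logger); // key field assumed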
@@ -1623,6 +1657,60 @@ async function queryAssetPrices(startDateStr = null, endDateStr = null, instrume
     }
 }
 
+/**
+ * Query ticker mappings from BigQuery
+ * Returns data in format: { instrumentId: "TICKER", ... }
+ * @param {object} logger - Logger instance
+ * @returns {Promise<object|null>} Ticker mappings object, or null if not found/error
+ */
+async function queryTickerMappings(logger = null) {
+    if (process.env.BIGQUERY_ENABLED === 'false') {
+        if (logger) logger.log('DEBUG', '[BigQuery] Ticker mappings query skipped (BIGQUERY_ENABLED=false)');
+        return null;
+    }
+
+    const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+    const tablePath = `${datasetId}.ticker_mappings`;
+
+    try {
+        const sqlQuery = `
+            SELECT
+                instrument_id,
+                ticker
+            FROM \`${tablePath}\`
+            ORDER BY instrument_id ASC
+        `;
+
+        if (logger) {
+            logger.log('INFO', `[BigQuery] 🔍 Querying ticker mappings from ${tablePath}`);
+        }
+
+        const rows = await query(sqlQuery, {}, logger);
+
+        if (!rows || rows.length === 0) {
+            if (logger) logger.log('INFO', `[BigQuery] No ticker mappings found in ${tablePath}`);
+            return null;
+        }
+
+        // Transform to expected format: { instrumentId: "TICKER" }
+        const mappings = {};
+        for (const row of rows) {
+            mappings[String(row.instrument_id)] = row.ticker;
+        }
+
+        if (logger) {
+            logger.log('INFO', `[BigQuery] ✅ Retrieved ${Object.keys(mappings).length} ticker mappings from ${tablePath}`);
+        }
+
+        return mappings;
+    } catch (error) {
+        if (logger) {
+            logger.log('WARN', `[BigQuery] Ticker mappings query failed for ${tablePath}: ${error.message}`);
+        }
+        return null;
+    }
+}
+
 module.exports = {
     getBigQueryClient,
     getOrCreateDataset,
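Callers receive either a plain object keyed by stringified instrument ID or null (BigQuery disabled, empty table, or query error), which keeps fallback logic simple. A sketch of a hypothetical caller:

// Hypothetical caller; the fallback source is an assumption.
const mappings = await queryTickerMappings(logger);
if (mappings) {
    const ticker = mappings['1001']; // keys are String(instrument_id)
} else {
    // null covers the disabled/empty/error cases; fall back to another source here
}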
@@ -1639,6 +1727,7 @@ module.exports = {
     ensurePIMasterListTable,
     ensurePIRankingsTable,
     ensureInstrumentInsightsTable,
+    ensureTickerMappingsTable,
     queryPortfolioData,
     queryHistoryData,
     querySocialData,
@@ -1646,6 +1735,7 @@ module.exports = {
     queryPIMasterList,
     queryPIRankings,
     queryInstrumentInsights,
+    queryTickerMappings,
     queryComputationResult,
     queryComputationResultsRange,
     checkExistingRows,
package/index.js  CHANGED
@@ -65,6 +65,8 @@ const { runPopularInvestorFetch } = require('./functions
 const { backfillTaskEngineData } = require('./functions/maintenance/backfill-task-engine-data/index');
 const { backfillPIMasterListRankings } = require('./functions/maintenance/backfill-pi-master-list-rankings/index');
 const { backfillInstrumentInsights } = require('./functions/maintenance/backfill-instrument-insights/index');
+const { backfillTickerMappings } = require('./functions/maintenance/backfill-ticker-mappings/index');
+const { backfillPriceData } = require('./functions/maintenance/backfill-price-data-from-firestore/index');
 
 // Alert System
 const { handleAlertTrigger, handleComputationResultWrite, checkAndSendAllClearNotifications } = require('./functions/alert-system/index');
@@ -135,7 +137,9 @@ const maintenance = {
     // [NEW] BigQuery backfills
     backfillTaskEngineData,
     backfillPIMasterListRankings,
-    backfillInstrumentInsights
+    backfillInstrumentInsights,
+    backfillTickerMappings,
+    backfillPriceData
 };
 
 const proxy = { handlePost };