bulltrackers-module 1.0.721 → 1.0.723
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/data/CachedDataLoader.js +101 -102
- package/functions/computation-system/data/DependencyFetcher.js +48 -8
- package/functions/computation-system/persistence/ResultCommitter.js +158 -573
- package/functions/computation-system/utils/data_loader.js +253 -1088
- package/functions/core/utils/bigquery_utils.js +248 -112
- package/functions/etoro-price-fetcher/helpers/handler_helpers.js +4 -1
- package/functions/fetch-insights/helpers/handler_helpers.js +63 -65
- package/functions/fetch-popular-investors/helpers/fetch_helpers.js +143 -458
- package/functions/orchestrator/index.js +108 -141
- package/functions/root-data-indexer/index.js +130 -437
- package/index.js +0 -2
- package/package.json +3 -4
- package/functions/invalid-speculator-handler/helpers/handler_helpers.js +0 -38
- package/functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers.js +0 -101

package/functions/computation-system/data/CachedDataLoader.js

@@ -1,32 +1,32 @@
 /**
- *
- *
- *
- *
- *
+ * @fileoverview Caching layer for data loading.
+ * REFACTORED:
+ * 1. Supports new BigQuery-backed loader functions.
+ * 2. correctly passes 'userTypes' to portfolio/history loaders.
+ * 3. Handles Virtual Shards for BigQuery prices.
  */
 const {
   loadDailyInsights,
   loadDailySocialPostInsights,
-  getRelevantShardRefs,
   getPriceShardRefs,
+  getRelevantShardRefs,
   loadVerificationProfiles,
   loadPopularInvestorRankings,
   loadPIRatings,
   loadPIPageViews,
-  loadWatchlistMembership
+  loadWatchlistMembership,
   loadPIAlertHistory,
   loadPIWatchlistData,
   loadPopularInvestorMasterList,
-  loadDailyPortfolios
+  loadDailyPortfolios,
+  loadDailyHistory
 } = require('../utils/data_loader');
 const { getAvailabilityWindow } = require('./AvailabilityChecker');
 const zlib = require('zlib');
 
 // =============================================================================
-// CONFIGURATION
+// CONFIGURATION
 // =============================================================================
-// Centralizes config keys, defaults, loader functions, and availability flags.
 const LOADER_DEFINITIONS = {
   loadRankings: {
     cache: 'rankings',
@@ -38,28 +38,28 @@ const LOADER_DEFINITIONS = {
   loadRatings: {
     cache: 'ratings',
     configKey: 'piRatingsCollection',
-    defaultCol: '
+    defaultCol: 'pi_ratings',
     fn: loadPIRatings,
     flag: 'piRatings'
   },
   loadPageViews: {
     cache: 'pageViews',
     configKey: 'piPageViewsCollection',
-    defaultCol: '
+    defaultCol: 'pi_page_views',
     fn: loadPIPageViews,
     flag: 'piPageViews'
   },
   loadWatchlistMembership: {
     cache: 'watchlistMembership',
     configKey: 'watchlistMembershipCollection',
-    defaultCol: '
-    fn:
+    defaultCol: 'watchlist_membership',
+    fn: loadWatchlistMembership,
     flag: 'watchlistMembership'
   },
   loadAlertHistory: {
     cache: 'alertHistory',
     configKey: 'piAlertHistoryCollection',
-    defaultCol: '
+    defaultCol: 'pi_alert_history',
     fn: loadPIAlertHistory,
     flag: 'piAlertHistory'
   },
@@ -79,15 +79,18 @@ const LOADER_DEFINITIONS = {
   },
   loadPIWatchlistData: {
     cache: 'piWatchlistData',
-    // No collection key needed for direct implementation, but handled by fn
     fn: loadPIWatchlistData,
-    isIdBased: true
+    isIdBased: true
   },
-  // <--- ADDED SUPPORT FOR PORTFOLIO SERIES
   loadPortfolios: {
     cache: 'portfolios',
-    fn: loadDailyPortfolios
-
+    fn: loadDailyPortfolios,
+    flag: 'hasPortfolio' // Loose check, accurate check handles inside loader
+  },
+  loadHistory: {
+    cache: 'history',
+    fn: loadDailyHistory,
+    flag: 'hasHistory'
   }
 };
 
@@ -96,11 +99,12 @@ class CachedDataLoader {
     this.config = config;
     this.deps = dependencies;
 
-    // Initialize caches
+    // Initialize caches
     this.cache = {
       mappings: null,
       verifications: null,
       piMasterList: null,
+      prices: new Map(), // Dedicated cache for price shard data
       ...Object.values(LOADER_DEFINITIONS).reduce((acc, def) => {
         acc[def.cache] = new Map();
         return acc;
@@ -111,70 +115,66 @@ class CachedDataLoader {
   _tryDecompress(data) {
     if (data?._compressed === true && data.payload) {
       try { return JSON.parse(zlib.gunzipSync(data.payload).toString()); }
-      catch (e) {
+      catch (e) { this.deps.logger?.log('WARN', '[CachedDataLoader] Decompression failed'); return {}; }
     }
     return data;
   }
 
   // =========================================================================
-  // GENERIC LOADER
+  // GENERIC LOADER
   // =========================================================================
-  // [FIX] Accepts ...args to pass down filters (like requiredUserTypes)
   async _loadGeneric(methodName, key, ...args) {
     const def = LOADER_DEFINITIONS[methodName];
     if (!def) throw new Error(`Unknown loader method: ${methodName}`);
 
+    // Construct a cache key that includes args (like userTypes) if present
+    // e.g. "2025-01-01::POPULAR_INVESTOR"
+    const cacheKey = args.length > 0 ? `${key}::${JSON.stringify(args)}` : key;
     const cacheMap = this.cache[def.cache];
-
+
+    if (cacheMap.has(cacheKey)) return cacheMap.get(cacheKey);
 
-
-
-
-      this.deps.logger?.log('INFO', `[CachedDataLoader] 📂 Loading '${def.cache}' from: ${collection}/${key}`);
+    if (def.configKey && this.deps.logger) {
+      // Only log simple date-based loads to avoid spam
+      // this.deps.logger.log('INFO', `[CachedDataLoader] Loading ${def.cache} for ${key}`);
     }
 
     const promise = def.fn(this.config, this.deps, key, ...args);
-    cacheMap.set(
+    cacheMap.set(cacheKey, promise);
     return promise;
   }
 
   // =========================================================================
-  // PUBLIC
+  // PUBLIC METHODS
   // =========================================================================
-
-
-
-  async
-  async
-  async loadRankings(dateStr) { return this._loadGeneric('loadRankings', dateStr); }
-  async loadRatings(dateStr) { return this._loadGeneric('loadRatings', dateStr); }
-  async loadPageViews(dateStr) { return this._loadGeneric('loadPageViews', dateStr); }
+  async loadInsights(dateStr) { return this._loadGeneric('loadInsights', dateStr); }
+  async loadSocial(dateStr, userTypes) { return this._loadGeneric('loadSocial', dateStr, userTypes); }
+  async loadRankings(dateStr) { return this._loadGeneric('loadRankings', dateStr); }
+  async loadRatings(dateStr) { return this._loadGeneric('loadRatings', dateStr); }
+  async loadPageViews(dateStr) { return this._loadGeneric('loadPageViews', dateStr); }
   async loadWatchlistMembership(dateStr) { return this._loadGeneric('loadWatchlistMembership', dateStr); }
-  async loadAlertHistory(dateStr) { return this._loadGeneric('loadAlertHistory'
-  async loadPIWatchlistData(piCid) { return this._loadGeneric('loadPIWatchlistData'
-  // <--- ADDED PORTFOLIOS ACCESSOR with extra arg
+  async loadAlertHistory(dateStr) { return this._loadGeneric('loadAlertHistory', dateStr); }
+  async loadPIWatchlistData(piCid) { return this._loadGeneric('loadPIWatchlistData', String(piCid)); }
   async loadPortfolios(dateStr, userTypes) { return this._loadGeneric('loadPortfolios', dateStr, userTypes); }
+  async loadHistory(dateStr, userTypes) { return this._loadGeneric('loadHistory', dateStr, userTypes); }
 
   // =========================================================================
-  // SPECIALIZED
+  // SPECIALIZED METHODS
   // =========================================================================
-
+
   async loadMappings() {
     if (this.cache.mappings) return this.cache.mappings;
     this.cache.mappings = await this.deps.calculationUtils.loadInstrumentMappings();
     return this.cache.mappings;
   }
 
-  async loadVerifications(dateStr) {
+  async loadVerifications(dateStr) {
+    // Verifications are mostly static/global, but we accept dateStr for consistency
     if (this.cache.verifications) return this.cache.verifications;
-
-    const col = this.config.verificationsCollection || 'verification_profiles';
-    this.deps.logger?.log('INFO', `[CachedDataLoader] 📂 Loading 'verifications' from: ${col} (Context: ${dateStr || 'Global'})`);
-
-    // Pass dateStr so data_loader can check GCS snapshots
     this.cache.verifications = await loadVerificationProfiles(this.config, this.deps, dateStr);
     return this.cache.verifications;
   }
+
   async loadPIMasterList() {
     if (this.cache.piMasterList) return this.cache.piMasterList;
     this.cache.piMasterList = await loadPopularInvestorMasterList(this.config, this.deps);
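
The `_loadGeneric` change above keys the cache on the date plus any extra loader arguments and stores the promise itself, so concurrent callers share one in-flight request while different `userTypes` filters get separate entries. A minimal standalone sketch of that keying behaviour (illustrative names, not package code; `fetchFn` is hypothetical):

// Sketch only: mirrors the composite-key promise caching added in _loadGeneric.
function buildCacheKey(key, args) {
  return args.length > 0 ? `${key}::${JSON.stringify(args)}` : key;
}

const cache = new Map();
function loadOnce(key, args, loaderFn) {
  const cacheKey = buildCacheKey(key, args);
  if (cache.has(cacheKey)) return cache.get(cacheKey); // reuse the in-flight promise
  const promise = loaderFn(key, ...args);
  cache.set(cacheKey, promise);
  return promise;
}

// loadOnce('2025-01-01', [], fetchFn)                     -> key '2025-01-01'
// loadOnce('2025-01-01', [['POPULAR_INVESTOR']], fetchFn) -> key '2025-01-01::[["POPULAR_INVESTOR"]]'
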
@@ -185,70 +185,63 @@ class CachedDataLoader {
     return getPriceShardRefs(this.config, this.deps);
   }
 
-  async getSpecificPriceShardReferences(
-    return getRelevantShardRefs(this.config, this.deps,
+  async getSpecificPriceShardReferences(ids) {
+    return getRelevantShardRefs(this.config, this.deps, ids);
   }
 
   async loadPriceShard(docRef) {
-    //
+    // Handle BigQuery "Virtual Shard"
     if (docRef && docRef._bigquery === true) {
-      //
-
-
-
-
-
-
-
-
-        // If BigQuery returns empty, fallback to Firestore
-        this.deps.logger.log('WARN', `[CachedDataLoader] BigQuery returned no price data, falling back to Firestore`);
-      } catch (bqError) {
-        this.deps.logger.log('WARN', `[CachedDataLoader] BigQuery price load failed, falling back to Firestore: ${bqError.message}`);
-        // Fall through to Firestore
+      // Use a dedicated cache key for the ALL-PRICE blob
+      // If targetIds are specified, create a unique cache key
+      const targetIds = docRef.targetIds && docRef.targetIds.length > 0 ? docRef.targetIds : null;
+      const cacheKey = targetIds
+        ? `BQ_PRICES_${targetIds.sort().join(',')}`
+        : 'BQ_ALL_PRICES';
+
+      if (this.cache.prices.has(cacheKey)) {
+        return this.cache.prices.get(cacheKey);
       }
+
+      const { queryAssetPrices } = require('../../core/utils/bigquery_utils');
+      // queryAssetPrices signature: (startDateStr, endDateStr, instrumentIds, logger)
+      const promise = queryAssetPrices(null, null, targetIds, this.deps.logger).then(data => {
+        const result = data || {};
+        this.cache.prices.set(cacheKey, result);
+        return result;
+      });
+
+      return promise;
     }
-
-    // Firestore
+
+    // Firestore Fallback
     try {
       const snap = await docRef.get();
       return snap.exists ? this._tryDecompress(snap.data()) : {};
-    } catch (e) {
-      console.error(`Error loading shard ${docRef.path}:`, e);
-      return {};
-    }
+    } catch (e) { return {}; }
   }
 
   // =========================================================================
-  //
+  // SERIES LOADING (Batching)
   // =========================================================================
-  /**
-   * Optimistically loads data series using Batch Reads (db.getAll).
-   * Uses Availability Index to minimize costs.
-   * [FIX] Now accepts ...args to pass context (e.g. requiredUserTypes)
-   */
   async loadSeries(loaderMethod, dateStr, lookbackDays, ...args) {
     const def = LOADER_DEFINITIONS[loaderMethod];
     if (!def) throw new Error(`[CachedDataLoader] Unknown series method ${loaderMethod}`);
 
-    //
+    // If method doesn't support batching configuration, use legacy loop
     if (!def.configKey || !def.flag) return this._loadSeriesLegacy(loaderMethod, dateStr, lookbackDays, ...args);
 
-    // 1. Calculate
+    // 1. Calculate Range
     const endDate = new Date(dateStr);
     const startDate = new Date(endDate);
     startDate.setUTCDate(startDate.getUTCDate() - (lookbackDays - 1));
 
-    // 2.
+    // 2. Check Availability
     let availabilityMap = new Map();
     try {
       availabilityMap = await getAvailabilityWindow(this.deps, startDate.toISOString().slice(0, 10), endDate.toISOString().slice(0, 10));
-    } catch (e) {
-      console.warn(`[CachedDataLoader] Availability check failed. Optimistic batching enabled.`);
-    }
+    } catch (e) {}
 
-    // 3. Construct Batch Refs
     const collectionName = this.config[def.configKey] || def.defaultCol;
     const batchRefs = [];
     const dateKeyMap = [];
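
In the hunk above, `loadPriceShard` treats a docRef tagged `_bigquery: true` as a virtual shard: it derives a cache key from the optional `targetIds`, reuses any cached blob, and otherwise delegates to `queryAssetPrices`. A small sketch of just the key derivation (illustrative, not package code):

// Sketch only: cache-key derivation for BigQuery "virtual" price shards.
function priceCacheKey(docRef) {
  const targetIds = docRef.targetIds && docRef.targetIds.length > 0 ? docRef.targetIds : null;
  return targetIds ? `BQ_PRICES_${targetIds.sort().join(',')}` : 'BQ_ALL_PRICES';
}

console.log(priceCacheKey({ _bigquery: true }));                    // 'BQ_ALL_PRICES'
console.log(priceCacheKey({ _bigquery: true, targetIds: [5, 2] })); // 'BQ_PRICES_2,5'

Sorting the requested IDs means the same instrument set maps to one cache entry regardless of call order.
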
@@ -259,35 +252,38 @@ class CachedDataLoader {
       const dString = d.toISOString().slice(0, 10);
 
       const dayStatus = availabilityMap.get(dString);
-      // Fetch if index says data exists OR if index is missing (optimistic)
       if (!dayStatus || dayStatus[def.flag]) {
         batchRefs.push(this.deps.db.collection(collectionName).doc(dString));
         dateKeyMap.push(dString);
       }
     }
 
-    //
+    // Note: Batch loading ONLY works for Firestore.
+    // If we are in BigQuery mode, we should NOT use Firestore batching for these types.
+    // However, this `loadSeries` method is mostly used for simple docs (Rankings, Ratings).
+    // Since `loadRankings` in `data_loader.js` now points to BigQuery, passing a Firestore Ref
+    // won't work because `data_loader` isn't called here; we are accessing DB directly.
+
+    // [CRITICAL FIX]
+    // If we are using BigQuery, we cannot use `db.getAll` on non-existent Firestore docs.
+    // We must delegate back to the loader method (which handles BQ) in a loop (concurrently).
+    if (process.env.BIGQUERY_ENABLED !== 'false') {
+      return this._loadSeriesLegacy(loaderMethod, dateStr, lookbackDays, ...args);
+    }
+
+    // Firestore Batch Logic (Only runs if BQ Disabled)
     const results = {};
     if (batchRefs.length > 0) {
-      this.deps.logger?.log('INFO', `[CachedDataLoader] 📂 Batch loading ${batchRefs.length} docs for '${loaderMethod}'`);
       try {
         const snapshots = await this.deps.db.getAll(...batchRefs);
         snapshots.forEach((snap, idx) => {
           if (snap.exists) {
             const raw = this._tryDecompress(snap.data());
-
-
-            const { date, lastUpdated, ...clean } = raw;
-            results[dateKeyMap[idx]] = clean;
-          } else if (loaderMethod === 'loadRankings') {
-            results[dateKeyMap[idx]] = raw.Items || [];
-          } else {
-            results[dateKeyMap[idx]] = raw;
-          }
+            if (loaderMethod === 'loadRankings') results[dateKeyMap[idx]] = raw.Items || [];
+            else results[dateKeyMap[idx]] = raw;
           }
         });
       } catch (err) {
-        console.warn(`[CachedDataLoader] Batch failed: ${err.message}. Legacy fallback.`);
         return this._loadSeriesLegacy(loaderMethod, dateStr, lookbackDays, ...args);
       }
     }
@@ -309,8 +305,11 @@ class CachedDataLoader {
       const d = new Date(endDate);
       d.setUTCDate(d.getUTCDate() - i);
       const dStr = d.toISOString().slice(0, 10);
-
-
+      promises.push(
+        this[loaderMethod](dStr, ...args)
+          .then(data => { if (data) results[dStr] = data; })
+          .catch(() => {})
+      );
     }
 
     await Promise.all(promises);
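
With the change above, `loadSeries` skips Firestore batch reads whenever `BIGQUERY_ENABLED` is not 'false' and delegates to the legacy path, which now issues one loader call per day concurrently. A sketch of that per-day pattern (illustrative, not package code):

// Sketch only: concurrent per-day loading, as the legacy series path does.
async function loadSeriesPerDay(loader, method, endDateStr, lookbackDays, ...args) {
  const results = {};
  const promises = [];
  const endDate = new Date(endDateStr);
  for (let i = 0; i < lookbackDays; i++) {
    const d = new Date(endDate);
    d.setUTCDate(d.getUTCDate() - i);
    const dStr = d.toISOString().slice(0, 10);
    promises.push(
      loader[method](dStr, ...args)
        .then(data => { if (data) results[dStr] = data; })
        .catch(() => {}) // a missing day just leaves a gap in the series
    );
  }
  await Promise.all(promises);
  return results;
}
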
package/functions/computation-system/data/DependencyFetcher.js

@@ -2,6 +2,7 @@
  * FILENAME: computation-system/data/DependencyFetcher.js
  * @fileoverview Fetches dependencies for computations.
  * REFACTORED: Unified fetch logic, streamlined decompression/sharding/GCS.
+ * UPDATED: Properly checks isPage/isAlert flags to determine BigQuery vs Firestore routing.
  */
 const { normalizeName } = require('../utils/utils');
 const zlib = require('zlib');
@@ -9,6 +10,39 @@ const { Storage } = require('@google-cloud/storage');
 
 const storage = new Storage(); // Singleton Client
 
+/**
+ * Helper to check if a computation is isPage or isAlert by looking up manifest
+ * @param {string} normalizedName - Normalized computation name
+ * @param {object} config - Config object that may contain manifest info
+ * @param {string} category - Computation category (fallback for alerts)
+ * @returns {Promise<{isPage: boolean, isAlert: boolean}>}
+ */
+async function checkComputationFlags(normalizedName, config, category) {
+  let isPage = false;
+  let isAlert = false;
+
+  // Try to get manifest from config if available
+  if (config.getCalculations && typeof config.getCalculations === 'function') {
+    try {
+      const calculations = config.getCalculations(config);
+      const manifest = calculations.find(c => normalizeName(c.name) === normalizedName);
+      if (manifest) {
+        isPage = manifest.isPage === true;
+        isAlert = manifest.isAlertComputation === true;
+        return { isPage, isAlert };
+      }
+    } catch (e) {
+      // Fall through to category-based detection
+    }
+  }
+
+  // Fallback: infer from category (alerts category = alert computation)
+  // Note: isPage cannot be inferred from category alone, so defaults to false
+  isAlert = category === 'alerts';
+
+  return { isPage, isAlert };
+}
+
 // =============================================================================
 // HELPERS
 // =============================================================================
@@ -45,20 +79,24 @@ function tryDecompress(payload) {
 /**
  * Fetches, decompresses, and reassembles (if sharded or on GCS) a single result document.
  * NEW: For non-alert, non-page computations, tries BigQuery first (cheaper, no sharding/compression).
+ * @param {object} db - Firestore database instance
+ * @param {object} config - Configuration object (may contain logger, manifestLookup, etc.)
+ * @param {string} dateStr - Date string in YYYY-MM-DD format
+ * @param {string} name - Computation name (normalized)
+ * @param {string} category - Computation category
+ * @returns {Promise<object|null>} Result data or null if not found
  */
 async function fetchSingleResult(db, config, dateStr, name, category) {
   const { resultsCollection = 'computation_results', resultsSubcollection = 'results', computationsSubcollection = 'computations' } = config;
   const log = config.logger || console;
 
-  //
-
-  // For now, we'll try BigQuery first for all non-alert computations (alerts are in 'alerts' category)
-  const isAlertComputation = category === 'alerts';
-  // Page computations are typically in 'popular-investor' category but have isPage flag
-  // For now, we'll try BigQuery for all non-alert computations
+  // Check if this is an alert or page computation by looking up manifest
+  const { isPage: isPageComputation, isAlert: isAlertComputation } = await checkComputationFlags(name, config, category);
 
-  // Try BigQuery first for non-alert computations (reduces Firestore reads)
-
+  // Try BigQuery first for non-alert, non-page computations (reduces Firestore reads)
+  // isPage and isAlert computations are always written to Firestore (in addition to BigQuery),
+  // so we should check Firestore for them, but can also try BigQuery as a fallback
+  if (!isAlertComputation && !isPageComputation && process.env.BIGQUERY_ENABLED !== 'false') {
    try {
      const { queryComputationResult } = require('../../core/utils/bigquery_utils');
      const bigqueryResult = await queryComputationResult(name, category, dateStr, log);
@@ -71,6 +109,8 @@ async function fetchSingleResult(db, config, dateStr, name, category) {
       log.log('WARN', `[DependencyFetcher] BigQuery fetch failed for ${name}, falling back to Firestore: ${bqError.message}`);
       // Fall through to Firestore
     }
+  } else if (isAlertComputation || isPageComputation) {
+    log.log('INFO', `[DependencyFetcher] 📄 Using Firestore for ${isAlertComputation ? 'alert' : 'page'} computation ${name} (${dateStr})`);
   }
 
   // Fallback to Firestore (for alerts, pages, or if BigQuery fails)