bulltrackers-module 1.0.151 → 1.0.153
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/appscript-api/index.js +8 -38
- package/functions/computation-system/helpers/computation_pass_runner.js +38 -183
- package/functions/computation-system/helpers/orchestration_helpers.js +120 -314
- package/functions/computation-system/utils/data_loader.js +47 -132
- package/functions/computation-system/utils/schema_capture.js +7 -41
- package/functions/computation-system/utils/utils.js +37 -124
- package/functions/core/utils/firestore_utils.js +8 -46
- package/functions/core/utils/intelligent_header_manager.js +26 -128
- package/functions/core/utils/intelligent_proxy_manager.js +33 -171
- package/functions/core/utils/pubsub_utils.js +7 -24
- package/functions/dispatcher/helpers/dispatch_helpers.js +9 -30
- package/functions/dispatcher/index.js +7 -30
- package/functions/etoro-price-fetcher/helpers/handler_helpers.js +12 -80
- package/functions/fetch-insights/helpers/handler_helpers.js +18 -70
- package/functions/generic-api/helpers/api_helpers.js +28 -167
- package/functions/generic-api/index.js +49 -188
- package/functions/invalid-speculator-handler/helpers/handler_helpers.js +10 -47
- package/functions/orchestrator/helpers/discovery_helpers.js +1 -5
- package/functions/orchestrator/index.js +1 -6
- package/functions/price-backfill/helpers/handler_helpers.js +13 -69
- package/functions/social-orchestrator/helpers/orchestrator_helpers.js +5 -37
- package/functions/social-task-handler/helpers/handler_helpers.js +29 -186
- package/functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers.js +19 -78
- package/functions/task-engine/handler_creator.js +2 -8
- package/functions/task-engine/helpers/update_helpers.js +17 -83
- package/functions/task-engine/helpers/verify_helpers.js +11 -56
- package/functions/task-engine/utils/firestore_batch_manager.js +16 -67
- package/functions/task-engine/utils/task_engine_utils.js +6 -35
- package/index.js +45 -43
- package/package.json +1 -1
@@ -1,26 +1,22 @@
 const { FieldPath } = require('@google-cloud/firestore');
-// --- MODIFIED: Import streamPortfolioData ---
 const { getPortfolioPartRefs, loadFullDayMap, loadDataByRefs, loadDailyInsights, loadDailySocialPostInsights, getHistoryPartRefs, streamPortfolioData, streamHistoryData } = require('../utils/data_loader.js');
 const { normalizeName, commitBatchInChunks } = require('../utils/utils.js');
-
-const { batchStoreSchemas } = require('../utils/schema_capture');
+const { batchStoreSchemas } = require('../utils/schema_capture.js');

 /** Stage 1: Group manifest by pass number */
 function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[calc.pass] = acc[calc.pass] || []).push(calc); return acc; }, {}); }

 /** * --- MODIFIED: Returns detailed missing dependencies for logging ---
 * Stage 2: Check root data dependencies for a calc
-* --- THIS FUNCTION IS NOW MORE GRANULAR ---
 */
 function checkRootDependencies(calcManifest, rootDataStatus) {
 const missing = [];
 if (!calcManifest.rootDataDependencies || !calcManifest.rootDataDependencies.length) { return { canRun: true, missing };}
 for (const dep of calcManifest.rootDataDependencies) {
-if (dep === 'portfolio'
-else if (dep === 'insights' && !rootDataStatus.hasInsights)
-else if (dep === 'social'
-else if (dep === 'history'
-}
+if (dep === 'portfolio' && !rootDataStatus.hasPortfolio) missing.push('portfolio');
+else if (dep === 'insights' && !rootDataStatus.hasInsights) missing.push('insights');
+else if (dep === 'social' && !rootDataStatus.hasSocial) missing.push('social');
+else if (dep === 'history' && !rootDataStatus.hasHistory) missing.push('history'); }
 return { canRun: missing.length === 0, missing };
 }

@@ -40,7 +36,7 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
 if (dateToProcess >= earliestDates.social) {tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(res => {socialData = res;hasSocial = !!res;}));}
 if (dateToProcess >= earliestDates.history) {tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(res => {historyRefs = res;hasHistory = !!(res?.length);}));}
 await Promise.all(tasks);
-logger.log('INFO', `[PassRunner] Data availability for ${dateStr}: P:${hasPortfolio}, I:${
+logger.log('INFO', `[PassRunner] Data availability for ${dateStr}: P:${hasPortfolio}, I:${hasInsights}, S:${hasSocial}, H:${hasHistory}`);
 if (!(hasPortfolio || hasInsights || hasSocial || hasHistory)) { logger.log('WARN', `[PassRunner] No root data at all for ${dateStr}.`); return null; }
 return { portfolioRefs, todayInsights: insightsData, todaySocialPostInsights: socialData, historyRefs, status: { hasPortfolio, hasInsights, hasSocial, hasHistory } };
 } catch (err) { logger.log('ERROR', `[PassRunner] Error checking data for ${dateStr}`, { errorMessage: err.message }); return null; }
@@ -77,9 +73,9 @@ function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, existingRe
 const dependencies = calc.rootDataDependencies || [];
 for (const dep of dependencies) {
 if (dep === 'portfolio' && earliestDates.portfolio > earliestRunDate) earliestRunDate = earliestDates.portfolio;
-if (dep === 'history'
-if (dep === 'social'
-if (dep === 'insights'
+if (dep === 'history' && earliestDates.history > earliestRunDate) earliestRunDate = earliestDates.history;
+if (dep === 'social' && earliestDates.social > earliestRunDate) earliestRunDate = earliestDates.social;
+if (dep === 'insights' && earliestDates.insights > earliestRunDate) earliestRunDate = earliestDates.insights;
 }
 if (calc.isHistorical && earliestRunDate.getTime() > 0) { earliestRunDate.setUTCDate(earliestRunDate.getUTCDate() + 1); }
 return earliestRunDate;
@@ -103,14 +99,16 @@ function initializeCalculators(calcs, logger) { const state = {}; for (const c o

 /** * Stage 7: Load historical data required for calculations
 */
-// --- MODIFIED: Stage 7: Load ONLY non-streaming historical data ---
 async function loadHistoricalData(date, calcs, config, deps, rootData) {
 const { logger } = deps;
 const updated = {...rootData};
 const tasks = [];
 const needsYesterdayInsights = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('insights'));
 const needsYesterdaySocial = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('social'));
-
+const needsYesterdayPortfolio = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('portfolio'));
+const prev = new Date(date);
+prev.setUTCDate(prev.getUTCDate() - 1);
+const prevStr = prev.toISOString().slice(0, 10);
 if(needsYesterdayInsights) {
 tasks.push((async()=>{ const prev=new Date(date); prev.setUTCDate(prev.getUTCDate()-1); const prevStr=prev.toISOString().slice(0,10);
 logger.log('INFO', `[PassRunner] Loading YESTERDAY insights data for ${prevStr}`);
@@ -120,240 +118,120 @@ async function loadHistoricalData(date, calcs, config, deps, rootData) {
 logger.log('INFO', `[PassRunner] Loading YESTERDAY social data for ${prevStr}`);
 updated.yesterdaySocialPostInsights=await loadDailySocialPostInsights(config,deps,prevStr); })());}

+if(needsYesterdayPortfolio) {
+tasks.push((async()=>{
+logger.log('INFO', `[PassRunner] Getting YESTERDAY portfolio refs for ${prevStr}`);
+updated.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
+})());
+}
 await Promise.all(tasks);
 return updated;
 }

 /**
 * --- REFACTORED: Stage 8: Stream and process data for standard calculations ---
-* This function now streams today's portfolios, yesterday's portfolios,
-* and today's history data in parallel to avoid OOM errors.
-* It loads chunks of all three streams, processes UIDs found in the
-* main (today's portfolio) stream, and then deletes processed users
-* from the historical maps to free memory.
 */
-async function streamAndProcess(dateStr, state, passName, config, deps, rootData) {
+async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs) {
 const { logger, calculationUtils } = deps;
 const { todayInsights, yesterdayInsights, todaySocialPostInsights, yesterdaySocialPostInsights } = rootData;
 const calcsThatStreamPortfolio = Object.values(state).filter(calc => calc && calc.manifest && (calc.manifest.rootDataDependencies.includes('portfolio') || calc.manifest.category === 'speculators'));
 const context={instrumentMappings:(await calculationUtils.loadInstrumentMappings()).instrumentToTicker, sectorMapping:(await calculationUtils.loadInstrumentMappings()).instrumentToSector, todayDateStr:dateStr, dependencies:deps, config};
 let firstUser=true;
-
-
-
-
-
-
-
-
-
-args=[null,null,null,{...context, userType: 'n/a'},todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,null,null];
-}
-try{ await Promise.resolve(calc.process(...args)); } catch(e){logger.log('WARN',`Process error ${name} (non-stream)`,{err:e.message});}
-}
-}
-}
-
-if (calcsThatStreamPortfolio.length === 0) {
-logger.log('INFO', `[${passName}] No portfolio-streaming calcs to run for ${dateStr}. Skipping stream.`);
-return;
-}
-
+for(const name in state){ const calc=state[name]; if(!calc||typeof calc.process!=='function') continue;
+const cat=calc.manifest.category;
+if(cat==='socialPosts'||cat==='insights') {
+if (firstUser) {
+logger.log('INFO', `[${passName}] Running non-streaming calc: ${name}`);
+let args=[null,null,null,{...context, userType: 'n/a'},todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,null,null];
+if(calc.manifest.isHistorical) { args=[null,null,null,{...context, userType: 'n/a'},todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,null,null]; }
+try{ await Promise.resolve(calc.process(...args)); } catch(e){logger.log('WARN',`Process error ${name} (non-stream)`,{err:e.message});} } } }
+if (calcsThatStreamPortfolio.length === 0) { logger.log('INFO', `[${passName}] No portfolio-streaming calcs to run for ${dateStr}. Skipping stream.`); return; }
 logger.log('INFO', `[${passName}] Streaming portfolio & historical data for ${calcsThatStreamPortfolio.length} calcs...`);
-
 const prevDate = new Date(dateStr + 'T00:00:00Z');
 prevDate.setUTCDate(prevDate.getUTCDate() - 1);
 const prevDateStr = prevDate.toISOString().slice(0, 10);
-
 const needsYesterdayPortfolio = Object.values(state).some(c => c && c.manifest.isHistorical && c.manifest.rootDataDependencies.includes('portfolio'));
 const needsTodayHistory = Object.values(state).some(c => c && c.manifest.rootDataDependencies.includes('history'));
-
-const
-const hT_iterator = needsTodayHistory ? streamHistoryData(config, deps, dateStr) : null;
-
+const yP_iterator = needsYesterdayPortfolio ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
+const hT_iterator = needsTodayHistory ? streamHistoryData(config, deps, dateStr, historyRefs) : null;
 let yesterdayPortfolios = {};
 let todayHistoryData = {};
-
-if (
-
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-context.userType=userType;
-
-const pY = yesterdayPortfolios[uid] || null; // Yesterday's Portfolio
-const hT = todayHistoryData[uid] || null; // Today's History
-// (Note: yesterdayHistoryData (hY) would require another stream if needed)
-
-for(const name in state){
-const calc=state[name]; if(!calc||typeof calc.process!=='function') continue;
-const cat=calc.manifest.category, isSocialOrInsights=cat==='socialPosts'||cat==='insights', isHistorical=calc.manifest.isHistorical, isSpec=cat==='speculators';
-
-if(isSocialOrInsights) continue; // Skip non-streaming calcs
-
-let args=[p,null,uid,context,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,hT,null];
-
-if(isHistorical){
-if(!pY && (cat !== 'behavioural' && name !== 'historical-performance-aggregator')) continue;
-args=[p,pY,uid,context,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,hT,null];
-}
-
-if((userType==='normal'&&isSpec)||(userType==='speculator'&&!isSpec&&name!=='users-processed')) continue;
-
-try{ await Promise.resolve(calc.process(...args)); } catch(e){logger.log('WARN',`Process error ${name} for ${uid}`,{err:e.message});}
-}
-firstUser=false;
-
-if (pY) { delete yesterdayPortfolios[uid]; }
-if (hT) { delete todayHistoryData[uid]; }
-}
-}
+if (yP_iterator) { Object.assign(yesterdayPortfolios, (await yP_iterator.next()).value || {}); logger.log('INFO', `[${passName}] Loaded first chunk of yesterday's portfolios.`); }
+if (hT_iterator) { Object.assign(todayHistoryData, (await hT_iterator.next()).value || {}); logger.log('INFO', `[${passName}] Loaded first chunk of today's history.`); }
+for await (const chunk of streamPortfolioData(config, deps, dateStr, portfolioRefs)) {
+if (yP_iterator) { Object.assign(yesterdayPortfolios, (await yP_iterator.next()).value || {}); }
+if (hT_iterator) { Object.assign(todayHistoryData, (await hT_iterator.next()).value || {}); }
+for(const uid in chunk){
+const p = chunk[uid]; if(!p) continue;
+const userType=p.PublicPositions?'speculator':'normal';
+context.userType=userType;
+const pY = yesterdayPortfolios[uid] || null;
+const hT = todayHistoryData[uid] || null;
+for(const name in state){
+const calc=state[name]; if(!calc||typeof calc.process!=='function') continue;
+const cat=calc.manifest.category, isSocialOrInsights=cat==='socialPosts'||cat==='insights', isHistorical=calc.manifest.isHistorical, isSpec=cat==='speculators';
+if(isSocialOrInsights) continue;
+let args=[p,null,uid,context,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,hT,null];
+if(isHistorical){ if(!pY && (cat !== 'behavioural' && name !== 'historical-performance-aggregator')) continue; args=[p,pY,uid,context,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,hT,null]; }
+if((userType==='normal'&&isSpec)||(userType==='speculator'&&!isSpec&&name!=='users-processed')) continue;
+try{ await Promise.resolve(calc.process(...args)); } catch(e){logger.log('WARN',`Process error ${name} for ${uid}`,{err:e.message});} }
+firstUser=false;
+if (pY) { delete yesterdayPortfolios[uid]; }
+if (hT) { delete todayHistoryData[uid]; } } }
 logger.log('INFO', `[${passName}] Finished streaming data for ${dateStr}.`);
 }

 /** Stage 9: Run standard computations */
 async function runStandardComputationPass(date, calcs, passName, config, deps, rootData) {
 const dStr = date.toISOString().slice(0, 10), logger = deps.logger;
-if (calcs.length === 0) {
-logger.log('INFO', `[${passName}] No standard calcs to run for ${dStr} after filtering.`);
-return;
-}
+if (calcs.length === 0) { logger.log('INFO', `[${passName}] No standard calcs to run for ${dStr} after filtering.`); return; }
 logger.log('INFO', `[${passName}] Running ${dStr} with ${calcs.length} calcs.`);
 const fullRoot = await loadHistoricalData(date, calcs, config, deps, rootData);
 const state = initializeCalculators(calcs, logger);
-await streamAndProcess(dStr, state, passName, config, deps, fullRoot);
-
+await streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs);
 let success = 0;
 const failedCalcs = [];
 const standardWrites = [];
 const shardedWrites = {};
-
-// === NEW: Collect schemas ===
 const schemasToStore = [];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-// === CHANGED: Capture static schema ===
-const calcClass = calc.manifest.class;
-let staticSchema = null;
-if (calcClass && typeof calcClass.getSchema === 'function') {
-try {
-staticSchema = calcClass.getSchema();
-} catch (e) {
-logger.log('WARN', `[SchemaCapture] Failed to get static schema for ${name}`, { err: e.message });
-}
-} else {
-logger.log('TRACE', `[SchemaCapture] No static schema found for ${name}. Skipping manifest entry.`);
-}
-
-if (staticSchema) {
-schemasToStore.push({
-name,
-category: calc.manifest.category,
-schema: staticSchema, // <-- Use the static schema
-metadata: {
-isHistorical: calc.manifest.isHistorical || false,
-dependencies: calc.manifest.dependencies || [],
-rootDataDependencies: calc.manifest.rootDataDependencies || [],
-pass: calc.manifest.pass,
-type: calc.manifest.type || 'standard'
-}
-});
-}
-// === END CHANGED SECTION ===
-
-success++;
-}
-} catch (e) {
-logger.log('ERROR', `getResult failed ${name} for ${dStr}`, { err: e.message, stack: e.stack });
-failedCalcs.push(name);
-}
-}
-
-// === NEW: Store schemas asynchronously (don't block computation) ===
-if (schemasToStore.length > 0) {
-// This function is now imported from the simplified schema_capture.js
-batchStoreSchemas(deps, config, schemasToStore).catch(err => {
-logger.log('WARN', '[SchemaCapture] Non-blocking schema storage failed', {
-errorMessage: err.message
-});
-});
-}
-
-if (standardWrites.length > 0) {
-await commitBatchInChunks(config, deps, standardWrites, `${passName} Standard ${dStr}`);
-}
-
+for (const name in state) { const calc = state[name];
+if (!calc || typeof calc.getResult !== 'function') continue;
+try { const result = await Promise.resolve(calc.getResult());
+if (result && Object.keys(result).length > 0) {
+const standardResult = {};
+for (const key in result) {
+if (key.startsWith('sharded_')) {
+const shardedData = result[key];
+for (const collectionName in shardedData) {
+if (!shardedWrites[collectionName]) shardedWrites[collectionName] = {};
+Object.assign(shardedWrites[collectionName], shardedData[collectionName]); }
+} else { standardResult[key] = result[key]; }}
+if (Object.keys(standardResult).length > 0) {
+const docRef = deps.db.collection(config.resultsCollection).doc(dStr) .collection(config.resultsSubcollection).doc(calc.manifest.category) .collection(config.computationsSubcollection).doc(name);
+standardWrites.push({ ref: docRef, data: standardResult });}
+const calcClass = calc.manifest.class;
+let staticSchema = null;
+if (calcClass && typeof calcClass.getSchema === 'function') {
+try { staticSchema = calcClass.getSchema(); } catch (e) { logger.log('WARN', `[SchemaCapture] Failed to get static schema for ${name}`, { err: e.message }); }
+} else { logger.log('TRACE', `[SchemaCapture] No static schema found for ${name}. Skipping manifest entry.`); }
+if (staticSchema) {
+schemasToStore.push({ name, category: calc.manifest.category, schema: staticSchema, metadata: { isHistorical: calc.manifest.isHistorical || false, dependencies: calc.manifest.dependencies || [], rootDataDependencies: calc.manifest.rootDataDependencies || [], pass: calc.manifest.pass, type: calc.manifest.type || 'standard' } }); }
+success++; } } catch (e) { logger.log('ERROR', `getResult failed ${name} for ${dStr}`, { err: e.message, stack: e.stack }); failedCalcs.push(name); } }
+if (schemasToStore.length > 0) { batchStoreSchemas(deps, config, schemasToStore).catch(err => { logger.log('WARN', '[SchemaCapture] Non-blocking schema storage failed', { errorMessage: err.message }); });}
+if (standardWrites.length > 0) { await commitBatchInChunks(config, deps, standardWrites, `${passName} Standard ${dStr}`); }
 for (const docPath in shardedWrites) {
-
-
-
-
-
-
-
-
-
-docRef = deps.db.collection(collection).doc(docPath);
-}
-if (docData && typeof docData === 'object' && !Array.isArray(docData)) {
-shardedDocWrites.push({ ref: docRef, data: docData });
-} else {
-logger.log('ERROR', `[${passName}] Invalid sharded document data for ${docPath}. Not an object.`, { data: docData });
-}
-if (shardedDocWrites.length > 0) {
-await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${docPath} ${dStr}`);
-}
-}
-
+const docData = shardedWrites[docPath];
+const shardedDocWrites = [];
+let docRef;
+if (docPath.includes('/')) { docRef = deps.db.doc(docPath); } else {
+const collection = (docPath.startsWith('user_profile_history')) ? config.shardedUserProfileCollection : config.shardedProfitabilityCollection;
+docRef = deps.db.collection(collection).doc(docPath); }
+if (docData && typeof docData === 'object' && !Array.isArray(docData)) {shardedDocWrites.push({ ref: docRef, data: docData });
+} else { logger.log('ERROR', `[${passName}] Invalid sharded document data for ${docPath}. Not an object.`, { data: docData }); }
+if (shardedDocWrites.length > 0) { await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${docPath} ${dStr}`); } }
 const logMetadata = {};
-if (failedCalcs.length > 0) {
-
-}
-logger.log(
-success === calcs.length ? 'SUCCESS' : 'WARN',
-`[${passName}] Completed ${dStr}. Success: ${success}/${calcs.length}`,
-logMetadata
-);
+if (failedCalcs.length > 0) { logMetadata.failedComputations = failedCalcs; }
+logger.log(success === calcs.length ? 'SUCCESS' : 'WARN', `[${passName}] Completed ${dStr}. Success: ${success}/${calcs.length}`, logMetadata );
 }

 /**
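Note on the refactor above: the new streamAndProcess consumes today's portfolio stream with for await while advancing the yesterday-portfolio and history iterators one chunk at a time, and deletes each processed uid from the side maps so memory stays bounded. The snippet below is a minimal, illustrative sketch of that interleaving pattern, not code from the package; it assumes the stream helpers are async generators yielding uid-keyed objects, and fakeStream, processInterleaved, and handleUser are hypothetical names. The real function additionally pre-loads one chunk from each side iterator before entering the loop.

// Hypothetical stand-in for streamPortfolioData / streamHistoryData: an async generator
// that yields uid-keyed chunks.
async function* fakeStream(chunks) {
  for (const chunk of chunks) yield chunk;
}

// Consume the main stream chunk by chunk, advance the side iterators in step, and
// free side-map entries once a uid has been processed.
async function processInterleaved(todayStream, yesterdayIterator, historyIterator, handleUser) {
  const yesterdayPortfolios = {};
  const todayHistoryData = {};
  for await (const chunk of todayStream) {
    if (yesterdayIterator) Object.assign(yesterdayPortfolios, (await yesterdayIterator.next()).value || {});
    if (historyIterator) Object.assign(todayHistoryData, (await historyIterator.next()).value || {});
    for (const uid in chunk) {
      await handleUser(uid, chunk[uid], yesterdayPortfolios[uid] || null, todayHistoryData[uid] || null);
      delete yesterdayPortfolios[uid]; // free memory once this user is done
      delete todayHistoryData[uid];
    }
  }
}

// Usage with in-memory data:
processInterleaved(
  fakeStream([{ u1: { value: 1 } }, { u2: { value: 2 } }]),
  fakeStream([{ u1: { value: 0 } }, {}]),
  null,
  async (uid, today, yesterday, history) => console.log(uid, today, yesterday, history),
).catch(console.error);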
@@ -361,116 +239,44 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
 */
 async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, rootData) {
 const dStr = date.toISOString().slice(0, 10), logger = deps.logger;
-if (calcs.length === 0) {
-logger.log('INFO', `[${passName}] No meta calcs to run for ${dStr} after filtering.`);
-return;
-}
+if (calcs.length === 0) { logger.log('INFO', `[${passName}] No meta calcs to run for ${dStr} after filtering.`); return; }
 logger.log('INFO', `[${passName}] Running ${dStr} with ${calcs.length} calcs.`);
 const fullRoot = await loadHistoricalData(date, calcs, config, deps, rootData);
-
 let success = 0;
 const failedCalcs = [];
 const standardWrites = [];
 const shardedWrites = {};
-
-// === NEW: Collect schemas ===
 const schemasToStore = [];
-
 for (const mCalc of calcs) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-.collection(config.resultsSubcollection).doc(mCalc.category)
-.collection(config.computationsSubcollection).doc(name);
-standardWrites.push({ ref: docRef, data: standardResult });
-}
-
-// === CHANGED: Capture static schema ===
-const calcClass = mCalc.class;
-let staticSchema = null;
-if (calcClass && typeof calcClass.getSchema === 'function') {
-try {
-staticSchema = calcClass.getSchema();
-} catch (e) {
-logger.log('WARN', `[SchemaCapture] Failed to get static schema for ${name}`, { err: e.message });
-}
-} else {
-logger.log('TRACE', `[SchemaCapture] No static schema found for ${name}. Skipping manifest entry.`);
-}
-
-if (staticSchema) {
-schemasToStore.push({
-name,
-category: mCalc.category,
-schema: staticSchema, // <-- Use the static schema
-metadata: {
-isHistorical: mCalc.isHistorical || false,
-dependencies: mCalc.dependencies || [],
-rootDataDependencies: mCalc.rootDataDependencies || [],
-pass: mCalc.pass,
-type: 'meta'
-}
-});
-}
-// === END CHANGED SECTION ===
-
-success++;
-}
-} catch (e) {
-logger.log('ERROR', `Meta-calc failed ${name} for ${dStr}`, { err: e.message, stack: e.stack });
-failedCalcs.push(name);
-}
-}
-
-// === NEW: Store schemas asynchronously ===
-if (schemasToStore.length > 0) {
-// This function is now imported from the simplified schema_capture.js
-batchStoreSchemas(deps, config, schemasToStore).catch(err => {
-logger.log('WARN', '[SchemaCapture] Non-blocking schema storage failed', {
-errorMessage: err.message
-});
-});
-}
-
-if (standardWrites.length > 0) {
-await commitBatchInChunks(config, deps, standardWrites, `${passName} Meta ${dStr}`);
-}
-
+const name = normalizeName(mCalc.name), Cl = mCalc.class;
+if (typeof Cl !== 'function') { logger.log('ERROR', `Invalid class ${name}`); failedCalcs.push(name); continue; }
+const inst = new Cl();
+try { const result = await Promise.resolve(inst.process(dStr, { ...deps, rootData: fullRoot }, config, fetchedDeps));
+if (result && Object.keys(result).length > 0) {
+const standardResult = {};
+for (const key in result) {
+if (key.startsWith('sharded_')) { const shardedData = result[key]; for (const collectionName in shardedData) {
+if (!shardedWrites[collectionName]) shardedWrites[collectionName] = {}; Object.assign(shardedWrites[collectionName], shardedData[collectionName]); }
+} else { standardResult[key] = result[key]; } }
+if (Object.keys(standardResult).length > 0) {
+const docRef = deps.db.collection(config.resultsCollection).doc(dStr) .collection(config.resultsSubcollection).doc(mCalc.category) .collection(config.computationsSubcollection).doc(name);
+standardWrites.push({ ref: docRef, data: standardResult }); }
+const calcClass = mCalc.class;
+let staticSchema = null;
+if (calcClass && typeof calcClass.getSchema === 'function') {
+try { staticSchema = calcClass.getSchema();
+} catch (e) { logger.log('WARN', `[SchemaCapture] Failed to get static schema for ${name}`, { err: e.message }); }
+} else { logger.log('TRACE', `[SchemaCapture] No static schema found for ${name}. Skipping manifest entry.`); }
+if (staticSchema) { schemasToStore.push({ name, category: mCalc.category, schema: staticSchema, metadata: { isHistorical: mCalc.isHistorical || false, dependencies: mCalc.dependencies || [], rootDataDependencies: mCalc.rootDataDependencies || [], pass: mCalc.pass, type: 'meta' } }); }
+success++; }
+} catch (e) { logger.log('ERROR', `Meta-calc failed ${name} for ${dStr}`, { err: e.message, stack: e.stack }); failedCalcs.push(name); } }
+if (schemasToStore.length > 0) { batchStoreSchemas(deps, config, schemasToStore).catch(err => { logger.log('WARN', '[SchemaCapture] Non-blocking schema storage failed', { errorMessage: err.message }); }); }
+if (standardWrites.length > 0) { await commitBatchInChunks(config, deps, standardWrites, `${passName} Meta ${dStr}`);}
 for (const collectionName in shardedWrites) {
-
-
-
-
-? deps.db.doc(docId)
-: deps.db.collection(collectionName).doc(docId);
-shardedDocWrites.push({ ref: docRef, data: docs[docId] });
-}
-if (shardedDocWrites.length > 0) {
-await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${collectionName} ${dStr}`);
-}
-}
-
+const docs = shardedWrites[collectionName];
+const shardedDocWrites = [];
+for (const docId in docs) { const docRef = docId.includes('/') ? deps.db.doc(docId) : deps.db.collection(collectionName).doc(docId); shardedDocWrites.push({ ref: docRef, data: docs[docId] }); }
+if (shardedDocWrites.length > 0) { await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${collectionName} ${dStr}`); } }
 const logMetadata = {};
 if (failedCalcs.length > 0) { logMetadata.failedComputations = failedCalcs; }
 logger.log( success === calcs.length ? 'SUCCESS' : 'WARN', `[${passName}] Completed ${dStr}. Success: ${success}/${calcs.length}`, logMetadata );