bulltrackers-module 1.0.211 → 1.0.213

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,559 +1,543 @@
1
- /**
2
- * FILENAME: bulltrackers-module/functions/computation-system/helpers/orchestration_helpers.js
3
- * FIXED: 'commitResults' now isolates commits PER COMPUTATION.
4
- * A single failure (e.g., size limit) will only fail that specific calculation,
5
- * allowing others in the same pass/date to succeed and be recorded.
6
- */
7
-
8
- const { ComputationController } = require('../controllers/computation_controller');
9
- const { batchStoreSchemas } = require('../utils/schema_capture');
10
- const { normalizeName, commitBatchInChunks } = require('../utils/utils');
11
- const {
12
- getPortfolioPartRefs, loadDailyInsights, loadDailySocialPostInsights,
13
- getHistoryPartRefs, streamPortfolioData, streamHistoryData,
14
- getRelevantShardRefs, loadDataByRefs
15
- } = require('../utils/data_loader');
16
-
17
- const {
18
- DataExtractor, HistoryExtractor, MathPrimitives, Aggregators,
19
- Validators, SCHEMAS, SignalPrimitives, DistributionAnalytics,
20
- TimeSeries, priceExtractor
21
- } = require('../layers/math_primitives.js');
22
-
23
- const pLimit = require('p-limit');
24
-
25
- /**
26
- * Groups calculations from a manifest by their 'pass' property.
27
- */
28
- function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[calc.pass] = acc[calc.pass] || []).push(calc); return acc; }, {}); }
29
-
30
- /**
31
- * --- PASSIVE DATA VALIDATION ---
32
- */
33
- function validateResultPatterns(logger, calcName, results, category) {
34
- if (category === 'speculator' || category === 'speculators') return;
35
-
36
- const tickers = Object.keys(results);
37
- const totalItems = tickers.length;
38
-
39
- if (totalItems < 5) return;
40
-
41
- const sampleTicker = tickers.find(t => results[t] && typeof results[t] === 'object');
42
- if (!sampleTicker) return;
43
-
44
- const keys = Object.keys(results[sampleTicker]);
45
-
46
- keys.forEach(key => {
47
- if (key.startsWith('_')) return;
48
-
49
- let nullCount = 0;
50
- let nanCount = 0;
51
- let undefinedCount = 0;
52
-
53
- for (const t of tickers) {
54
- const val = results[t][key];
55
- if (val === null) nullCount++;
56
- if (val === undefined) undefinedCount++;
57
- if (typeof val === 'number' && isNaN(val)) nanCount++;
58
- }
59
-
60
- if (nanCount === totalItems) {
61
- logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is NaN for 100% of ${totalItems} items.`);
62
- } else if (undefinedCount === totalItems) {
63
- logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is UNDEFINED for 100% of ${totalItems} items.`);
64
- }
65
- else if (nullCount > (totalItems * 0.9)) {
66
- logger.log('WARN', `[DataQuality] Calc '${calcName}' field '${key}' is NULL for ${nullCount}/${totalItems} items.`);
67
- }
68
- });
69
- }
70
-
71
- function checkRootDependencies(calcManifest, rootDataStatus) {
72
- const missing = [];
73
- if (!calcManifest.rootDataDependencies) return { canRun: true, missing };
74
- for (const dep of calcManifest.rootDataDependencies) {
75
- if (dep === 'portfolio' && !rootDataStatus.hasPortfolio) missing.push('portfolio');
76
- else if (dep === 'insights' && !rootDataStatus.hasInsights) missing.push('insights');
77
- else if (dep === 'social' && !rootDataStatus.hasSocial) missing.push('social');
78
- else if (dep === 'history' && !rootDataStatus.hasHistory) missing.push('history');
79
- else if (dep === 'price' && !rootDataStatus.hasPrices) missing.push('price');
80
- }
81
- return { canRun: missing.length === 0, missing };
82
- }
83
-
84
- async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
85
- const { logger } = dependencies;
86
- const dateToProcess = new Date(dateStr + 'T00:00:00Z');
87
- let portfolioRefs = [], historyRefs = [];
88
- let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false, hasPrices = false;
89
- let insightsData = null, socialData = null;
90
-
91
- try {
92
- const tasks = [];
93
- if (dateToProcess >= earliestDates.portfolio) tasks.push(getPortfolioPartRefs (config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio = !!r.length; }));
94
- if (dateToProcess >= earliestDates.insights) tasks.push(loadDailyInsights (config, dependencies, dateStr).then(r => { insightsData = r; hasInsights = !!r; }));
95
- if (dateToProcess >= earliestDates.social) tasks.push(loadDailySocialPostInsights (config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
96
- if (dateToProcess >= earliestDates.history) tasks.push(getHistoryPartRefs (config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));
97
-
98
- if (dateToProcess >= earliestDates.price) {
99
- tasks.push(checkPriceDataAvailability(config, dependencies).then(r => { hasPrices = r; }));
100
- }
101
-
102
- await Promise.all(tasks);
103
-
104
- if (!(hasPortfolio || hasInsights || hasSocial || hasHistory || hasPrices)) return null;
105
-
106
- return {
107
- portfolioRefs,
108
- historyRefs,
109
- todayInsights: insightsData,
110
- todaySocialPostInsights: socialData,
111
- status: { hasPortfolio, hasInsights, hasSocial, hasHistory, hasPrices }
112
- };
113
-
114
- } catch (err) {
115
- logger.log('ERROR', `Error checking data: ${err.message}`);
116
- return null;
117
- }
118
- }
119
-
120
- async function checkPriceDataAvailability(config, dependencies) {
121
- const { db, logger } = dependencies;
122
- const collection = config.priceCollection || 'asset_prices';
123
- try {
124
- const snapshot = await db.collection(collection).limit(1).get();
125
- if (snapshot.empty) {
126
- logger.log('WARN', `[checkPriceData] No price shards found in ${collection}`);
127
- return false;
128
- }
129
- return true;
130
- } catch (e) {
131
- logger.log('ERROR', `[checkPriceData] Failed to check price availability: ${e.message}`);
132
- return false;
133
- }
134
- }
135
-
136
- async function fetchComputationStatus(dateStr, config, { db }) {
137
- const collection = config.computationStatusCollection || 'computation_status';
138
- const docRef = db.collection(collection).doc(dateStr);
139
- const snap = await docRef.get();
140
- return snap.exists ? snap.data() : {};
141
- }
142
-
143
- async function fetchGlobalComputationStatus(config, { db }) {
144
- const collection = config.computationStatusCollection || 'computation_status';
145
- const docRef = db.collection(collection).doc('global_status');
146
- const snap = await docRef.get();
147
- return snap.exists ? snap.data() : {};
148
- }
149
-
150
- async function updateComputationStatus(dateStr, updates, config, { db }) {
151
- if (!updates || Object.keys(updates).length === 0) return;
152
- const collection = config.computationStatusCollection || 'computation_status';
153
- const docRef = db.collection(collection).doc(dateStr);
154
- await docRef.set(updates, { merge: true });
155
- }
156
-
157
- async function updateGlobalComputationStatus(updatesByDate, config, { db }) {
158
- if (!updatesByDate || Object.keys(updatesByDate).length === 0) return;
159
- const collection = config.computationStatusCollection || 'computation_status';
160
- const docRef = db.collection(collection).doc('global_status');
161
- const flattenUpdates = {};
162
- for (const [date, statuses] of Object.entries(updatesByDate)) {
163
- for (const [calc, status] of Object.entries(statuses)) {
164
- flattenUpdates[`${date}.${calc}`] = status;
165
- }
166
- }
167
- try {
168
- await docRef.update(flattenUpdates);
169
- } catch (err) {
170
- if (err.code === 5) {
171
- const deepObj = {};
172
- for (const [date, statuses] of Object.entries(updatesByDate)) {
173
- deepObj[date] = statuses;
174
- }
175
- await docRef.set(deepObj, { merge: true });
176
- } else {
177
- throw err;
178
- }
179
- }
180
- }
181
-
182
- async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db }, includeSelf = false) {
183
- const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
184
- const calcsToFetch = new Set();
185
- for (const calc of calcsInPass) {
186
- if (calc.dependencies) { calc.dependencies.forEach(d => calcsToFetch.add(normalizeName(d))); }
187
- if (includeSelf && calc.isHistorical) { calcsToFetch.add(normalizeName(calc.name)); }
188
- }
189
- if (!calcsToFetch.size) return {};
190
- const fetched = {};
191
- const docRefs = [];
192
- const names = [];
193
- for (const name of calcsToFetch) {
194
- const m = manifestMap.get(name);
195
- if (m) {
196
- docRefs.push(db.collection(config.resultsCollection).doc(dateStr)
197
- .collection(config.resultsSubcollection).doc(m.category || 'unknown')
198
- .collection(config.computationsSubcollection).doc(name));
199
- names.push(name);
200
- }
201
- }
202
- if (docRefs.length) {
203
- const snaps = await db.getAll(...docRefs);
204
- snaps.forEach((doc, i) => { if (doc.exists && doc.data()._completed) { fetched[names[i]] = doc.data(); } });
205
- }
206
- return fetched;
207
- }
208
-
209
- async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps) {
210
- const { logger } = deps;
211
- const controller = new ComputationController(config, deps);
212
- const calcs = Object.values(state).filter(c => c && c.manifest);
213
- const streamingCalcs = calcs.filter(c =>
214
- c.manifest.rootDataDependencies.includes('portfolio') ||
215
- c.manifest.rootDataDependencies.includes('history')
216
- );
217
-
218
- if (streamingCalcs.length === 0) return;
219
-
220
- logger.log('INFO', `[${passName}] Streaming for ${streamingCalcs.length} computations...`);
221
-
222
- await controller.loader.loadMappings();
223
- const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
224
- const prevDateStr = prevDate.toISOString().slice(0, 10);
225
-
226
- const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
227
- const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
228
- const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
229
- const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
230
- const tH_iter = (needsTradingHistory && historyRefs) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;
231
-
232
- let yP_chunk = {};
233
- let tH_chunk = {};
234
-
235
- for await (const tP_chunk of tP_iter) {
236
- if (yP_iter) yP_chunk = (await yP_iter.next()).value || {};
237
- if (tH_iter) tH_chunk = (await tH_iter.next()).value || {};
238
-
239
- const promises = streamingCalcs.map(calc =>
240
- controller.executor.executePerUser(
241
- calc,
242
- calc.manifest,
243
- dateStr,
244
- tP_chunk,
245
- yP_chunk,
246
- tH_chunk,
247
- fetchedDeps,
248
- previousFetchedDeps
249
- )
250
- );
251
- await Promise.all(promises);
252
- }
253
- logger.log('INFO', `[${passName}] Streaming complete.`);
254
- }
255
-
256
- async function runStandardComputationPass(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps, skipStatusWrite = false) {
257
- const dStr = date.toISOString().slice(0, 10);
258
- const logger = deps.logger;
259
- const fullRoot = { ...rootData };
260
- if (calcs.some(c => c.isHistorical)) {
261
- const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
262
- const prevStr = prev.toISOString().slice(0, 10);
263
- fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
264
- }
265
-
266
- const state = {};
267
- for (const c of calcs) {
268
- try {
269
- const inst = new c.class();
270
- inst.manifest = c;
271
- state[normalizeName(c.name)] = inst;
272
- logger.log('INFO', `${c.name} calculation running for ${dStr}`);
273
- }
274
- catch (e) { logger.log('WARN', `Failed to init ${c.name}`); }
275
- }
276
-
277
- await streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps);
278
- return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
279
- }
280
-
281
- async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, previousFetchedDeps, rootData, skipStatusWrite = false) {
282
- const controller = new ComputationController(config, deps);
283
- const dStr = date.toISOString().slice(0, 10);
284
- const state = {};
285
-
286
- for (const mCalc of calcs) {
287
- try {
288
- deps.logger.log('INFO', `${mCalc.name} calculation running for ${dStr}`);
289
- const inst = new mCalc.class();
290
- inst.manifest = mCalc;
291
- await controller.executor.executeOncePerDay(inst, mCalc, dStr, fetchedDeps, previousFetchedDeps);
292
- state[normalizeName(mCalc.name)] = inst;
293
- } catch (e) { deps.logger.log('ERROR', `Meta calc failed ${mCalc.name}: ${e.message}`); }
294
- }
295
- return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
296
- }
297
-
298
- /**
299
- * --- REFACTORED: commitResults ---
300
- * Commits results individually per calculation.
301
- * If one calculation fails (e.g. size limit), others still succeed.
302
- */
303
- async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
304
- const successUpdates = {};
305
- const schemas = [];
306
-
307
- // Iterate PER CALCULATION to isolate failures
308
- for (const name in stateObj) {
309
- const calc = stateObj[name];
310
- let hasData = false;
311
-
312
- try {
313
- const result = await calc.getResult();
314
- if (!result) {
315
- deps.logger.log('INFO', `${name} for ${dStr}: Skipped (Empty Result)`);
316
- continue;
317
- }
318
-
319
- const standardRes = {};
320
- const shardedWrites = [];
321
- const calcWrites = []; // Accumulate all writes for THIS specific calculation
322
-
323
- // 1. Separate Standard and Sharded Data
324
- for (const key in result) {
325
- if (key.startsWith('sharded_')) {
326
- const sData = result[key];
327
- // sData structure: { CollectionName: { DocId: { ...data } } }
328
- for (const colName in sData) {
329
- const docsMap = sData[colName];
330
- for (const docId in docsMap) {
331
- // Support both full path or collection-relative path
332
- const ref = docId.includes('/') ? deps.db.doc(docId) : deps.db.collection(colName).doc(docId);
333
- shardedWrites.push({
334
- ref,
335
- data: { ...docsMap[docId], _completed: true }
336
- });
337
- }
338
- }
339
- if (Object.keys(sData).length > 0) hasData = true;
340
- } else {
341
- standardRes[key] = result[key];
342
- }
343
- }
344
-
345
- // 2. Prepare Standard Result Write
346
- if (Object.keys(standardRes).length) {
347
- validateResultPatterns(deps.logger, name, standardRes, calc.manifest.category);
348
- standardRes._completed = true;
349
-
350
- const docRef = deps.db.collection(config.resultsCollection).doc(dStr)
351
- .collection(config.resultsSubcollection).doc(calc.manifest.category)
352
- .collection(config.computationsSubcollection).doc(name);
353
-
354
- calcWrites.push({
355
- ref: docRef,
356
- data: standardRes
357
- });
358
- hasData = true;
359
- }
360
-
361
- // 3. Queue Schema (Safe to accumulate)
362
- if (calc.manifest.class.getSchema) {
363
- const { class: _cls, ...safeMetadata } = calc.manifest;
364
- schemas.push({
365
- name, category: calc.manifest.category, schema: calc.manifest.class.getSchema(), metadata: safeMetadata
366
- });
367
- }
368
-
369
- // 4. ATTEMPT COMMIT FOR THIS CALCULATION ONLY
370
- if (hasData) {
371
- // Combine standard + sharded writes for this unit of work
372
- const allWritesForCalc = [...calcWrites, ...shardedWrites];
373
-
374
- if (allWritesForCalc.length > 0) {
375
- await commitBatchInChunks(config, deps, allWritesForCalc, `${name} Results`);
376
-
377
- // IF we get here, the commit succeeded.
378
- successUpdates[name] = true;
379
- deps.logger.log('INFO', `${name} for ${dStr}: \u2714 Success (Written)`);
380
- } else {
381
- deps.logger.log('INFO', `${name} for ${dStr}: - No Data to Write`);
382
- }
383
- } else {
384
- deps.logger.log('INFO', `${name} for ${dStr}: - Empty`);
385
- }
386
-
387
- } catch (e) {
388
- // CRITICAL: Catch errors here so the loop continues for other calculations
389
- deps.logger.log('ERROR', `${name} for ${dStr}: \u2716 FAILED Commit: ${e.message}`);
390
- // Do NOT add to successUpdates
391
- }
392
- }
393
-
394
- // Save Schemas (Best effort, isolated)
395
- if (schemas.length) batchStoreSchemas(deps, config, schemas).catch(() => { });
396
-
397
- // Update Status Document (Only for the ones that succeeded)
398
- if (!skipStatusWrite && Object.keys(successUpdates).length > 0) {
399
- await updateComputationStatus(dStr, successUpdates, config, deps);
400
- deps.logger.log('INFO', `[${passName}] Updated status document for ${Object.keys(successUpdates).length} successful computations.`);
401
- }
402
- return successUpdates;
403
- }
404
-
405
- /**
406
- * --- UPDATED: runBatchPriceComputation ---
407
- */
408
- async function runBatchPriceComputation(config, deps, dateStrings, calcs, targetTickers = []) {
409
- const { logger, db, calculationUtils } = deps;
410
- const controller = new ComputationController(config, deps);
411
-
412
- const mappings = await controller.loader.loadMappings();
413
-
414
- let targetInstrumentIds = [];
415
- if (targetTickers && targetTickers.length > 0) {
416
- const tickerToInst = mappings.tickerToInstrument || {};
417
- targetInstrumentIds = targetTickers.map(t => tickerToInst[t]).filter(id => id);
418
- if (targetInstrumentIds.length === 0) {
419
- logger.log('WARN', '[BatchPrice] Target tickers provided but no IDs found. Aborting.');
420
- return;
421
- }
422
- }
423
-
424
- const allShardRefs = await getRelevantShardRefs(config, deps, targetInstrumentIds);
425
-
426
- if (!allShardRefs.length) {
427
- logger.log('WARN', '[BatchPrice] No relevant price shards found. Exiting.');
428
- return;
429
- }
430
-
431
- const OUTER_CONCURRENCY_LIMIT = 2;
432
- const SHARD_BATCH_SIZE = 20;
433
- const WRITE_BATCH_LIMIT = 50;
434
-
435
- logger.log('INFO', `[BatchPrice] Execution Plan: ${dateStrings.length} days, ${allShardRefs.length} shards. Concurrency: ${OUTER_CONCURRENCY_LIMIT}.`);
436
-
437
- const shardChunks = [];
438
- for (let i = 0; i < allShardRefs.length; i += SHARD_BATCH_SIZE) {
439
- shardChunks.push(allShardRefs.slice(i, i + SHARD_BATCH_SIZE));
440
- }
441
-
442
- const outerLimit = pLimit(OUTER_CONCURRENCY_LIMIT);
443
-
444
- const chunkPromises = [];
445
- for (let index = 0; index < shardChunks.length; index++) {
446
- const shardChunkRefs = shardChunks[index];
447
- chunkPromises.push(outerLimit(async () => {
448
- try {
449
- logger.log('INFO', `[BatchPrice] Processing chunk ${index + 1}/${shardChunks.length} (${shardChunkRefs.length} shards)...`);
450
-
451
- const pricesData = await loadDataByRefs(config, deps, shardChunkRefs);
452
-
453
- if (targetInstrumentIds.length > 0) {
454
- const requestedSet = new Set(targetInstrumentIds);
455
- for (const loadedInstrumentId in pricesData) {
456
- if (!requestedSet.has(loadedInstrumentId)) {
457
- delete pricesData[loadedInstrumentId];
458
- }
459
- }
460
- }
461
-
462
- // We now accumulate writes per calc to allow partial success, though batching optimization is tricky here.
463
- // For safety, let's keep the existing structure but wrap individual calc processing in try/catch
464
- // inside the write phase if possible.
465
- // However, runBatchPrice is optimized for BULK throughput.
466
- // To prevent total failure, we will use a safe array.
467
- const writes = [];
468
-
469
- for (const dateStr of dateStrings) {
470
- const context = {
471
- mappings,
472
- prices: { history: pricesData },
473
- date: { today: dateStr },
474
- math: {
475
- extract: DataExtractor,
476
- history: HistoryExtractor,
477
- compute: MathPrimitives,
478
- aggregate: Aggregators,
479
- validate: Validators,
480
- signals: SignalPrimitives,
481
- schemas: SCHEMAS,
482
- distribution: DistributionAnalytics,
483
- TimeSeries: TimeSeries,
484
- priceExtractor: priceExtractor
485
- }
486
- };
487
-
488
- for (const calcManifest of calcs) {
489
- try {
490
- // logger.log('INFO', `[BatchPrice] >> Running ${calcManifest.name} for ${dateStr}...`); // Verbose
491
- const instance = new calcManifest.class();
492
- await instance.process(context);
493
- const result = await instance.getResult();
494
-
495
- if (result && Object.keys(result).length > 0) {
496
- let dataToWrite = result;
497
- if (result.by_instrument) dataToWrite = result.by_instrument;
498
-
499
- if (Object.keys(dataToWrite).length > 0) {
500
- const docRef = db.collection(config.resultsCollection).doc(dateStr)
501
- .collection(config.resultsSubcollection).doc(calcManifest.category)
502
- .collection(config.computationsSubcollection).doc(normalizeName(calcManifest.name));
503
-
504
- writes.push({
505
- ref: docRef,
506
- data: { ...dataToWrite, _completed: true },
507
- options: { merge: true }
508
- });
509
- }
510
- }
511
- } catch (err) {
512
- logger.log('ERROR', `[BatchPrice] \u2716 Failed ${calcManifest.name} for ${dateStr}: ${err.message}`);
513
- }
514
- }
515
- }
516
-
517
- if (writes.length > 0) {
518
- const commitBatches = [];
519
- for (let i = 0; i < writes.length; i += WRITE_BATCH_LIMIT) {
520
- commitBatches.push(writes.slice(i, i + WRITE_BATCH_LIMIT));
521
- }
522
-
523
- const commitLimit = pLimit(10);
524
-
525
- await Promise.all(commitBatches.map((batchWrites, bIndex) => commitLimit(async () => {
526
- const batch = db.batch();
527
- batchWrites.forEach(w => batch.set(w.ref, w.data, w.options));
528
-
529
- try {
530
- await calculationUtils.withRetry(() => batch.commit(), `BatchPrice-C${index}-B${bIndex}`);
531
- } catch (commitErr) {
532
- logger.log('ERROR', `[BatchPrice] Commit failed for Chunk ${index} Batch ${bIndex}.`, { error: commitErr.message });
533
- }
534
- })));
535
- }
536
-
537
- } catch (chunkErr) {
538
- logger.log('ERROR', `[BatchPrice] Fatal error processing Chunk ${index}.`, { error: chunkErr.message });
539
- }
540
- }));
541
- }
542
-
543
- await Promise.all(chunkPromises);
544
- logger.log('INFO', '[BatchPrice] Optimization pass complete.');
545
- }
546
-
547
- module.exports = {
548
- groupByPass,
549
- checkRootDependencies,
550
- checkRootDataAvailability,
551
- fetchExistingResults,
552
- fetchComputationStatus,
553
- fetchGlobalComputationStatus,
554
- updateComputationStatus,
555
- updateGlobalComputationStatus,
556
- runStandardComputationPass,
557
- runMetaComputationPass,
558
- runBatchPriceComputation
1
+ /**
2
+ * FILENAME: bulltrackers-module/functions/computation-system/helpers/orchestration_helpers.js
3
+ * FIXED: 'commitResults' now records the CODE HASH in the status document
4
+ * instead of a boolean, enabling auto-invalidation on code changes.
5
+ */
6
+
7
+ const { ComputationController } = require('../controllers/computation_controller');
8
+ const { batchStoreSchemas } = require('../utils/schema_capture');
9
+ const { normalizeName, commitBatchInChunks } = require('../utils/utils');
10
+ const {
11
+ getPortfolioPartRefs, loadDailyInsights, loadDailySocialPostInsights,
12
+ getHistoryPartRefs, streamPortfolioData, streamHistoryData,
13
+ getRelevantShardRefs, loadDataByRefs
14
+ } = require('../utils/data_loader');
15
+
16
+ const {
17
+ DataExtractor, HistoryExtractor, MathPrimitives, Aggregators,
18
+ Validators, SCHEMAS, SignalPrimitives, DistributionAnalytics,
19
+ TimeSeries, priceExtractor
20
+ } = require('../layers/math_primitives.js');
21
+
22
+ const pLimit = require('p-limit');
23
+
24
+ function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[calc.pass] = acc[calc.pass] || []).push(calc); return acc; }, {}); }
25
+
26
+ /**
27
+ * --- PASSIVE DATA VALIDATION ---
28
+ */
29
+ function validateResultPatterns(logger, calcName, results, category) {
30
+ if (category === 'speculator' || category === 'speculators') return;
31
+
32
+ const tickers = Object.keys(results);
33
+ const totalItems = tickers.length;
34
+
35
+ if (totalItems < 5) return;
36
+
37
+ const sampleTicker = tickers.find(t => results[t] && typeof results[t] === 'object');
38
+ if (!sampleTicker) return;
39
+
40
+ const keys = Object.keys(results[sampleTicker]);
41
+
42
+ keys.forEach(key => {
43
+ if (key.startsWith('_')) return;
44
+
45
+ let nullCount = 0;
46
+ let nanCount = 0;
47
+ let undefinedCount = 0;
48
+
49
+ for (const t of tickers) {
50
+ const val = results[t][key];
51
+ if (val === null) nullCount++;
52
+ if (val === undefined) undefinedCount++;
53
+ if (typeof val === 'number' && isNaN(val)) nanCount++;
54
+ }
55
+
56
+ if (nanCount === totalItems) {
57
+ logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is NaN for 100% of ${totalItems} items.`);
58
+ } else if (undefinedCount === totalItems) {
59
+ logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is UNDEFINED for 100% of ${totalItems} items.`);
60
+ }
61
+ else if (nullCount > (totalItems * 0.9)) {
62
+ logger.log('WARN', `[DataQuality] Calc '${calcName}' field '${key}' is NULL for ${nullCount}/${totalItems} items.`);
63
+ }
64
+ });
65
+ }
66
+
67
+ function checkRootDependencies(calcManifest, rootDataStatus) {
68
+ const missing = [];
69
+ if (!calcManifest.rootDataDependencies) return { canRun: true, missing };
70
+ for (const dep of calcManifest.rootDataDependencies) {
71
+ if (dep === 'portfolio' && !rootDataStatus.hasPortfolio) missing.push('portfolio');
72
+ else if (dep === 'insights' && !rootDataStatus.hasInsights) missing.push('insights');
73
+ else if (dep === 'social' && !rootDataStatus.hasSocial) missing.push('social');
74
+ else if (dep === 'history' && !rootDataStatus.hasHistory) missing.push('history');
75
+ else if (dep === 'price' && !rootDataStatus.hasPrices) missing.push('price');
76
+ }
77
+ return { canRun: missing.length === 0, missing };
78
+ }
79
+
80
+ async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
81
+ const { logger } = dependencies;
82
+ const dateToProcess = new Date(dateStr + 'T00:00:00Z');
83
+ let portfolioRefs = [], historyRefs = [];
84
+ let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false, hasPrices = false;
85
+ let insightsData = null, socialData = null;
86
+
87
+ try {
88
+ const tasks = [];
89
+ if (dateToProcess >= earliestDates.portfolio) tasks.push(getPortfolioPartRefs (config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio = !!r.length; }));
90
+ if (dateToProcess >= earliestDates.insights) tasks.push(loadDailyInsights (config, dependencies, dateStr).then(r => { insightsData = r; hasInsights = !!r; }));
91
+ if (dateToProcess >= earliestDates.social) tasks.push(loadDailySocialPostInsights (config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
92
+ if (dateToProcess >= earliestDates.history) tasks.push(getHistoryPartRefs (config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));
93
+
94
+ if (dateToProcess >= earliestDates.price) {
95
+ tasks.push(checkPriceDataAvailability(config, dependencies).then(r => { hasPrices = r; }));
96
+ }
97
+
98
+ await Promise.all(tasks);
99
+
100
+ if (!(hasPortfolio || hasInsights || hasSocial || hasHistory || hasPrices)) return null;
101
+
102
+ return {
103
+ portfolioRefs,
104
+ historyRefs,
105
+ todayInsights: insightsData,
106
+ todaySocialPostInsights: socialData,
107
+ status: { hasPortfolio, hasInsights, hasSocial, hasHistory, hasPrices },
108
+ yesterdayPortfolioRefs: null // Will be populated if needed
109
+ };
110
+
111
+ } catch (err) {
112
+ logger.log('ERROR', `Error checking data: ${err.message}`);
113
+ return null;
114
+ }
115
+ }
116
+
117
+ async function checkPriceDataAvailability(config, dependencies) {
118
+ const { db } = dependencies;
119
+ const collection = config.priceCollection || 'asset_prices';
120
+ try {
121
+ const snapshot = await db.collection(collection).limit(1).get();
122
+ if (snapshot.empty) return false;
123
+ return true;
124
+ } catch (e) {
125
+ return false;
126
+ }
127
+ }
128
+
129
+ async function fetchComputationStatus(dateStr, config, { db }) {
130
+ const collection = config.computationStatusCollection || 'computation_status';
131
+ const docRef = db.collection(collection).doc(dateStr);
132
+ const snap = await docRef.get();
133
+ return snap.exists ? snap.data() : {};
134
+ }
135
+
136
+ async function fetchGlobalComputationStatus(config, { db }) {
137
+ const collection = config.computationStatusCollection || 'computation_status';
138
+ const docRef = db.collection(collection).doc('global_status');
139
+ const snap = await docRef.get();
140
+ return snap.exists ? snap.data() : {};
141
+ }
142
+
143
+ async function updateComputationStatus(dateStr, updates, config, { db }) {
144
+ if (!updates || Object.keys(updates).length === 0) return;
145
+ const collection = config.computationStatusCollection || 'computation_status';
146
+ const docRef = db.collection(collection).doc(dateStr);
147
+ await docRef.set(updates, { merge: true });
148
+ }
149
+
150
+ async function updateGlobalComputationStatus(updatesByDate, config, { db }) {
151
+ if (!updatesByDate || Object.keys(updatesByDate).length === 0) return;
152
+ const collection = config.computationStatusCollection || 'computation_status';
153
+ const docRef = db.collection(collection).doc('global_status');
154
+ const flattenUpdates = {};
155
+ for (const [date, statuses] of Object.entries(updatesByDate)) {
156
+ for (const [calc, status] of Object.entries(statuses)) {
157
+ flattenUpdates[`${date}.${calc}`] = status;
158
+ }
159
+ }
160
+ try {
161
+ await docRef.update(flattenUpdates);
162
+ } catch (err) {
163
+ if (err.code === 5) {
164
+ const deepObj = {};
165
+ for (const [date, statuses] of Object.entries(updatesByDate)) {
166
+ deepObj[date] = statuses;
167
+ }
168
+ await docRef.set(deepObj, { merge: true });
169
+ } else {
170
+ throw err;
171
+ }
172
+ }
173
+ }
174
+
175
+ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db }, includeSelf = false) {
176
+ const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
177
+ const calcsToFetch = new Set();
178
+ for (const calc of calcsInPass) {
179
+ if (calc.dependencies) { calc.dependencies.forEach(d => calcsToFetch.add(normalizeName(d))); }
180
+ if (includeSelf && calc.isHistorical) { calcsToFetch.add(normalizeName(calc.name)); }
181
+ }
182
+ if (!calcsToFetch.size) return {};
183
+ const fetched = {};
184
+ const docRefs = [];
185
+ const names = [];
186
+ for (const name of calcsToFetch) {
187
+ const m = manifestMap.get(name);
188
+ if (m) {
189
+ docRefs.push(db.collection(config.resultsCollection).doc(dateStr)
190
+ .collection(config.resultsSubcollection).doc(m.category || 'unknown')
191
+ .collection(config.computationsSubcollection).doc(name));
192
+ names.push(name);
193
+ }
194
+ }
195
+ if (docRefs.length) {
196
+ const snaps = await db.getAll(...docRefs);
197
+ snaps.forEach((doc, i) => { if (doc.exists && doc.data()._completed) { fetched[names[i]] = doc.data(); } });
198
+ }
199
+ return fetched;
200
+ }
201
+
202
/**
 * Drives the chunked streaming phase of a pass: pulls today's portfolio data
 * chunk by chunk and feeds each chunk (plus matching yesterday-portfolio and
 * trading-history chunks, when needed) to every streaming calculation.
 *
 * @param {string} dateStr       Date being computed (YYYY-MM-DD).
 * @param {Object} state         Map of normalized calc name -> calc instance.
 * @param {string} passName      Pass label, used only for logging.
 * @param {Object} config        Collection-name configuration.
 * @param {Object} deps          Injected dependencies (logger, db, ...).
 * @param {Object} rootData      Root refs; may carry `yesterdayPortfolioRefs`.
 * @param {Array}  portfolioRefs Shard refs for today's portfolio data.
 * @param {Array}  historyRefs   Shard refs for trading history (may be null).
 * @param {Object} fetchedDeps         Dependency results for today.
 * @param {Object} previousFetchedDeps Dependency results for the prior day.
 */
async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps) {
    const { logger } = deps;
    const controller = new ComputationController(config, deps);
    // Only instances that carry a manifest are runnable.
    const calcs = Object.values(state).filter(c => c && c.manifest);
    // Streaming is only required for calcs that read portfolio or history root data.
    const streamingCalcs = calcs.filter(c =>
        c.manifest.rootDataDependencies.includes('portfolio') ||
        c.manifest.rootDataDependencies.includes('history')
    );

    if (streamingCalcs.length === 0) return;

    logger.log('INFO', `[${passName}] Streaming for ${streamingCalcs.length} computations...`);

    await controller.loader.loadMappings();
    // Derive yesterday's date in UTC for historical (day-over-day) calcs.
    const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
    const prevDateStr = prevDate.toISOString().slice(0, 10);

    // Today's portfolio stream always drives the loop; the other two streams
    // are optional and advanced in lockstep with it below.
    const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
    const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
    const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
    const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
    const tH_iter = (needsTradingHistory && historyRefs) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;

    let yP_chunk = {};
    let tH_chunk = {};

    for await (const tP_chunk of tP_iter) {
        // Advance the optional streams one chunk per portfolio chunk; a
        // shorter stream simply yields empty objects once exhausted.
        // NOTE(review): assumes the three streams produce aligned chunks
        // (same user partitioning) — confirm against the data_loader impl.
        if (yP_iter) yP_chunk = (await yP_iter.next()).value || {};
        if (tH_iter) tH_chunk = (await tH_iter.next()).value || {};

        // Run every streaming calc against this chunk in parallel, then wait
        // before pulling the next chunk so memory stays bounded to one chunk.
        const promises = streamingCalcs.map(calc =>
            controller.executor.executePerUser(
                calc,
                calc.manifest,
                dateStr,
                tP_chunk,
                yP_chunk,
                tH_chunk,
                fetchedDeps,
                previousFetchedDeps
            )
        );
        await Promise.all(promises);
    }
    logger.log('INFO', `[${passName}] Streaming complete.`);
}
248
+
249
/**
 * Runs one standard (per-user streaming) computation pass for a single date:
 * instantiates each calculation, streams root data through them, then commits
 * results per-calculation via commitResults.
 *
 * @param {Date}   date      Day to compute (only the UTC date part is used).
 * @param {Array}  calcs     Manifests (each with a `class` constructor) to run.
 * @param {string} passName  Pass label, used only for logging.
 * @param {Object} config    Collection-name configuration.
 * @param {Object} deps      Injected dependencies (logger, db, ...).
 * @param {Object} rootData  Root refs (portfolioRefs, historyRefs, ...).
 * @param {Object} fetchedDeps         Dependency results for today.
 * @param {Object} previousFetchedDeps Dependency results for the prior day.
 * @param {boolean} [skipStatusWrite=false] Passed through to commitResults.
 * @returns {Object} Map of calc name -> status written, from commitResults.
 */
async function runStandardComputationPass(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps, skipStatusWrite = false) {
    const dStr = date.toISOString().slice(0, 10);
    const logger = deps.logger;
    const fullRoot = { ...rootData };
    // Historical calcs also need yesterday's portfolio refs for day-over-day deltas.
    if (calcs.some(c => c.isHistorical)) {
        const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
        const prevStr = prev.toISOString().slice(0, 10);
        fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
    }

    // Instantiate each calculation; a single bad constructor must not abort
    // the whole pass, so failures are logged and the calc is skipped.
    const state = {};
    for (const c of calcs) {
        try {
            const inst = new c.class();
            inst.manifest = c;
            state[normalizeName(c.name)] = inst;
            logger.log('INFO', `${c.name} calculation running for ${dStr}`);
        }
        catch (e) {
            // FIX: include the failure reason instead of silently dropping it
            // (matches the error logging style used elsewhere in this file).
            logger.log('WARN', `Failed to init ${c.name}: ${e.message}`);
        }
    }

    await streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps);
    return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
}
273
+
274
/**
 * Runs once-per-day "meta" calculations that operate purely on previously
 * fetched dependency results (no root-data streaming), then commits their
 * output per-calculation via commitResults.
 *
 * @param {Date}   date      Day to compute (only the UTC date part is used).
 * @param {Array}  calcs     Manifests (each with a `class` constructor) to run.
 * @param {string} passName  Pass label, used only for logging.
 * @param {Object} config    Collection-name configuration.
 * @param {Object} deps      Injected dependencies (logger, db, ...).
 * @param {Object} fetchedDeps         Dependency results for today.
 * @param {Object} previousFetchedDeps Dependency results for the prior day.
 * @param {Object} rootData  Accepted for signature parity; not used here.
 * @param {boolean} [skipStatusWrite=false] Passed through to commitResults.
 * @returns {Object} Map of calc name -> status written, from commitResults.
 */
async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, previousFetchedDeps, rootData, skipStatusWrite = false) {
    const dStr = date.toISOString().slice(0, 10);
    const controller = new ComputationController(config, deps);
    const completed = {};

    for (const manifest of calcs) {
        try {
            deps.logger.log('INFO', `${manifest.name} calculation running for ${dStr}`);
            const instance = new manifest.class();
            instance.manifest = manifest;
            await controller.executor.executeOncePerDay(instance, manifest, dStr, fetchedDeps, previousFetchedDeps);
            completed[normalizeName(manifest.name)] = instance;
        } catch (e) {
            // Isolate failures: one broken meta calc must not stop the rest.
            deps.logger.log('ERROR', `Meta calc failed ${manifest.name}: ${e.message}`);
        }
    }

    return await commitResults(completed, dStr, passName, config, deps, skipStatusWrite);
}
290
+
291
/**
 * Commits each calculation's results individually so that one failure (e.g.
 * a Firestore document size limit) only fails that calculation, allowing the
 * others in the same pass/date to still be written and recorded.
 *
 * For every calc that commits successfully, its manifest hash (or `true`
 * when no hash is present) is written into the daily status document.
 *
 * @param {Object} stateObj  Map of normalized calc name -> calc instance
 *                           (each exposing getResult() and a `manifest`).
 * @param {string} dStr      Date being committed (YYYY-MM-DD).
 * @param {string} passName  Pass label, used only for logging.
 * @param {Object} config    Collection-name configuration.
 * @param {Object} deps      Injected dependencies (db, logger, ...).
 * @param {boolean} [skipStatusWrite=false] Skip the status-document update.
 * @returns {Object} Map of calc name -> stored hash (or true) for successes.
 */
async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
    const successUpdates = {};
    const schemas = [];

    // Iterate PER CALCULATION to isolate failures
    for (const name in stateObj) {
        const calc = stateObj[name];
        let hasData = false;

        try {
            const result = await calc.getResult();
            if (!result) {
                deps.logger.log('INFO', `${name} for ${dStr}: Skipped (Empty Result)`);
                continue;
            }

            const standardRes = {};
            const shardedWrites = [];
            const calcWrites = [];

            // 1. Separate Standard and Sharded Data
            // Keys prefixed 'sharded_' hold { collectionName: { docId: data } }
            // maps destined for arbitrary collections; everything else goes
            // into this calc's single standard result document.
            for (const key in result) {
                if (key.startsWith('sharded_')) {
                    const sData = result[key];
                    for (const colName in sData) {
                        const docsMap = sData[colName];
                        for (const docId in docsMap) {
                            // A docId containing '/' is treated as a full document path.
                            const ref = docId.includes('/') ? deps.db.doc(docId) : deps.db.collection(colName).doc(docId);
                            shardedWrites.push({
                                ref,
                                data: { ...docsMap[docId], _completed: true }
                            });
                        }
                    }
                    if (Object.keys(sData).length > 0) hasData = true;
                } else {
                    standardRes[key] = result[key];
                }
            }

            // 2. Prepare Standard Result Write
            if (Object.keys(standardRes).length) {
                // Passive validation only logs anomalies; it never blocks the write.
                validateResultPatterns(deps.logger, name, standardRes, calc.manifest.category);
                standardRes._completed = true;

                const docRef = deps.db.collection(config.resultsCollection).doc(dStr)
                    .collection(config.resultsSubcollection).doc(calc.manifest.category)
                    .collection(config.computationsSubcollection).doc(name);

                calcWrites.push({
                    ref: docRef,
                    data: standardRes
                });
                hasData = true;
            }

            // 3. Queue Schema (Safe to accumulate)
            // The manifest's `class` constructor is stripped before storage.
            if (calc.manifest.class.getSchema) {
                const { class: _cls, ...safeMetadata } = calc.manifest;
                schemas.push({
                    name, category: calc.manifest.category, schema: calc.manifest.class.getSchema(), metadata: safeMetadata
                });
            }

            // 4. ATTEMPT COMMIT FOR THIS CALCULATION ONLY
            if (hasData) {
                const allWritesForCalc = [...calcWrites, ...shardedWrites];

                if (allWritesForCalc.length > 0) {
                    await commitBatchInChunks(config, deps, allWritesForCalc, `${name} Results`);

                    // --- CRITICAL UPDATE: Store the Smart Hash ---
                    // Recorded in the status doc; `true` when no hash exists.
                    successUpdates[name] = calc.manifest.hash || true;

                    deps.logger.log('INFO', `${name} for ${dStr}: \u2714 Success (Written)`);
                } else {
                    deps.logger.log('INFO', `${name} for ${dStr}: - No Data to Write`);
                }
            } else {
                deps.logger.log('INFO', `${name} for ${dStr}: - Empty`);
            }

        } catch (e) {
            // A failure here affects only this calc; the loop continues and
            // this calc is simply omitted from successUpdates.
            deps.logger.log('ERROR', `${name} for ${dStr}: \u2716 FAILED Commit: ${e.message}`);
        }
    }

    // Save Schemas (Best effort, isolated)
    if (schemas.length) batchStoreSchemas(deps, config, schemas).catch(() => { });

    // Update Status Document (Only for the ones that succeeded)
    if (!skipStatusWrite && Object.keys(successUpdates).length > 0) {
        await updateComputationStatus(dStr, successUpdates, config, deps);
        deps.logger.log('INFO', `[${passName}] Updated status document for ${Object.keys(successUpdates).length} successful computations.`);
    }
    return successUpdates;
}
394
+
395
/**
 * Batch-optimized runner for price-based computations: loads each chunk of
 * price shards ONCE and replays it across every requested date, merging the
 * per-chunk output into each date's result documents.
 *
 * @param {Object} config        Collection-name configuration.
 * @param {Object} deps          Injected deps (logger, db, calculationUtils).
 * @param {string[]} dateStrings Dates (YYYY-MM-DD) to compute.
 * @param {Array}  calcs         Manifests (each with a `class`) to execute.
 * @param {string[]} [targetTickers=[]] Optional ticker filter; empty = all.
 */
async function runBatchPriceComputation(config, deps, dateStrings, calcs, targetTickers = []) {
    const { logger, db, calculationUtils } = deps;
    const controller = new ComputationController(config, deps);

    const mappings = await controller.loader.loadMappings();

    // Resolve ticker symbols to instrument IDs; abort if a filter was given
    // but none of its tickers resolve.
    let targetInstrumentIds = [];
    if (targetTickers && targetTickers.length > 0) {
        const tickerToInst = mappings.tickerToInstrument || {};
        targetInstrumentIds = targetTickers.map(t => tickerToInst[t]).filter(id => id);
        if (targetInstrumentIds.length === 0) {
            logger.log('WARN', '[BatchPrice] Target tickers provided but no IDs found. Aborting.');
            return;
        }
    }

    const allShardRefs = await getRelevantShardRefs(config, deps, targetInstrumentIds);

    if (!allShardRefs.length) {
        logger.log('WARN', '[BatchPrice] No relevant price shards found. Exiting.');
        return;
    }

    // Tuning knobs: how many shard chunks load in parallel, how many shard
    // refs per chunk, and how many writes per Firestore batch commit.
    const OUTER_CONCURRENCY_LIMIT = 2;
    const SHARD_BATCH_SIZE = 20;
    const WRITE_BATCH_LIMIT = 50;

    logger.log('INFO', `[BatchPrice] Execution Plan: ${dateStrings.length} days, ${allShardRefs.length} shards. Concurrency: ${OUTER_CONCURRENCY_LIMIT}.`);

    // Partition shard refs so each chunk's price data is loaded once and
    // reused for every requested date.
    const shardChunks = [];
    for (let i = 0; i < allShardRefs.length; i += SHARD_BATCH_SIZE) {
        shardChunks.push(allShardRefs.slice(i, i + SHARD_BATCH_SIZE));
    }

    const outerLimit = pLimit(OUTER_CONCURRENCY_LIMIT);

    const chunkPromises = [];
    for (let index = 0; index < shardChunks.length; index++) {
        const shardChunkRefs = shardChunks[index];
        chunkPromises.push(outerLimit(async () => {
            try {
                logger.log('INFO', `[BatchPrice] Processing chunk ${index + 1}/${shardChunks.length} (${shardChunkRefs.length} shards)...`);

                const pricesData = await loadDataByRefs(config, deps, shardChunkRefs);

                // Loaded shards may contain extra instruments; drop any that
                // were not explicitly requested.
                if (targetInstrumentIds.length > 0) {
                    const requestedSet = new Set(targetInstrumentIds);
                    for (const loadedInstrumentId in pricesData) {
                        if (!requestedSet.has(loadedInstrumentId)) {
                            delete pricesData[loadedInstrumentId];
                        }
                    }
                }

                const writes = [];

                for (const dateStr of dateStrings) {
                    // Per-date execution context handed to every calc:
                    // mappings, this chunk's price history, and math helpers.
                    const context = {
                        mappings,
                        prices: { history: pricesData },
                        date: { today: dateStr },
                        math: {
                            extract: DataExtractor,
                            history: HistoryExtractor,
                            compute: MathPrimitives,
                            aggregate: Aggregators,
                            validate: Validators,
                            signals: SignalPrimitives,
                            schemas: SCHEMAS,
                            distribution: DistributionAnalytics,
                            TimeSeries: TimeSeries,
                            priceExtractor: priceExtractor
                        }
                    };

                    for (const calcManifest of calcs) {
                        try {
                            const instance = new calcManifest.class();
                            await instance.process(context);
                            const result = await instance.getResult();

                            if (result && Object.keys(result).length > 0) {
                                // Calcs that expose a `by_instrument` map have
                                // that map written instead of the full result.
                                let dataToWrite = result;
                                if (result.by_instrument) dataToWrite = result.by_instrument;

                                if (Object.keys(dataToWrite).length > 0) {
                                    const docRef = db.collection(config.resultsCollection).doc(dateStr)
                                        .collection(config.resultsSubcollection).doc(calcManifest.category)
                                        .collection(config.computationsSubcollection).doc(normalizeName(calcManifest.name));

                                    // merge:true so writes from other shard
                                    // chunks targeting the same doc are kept.
                                    writes.push({
                                        ref: docRef,
                                        data: { ...dataToWrite, _completed: true },
                                        options: { merge: true }
                                    });
                                }
                            }
                        } catch (err) {
                            logger.log('ERROR', `[BatchPrice] \u2716 Failed ${calcManifest.name} for ${dateStr}: ${err.message}`);
                        }
                    }
                }

                // Commit this chunk's writes in size-limited batches, up to
                // 10 batch commits in flight at once.
                if (writes.length > 0) {
                    const commitBatches = [];
                    for (let i = 0; i < writes.length; i += WRITE_BATCH_LIMIT) {
                        commitBatches.push(writes.slice(i, i + WRITE_BATCH_LIMIT));
                    }

                    const commitLimit = pLimit(10);

                    await Promise.all(commitBatches.map((batchWrites, bIndex) => commitLimit(async () => {
                        const batch = db.batch();
                        batchWrites.forEach(w => batch.set(w.ref, w.data, w.options));

                        try {
                            await calculationUtils.withRetry(() => batch.commit(), `BatchPrice-C${index}-B${bIndex}`);
                        } catch (commitErr) {
                            // A failed batch is logged but does not abort the
                            // chunk; other batches still commit.
                            logger.log('ERROR', `[BatchPrice] Commit failed for Chunk ${index} Batch ${bIndex}.`, { error: commitErr.message });
                        }
                    })));
                }

            } catch (chunkErr) {
                logger.log('ERROR', `[BatchPrice] Fatal error processing Chunk ${index}.`, { error: chunkErr.message });
            }
        }));
    }

    await Promise.all(chunkPromises);
    logger.log('INFO', '[BatchPrice] Optimization pass complete.');
}
530
+
531
// Public orchestration API. Note: streamAndProcess and commitResults are
// intentionally kept module-private (not listed here).
module.exports = {
    groupByPass,
    checkRootDependencies,
    checkRootDataAvailability,
    fetchExistingResults,
    fetchComputationStatus,
    fetchGlobalComputationStatus,
    updateComputationStatus,
    updateGlobalComputationStatus,
    runStandardComputationPass,
    runMetaComputationPass,
    runBatchPriceComputation
};