bulltrackers-module 1.0.211 → 1.0.213
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/controllers/computation_controller.js +199 -188
- package/functions/computation-system/helpers/computation_dispatcher.js +90 -90
- package/functions/computation-system/helpers/computation_manifest_builder.js +327 -283
- package/functions/computation-system/helpers/computation_pass_runner.js +168 -157
- package/functions/computation-system/helpers/computation_worker.js +85 -85
- package/functions/computation-system/helpers/orchestration_helpers.js +542 -558
- package/functions/computation-system/layers/extractors.js +279 -0
- package/functions/computation-system/layers/index.js +40 -0
- package/functions/computation-system/layers/math_primitives.js +743 -743
- package/functions/computation-system/layers/mathematics.js +397 -0
- package/functions/computation-system/layers/profiling.js +287 -0
- package/functions/computation-system/layers/validators.js +170 -0
- package/functions/computation-system/utils/schema_capture.js +63 -63
- package/functions/computation-system/utils/utils.js +22 -1
- package/functions/task-engine/helpers/update_helpers.js +17 -49
- package/package.json +1 -1
|
@@ -1,559 +1,543 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* FILENAME: bulltrackers-module/functions/computation-system/helpers/orchestration_helpers.js
|
|
3
|
-
* FIXED: 'commitResults' now
|
|
4
|
-
*
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
const {
|
|
9
|
-
const {
|
|
10
|
-
const {
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
const
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
}
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
}
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
if (dep === '
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
if (dateToProcess >= earliestDates.
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
};
|
|
113
|
-
|
|
114
|
-
}
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
}
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
}
|
|
134
|
-
}
|
|
135
|
-
|
|
136
|
-
async function
|
|
137
|
-
const collection = config.computationStatusCollection || 'computation_status';
|
|
138
|
-
const docRef
|
|
139
|
-
const snap
|
|
140
|
-
return snap.exists ? snap.data() : {};
|
|
141
|
-
}
|
|
142
|
-
|
|
143
|
-
async function
|
|
144
|
-
|
|
145
|
-
const
|
|
146
|
-
const
|
|
147
|
-
|
|
148
|
-
}
|
|
149
|
-
|
|
150
|
-
async function
|
|
151
|
-
if (!
|
|
152
|
-
const collection = config.computationStatusCollection || 'computation_status';
|
|
153
|
-
const docRef = db.collection(collection).doc(
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
}
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
const
|
|
184
|
-
const
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
);
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
const
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
}
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
const
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
const
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
}
|
|
360
|
-
|
|
361
|
-
//
|
|
362
|
-
if (
|
|
363
|
-
const
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
const
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
}
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
await Promise.all(chunkPromises);
|
|
544
|
-
logger.log('INFO', '[BatchPrice] Optimization pass complete.');
|
|
545
|
-
}
|
|
546
|
-
|
|
547
|
-
module.exports = {
|
|
548
|
-
groupByPass,
|
|
549
|
-
checkRootDependencies,
|
|
550
|
-
checkRootDataAvailability,
|
|
551
|
-
fetchExistingResults,
|
|
552
|
-
fetchComputationStatus,
|
|
553
|
-
fetchGlobalComputationStatus,
|
|
554
|
-
updateComputationStatus,
|
|
555
|
-
updateGlobalComputationStatus,
|
|
556
|
-
runStandardComputationPass,
|
|
557
|
-
runMetaComputationPass,
|
|
558
|
-
runBatchPriceComputation
|
|
1
|
+
/**
|
|
2
|
+
* FILENAME: bulltrackers-module/functions/computation-system/helpers/orchestration_helpers.js
|
|
3
|
+
* FIXED: 'commitResults' now records the CODE HASH in the status document
|
|
4
|
+
* instead of a boolean, enabling auto-invalidation on code changes.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
const { ComputationController } = require('../controllers/computation_controller');
|
|
8
|
+
const { batchStoreSchemas } = require('../utils/schema_capture');
|
|
9
|
+
const { normalizeName, commitBatchInChunks } = require('../utils/utils');
|
|
10
|
+
const {
|
|
11
|
+
getPortfolioPartRefs, loadDailyInsights, loadDailySocialPostInsights,
|
|
12
|
+
getHistoryPartRefs, streamPortfolioData, streamHistoryData,
|
|
13
|
+
getRelevantShardRefs, loadDataByRefs
|
|
14
|
+
} = require('../utils/data_loader');
|
|
15
|
+
|
|
16
|
+
const {
|
|
17
|
+
DataExtractor, HistoryExtractor, MathPrimitives, Aggregators,
|
|
18
|
+
Validators, SCHEMAS, SignalPrimitives, DistributionAnalytics,
|
|
19
|
+
TimeSeries, priceExtractor
|
|
20
|
+
} = require('../layers/math_primitives.js');
|
|
21
|
+
|
|
22
|
+
const pLimit = require('p-limit');
|
|
23
|
+
|
|
24
|
+
/**
 * Groups manifest entries by their `pass` property.
 * @param {Array<Object>} manifest - Calculation manifest entries, each carrying a `pass` key.
 * @returns {Object<string, Array<Object>>} Map of pass identifier -> calcs belonging to that pass.
 */
function groupByPass(manifest) {
  const byPass = {};
  for (const calc of manifest) {
    if (!byPass[calc.pass]) byPass[calc.pass] = [];
    byPass[calc.pass].push(calc);
  }
  return byPass;
}
|
|
25
|
+
|
|
26
|
+
/**
 * --- PASSIVE DATA VALIDATION ---
 * Scans a result map (ticker -> field object) and logs data-quality alerts when
 * a field is NaN or undefined for 100% of items, or null for more than 90%.
 * Purely observational: never throws, never mutates `results`.
 *
 * @param {Object} logger - Logger exposing log(level, message).
 * @param {string} calcName - Calculation name used in log messages.
 * @param {Object<string, Object>} results - Per-ticker result objects.
 * @param {string} category - Calculation category; speculator results are skipped.
 */
function validateResultPatterns(logger, calcName, results, category) {
  if (category === 'speculator' || category === 'speculators') return;

  const tickers = Object.keys(results);
  const totalItems = tickers.length;

  // Too few items for the percentage heuristics below to be meaningful.
  if (totalItems < 5) return;

  const sampleTicker = tickers.find(t => results[t] && typeof results[t] === 'object');
  if (!sampleTicker) return;

  // Field names are taken from one sample entry; other entries may be sparse.
  const keys = Object.keys(results[sampleTicker]);

  keys.forEach(key => {
    if (key.startsWith('_')) return; // internal bookkeeping fields (e.g. _completed)

    let nullCount = 0;
    let nanCount = 0;
    let undefinedCount = 0;

    for (const t of tickers) {
      // FIX: guard against null / non-object entries. Only the sample ticker was
      // verified to be an object, so `results[t][key]` could throw a TypeError
      // for sparse result maps and abort the enclosing commit.
      const entry = results[t];
      const val = (entry && typeof entry === 'object') ? entry[key] : undefined;
      if (val === null) nullCount++;
      if (val === undefined) undefinedCount++;
      if (typeof val === 'number' && isNaN(val)) nanCount++;
    }

    if (nanCount === totalItems) {
      logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is NaN for 100% of ${totalItems} items.`);
    } else if (undefinedCount === totalItems) {
      logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is UNDEFINED for 100% of ${totalItems} items.`);
    }
    else if (nullCount > (totalItems * 0.9)) {
      logger.log('WARN', `[DataQuality] Calc '${calcName}' field '${key}' is NULL for ${nullCount}/${totalItems} items.`);
    }
  });
}
|
|
66
|
+
|
|
67
|
+
/**
 * Determines whether a calculation's declared root-data requirements are met.
 * Unknown dependency names are ignored (treated as satisfied).
 *
 * @param {Object} calcManifest - Manifest entry; may declare rootDataDependencies.
 * @param {Object} rootDataStatus - Availability flags (hasPortfolio, hasInsights, ...).
 * @returns {{canRun: boolean, missing: string[]}} Eligibility plus the missing deps.
 */
function checkRootDependencies(calcManifest, rootDataStatus) {
  const missing = [];
  if (!calcManifest.rootDataDependencies) return { canRun: true, missing };
  // Maps each declared dependency name to the status flag that satisfies it.
  const flagByDep = {
    portfolio: 'hasPortfolio',
    insights: 'hasInsights',
    social: 'hasSocial',
    history: 'hasHistory',
    price: 'hasPrices',
  };
  for (const dep of calcManifest.rootDataDependencies) {
    const flag = flagByDep[dep];
    if (flag && !rootDataStatus[flag]) missing.push(dep);
  }
  return { canRun: missing.length === 0, missing };
}
|
|
79
|
+
|
|
80
|
+
/**
 * Probes which root data sources exist for a given date, loading refs/data in parallel.
 *
 * Each source is only checked when the date is on or after its earliest known date
 * (`earliestDates.<source>`). Results are collected through closure side effects so
 * all probes can run concurrently under a single Promise.all.
 *
 * @param {string} dateStr - Date in YYYY-MM-DD form.
 * @param {Object} config - System configuration (collection names, etc.).
 * @param {Object} dependencies - Injected services; must include `logger`.
 * @param {Object} earliestDates - Per-source earliest Date (portfolio, insights, social, history, price).
 * @returns {Promise<Object|null>} Availability bundle (refs, loaded insights/social data,
 *   status flags), or null when nothing is available or any probe throws.
 */
async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
  const { logger } = dependencies;
  const dateToProcess = new Date(dateStr + 'T00:00:00Z');
  let portfolioRefs = [], historyRefs = [];
  let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false, hasPrices = false;
  let insightsData = null, socialData = null;

  try {
    const tasks = [];
    // Each probe writes into the closure variables above; tasks run in parallel.
    if (dateToProcess >= earliestDates.portfolio) tasks.push(getPortfolioPartRefs (config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio = !!r.length; }));
    if (dateToProcess >= earliestDates.insights) tasks.push(loadDailyInsights (config, dependencies, dateStr).then(r => { insightsData = r; hasInsights = !!r; }));
    if (dateToProcess >= earliestDates.social) tasks.push(loadDailySocialPostInsights (config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
    if (dateToProcess >= earliestDates.history) tasks.push(getHistoryPartRefs (config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));

    // Price availability is a global existence check, not date-specific (see checkPriceDataAvailability).
    if (dateToProcess >= earliestDates.price) {
      tasks.push(checkPriceDataAvailability(config, dependencies).then(r => { hasPrices = r; }));
    }

    await Promise.all(tasks);

    // No source available at all -> signal caller to skip this date.
    if (!(hasPortfolio || hasInsights || hasSocial || hasHistory || hasPrices)) return null;

    return {
      portfolioRefs,
      historyRefs,
      todayInsights: insightsData,
      todaySocialPostInsights: socialData,
      status: { hasPortfolio, hasInsights, hasSocial, hasHistory, hasPrices },
      yesterdayPortfolioRefs: null // Will be populated if needed
    };

  } catch (err) {
    // Any probe failure is treated as "no data" for the whole date.
    logger.log('ERROR', `Error checking data: ${err.message}`);
    return null;
  }
}
|
|
116
|
+
|
|
117
|
+
/**
 * Probes whether any price documents exist in the configured price collection.
 * Best effort: any read error is treated as "no price data".
 *
 * @param {Object} config - May override the collection via `priceCollection`.
 * @param {Object} dependencies - Must provide `db` (Firestore-like client).
 * @returns {Promise<boolean>} True when at least one price document exists.
 */
async function checkPriceDataAvailability(config, dependencies) {
  const { db } = dependencies;
  const collectionName = config.priceCollection || 'asset_prices';
  try {
    const probe = await db.collection(collectionName).limit(1).get();
    return !probe.empty;
  } catch (e) {
    return false;
  }
}
|
|
128
|
+
|
|
129
|
+
/**
 * Loads the per-date computation status document.
 *
 * @param {string} dateStr - Date in YYYY-MM-DD form; used as the document id.
 * @param {Object} config - May override the collection via `computationStatusCollection`.
 * @param {{db: Object}} deps - Firestore-like client.
 * @returns {Promise<Object>} Status map, or {} when the document does not exist.
 */
async function fetchComputationStatus(dateStr, config, { db }) {
  const collectionName = config.computationStatusCollection || 'computation_status';
  const snap = await db.collection(collectionName).doc(dateStr).get();
  if (!snap.exists) return {};
  return snap.data();
}
|
|
135
|
+
|
|
136
|
+
/**
 * Loads the shared 'global_status' computation status document.
 *
 * @param {Object} config - May override the collection via `computationStatusCollection`.
 * @param {{db: Object}} deps - Firestore-like client.
 * @returns {Promise<Object>} Global status map, or {} when the document does not exist.
 */
async function fetchGlobalComputationStatus(config, { db }) {
  const collectionName = config.computationStatusCollection || 'computation_status';
  const snap = await db.collection(collectionName).doc('global_status').get();
  return snap.exists ? snap.data() : {};
}
|
|
142
|
+
|
|
143
|
+
/**
 * Merges status updates into the per-date status document.
 * No-op when `updates` is missing or empty.
 *
 * @param {string} dateStr - Date in YYYY-MM-DD form; used as the document id.
 * @param {Object} updates - Calc-name -> status value map to merge.
 * @param {Object} config - May override the collection via `computationStatusCollection`.
 * @param {{db: Object}} deps - Firestore-like client.
 */
async function updateComputationStatus(dateStr, updates, config, { db }) {
  const hasUpdates = updates && Object.keys(updates).length > 0;
  if (!hasUpdates) return;
  const collectionName = config.computationStatusCollection || 'computation_status';
  await db.collection(collectionName).doc(dateStr).set(updates, { merge: true });
}
|
|
149
|
+
|
|
150
|
+
/**
 * Applies per-date calculation statuses to the shared 'global_status' document.
 *
 * Statuses are flattened to dotted field paths ("date.calc") and applied via
 * update() so only the named fields are touched. If the document does not yet
 * exist (error code 5), the function falls back to a merged set() with the
 * nested shape. Any other error is rethrown.
 *
 * @param {Object<string, Object>} updatesByDate - date -> (calcName -> status) map.
 * @param {Object} config - May override the collection via `computationStatusCollection`.
 * @param {{db: Object}} deps - Firestore-like client.
 */
async function updateGlobalComputationStatus(updatesByDate, config, { db }) {
  if (!updatesByDate) return;
  const dateEntries = Object.entries(updatesByDate);
  if (dateEntries.length === 0) return;

  const collectionName = config.computationStatusCollection || 'computation_status';
  const globalRef = db.collection(collectionName).doc('global_status');

  // Flatten to dotted field paths for a targeted update().
  const fieldUpdates = {};
  for (const [date, statuses] of dateEntries) {
    for (const [calcName, status] of Object.entries(statuses)) {
      fieldUpdates[`${date}.${calcName}`] = status;
    }
  }

  try {
    await globalRef.update(fieldUpdates);
  } catch (err) {
    if (err.code !== 5) throw err;
    // Document missing: recreate it with the nested structure instead.
    const nested = {};
    for (const [date, statuses] of dateEntries) nested[date] = statuses;
    await globalRef.set(nested, { merge: true });
  }
}
|
|
174
|
+
|
|
175
|
+
/**
 * Loads previously-committed dependency results for the calcs in a pass.
 *
 * Collects the (normalized) names of every dependency of every calc in the pass;
 * when includeSelf is set, historical calcs also fetch their own prior result.
 * Only documents marked `_completed` are returned.
 *
 * @param {string} dateStr - Date in YYYY-MM-DD form.
 * @param {Array<Object>} calcsInPass - Manifest entries of the current pass.
 * @param {Array<Object>} fullManifest - All manifest entries (used to resolve categories).
 * @param {Object} config - Collection names (resultsCollection, resultsSubcollection, computationsSubcollection).
 * @param {{db: Object}} deps - Firestore-like client supporting getAll().
 * @param {boolean} [includeSelf=false] - Also fetch a historical calc's own prior result.
 * @returns {Promise<Object<string, Object>>} normalized name -> completed result data.
 */
async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db }, includeSelf = false) {
  const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
  const calcsToFetch = new Set();
  for (const calc of calcsInPass) {
    if (calc.dependencies) { calc.dependencies.forEach(d => calcsToFetch.add(normalizeName(d))); }
    if (includeSelf && calc.isHistorical) { calcsToFetch.add(normalizeName(calc.name)); }
  }
  if (!calcsToFetch.size) return {};
  const fetched = {};
  // docRefs and names are kept index-aligned so results can be matched back below.
  const docRefs = [];
  const names = [];
  for (const name of calcsToFetch) {
    const m = manifestMap.get(name);
    // Names without a manifest entry are silently skipped (no category to resolve).
    if (m) {
      docRefs.push(db.collection(config.resultsCollection).doc(dateStr)
        .collection(config.resultsSubcollection).doc(m.category || 'unknown')
        .collection(config.computationsSubcollection).doc(name));
      names.push(name);
    }
  }
  if (docRefs.length) {
    // Single batched read for all dependency documents.
    const snaps = await db.getAll(...docRefs);
    snaps.forEach((doc, i) => { if (doc.exists && doc.data()._completed) { fetched[names[i]] = doc.data(); } });
  }
  return fetched;
}
|
|
201
|
+
|
|
202
|
+
/**
 * Streams portfolio/history data in chunks and runs per-user execution for every
 * streaming calculation in `state`.
 *
 * Today's portfolio stream drives the loop; yesterday's portfolio stream (for
 * historical calcs) and the trading-history stream are advanced in lock-step,
 * one chunk per iteration. NOTE(review): this assumes the secondary iterators
 * produce chunks aligned with the primary one — confirm chunking is consistent
 * across streams.
 *
 * @param {string} dateStr - Date in YYYY-MM-DD form.
 * @param {Object} state - normalized name -> calc instance (with .manifest attached).
 * @param {string} passName - Label used in log messages.
 * @param {Object} config - System configuration.
 * @param {Object} deps - Injected services; must include `logger`.
 * @param {Object} rootData - Availability bundle; may carry yesterdayPortfolioRefs.
 * @param {Array} portfolioRefs - Today's portfolio part refs.
 * @param {Array} historyRefs - Trading-history part refs.
 * @param {Object} fetchedDeps - Dependency results for today.
 * @param {Object} previousFetchedDeps - Dependency results for the prior day.
 */
async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps) {
  const { logger } = deps;
  const controller = new ComputationController(config, deps);
  const calcs = Object.values(state).filter(c => c && c.manifest);
  // Only calcs that consume portfolio or history data need streaming.
  const streamingCalcs = calcs.filter(c =>
    c.manifest.rootDataDependencies.includes('portfolio') ||
    c.manifest.rootDataDependencies.includes('history')
  );

  if (streamingCalcs.length === 0) return;

  logger.log('INFO', `[${passName}] Streaming for ${streamingCalcs.length} computations...`);

  await controller.loader.loadMappings();
  // Previous UTC day, for historical (day-over-day) calculations.
  const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
  const prevDateStr = prevDate.toISOString().slice(0, 10);

  const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
  const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
  const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
  const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
  const tH_iter = (needsTradingHistory && historyRefs) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;

  let yP_chunk = {};
  let tH_chunk = {};

  // Today's portfolio stream is the pacing iterator; the others advance with it.
  for await (const tP_chunk of tP_iter) {
    if (yP_iter) yP_chunk = (await yP_iter.next()).value || {};
    if (tH_iter) tH_chunk = (await tH_iter.next()).value || {};

    // All streaming calcs process the same chunk concurrently.
    const promises = streamingCalcs.map(calc =>
      controller.executor.executePerUser(
        calc,
        calc.manifest,
        dateStr,
        tP_chunk,
        yP_chunk,
        tH_chunk,
        fetchedDeps,
        previousFetchedDeps
      )
    );
    await Promise.all(promises);
  }
  logger.log('INFO', `[${passName}] Streaming complete.`);
}
|
|
248
|
+
|
|
249
|
+
/**
 * Runs one standard (per-user, streaming) computation pass for a date:
 * instantiates each calc, streams root data through them, then commits results.
 *
 * @param {Date} date - The processing date.
 * @param {Array<Object>} calcs - Manifest entries for this pass; each carries a `class`.
 * @param {string} passName - Label used in log messages.
 * @param {Object} config - System configuration.
 * @param {Object} deps - Injected services; must include `logger`.
 * @param {Object} rootData - Availability bundle from checkRootDataAvailability.
 * @param {Object} fetchedDeps - Dependency results for today.
 * @param {Object} previousFetchedDeps - Dependency results for the prior day.
 * @param {boolean} [skipStatusWrite=false] - Passed through to commitResults.
 * @returns {Promise<Object>} Map of successfully committed calc names (see commitResults).
 */
async function runStandardComputationPass(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps, skipStatusWrite = false) {
  const dStr = date.toISOString().slice(0, 10);
  const logger = deps.logger;
  const fullRoot = { ...rootData };
  // Historical calcs also need yesterday's portfolio refs.
  if (calcs.some(c => c.isHistorical)) {
    const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
    const prevStr = prev.toISOString().slice(0, 10);
    fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
  }

  // Instantiate each calculation; a failed constructor skips only that calc.
  const state = {};
  for (const c of calcs) {
    try {
      const inst = new c.class();
      inst.manifest = c;
      state[normalizeName(c.name)] = inst;
      logger.log('INFO', `${c.name} calculation running for ${dStr}`);
    }
    catch (e) { logger.log('WARN', `Failed to init ${c.name}`); }
  }

  await streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps);
  return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
}
|
|
273
|
+
|
|
274
|
+
/**
 * Runs one meta (once-per-day, non-streaming) computation pass for a date.
 * Each calc is executed sequentially; a failure aborts only that calc, and all
 * surviving instances are committed together.
 *
 * @param {Date} date - The processing date.
 * @param {Array<Object>} calcs - Manifest entries for this pass; each carries a `class`.
 * @param {string} passName - Label used in log messages.
 * @param {Object} config - System configuration.
 * @param {Object} deps - Injected services; must include `logger`.
 * @param {Object} fetchedDeps - Dependency results for today.
 * @param {Object} previousFetchedDeps - Dependency results for the prior day.
 * @param {Object} rootData - Availability bundle (unused here; kept for signature parity).
 * @param {boolean} [skipStatusWrite=false] - Passed through to commitResults.
 * @returns {Promise<Object>} Map of successfully committed calc names (see commitResults).
 */
async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, previousFetchedDeps, rootData, skipStatusWrite = false) {
  const controller = new ComputationController(config, deps);
  const dStr = date.toISOString().slice(0, 10);
  const state = {};

  for (const mCalc of calcs) {
    try {
      deps.logger.log('INFO', `${mCalc.name} calculation running for ${dStr}`);
      const inst = new mCalc.class();
      inst.manifest = mCalc;
      await controller.executor.executeOncePerDay(inst, mCalc, dStr, fetchedDeps, previousFetchedDeps);
      state[normalizeName(mCalc.name)] = inst;
    } catch (e) { deps.logger.log('ERROR', `Meta calc failed ${mCalc.name}: ${e.message}`); }
  }
  return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
}
|
|
290
|
+
|
|
291
|
+
/**
 * --- REFACTORED: commitResults ---
 * Commits results individually per calculation.
 * If one calculation fails (e.g. size limit), others still succeed.
 * UPDATED: Writes the HASH to the status document.
 *
 * @param {Object} stateObj - normalized name -> calc instance (with .manifest and getResult()).
 * @param {string} dStr - Date in YYYY-MM-DD form.
 * @param {string} passName - Label used in log messages.
 * @param {Object} config - Collection names for result writes.
 * @param {Object} deps - Injected services; must include `logger` and `db`.
 * @param {boolean} [skipStatusWrite=false] - Suppress the status-document update.
 * @returns {Promise<Object>} name -> manifest hash (or true) for each committed calc.
 */
async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
  const successUpdates = {};
  const schemas = [];

  // Iterate PER CALCULATION to isolate failures
  for (const name in stateObj) {
    const calc = stateObj[name];
    let hasData = false;

    try {
      const result = await calc.getResult();
      if (!result) {
        deps.logger.log('INFO', `${name} for ${dStr}: Skipped (Empty Result)`);
        continue;
      }

      const standardRes = {};
      const shardedWrites = [];
      const calcWrites = [];

      // 1. Separate Standard and Sharded Data
      // Keys prefixed 'sharded_' map collection -> docId -> data and are written
      // to their own documents; everything else lands in the calc's result doc.
      for (const key in result) {
        if (key.startsWith('sharded_')) {
          const sData = result[key];
          for (const colName in sData) {
            const docsMap = sData[colName];
            for (const docId in docsMap) {
              // A docId containing '/' is treated as a full document path.
              const ref = docId.includes('/') ? deps.db.doc(docId) : deps.db.collection(colName).doc(docId);
              shardedWrites.push({
                ref,
                data: { ...docsMap[docId], _completed: true }
              });
            }
          }
          if (Object.keys(sData).length > 0) hasData = true;
        } else {
          standardRes[key] = result[key];
        }
      }

      // 2. Prepare Standard Result Write
      if (Object.keys(standardRes).length) {
        validateResultPatterns(deps.logger, name, standardRes, calc.manifest.category);
        standardRes._completed = true;

        const docRef = deps.db.collection(config.resultsCollection).doc(dStr)
          .collection(config.resultsSubcollection).doc(calc.manifest.category)
          .collection(config.computationsSubcollection).doc(name);

        calcWrites.push({
          ref: docRef,
          data: standardRes
        });
        hasData = true;
      }

      // 3. Queue Schema (Safe to accumulate)
      if (calc.manifest.class.getSchema) {
        // Strip the class reference so the metadata is serializable.
        const { class: _cls, ...safeMetadata } = calc.manifest;
        schemas.push({
          name, category: calc.manifest.category, schema: calc.manifest.class.getSchema(), metadata: safeMetadata
        });
      }

      // 4. ATTEMPT COMMIT FOR THIS CALCULATION ONLY
      if (hasData) {
        const allWritesForCalc = [...calcWrites, ...shardedWrites];

        if (allWritesForCalc.length > 0) {
          await commitBatchInChunks(config, deps, allWritesForCalc, `${name} Results`);

          // --- CRITICAL UPDATE: Store the Smart Hash ---
          // The hash (falling back to a boolean) lets later runs detect code changes.
          successUpdates[name] = calc.manifest.hash || true;

          deps.logger.log('INFO', `${name} for ${dStr}: \u2714 Success (Written)`);
        } else {
          deps.logger.log('INFO', `${name} for ${dStr}: - No Data to Write`);
        }
      } else {
        deps.logger.log('INFO', `${name} for ${dStr}: - Empty`);
      }

    } catch (e) {
      // Isolation: a failed calc logs and moves on; others still commit.
      deps.logger.log('ERROR', `${name} for ${dStr}: \u2716 FAILED Commit: ${e.message}`);
    }
  }

  // Save Schemas (Best effort, isolated)
  if (schemas.length) batchStoreSchemas(deps, config, schemas).catch(() => { });

  // Update Status Document (Only for the ones that succeeded)
  if (!skipStatusWrite && Object.keys(successUpdates).length > 0) {
    await updateComputationStatus(dStr, successUpdates, config, deps);
    deps.logger.log('INFO', `[${passName}] Updated status document for ${Object.keys(successUpdates).length} successful computations.`);
  }
  return successUpdates;
}
|
|
394
|
+
|
|
395
|
+
/**
 * --- UPDATED: runBatchPriceComputation ---
 * Runs price-based calculations across many dates by loading price shards in
 * chunks (bounded concurrency), executing every calc for every date against
 * each chunk, and batch-committing the results with retry.
 *
 * @param {Object} config - Collection names for result writes.
 * @param {Object} deps - Injected services; must include `logger`, `db`, `calculationUtils`.
 * @param {string[]} dateStrings - Dates (YYYY-MM-DD) to compute.
 * @param {Array<Object>} calcs - Manifest entries; each carries a `class`.
 * @param {string[]} [targetTickers=[]] - Optional ticker filter; empty means all.
 */
async function runBatchPriceComputation(config, deps, dateStrings, calcs, targetTickers = []) {
  const { logger, db, calculationUtils } = deps;
  const controller = new ComputationController(config, deps);

  const mappings = await controller.loader.loadMappings();

  // Resolve the ticker filter to instrument IDs; abort if none resolve.
  let targetInstrumentIds = [];
  if (targetTickers && targetTickers.length > 0) {
    const tickerToInst = mappings.tickerToInstrument || {};
    targetInstrumentIds = targetTickers.map(t => tickerToInst[t]).filter(id => id);
    if (targetInstrumentIds.length === 0) {
      logger.log('WARN', '[BatchPrice] Target tickers provided but no IDs found. Aborting.');
      return;
    }
  }

  const allShardRefs = await getRelevantShardRefs(config, deps, targetInstrumentIds);

  if (!allShardRefs.length) {
    logger.log('WARN', '[BatchPrice] No relevant price shards found. Exiting.');
    return;
  }

  // Tuning knobs: parallel shard chunks, shards per chunk, writes per Firestore batch.
  const OUTER_CONCURRENCY_LIMIT = 2;
  const SHARD_BATCH_SIZE = 20;
  const WRITE_BATCH_LIMIT = 50;

  logger.log('INFO', `[BatchPrice] Execution Plan: ${dateStrings.length} days, ${allShardRefs.length} shards. Concurrency: ${OUTER_CONCURRENCY_LIMIT}.`);

  const shardChunks = [];
  for (let i = 0; i < allShardRefs.length; i += SHARD_BATCH_SIZE) {
    shardChunks.push(allShardRefs.slice(i, i + SHARD_BATCH_SIZE));
  }

  const outerLimit = pLimit(OUTER_CONCURRENCY_LIMIT);

  const chunkPromises = [];
  for (let index = 0; index < shardChunks.length; index++) {
    const shardChunkRefs = shardChunks[index];
    chunkPromises.push(outerLimit(async () => {
      try {
        logger.log('INFO', `[BatchPrice] Processing chunk ${index + 1}/${shardChunks.length} (${shardChunkRefs.length} shards)...`);

        const pricesData = await loadDataByRefs(config, deps, shardChunkRefs);

        // Shards may contain extra instruments; drop anything not requested.
        if (targetInstrumentIds.length > 0) {
          const requestedSet = new Set(targetInstrumentIds);
          for (const loadedInstrumentId in pricesData) {
            if (!requestedSet.has(loadedInstrumentId)) {
              delete pricesData[loadedInstrumentId];
            }
          }
        }

        const writes = [];

        // Every date sees the same loaded price chunk; calcs run per date.
        for (const dateStr of dateStrings) {
          const context = {
            mappings,
            prices: { history: pricesData },
            date: { today: dateStr },
            math: {
              extract: DataExtractor,
              history: HistoryExtractor,
              compute: MathPrimitives,
              aggregate: Aggregators,
              validate: Validators,
              signals: SignalPrimitives,
              schemas: SCHEMAS,
              distribution: DistributionAnalytics,
              TimeSeries: TimeSeries,
              priceExtractor: priceExtractor
            }
          };

          for (const calcManifest of calcs) {
            try {
              const instance = new calcManifest.class();
              await instance.process(context);
              const result = await instance.getResult();

              if (result && Object.keys(result).length > 0) {
                // Prefer the by_instrument view when the calc provides one.
                let dataToWrite = result;
                if (result.by_instrument) dataToWrite = result.by_instrument;

                if (Object.keys(dataToWrite).length > 0) {
                  const docRef = db.collection(config.resultsCollection).doc(dateStr)
                    .collection(config.resultsSubcollection).doc(calcManifest.category)
                    .collection(config.computationsSubcollection).doc(normalizeName(calcManifest.name));

                  // merge:true because other shard chunks write to the same doc.
                  writes.push({
                    ref: docRef,
                    data: { ...dataToWrite, _completed: true },
                    options: { merge: true }
                  });
                }
              }
            } catch (err) {
              logger.log('ERROR', `[BatchPrice] \u2716 Failed ${calcManifest.name} for ${dateStr}: ${err.message}`);
            }
          }
        }

        if (writes.length > 0) {
          // Commit in bounded batches, up to 10 batches in flight, with retry.
          const commitBatches = [];
          for (let i = 0; i < writes.length; i += WRITE_BATCH_LIMIT) {
            commitBatches.push(writes.slice(i, i + WRITE_BATCH_LIMIT));
          }

          const commitLimit = pLimit(10);

          await Promise.all(commitBatches.map((batchWrites, bIndex) => commitLimit(async () => {
            const batch = db.batch();
            batchWrites.forEach(w => batch.set(w.ref, w.data, w.options));

            try {
              await calculationUtils.withRetry(() => batch.commit(), `BatchPrice-C${index}-B${bIndex}`);
            } catch (commitErr) {
              logger.log('ERROR', `[BatchPrice] Commit failed for Chunk ${index} Batch ${bIndex}.`, { error: commitErr.message });
            }
          })));
        }

      } catch (chunkErr) {
        // A failed chunk is logged and skipped; other chunks still run.
        logger.log('ERROR', `[BatchPrice] Fatal error processing Chunk ${index}.`, { error: chunkErr.message });
      }
    }));
  }

  await Promise.all(chunkPromises);
  logger.log('INFO', '[BatchPrice] Optimization pass complete.');
}
|
|
530
|
+
|
|
531
|
+
// Public API of the orchestration helpers: pass grouping, root-data checks,
// status-document accessors, and the three pass runners.
module.exports = {
  groupByPass,
  checkRootDependencies,
  checkRootDataAvailability,
  fetchExistingResults,
  fetchComputationStatus,
  fetchGlobalComputationStatus,
  updateComputationStatus,
  updateGlobalComputationStatus,
  runStandardComputationPass,
  runMetaComputationPass,
  runBatchPriceComputation
};
|