bulltrackers-module 1.0.218 → 1.0.220

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. package/functions/computation-system/WorkflowOrchestrator.js +153 -0
  2. package/functions/computation-system/context/ContextFactory.js +63 -0
  3. package/functions/computation-system/context/ManifestBuilder.js +240 -0
  4. package/functions/computation-system/controllers/computation_controller.js +12 -4
  5. package/functions/computation-system/data/AvailabilityChecker.js +75 -0
  6. package/functions/computation-system/data/CachedDataLoader.js +63 -0
  7. package/functions/computation-system/data/DependencyFetcher.js +70 -0
  8. package/functions/computation-system/executors/MetaExecutor.js +68 -0
  9. package/functions/computation-system/executors/PriceBatchExecutor.js +99 -0
  10. package/functions/computation-system/executors/StandardExecutor.js +115 -0
  11. package/functions/computation-system/helpers/computation_dispatcher.js +3 -3
  12. package/functions/computation-system/helpers/computation_worker.js +44 -18
  13. package/functions/computation-system/layers/extractors.js +9 -3
  14. package/functions/computation-system/layers/mathematics.js +1 -1
  15. package/functions/computation-system/persistence/FirestoreUtils.js +64 -0
  16. package/functions/computation-system/persistence/ResultCommitter.js +118 -0
  17. package/functions/computation-system/persistence/StatusRepository.js +23 -0
  18. package/functions/computation-system/topology/HashManager.js +35 -0
  19. package/functions/computation-system/utils/utils.js +38 -10
  20. package/index.js +8 -3
  21. package/package.json +1 -1
  22. package/functions/computation-system/helpers/computation_manifest_builder.js +0 -291
  23. package/functions/computation-system/helpers/computation_pass_runner.js +0 -129
  24. package/functions/computation-system/helpers/orchestration_helpers.js +0 -352
package/functions/computation-system/data/DependencyFetcher.js
@@ -0,0 +1,70 @@
+ /**
+  * @fileoverview Fetches results from previous computations, handling auto-sharding hydration.
+  */
+ const { normalizeName } = require('../utils/utils');
+
+ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db }, includeSelf = false) {
+   const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
+   const calcsToFetch = new Set();
+
+   for (const calc of calcsInPass) {
+     if (calc.dependencies) calc.dependencies.forEach(d => calcsToFetch.add(normalizeName(d)));
+     if (includeSelf && calc.isHistorical) calcsToFetch.add(normalizeName(calc.name));
+   }
+
+   if (!calcsToFetch.size) return {};
+
+   const fetched = {};
+   const docRefs = [];
+   const names = [];
+
+   for (const name of calcsToFetch) {
+     const m = manifestMap.get(name);
+     if (m) {
+       docRefs.push(db.collection(config.resultsCollection)
+         .doc(dateStr)
+         .collection(config.resultsSubcollection)
+         .doc(m.category || 'unknown')
+         .collection(config.computationsSubcollection)
+         .doc(name));
+       names.push(name);
+     }
+   }
+
+   if (docRefs.length) {
+     const snaps = await db.getAll(...docRefs);
+     const hydrationPromises = [];
+
+     snaps.forEach((doc, i) => {
+       const name = names[i];
+       if (!doc.exists) return;
+       const data = doc.data();
+       if (data._sharded === true) {
+         hydrationPromises.push(hydrateAutoShardedResult(doc.ref, name));
+       } else if (data._completed) {
+         fetched[name] = data;
+       }
+     });
+
+     if (hydrationPromises.length > 0) {
+       const hydratedResults = await Promise.all(hydrationPromises);
+       hydratedResults.forEach(res => { fetched[res.name] = res.data; });
+     }
+   }
+   return fetched;
+ }
+
+ async function hydrateAutoShardedResult(docRef, resultName) {
+   const shardsCol = docRef.collection('_shards');
+   const snapshot = await shardsCol.get();
+   const assembledData = { _completed: true };
+   snapshot.forEach(doc => {
+     const chunk = doc.data();
+     Object.assign(assembledData, chunk);
+   });
+   delete assembledData._sharded;
+   delete assembledData._completed;
+   return { name: resultName, data: assembledData };
+ }
+
+ module.exports = { fetchExistingResults };
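
A minimal usage sketch for the new dependency fetcher (not part of the package; the config keys, calculation names, and require path are assumptions for illustration):

// Sketch: resolving prior-pass results before running a pass.
const { fetchExistingResults } = require('bulltrackers-module/functions/computation-system/data/DependencyFetcher');

const config = {
  resultsCollection: 'computation_results',       // assumed collection name
  resultsSubcollection: 'categories',             // assumed
  computationsSubcollection: 'computations',      // assumed
};

// Manifest entries carry name/category/dependencies, as used by the fetcher above.
const fullManifest = [
  { name: 'PortfolioValue', category: 'portfolio', dependencies: [] },
  { name: 'DailyReturn', category: 'portfolio', dependencies: ['PortfolioValue'], isHistorical: true },
];

async function example(db) {
  const calcsInPass = fullManifest.filter(c => c.name === 'DailyReturn');
  // Returns a map of normalized calc name -> previously committed (and, if sharded, hydrated) result.
  return fetchExistingResults('2024-01-02', calcsInPass, fullManifest, config, { db }, true);
}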
package/functions/computation-system/executors/MetaExecutor.js
@@ -0,0 +1,68 @@
+ /**
+  * @fileoverview Executor for "Meta" (global) calculations.
+  */
+ const { normalizeName } = require('../utils/utils');
+ const { CachedDataLoader } = require('../data/CachedDataLoader');
+ const { ContextFactory } = require('../context/ContextFactory');
+ const { commitResults } = require('../persistence/ResultCommitter');
+
+ class MetaExecutor {
+   static async run(date, calcs, passName, config, deps, fetchedDeps, previousFetchedDeps, rootData, skipStatusWrite = false) {
+     const dStr = date.toISOString().slice(0, 10);
+     const state = {};
+     const cachedLoader = new CachedDataLoader(config, deps);
+
+     for (const mCalc of calcs) {
+       try {
+         deps.logger.log('INFO', `${mCalc.name} calculation running for ${dStr}`);
+         const inst = new mCalc.class();
+         inst.manifest = mCalc;
+
+         await MetaExecutor.executeOncePerDay(inst, mCalc, dStr, fetchedDeps, previousFetchedDeps, config, deps, cachedLoader);
+         state[normalizeName(mCalc.name)] = inst;
+       } catch (e) {
+         deps.logger.log('ERROR', `Meta calc failed ${mCalc.name}: ${e.message}`);
+       }
+     }
+     return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
+   }
+
+   static async executeOncePerDay(calcInstance, metadata, dateStr, computedDeps, prevDeps, config, deps, loader) {
+     const mappings = await loader.loadMappings();
+     const { logger } = deps;
+     const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await loader.loadInsights(dateStr) } : null;
+     const social = metadata.rootDataDependencies?.includes('social') ? { today: await loader.loadSocial(dateStr) } : null;
+
+     if (metadata.rootDataDependencies?.includes('price')) {
+       logger.log('INFO', `[Executor] Running Batched/Sharded Execution for ${metadata.name}`);
+       const shardRefs = await loader.getPriceShardReferences();
+       if (shardRefs.length === 0) { logger.log('WARN', '[Executor] No price shards found.'); return {}; }
+
+       let processedCount = 0;
+       for (const ref of shardRefs) {
+         const shardData = await loader.loadPriceShard(ref);
+         const partialContext = ContextFactory.buildMetaContext({
+           dateStr, metadata, mappings, insights, socialData: social,
+           prices: { history: shardData }, computedDependencies: computedDeps,
+           previousComputedDependencies: prevDeps, config, deps
+         });
+
+         await calcInstance.process(partialContext);
+         partialContext.prices = null;
+         processedCount++;
+         if (processedCount % 10 === 0 && global.gc) { global.gc(); }
+       }
+       logger.log('INFO', `[Executor] Finished Batched Execution for ${metadata.name} (${processedCount} shards).`);
+       return calcInstance.getResult ? await calcInstance.getResult() : {};
+     } else {
+       const context = ContextFactory.buildMetaContext({
+         dateStr, metadata, mappings, insights, socialData: social,
+         prices: {}, computedDependencies: computedDeps,
+         previousComputedDependencies: prevDeps, config, deps
+       });
+       return await calcInstance.process(context);
+     }
+   }
+ }
+
+ module.exports = { MetaExecutor };
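
The executor only relies on a small calculation contract: a constructible class whose `process(context)` is invoked once per price shard (when `price` is a root dependency) or once per day otherwise, with an optional `getResult()` read after the last shard. A hedged sketch of a compatible class (the class name and result shape are illustrative, not from the package):

// Illustrative Meta calculation: accumulates across price shards, then reports a result.
class ExampleInstrumentCounter {
  constructor() { this.instrumentCount = 0; }

  async process(context) {
    // The shape of each shard is whatever loadPriceShard returns; here we only count keys.
    this.instrumentCount += Object.keys(context.prices?.history || {}).length;
  }

  // Aggregated result returned after all shards have been processed.
  async getResult() { return { instrumentCount: this.instrumentCount }; }
}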
package/functions/computation-system/executors/PriceBatchExecutor.js
@@ -0,0 +1,99 @@
+ /**
+  * @fileoverview Specialized Executor for Price-Dependent Batch computations.
+  */
+ const pLimit = require('p-limit');
+ const { normalizeName } = require('../utils/utils');
+ const { getRelevantShardRefs, loadDataByRefs } = require('../utils/data_loader');
+ const { CachedDataLoader } = require('../data/CachedDataLoader');
+ const mathLayer = require('../layers/index');
+ const { LEGACY_MAPPING } = require('../topology/HashManager');
+
+ async function runBatchPriceComputation(config, deps, dateStrings, calcs, targetTickers = []) {
+   const { logger, db, calculationUtils } = deps;
+   const cachedLoader = new CachedDataLoader(config, deps);
+   const mappings = await cachedLoader.loadMappings();
+
+   let targetInstrumentIds = [];
+   if (targetTickers && targetTickers.length > 0) {
+     const tickerToInst = mappings.tickerToInstrument || {};
+     targetInstrumentIds = targetTickers.map(t => tickerToInst[t]).filter(id => id);
+     if (targetInstrumentIds.length === 0) { logger.log('WARN', '[BatchPrice] Target tickers provided but no IDs found. Aborting.'); return; }
+   }
+
+   const allShardRefs = await getRelevantShardRefs(config, deps, targetInstrumentIds);
+   if (!allShardRefs.length) { logger.log('WARN', '[BatchPrice] No relevant price shards found. Exiting.'); return; }
+
+   const OUTER_CONCURRENCY_LIMIT = 2, SHARD_BATCH_SIZE = 20, WRITE_BATCH_LIMIT = 50;
+   logger.log('INFO', `[BatchPrice] Execution Plan: ${dateStrings.length} days, ${allShardRefs.length} shards. Concurrency: ${OUTER_CONCURRENCY_LIMIT}.`);
+
+   const shardChunks = [];
+   for (let i = 0; i < allShardRefs.length; i += SHARD_BATCH_SIZE) { shardChunks.push(allShardRefs.slice(i, i + SHARD_BATCH_SIZE)); }
+
+   const outerLimit = pLimit(OUTER_CONCURRENCY_LIMIT);
+   const chunkPromises = [];
+
+   for (let index = 0; index < shardChunks.length; index++) {
+     const shardChunkRefs = shardChunks[index];
+     chunkPromises.push(outerLimit(async () => {
+       try {
+         logger.log('INFO', `[BatchPrice] Processing chunk ${index + 1}/${shardChunks.length} (${shardChunkRefs.length} shards)...`);
+         const pricesData = await loadDataByRefs(config, deps, shardChunkRefs);
+         if (targetInstrumentIds.length > 0) {
+           const requestedSet = new Set(targetInstrumentIds);
+           for (const loadedInstrumentId in pricesData) {
+             if (!requestedSet.has(loadedInstrumentId)) { delete pricesData[loadedInstrumentId]; }
+           }
+         }
+
+         const writes = [];
+         for (const dateStr of dateStrings) {
+           const dynamicMathContext = {};
+           for (const [key, value] of Object.entries(mathLayer)) {
+             dynamicMathContext[key] = value;
+             if (LEGACY_MAPPING[key]) { dynamicMathContext[LEGACY_MAPPING[key]] = value; }
+           }
+           const context = { mappings, prices: { history: pricesData }, date: { today: dateStr }, math: dynamicMathContext };
+
+           for (const calcManifest of calcs) {
+             try {
+               const instance = new calcManifest.class();
+               await instance.process(context);
+               const result = await instance.getResult();
+               if (result && Object.keys(result).length > 0) {
+                 let dataToWrite = result;
+                 if (result.by_instrument) dataToWrite = result.by_instrument;
+                 if (Object.keys(dataToWrite).length > 0) {
+                   const docRef = db.collection(config.resultsCollection)
+                     .doc(dateStr)
+                     .collection(config.resultsSubcollection)
+                     .doc(calcManifest.category)
+                     .collection(config.computationsSubcollection)
+                     .doc(normalizeName(calcManifest.name));
+                   writes.push({ ref: docRef, data: { ...dataToWrite, _completed: true }, options: { merge: true } });
+                 }
+               }
+             } catch (err) { logger.log('ERROR', `[BatchPrice] \u2716 Failed ${calcManifest.name} for ${dateStr}: ${err.message}`); }
+           }
+         }
+
+         if (writes.length > 0) {
+           const commitBatches = [];
+           for (let i = 0; i < writes.length; i += WRITE_BATCH_LIMIT) { commitBatches.push(writes.slice(i, i + WRITE_BATCH_LIMIT)); }
+           const commitLimit = pLimit(10);
+           await Promise.all(commitBatches.map((batchWrites, bIndex) => commitLimit(async () => {
+             const batch = db.batch(); batchWrites.forEach(w => batch.set(w.ref, w.data, w.options));
+             try {
+               await calculationUtils.withRetry(() => batch.commit(), `BatchPrice-C${index}-B${bIndex}`);
+             } catch (commitErr) {
+               logger.log('ERROR', `[BatchPrice] Commit failed for Chunk ${index} Batch ${bIndex}.`, { error: commitErr.message });
+             }
+           })));
+         }
+       } catch (chunkErr) { logger.log('ERROR', `[BatchPrice] Fatal error processing Chunk ${index}.`, { error: chunkErr.message }); }
+     }));
+   }
+   await Promise.all(chunkPromises);
+   logger.log('INFO', '[BatchPrice] Optimization pass complete.');
+ }
+
+ module.exports = { runBatchPriceComputation };
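
A hedged invocation sketch for the batch price executor (not part of the package); the `deps` wiring, config keys, and ticker symbols are assumptions for illustration:

// Sketch: backfilling price-dependent calcs for a date range, limited to two tickers.
const { runBatchPriceComputation } = require('bulltrackers-module/functions/computation-system/executors/PriceBatchExecutor');

async function backfillExample(db, computationManifest) {
  const deps = {
    db,
    logger: { log: (level, msg, extra) => console.log(level, msg, extra || '') },
    calculationUtils: { withRetry: async (fn) => fn() },   // minimal stand-in for the real retry helper
  };
  const config = {
    resultsCollection: 'computation_results',              // assumed keys
    resultsSubcollection: 'categories',
    computationsSubcollection: 'computations',
  };
  const priceCalcs = computationManifest.filter(c => c.rootDataDependencies?.includes('price'));
  await runBatchPriceComputation(config, deps, ['2024-01-02', '2024-01-03'], priceCalcs, ['AAPL', 'TSLA']);
}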
package/functions/computation-system/executors/StandardExecutor.js
@@ -0,0 +1,115 @@
+ /**
+  * @fileoverview Executor for "Standard" (per-user) calculations.
+  */
+ const { normalizeName } = require('../utils/utils');
+ const { streamPortfolioData, streamHistoryData, getPortfolioPartRefs } = require('../utils/data_loader');
+ const { CachedDataLoader } = require('../data/CachedDataLoader');
+ const { ContextFactory } = require('../context/ContextFactory');
+ const { commitResults } = require('../persistence/ResultCommitter');
+ const mathLayer = require('../layers/index');
+
+ class StandardExecutor {
+   static async run(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps, skipStatusWrite = false) {
+     const dStr = date.toISOString().slice(0, 10);
+     const logger = deps.logger;
+
+     // 1. Prepare yesterday's data if needed
+     const fullRoot = { ...rootData };
+     if (calcs.some(c => c.isHistorical)) {
+       const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
+       const prevStr = prev.toISOString().slice(0, 10);
+       fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
+     }
+
+     // 2. Initialize Instances
+     const state = {};
+     for (const c of calcs) {
+       try {
+         const inst = new c.class();
+         inst.manifest = c;
+         state[normalizeName(c.name)] = inst;
+         logger.log('INFO', `${c.name} calculation running for ${dStr}`);
+       } catch (e) {
+         logger.log('WARN', `Failed to init ${c.name}`);
+       }
+     }
+
+     // 3. Stream & Process
+     await StandardExecutor.streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps);
+
+     // 4. Commit
+     return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
+   }
+
+   static async streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps) {
+     const { logger } = deps;
+     const calcs = Object.values(state).filter(c => c && c.manifest);
+     const streamingCalcs = calcs.filter(c => c.manifest.rootDataDependencies.includes('portfolio') || c.manifest.rootDataDependencies.includes('history'));
+
+     if (streamingCalcs.length === 0) return;
+
+     logger.log('INFO', `[${passName}] Streaming for ${streamingCalcs.length} computations...`);
+
+     const cachedLoader = new CachedDataLoader(config, deps);
+     await cachedLoader.loadMappings();
+
+     const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
+     const prevDateStr = prevDate.toISOString().slice(0, 10);
+
+     const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
+     const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
+     const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs)
+       ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs)
+       : null;
+
+     const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
+     const tH_iter = (needsTradingHistory && historyRefs)
+       ? streamHistoryData(config, deps, dateStr, historyRefs)
+       : null;
+
+     let yP_chunk = {}, tH_chunk = {};
+
+     for await (const tP_chunk of tP_iter) {
+       if (yP_iter) yP_chunk = (await yP_iter.next()).value || {};
+       if (tH_iter) tH_chunk = (await tH_iter.next()).value || {};
+
+       // Execute chunk for all calcs
+       const promises = streamingCalcs.map(calc =>
+         StandardExecutor.executePerUser(calc, calc.manifest, dateStr, tP_chunk, yP_chunk, tH_chunk, fetchedDeps, previousFetchedDeps, config, deps, cachedLoader)
+       );
+       await Promise.all(promises);
+     }
+     logger.log('INFO', `[${passName}] Streaming complete.`);
+   }
+
+   static async executePerUser(calcInstance, metadata, dateStr, portfolioData, yesterdayPortfolioData, historyData, computedDeps, prevDeps, config, deps, loader) {
+     const { logger } = deps;
+     const targetUserType = metadata.userType;
+     const mappings = await loader.loadMappings();
+     const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await loader.loadInsights(dateStr) } : null;
+     const SCHEMAS = mathLayer.SCHEMAS;
+
+     for (const [userId, todayPortfolio] of Object.entries(portfolioData)) {
+       const yesterdayPortfolio = yesterdayPortfolioData ? yesterdayPortfolioData[userId] : null;
+       const todayHistory = historyData ? historyData[userId] : null;
+       const actualUserType = todayPortfolio.PublicPositions ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
+
+       if (targetUserType !== 'all') {
+         const mappedTarget = (targetUserType === 'speculator') ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
+         if (mappedTarget !== actualUserType) continue;
+       }
+
+       const context = ContextFactory.buildPerUserContext({
+         todayPortfolio, yesterdayPortfolio, todayHistory, userId,
+         userType: actualUserType, dateStr, metadata, mappings, insights,
+         computedDependencies: computedDeps, previousComputedDependencies: prevDeps,
+         config, deps
+       });
+
+       try { await calcInstance.process(context); }
+       catch (e) { logger.log('WARN', `Calc ${metadata.name} failed for user ${userId}: ${e.message}`); }
+     }
+   }
+ }
+
+ module.exports = { StandardExecutor };
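
For context, the per-user executor drives each calculation instance once per streamed chunk and keeps the instance alive across chunks, so state must accumulate on `this`. A hedged sketch of a compatible calculation (the class name, the exact context field names produced by ContextFactory, and the getResult contract are assumptions):

// Illustrative per-user calculation: counts users holding any public position.
class ExamplePublicPositionCount {
  constructor() { this.usersWithPositions = 0; this.totalUsers = 0; }

  // Invoked once per user with a context built by ContextFactory.buildPerUserContext.
  async process(context) {
    this.totalUsers++;
    const positions = context.todayPortfolio?.PublicPositions || [];  // field name assumed
    if (positions.length > 0) this.usersWithPositions++;
  }

  // Read by the result committer after streaming finishes (assumed contract).
  async getResult() { return { usersWithPositions: this.usersWithPositions, totalUsers: this.totalUsers }; }
}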
package/functions/computation-system/helpers/computation_dispatcher.js
@@ -1,12 +1,11 @@
  /**
   * FILENAME: bulltrackers-module/functions/computation-system/helpers/computation_dispatcher.js
   * PURPOSE: Dispatches computation tasks to Pub/Sub for scalable execution.
- * FIXED: Instantiates PubSubUtils locally to ensure valid logger/dependencies are used.
- * IMPROVED: Logging now explicitly lists the calculations being scheduled.
+ * REFACTORED: Now uses WorkflowOrchestrator for helper functions.
   */
 
  const { getExpectedDateStrings } = require('../utils/utils.js');
- const { groupByPass } = require('./orchestration_helpers.js');
+ const { groupByPass } = require('../WorkflowOrchestrator.js');
  const { PubSubUtils } = require('../../core/utils/pubsub_utils');
 
  const TOPIC_NAME = 'computation-tasks';
@@ -19,6 +18,7 @@ async function dispatchComputationPass(config, dependencies, computationManifest
    const { logger } = dependencies;
    const pubsubUtils = new PubSubUtils(dependencies);
    const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
+
    if (!passToRun) { return logger.log('ERROR', '[Dispatcher] No pass defined (COMPUTATION_PASS_TO_RUN). Aborting.'); }
 
    // 1. Validate Pass Existence
package/functions/computation-system/helpers/computation_worker.js
@@ -1,11 +1,10 @@
  /**
   * FILENAME: bulltrackers-module/functions/computation-system/helpers/computation_worker.js
   * PURPOSE: Consumes computation tasks from Pub/Sub and executes them.
- * FIXED: Added robust payload parsing to handle Cloud Functions Gen 2 (CloudEvents).
+ * REFACTORED: Now imports logic from the new WorkflowOrchestrator.
   */
 
- const { runDateComputation } = require('./computation_pass_runner.js');
- const { groupByPass } = require('./orchestration_helpers.js');
+ const { runDateComputation, groupByPass } = require('../WorkflowOrchestrator.js');
 
  /**
   * Handles a single Pub/Sub message for a computation task.
@@ -16,29 +15,56 @@ async function handleComputationTask(message, config, dependencies, computationM
 
    let data;
    try {
-     // 1. Handle Cloud Functions Gen 2 (CloudEvent)
-     if (message.data && message.data.message && message.data.message.data) { const buffer = Buffer.from(message.data.message.data, 'base64'); data = JSON.parse(buffer.toString()); }
-     // 2. Handle Cloud Functions Gen 1 / Legacy PubSub
-     else if (message.data && typeof message.data === 'string') { const buffer = Buffer.from(message.data, 'base64'); data = JSON.parse(buffer.toString()); }
-     // 3. Handle Direct JSON (Test harness or simulator)
-     else if (message.json) { data = message.json; }
-     // 4. Fallback: Assume message is the payload
-     else { data = message; }
-   } catch (parseError) { logger.log('ERROR', `[Worker] Failed to parse Pub/Sub payload.`, { error: parseError.message }); return; }
+     // 1. Handle Cloud Functions Gen 2 (CloudEvent) -> Gen 1 -> Direct JSON -> Message
+     if (message.data && message.data.message && message.data.message.data) {
+       const buffer = Buffer.from(message.data.message.data, 'base64'); data = JSON.parse(buffer.toString());
+     } else if (message.data && typeof message.data === 'string') {
+       const buffer = Buffer.from(message.data, 'base64'); data = JSON.parse(buffer.toString());
+     } else if (message.json) {
+       data = message.json;
+     } else {
+       data = message;
+     }
+   } catch (parseError) {
+     logger.log('ERROR', `[Worker] Failed to parse Pub/Sub payload.`, { error: parseError.message });
+     return;
+   }
 
    try {
      // Validate Action
-     if (!data || data.action !== 'RUN_COMPUTATION_DATE') { if (data) logger.log('WARN', `[Worker] Unknown or missing action: ${data?.action}. Ignoring.`); return; }
+     if (!data || data.action !== 'RUN_COMPUTATION_DATE') {
+       if (data) logger.log('WARN', `[Worker] Unknown or missing action: ${data?.action}. Ignoring.`);
+       return;
+     }
+
      const { date, pass } = data;
-     if (!date || !pass) { logger.log('ERROR', `[Worker] Missing date or pass in payload: ${JSON.stringify(data)}`); return; }
+     if (!date || !pass) {
+       logger.log('ERROR', `[Worker] Missing date or pass in payload: ${JSON.stringify(data)}`);
+       return;
+     }
+
      logger.log('INFO', `[Worker] Received task: Date=${date}, Pass=${pass}`);
+
      const passes = groupByPass(computationManifest);
      const calcsInThisPass = passes[pass] || [];
-     if (!calcsInThisPass.length) { logger.log('WARN', `[Worker] No calculations found for Pass ${pass}.`); return; }
+
+     if (!calcsInThisPass.length) {
+       logger.log('WARN', `[Worker] No calculations found for Pass ${pass}.`);
+       return;
+     }
+
      const result = await runDateComputation(date, pass, calcsInThisPass, config, dependencies, computationManifest);
-     if (result) { logger.log('INFO', `[Worker] Successfully processed ${date} (Pass ${pass}). Updates: ${Object.keys(result.updates || {}).length}`)
-     } else { logger.log('INFO', `[Worker] Processed ${date} (Pass ${pass}) - Skipped (Dependencies missing or already done).`); }
-   } catch (err) { logger.log('ERROR', `[Worker] Fatal error processing task: ${err.message}`, { stack: err.stack }); throw err; }
+
+     if (result) {
+       logger.log('INFO', `[Worker] Successfully processed ${date} (Pass ${pass}). Updates: ${Object.keys(result.updates || {}).length}`);
+     } else {
+       logger.log('INFO', `[Worker] Processed ${date} (Pass ${pass}) - Skipped (Dependencies missing or already done).`);
+     }
+
+   } catch (err) {
+     logger.log('ERROR', `[Worker] Fatal error processing task: ${err.message}`, { stack: err.stack });
+     throw err;
+   }
  }
 
  module.exports = { handleComputationTask };
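
The worker therefore only acts on messages whose decoded payload carries `action: 'RUN_COMPUTATION_DATE'` plus a `date` and a `pass`. A minimal publishing sketch (not part of the package; topic name taken from the dispatcher above, date and pass values illustrative):

// Sketch: publishing one computation task in the format handleComputationTask expects.
const { PubSub } = require('@google-cloud/pubsub');

async function publishTaskExample() {
  const pubsub = new PubSub();
  const payload = { action: 'RUN_COMPUTATION_DATE', date: '2024-01-02', pass: '1' };
  // Gen 1 and Gen 2 delivery both arrive as a base64-encoded data field, which the worker decodes.
  await pubsub.topic('computation-tasks').publishMessage({ json: payload });
}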
package/functions/computation-system/layers/extractors.js
@@ -252,6 +252,14 @@ class InsightsExtractor {
   * @param {string} timeframe - 'today' or 'yesterday'
   * @returns {Array} Array of insight objects
   */
+
+  // MAJOR FIX TO GET INSIGHTS METHOD FOR THE CORRECT STRUCTURE AND SUPPORTING YESTERDAY + TODAY DATA REQUESTS
+  // THIS IS INJECTED TO BE USED LIKE :
+
+  // process(context) {
+  //   const { insights: insightsHelper } = context.math;
+  //   const insights = insightsHelper.getInsights(context); This is the direct call
+
  static getInsights(context, timeframe = 'today') {
    const insightsData = context.insights;
 
@@ -263,9 +271,7 @@ class InsightsExtractor {
    if (!doc) return [];
 
    // Extract the insights array from the document
-   if (doc.insights && Array.isArray(doc.insights)) {
-     return doc.insights;
-   }
+   if (doc.insights && Array.isArray(doc.insights)) { return doc.insights; }
 
    return [];
  }
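
Completing the usage fragment from the injected comment above, a hedged sketch of calling the helper for both timeframes inside a calculation's `process` (the surrounding class and metric are illustrative, not from the package):

// Sketch: reading today's and yesterday's insights via the extractor exposed on context.math.
class ExampleInsightsDelta {
  async process(context) {
    const { insights: insightsHelper } = context.math;
    const today = insightsHelper.getInsights(context);                 // defaults to 'today'
    const yesterday = insightsHelper.getInsights(context, 'yesterday');
    this.delta = today.length - yesterday.length;                      // illustrative metric
  }
  async getResult() { return { insightCountDelta: this.delta || 0 }; }
}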
package/functions/computation-system/layers/mathematics.js
@@ -59,7 +59,7 @@ class MathPrimitives {
      return 1.0;
    }
 
-   const probability = normCDF(( -Math.abs(b) - nu * t ) / sigmaSqrtT) + Math.exp((2 * nu * Math.abs(b)) / (sigma * sigma)) * normCDF(( -Math.abs(b) + nu * t ) / sigmaSqrtT);
+   const probability = normCDF(-term3) + Math.exp(term2) * normCDF(-term1);
 
    return Math.min(Math.max(probability, 0), 1);
  }
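
For reference, the removed line computes, in LaTeX form,

P = \Phi\!\left(\frac{-\lvert b\rvert - \nu t}{\sigma\sqrt{t}}\right) + \exp\!\left(\frac{2\nu\lvert b\rvert}{\sigma^{2}}\right)\,\Phi\!\left(\frac{-\lvert b\rvert + \nu t}{\sigma\sqrt{t}}\right),

clamped to [0, 1] by the following line; this has the form of a standard barrier-crossing probability for a drifted Brownian motion. The new `term1`, `term2`, and `term3` are presumably precomputed earlier in the method (outside this hunk), so the change reads as a simplification of the expression rather than a change of formula.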
package/functions/computation-system/persistence/FirestoreUtils.js
@@ -0,0 +1,64 @@
+ /**
+  * @fileoverview Low-level Firestore interactions.
+  */
+ const { withRetry } = require('../utils/utils.js'); // Assuming this exists or is passed in deps
+
+ async function commitBatchInChunks(config, deps, writes, operationName) {
+   const { db, logger, calculationUtils } = deps;
+   const retryFn = calculationUtils ? calculationUtils.withRetry : (fn) => fn();
+
+   if (!writes || !writes.length) {
+     logger.log('WARN', `[${operationName}] No writes to commit.`);
+     return;
+   }
+
+   const MAX_BATCH_OPS = 300;
+   const MAX_BATCH_BYTES = 9 * 1024 * 1024;
+
+   let currentBatch = db.batch();
+   let currentOpsCount = 0;
+   let currentBytesEst = 0;
+   let batchIndex = 1;
+
+   const commitAndReset = async () => {
+     if (currentOpsCount > 0) {
+       try {
+         await retryFn(
+           () => currentBatch.commit(),
+           `${operationName} (Chunk ${batchIndex})`
+         );
+         logger.log('INFO', `[${operationName}] Committed chunk ${batchIndex} (${currentOpsCount} ops, ~${(currentBytesEst / 1024 / 1024).toFixed(2)} MB).`);
+         batchIndex++;
+       } catch (err) {
+         logger.log('ERROR', `[${operationName}] Failed to commit chunk ${batchIndex}. Size: ${(currentBytesEst / 1024 / 1024).toFixed(2)} MB.`, { error: err.message });
+         throw err;
+       }
+     }
+     currentBatch = db.batch();
+     currentOpsCount = 0;
+     currentBytesEst = 0;
+   };
+
+   for (const write of writes) {
+     let docSize = 100;
+     try { if (write.data) docSize = JSON.stringify(write.data).length; } catch (e) { }
+
+     if (docSize > 900 * 1024) {
+       logger.log('WARN', `[${operationName}] Large document detected (~${(docSize / 1024).toFixed(2)} KB).`);
+     }
+
+     if ((currentOpsCount + 1 > MAX_BATCH_OPS) || (currentBytesEst + docSize > MAX_BATCH_BYTES)) {
+       await commitAndReset();
+     }
+
+     const options = write.options || { merge: true };
+     currentBatch.set(write.ref, write.data, options);
+
+     currentOpsCount++;
+     currentBytesEst += docSize;
+   }
+
+   await commitAndReset();
+ }
+
+ module.exports = { commitBatchInChunks };
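
A hedged usage sketch for the chunked committer (not part of the package); the write list mirrors the { ref, data, options } objects built by the executors above, and the collection name and deps wiring are illustrative:

// Sketch: committing a list of result documents in Firestore-safe chunks.
const { commitBatchInChunks } = require('bulltrackers-module/functions/computation-system/persistence/FirestoreUtils');

async function commitExample(db) {
  const deps = {
    db,
    logger: { log: (level, msg, extra) => console.log(level, msg, extra || '') },
    calculationUtils: { withRetry: async (fn) => fn() },   // stand-in retry helper
  };
  const writes = [
    { ref: db.collection('computation_results').doc('2024-01-02'), data: { _completed: true }, options: { merge: true } },
  ];
  // Splits into batches of at most 300 ops / ~9 MB estimated payload and retries each commit.
  await commitBatchInChunks({}, deps, writes, 'ExampleCommit');
}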