bulltrackers-module 1.0.175 → 1.0.176

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,22 +1,24 @@
  /**
- * FIXED: orchestration_helpers.js
- * V3.3: Added Status Document logic (Single Source of Truth for Run Status).
+ * FILENAME: bulltrackers-module/functions/computation-system/helpers/orchestration_helpers.js
  */

- const { ComputationController } = require('../controllers/computation_controller');
+ const { ComputationController } = require('../controllers/computation_controller');
+ const { batchStoreSchemas } = require('../utils/schema_capture');
+ const { normalizeName, commitBatchInChunks } = require('../utils/utils');
  const {
  getPortfolioPartRefs, loadDailyInsights, loadDailySocialPostInsights,
  getHistoryPartRefs, streamPortfolioData, streamHistoryData
  } = require('../utils/data_loader');
- const { batchStoreSchemas } = require('../utils/schema_capture');
- const { normalizeName, commitBatchInChunks } = require('../utils/utils');

- // --- Helpers ---

- function groupByPass(manifest) {
- return manifest.reduce((acc, calc) => { (acc[calc.pass] = acc[calc.pass] || []).push(calc); return acc; }, {});
- }
+ /**
+ * Groups calculations from a manifest by their 'pass' property.
+ */
+ function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[calc.pass] = acc[calc.pass] || []).push(calc); return acc; }, {}); }

+ /**
+ * Checks if all root data dependencies for a given calculation are met.
+ */
  function checkRootDependencies(calcManifest, rootDataStatus) {
  const missing = [];
  if (!calcManifest.rootDataDependencies) return { canRun: true, missing };
@@ -29,24 +31,22 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
  return { canRun: missing.length === 0, missing };
  }

+ /**
+ * Checks for the availability of all required root data for a specific date.
+ */
  async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
- const { logger } = dependencies;
+ // ... [Unchanged content of checkRootDataAvailability] ...
+ const { logger } = dependencies;
  const dateToProcess = new Date(dateStr + 'T00:00:00Z');
-
- let portfolioRefs = [], historyRefs = [];
- let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false;
- let insightsData = null, socialData = null;
+ let portfolioRefs = [], historyRefs = [];
+ let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false, insightsData = null , socialData = null;

  try {
  const tasks = [];
- if (dateToProcess >= earliestDates.portfolio)
- tasks.push(getPortfolioPartRefs(config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio = !!r.length; }));
- if (dateToProcess >= earliestDates.insights)
- tasks.push(loadDailyInsights(config, dependencies, dateStr).then(r => { insightsData = r; hasInsights = !!r; }));
- if (dateToProcess >= earliestDates.social)
- tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
- if (dateToProcess >= earliestDates.history)
- tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));
+ if (dateToProcess >= earliestDates.portfolio) tasks.push(getPortfolioPartRefs(config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio = !!r.length; }));
+ if (dateToProcess >= earliestDates.insights) tasks.push(loadDailyInsights(config, dependencies, dateStr).then(r => { insightsData = r; hasInsights = !!r; }));
+ if (dateToProcess >= earliestDates.social) tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
+ if (dateToProcess >= earliestDates.history) tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));

  await Promise.all(tasks);

@@ -57,83 +57,114 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
  todayInsights: insightsData, todaySocialPostInsights: socialData,
  status: { hasPortfolio, hasInsights, hasSocial, hasHistory }
  };
+
  } catch (err) {
  logger.log('ERROR', `Error checking data: ${err.message}`);
  return null;
  }
  }

- // --- NEW: Status Document Helpers ---
-
+ /**
+ * --- DEPRECATED: Old per-date fetch ---
+ * Keeps compatibility but logic moves to fetchGlobalComputationStatus
+ */
  async function fetchComputationStatus(dateStr, config, { db }) {
  const collection = config.computationStatusCollection || 'computation_status';
- const docRef = db.collection(collection).doc(dateStr);
+ const docRef = db.collection(collection).doc(dateStr);
+ const snap = await docRef.get();
+ return snap.exists ? snap.data() : {};
+ }
+
+ /**
+ * --- NEW: Fetches the SINGLE GLOBAL status document ---
+ * Loads the entire history of statuses in one read.
+ */
+ async function fetchGlobalComputationStatus(config, { db }) {
+ const collection = config.computationStatusCollection || 'computation_status';
+ const docRef = db.collection(collection).doc('global_status');
  const snap = await docRef.get();
  return snap.exists ? snap.data() : {};
  }

+ /**
+ * --- DEPRECATED: Old per-date update ---
+ */
  async function updateComputationStatus(dateStr, updates, config, { db }) {
  if (!updates || Object.keys(updates).length === 0) return;
  const collection = config.computationStatusCollection || 'computation_status';
- const docRef = db.collection(collection).doc(dateStr);
- // Merge the new statuses (true/false) into the daily tracking document
- await docRef.set(updates, { merge: true });
+ const docRef = db.collection(collection).doc(dateStr);
+ await docRef.set(updates, { merge: true });
  }

- // --- OPTIMIZED FETCH ---
- // Now strictly used for fetching DATA (results), not for checking if something ran.
- async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db }, includeSelf = false) {
- const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
- const calcsToFetch = new Set();
+ /**
+ * --- NEW: Batch Updates to Global Document ---
+ * Accepts a map of { "YYYY-MM-DD": { calcName: true, ... } }
+ * and writes them using dot notation to avoid overwriting other dates.
+ */
+ async function updateGlobalComputationStatus(updatesByDate, config, { db }) {
+ if (!updatesByDate || Object.keys(updatesByDate).length === 0) return;
+ const collection = config.computationStatusCollection || 'computation_status';
+ const docRef = db.collection(collection).doc('global_status');

- for (const calc of calcsInPass) {
- if (calc.dependencies) {
- calc.dependencies.forEach(d => calcsToFetch.add(normalizeName(d)));
- }
- if (includeSelf && calc.isHistorical) {
- calcsToFetch.add(normalizeName(calc.name));
+ // Flatten to dot notation for Firestore update: "2023-10-27.calcName": true
+ const flattenUpdates = {};
+ for (const [date, statuses] of Object.entries(updatesByDate)) {
+ for (const [calc, status] of Object.entries(statuses)) {
+ flattenUpdates[`${date}.${calc}`] = status;
  }
  }

+ try {
+ await docRef.update(flattenUpdates);
+ } catch (err) {
+ // If doc doesn't exist (first run), update fails. Use set({merge:true}).
+ if (err.code === 5) { // NOT_FOUND
+ const deepObj = {};
+ for (const [date, statuses] of Object.entries(updatesByDate)) {
+ deepObj[date] = statuses;
+ }
+ await docRef.set(deepObj, { merge: true });
+ } else {
+ throw err;
+ }
+ }
+ }
+
+ /**
+ * --- UPDATED: fetchExistingResults ---
+ * (Unchanged, keeps fetching results per date as this is heavy data)
+ */
+ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db }, includeSelf = false) {
+ const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
+ const calcsToFetch = new Set();
+ for (const calc of calcsInPass) {
+ if (calc.dependencies) { calc.dependencies.forEach(d => calcsToFetch.add(normalizeName(d))); }
+ if (includeSelf && calc.isHistorical) { calcsToFetch.add(normalizeName(calc.name)); }
+ }
  if (!calcsToFetch.size) return {};
-
  const fetched = {};
  const docRefs = [];
- const names = [];
-
+ const names = [];
  for (const name of calcsToFetch) {
  const m = manifestMap.get(name);
- if (m) {
- docRefs.push(db.collection(config.resultsCollection).doc(dateStr)
- .collection(config.resultsSubcollection).doc(m.category || 'unknown')
- .collection(config.computationsSubcollection).doc(name));
- names.push(name);
- }
- }
-
+ if (m) { docRefs.push(db.collection(config.resultsCollection).doc(dateStr)
+ .collection(config.resultsSubcollection).doc(m.category || 'unknown')
+ .collection(config.computationsSubcollection).doc(name));
+ names.push(name); } }
  if (docRefs.length) {
  const snaps = await db.getAll(...docRefs);
- snaps.forEach((doc, i) => {
- if(doc.exists && doc.data()._completed) {
- fetched[names[i]] = doc.data();
- }
- });
+ snaps.forEach((doc, i) => { if(doc.exists && doc.data()._completed) { fetched[names[i]] = doc.data(); } });
  }
  return fetched;
  }

- function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, existingResults, passToRun, dateStr, earliestDates) {
- // DEPRECATED in favor of Status Document logic in Pass Runner.
- // Kept for backward compatibility if needed, but effectively replaced.
- return { standardCalcsToRun: standardCalcs, metaCalcsToRun: metaCalcs };
- }
-
- // --- EXECUTION DELEGATES ---
-
+ /**
+ * --- UPDATED: streamAndProcess ---
+ * (Unchanged)
+ */
  async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps) {
  const { logger } = deps;
  const controller = new ComputationController(config, deps);
-
  const calcs = Object.values(state).filter(c => c && c.manifest);
  const streamingCalcs = calcs.filter(c =>
  c.manifest.rootDataDependencies.includes('portfolio') ||
@@ -148,20 +179,13 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
  const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
  const prevDateStr = prevDate.toISOString().slice(0, 10);

- // 1. Today's Portfolio Stream
  const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);

- // 2. Yesterday's Portfolio Stream (for 'isHistorical' calcs)
  const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
- const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs)
- ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs)
- : null;
+ const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;

- // 3. Today's History Stream
  const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
- const tH_iter = (needsTradingHistory && historyRefs)
- ? streamHistoryData(config, deps, dateStr, historyRefs)
- : null;
+ const tH_iter = (needsTradingHistory && historyRefs) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;

  let yP_chunk = {};
  let tH_chunk = {};
@@ -187,36 +211,47 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
  logger.log('INFO', `[${passName}] Streaming complete.`);
  }

- // --- RUNNERS ---
-
- async function runStandardComputationPass(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps) {
+ /**
+ * --- UPDATED: runStandardComputationPass ---
+ * Now accepts `skipStatusWrite` and returns `successUpdates`
+ */
+ async function runStandardComputationPass(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps, skipStatusWrite = false) {
  const dStr = date.toISOString().slice(0, 10);
  const logger = deps.logger;

  const fullRoot = { ...rootData };
  if (calcs.some(c => c.isHistorical)) {
- const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
+ const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
  const prevStr = prev.toISOString().slice(0, 10);
  fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
  }

  const state = {};
  for (const c of calcs) {
- try {
- const inst = new c.class();
- inst.manifest = c;
- state[normalizeName(c.name)] = inst;
- } catch(e) { logger.log('WARN', `Failed to init ${c.name}`); }
+ try {
+ const inst = new c.class();
+ inst.manifest = c;
+ state[normalizeName(c.name)] = inst;
+ }
+ catch(e) {
+ logger.log('WARN', `Failed to init ${c.name}`);
+ }
  }

  await streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps);
- await commitResults(state, dStr, passName, config, deps);
+
+ // Return the updates instead of just writing them
+ return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
  }

- async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, previousFetchedDeps, rootData) {
+ /**
+ * --- UPDATED: runMetaComputationPass ---
+ * Now accepts `skipStatusWrite` and returns `successUpdates`
+ */
+ async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, previousFetchedDeps, rootData, skipStatusWrite = false) {
  const controller = new ComputationController(config, deps);
- const dStr = date.toISOString().slice(0, 10);
- const state = {};
+ const dStr = date.toISOString().slice(0, 10);
+ const state = {};

  for (const mCalc of calcs) {
  try {
@@ -227,12 +262,16 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
  } catch (e) { deps.logger.log('ERROR', `Meta calc failed ${mCalc.name}: ${e.message}`); }
  }

- await commitResults(state, dStr, passName, config, deps);
+ return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
  }

- async function commitResults(stateObj, dStr, passName, config, deps) {
+ /**
+ * --- UPDATED: commitResults ---
+ * Added `skipStatusWrite` parameter. Returns `successUpdates`.
+ */
+ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
  const writes = [], schemas = [], sharded = {};
- const successUpdates = {}; // Track which calcs finished successfully
+ const successUpdates = {};

  for (const name in stateObj) {
  const calc = stateObj[name];
@@ -265,19 +304,14 @@ async function commitResults(stateObj, dStr, passName, config, deps) {
  });
  }

- // Mark as successful in our local tracker
  successUpdates[name] = true;

  } catch (e) { deps.logger.log('ERROR', `Commit failed ${name}: ${e.message}`); }
  }

- // 1. Store Schemas
  if (schemas.length) batchStoreSchemas(deps, config, schemas).catch(()=>{});
-
- // 2. Write Results
  if (writes.length) await commitBatchInChunks(config, deps, writes, `${passName} Results`);

- // 3. Write Sharded Results
  for (const col in sharded) {
  const sWrites = [];
  for (const id in sharded[col]) {
@@ -287,11 +321,12 @@ async function commitResults(stateObj, dStr, passName, config, deps) {
  if (sWrites.length) await commitBatchInChunks(config, deps, sWrites, `${passName} Sharded ${col}`);
  }

- // 4. Update Status Document (Single Source of Truth)
- if (Object.keys(successUpdates).length > 0) {
+ if (!skipStatusWrite && Object.keys(successUpdates).length > 0) {
  await updateComputationStatus(dStr, successUpdates, config, deps);
  deps.logger.log('INFO', `[${passName}] Updated status document for ${Object.keys(successUpdates).length} computations.`);
  }
+
+ return successUpdates;
  }

  module.exports = {
@@ -299,8 +334,10 @@ module.exports = {
  checkRootDependencies,
  checkRootDataAvailability,
  fetchExistingResults,
- fetchComputationStatus, // Exported for Pass Runner
- updateComputationStatus, // Exported for Pass Runner (error handling)
+ fetchComputationStatus,
+ fetchGlobalComputationStatus,
+ updateComputationStatus,
+ updateGlobalComputationStatus,
  runStandardComputationPass,
  runMetaComputationPass
- };
+ };
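
The practical effect of this release is that pass runners can defer the per-date status write (`skipStatusWrite = true`), collect the `successUpdates` maps returned by `runStandardComputationPass` / `runMetaComputationPass`, and flush them in one batched write to the `global_status` document via `updateGlobalComputationStatus`. A minimal sketch of that calling pattern follows; the `runBackfill` wrapper, its `rootDataByDate` argument, and the require path are hypothetical illustrations, not part of this package — only the exported helpers and their signatures are taken from the diff above.

const {
  runStandardComputationPass,
  fetchGlobalComputationStatus,
  updateGlobalComputationStatus,
} = require('./orchestration_helpers'); // assumed path

// Hypothetical wrapper: runs one pass across several dates, writing status once at the end.
async function runBackfill(dates, calcs, passName, config, deps, rootDataByDate) {
  // One read returns the status map for every date: { "YYYY-MM-DD": { calcName: true, ... } }
  const globalStatus = await fetchGlobalComputationStatus(config, deps);

  const updatesByDate = {};
  for (const date of dates) {
    const dStr = date.toISOString().slice(0, 10);
    const done = globalStatus[dStr] || {};
    // Illustrative filter; assumes calc names are already normalized like the status keys.
    const pending = calcs.filter(c => !done[c.name]);
    if (!pending.length) continue;

    // skipStatusWrite = true defers the per-date status write; the returned map is collected instead.
    const successUpdates = await runStandardComputationPass(
      date, pending, passName, config, deps, rootDataByDate[dStr], {}, {}, true
    );
    if (Object.keys(successUpdates).length) updatesByDate[dStr] = successUpdates;
  }

  // Single batched write; the helper flattens keys to "YYYY-MM-DD.calcName" dot notation.
  await updateGlobalComputationStatus(updatesByDate, config, deps);
}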