bulltrackers-module 1.0.214 → 1.0.215

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,8 +4,7 @@
  */

  // Load all layers dynamically from the index
- const mathLayer = require('../layers/index');
-
+ const mathLayer = require('../layers/index');

  const { loadDailyInsights, loadDailySocialPostInsights, getRelevantShardRefs, getPriceShardRefs } = require('../utils/data_loader');

  // Legacy Keys Mapping (Ensures backward compatibility with existing Calculations)
@@ -26,76 +25,20 @@ const LEGACY_MAPPING = {
  };

  class DataLoader {
- constructor(config, dependencies) {
- this.config = config;
- this.deps = dependencies;
- this.cache = { mappings: null, insights: new Map(), social: new Map(), prices: null };
- }
-
- get mappings() { return this.cache.mappings; }
-
- async loadMappings() {
- if (this.cache.mappings) return this.cache.mappings;
- const { calculationUtils } = this.deps;
- this.cache.mappings = await calculationUtils.loadInstrumentMappings();
- return this.cache.mappings;
- }
- async loadInsights(dateStr) {
- if (this.cache.insights.has(dateStr)) return this.cache.insights.get(dateStr);
- const insights = await loadDailyInsights(this.config, this.deps, dateStr);
- this.cache.insights.set(dateStr, insights);
- return insights;
- }
- async loadSocial(dateStr) {
- if (this.cache.social.has(dateStr)) return this.cache.social.get(dateStr);
- const social = await loadDailySocialPostInsights(this.config, this.deps, dateStr);
- this.cache.social.set(dateStr, social);
- return social;
- }
-
- async getPriceShardReferences() {
- return getPriceShardRefs(this.config, this.deps);
- }
-
- async getSpecificPriceShardReferences(targetInstrumentIds) {
- return getRelevantShardRefs(this.config, this.deps, targetInstrumentIds);
- }
-
- async loadPriceShard(docRef) {
- try {
- const snap = await docRef.get();
- if (!snap.exists) return {};
- return snap.data();
- } catch (e) {
- console.error(`Error loading shard ${docRef.path}:`, e);
- return {};
- }
- }
+ constructor(config, dependencies) { this.config = config; this.deps = dependencies; this.cache = { mappings: null, insights: new Map(), social: new Map(), prices: null }; }
+ get mappings() { return this.cache.mappings; }
+ async loadMappings() { if (this.cache.mappings) return this.cache.mappings; const { calculationUtils } = this.deps; this.cache.mappings = await calculationUtils.loadInstrumentMappings(); return this.cache.mappings; }
+ async loadInsights(dateStr) { if (this.cache.insights.has(dateStr)) return this.cache.insights.get(dateStr); const insights = await loadDailyInsights(this.config, this.deps, dateStr); this.cache.insights.set(dateStr, insights); return insights; }
+ async loadSocial(dateStr) { if (this.cache.social.has(dateStr)) return this.cache.social.get(dateStr); const social = await loadDailySocialPostInsights(this.config, this.deps, dateStr); this.cache.social.set(dateStr, social); return social; }
+ async getPriceShardReferences() { return getPriceShardRefs(this.config, this.deps); }
+ async getSpecificPriceShardReferences (targetInstrumentIds) { return getRelevantShardRefs(this.config, this.deps, targetInstrumentIds); }
+ async loadPriceShard(docRef) { try { const snap = await docRef.get(); if (!snap.exists) return {}; return snap.data(); } catch (e) { console.error(`Error loading shard ${docRef.path}:`, e); return {}; } }
  }

  class ContextBuilder {
-
- /**
- * dynamically constructs the 'math' object.
- * 1. Iterates over all exports from layers/index.js
- * 2. Maps standard classes to legacy keys (extract, compute, etc.)
- * 3. Adds ALL classes by their actual name to support new features automatically.
- */
  static buildMathContext() {
  const mathContext = {};
-
- // 1. Auto-discover and map
- for (const [key, value] of Object.entries(mathLayer)) {
- // Add by actual name (e.g. math.NewFeature)
- mathContext[key] = value;
-
- // Map to legacy key if exists (e.g. math.extract)
- const legacyKey = LEGACY_MAPPING[key];
- if (legacyKey) {
- mathContext[legacyKey] = value;
- }
- }
-
+ for (const [key, value] of Object.entries(mathLayer)) { mathContext[key] = value; const legacyKey = LEGACY_MAPPING[key]; if (legacyKey) { mathContext[legacyKey] = value; } }
  return mathContext;
  }

@@ -107,7 +50,7 @@ class ContextBuilder {
  insights: { today: insights?.today, yesterday: insights?.yesterday },
  social: { today: socialData?.today, yesterday: socialData?.yesterday },
  mappings: mappings || {},
- math: ContextBuilder.buildMathContext(), // DYNAMIC LOAD
+ math: ContextBuilder.buildMathContext(),
  computed: computedDependencies || {},
  previousComputed: previousComputedDependencies || {},
  meta: metadata, config, deps
@@ -122,7 +65,7 @@ class ContextBuilder {
  social: { today: socialData?.today, yesterday: socialData?.yesterday },
  prices: prices || {},
  mappings: mappings || {},
- math: ContextBuilder.buildMathContext(), // DYNAMIC LOAD
+ math: ContextBuilder.buildMathContext(),
  computed: computedDependencies || {},
  previousComputed: previousComputedDependencies || {},
  meta: metadata, config, deps
@@ -133,23 +76,23 @@ class ContextBuilder {
  class ComputationExecutor {
  constructor(config, dependencies, dataLoader) {
  this.config = config;
- this.deps = dependencies;
+ this.deps = dependencies;
  this.loader = dataLoader;
  }

  async executePerUser(calcInstance, metadata, dateStr, portfolioData, yesterdayPortfolioData, historyData, computedDeps, prevDeps) {
- const { logger } = this.deps;
+ const { logger } = this.deps;
  const targetUserType = metadata.userType;
- const mappings = await this.loader.loadMappings();
- const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await this.loader.loadInsights(dateStr) } : null;
+ const mappings = await this.loader.loadMappings();
+ const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await this.loader.loadInsights(dateStr) } : null;

  // Access SCHEMAS dynamically from the loaded layer
  const SCHEMAS = mathLayer.SCHEMAS;

  for (const [userId, todayPortfolio] of Object.entries(portfolioData)) {
  const yesterdayPortfolio = yesterdayPortfolioData ? yesterdayPortfolioData[userId] : null;
- const todayHistory = historyData ? historyData[userId] : null;
- const actualUserType = todayPortfolio.PublicPositions ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
+ const todayHistory = historyData ? historyData[userId] : null;
+ const actualUserType = todayPortfolio.PublicPositions ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
  if (targetUserType !== 'all') {
  const mappedTarget = (targetUserType === 'speculator') ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
  if (mappedTarget !== actualUserType) continue;
@@ -160,10 +103,10 @@ class ComputationExecutor {
  }

  async executeOncePerDay(calcInstance, metadata, dateStr, computedDeps, prevDeps) {
- const mappings = await this.loader.loadMappings();
+ const mappings = await this.loader.loadMappings();
  const { logger } = this.deps;
- const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await this.loader.loadInsights(dateStr) } : null;
- const social = metadata.rootDataDependencies?.includes('social') ? { today: await this.loader.loadSocial(dateStr) } : null;
+ const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await this.loader.loadInsights(dateStr) } : null;
+ const social = metadata.rootDataDependencies?.includes('social') ? { today: await this.loader.loadSocial(dateStr) } : null;

  if (metadata.rootDataDependencies?.includes('price')) {
  logger.log('INFO', `[Executor] Running Batched/Sharded Execution for ${metadata.name}`);
@@ -171,7 +114,7 @@
  if (shardRefs.length === 0) { logger.log('WARN', '[Executor] No price shards found.'); return {}; }
  let processedCount = 0;
  for (const ref of shardRefs) {
- const shardData = await this.loader.loadPriceShard(ref);
+ const shardData = await this.loader.loadPriceShard(ref);
  const partialContext = ContextBuilder.buildMetaContext({ dateStr, metadata, mappings, insights, socialData: social, prices: { history: shardData }, computedDependencies: computedDeps, previousComputedDependencies: prevDeps, config: this.config, deps: this.deps });
  await calcInstance.process(partialContext);
  partialContext.prices = null;
@@ -189,9 +132,9 @@ class ComputationExecutor {

  class ComputationController {
  constructor(config, dependencies) {
- this.config = config;
- this.deps = dependencies;
- this.loader = new DataLoader(config, dependencies);
+ this.config = config;
+ this.deps = dependencies;
+ this.loader = new DataLoader(config, dependencies);
  this.executor = new ComputationExecutor(config, dependencies, this.loader);
  }
  }
@@ -17,53 +17,42 @@ const TOPIC_NAME = 'computation-tasks';
  */
  async function dispatchComputationPass(config, dependencies, computationManifest) {
  const { logger } = dependencies;
-
- // Create fresh PubSubUtils instance
  const pubsubUtils = new PubSubUtils(dependencies);
-
  const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
-
- if (!passToRun) {
- return logger.log('ERROR', '[Dispatcher] No pass defined (COMPUTATION_PASS_TO_RUN). Aborting.');
- }
+ if (!passToRun) { return logger.log('ERROR', '[Dispatcher] No pass defined (COMPUTATION_PASS_TO_RUN). Aborting.'); }

  // 1. Validate Pass Existence
  const passes = groupByPass(computationManifest);
  const calcsInThisPass = passes[passToRun] || [];

- if (!calcsInThisPass.length) {
- return logger.log('WARN', `[Dispatcher] No calcs for Pass ${passToRun}. Exiting.`);
- }
+ if (!calcsInThisPass.length) { return logger.log('WARN', `[Dispatcher] No calcs for Pass ${passToRun}. Exiting.`); }

  const calcNames = calcsInThisPass.map(c => c.name).join(', ');
  logger.log('INFO', `🚀 [Dispatcher] Preparing PASS ${passToRun}.`);
  logger.log('INFO', `[Dispatcher] Included Calculations: [${calcNames}]`);

  // 2. Determine Date Range
- // Hardcoded earliest dates - keep synced with PassRunner for now
  const earliestDates = {
  portfolio: new Date('2025-09-25T00:00:00Z'),
- history: new Date('2025-11-05T00:00:00Z'),
- social: new Date('2025-10-30T00:00:00Z'),
- insights: new Date('2025-08-26T00:00:00Z'),
- price: new Date('2025-08-01T00:00:00Z')
+ history: new Date('2025-11-05T00:00:00Z'),
+ social: new Date('2025-10-30T00:00:00Z'),
+ insights: new Date('2025-08-26T00:00:00Z'),
+ price: new Date('2025-08-01T00:00:00Z')
  };
- const passEarliestDate = Object.values(earliestDates).reduce((a, b) => a < b ? a : b);
- const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));

+ const passEarliestDate = Object.values(earliestDates).reduce((a, b) => a < b ? a : b);
+ const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
  const allExpectedDates = getExpectedDateStrings(passEarliestDate, endDateUTC);

  logger.log('INFO', `[Dispatcher] Dispatches checks for ${allExpectedDates.length} dates (${allExpectedDates[0]} to ${allExpectedDates[allExpectedDates.length - 1]}). Workers will validate dependencies.`);

  // 3. Dispatch Messages
  let dispatchedCount = 0;
- const BATCH_SIZE = 50;
+ const BATCH_SIZE = 50;

  // We can publish in parallel batches
  const chunks = [];
- for (let i = 0; i < allExpectedDates.length; i += BATCH_SIZE) {
- chunks.push(allExpectedDates.slice(i, i + BATCH_SIZE));
- }
+ for (let i = 0; i < allExpectedDates.length; i += BATCH_SIZE) { chunks.push(allExpectedDates.slice(i, i + BATCH_SIZE)); }

  for (const chunk of chunks) {
  const messages = chunk.map(dateStr => ({
@@ -79,9 +68,7 @@ async function dispatchComputationPass(config, dependencies, computationManifest
  await pubsubUtils.publishMessageBatch(TOPIC_NAME, messages);
  dispatchedCount += messages.length;
  logger.log('INFO', `[Dispatcher] Dispatched batch of ${messages.length} tasks.`);
- } catch (err) {
- logger.log('ERROR', `[Dispatcher] Failed to dispatch batch: ${err.message}`);
- }
+ } catch (err) { logger.log('ERROR', `[Dispatcher] Failed to dispatch batch: ${err.message}`); }
  }

  logger.log('INFO', `[Dispatcher] Finished. Dispatched ${dispatchedCount} checks for Pass ${passToRun}.`);
@@ -24,18 +24,14 @@ const ValidatorsLayer = require('../layers/validators');
  * -------------------------------------------------- */

  function generateLayerHash(layerExports, layerName) {
- const keys = Object.keys(layerExports).sort(); // Sort for determinism
+ const keys = Object.keys(layerExports).sort();
  let combinedSource = `LAYER:${layerName}`;

  for (const key of keys) {
  const item = layerExports[key];
- if (typeof item === 'function') {
- combinedSource += item.toString();
- } else if (typeof item === 'object' && item !== null) {
- combinedSource += JSON.stringify(item);
- } else {
- combinedSource += String(item);
- }
+ if (typeof item === 'function') { combinedSource += item.toString();
+ } else if (typeof item === 'object' && item !== null) { combinedSource += JSON.stringify(item);
+ } else { combinedSource += String(item); }
  }
  return generateCodeHash(combinedSource);
  }
@@ -43,9 +39,9 @@ function generateLayerHash(layerExports, layerName) {
  // Pre-compute layer hashes at startup
  const LAYER_HASHES = {
  'mathematics': generateLayerHash(MathematicsLayer, 'mathematics'),
- 'extractors': generateLayerHash(ExtractorsLayer, 'extractors'),
- 'profiling': generateLayerHash(ProfilingLayer, 'profiling'),
- 'validators': generateLayerHash(ValidatorsLayer, 'validators')
+ 'extractors': generateLayerHash(ExtractorsLayer, 'extractors'),
+ 'profiling': generateLayerHash(ProfilingLayer, 'profiling'),
+ 'validators': generateLayerHash(ValidatorsLayer, 'validators')
  };

  // Map code patterns to Layer dependencies
@@ -81,11 +77,11 @@ const LAYER_TRIGGERS = {
  * Pretty Console Helpers
  * -------------------------------------------------- */
  const log = {
- info: (msg) => console.log('ℹ︎ ' + msg),
- step: (msg) => console.log('› ' + msg),
- warn: (msg) => console.warn('⚠︎ ' + msg),
- success: (msg) => console.log('✔︎ ' + msg),
- error: (msg) => console.error('✖ ' + msg),
+ info: (msg) => console.log('ℹ︎ ' + msg),
+ step: (msg) => console.log('› ' + msg),
+ warn: (msg) => console.warn('⚠︎ ' + msg),
+ success: (msg) => console.log('✔︎ ' + msg),
+ error: (msg) => console.error('✖ ' + msg),
  fatal: (msg) => { console.error('✖ FATAL ✖ ' + msg); console.error('✖ FATAL ✖ Manifest build FAILED.'); },
  divider: (label) => { const line = ''.padEnd(60, '─'); console.log(`\n${line}\n${label}\n${line}\n`); },
  };
@@ -141,7 +137,7 @@ function buildManifest(productLinesToRun = [], calculations) {
  if (!Class || typeof Class !== 'function') return;
  const normalizedName = normalizeName(name);

- if (typeof Class.getMetadata !== 'function') { log.fatal(`Calculation "${normalizedName}" is missing static getMetadata().`); hasFatalError = true; return; }
+ if (typeof Class.getMetadata !== 'function') { log.fatal(`Calculation "${normalizedName}" is missing static getMetadata().`); hasFatalError = true; return; }
  if (typeof Class.getDependencies !== 'function') { log.fatal(`Calculation "${normalizedName}" is missing static getDependencies().`); hasFatalError = true;return; }

  const metadata = Class.getMetadata();
@@ -161,10 +157,7 @@ function buildManifest(productLinesToRun = [], calculations) {

  // Check for specific layer usage
  for (const [layerName, triggers] of Object.entries(LAYER_TRIGGERS)) {
- if (triggers.some(trigger => codeStr.includes(trigger))) {
- compositeHashString += LAYER_HASHES[layerName];
- usedLayers.push(layerName);
- }
+ if (triggers.some(trigger => codeStr.includes(trigger))) { compositeHashString += LAYER_HASHES[layerName]; usedLayers.push(layerName); }
  }

  // Safe Mode Fallback
@@ -187,7 +180,7 @@ function buildManifest(productLinesToRun = [], calculations) {
  userType: metadata.userType,
  dependencies: dependencies,
  pass: 0,
- hash: baseHash, // Intrinsic Hash (Updated later to include deps)
+ hash: baseHash,
  debugUsedLayers: isSafeMode ? ['ALL (Safe Mode)'] : usedLayers
  };

@@ -233,13 +226,8 @@ function buildManifest(productLinesToRun = [], calculations) {

  /* ---------------- 3. Filter for Product Lines ---------------- */
  const productLineEndpoints = [];
- for (const [name, entry] of manifestMap.entries()) {
- if (productLinesToRun.includes(entry.category)) { productLineEndpoints.push(name); }
- }
- // Always include core
- for (const [name, entry] of manifestMap.entries()) {
- if (entry.sourcePackage === 'core') { productLineEndpoints.push(name); }
- }
+ for (const [name, entry] of manifestMap.entries()) { if (productLinesToRun.includes(entry.category)) { productLineEndpoints.push(name); } }
+ for (const [name, entry] of manifestMap.entries()) { if (entry.sourcePackage === 'core') { productLineEndpoints.push(name); } }

  const requiredCalcs = getDependencySet(productLineEndpoints, adjacency);
  log.info(`Filtered down to ${requiredCalcs.size} active calculations.`);
@@ -247,6 +235,7 @@ function buildManifest(productLinesToRun = [], calculations) {
  const filteredManifestMap = new Map();
  const filteredInDegree = new Map();
  const filteredReverseAdjacency = new Map();
+
  for (const name of requiredCalcs) { filteredManifestMap.set(name, manifestMap.get(name)); filteredInDegree.set(name, inDegree.get(name));
  const consumers = (reverseAdjacency.get(name) || []).filter(consumer => requiredCalcs.has(consumer)); filteredReverseAdjacency.set(name, consumers); }

@@ -272,26 +261,16 @@ function buildManifest(productLinesToRun = [], calculations) {
  throw new Error('Circular dependency detected. Manifest build failed.'); }

  /* ---------------- 5. Phase 2: Cascading Dependency Hashing ---------------- */
- // Now that we have a topological order (Dependencies come BEFORE Consumers),
- // we can update hashes sequentially.
  log.step('Computing Cascading Merkle Hashes...');

  for (const entry of sortedManifest) {
  // Start with the intrinsic hash (Code + Layers)
  let dependencySignature = entry.hash;

- // Append the hashes of all dependencies
- // Since we are iterating in topo order, dependencies are guaranteed to be processed/updated already.
  if (entry.dependencies && entry.dependencies.length > 0) {
- const depHashes = entry.dependencies.map(depName => {
- const depEntry = filteredManifestMap.get(depName);
- if (!depEntry) return ''; // Should not happen given validation
- return depEntry.hash;
- }).join('|'); // Use separator to prevent collisions
-
+ const depHashes = entry.dependencies.map(depName => { const depEntry = filteredManifestMap.get(depName); if (!depEntry) return ''; return depEntry.hash; }).join('|');
  dependencySignature += `|DEPS:${depHashes}`;
  }
-
  // Generate the Final Smart Hash
  entry.hash = generateCodeHash(dependencySignature);
  }
@@ -33,24 +33,22 @@ async function runComputationPass(config, dependencies, computationManifest) {
  insights: new Date('2025-08-26T00:00:00Z'),
  price: new Date('2025-08-01T00:00:00Z')
  };
+
  earliestDates.absoluteEarliest = Object.values(earliestDates).reduce((a, b) => a < b ? a : b);

  const passes = groupByPass(computationManifest);
  const calcsInThisPass = passes[passToRun] || [];

- if (!calcsInThisPass.length)
- return logger.log('WARN', `[PassRunner] No calcs for Pass ${passToRun}. Exiting.`);
-
- const passEarliestDate = earliestDates.absoluteEarliest;
- const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
- const allExpectedDates = getExpectedDateStrings(passEarliestDate, endDateUTC);
+ if (!calcsInThisPass.length) return logger.log('WARN', `[PassRunner] No calcs for Pass ${passToRun}. Exiting.`);

- const priceBatchCalcs = calcsInThisPass.filter(c => c.type === 'meta' && c.rootDataDependencies?.includes('price'));
+ const passEarliestDate = earliestDates.absoluteEarliest;
+ const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
+ const allExpectedDates = getExpectedDateStrings(passEarliestDate, endDateUTC);
+ const priceBatchCalcs = calcsInThisPass.filter(c => c.type === 'meta' && c.rootDataDependencies?.includes('price'));
  const standardAndOtherMetaCalcs = calcsInThisPass.filter(c => !priceBatchCalcs.includes(c));

  if (priceBatchCalcs.length > 0) {
- try {
- await runBatchPriceComputation(config, dependencies, allExpectedDates, priceBatchCalcs);
+ try { await runBatchPriceComputation(config, dependencies, allExpectedDates, priceBatchCalcs);
  } catch (e) { logger.log('ERROR', 'Legacy Batch Price failed', e); }
  }

@@ -63,51 +61,26 @@ async function runComputationPass(config, dependencies, computationManifest) {
  }

  async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, dependencies, computationManifest) {
- const { logger } = dependencies;
+ const { logger } = dependencies;
  const dateToProcess = new Date(dateStr + 'T00:00:00Z');
-
- const dailyStatus = await fetchComputationStatus(dateStr, config, dependencies);
-
- // Filter AND Log reason for skipping
+ const dailyStatus = await fetchComputationStatus(dateStr, config, dependencies);
  const calcsToAttempt = [];
-
  for (const calc of calcsInThisPass) {
- const cName = normalizeName(calc.name);
+ const cName = normalizeName(calc.name);
  const storedStatus = dailyStatus[cName];
- const currentHash = calc.hash;
+ const currentHash = calc.hash;

  // 1. Dependency Check
- if (calc.dependencies && calc.dependencies.length > 0) {
- const missing = calc.dependencies.filter(depName => !dailyStatus[normalizeName(depName)]);
- if (missing.length > 0) {
- // Too noisy to log every skip, but useful for debugging if needed.
- // Only logging if it's NOT a bulk skip.
- // logger.log('TRACE', `[Skip] ${cName} missing deps: ${missing.join(', ')}`);
- continue;
- }
- }
+ if (calc.dependencies && calc.dependencies.length > 0) { const missing = calc.dependencies.filter(depName => !dailyStatus[normalizeName(depName)]); if (missing.length > 0) { logger.log('TRACE', `[Skip] ${cName} missing deps: ${missing.join(', ')}`); continue; } }

  // 2. Logic A: No previous run
- if (!storedStatus) {
- logger.log('INFO', `[Versioning] ${cName}: New run needed (No prior status).`);
- calcsToAttempt.push(calc);
- continue;
- }
+ if (!storedStatus) { logger.log('INFO', `[Versioning] ${cName}: New run needed (No prior status).`); calcsToAttempt.push(calc); continue; }

  // 3. Logic B: Hash Mismatch
- // FIX: Ensure storedStatus is a string before calling substring
- if (typeof storedStatus === 'string' && currentHash && storedStatus !== currentHash) {
- logger.log('INFO', `[Versioning] ${cName}: Code Changed. (Old: ${storedStatus.substring(0,6)}... New: ${currentHash.substring(0,6)}...)`);
- calcsToAttempt.push(calc);
- continue;
- }
+ if (typeof storedStatus === 'string' && currentHash && storedStatus !== currentHash) { logger.log('INFO', `[Versioning] ${cName}: Code Changed. (Old: ${storedStatus.substring(0,6)}... New: ${currentHash.substring(0,6)}...)`); calcsToAttempt.push(calc); continue; }

  // 4. Logic C: Upgrade Legacy Boolean -> Hash
- if (storedStatus === true && currentHash) {
- logger.log('INFO', `[Versioning] ${cName}: Upgrading legacy status to Hash.`);
- calcsToAttempt.push(calc);
- continue;
- }
+ if (storedStatus === true && currentHash) { logger.log('INFO', `[Versioning] ${cName}: Upgrading legacy status to Hash.`); calcsToAttempt.push(calc); continue; }
  }

  if (!calcsToAttempt.length) return null;
@@ -121,47 +94,34 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
  };

  const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
- if (!rootData) {
- logger.log('INFO', `[DateRunner] Root data missing for ${dateStr}. Skipping.`);
- return null;
- }
-
+ if (!rootData) { logger.log('INFO', `[DateRunner] Root data missing for ${dateStr}. Skipping.`); return null; }
  const runnableCalcs = calcsToAttempt.filter(c => checkRootDependencies(c, rootData.status).canRun);
-
  if (!runnableCalcs.length) return null;
-
  const standardToRun = runnableCalcs.filter(c => c.type === 'standard');
  const metaToRun = runnableCalcs.filter(c => c.type === 'meta');
-
  logger.log('INFO', `[DateRunner] Running ${dateStr}: ${standardToRun.length} std, ${metaToRun.length} meta`);
-
  const dateUpdates = {};

  try {
  const calcsRunning = [...standardToRun, ...metaToRun];
-
  const existingResults = await fetchExistingResults(dateStr, calcsRunning, computationManifest, config, dependencies, false);
  const prevDate = new Date(dateToProcess); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
  const prevDateStr = prevDate.toISOString().slice(0, 10);
  const previousResults = await fetchExistingResults(prevDateStr, calcsRunning, computationManifest, config, dependencies, true);

- if (standardToRun.length) {
- const updates = await runStandardComputationPass(dateToProcess, standardToRun, `Pass ${passToRun} (Std)`, config, dependencies, rootData, existingResults, previousResults, false);
- Object.assign(dateUpdates, updates);
- }
- if (metaToRun.length) {
- const updates = await runMetaComputationPass(dateToProcess, metaToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, previousResults, rootData, false);
- Object.assign(dateUpdates, updates);
- }
+ if (standardToRun.length) { const updates = await runStandardComputationPass(dateToProcess, standardToRun, `Pass ${passToRun} (Std)`, config, dependencies, rootData, existingResults, previousResults, false);
+ Object.assign(dateUpdates, updates); }
+
+ if (metaToRun.length) { const updates = await runMetaComputationPass(dateToProcess, metaToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, previousResults, rootData, false);
+ Object.assign(dateUpdates, updates); }
+
  } catch (err) {
  logger.log('ERROR', `[DateRunner] FAILED Pass ${passToRun} for ${dateStr}`, { errorMessage: err.message });
  [...standardToRun, ...metaToRun].forEach(c => dateUpdates[normalizeName(c.name)] = false);
  throw err;
  }

- if (Object.keys(dateUpdates).length > 0) {
- await updateComputationStatus(dateStr, dateUpdates, config, dependencies);
- }
+ if (Object.keys(dateUpdates).length > 0) { await updateComputationStatus(dateStr, dateUpdates, config, dependencies); }

  return { date: dateStr, updates: dateUpdates };
  }
@@ -17,70 +17,28 @@ async function handleComputationTask(message, config, dependencies, computationM
  let data;
  try {
  // 1. Handle Cloud Functions Gen 2 (CloudEvent)
- // Structure: event.data.message.data (base64)
- if (message.data && message.data.message && message.data.message.data) {
- const buffer = Buffer.from(message.data.message.data, 'base64');
- data = JSON.parse(buffer.toString());
- }
+ if (message.data && message.data.message && message.data.message.data) { const buffer = Buffer.from(message.data.message.data, 'base64'); data = JSON.parse(buffer.toString()); }
  // 2. Handle Cloud Functions Gen 1 / Legacy PubSub
- // Structure: message.data (base64) or message.json
- else if (message.data && typeof message.data === 'string') {
- const buffer = Buffer.from(message.data, 'base64');
- data = JSON.parse(buffer.toString());
- }
+ else if (message.data && typeof message.data === 'string') { const buffer = Buffer.from(message.data, 'base64'); data = JSON.parse(buffer.toString()); }
  // 3. Handle Direct JSON (Test harness or simulator)
- else if (message.json) {
- data = message.json;
- }
+ else if (message.json) { data = message.json; }
  // 4. Fallback: Assume message is the payload
- else {
- data = message;
- }
- } catch (parseError) {
- logger.log('ERROR', `[Worker] Failed to parse Pub/Sub payload.`, { error: parseError.message });
- return;
- }
+ else { data = message; }
+ } catch (parseError) { logger.log('ERROR', `[Worker] Failed to parse Pub/Sub payload.`, { error: parseError.message }); return; }

  try {
  // Validate Action
- if (!data || data.action !== 'RUN_COMPUTATION_DATE') {
- // Only log if data exists but action is wrong, prevents log spam on empty messages
- if (data) logger.log('WARN', `[Worker] Unknown or missing action: ${data?.action}. Ignoring.`);
- return;
- }
-
+ if (!data || data.action !== 'RUN_COMPUTATION_DATE') { if (data) logger.log('WARN', `[Worker] Unknown or missing action: ${data?.action}. Ignoring.`); return; }
  const { date, pass } = data;
-
- if (!date || !pass) {
- logger.log('ERROR', `[Worker] Missing date or pass in payload: ${JSON.stringify(data)}`);
- return;
- }
-
+ if (!date || !pass) { logger.log('ERROR', `[Worker] Missing date or pass in payload: ${JSON.stringify(data)}`); return; }
  logger.log('INFO', `[Worker] Received task: Date=${date}, Pass=${pass}`);
-
- // Resolve calculations for this pass
  const passes = groupByPass(computationManifest);
  const calcsInThisPass = passes[pass] || [];
-
- if (!calcsInThisPass.length) {
- logger.log('WARN', `[Worker] No calculations found for Pass ${pass}.`);
- return;
- }
-
- // Execute the computation for this specific date
- // The runner internally checks dependencies (Pass 1, 2, 3 status) and skips if not ready.
+ if (!calcsInThisPass.length) { logger.log('WARN', `[Worker] No calculations found for Pass ${pass}.`); return; }
  const result = await runDateComputation(date, pass, calcsInThisPass, config, dependencies, computationManifest);
-
- if (result) {
- logger.log('INFO', `[Worker] Successfully processed ${date} (Pass ${pass}). Updates: ${Object.keys(result.updates || {}).length}`);
- } else {
- logger.log('INFO', `[Worker] Processed ${date} (Pass ${pass}) - Skipped (Dependencies missing or already done).`);
- }
-
- } catch (err) {
- logger.log('ERROR', `[Worker] Fatal error processing task: ${err.message}`, { stack: err.stack });
- throw err; // Re-throw to trigger Pub/Sub retry
- }
+ if (result) { logger.log('INFO', `[Worker] Successfully processed ${date} (Pass ${pass}). Updates: ${Object.keys(result.updates || {}).length}`);
+ } else { logger.log('INFO', `[Worker] Processed ${date} (Pass ${pass}) - Skipped (Dependencies missing or already done).`); }
+ } catch (err) { logger.log('ERROR', `[Worker] Fatal error processing task: ${err.message}`, { stack: err.stack }); throw err; }
  }

  module.exports = { handleComputationTask };