bulltrackers-module 1.0.219 → 1.0.221

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. package/functions/computation-system/WorkflowOrchestrator.js +153 -0
  2. package/functions/computation-system/context/ContextFactory.js +63 -0
  3. package/functions/computation-system/context/ManifestBuilder.js +240 -0
  4. package/functions/computation-system/controllers/computation_controller.js +12 -4
  5. package/functions/computation-system/data/AvailabilityChecker.js +75 -0
  6. package/functions/computation-system/data/CachedDataLoader.js +63 -0
  7. package/functions/computation-system/data/DependencyFetcher.js +70 -0
  8. package/functions/computation-system/executors/MetaExecutor.js +68 -0
  9. package/functions/computation-system/executors/PriceBatchExecutor.js +99 -0
  10. package/functions/computation-system/executors/StandardExecutor.js +109 -0
  11. package/functions/computation-system/helpers/computation_dispatcher.js +7 -7
  12. package/functions/computation-system/helpers/computation_worker.js +44 -18
  13. package/functions/computation-system/layers/mathematics.js +1 -1
  14. package/functions/computation-system/persistence/FirestoreUtils.js +64 -0
  15. package/functions/computation-system/persistence/ResultCommitter.js +119 -0
  16. package/functions/computation-system/persistence/StatusRepository.js +29 -0
  17. package/functions/computation-system/topology/HashManager.js +35 -0
  18. package/functions/computation-system/utils/utils.js +39 -11
  19. package/index.js +8 -3
  20. package/package.json +1 -1
  21. package/functions/computation-system/helpers/computation_manifest_builder.js +0 -320
  22. package/functions/computation-system/helpers/computation_pass_runner.js +0 -119
  23. package/functions/computation-system/helpers/orchestration_helpers.js +0 -352
package/functions/computation-system/persistence/StatusRepository.js ADDED
@@ -0,0 +1,29 @@
+ /**
+  * @fileoverview Manages computation status tracking in Firestore.
+  */
+
+ async function fetchComputationStatus(dateStr, config, { db }) {
+   // FIX: Check dateStr directly, or define 'key' before checking it.
+   if (!dateStr) throw new Error('fetchStatus requires a key');
+
+   const key = dateStr;
+   const collection = config.computationStatusCollection || 'computation_status';
+   const docRef = db.collection(collection).doc(key);
+
+   const snap = await docRef.get();
+   return snap.exists ? snap.data() : {};
+ }
+
+ async function updateComputationStatus(dateStr, updates, config, { db }) {
+   if (!dateStr) throw new Error('updateStatus requires a key');
+
+   if (!updates || Object.keys(updates).length === 0) return;
+
+   const collection = config.computationStatusCollection || 'computation_status';
+   const docRef = db.collection(collection).doc(dateStr);
+
+   await docRef.set(updates, { merge: true });
+   return true;
+ }
+
+ module.exports = { fetchComputationStatus, updateComputationStatus };
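
The two helpers above are the entire status-repository surface: one Firestore document per date (keyed 'YYYY-MM-DD'), read with a plain get() and updated with a merge set(). A minimal usage sketch, assuming a firebase-admin Firestore instance and that the file resolves at the path below; the collection name and status values are illustrative, not part of the package's documented API:

// Sketch only: assumes firebase-admin is initialised with valid credentials.
const admin = require('firebase-admin');
const { fetchComputationStatus, updateComputationStatus } =
  require('bulltrackers-module/functions/computation-system/persistence/StatusRepository');

async function markCalcDone() {
  admin.initializeApp();
  const db = admin.firestore();
  const config = { computationStatusCollection: 'computation_status' };

  // Read the per-date status document.
  const status = await fetchComputationStatus('2025-11-05', config, { db });
  console.log('current status keys:', Object.keys(status));

  // Merge-write one calculation's status; empty update objects are ignored by the helper.
  await updateComputationStatus('2025-11-05', { 'some-calc': 'abc123hash' }, config, { db });
}

markCalcDone().catch(console.error);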
package/functions/computation-system/topology/HashManager.js ADDED
@@ -0,0 +1,35 @@
+ /**
+  * @fileoverview Manages code versioning, hashing, and legacy mappings.
+  */
+ const crypto = require('crypto');
+
+ // Legacy Keys Mapping (Ensures backward compatibility)
+ const LEGACY_MAPPING = {
+   DataExtractor: 'extract',
+   HistoryExtractor: 'history',
+   MathPrimitives: 'compute',
+   Aggregators: 'aggregate',
+   Validators: 'validate',
+   SignalPrimitives: 'signals',
+   SCHEMAS: 'schemas',
+   DistributionAnalytics: 'distribution',
+   TimeSeries: 'TimeSeries',
+   priceExtractor: 'priceExtractor',
+   InsightsExtractor: 'insights',
+   UserClassifier: 'classifier',
+   Psychometrics: 'psychometrics',
+   CognitiveBiases: 'bias',
+   SkillAttribution: 'skill',
+   ExecutionAnalytics: 'execution',
+   AdaptiveAnalytics: 'adaptive'
+ };
+
+ function generateCodeHash(codeString) {
+   if (!codeString) return 'unknown';
+   let clean = codeString.replace(/\/\/.*$/gm, '');
+   clean = clean.replace(/\/\*[\s\S]*?\*\//g, '');
+   clean = clean.replace(/\s+/g, '');
+   return crypto.createHash('sha256').update(clean).digest('hex');
+ }
+
+ module.exports = { LEGACY_MAPPING, generateCodeHash };
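
generateCodeHash strips line comments, block comments, and all whitespace before hashing, so purely cosmetic edits to a calculation do not produce a new version hash. A small standalone check of that behaviour (the function is repeated here only for illustration):

const crypto = require('crypto');

function generateCodeHash(codeString) {
  if (!codeString) return 'unknown';
  let clean = codeString.replace(/\/\/.*$/gm, '');   // drop line comments
  clean = clean.replace(/\/\*[\s\S]*?\*\//g, '');    // drop block comments
  clean = clean.replace(/\s+/g, '');                 // drop all whitespace
  return crypto.createHash('sha256').update(clean).digest('hex');
}

const a = generateCodeHash('function add(a, b) { return a + b; }');
const b = generateCodeHash(`function add(a, b) {
  // same logic, different formatting
  return a + b;
}`);
console.log(a === b); // true: only a logic change produces a new hash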
package/functions/computation-system/utils/utils.js CHANGED
@@ -3,7 +3,7 @@
   */
 
  const { FieldValue, FieldPath } = require('@google-cloud/firestore');
- const crypto = require('crypto');
+ const crypto = require('crypto');
 
  /** Stage 1: Normalize a calculation name to kebab-case */
  function normalizeName(name) { return name.replace(/_/g, '-'); }
@@ -19,12 +19,34 @@ function generateCodeHash(codeString) {
    return crypto.createHash('sha256').update(clean).digest('hex');
  }
 
+ /**
+  * Executes a function with exponential backoff retry logic.
+  * @param {Function} fn - Async function to execute
+  * @param {string} operationName - Label for logging
+  * @param {number} maxRetries - Max attempts (default 3)
+  */
+ async function withRetry(fn, operationName, maxRetries = 3) {
+   let attempt = 0;
+   while (attempt < maxRetries) {
+     try {
+       return await fn();
+     } catch (error) {
+       attempt++;
+       console.warn(`[Retry] ${operationName} failed (Attempt ${attempt}/${maxRetries}): ${error.message}`);
+       if (attempt >= maxRetries) throw error;
+       // Exponential backoff: 1s, 2s, 4s...
+       await new Promise(resolve => setTimeout(resolve, 1000 * Math.pow(2, attempt - 1)));
+     }
+   }
+ }
+
  /** * Stage 2: Commit a batch of writes in chunks
   * FIXED: Now respects write.options (e.g. { merge: false }) to allow overwrites/deletes.
   */
  async function commitBatchInChunks(config, deps, writes, operationName) {
-   const { db, logger, calculationUtils } = deps;
-   const { withRetry } = calculationUtils;
+   const { db, logger } = deps;
+   // Use the local withRetry if not provided in deps
+   const retryFn = (deps.calculationUtils && deps.calculationUtils.withRetry) ? deps.calculationUtils.withRetry : withRetry;
 
    if (!writes || !writes.length) {
      logger.log('WARN', `[${operationName}] No writes to commit.`);
@@ -42,7 +64,7 @@ async function commitBatchInChunks(config, deps, writes, operationName) {
    const commitAndReset = async () => {
      if (currentOpsCount > 0) {
        try {
-         await withRetry(
+         await retryFn(
            () => currentBatch.commit(),
            `${operationName} (Chunk ${batchIndex})`
          );
@@ -94,8 +116,7 @@ function getExpectedDateStrings(startDate, endDate) {
 
  /** Stage 4: Get the earliest date in a *flat* collection where doc IDs are dates. */
  async function getFirstDateFromSimpleCollection(config, deps, collectionName) {
-   const { db, logger, calculationUtils } = deps;
-   const { withRetry } = calculationUtils;
+   const { db, logger } = deps;
    try {
      if (!collectionName) { logger.log('WARN', `[Core Utils] Collection name not provided for simple date query.`); return null; }
      const query = db.collection(collectionName).where(FieldPath.documentId(), '>=', '2000-01-01').orderBy(FieldPath.documentId(), 'asc').limit(1);
@@ -107,8 +128,7 @@ async function getFirstDateFromSimpleCollection(config, deps, collectionName) {
 
  /** Stage 4: Get the earliest date in a sharded collection */
  async function getFirstDateFromCollection(config, deps, collectionName) {
-   const { db, logger, calculationUtils } = deps;
-   const { withRetry } = calculationUtils;
+   const { db, logger } = deps;
    let earliestDate = null;
    try {
      if (!collectionName) { logger.log('WARN', `[Core Utils] Collection name not provided for sharded date query.`); return null; }
@@ -165,8 +185,7 @@ async function getEarliestDataDates(config, deps) {
  }
 
  async function getFirstDateFromPriceCollection(config, deps) {
-   const { db, logger, calculationUtils } = deps;
-   const { withRetry } = calculationUtils;
+   const { db, logger } = deps;
    const collection = config.priceCollection || 'asset_prices';
    try {
      const snapshot = await withRetry(() => db.collection(collection).limit(10).get(), `GetPriceShards(${collection})`);
@@ -184,4 +203,13 @@ async function getFirstDateFromPriceCollection(config, deps) {
    } catch (e) { logger.log('ERROR', `Failed to get earliest price date from ${collection}`, { errorMessage: e.message }); return null; }
  }
 
- module.exports = { FieldValue, FieldPath, normalizeName, commitBatchInChunks, getExpectedDateStrings, getEarliestDataDates, generateCodeHash };
+ module.exports = {
+   FieldValue,
+   FieldPath,
+   normalizeName,
+   commitBatchInChunks,
+   getExpectedDateStrings,
+   getEarliestDataDates,
+   generateCodeHash,
+   withRetry // Exported here
+ };
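
The new withRetry waits 1 s, 2 s, then 4 s between attempts and re-throws after the final failure; commitBatchInChunks prefers deps.calculationUtils.withRetry when provided and otherwise uses this local copy. A minimal usage sketch, assuming the module resolves at the path below; the flaky operation is a stand-in for a Firestore call such as () => currentBatch.commit():

// Sketch only: path and operation name are illustrative.
const { withRetry } = require('bulltrackers-module/functions/computation-system/utils/utils');

async function flakyQuery() {
  // Fails roughly half the time to exercise the retry loop.
  if (Math.random() < 0.5) throw new Error('transient failure');
  return 'ok';
}

withRetry(flakyQuery, 'ExampleOperation', 3)
  .then(result => console.log('succeeded:', result))
  .catch(err => console.error('gave up after 3 attempts:', err.message));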
package/index.js CHANGED
@@ -43,14 +43,19 @@ const taskEngine = {
    handleUpdate: require('./functions/task-engine/helpers/update_helpers').handleUpdate
  };
 
- // --- NEW IMPORT ---
- const { build: buildManifestFunc } = require('./functions/computation-system/helpers/computation_manifest_builder');
+ // --- UPDATED IMPORT: Point to the new Context Domain ---
+ const { build: buildManifestFunc } = require('./functions/computation-system/context/ManifestBuilder');
 
  // Computation System
  const computationSystem = {
-   runComputationPass: require('./functions/computation-system/helpers/computation_pass_runner').runComputationPass,
+   // UPDATED: Point to the new Workflow Orchestrator
+   runComputationPass: require('./functions/computation-system/WorkflowOrchestrator').runComputationPass,
+
+   // These helpers wrap the Orchestrator, so they stay, but we updated their internals (see below)
    dispatchComputationPass: require('./functions/computation-system/helpers/computation_dispatcher').dispatchComputationPass,
    handleComputationTask: require('./functions/computation-system/helpers/computation_worker').handleComputationTask,
+
+   // Utils
    dataLoader: require('./functions/computation-system/utils/data_loader'),
    computationUtils: require('./functions/computation-system/utils/utils'),
    buildManifest: buildManifestFunc
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "bulltrackers-module",
-   "version": "1.0.219",
+   "version": "1.0.221",
    "description": "Helper Functions for Bulltrackers.",
    "main": "index.js",
    "files": [
package/functions/computation-system/helpers/computation_manifest_builder.js DELETED
@@ -1,320 +0,0 @@
- /**
-  * @fileoverview
-  * Dynamic Manifest Builder (v6.1 - Granular Merkle Tree Dependency Hashing)
-  *
-  * KEY FEATURES:
-  * 1. Smart Granular Hashing: Detects SPECIFIC classes used within layers to avoid unnecessary re-runs.
-  *    - Changing 'MathPrimitives' won't invalidate computations that only use 'SignalPrimitives'.
-  * 2. Cascading Invalidation (Merkle Hashing):
-  *    The final hash of a computation is derived from:
-  *    [Own Code] + [Specific Layer Class Hashes] + [Hashes of all Dependencies]
-  */
-
- const { generateCodeHash } = require('../utils/utils');
-
- // 1. Import Layers directly to generate their "State Hashes"
- const MathematicsLayer = require('../layers/mathematics');
- const ExtractorsLayer = require('../layers/extractors');
- const ProfilingLayer = require('../layers/profiling');
- const ValidatorsLayer = require('../layers/validators');
-
- /* --------------------------------------------------
-  * 1. Granular Layer Hash Generation
-  * -------------------------------------------------- */
-
- /**
-  * Generates a map of hashes for every export in a layer.
-  * Returns: { 'MathPrimitives': 'sha256...', 'SignalPrimitives': 'sha256...', ... }
-  */
- function generateLayerHashes(layerExports, layerName) {
-   const hashes = {};
-   const keys = Object.keys(layerExports).sort();
-
-   for (const key of keys) {
-     const item = layerExports[key];
-     // We hash the specific export content prefixed with its identity
-     let source = `LAYER:${layerName}:EXPORT:${key}`;
-
-     if (typeof item === 'function') {
-       source += item.toString();
-     } else if (typeof item === 'object' && item !== null) {
-       source += JSON.stringify(item);
-     } else {
-       source += String(item);
-     }
-
-     hashes[key] = generateCodeHash(source);
-   }
-   return hashes;
- }
-
- // Pre-compute granular layer hashes at startup
- // Structure: { mathematics: { MathPrimitives: '...', SignalPrimitives: '...' }, ... }
- const LAYER_HASHES = {
-   'mathematics': generateLayerHashes(MathematicsLayer, 'mathematics'),
-   'extractors': generateLayerHashes(ExtractorsLayer, 'extractors'),
-   'profiling': generateLayerHashes(ProfilingLayer, 'profiling'),
-   'validators': generateLayerHashes(ValidatorsLayer, 'validators')
- };
-
- // Map code patterns to SPECIFIC Layer Exports
- // Structure: { LayerName: { ExportName: [Triggers...] } }
- const LAYER_TRIGGERS = {
-   'mathematics': {
-     'MathPrimitives': ['math.compute', 'MathPrimitives'],
-     'SignalPrimitives': ['math.signals', 'SignalPrimitives', 'signals.'],
-     'Aggregators': ['math.aggregate', 'Aggregators'],
-     'TimeSeries': ['math.timeseries', 'TimeSeries', 'timeSeries.'],
-     'TimeSeriesAnalysis': ['TimeSeriesAnalysis'],
-     'DistributionAnalytics': ['math.distribution', 'DistributionAnalytics', 'distribution.'],
-     'FinancialEngineering': ['math.financial', 'FinancialEngineering']
-   },
-   'extractors': {
-     'DataExtractor': ['math.extract', 'DataExtractor'],
-     'HistoryExtractor': ['math.history', 'HistoryExtractor'],
-     'priceExtractor': ['math.prices', 'priceExtractor'],
-     'InsightsExtractor': ['math.insights', 'InsightsExtractor', 'insights.'],
-     'TradeSeriesBuilder': ['math.tradeSeries', 'TradeSeriesBuilder']
-   },
-   'profiling': {
-     'SCHEMAS': ['math.profiling', 'SCHEMAS'],
-     'UserClassifier': ['math.classifier', 'UserClassifier'],
-     'Psychometrics': ['math.psychometrics', 'Psychometrics'],
-     'CognitiveBiases': ['math.bias', 'CognitiveBiases'],
-     'SkillAttribution': ['math.skill', 'SkillAttribution'],
-     'ExecutionAnalytics': ['ExecutionAnalytics'],
-     'AdaptiveAnalytics': ['AdaptiveAnalytics']
-   },
-   'validators': {
-     'Validators': ['math.validate', 'Validators']
-   }
- };
-
- /* --------------------------------------------------
-  * Pretty Console Helpers
-  * -------------------------------------------------- */
- const log = {
-   info: (msg) => console.log('ℹ︎ ' + msg),
-   step: (msg) => console.log('› ' + msg),
-   warn: (msg) => console.warn('⚠︎ ' + msg),
-   success: (msg) => console.log('✔︎ ' + msg),
-   error: (msg) => console.error('✖ ' + msg),
-   fatal: (msg) => { console.error('✖ FATAL ✖ ' + msg); console.error('✖ FATAL ✖ Manifest build FAILED.'); },
-   divider: (label) => { const line = ''.padEnd(60, '─'); console.log(`\n${line}\n${label}\n${line}\n`); },
- };
-
- /* --------------------------------------------------
-  * Helper Utilities
-  * -------------------------------------------------- */
-
- const normalizeName = (name) => { if (typeof name !== 'string') return name; return name.trim().replace(/,$/, '').replace(/_/g, '-').toLowerCase(); };
-
- function suggestClosest(name, candidates, n = 3) {
-   const levenshtein = (a = '', b = '') => {
-     const m = a.length, n = b.length;
-     if (!m) return n; if (!n) return m;
-     const dp = Array.from({ length: m + 1 }, (_, i) => Array(n + 1).fill(i));
-     for (let j = 0; j <= n; j++) dp[0][j] = j;
-     for (let i = 1; i <= m; i++)
-       for (let j = 1; j <= n; j++)
-         dp[i][j] = a[i - 1] === b[j - 1] ? dp[i - 1][j - 1] : Math.min(dp[i - 1][j - 1], dp[i][j - 1], dp[i - 1][j]) + 1;
-     return dp[m][n];
-   };
-   const scores = candidates.map(c => [c, levenshtein(name, c)]);
-   scores.sort((a, b) => a[1] - b[1]);
-   return scores.slice(0, n).map(s => s[0]);
- }
-
- function getDependencySet(endpoints, adjacencyList) {
-   const required = new Set(endpoints);
-   const queue = [...endpoints];
-   while (queue.length > 0) { const calcName = queue.shift(); const dependencies = adjacencyList.get(calcName);
-     if (dependencies) { for (const dep of dependencies) { if (!required.has(dep)) { required.add(dep); queue.push(dep); } } } }
-   return required;
- }
-
- /* --------------------------------------------------
-  * Core Manifest Builder
-  * -------------------------------------------------- */
-
- function buildManifest(productLinesToRun = [], calculations) {
-   log.divider('Building Dynamic Manifest (Granular Merkle Hashing)');
-   log.info(`Target Product Lines: [${productLinesToRun.join(', ')}]`);
-
-   const manifestMap = new Map();
-   const adjacency = new Map();
-   const reverseAdjacency = new Map();
-   const inDegree = new Map();
-   let hasFatalError = false;
-
-   /* ---------------- 1. Load All Calculations (Phase 1: Intrinsic Hash) ---------------- */
-   log.step('Loading and validating all calculation classes…');
-
-   function processCalc(Class, name, folderName) {
-     if (!Class || typeof Class !== 'function') return;
-     const normalizedName = normalizeName(name);
-
-     if (typeof Class.getMetadata !== 'function') { log.fatal(`Calculation "${normalizedName}" is missing static getMetadata().`); hasFatalError = true; return; }
-     if (typeof Class.getDependencies !== 'function') { log.fatal(`Calculation "${normalizedName}" is missing static getDependencies().`); hasFatalError = true; return; }
-
-     const metadata = Class.getMetadata();
-     const dependencies = Class.getDependencies().map(normalizeName);
-
-     const codeStr = Class.toString();
-     if (metadata.isHistorical === true && !codeStr.includes('yesterday') && !codeStr.includes('previousComputed')) {
-       log.warn(`Calculation "${normalizedName}" marked 'isHistorical' but no 'previousComputed' state reference found.`);
-     }
-
-     let finalCategory = folderName === 'core' && metadata.category ? metadata.category : folderName;
-
-     // --- PHASE 1: INTRINSIC HASH (Code + Granular Layers) ---
-     let compositeHashString = generateCodeHash(codeStr);
-     const usedDeps = [];
-
-     // Check for specific layer usage (Granular Check)
-     for (const [layerName, exportsMap] of Object.entries(LAYER_TRIGGERS)) {
-       const layerHashes = LAYER_HASHES[layerName]; // { Export: Hash }
-
-       for (const [exportName, triggers] of Object.entries(exportsMap)) {
-         // If code uses this specific export...
-         if (triggers.some(trigger => codeStr.includes(trigger))) {
-           const exportHash = layerHashes[exportName];
-           if (exportHash) {
-             compositeHashString += exportHash;
-             usedDeps.push(`${layerName}.${exportName}`);
-           }
-         }
-       }
-     }
-
-     // Safe Mode Fallback: If no dependencies detected, assume usage of ALL to be safe.
-     let isSafeMode = false;
-     if (usedDeps.length === 0) {
-       isSafeMode = true;
-       Object.values(LAYER_HASHES).forEach(layerObj => {
-         Object.values(layerObj).forEach(h => compositeHashString += h);
-       });
-     }
-
-     const baseHash = generateCodeHash(compositeHashString);
-
-     const manifestEntry = {
-       name: normalizedName,
-       class: Class,
-       category: finalCategory,
-       sourcePackage: folderName,
-       type: metadata.type,
-       isHistorical: metadata.isHistorical,
-       rootDataDependencies: metadata.rootDataDependencies || [],
-       userType: metadata.userType,
-       dependencies: dependencies,
-       pass: 0,
-       hash: baseHash,
-       debugUsedLayers: isSafeMode ? ['ALL (Safe Mode)'] : usedDeps
-     };
-
-     manifestMap.set(normalizedName, manifestEntry);
-     adjacency.set(normalizedName, dependencies);
-     inDegree.set(normalizedName, dependencies.length);
-     dependencies.forEach(dep => { if (!reverseAdjacency.has(dep)) reverseAdjacency.set(dep, []); reverseAdjacency.get(dep).push(normalizedName); });
-   }
-
-   if (!calculations || typeof calculations !== 'object') {
-     throw new Error('Manifest build failed: Invalid calculations object.');
-   }
-
-   for (const folderName in calculations) {
-     if (folderName === 'legacy') continue;
-     const group = calculations[folderName];
-     for (const key in group) {
-       const entry = group[key];
-       if (typeof entry === 'function') { processCalc(entry, key, folderName); }
-     }
-   }
-
-   if (hasFatalError) { throw new Error('Manifest build failed due to missing static methods.'); }
-   log.success(`Loaded ${manifestMap.size} calculations.`);
-
-   /* ---------------- 2. Validate Dependency Links ---------------- */
-   const allNames = new Set(manifestMap.keys());
-   let invalidLinks = false;
-   for (const [name, entry] of manifestMap) {
-     for (const dep of entry.dependencies) {
-       if (!allNames.has(dep)) {
-         invalidLinks = true;
-         const guesses = suggestClosest(dep, Array.from(allNames));
-         log.error(`${name} depends on unknown calculation "${dep}". Did you mean: ${guesses.join(', ')}?`);
-       }
-       if (dep === name) {
-         invalidLinks = true;
-         log.error(`${name} has a circular dependency on *itself*!`);
-       }
-     }
-   }
-   if (invalidLinks) { throw new Error('Manifest validation failed.'); }
-
-   /* ---------------- 3. Filter for Product Lines ---------------- */
-   const productLineEndpoints = [];
-   for (const [name, entry] of manifestMap.entries()) { if (productLinesToRun.includes(entry.category)) { productLineEndpoints.push(name); } }
-   for (const [name, entry] of manifestMap.entries()) { if (entry.sourcePackage === 'core') { productLineEndpoints.push(name); } }
-
-   const requiredCalcs = getDependencySet(productLineEndpoints, adjacency);
-   log.info(`Filtered down to ${requiredCalcs.size} active calculations.`);
-
-   const filteredManifestMap = new Map();
-   const filteredInDegree = new Map();
-   const filteredReverseAdjacency = new Map();
-
-   for (const name of requiredCalcs) { filteredManifestMap.set(name, manifestMap.get(name)); filteredInDegree.set(name, inDegree.get(name));
-     const consumers = (reverseAdjacency.get(name) || []).filter(consumer => requiredCalcs.has(consumer)); filteredReverseAdjacency.set(name, consumers); }
-
-   /* ---------------- 4. Topological Sort ---------------- */
-   const sortedManifest = [];
-   const queue = [];
-   let maxPass = 0;
-
-   for (const [name, degree] of filteredInDegree) { if (degree === 0) { queue.push(name); filteredManifestMap.get(name).pass = 1; maxPass = 1; } }
-   queue.sort();
-   while (queue.length) {
-     const currentName = queue.shift();
-     const currentEntry = filteredManifestMap.get(currentName);
-     sortedManifest.push(currentEntry);
-
-     for (const neighborName of (filteredReverseAdjacency.get(currentName) || [])) { const newDegree = filteredInDegree.get(neighborName) - 1; filteredInDegree.set(neighborName, newDegree);
-       const neighborEntry = filteredManifestMap.get(neighborName);
-       if (neighborEntry.pass <= currentEntry.pass) { neighborEntry.pass = currentEntry.pass + 1; if (neighborEntry.pass > maxPass) maxPass = neighborEntry.pass; }
-       if (newDegree === 0) { queue.push(neighborName); } }
-     queue.sort(); }
-
-   if (sortedManifest.length !== filteredManifestMap.size) {
-     throw new Error('Circular dependency detected. Manifest build failed.'); }
-
-   /* ---------------- 5. Phase 2: Cascading Dependency Hashing ---------------- */
-   log.step('Computing Cascading Merkle Hashes...');
-
-   for (const entry of sortedManifest) {
-     // Start with the intrinsic hash (Code + Granular Layers)
-     let dependencySignature = entry.hash;
-
-     if (entry.dependencies && entry.dependencies.length > 0) {
-       const depHashes = entry.dependencies.map(depName => { const depEntry = filteredManifestMap.get(depName); if (!depEntry) return ''; return depEntry.hash; }).join('|');
-       dependencySignature += `|DEPS:${depHashes}`;
-     }
-     // Generate the Final Smart Hash
-     entry.hash = generateCodeHash(dependencySignature);
-   }
-
-   log.success(`Total passes required: ${maxPass}`);
-   return sortedManifest;
- }
-
- function build(productLinesToRun, calculations) {
-   try {
-     return buildManifest(productLinesToRun, calculations);
-   } catch (error) {
-     log.error(error.message);
-     return null;
-   }
- }
-
- module.exports = { build };
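
Step 5 of the deleted builder is the cascading ("Merkle") part of the scheme: entries are processed in topological order and each final hash folds in the already-finalized hashes of its dependencies, so a change to an upstream calculation invalidates every downstream one. The replacement context/ManifestBuilder.js (+240 lines above) is not shown in this diff, so the toy sketch below follows the removed implementation; the data and the hashEntries helper are hypothetical, not part of the package API.

const crypto = require('crypto');
const sha256 = s => crypto.createHash('sha256').update(s).digest('hex');

function hashEntries(sortedEntries) {
  // sortedEntries must already be topologically sorted (dependencies first).
  const byName = new Map(sortedEntries.map(e => [e.name, e]));
  for (const entry of sortedEntries) {
    let signature = entry.hash;                      // intrinsic hash (code + layer hashes)
    if (entry.dependencies.length) {
      // Dependencies were finalized earlier in the loop, so this folds in their cascaded hashes.
      const depHashes = entry.dependencies.map(d => byName.get(d).hash).join('|');
      signature += `|DEPS:${depHashes}`;
    }
    entry.hash = sha256(signature);                  // final cascading hash
  }
  return sortedEntries;
}

// 'daily-pnl' depends on 'trades', so any change to 'trades' changes both final hashes.
console.log(hashEntries([
  { name: 'trades',    hash: sha256('v1'), dependencies: [] },
  { name: 'daily-pnl', hash: sha256('v1'), dependencies: ['trades'] },
]));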
package/functions/computation-system/helpers/computation_pass_runner.js DELETED
@@ -1,119 +0,0 @@
- /**
-  * FILENAME: bulltrackers-module/functions/computation-system/helpers/computation_pass_runner.js
-  * FIXED: 'storedStatus.substring' crash and 'missing dependency' log clarity.
-  */
-
- const {
-   groupByPass,
-   checkRootDataAvailability,
-   fetchExistingResults,
-   fetchComputationStatus,
-   updateComputationStatus,
-   runStandardComputationPass,
-   runMetaComputationPass,
-   checkRootDependencies,
-   runBatchPriceComputation
- } = require('./orchestration_helpers.js');
-
- const { getExpectedDateStrings, normalizeName } = require('../utils/utils.js');
-
- const PARALLEL_BATCH_SIZE = 7;
-
- async function runComputationPass(config, dependencies, computationManifest) {
-   const { logger } = dependencies;
-   const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
-   if (!passToRun) return logger.log('ERROR', '[PassRunner] No pass defined. Aborting.');
-
-   logger.log('INFO', `🚀 Starting PASS ${passToRun} (Legacy Mode)...`);
-
-   const earliestDates = {
-     portfolio: new Date('2025-09-25T00:00:00Z'),
-     history: new Date('2025-11-05T00:00:00Z'),
-     social: new Date('2025-10-30T00:00:00Z'),
-     insights: new Date('2025-08-26T00:00:00Z'),
-     price: new Date('2025-08-01T00:00:00Z')
-   };
-
-   earliestDates.absoluteEarliest = Object.values(earliestDates).reduce((a, b) => a < b ? a : b);
-
-   const passes = groupByPass(computationManifest);
-   const calcsInThisPass = passes[passToRun] || [];
-
-   if (!calcsInThisPass.length) return logger.log('WARN', `[PassRunner] No calcs for Pass ${passToRun}. Exiting.`);
-
-   const passEarliestDate = earliestDates.absoluteEarliest;
-   const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
-   const allExpectedDates = getExpectedDateStrings(passEarliestDate, endDateUTC);
-   const priceBatchCalcs = calcsInThisPass.filter(c => c.type === 'meta' && c.rootDataDependencies?.includes('price'));
-   const standardAndOtherMetaCalcs = calcsInThisPass.filter(c => !priceBatchCalcs.includes(c));
-
-   if (priceBatchCalcs.length > 0) {
-     try { await runBatchPriceComputation(config, dependencies, allExpectedDates, priceBatchCalcs);
-     } catch (e) { logger.log('ERROR', 'Legacy Batch Price failed', e); }
-   }
-
-   if (standardAndOtherMetaCalcs.length === 0) return;
-
-   for (let i = 0; i < allExpectedDates.length; i += PARALLEL_BATCH_SIZE) {
-     const batch = allExpectedDates.slice(i, i + PARALLEL_BATCH_SIZE);
-     await Promise.all(batch.map(dateStr => runDateComputation(dateStr, passToRun, standardAndOtherMetaCalcs, config, dependencies, computationManifest)));
-   }
- }
-
- async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, dependencies, computationManifest) {
-   const { logger } = dependencies;
-   const dateToProcess = new Date(dateStr + 'T00:00:00Z');
-   const dailyStatus = await fetchComputationStatus(dateStr, config, dependencies);
-   const calcsToAttempt = [];
-   for (const calc of calcsInThisPass) {
-     const cName = normalizeName(calc.name);
-     const storedStatus = dailyStatus[cName];
-     const currentHash = calc.hash;
-
-     if (calc.dependencies && calc.dependencies.length > 0) { const missing = calc.dependencies.filter(depName => !dailyStatus[normalizeName(depName)]); if (missing.length > 0) { logger.log('TRACE', `[Skip] ${cName} missing deps: ${missing.join(', ')}`); continue; } }
-     if (!storedStatus) { logger.log('INFO', `[Versioning] ${cName}: New run needed (No prior status).`); calcsToAttempt.push(calc); continue; }
-     if (typeof storedStatus === 'string' && currentHash && storedStatus !== currentHash) { logger.log('INFO', `[Versioning] ${cName}: Code Changed. (Old: ${storedStatus.substring(0,6)}... New: ${currentHash.substring(0,6)}...)`); calcsToAttempt.push(calc); continue; }
-     if (storedStatus === true && currentHash) { logger.log('INFO', `[Versioning] ${cName}: Upgrading legacy status to Hash.`); calcsToAttempt.push(calc); continue; }
-   }
-
-   if (!calcsToAttempt.length) return null;
-
-   const earliestDates = {
-     portfolio: new Date('2025-09-25T00:00:00Z'),
-     history: new Date('2025-11-05T00:00:00Z'),
-     social: new Date('2025-10-30T00:00:00Z'),
-     insights: new Date('2025-08-26T00:00:00Z'),
-     price: new Date('2025-08-01T00:00:00Z')
-   };
-
-   const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
-   if (!rootData) { logger.log('INFO', `[DateRunner] Root data missing for ${dateStr}. Skipping.`); return null; }
-   const runnableCalcs = calcsToAttempt.filter(c => checkRootDependencies(c, rootData.status).canRun);
-   if (!runnableCalcs.length) return null;
-   const standardToRun = runnableCalcs.filter(c => c.type === 'standard');
-   const metaToRun = runnableCalcs.filter(c => c.type === 'meta');
-   logger.log('INFO', `[DateRunner] Running ${dateStr}: ${standardToRun.length} std, ${metaToRun.length} meta`);
-   const dateUpdates = {};
-
-   try {
-     const calcsRunning = [...standardToRun, ...metaToRun];
-     const existingResults = await fetchExistingResults(dateStr, calcsRunning, computationManifest, config, dependencies, false);
-     const prevDate = new Date(dateToProcess); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
-     const prevDateStr = prevDate.toISOString().slice(0, 10);
-     const previousResults = await fetchExistingResults(prevDateStr, calcsRunning, computationManifest, config, dependencies, true);
-
-     if (standardToRun.length) { const updates = await runStandardComputationPass(dateToProcess, standardToRun, `Pass ${passToRun} (Std)`, config, dependencies, rootData, existingResults, previousResults, false); Object.assign(dateUpdates, updates); }
-     if (metaToRun.length) { const updates = await runMetaComputationPass(dateToProcess, metaToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, previousResults, rootData, false); Object.assign(dateUpdates, updates); }
-
-   } catch (err) {
-     logger.log('ERROR', `[DateRunner] FAILED Pass ${passToRun} for ${dateStr}`, { errorMessage: err.message });
-     [...standardToRun, ...metaToRun].forEach(c => dateUpdates[normalizeName(c.name)] = false);
-     throw err;
-   }
-
-   if (Object.keys(dateUpdates).length > 0) { await updateComputationStatus(dateStr, dateUpdates, config, dependencies); }
-
-   return { date: dateStr, updates: dateUpdates };
- }
-
- module.exports = { runComputationPass, runDateComputation };
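
The removed runner's per-date loop decided whether a calculation needed to re-run by comparing the stored status value against the manifest hash: run when there is no prior status, when the stored hash differs from the current hash, or when a legacy boolean true needs upgrading to a hash. Condensed into a standalone helper for reference (hypothetical function, not the package API; the replacement WorkflowOrchestrator.js is not shown in this diff):

function needsRun(storedStatus, currentHash) {
  if (!storedStatus) return 'new run (no prior status)';
  if (typeof storedStatus === 'string' && currentHash && storedStatus !== currentHash) {
    return 'code changed (hash mismatch)';
  }
  if (storedStatus === true && currentHash) return 'upgrade legacy status to hash';
  return null; // up to date, skip
}

console.log(needsRun(undefined, 'abc123')); // new run (no prior status)
console.log(needsRun('abc122', 'abc123'));  // code changed (hash mismatch)
console.log(needsRun(true, 'abc123'));      // upgrade legacy status to hash
console.log(needsRun('abc123', 'abc123'));  // null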