bulltrackers-module 1.0.263 → 1.0.265

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  /**
  * @fileoverview Main Orchestrator. Coordinates the topological execution.
- * UPDATED: Uses centralized AvailabilityChecker for strict UserType validation.
+ * UPDATED: Implements Smart Audit logic to detect WHY a hash mismatch occurred.
  */
  const { normalizeName, DEFINITIVE_EARLIEST_DATES } = require('./utils/utils');
  const { checkRootDataAvailability, checkRootDependencies } = require('./data/AvailabilityChecker');
@@ -10,13 +10,13 @@ const { StandardExecutor } = require('./executor
  const { MetaExecutor } = require('./executors/MetaExecutor');
  const { generateProcessId, PROCESS_TYPES } = require('./logger/logger');

- // [FIX] Split IMPOSSIBLE into semantic categories
  const STATUS_IMPOSSIBLE_PREFIX = 'IMPOSSIBLE';

  function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[calc.pass] = acc[calc.pass] || []).push(calc); return acc; }, {}); }

  /**
  * Analyzes whether calculations should run, be skipped, or are blocked.
+ * Now performs Deep Hash Analysis to explain Re-Runs.
  */
  function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus, manifestMap, prevDailyStatus = null) {
  const report = { runnable: [], blocked: [], impossible: [], failedDependency: [], reRuns: [], skipped: [] };
@@ -28,7 +28,6 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  const stored = currentStatusMap[norm];
  const depManifest = manifestMap.get(norm);
  if (!stored) return false;
- // [FIX] Check for any IMPOSSIBLE variant
  if (typeof stored.hash === 'string' && stored.hash.startsWith(STATUS_IMPOSSIBLE_PREFIX)) return false;
  if (!depManifest) return false;
  if (stored.hash !== depManifest.hash) return false;
@@ -42,7 +41,6 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  const storedCategory = stored ? stored.category : null;
  const currentHash = calc.hash;

- // [FIX] Granular impossible marking
  const markImpossible = (reason, type = 'GENERIC') => {
  report.impossible.push({ name: cName, reason });
  const statusHash = `${STATUS_IMPOSSIBLE_PREFIX}:${type}`;
@@ -58,7 +56,6 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  let migrationOldCategory = null;
  if (storedCategory && storedCategory !== calc.category) { migrationOldCategory = storedCategory; }

- // [FIX] Check for any IMPOSSIBLE variant in storage
  if (typeof storedHash === 'string' && storedHash.startsWith(STATUS_IMPOSSIBLE_PREFIX)) {
  report.skipped.push({ name: cName, reason: `Permanently Impossible (${storedHash})` });
  continue;
@@ -69,7 +66,6 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  if (!rootCheck.canRun) {
  const missingStr = rootCheck.missing.join(', ');
  if (!isTargetToday) {
- // [FIX] Mark specifically as NO_DATA
  markImpossible(`Missing Root Data: ${missingStr} (Historical)`, 'NO_DATA');
  } else {
  report.blocked.push({ name: cName, reason: `Missing Root Data: ${missingStr} (Waiting)` });
@@ -83,7 +79,6 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  for (const dep of calc.dependencies) {
  const normDep = normalizeName(dep);
  const depStored = simulationStatus[normDep];
- // [FIX] Check for any IMPOSSIBLE variant in dependencies
  if (depStored && typeof depStored.hash === 'string' && depStored.hash.startsWith(STATUS_IMPOSSIBLE_PREFIX)) {
  dependencyIsImpossible = true;
  break;
@@ -93,7 +88,6 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  }

  if (dependencyIsImpossible) {
- // [FIX] Mark specifically as UPSTREAM failure
  markImpossible('Dependency is Impossible', 'UPSTREAM');
  continue;
  }
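
Taken together, the hunks above replace the old single IMPOSSIBLE sentinel with typed variants (`IMPOSSIBLE:NO_DATA`, `IMPOSSIBLE:UPSTREAM`, and the `GENERIC` default) while every consumer keeps a simple prefix check. A minimal sketch of that convention, with illustrative object shapes and values that are not part of the package's API:

```js
// Status "hashes" double as sentinels: markImpossible(reason, type) stores `IMPOSSIBLE:<TYPE>`.
const STATUS_IMPOSSIBLE_PREFIX = 'IMPOSSIBLE';

const noData   = { hash: `${STATUS_IMPOSSIBLE_PREFIX}:NO_DATA` };  // historical root data missing
const upstream = { hash: `${STATUS_IMPOSSIBLE_PREFIX}:UPSTREAM` }; // a dependency is impossible

// Consumers only test the prefix, so new variants stay backward-compatible:
function isImpossible(status) {
  return typeof status.hash === 'string' && status.hash.startsWith(STATUS_IMPOSSIBLE_PREFIX);
}

console.log(isImpossible(noData), isImpossible(upstream)); // true true
```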
@@ -111,44 +105,88 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  }
  }

- if (!storedHash) { markRunnable(); }
- else if (storedHash !== currentHash) { markRunnable(true, { name: cName, oldHash: storedHash, newHash: currentHash, previousCategory: migrationOldCategory }); }
- else if (migrationOldCategory) { markRunnable(true, { name: cName, reason: 'Category Migration', previousCategory: migrationOldCategory, newCategory: calc.category }); }
- else { report.skipped.push({ name: cName }); simulationStatus[cName] = { hash: currentHash, category: calc.category }; }
+ // --- HASH CHECK LOGIC ---
+ if (!storedHash) {
+ markRunnable(false); // New Calculation
+ }
+ else if (storedHash !== currentHash) {
+ // Smart Logic: Why did it change?
+ let changeReason = "Hash Mismatch (Unknown)";
+ const oldComp = stored.composition;
+ const newComp = calc.composition;
+
+ if (oldComp && newComp) {
+ // 1. Check Code
+ if (oldComp.code !== newComp.code) {
+ changeReason = "Code Changed";
+ }
+ // 2. Check Layers
+ else if (JSON.stringify(oldComp.layers) !== JSON.stringify(newComp.layers)) {
+ // Find specific layer
+ const changedLayers = [];
+ for(const lKey in newComp.layers) {
+ if (newComp.layers[lKey] !== oldComp.layers[lKey]) changedLayers.push(lKey);
+ }
+ changeReason = `Layer Update: [${changedLayers.join(', ')}]`;
+ }
+ // 3. Check Dependencies
+ else if (JSON.stringify(oldComp.deps) !== JSON.stringify(newComp.deps)) {
+ // Find specific dep
+ const changedDeps = [];
+ for(const dKey in newComp.deps) {
+ if (newComp.deps[dKey] !== oldComp.deps[dKey]) changedDeps.push(dKey);
+ }
+ changeReason = `Upstream Change: [${changedDeps.join(', ')}]`;
+ }
+ else {
+ changeReason = "Logic/Epoch Change";
+ }
+ } else {
+ changeReason = "Hash Mismatch (No prior composition)";
+ }
+
+ markRunnable(true, {
+ name: cName,
+ oldHash: storedHash,
+ newHash: currentHash,
+ previousCategory: migrationOldCategory,
+ reason: changeReason // <--- Passed to Reporter
+ });
+ }
+ else if (migrationOldCategory) {
+ markRunnable(true, { name: cName, reason: 'Category Migration', previousCategory: migrationOldCategory, newCategory: calc.category });
+ }
+ else {
+ report.skipped.push({ name: cName });
+ simulationStatus[cName] = { hash: currentHash, category: calc.category, composition: calc.composition };
+ }
  }
  return report;
  }

  /**
  * DIRECT EXECUTION PIPELINE (For Workers)
- * Skips analysis. Assumes the calculation is valid and runnable.
- * [UPDATED] Accepted previousCategory argument to handle migrations.
  */
  async function executeDispatchTask(dateStr, pass, targetComputation, config, dependencies, computationManifest, previousCategory = null) {
  const { logger } = dependencies;
  const pid = generateProcessId(PROCESS_TYPES.EXECUTOR, targetComputation, dateStr);

- // 1. Get Calculation Manifest
  const manifestMap = new Map(computationManifest.map(c => [normalizeName(c.name), c]));
  const calcManifest = manifestMap.get(normalizeName(targetComputation));

  if (!calcManifest) { throw new Error(`Calculation '${targetComputation}' not found in manifest.`); }

- // [UPDATED] Attach migration context if present
  if (previousCategory) {
  calcManifest.previousCategory = previousCategory;
  logger.log('INFO', `[Executor] Migration detected for ${calcManifest.name}. Old data will be cleaned from: ${previousCategory}`);
  }

- // 2. Fetch Root Data Availability
  const rootData = await checkRootDataAvailability(dateStr, config, dependencies, DEFINITIVE_EARLIEST_DATES);
-
  if (!rootData) {
- logger.log('ERROR', `[Executor] FATAL: Root data check failed for ${targetComputation} on ${dateStr}. Index might be missing.`);
+ logger.log('ERROR', `[Executor] FATAL: Root data check failed for ${targetComputation} on ${dateStr}.`);
  return;
  }

- // 3. Fetch Dependencies
  const calcsToRun = [calcManifest];
  const existingResults = await fetchExistingResults(dateStr, calcsToRun, computationManifest, config, dependencies, false);
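
To see what the new hash-check branch actually reports, here is a self-contained restatement of the same comparison applied to two hypothetical composition objects. The object shape (`code`, `layers`, `deps`) comes from the diff; the sample hash values are invented:

```js
// Standalone version of the "why did the hash change?" logic from the hunk above.
function explainHashChange(oldComp, newComp) {
  if (!oldComp || !newComp) return 'Hash Mismatch (No prior composition)';
  if (oldComp.code !== newComp.code) return 'Code Changed';
  if (JSON.stringify(oldComp.layers) !== JSON.stringify(newComp.layers)) {
    const changed = Object.keys(newComp.layers).filter(k => newComp.layers[k] !== oldComp.layers[k]);
    return `Layer Update: [${changed.join(', ')}]`;
  }
  if (JSON.stringify(oldComp.deps) !== JSON.stringify(newComp.deps)) {
    const changed = Object.keys(newComp.deps).filter(k => newComp.deps[k] !== oldComp.deps[k]);
    return `Upstream Change: [${changed.join(', ')}]`;
  }
  return 'Logic/Epoch Change';
}

const before = { code: 'c1', layers: { math: 'aaa' }, deps: { 'base-calc': 'h1' } };
const after  = { code: 'c1', layers: { math: 'aaa' }, deps: { 'base-calc': 'h2' } };
console.log(explainHashChange(before, after)); // "Upstream Change: [base-calc]"
```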
@@ -160,7 +198,6 @@ async function executeDispatchTask(dateStr, pass, targetComputation, config, dep
  previousResults = await fetchExistingResults(prevDateStr, calcsToRun, computationManifest, config, dependencies, true);
  }

- // 4. Execute
  logger.log('INFO', `[Executor] Running ${calcManifest.name} for ${dateStr}`, { processId: pid });
  let resultUpdates = {};
@@ -176,5 +213,4 @@ async function executeDispatchTask(dateStr, pass, targetComputation, config, dep
  }
  }

-
  module.exports = { executeDispatchTask, groupByPass, analyzeDateExecution };
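
That is the end of the orchestrator's diff. For context, a worker would invoke this pipeline roughly as follows; the message shape, field names, and require path are placeholders, not part of the published module:

```js
// Hypothetical worker-side invocation of the exported pipeline.
const { executeDispatchTask } = require('./orchestrator');

async function handleDispatch(msg, config, dependencies, manifest) {
  // msg: { date: '2024-01-02', pass: 1, calc: 'portfolio-value', previousCategory: null }
  await executeDispatchTask(
    msg.date, msg.pass, msg.calc,
    config, dependencies, manifest,
    msg.previousCategory // non-null only when a category migration was detected
  );
}
```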
@@ -1,5 +1,6 @@
  /**
  * @fileoverview Dynamic Manifest Builder - Handles Topological Sort and Auto-Discovery.
+ * UPDATED: Generates Granular Hash Composition for Audit Trails.
  */
  const { generateCodeHash, LEGACY_MAPPING } = require('../topology/HashManager.js');
  const { normalizeName } = require('../utils/utils');
@@ -106,9 +107,13 @@ function buildManifest(productLinesToRun = [], calculations) {
  const metadata = Class.getMetadata();
  const dependencies = Class.getDependencies().map(normalizeName);
  const codeStr = Class.toString();
+ const selfCodeHash = generateCodeHash(codeStr);
+
+ let compositeHashString = selfCodeHash + `|EPOCH:${SYSTEM_EPOCH}`;

- let compositeHashString = generateCodeHash(codeStr) + `|EPOCH:${SYSTEM_EPOCH}`; // Here we build the hash
  const usedDeps = [];
+ // Track layer hashes for composition analysis
+ const usedLayerHashes = {};

  for (const [layerName, exportsMap] of Object.entries(LAYER_TRIGGERS)) {
  const layerHashes = LAYER_HASHES[layerName];
@@ -118,19 +123,30 @@ function buildManifest(productLinesToRun = [], calculations) {
  if (exportHash) {
  compositeHashString += exportHash;
  usedDeps.push(`${layerName}.${exportName}`);
+
+ // Group hashes by layer for the composition report
+ if (!usedLayerHashes[layerName]) usedLayerHashes[layerName] = '';
+ usedLayerHashes[layerName] += exportHash;
  }
  }
  }

+ // Simplify layer hashes to one hash per layer for the report
+ const layerComposition = {};
+ for(const [lName, lStr] of Object.entries(usedLayerHashes)) {
+ layerComposition[lName] = generateCodeHash(lStr);
+ }
+
  // Safe Mode Fallback
  let isSafeMode = false;
  if (usedDeps.length === 0) {
  isSafeMode = true;
  Object.values(LAYER_HASHES).forEach(layerObj => { Object.values(layerObj).forEach(h => compositeHashString += h); });
+ layerComposition['ALL_SAFE_MODE'] = 'ALL';
  }

- const baseHash = generateCodeHash(compositeHashString);
+ const intrinsicHash = generateCodeHash(compositeHashString);

  const manifestEntry = {
  name: normalizedName,
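
The per-layer grouping above collapses every export hash a calculation consumed from one layer into a single hash. A sketch of that reduction with made-up inputs; `generateCodeHash` stands in for the package's HashManager export and is faked here:

```js
const crypto = require('crypto');
const generateCodeHash = s => crypto.createHash('sha1').update(s).digest('hex').slice(0, 8);

// Export hashes one calculation consumed, concatenated per layer during the scan:
const usedLayerHashes = { math: 'h_sum' + 'h_avg', dates: 'h_range' };

// Collapse to one hash per layer, as stored in composition.layers:
const layerComposition = {};
for (const [layer, concatenated] of Object.entries(usedLayerHashes)) {
  layerComposition[layer] = generateCodeHash(concatenated);
}
console.log(layerComposition); // e.g. { math: '83d5…', dates: '1f0c…' }
```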
@@ -143,7 +159,16 @@ function buildManifest(productLinesToRun = [], calculations) {
  userType: metadata.userType,
  dependencies: dependencies,
  pass: 0,
- hash: baseHash,
+ hash: intrinsicHash, // Will be updated with deps
+
+ // [NEW] Composition Object for Audit
+ composition: {
+ epoch: SYSTEM_EPOCH,
+ code: selfCodeHash,
+ layers: layerComposition,
+ deps: {} // Will be populated after topo sort
+ },
+
  debugUsedLayers: isSafeMode ? ['ALL (Safe Mode)'] : usedDeps
  };
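
After this hunk, each manifest entry carries an audit trail of everything that went into its hash. A hypothetical entry would look like this (illustrative shape only; all hash values are invented):

```js
const exampleEntry = {
  name: 'portfolio-value',
  pass: 0,
  hash: 'a1b2c3d4', // intrinsic hash at build time, later replaced by the cascaded hash
  composition: {
    epoch: 3,                     // SYSTEM_EPOCH when the manifest was built
    code: 'e5f6a7b8',             // hash of Class.toString()
    layers: { math: '83d5f2a9' }, // one hash per consumed layer
    deps: {}                      // filled in during the cascading-hash phase below
  }
};
```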
@@ -174,8 +199,6 @@ function buildManifest(productLinesToRun = [], calculations) {
  }

  const productLineEndpoints = [];
-
- // [UPDATE] Check if we should run ALL product lines (if empty or wildcard)
  const runAll = !productLinesToRun || productLinesToRun.length === 0 || productLinesToRun.includes('*');

  for (const [name, entry] of manifestMap.entries()) {
@@ -187,7 +210,6 @@ function buildManifest(productLinesToRun = [], calculations) {
  const requiredCalcs = getDependencySet(productLineEndpoints, adjacency);
  log.info(`Filtered down to ${requiredCalcs.size} active calculations.`);

- // [LOG VERIFICATION] Final Proof of Active Lines
  const activePackages = new Set();
  requiredCalcs.forEach(name => {
  const entry = manifestMap.get(name);
@@ -240,11 +262,17 @@ function buildManifest(productLinesToRun = [], calculations) {

  // --- Cascading Hash (Phase 2) ---
  for (const entry of sortedManifest) {
- let dependencySignature = entry.hash;
+ let dependencySignature = entry.hash; // Start with intrinsic
+
  if (entry.dependencies && entry.dependencies.length > 0) {
  const depHashes = entry.dependencies.map(depName => {
- const depEntry = filteredManifestMap.get(depName);
- return depEntry ? depEntry.hash : '';
+ const depEntry = filteredManifestMap.get(depName);
+ if (depEntry) {
+ // Populate Composition
+ entry.composition.deps[depName] = depEntry.hash;
+ return depEntry.hash;
+ }
+ return '';
  }).join('|');
  dependencySignature += `|DEPS:${depHashes}`;
  }
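
The cascade means a hash change in any dependency ripples into every downstream entry, and the new `composition.deps` records exactly which dependency hash was folded in. A condensed sketch of one Phase 2 iteration; the final re-hash at the end of the loop is not visible in this diff, so the last line is an assumption about what follows:

```js
function cascade(entry, filteredManifestMap, generateCodeHash) {
  let signature = entry.hash; // start with the intrinsic hash
  if (entry.dependencies && entry.dependencies.length > 0) {
    const depHashes = entry.dependencies.map(depName => {
      const dep = filteredManifestMap.get(depName);
      if (dep) {
        entry.composition.deps[depName] = dep.hash; // record the audit trail
        return dep.hash;
      }
      return '';
    }).join('|');
    signature += `|DEPS:${depHashes}`;
  }
  entry.hash = generateCodeHash(signature); // assumed: final hash covers self + deps
}
```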
@@ -1,6 +1,7 @@
  /**
  * @fileoverview Executor for "Standard" (per-user) calculations.
- * UPDATED: Handles lazy loading of data references (accepts null refs from Indexer).
+ * UPDATED: Handles lazy loading of data references.
+ * UPDATED: Supports Multi-Date Fan-Out Aggregation (Time Machine Mode).
  */
  const { normalizeName } = require('../utils/utils');
  const { streamPortfolioData, streamHistoryData, getPortfolioPartRefs } = require('../utils/data_loader');
@@ -39,8 +40,46 @@ class StandardExecutor {
  // 3. Stream & Process
  await StandardExecutor.streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps);

- // 4. Commit
- return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
+ // 4. Pre-Commit Transformation for Fan-Out
+ // If a calc produced multi-date output per user, we must transpose it:
+ // FROM: UserA -> { "2024-01-01": data, "2024-01-02": data }
+ // TO: "2024-01-01" -> { UserA: data }, "2024-01-02" -> { UserA: data }
+
+ const transformedState = {};
+ for (const [name, inst] of Object.entries(state)) {
+ const result = await inst.getResult(); // { userId: { date: data } } or { userId: data }
+ const firstUser = Object.keys(result)[0];
+
+ // Check if the inner value is a Date Map
+ // Only checks the first user as heuristic; implies uniform return type
+ if (firstUser && result[firstUser] && typeof result[firstUser] === 'object') {
+ const innerKeys = Object.keys(result[firstUser]);
+ // Check if keys look like YYYY-MM-DD
+ const isDateMap = innerKeys.length > 0 && innerKeys.every(k => /^\d{4}-\d{2}-\d{2}$/.test(k));
+
+ if (isDateMap) {
+ const transposed = {};
+ for (const [userId, dateMap] of Object.entries(result)) {
+ for (const [dateKey, dailyData] of Object.entries(dateMap)) {
+ if (!transposed[dateKey]) transposed[dateKey] = {};
+ transposed[dateKey][userId] = dailyData;
+ }
+ }
+
+ // Mock a "getResult" for the committer that returns the Transposed Map
+ transformedState[name] = {
+ manifest: inst.manifest,
+ getResult: async () => transposed
+ };
+ continue;
+ }
+ }
+ // Normal behavior
+ transformedState[name] = inst;
+ }
+
+ // 5. Commit
+ return await commitResults(transformedState, dStr, passName, config, deps, skipStatusWrite);
  }

  static async streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps) {
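
The transposition is easiest to see with concrete data. Given a calculation that returned a per-user date map, the committer now receives per-date user maps (the values below are invented):

```js
// Input: one result object per user, keyed by date (Time Machine Mode).
const result = {
  userA: { '2024-01-01': { pnl: 5 }, '2024-01-02': { pnl: 7 } },
  userB: { '2024-01-01': { pnl: 2 } }
};

// Same transpose as in the hunk above:
const transposed = {};
for (const [userId, dateMap] of Object.entries(result)) {
  for (const [dateKey, dailyData] of Object.entries(dateMap)) {
    if (!transposed[dateKey]) transposed[dateKey] = {};
    transposed[dateKey][userId] = dailyData;
  }
}

console.log(transposed);
// { '2024-01-01': { userA: { pnl: 5 }, userB: { pnl: 2 } },
//   '2024-01-02': { userA: { pnl: 7 } } }
```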
@@ -58,20 +97,16 @@ class StandardExecutor {
  const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
  const prevDateStr = prevDate.toISOString().slice(0, 10);

- // [FIX] pass null if portfolioRefs is null; streamPortfolioData handles the fetch
  const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);

  const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
- // yesterdayPortfolioRefs are manually fetched in run(), so they are usually populated
  const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;

  const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
- // [FIX] Removed '&& historyRefs' check. We pass null to streamHistoryData if refs are missing, allowing it to fetch them.
  const tH_iter = (needsTradingHistory) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;

  let yP_chunk = {}, tH_chunk = {};

- // [FIX] Ensure manual iterators are closed if loop fails
  try {
  for await (const tP_chunk of tP_iter) {
  if (yP_iter) yP_chunk = (await yP_iter.next()).value || {};
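
The loop above drives one async iterator with `for await` and advances the others manually via `.next()`. A minimal sketch of that zip pattern, including the cleanup the removed comment alludes to; the function and parameter names are illustrative:

```js
// Zip a primary async iterable with a manually driven secondary iterator,
// making sure the secondary one is closed if the loop exits early.
async function zipStreams(primary, secondary, onChunk) {
  try {
    for await (const chunk of primary) {
      const side = secondary ? (await secondary.next()).value || {} : {};
      onChunk(chunk, side);
    }
  } finally {
    // for await closes `primary` automatically on abrupt exit;
    // the manually advanced iterator needs an explicit return().
    if (secondary && typeof secondary.return === 'function') await secondary.return();
  }
}
```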