bulltrackers-module 1.0.230 → 1.0.231

@@ -1,6 +1,6 @@
  /**
  * @fileoverview Main Orchestrator. Coordinates the topological execution.
- * UPDATED: Implements 'IMPOSSIBLE' state logic for missing root data on historical dates.
+ * UPDATED: Removed legacy boolean 'true' logic.
  */
  const { normalizeName } = require('./utils/utils');
  const { checkRootDataAvailability } = require('./data/AvailabilityChecker');
@@ -10,7 +10,6 @@ const { StandardExecutor } = require('./executor
  const { MetaExecutor } = require('./executors/MetaExecutor');
  const { generateProcessId, PROCESS_TYPES } = require('./logger/logger');

- // New Status Constant
  const STATUS_IMPOSSIBLE = 'IMPOSSIBLE';

  function groupByPass(manifest) {
@@ -20,21 +19,14 @@ function groupByPass(manifest) {
  }, {});
  }

- /**
- * Performs strict analysis of what can run.
- * IMPOSSIBLE LOGIC:
- * 1. If Root Data is missing AND Date != Today -> IMPOSSIBLE.
- * 2. If Dependency is IMPOSSIBLE -> IMPOSSIBLE.
- * 3. IMPOSSIBLE items are written to DB to prevent future retries.
- */
  function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus, manifestMap) {
  const report = {
  runnable: [],
- blocked: [], // Missing Root Data (Today - Retriable)
- impossible: [], // Missing Root Data (Historical) or Dependency Impossible
- failedDependency: [], // Missing/Stale Dependency (Transient)
- reRuns: [], // Hash Mismatch
- skipped: [] // Already done & valid
+ blocked: [],
+ impossible: [],
+ failedDependency: [],
+ reRuns: [],
+ skipped: []
  };

  const isTargetToday = (dateStr === new Date().toISOString().slice(0, 10));
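
Since this hunk drops the inline bucket comments from the source, here is a hedged recap of what each bucket holds, restated from the deleted comments as an annotated sketch (purely descriptive, no new behavior; the comment for runnable is implied by its use rather than copied):

// Sketch: the analysis-report shape, with the meanings the removed comments documented.
const exampleReport = {
  runnable: [],          // ready to execute in this pass
  blocked: [],           // missing root data for today's date (retriable)
  impossible: [],        // missing root data on a historical date, or a dependency is impossible
  failedDependency: [],  // missing or stale dependency (transient)
  reRuns: [],            // stored hash no longer matches the current manifest hash
  skipped: []            // already done and still valid
};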
@@ -44,9 +36,7 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  const storedDepHash = dailyStatus[norm];
  const depManifest = manifestMap.get(norm);

- // Check 1: Is dependency IMPOSSIBLE? (Logic handled in main loop, but safe to check here)
  if (storedDepHash === STATUS_IMPOSSIBLE) return false;
-
  if (!storedDepHash) return false;
  if (!depManifest) return false;
  if (storedDepHash !== depManifest.hash) return false;
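
Taken together, these checks mean a dependency only counts as satisfied when it has a stored status, that status is not the IMPOSSIBLE sentinel, the dependency still exists in the manifest, and the stored hash matches the manifest's current hash. A minimal standalone sketch of that predicate (the isDependencySatisfied name is hypothetical, not part of the package):

// Hypothetical helper mirroring the checks above; not part of the package API.
function isDependencySatisfied(norm, dailyStatus, manifestMap) {
  const storedDepHash = dailyStatus[norm];
  const depManifest = manifestMap.get(norm);
  if (storedDepHash === 'IMPOSSIBLE') return false; // permanently failed upstream
  if (!storedDepHash) return false;                 // never ran, or prior transient failure (false)
  if (!depManifest) return false;                   // dependency no longer in the manifest
  return storedDepHash === depManifest.hash;        // stale if the manifest hash changed
}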
@@ -59,7 +49,7 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  const storedHash = dailyStatus[cName];
  const currentHash = calc.hash;

- // 1. Check if ALREADY marked IMPOSSIBLE
+ // 1. Check Impossible
  if (storedHash === STATUS_IMPOSSIBLE) {
  report.skipped.push({ name: cName, reason: 'Permanently Impossible' });
  continue;
@@ -78,7 +68,6 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  }

  if (missingRoots.length > 0) {
- // LOGIC: If date is NOT today, missing root data is fatal and permanent.
  if (!isTargetToday) {
  report.impossible.push({ name: cName, reason: `Missing Root Data: ${missingRoots.join(', ')} (Historical)` });
  } else {
@@ -95,10 +84,8 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  for (const dep of calc.dependencies) {
  const normDep = normalizeName(dep);

- // Check if the dependency is marked IMPOSSIBLE in the DB
  if (dailyStatus[normDep] === STATUS_IMPOSSIBLE) {
  dependencyIsImpossible = true;
- // We can break early, if one input is impossible, the result is impossible.
  break;
  }

@@ -109,7 +96,6 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  }

  if (dependencyIsImpossible) {
- // Propagate the Impossible Status
  report.impossible.push({ name: cName, reason: 'Dependency is Impossible' });
  continue;
  }
@@ -119,14 +105,13 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  continue;
  }

- // 4. Hash / State Check
- if (!storedHash || storedHash === false) { // false indicates previous transient failure
+ // 4. Hash / State Check (Legacy 'true' logic removed)
+ if (!storedHash || storedHash === false) {
  report.runnable.push(calc);
  } else if (storedHash !== currentHash) {
  report.reRuns.push({ name: cName, oldHash: storedHash, newHash: currentHash });
- } else if (storedHash === true) {
- report.reRuns.push({ name: cName, reason: 'Legacy Upgrade' });
  } else {
+ // Stored Hash === Current Hash
  report.skipped.push({ name: cName });
  }
  }
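
One practical consequence of dropping the storedHash === true branch: a record left over from the legacy scheme, where success was stored as the boolean true rather than a manifest hash, now falls through to the generic hash-mismatch branch (true !== currentHash) and is queued as a re-run with oldHash: true, instead of being tagged 'Legacy Upgrade'. A standalone sketch of the status branches in 1.0.231 (root-data and dependency checks omitted; illustrative only):

// Sketch of the decision order after this change; not a copy of the package function.
function classify(storedHash, currentHash) {
  if (storedHash === 'IMPOSSIBLE') return 'skipped (permanent)';
  if (!storedHash || storedHash === false) return 'runnable'; // never ran, or transient failure
  if (storedHash !== currentHash) return 'reRun';             // now also catches legacy `true` values
  return 'skipped';                                           // hash matches, already done
}

classify(true, 'abc123');     // 'reRun' (was 'Legacy Upgrade' in 1.0.230)
classify(false, 'abc123');    // 'runnable'
classify('abc123', 'abc123'); // 'skipped'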
@@ -162,7 +147,6 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
  if (logger && typeof logger.logDateAnalysis === 'function') {
  logger.logDateAnalysis(dateStr, analysisReport);
  } else {
- // Safe fallback
  const logMsg = `[Analysis] Date: ${dateStr} | Runnable: ${analysisReport.runnable.length} | Blocked: ${analysisReport.blocked.length} | Impossible: ${analysisReport.impossible.length}`;
  if (logger && logger.info) logger.info(logMsg);
  else console.log(logMsg);
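
The pattern here, and in the commitResults changes further down, is duck-typed logger degradation: prefer the structured method when it exists, fall back to a generic info call, and finally to the console. A compact sketch of that pattern in isolation (the safeLog helper is hypothetical, not exported by the module):

// Hypothetical wrapper illustrating the fallback chain used in this diff.
function safeLog(logger, method, message, ...args) {
  if (logger && typeof logger[method] === 'function') return logger[method](message, ...args);
  if (logger && typeof logger.info === 'function') return logger.info(message);
  console.log(message); // last resort when no logger is injected
}

safeLog(undefined, 'logDateAnalysis', '[Analysis] Date: 2024-01-01 | Runnable: 3'); // falls back to console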
@@ -170,12 +154,8 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d

  // 5. UPDATE STATUS FOR NON-RUNNABLE ITEMS
  const statusUpdates = {};
-
- // A. Mark BLOCKED as 'false' (Transient Failure)
  analysisReport.blocked.forEach(item => statusUpdates[item.name] = false);
  analysisReport.failedDependency.forEach(item => statusUpdates[item.name] = false);
-
- // B. Mark IMPOSSIBLE as 'IMPOSSIBLE' (Permanent Failure - Overwrites existing status)
  analysisReport.impossible.forEach(item => statusUpdates[item.name] = STATUS_IMPOSSIBLE);

  if (Object.keys(statusUpdates).length > 0) {
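
The removed A/B comments spelled out the write semantics: blocked and failed-dependency items are persisted as false (transient, retriable), while impossible items are persisted as the 'IMPOSSIBLE' sentinel, overwriting any existing status so the date is never retried. An illustrative example of the resulting update map (the calculation names are made up):

// Example only; calculation names are hypothetical.
const statusUpdates = {
  dailyVolume: false,           // blocked today, will be retried on a later run
  sectorRollup: false,          // dependency missing or stale (transient)
  historicalBeta: 'IMPOSSIBLE'  // root data absent on a past date, never retried
};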
@@ -45,12 +45,19 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
  await commitBatchInChunks(config, deps, updates, `${name} Results`);

  // Structured Storage Log
- logger.logStorage(pid, name, dStr, mainDocRef.path, totalSize, isSharded);
+ if (logger && logger.logStorage) {
+ logger.logStorage(pid, name, dStr, mainDocRef.path, totalSize, isSharded);
+ }

- successUpdates[name] = calc.manifest.hash || true;
+ // Update success tracking
+ if (calc.manifest.hash) {
+ successUpdates[name] = calc.manifest.hash;
+ }
  }
  } catch (e) {
- logger.log('ERROR', `Commit failed for ${name}`, { processId: pid, error: e.message });
+ if (logger && logger.log) {
+ logger.log('ERROR', `Commit failed for ${name}`, { processId: pid, error: e.message });
+ }
  }
  }
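
This is the other half of the legacy-'true' cleanup: 1.0.230 wrote calc.manifest.hash || true, so a calculation without a manifest hash was recorded as the boolean true; 1.0.231 only records a status when a real hash exists, which is what lets the analyzer drop its storedHash === true branch. A before/after sketch (standalone; the calculation name is hypothetical):

const successUpdates = {};
const name = 'dailyVolume';    // hypothetical calculation name
const calc = { manifest: {} }; // hypothetical calc whose manifest has no hash yet

// 1.0.230: successUpdates[name] = calc.manifest.hash || true;   // -> { dailyVolume: true }
// 1.0.231: nothing is written unless a real hash exists:
if (calc.manifest.hash) successUpdates[name] = calc.manifest.hash;
console.log(successUpdates); // {} — no legacy boolean `true` can enter the status store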

@@ -62,10 +69,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
  return successUpdates;
  }

- // ... rest of file (calculateFirestoreBytes, prepareAutoShardedWrites) remains same ...
- // Just ensure prepareAutoShardedWrites uses the provided logger if it logs internal warnings.
  function calculateFirestoreBytes(value) {
- // ... same as before
  if (value === null) return 1;
  if (value === undefined) return 0;
  if (typeof value === 'boolean') return 1;
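
calculateFirestoreBytes estimates the stored size of a value so the module can decide whether a result document fits under Firestore's limits; the visible cases follow Firestore's documented storage sizes (null and boolean cost 1 byte each). The rest of the function is unchanged and not shown in this diff, so the following is only an assumed sketch of how the remaining cases are typically computed under those published rules, not a copy of the package source:

// Assumed continuation based on Firestore's documented size rules; hypothetical helper name.
function estimateFirestoreBytes(value) {
  if (value === null) return 1;
  if (value === undefined) return 0;
  if (typeof value === 'boolean') return 1;
  if (typeof value === 'number') return 8;                                  // integers and doubles
  if (typeof value === 'string') return Buffer.byteLength(value, 'utf8') + 1;
  if (Array.isArray(value)) return value.reduce((n, v) => n + estimateFirestoreBytes(v), 0);
  if (typeof value === 'object') {
    // Maps: each field costs its UTF-8 name length + 1, plus the size of its value.
    return Object.entries(value).reduce(
      (n, [k, v]) => n + Buffer.byteLength(k, 'utf8') + 1 + estimateFirestoreBytes(v), 0);
  }
  return 0;
}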
@@ -79,8 +83,6 @@ function calculateFirestoreBytes(value) {
  }

  async function prepareAutoShardedWrites(result, docRef, logger) {
- // ... same logic, just ensure existing logs inside here use the logger properly if needed
- // Copied from previous logic, essentially checks size > 900KB and splits
  const SAFETY_THRESHOLD_BYTES = 1000 * 1024;
  const OVERHEAD_ALLOWANCE = 20 * 1024;
  const CHUNK_LIMIT = SAFETY_THRESHOLD_BYTES - OVERHEAD_ALLOWANCE;
@@ -94,8 +96,6 @@ async function prepareAutoShardedWrites(result, docRef, logger) {

  if ((totalSize + docPathSize) < CHUNK_LIMIT) { const data = { ...result, _completed: true, _sharded: false }; return [{ ref: docRef, data, options: { merge: true } }]; }

- // Note: We don't log "Sharding..." here anymore because we log the structured event in commitResults
-
  for (const [key, value] of Object.entries(result)) {
  if (key.startsWith('_')) continue;
  const keySize = Buffer.byteLength(key, 'utf8') + 1;
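
For context on the thresholds above: SAFETY_THRESHOLD_BYTES is 1000 * 1024 = 1,024,000 bytes and OVERHEAD_ALLOWANCE is 20 * 1024 = 20,480 bytes, so CHUNK_LIMIT works out to 1,003,520 bytes, comfortably under Firestore's 1 MiB (1,048,576 byte) document limit. A result that fits is written as a single unsharded document; otherwise its top-level keys are distributed across chunk documents. A simplified sketch of that split under those assumptions (the chunking helper and its name are hypothetical, not taken from the package):

// Simplified sharding sketch; sizeOf is any per-value size estimator.
const CHUNK_LIMIT = 1000 * 1024 - 20 * 1024; // 1,003,520 bytes

function splitIntoChunks(result, sizeOf) {
  const chunks = [[]];
  let current = 0;
  for (const [key, value] of Object.entries(result)) {
    if (key.startsWith('_')) continue;                      // internal markers stay on the main doc
    const entrySize = Buffer.byteLength(key, 'utf8') + 1 + sizeOf(value);
    if (current + entrySize > CHUNK_LIMIT) {                // start a new chunk when the cap is hit
      chunks.push([]);
      current = 0;
    }
    chunks[chunks.length - 1].push([key, value]);
    current += entrySize;
  }
  return chunks.map(entries => Object.fromEntries(entries));
}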
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bulltrackers-module",
- "version": "1.0.230",
+ "version": "1.0.231",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [