bulltrackers-module 1.0.268 → 1.0.269

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
@@ -1,7 +1,7 @@
  /**
  * @fileoverview Main Orchestrator. Coordinates the topological execution.
- * UPDATED: Implements Smart Audit logic to detect WHY a hash mismatch occurred.
- * FIX: Added 'Audit Upgrade' check to force re-run if composition metadata is missing.
+ * UPDATED: Removed 'Permanently Impossible' optimization to ensure full visibility/recovery.
+ * UPDATED: Includes 'Audit Upgrade' check.
  */
  const { normalizeName, DEFINITIVE_EARLIEST_DATES } = require('./utils/utils');
  const { checkRootDataAvailability, checkRootDependencies } = require('./data/AvailabilityChecker');
@@ -17,7 +17,6 @@ function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[c
 
  /**
  * Analyzes whether calculations should run, be skipped, or are blocked.
- * Now performs Deep Hash Analysis to explain Re-Runs.
  */
  function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus, manifestMap, prevDailyStatus = null) {
  const report = { runnable: [], blocked: [], impossible: [], failedDependency: [], reRuns: [], skipped: [] };
@@ -58,16 +57,16 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  let migrationOldCategory = null;
  if (storedCategory && storedCategory !== calc.category) { migrationOldCategory = storedCategory; }
 
- if (typeof storedHash === 'string' && storedHash.startsWith(STATUS_IMPOSSIBLE_PREFIX)) {
- report.skipped.push({ name: cName, reason: `Permanently Impossible (${storedHash})` });
- continue;
- }
+ // [REMOVED] The "Permanently Impossible" optimization block was here.
+ // Removal ensures we re-check Root Data every time, allowing for visibility and recovery.
 
+ // 1. Check Root Data (The Primary Gate)
  const rootCheck = checkRootDependencies(calc, rootDataStatus);
 
  if (!rootCheck.canRun) {
  const missingStr = rootCheck.missing.join(', ');
  if (!isTargetToday) {
+ // If previously impossible, this confirms it. If previously run, this is a regression.
  markImpossible(`Missing Root Data: ${missingStr} (Historical)`, 'NO_DATA');
  } else {
  report.blocked.push({ name: cName, reason: `Missing Root Data: ${missingStr} (Waiting)` });
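
For context on the block removed above: under 1.0.268, a stored hash beginning with the impossible-status prefix skipped the calculation outright, so a date once marked impossible was never re-examined. The small sketch below contrasts the two behaviors; the prefix value 'IMPOSSIBLE' and the record shape are assumptions, since neither appears in this diff.

    // Hypothetical legacy record for a date whose root data has since been backfilled.
    const STATUS_IMPOSSIBLE_PREFIX = 'IMPOSSIBLE';           // assumed value, not shown in the diff
    const stored = { hash: 'IMPOSSIBLE:NO_DATA' };           // assumed record shape
    const rootCheck = { canRun: true, missing: [] };         // root data now exists

    const skippedForeverIn268 = typeof stored.hash === 'string'
      && stored.hash.startsWith(STATUS_IMPOSSIBLE_PREFIX);   // true: the fast-path hid the recovery
    const recoveredIn269 = rootCheck.canRun;                 // true: gate 1 re-runs and the calc becomes runnable
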
@@ -75,6 +74,7 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  continue;
  }
 
+ // 2. Check Dependencies
  let dependencyIsImpossible = false;
  const missingDeps = [];
  if (calc.dependencies) {
@@ -95,6 +95,7 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  }
  if (missingDeps.length > 0) { report.failedDependency.push({ name: cName, missing: missingDeps }); continue; }
 
+ // 3. Check Historical Continuity
  if (calc.isHistorical && prevDailyStatus) {
  const yesterday = new Date(dateStr + 'T00:00:00Z');
  yesterday.setUTCDate(yesterday.getUTCDate() - 1);
@@ -107,12 +108,12 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  }
  }
 
- // --- HASH CHECK LOGIC ---
+ // 4. Check Hash / Composition (The Audit Gate)
  if (!storedHash) {
  markRunnable(false, { reason: "New Calculation" });
  }
  else if (storedHash !== currentHash) {
- // Smart Logic: Why did it change?
+ // Smart Audit Logic
  let changeReason = "Hash Mismatch (Unknown)";
  const oldComp = stored.composition;
  const newComp = calc.composition;
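
The hunk above stops before the actual comparison of oldComp and newComp, so only the intent of the 'Smart Audit Logic' comment is visible here. As a rough illustration of that kind of diagnosis, the sketch below derives a changeReason from two composition maps; the flat name-to-hash shape of composition is an assumption, not taken from the package.

    // Hypothetical: explain a hash mismatch by diffing two composition maps (name -> hash).
    function explainMismatch(oldComp, newComp) {
      if (!oldComp) return 'Hash Mismatch (No stored composition to compare)';
      const names = new Set([...Object.keys(oldComp), ...Object.keys(newComp)]);
      for (const name of names) {
        if (!(name in oldComp)) return `Dependency Added: ${name}`;
        if (!(name in newComp)) return `Dependency Removed: ${name}`;
        if (oldComp[name] !== newComp[name]) return `Dependency Changed: ${name}`;
      }
      return 'Hash Mismatch (Unknown)';
    }
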
@@ -153,7 +154,7 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  else if (migrationOldCategory) {
  markRunnable(true, { name: cName, reason: 'Category Migration', previousCategory: migrationOldCategory, newCategory: calc.category });
  }
- // [CRITICAL FIX] Audit Upgrade Check: Force re-run if hash matches but composition is missing (Legacy Record)
+ // Audit Upgrade Check
  else if (!stored.composition) {
  markRunnable(true, {
  name: cName,
@@ -163,7 +164,7 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
  });
  }
  else {
- report.skipped.push({ name: cName });
+ report.skipped.push({ name: cName, reason: "Up To Date" });
  simulationStatus[cName] = { hash: currentHash, category: calc.category, composition: calc.composition };
  }
  }
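
Read across the hunks, the numbered comments describe analyzeDateExecution as a fixed sequence of gates. The condensed stub below illustrates only that ordering: ctx is a hypothetical bag of values the real function derives itself, and the outcome of the historical-continuity gate is an assumption because this diff truncates that block.

    // Condensed illustration of the gate order after 1.0.269 (not the package's actual code).
    function classifyCalc(calc, ctx) {
      if (!ctx.rootOk) return 'impossible or blocked';              // 1. Root Data (the primary gate)
      if (ctx.missingDeps.length > 0) return 'failedDependency';    // 2. Dependencies
      if (calc.isHistorical && !ctx.yesterdayOk) return 'blocked';  // 3. Historical continuity (assumed outcome)
      if (!ctx.storedHash) return 'runnable (New Calculation)';     // 4. Hash / Composition (the audit gate)
      if (ctx.storedHash !== ctx.currentHash) return 'reRun (Smart Audit)';
      if (ctx.migrationOldCategory) return 'reRun (Category Migration)';
      if (!ctx.storedComposition) return 'reRun (Audit Upgrade)';
      return 'skipped (Up To Date)';
    }
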
@@ -1,2 +1,2 @@
  // Change this string to force a global re-computation
- module.exports = "v1.0-epoch-2";
+ module.exports = "v2.0-epoch-2";
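
The diff does not show where this epoch string is consumed, but the surrounding comment implies it feeds into each calculation's hash, so bumping it makes every storedHash differ from currentHash and triggers a global re-run. A hypothetical sketch of that pattern follows; the hashing helper and payload shape are assumptions.

    // Hypothetical: fold the global epoch string into the per-calculation hash.
    const crypto = require('crypto');
    const EPOCH = 'v2.0-epoch-2';   // value taken from the hunk above

    function computeCalcHash(calc) {
      const payload = JSON.stringify({ epoch: EPOCH, composition: calc.composition });
      return crypto.createHash('sha256').update(payload).digest('hex');
    }
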
@@ -1,7 +1,8 @@
  /**
  * @fileoverview Build Reporter & Auto-Runner.
  * Generates a "Pre-Flight" report of what the computation system WILL do.
- * UPDATED: Shards report details to subcollections to bypass 1MB limit on 'Invalidate All' scenarios.
+ * UPDATED: Shards report details to subcollections to bypass 1MB limit.
+ * UPDATED: Explicitly reports SKIPPED items for 100% visibility.
  */
 
  const { analyzeDateExecution } = require('../WorkflowOrchestrator');
@@ -16,7 +17,6 @@ const packageVersion = pac
 
  /**
  * AUTO-RUN ENTRY POINT
- * Checks if a report for the current version exists. If not, runs it.
  */
  async function ensureBuildReport(config, dependencies, manifest) {
  const { db, logger } = dependencies;
@@ -34,8 +34,6 @@ async function ensureBuildReport(config, dependencies, manifest) {
  }
 
  logger.log('INFO', `[BuildReporter] 🚀 New Version Detected (${packageVersion}). Auto-running Pre-flight Report...`);
-
- // Scope: 90 days is fine now that we shard the output
  await generateBuildReport(config, dependencies, manifest, 90, buildId);
 
  } catch (e) {
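
The removed comments do not change the behavior the trimmed JSDoc described: a pre-flight report is generated only when no report exists for the current package version, and the scan always covers 90 days. A minimal sketch of that guard follows; the buildId scheme and the existence check are assumptions, since they fall outside this hunk.

    // Hypothetical shape of the auto-run guard around generateBuildReport.
    async function ensureBuildReportSketch(db, config, dependencies, manifest, packageVersion) {
      const buildId = `build_${packageVersion}`;                                  // assumed id scheme
      const existing = await db.collection('computation_build_records').doc(buildId).get();
      if (existing.exists) return;                                                // report already generated
      await generateBuildReport(config, dependencies, manifest, 90, buildId);     // as in the hunk above
    }
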
@@ -65,12 +63,12 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
  packageVersion: packageVersion,
  generatedAt: new Date().toISOString(),
  summary: {},
- _sharded: true // Flag to tell UI/Tools to look in subcollection
+ _sharded: true
  };
 
  let totalReRuns = 0;
  let totalNew = 0;
- const detailWrites = []; // Accumulate writes for batching
+ const detailWrites = [];
 
  const limit = pLimit(20);
 
@@ -99,17 +97,20 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
 
  const analysis = analyzeDateExecution(dateStr, manifest, rootDataStatus, dailyStatus, manifestMap, prevDailyStatus);
 
- const dateSummary = { willRun: [], willReRun: [], blocked: [], impossible: [] };
+ // [NEW] Added 'skipped' to the summary object
+ const dateSummary = { willRun: [], willReRun: [], blocked: [], impossible: [], skipped: [] };
 
  analysis.runnable.forEach (item => dateSummary.willRun.push ({ name: item.name, reason: "New / No Previous Record" }));
  analysis.reRuns.forEach (item => dateSummary.willReRun.push ({ name: item.name, reason: item.reason || "Hash Mismatch" }));
  analysis.impossible.forEach (item => dateSummary.impossible.push ({ name: item.name, reason: item.reason }));
  [...analysis.blocked, ...analysis.failedDependency].forEach(item => dateSummary.blocked.push({ name: item.name, reason: item.reason || 'Dependency' }));
-
- const hasUpdates = dateSummary.willRun.length || dateSummary.willReRun.length || dateSummary.blocked.length || dateSummary.impossible.length;
 
- if (hasUpdates) {
- // Prepare Write for Subcollection
+ // [NEW] Map skipped items so the math adds up to 92
+ analysis.skipped.forEach (item => dateSummary.skipped.push ({ name: item.name, reason: item.reason || "Up To Date" }));
+
+ // Update: We write the report if there is ANY data, not just updates
+ // This ensures full visibility into the day's state
+ if (dateSummary.willRun.length || dateSummary.willReRun.length || dateSummary.blocked.length || dateSummary.impossible.length || dateSummary.skipped.length) {
  const detailRef = db.collection('computation_build_records').doc(buildId).collection('details').doc(dateStr);
  detailWrites.push({
  ref: detailRef,
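
The "[NEW] Map skipped items so the math adds up to 92" comment implies the manifest currently holds 92 calculations and that, per date, every calculation now lands in exactly one bucket. A small check expressing that invariant follows; the figure 92 comes from the comment and is not verified here.

    // Per date: willRun + willReRun + blocked + impossible + skipped should equal the manifest size.
    function bucketsCoverManifest(dateSummary, manifestSize) {
      const total = dateSummary.willRun.length
        + dateSummary.willReRun.length
        + dateSummary.blocked.length
        + dateSummary.impossible.length
        + dateSummary.skipped.length;
      return total === manifestSize;   // e.g. 92, per the comment above
    }
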
@@ -139,17 +140,14 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
 
  reportHeader.summary = { totalReRuns, totalNew, scanRange: `${datesToCheck[0]} to ${datesToCheck[datesToCheck.length-1]}` };
 
- // 1. Write Header
  const reportRef = db.collection('computation_build_records').doc(buildId);
  await reportRef.set(reportHeader);
 
- // 2. Batch Write Details (Using FirestoreUtils to handle batching constraints)
  if (detailWrites.length > 0) {
  logger.log('INFO', `[BuildReporter] Writing ${detailWrites.length} detail records...`);
  await commitBatchInChunks(config, dependencies, detailWrites, 'BuildReportDetails');
  }
 
- // 3. Update 'latest' pointer (Summary only)
  await db.collection('computation_build_records').doc('latest').set({ ...reportHeader, note: "Latest build report pointer (See subcollection for details)." });
 
  logger.log('SUCCESS', `[BuildReporter] Report ${buildId} saved. Re-runs: ${totalReRuns}, New: ${totalNew}.`);
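
Since the header document carries only the summary and the _sharded flag, a consumer reads the per-date detail from the subcollection written above. A minimal reader sketch using the collection names that appear in this diff; error handling is omitted.

    // Read a sharded build report back: header document, then the per-date details.
    async function readBuildReport(db, buildId) {
      const header = await db.collection('computation_build_records').doc(buildId).get();
      const details = await db.collection('computation_build_records')
        .doc(buildId).collection('details').get();
      return {
        header: header.data(),
        days: details.docs.map(d => ({ date: d.id, ...d.data() })),
      };
    }
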
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bulltrackers-module",
- "version": "1.0.268",
+ "version": "1.0.269",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [