bulltrackers-module 1.0.281 → 1.0.283
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/onboarding.md +154 -869
- package/functions/computation-system/persistence/ContractValidator.js +81 -0
- package/functions/computation-system/persistence/ResultCommitter.js +73 -13
- package/functions/computation-system/scripts/UpdateContracts.js +128 -0
- package/functions/computation-system/simulation/Fabricator.js +285 -0
- package/functions/computation-system/simulation/SeededRandom.js +41 -0
- package/functions/computation-system/simulation/SimRunner.js +51 -0
- package/functions/computation-system/tools/BuildReporter.js +199 -159
- package/functions/computation-system/tools/ContractDiscoverer.js +144 -0
- package/package.json +1 -1
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
/**
 * @fileoverview Deterministic Pseudo-Random Number Generator (Lehmer / Park-Miller LCG).
 * Ensures that for a given seed, the sequence of numbers is identical across runs.
 */
class SeededRandom {
  /**
   * @param {string} seedString - Arbitrary string used to derive the initial state.
   */
  constructor(seedString) {
    // Normalize the 32-bit FNV hash into [1, 2147483646]. A Lehmer generator
    // must never start at 0 (absorbing state) or at a multiple of the modulus,
    // otherwise the sequence degenerates to all zeros.
    this.state = (this._stringToSeed(seedString) % 2147483646) + 1;
  }

  /**
   * FNV-1a hash of a string.
   * @param {string} str
   * @returns {number} Unsigned 32-bit hash.
   */
  _stringToSeed(str) {
    let h = 2166136261 >>> 0; // FNV offset basis
    for (let i = 0; i < str.length; i++) {
      h = Math.imul(h ^ str.charCodeAt(i), 16777619); // FNV prime
    }
    return h >>> 0;
  }

  /** Returns a float in [0, 1) */
  next() {
    // Lehmer step: state = state * 48271 mod (2^31 - 1).
    // BUGFIX: the previous version used Math.imul, which truncates the 47-bit
    // product to a SIGNED 32-bit value, corrupting the sequence (negative
    // states, negative outputs, possible absorbing state 0). Plain double
    // multiplication is exact here: 48271 * (2^31 - 2) < 2^53.
    this.state = (this.state * 48271) % 2147483647;
    // state is always in [1, 2146483646], so this maps to [0, 1).
    return (this.state - 1) / 2147483646;
  }

  /**
   * Returns an integer between min and max (inclusive).
   * @param {number} min
   * @param {number} max
   * @returns {number}
   */
  range(min, max) {
    return Math.floor(this.next() * (max - min + 1)) + min;
  }

  /**
   * Returns a random element from an array, or null for empty/missing input.
   * @param {Array} arr
   * @returns {*}
   */
  choice(arr) {
    if (!arr || arr.length === 0) return null;
    return arr[this.range(0, arr.length - 1)];
  }

  /**
   * Returns a boolean that is true with the given probability.
   * @param {number} [probability=0.5]
   * @returns {boolean}
   */
  bool(probability = 0.5) {
    return this.next() < probability;
  }
}
|
|
40
|
+
|
|
41
|
+
module.exports = SeededRandom;
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Runner for Behavioral Hashing (SimHash).
|
|
3
|
+
* Executes a calculation against a fabricated, deterministic context.
|
|
4
|
+
*/
|
|
5
|
+
const Fabricator = require('./Fabricator');
|
|
6
|
+
const { generateDataHash } = require('../utils/utils');
|
|
7
|
+
|
|
8
|
+
class SimRunner {
  /**
   * Runs the simulation for a specific calculation.
   * @param {Object} calcManifest - The manifest entry for the calculation.
   * @param {Map} fullManifestMap - Map of all manifests (to look up dependencies).
   * @returns {Promise<string>} The SimHash (SHA256 of the output).
   */
  static async run(calcManifest, fullManifestMap) {
    try {
      // Build a deterministic, fabricated execution context for this calc.
      const fabricator = new Fabricator(calcManifest.name);
      const context = await fabricator.generateContext(calcManifest, fullManifestMap);

      // Instantiate the calculation class and execute it against the context.
      const instance = new calcManifest.class();
      await instance.process(context);

      // Extract the output. Calculations that buffer internal state expose
      // getResult(); otherwise fall back to common result properties.
      const result = instance.getResult
        ? await instance.getResult()
        : (instance.result || instance.results || {});

      // Hash the (ideally fully deterministic) output. Non-deterministic keys
      // should already be prevented by the mocked context.
      return generateDataHash(result);
    } catch (e) {
      console.error(`[SimRunner] Simulation failed for ${calcManifest.name}:`, e);
      // If simulation crashes, we return a hash of the error to safely trigger a re-run
      return generateDataHash({ error: e.message });
    }
  }
}
|
|
50
|
+
|
|
51
|
+
module.exports = SimRunner;
|
|
@@ -1,34 +1,44 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* @fileoverview Build Reporter & Auto-Runner.
|
|
3
3
|
* Generates a "Pre-Flight" report of what the computation system WILL do.
|
|
4
|
-
*
|
|
5
|
-
*
|
|
6
|
-
*
|
|
7
|
-
* UPDATED (IDEA 1): Added Dependency Impact Analysis ("Blast Radius").
|
|
4
|
+
* UPGRADED: Implements Behavioral Hashing (SimHash) to detect Cosmetic vs Logic changes.
|
|
5
|
+
* OPTIMIZED: Caches SimHashes and actively updates status for Stable items to prevent re-runs.
|
|
6
|
+
* OPTIMIZED (V2): Implements System Fingerprinting to skip 90-day scan if manifest is identical.
|
|
8
7
|
*/
|
|
9
8
|
|
|
10
9
|
const { analyzeDateExecution } = require('../WorkflowOrchestrator');
|
|
11
|
-
const { fetchComputationStatus }
|
|
10
|
+
const { fetchComputationStatus, updateComputationStatus } = require('../persistence/StatusRepository');
|
|
12
11
|
const { normalizeName, getExpectedDateStrings, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils');
|
|
13
12
|
const { checkRootDataAvailability } = require('../data/AvailabilityChecker');
|
|
13
|
+
const SimRunner = require('../simulation/SimRunner');
|
|
14
14
|
const pLimit = require('p-limit');
|
|
15
15
|
const path = require('path');
|
|
16
|
+
const crypto = require('crypto');
|
|
16
17
|
const packageJson = require(path.join(__dirname, '..', '..', '..', 'package.json'));
|
|
17
18
|
const packageVersion = packageJson.version;
|
|
18
19
|
|
|
20
|
+
// Persistent Registry for SimHashes (so Workers don't have to recalc)
|
|
21
|
+
const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';
|
|
22
|
+
|
|
23
|
+
/**
 * Helper: Generates a unique signature for the entire computation system state.
 * If ANY calculation logic or dependency changes, this hash changes.
 * @param {Array<{hash: string}>} manifest - All calculation manifest entries.
 * @returns {string} Hex-encoded SHA-256 fingerprint.
 */
function getSystemFingerprint(manifest) {
  // Collect every calculation's code hash, then sort so the fingerprint is
  // independent of manifest ordering (determinism).
  const codeHashes = manifest.map((calc) => calc.hash);
  codeHashes.sort();
  return crypto.createHash('sha256').update(codeHashes.join('|')).digest('hex');
}
|
|
32
|
+
|
|
19
33
|
/**
|
|
20
|
-
* Helper: Determines if a calculation should be excluded from the report
|
|
21
|
-
* because the date is prior to the earliest possible data existence.
|
|
34
|
+
* Helper: Determines if a calculation should be excluded from the report.
|
|
22
35
|
*/
|
|
23
36
|
function isDateBeforeAvailability(dateStr, calcManifest) {
|
|
24
37
|
const targetDate = new Date(dateStr + 'T00:00:00Z');
|
|
25
38
|
const deps = calcManifest.rootDataDependencies || [];
|
|
26
|
-
|
|
27
|
-
// If no data dependencies, it's always valid (e.g., pure math)
|
|
28
39
|
if (deps.length === 0) return false;
|
|
29
40
|
|
|
30
41
|
for (const dep of deps) {
|
|
31
|
-
// Map dependency name to start date
|
|
32
42
|
let startDate = null;
|
|
33
43
|
if (dep === 'portfolio') startDate = DEFINITIVE_EARLIEST_DATES.portfolio;
|
|
34
44
|
else if (dep === 'history') startDate = DEFINITIVE_EARLIEST_DATES.history;
|
|
@@ -36,7 +46,6 @@ function isDateBeforeAvailability(dateStr, calcManifest) {
|
|
|
36
46
|
else if (dep === 'insights') startDate = DEFINITIVE_EARLIEST_DATES.insights;
|
|
37
47
|
else if (dep === 'price') startDate = DEFINITIVE_EARLIEST_DATES.price;
|
|
38
48
|
|
|
39
|
-
// If we have a start date and the target is BEFORE it, exclude this calc.
|
|
40
49
|
if (startDate && targetDate < startDate) { return true; }
|
|
41
50
|
}
|
|
42
51
|
return false;
|
|
@@ -44,17 +53,14 @@ function isDateBeforeAvailability(dateStr, calcManifest) {
|
|
|
44
53
|
|
|
45
54
|
/**
|
|
46
55
|
* Helper: Calculates the transitive closure of dependents (Blast Radius).
|
|
47
|
-
* Returns the count of direct and total cascading dependents.
|
|
48
56
|
*/
|
|
49
57
|
function calculateBlastRadius(targetCalcName, reverseGraph) {
|
|
50
58
|
const impactSet = new Set();
|
|
51
59
|
const queue = [targetCalcName];
|
|
52
60
|
|
|
53
|
-
// BFS Traversal
|
|
54
61
|
while(queue.length > 0) {
|
|
55
62
|
const current = queue.shift();
|
|
56
63
|
const dependents = reverseGraph.get(current) || [];
|
|
57
|
-
|
|
58
64
|
dependents.forEach(child => {
|
|
59
65
|
if (!impactSet.has(child)) {
|
|
60
66
|
impactSet.add(child);
|
|
@@ -66,46 +72,128 @@ function calculateBlastRadius(targetCalcName, reverseGraph) {
|
|
|
66
72
|
return {
|
|
67
73
|
directDependents: (reverseGraph.get(targetCalcName) || []).length,
|
|
68
74
|
totalCascadingDependents: impactSet.size,
|
|
69
|
-
affectedCalculations: Array.from(impactSet).slice(0, 50)
|
|
75
|
+
affectedCalculations: Array.from(impactSet).slice(0, 50)
|
|
70
76
|
};
|
|
71
77
|
}
|
|
72
78
|
|
|
79
|
+
/**
 * [NEW] Helper: Runs SimHash check with Caching and Registry Persistence.
 *
 * Partitions re-run candidates into true re-runs (behavior changed or
 * unverifiable) and stable updates (code hash changed but simulated output
 * is identical to the stored simHash).
 *
 * @param {Array<Object>} candidates - Re-run candidate items (each has at least `name`, `reason`).
 * @param {Map} manifestMap - normalized name -> manifest entry (with `hash`).
 * @param {Object} dailyStatus - name -> stored status (expects `simHash`) for the date under analysis.
 * @param {Object} logger - logger with a `log(level, msg)` method.
 * @param {Map} simHashCache - code hash -> simHash; shared across dates so each code version simulates once.
 * @param {Object} db - Firestore-like handle used for the fire-and-forget registry writes.
 * @returns {Promise<{trueReRuns: Array, stableUpdates: Array}>}
 */
async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, logger, simHashCache, db) {
  const trueReRuns = [];
  const stableUpdates = [];

  // Limit concurrency for simulations
  const limit = pLimit(10);

  // NOTE: the limited async callbacks push into the shared arrays above; this
  // is safe in Node's single-threaded model, but result ordering is
  // nondeterministic relative to `candidates`.
  const checks = candidates.map(item => limit(async () => {
    try {
      const manifest = manifestMap.get(item.name);
      const stored = dailyStatus[item.name];

      // No stored status / no prior simHash / unknown calc: cannot verify
      // stability, so conservatively treat as a true re-run.
      if (!stored || !stored.simHash || !manifest) {
        trueReRuns.push(item);
        return;
      }

      // 1. Check Cache first (Avoid re-simulating the same code for 100 different dates)
      let newSimHash = simHashCache.get(manifest.hash);

      // 2. If Miss, Run Simulation & Persist to Registry
      if (!newSimHash) {
        newSimHash = await SimRunner.run(manifest, manifestMap);
        simHashCache.set(manifest.hash, newSimHash);

        // Write to Registry so Production Workers can find it without running SimRunner
        // Fire-and-forget write to reduce latency
        db.collection(SIMHASH_REGISTRY_COLLECTION).doc(manifest.hash).set({
          simHash: newSimHash,
          createdAt: new Date(),
          calcName: manifest.name
        }).catch(err => logger.log('WARN', `Failed to write SimHash registry for ${manifest.name}: ${err.message}`));
      }

      // 3. Compare
      if (newSimHash === stored.simHash) {
        // BEHAVIORAL MATCH: Code changed, but output is identical.
        stableUpdates.push({
          ...item,
          reason: "Code Updated (Logic Stable)",
          simHash: newSimHash, // New SimHash (same as old)
          newHash: manifest.hash // New Code Hash
        });
      } else {
        // BEHAVIORAL MISMATCH: Logic changed.
        trueReRuns.push({
          ...item,
          reason: item.reason + ` [SimHash Mismatch]`,
          oldSimHash: stored.simHash,
          newSimHash: newSimHash
        });
      }
    } catch (e) {
      // Any failure during verification falls back to a re-run (safe default).
      logger.log('WARN', `[BuildReporter] SimHash check failed for ${item.name}: ${e.message}`);
      trueReRuns.push(item);
    }
  }));

  await Promise.all(checks);
  return { trueReRuns, stableUpdates };
}
|
|
143
|
+
|
|
73
144
|
/**
|
|
74
145
|
* AUTO-RUN ENTRY POINT
|
|
75
|
-
* Uses transactional locking to prevent race conditions.
|
|
76
146
|
*/
|
|
77
147
|
async function ensureBuildReport(config, dependencies, manifest) {
|
|
78
148
|
const { db, logger } = dependencies;
|
|
79
149
|
const now = new Date();
|
|
80
|
-
const buildId = `v${packageVersion}_${now.getFullYear()}-${String(now.getMonth()+1).padStart(2,'0')}-${String(now.getDate()).padStart(2,'0')}_${String(now.getHours()).padStart(2,'0')}
|
|
81
|
-
|
|
82
|
-
// Lock document specific to this version
|
|
150
|
+
const buildId = `v${packageVersion}_${now.getFullYear()}-${String(now.getMonth()+1).padStart(2,'0')}-${String(now.getDate()).padStart(2,'0')}_${String(now.getHours()).padStart(2,'0')}`;
|
|
83
151
|
const lockRef = db.collection('computation_build_records').doc(`init_lock_v${packageVersion}`);
|
|
84
152
|
|
|
85
153
|
try {
|
|
86
|
-
// Transaction: "Hey I am deploying" check
|
|
87
154
|
const shouldRun = await db.runTransaction(async (t) => {
|
|
88
|
-
const doc
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
// Claim the lock
|
|
93
|
-
t.set(lockRef, {
|
|
94
|
-
status: 'IN_PROGRESS',
|
|
95
|
-
startedAt: new Date(),
|
|
96
|
-
workerId: process.env.K_REVISION || 'unknown',
|
|
97
|
-
buildId: buildId
|
|
98
|
-
});
|
|
155
|
+
const doc = await t.get(lockRef);
|
|
156
|
+
if (doc.exists) { return false; }
|
|
157
|
+
t.set(lockRef, { status: 'IN_PROGRESS', startedAt: new Date(), buildId: buildId });
|
|
99
158
|
return true;
|
|
100
159
|
});
|
|
101
160
|
|
|
102
|
-
if (!shouldRun) { logger.log('INFO', `[BuildReporter] 🔒 Report for v${packageVersion}
|
|
161
|
+
if (!shouldRun) { logger.log('INFO', `[BuildReporter] 🔒 Report for v${packageVersion} locked. Skipping.`); return; }
|
|
162
|
+
|
|
163
|
+
// [NEW] 1. Calculate Current System Fingerprint
|
|
164
|
+
const currentSystemHash = getSystemFingerprint(manifest);
|
|
103
165
|
|
|
104
|
-
|
|
166
|
+
// [NEW] 2. Fetch Last Build's Fingerprint
|
|
167
|
+
const latestBuildDoc = await db.collection('computation_build_records').doc('latest').get();
|
|
105
168
|
|
|
106
|
-
|
|
169
|
+
if (latestBuildDoc.exists) {
|
|
170
|
+
const latestData = latestBuildDoc.data();
|
|
171
|
+
|
|
172
|
+
// [OPTIMIZATION] If signatures match, we can clone the report or just skip
|
|
173
|
+
if (latestData.systemFingerprint === currentSystemHash) {
|
|
174
|
+
logger.log('INFO', `[BuildReporter] ⚡ System Fingerprint (${currentSystemHash.substring(0,8)}) matches latest build. Skipping Report.`);
|
|
175
|
+
|
|
176
|
+
// Create a "Skipped" record for the new version so we know it deployed
|
|
177
|
+
await db.collection('computation_build_records').doc(buildId).set({
|
|
178
|
+
buildId,
|
|
179
|
+
packageVersion,
|
|
180
|
+
systemFingerprint: currentSystemHash,
|
|
181
|
+
status: 'SKIPPED_IDENTICAL',
|
|
182
|
+
referenceBuild: latestData.buildId, // Pointer to the build that actually did the work
|
|
183
|
+
generatedAt: new Date().toISOString()
|
|
184
|
+
});
|
|
185
|
+
|
|
186
|
+
// Release lock and exit
|
|
187
|
+
lockRef.update({ status: 'SKIPPED', completedAt: new Date() }).catch(() => {});
|
|
188
|
+
return;
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
logger.log('INFO', `[BuildReporter] 🚀 Change Detected. Running Pre-flight Report for v${packageVersion}...`);
|
|
193
|
+
|
|
194
|
+
// Pass the fingerprint to generateBuildReport so it can save it
|
|
195
|
+
await generateBuildReport(config, dependencies, manifest, 90, buildId, currentSystemHash);
|
|
107
196
|
|
|
108
|
-
// Optional: Update lock to completed (fire-and-forget update)
|
|
109
197
|
lockRef.update({ status: 'COMPLETED', completedAt: new Date() }).catch(() => {});
|
|
110
198
|
|
|
111
199
|
} catch (e) {
|
|
@@ -114,13 +202,16 @@ async function ensureBuildReport(config, dependencies, manifest) {
|
|
|
114
202
|
}
|
|
115
203
|
|
|
116
204
|
/**
|
|
117
|
-
* Generates the report
|
|
205
|
+
* Generates the report, writes to Firestore, AND FIXES STABLE UPDATES.
|
|
118
206
|
*/
|
|
119
|
-
async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null) {
|
|
207
|
+
async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null, systemFingerprint = null) {
|
|
120
208
|
const { db, logger } = dependencies;
|
|
121
209
|
const buildId = customBuildId || `manual_${Date.now()}`;
|
|
122
210
|
|
|
123
|
-
|
|
211
|
+
// Calculate fingerprint if not provided (for manual runs)
|
|
212
|
+
const finalFingerprint = systemFingerprint || getSystemFingerprint(manifest);
|
|
213
|
+
|
|
214
|
+
logger.log('INFO', `[BuildReporter] Generating Build Report: ${buildId} (Scope: ${daysBack} days, Fingerprint: ${finalFingerprint.substring(0,8)})...`);
|
|
124
215
|
|
|
125
216
|
const today = new Date();
|
|
126
217
|
const startDate = new Date();
|
|
@@ -129,7 +220,9 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
|
|
|
129
220
|
const datesToCheck = getExpectedDateStrings(startDate, today);
|
|
130
221
|
const manifestMap = new Map(manifest.map(c => [normalizeName(c.name), c]));
|
|
131
222
|
|
|
132
|
-
// [
|
|
223
|
+
// [OPTIMIZATION] Cache SimHashes across dates so we only calculate once per code version
|
|
224
|
+
const simHashCache = new Map();
|
|
225
|
+
|
|
133
226
|
const reverseGraph = new Map();
|
|
134
227
|
manifest.forEach(c => {
|
|
135
228
|
const parentName = normalizeName(c.name);
|
|
@@ -142,17 +235,16 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
|
|
|
142
235
|
}
|
|
143
236
|
});
|
|
144
237
|
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
generatedAt: new Date().toISOString(),
|
|
238
|
+
const reportHeader = {
|
|
239
|
+
buildId,
|
|
240
|
+
packageVersion,
|
|
241
|
+
systemFingerprint: finalFingerprint, // Saved to Firestore
|
|
242
|
+
generatedAt: new Date().toISOString(),
|
|
150
243
|
summary: {},
|
|
151
244
|
_sharded: true
|
|
152
245
|
};
|
|
153
246
|
|
|
154
|
-
let totalRun = 0;
|
|
155
|
-
let totalReRun = 0;
|
|
247
|
+
let totalRun = 0, totalReRun = 0, totalStable = 0;
|
|
156
248
|
const detailWrites = [];
|
|
157
249
|
|
|
158
250
|
const limit = pLimit(20);
|
|
@@ -178,93 +270,89 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
|
|
|
178
270
|
const dailyStatus = results[0];
|
|
179
271
|
const availability = results[1];
|
|
180
272
|
const prevDailyStatus = (prevDateStr && results[2]) ? results[2] : (prevDateStr ? {} : null);
|
|
181
|
-
const rootDataStatus = availability ? availability.status : { hasPortfolio: false, hasHistory: false
|
|
273
|
+
const rootDataStatus = availability ? availability.status : { hasPortfolio: false, hasHistory: false };
|
|
182
274
|
|
|
183
275
|
const analysis = analyzeDateExecution(dateStr, manifest, rootDataStatus, dailyStatus, manifestMap, prevDailyStatus);
|
|
184
276
|
|
|
185
|
-
// ---------------------------------------------------------
|
|
186
|
-
// STRICT 5-CATEGORY MAPPING
|
|
187
|
-
// ---------------------------------------------------------
|
|
188
277
|
const dateSummary = {
|
|
189
|
-
run:
|
|
190
|
-
|
|
191
|
-
blocked: [], // Missing Data (Today) / Dependency Missing
|
|
192
|
-
impossible: [], // Missing Data (Historical) / Impossible Dependency
|
|
193
|
-
uptodate: [], // Hash Match (Previously "Skipped")
|
|
194
|
-
|
|
195
|
-
// Metadata for Verification
|
|
196
|
-
meta: {
|
|
197
|
-
totalIncluded: 0,
|
|
198
|
-
totalExpected: 0,
|
|
199
|
-
match: false
|
|
200
|
-
}
|
|
278
|
+
run: [], rerun: [], stable: [], blocked: [], impossible: [], uptodate: [],
|
|
279
|
+
meta: { totalIncluded: 0, totalExpected: 0, match: false }
|
|
201
280
|
};
|
|
202
281
|
|
|
203
|
-
// Calculate Expected Count (Computations in manifest that exist for this date)
|
|
204
282
|
const expectedCount = manifest.filter(c => !isDateBeforeAvailability(dateStr, c)).length;
|
|
205
283
|
dateSummary.meta.totalExpected = expectedCount;
|
|
206
284
|
|
|
207
|
-
// Helper to push only if date is valid for this specific calc
|
|
208
285
|
const pushIfValid = (targetArray, item, extraReason = null) => {
|
|
209
286
|
const calcManifest = manifestMap.get(item.name);
|
|
210
|
-
if (calcManifest && isDateBeforeAvailability(dateStr, calcManifest))
|
|
211
|
-
return; // EXCLUDED: Date is before data exists
|
|
212
|
-
}
|
|
287
|
+
if (calcManifest && isDateBeforeAvailability(dateStr, calcManifest)) return;
|
|
213
288
|
|
|
214
289
|
const entry = {
|
|
215
|
-
name:
|
|
290
|
+
name: item.name,
|
|
216
291
|
reason: item.reason || extraReason,
|
|
217
|
-
pass:
|
|
292
|
+
pass: calcManifest ? calcManifest.pass : '?'
|
|
218
293
|
};
|
|
219
|
-
|
|
220
|
-
// [IDEA 1] If this is a Re-Run, calculate Blast Radius
|
|
221
294
|
if (targetArray === dateSummary.rerun) {
|
|
222
295
|
entry.impact = calculateBlastRadius(item.name, reverseGraph);
|
|
223
296
|
}
|
|
224
|
-
|
|
225
297
|
targetArray.push(entry);
|
|
226
298
|
};
|
|
227
299
|
|
|
228
|
-
// 1. RUN
|
|
300
|
+
// 1. RUN
|
|
229
301
|
analysis.runnable.forEach(item => pushIfValid(dateSummary.run, item, "New Calculation"));
|
|
230
302
|
|
|
231
|
-
// 2. RE-RUN (
|
|
232
|
-
analysis.reRuns.
|
|
303
|
+
// 2. RE-RUN & STABLE Analysis (SimHash Integration)
|
|
304
|
+
if (analysis.reRuns.length > 0) {
|
|
305
|
+
// Pass simHashCache and db for registry writes
|
|
306
|
+
const { trueReRuns, stableUpdates } = await verifyBehavioralStability(analysis.reRuns, manifestMap, dailyStatus, logger, simHashCache, db);
|
|
307
|
+
|
|
308
|
+
trueReRuns.forEach(item => pushIfValid(dateSummary.rerun, item, "Logic Changed"));
|
|
309
|
+
stableUpdates.forEach(item => pushIfValid(dateSummary.stable, item, "Cosmetic Change"));
|
|
310
|
+
|
|
311
|
+
// [CRITICAL FIX] "Fix the Blast Radius"
|
|
312
|
+
// If updates are STABLE, we update the status NOW.
|
|
313
|
+
// This implies: Code Hash changes, but Sim Hash stays same.
|
|
314
|
+
// The Dispatcher will see the new Code Hash in status matches the Manifest, so it won't dispatch.
|
|
315
|
+
if (stableUpdates.length > 0) {
|
|
316
|
+
const updatesPayload = {};
|
|
317
|
+
for (const stable of stableUpdates) {
|
|
318
|
+
const m = manifestMap.get(stable.name);
|
|
319
|
+
// We preserve the *existing* resultHash because the logic is proven stable.
|
|
320
|
+
// We update the 'hash' to the NEW code hash.
|
|
321
|
+
if (m && dailyStatus[stable.name]) {
|
|
322
|
+
updatesPayload[stable.name] = {
|
|
323
|
+
hash: m.hash, // New Code Hash
|
|
324
|
+
simHash: stable.simHash, // Same Sim Hash
|
|
325
|
+
resultHash: dailyStatus[stable.name].resultHash, // Same Result Hash
|
|
326
|
+
dependencyResultHashes: dailyStatus[stable.name].dependencyResultHashes || {},
|
|
327
|
+
category: m.category,
|
|
328
|
+
composition: m.composition,
|
|
329
|
+
lastUpdated: new Date()
|
|
330
|
+
};
|
|
331
|
+
}
|
|
332
|
+
}
|
|
333
|
+
// Perform the "Fix"
|
|
334
|
+
if (Object.keys(updatesPayload).length > 0) {
|
|
335
|
+
await updateComputationStatus(dateStr, updatesPayload, config, dependencies);
|
|
336
|
+
logger.log('INFO', `[BuildReporter] 🩹 Fixed ${Object.keys(updatesPayload).length} stable items for ${dateStr}. They will NOT re-run.`);
|
|
337
|
+
}
|
|
338
|
+
}
|
|
339
|
+
}
|
|
233
340
|
|
|
234
|
-
// 3. BLOCKED
|
|
341
|
+
// 3. BLOCKED / IMPOSSIBLE / UPTODATE
|
|
235
342
|
analysis.blocked.forEach(item => pushIfValid(dateSummary.blocked, item));
|
|
236
343
|
analysis.failedDependency.forEach(item => pushIfValid(dateSummary.blocked, item, "Dependency Missing"));
|
|
237
|
-
|
|
238
|
-
// 4. IMPOSSIBLE (Permanent Issues)
|
|
239
344
|
analysis.impossible.forEach(item => pushIfValid(dateSummary.impossible, item));
|
|
240
|
-
|
|
241
|
-
// 5. UP-TO-DATE (Previously "Skipped")
|
|
242
345
|
analysis.skipped.forEach(item => pushIfValid(dateSummary.uptodate, item, "Up To Date"));
|
|
243
346
|
|
|
244
|
-
//
|
|
245
|
-
const includedCount = dateSummary.run.length +
|
|
246
|
-
dateSummary.
|
|
247
|
-
dateSummary.blocked.length +
|
|
248
|
-
dateSummary.impossible.length +
|
|
249
|
-
dateSummary.uptodate.length;
|
|
250
|
-
|
|
347
|
+
// Meta stats
|
|
348
|
+
const includedCount = dateSummary.run.length + dateSummary.rerun.length + dateSummary.stable.length +
|
|
349
|
+
dateSummary.blocked.length + dateSummary.impossible.length + dateSummary.uptodate.length;
|
|
251
350
|
dateSummary.meta.totalIncluded = includedCount;
|
|
252
|
-
dateSummary.meta.match
|
|
253
|
-
|
|
254
|
-
if (!dateSummary.meta.match) {
|
|
255
|
-
logger.log('WARN', `[BuildReporter] ⚠️ Mismatch on ${dateStr}: Expected ${expectedCount} but got ${includedCount}.`);
|
|
256
|
-
}
|
|
351
|
+
dateSummary.meta.match = (includedCount === expectedCount);
|
|
257
352
|
|
|
258
|
-
|
|
259
|
-
const detailRef = db.collection('computation_build_records').doc(buildId).collection('details').doc(dateStr);
|
|
260
|
-
detailWrites.push({
|
|
261
|
-
ref: detailRef,
|
|
262
|
-
data: dateSummary
|
|
263
|
-
});
|
|
353
|
+
detailWrites.push({ ref: db.collection('computation_build_records').doc(buildId).collection('details').doc(dateStr), data: dateSummary });
|
|
264
354
|
|
|
265
|
-
return {
|
|
266
|
-
stats: { run: dateSummary.run.length, rerun: dateSummary.rerun.length }
|
|
267
|
-
};
|
|
355
|
+
return { stats: { run: dateSummary.run.length, rerun: dateSummary.rerun.length, stable: dateSummary.stable.length } };
|
|
268
356
|
|
|
269
357
|
} catch (err) {
|
|
270
358
|
logger.log('ERROR', `[BuildReporter] Error analyzing date ${dateStr}: ${err.message}`);
|
|
@@ -274,69 +362,21 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
|
|
|
274
362
|
|
|
275
363
|
const results = await Promise.all(processingPromises);
|
|
276
364
|
|
|
277
|
-
results.forEach(res => {
|
|
278
|
-
if (res) {
|
|
279
|
-
totalRun += res.stats.run;
|
|
280
|
-
totalReRun += res.stats.rerun;
|
|
281
|
-
}
|
|
282
|
-
});
|
|
283
|
-
|
|
284
|
-
reportHeader.summary = {
|
|
285
|
-
totalReRuns: totalReRun,
|
|
286
|
-
totalNew: totalRun,
|
|
287
|
-
scanRange: `${datesToCheck[0]} to ${datesToCheck[datesToCheck.length-1]}`
|
|
288
|
-
};
|
|
365
|
+
results.forEach(res => { if (res) { totalRun += res.stats.run; totalReRun += res.stats.rerun; totalStable += res.stats.stable; } });
|
|
289
366
|
|
|
290
|
-
|
|
291
|
-
const reportRef = db.collection('computation_build_records').doc(buildId);
|
|
292
|
-
await reportRef.set(reportHeader);
|
|
293
|
-
|
|
294
|
-
// 2. Write Details (Protected & Parallelized)
|
|
295
|
-
// FIX: Using parallel individual writes instead of Batch to avoid DEADLINE_EXCEEDED
|
|
296
|
-
let detailsSuccess = true;
|
|
297
|
-
if (detailWrites.length > 0) {
|
|
298
|
-
logger.log('INFO', `[BuildReporter] Writing ${detailWrites.length} detail records (Parallel Strategy)...`);
|
|
299
|
-
|
|
300
|
-
try {
|
|
301
|
-
// Concurrency limit of 15 to be safe
|
|
302
|
-
const writeLimit = pLimit(15);
|
|
303
|
-
const writePromises = detailWrites.map(w => writeLimit(() =>
|
|
304
|
-
w.ref.set(w.data).catch(e => {
|
|
305
|
-
logger.log('WARN', `[BuildReporter] Failed to write detail for ${w.ref.path}: ${e.message}`);
|
|
306
|
-
throw e;
|
|
307
|
-
})
|
|
308
|
-
));
|
|
309
|
-
|
|
310
|
-
await Promise.all(writePromises);
|
|
311
|
-
logger.log('INFO', `[BuildReporter] Successfully wrote all detail records.`);
|
|
367
|
+
reportHeader.summary = { totalReRuns: totalReRun, totalNew: totalRun, totalStable: totalStable, scanRange: `${datesToCheck[0]} to ${datesToCheck[datesToCheck.length-1]}` };
|
|
312
368
|
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
369
|
+
await db.collection('computation_build_records').doc(buildId).set(reportHeader);
|
|
370
|
+
|
|
371
|
+
// Parallel write details
|
|
372
|
+
const writeLimit = pLimit(15);
|
|
373
|
+
await Promise.all(detailWrites.map(w => writeLimit(() => w.ref.set(w.data))));
|
|
318
374
|
|
|
319
|
-
|
|
320
|
-
// This runs regardless of detail write success/failure
|
|
321
|
-
const latestMetadata = {
|
|
322
|
-
...reportHeader,
|
|
323
|
-
note: detailsSuccess
|
|
324
|
-
? "Latest build report pointer (See subcollection for details)."
|
|
325
|
-
: "Latest build report pointer (WARNING: Partial detail records due to write error)."
|
|
326
|
-
};
|
|
375
|
+
await db.collection('computation_build_records').doc('latest').set({ ...reportHeader, note: "Latest build report pointer." });
|
|
327
376
|
|
|
328
|
-
|
|
329
|
-
await db.collection('computation_build_records').doc('latest').set(latestMetadata);
|
|
330
|
-
logger.log('SUCCESS', `[BuildReporter] Report ${buildId} saved. Re-runs: ${totalReRun}, New: ${totalRun}. Pointer Updated.`);
|
|
331
|
-
} catch (pointerErr) {
|
|
332
|
-
logger.log('FATAL', `[BuildReporter] Failed to update 'latest' pointer!`, pointerErr);
|
|
333
|
-
}
|
|
377
|
+
logger.log('SUCCESS', `[BuildReporter] Report ${buildId} saved. Re-runs: ${totalReRun}, Stable (Fixed): ${totalStable}, New: ${totalRun}.`);
|
|
334
378
|
|
|
335
|
-
return {
|
|
336
|
-
success: true,
|
|
337
|
-
reportId: buildId,
|
|
338
|
-
summary: reportHeader.summary
|
|
339
|
-
};
|
|
379
|
+
return { success: true, reportId: buildId, summary: reportHeader.summary };
|
|
340
380
|
}
|
|
341
381
|
|
|
342
382
|
module.exports = { ensureBuildReport, generateBuildReport };
|