bulltrackers-module 1.0.291 → 1.0.292
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,41 +6,29 @@ const SimRunner = require('../simulation/SimRunner');
 const pLimit = require('p-limit');
 const path = require('path');
 const crypto = require('crypto');
-const fs = require('fs');
+const fs = require('fs');
 const packageJson = require(path.join(__dirname, '..', '..', '..', 'package.json'));
 const packageVersion = packageJson.version;
-const { generateCodeHash } = require('../utils/utils');
+const { generateCodeHash } = require('../utils/utils');

-// Persistent Registry for SimHashes
 const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';

-// ---
-
-// 1. define the Root of the system (one level up from 'tools')
+// --- RECURSIVE SYSTEM HASHING ---
 const SYSTEM_ROOT = path.resolve(__dirname, '..');
-
-// 2. Define what to ignore to prevent noise or infinite loops
 const IGNORED_DIRS = new Set(['node_modules', '.git', '.idea', 'coverage', 'logs', 'tests']);
 const IGNORED_FILES = new Set(['package-lock.json', '.DS_Store', '.env']);

-/**
- * Recursively walks a directory and returns a list of file paths.
- */
 function walkSync(dir, fileList = []) {
   const files = fs.readdirSync(dir);
-
   files.forEach(file => {
     if (IGNORED_FILES.has(file)) return;
-
     const filePath = path.join(dir, file);
     const stat = fs.statSync(filePath);
-
     if (stat.isDirectory()) {
       if (!IGNORED_DIRS.has(file)) {
         walkSync(filePath, fileList);
       }
     } else {
-      // Only hash code files (add .yaml if you want workflows included)
       if (file.endsWith('.js') || file.endsWith('.json') || file.endsWith('.yaml')) {
         fileList.push(filePath);
       }
@@ -49,69 +37,41 @@ function walkSync(dir, fileList = []) {
   return fileList;
 }

-/**
- * Generates a single hash representing the entire infrastructure code state.
- */
 function getInfrastructureHash() {
   try {
     const allFiles = walkSync(SYSTEM_ROOT);
-    allFiles.sort();
-
+    allFiles.sort();
     const bigHash = crypto.createHash('sha256');
-
     for (const filePath of allFiles) {
       const content = fs.readFileSync(filePath, 'utf8');
       const relativePath = path.relative(SYSTEM_ROOT, filePath);
-
-      // DECISION: How to clean?
       let cleanContent = content;
-
-      // 1. If it's JS, use your system standard for code hashing
       if (filePath.endsWith('.js')) {
-        // This strips comments and whitespace consistently with ManifestBuilder
-        // Note: generateCodeHash returns a hash, we can just use that hash
        cleanContent = generateCodeHash(content);
-      }
-      // 2. If it's JSON/YAML, just strip basic whitespace to ignore indent changes
-      else {
+      } else {
        cleanContent = content.replace(/\s+/g, '');
      }
-
-      // Feed the PATH and the CLEAN CONTENT into the master hash
      bigHash.update(`${relativePath}:${cleanContent}|`);
    }
-
    return bigHash.digest('hex');
  } catch (e) {
    console.warn(`[BuildReporter] ⚠️ Failed to generate infra hash: ${e.message}`);
-    return 'infra_hash_error';
+    return 'infra_hash_error';
  }
 }

-/**
- * UPDATED: System Fingerprint = Manifest Hash + Infrastructure Hash
- */
 function getSystemFingerprint(manifest) {
-  // 1. Business Logic Hash (The Calculations)
   const sortedManifestHashes = manifest.map(c => c.hash).sort().join('|');
-
-  // 2. Infrastructure Hash (The System Code)
   const infraHash = getInfrastructureHash();
-
-  // 3. Combine
   return crypto.createHash('sha256')
     .update(sortedManifestHashes + infraHash)
     .digest('hex');
 }

-/**
- * Helper: Determines if a calculation should be excluded from the report.
- */
 function isDateBeforeAvailability(dateStr, calcManifest) {
   const targetDate = new Date(dateStr + 'T00:00:00Z');
   const deps = calcManifest.rootDataDependencies || [];
   if (deps.length === 0) return false;
-
   for (const dep of deps) {
     let startDate = null;
     if (dep === 'portfolio') startDate = DEFINITIVE_EARLIEST_DATES.portfolio;
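The hunks above strip the explanatory comments but keep the recursive system-hashing logic intact: walk the tree from SYSTEM_ROOT, skip noisy directories and files, sort the paths, and fold every file's relative path plus cleaned content into a single SHA-256. A minimal standalone sketch of that idea follows, using only Node built-ins; the root and ignore sets are illustrative placeholders, and plain whitespace-stripping stands in for generateCodeHash.

// Sketch of the recursive "infrastructure hash": walk a tree, skip noisy
// paths, and fold every file's path + cleaned content into one SHA-256.
// ROOT, SKIP_DIRS and SKIP_FILES are illustrative, not the module's values.
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');

const ROOT = path.resolve(__dirname);
const SKIP_DIRS = new Set(['node_modules', '.git']);
const SKIP_FILES = new Set(['package-lock.json']);

function listFiles(dir, out = []) {
  for (const name of fs.readdirSync(dir)) {
    if (SKIP_FILES.has(name)) continue;
    const full = path.join(dir, name);
    if (fs.statSync(full).isDirectory()) {
      if (!SKIP_DIRS.has(name)) listFiles(full, out);
    } else if (/\.(js|json|yaml)$/.test(name)) {
      out.push(full);
    }
  }
  return out;
}

function infrastructureHash() {
  const hash = crypto.createHash('sha256');
  for (const file of listFiles(ROOT).sort()) {
    // Whitespace-stripping stands in for the module's generateCodeHash.
    const content = fs.readFileSync(file, 'utf8').replace(/\s+/g, '');
    hash.update(`${path.relative(ROOT, file)}:${content}|`);
  }
  return hash.digest('hex');
}

console.log(infrastructureHash());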
@@ -119,19 +79,14 @@ function isDateBeforeAvailability(dateStr, calcManifest) {
     else if (dep === 'social') startDate = DEFINITIVE_EARLIEST_DATES.social;
     else if (dep === 'insights') startDate = DEFINITIVE_EARLIEST_DATES.insights;
     else if (dep === 'price') startDate = DEFINITIVE_EARLIEST_DATES.price;
-
     if (startDate && targetDate < startDate) { return true; }
   }
   return false;
 }

-/**
- * Helper: Calculates the transitive closure of dependents (Blast Radius).
- */
 function calculateBlastRadius(targetCalcName, reverseGraph) {
   const impactSet = new Set();
   const queue = [targetCalcName];
-
   while(queue.length > 0) {
     const current = queue.shift();
     const dependents = reverseGraph.get(current) || [];
@@ -142,7 +97,6 @@ function calculateBlastRadius(targetCalcName, reverseGraph) {
       }
     });
   }
-
   return {
     directDependents: (reverseGraph.get(targetCalcName) || []).length,
     totalCascadingDependents: impactSet.size,
@@ -150,14 +104,11 @@ function calculateBlastRadius(targetCalcName, reverseGraph) {
   };
 }

-/**
- * [NEW] Helper: Runs SimHash check with Caching and Registry Persistence.
- */
 async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, logger, simHashCache, db) {
   const trueReRuns = [];
   const stableUpdates = [];

-  //
+  // Concurrency for simulations
   const limit = pLimit(10);

   const checks = candidates.map(item => limit(async () => {
@@ -170,16 +121,10 @@ async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, logger, simHashCache, db) {
       return;
     }

-    // 1. Check Cache first (Avoid re-simulating the same code for 100 different dates)
     let newSimHash = simHashCache.get(manifest.hash);
-
-    // 2. If Miss, Run Simulation & Persist to Registry
     if (!newSimHash) {
       newSimHash = await SimRunner.run(manifest, manifestMap);
       simHashCache.set(manifest.hash, newSimHash);
-
-      // Write to Registry so Production Workers can find it without running SimRunner
-      // Fire-and-forget write to reduce latency
       db.collection(SIMHASH_REGISTRY_COLLECTION).doc(manifest.hash).set({
         simHash: newSimHash,
         createdAt: new Date(),
@@ -187,17 +132,14 @@ async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, logger, simHashCache, db) {
       }).catch(err => logger.log('WARN', `Failed to write SimHash registry for ${manifest.name}: ${err.message}`));
     }

-    // 3. Compare
     if (newSimHash === stored.simHash) {
-      // BEHAVIORAL MATCH: Code changed, but output is identical.
       stableUpdates.push({
         ...item,
         reason: "Code Updated (Logic Stable)",
-        simHash: newSimHash,
-        newHash: manifest.hash
+        simHash: newSimHash,
+        newHash: manifest.hash
       });
     } else {
-      // BEHAVIORAL MISMATCH: Logic changed.
       trueReRuns.push({
         ...item,
         reason: item.reason + ` [SimHash Mismatch]`,
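The verifyBehavioralStability hunks above keep the SimHash cache and registry write but drop the comments explaining them. The pattern is: look up an in-memory SimHash keyed by code hash, simulate only on a miss, and persist the result to the registry collection with a fire-and-forget write. A compressed sketch under those assumptions, where runSimulation stands in for SimRunner.run and db is assumed to be a Firestore instance:

// Cache-then-persist sketch: reuse an in-memory SimHash per code hash,
// and persist misses to a registry collection without awaiting the write.
// runSimulation, db and logger are assumed inputs, not the module's exports.
async function resolveSimHash(codeHash, cache, db, runSimulation, logger) {
  let simHash = cache.get(codeHash);          // hit: same code already simulated
  if (!simHash) {
    simHash = await runSimulation(codeHash);  // miss: simulate once per code version
    cache.set(codeHash, simHash);
    db.collection('system_simhash_registry')  // fire-and-forget registry write
      .doc(codeHash)
      .set({ simHash, createdAt: new Date() })
      .catch(err => logger.log('WARN', `registry write failed: ${err.message}`));
  }
  return simHash;
}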
@@ -215,9 +157,6 @@ async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, logger, simHashCache, db) {
   return { trueReRuns, stableUpdates };
 }

-/**
- * AUTO-RUN ENTRY POINT
- */
 async function ensureBuildReport(config, dependencies, manifest) {
   const { db, logger } = dependencies;
   const now = new Date();
@@ -234,40 +173,28 @@ async function ensureBuildReport(config, dependencies, manifest) {

     if (!shouldRun) { logger.log('INFO', `[BuildReporter] 🔒 Report for v${packageVersion} locked. Skipping.`); return; }

-    // [NEW] 1. Calculate Current System Fingerprint
     const currentSystemHash = getSystemFingerprint(manifest);
-
-    // [NEW] 2. Fetch Last Build's Fingerprint
     const latestBuildDoc = await db.collection('computation_build_records').doc('latest').get();

     if (latestBuildDoc.exists) {
       const latestData = latestBuildDoc.data();
-
-      // [OPTIMIZATION] If signatures match, we can clone the report or just skip
       if (latestData.systemFingerprint === currentSystemHash) {
         logger.log('INFO', `[BuildReporter] ⚡ System Fingerprint (${currentSystemHash.substring(0,8)}) matches latest build. Skipping Report.`);
-
-        // Create a "Skipped" record for the new version so we know it deployed
         await db.collection('computation_build_records').doc(buildId).set({
           buildId,
           packageVersion,
           systemFingerprint: currentSystemHash,
           status: 'SKIPPED_IDENTICAL',
-          referenceBuild: latestData.buildId,
+          referenceBuild: latestData.buildId,
           generatedAt: new Date().toISOString()
         });
-
-        // Release lock and exit
         lockRef.update({ status: 'SKIPPED', completedAt: new Date() }).catch(() => {});
         return;
       }
     }

     logger.log('INFO', `[BuildReporter] 🚀 Change Detected. Running Pre-flight Report for v${packageVersion}...`);
-
-    // Pass the fingerprint to generateBuildReport so it can save it
     await generateBuildReport(config, dependencies, manifest, 90, buildId, currentSystemHash);
-
     lockRef.update({ status: 'COMPLETED', completedAt: new Date() }).catch(() => {});

   } catch (e) {
@@ -275,14 +202,9 @@ async function ensureBuildReport(config, dependencies, manifest) {
   }
 }

-/**
- * Generates the report, writes to Firestore, AND FIXES STABLE UPDATES.
- */
 async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null, systemFingerprint = null) {
   const { db, logger } = dependencies;
   const buildId = customBuildId || `manual_${Date.now()}`;
-
-  // Calculate fingerprint if not provided (for manual runs)
   const finalFingerprint = systemFingerprint || getSystemFingerprint(manifest);

   logger.log('INFO', `[BuildReporter] Generating Build Report: ${buildId} (Scope: ${daysBack} days, Fingerprint: ${finalFingerprint.substring(0,8)})...`);
@@ -293,8 +215,6 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null, systemFingerprint = null) {

   const datesToCheck = getExpectedDateStrings(startDate, today);
   const manifestMap = new Map(manifest.map(c => [normalizeName(c.name), c]));
-
-  // [OPTIMIZATION] Cache SimHashes across dates so we only calculate once per code version
   const simHashCache = new Map();

   const reverseGraph = new Map();
@@ -312,16 +232,16 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null, systemFingerprint = null) {
   const reportHeader = {
     buildId,
     packageVersion,
-    systemFingerprint: finalFingerprint,
+    systemFingerprint: finalFingerprint,
     generatedAt: new Date().toISOString(),
     summary: {},
     _sharded: true
   };

   let totalRun = 0, totalReRun = 0, totalStable = 0;
-  const detailWrites = [];

-
+  // [FIX] Reduced concurrency from 20 to 5 to avoid Firestore DEADLINE_EXCEEDED
+  const limit = pLimit(5);

   const processingPromises = datesToCheck.map(dateStr => limit(async () => {
     try {
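The [FIX] comment in the hunk above is the main behavioural change in this release: per-date processing is now capped at five concurrent promises instead of twenty to avoid Firestore DEADLINE_EXCEEDED errors. For reference, this is how p-limit bounds concurrency; processDate here is a hypothetical stand-in for the per-date work.

// Minimal p-limit usage: at most 5 date-processing promises in flight at once.
// processDate is a hypothetical stand-in for the real per-date analysis.
const pLimit = require('p-limit');

const limit = pLimit(5);
const dates = ['2024-01-01', '2024-01-02', '2024-01-03'];

async function processDate(dateStr) {
  // stand-in for the real per-date analysis + Firestore writes
  return `${dateStr}: done`;
}

Promise.all(dates.map(d => limit(() => processDate(d))))
  .then(results => console.log(results));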
@@ -374,29 +294,22 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null, systemFingerprint = null) {
       // 1. RUN
       analysis.runnable.forEach(item => pushIfValid(dateSummary.run, item, "New Calculation"));

-      // 2. RE-RUN & STABLE Analysis
+      // 2. RE-RUN & STABLE Analysis
       if (analysis.reRuns.length > 0) {
-        // Pass simHashCache and db for registry writes
         const { trueReRuns, stableUpdates } = await verifyBehavioralStability(analysis.reRuns, manifestMap, dailyStatus, logger, simHashCache, db);

         trueReRuns.forEach(item => pushIfValid(dateSummary.rerun, item, "Logic Changed"));
         stableUpdates.forEach(item => pushIfValid(dateSummary.stable, item, "Cosmetic Change"));

-        // [CRITICAL FIX] "Fix the Blast Radius"
-        // If updates are STABLE, we update the status NOW.
-        // This implies: Code Hash changes, but Sim Hash stays same.
-        // The Dispatcher will see the new Code Hash in status matches the Manifest, so it won't dispatch.
         if (stableUpdates.length > 0) {
           const updatesPayload = {};
           for (const stable of stableUpdates) {
             const m = manifestMap.get(stable.name);
-            // We preserve the *existing* resultHash because the logic is proven stable.
-            // We update the 'hash' to the NEW code hash.
             if (m && dailyStatus[stable.name]) {
               updatesPayload[stable.name] = {
-                hash: m.hash,
-                simHash: stable.simHash,
-                resultHash: dailyStatus[stable.name].resultHash,
+                hash: m.hash,
+                simHash: stable.simHash,
+                resultHash: dailyStatus[stable.name].resultHash,
                 dependencyResultHashes: dailyStatus[stable.name].dependencyResultHashes || {},
                 category: m.category,
                 composition: m.composition,
@@ -404,7 +317,6 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null, systemFingerprint = null) {
               };
             }
           }
-          // Perform the "Fix"
           if (Object.keys(updatesPayload).length > 0) {
             await updateComputationStatus(dateStr, updatesPayload, config, dependencies);
             logger.log('INFO', `[BuildReporter] 🩹 Fixed ${Object.keys(updatesPayload).length} stable items for ${dateStr}. They will NOT re-run.`);
@@ -424,11 +336,8 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null, systemFingerprint = null) {
       dateSummary.meta.totalIncluded = includedCount;
       dateSummary.meta.match = (includedCount === expectedCount);

-      //
-
-
-      // ADD THIS (Write immediately):
-      await db.collection('computation_build_records')
+      // Write Immediately
+      await db.collection('computation_build_records')
         .doc(buildId)
         .collection('details')
         .doc(dateStr)
@@ -450,10 +359,6 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null, systemFingerprint = null) {

   await db.collection('computation_build_records').doc(buildId).set(reportHeader);

-  // Parallel write details ---> Now redundant.
-  //const writeLimit = pLimit(15);
-  //await Promise.all(detailWrites.map(w => writeLimit(() => w.ref.set(w.data))));
-
   await db.collection('computation_build_records').doc('latest').set({ ...reportHeader, note: "Latest build report pointer." });

   logger.log('SUCCESS', `[BuildReporter] Report ${buildId} saved. Re-runs: ${totalReRun}, Stable (Fixed): ${totalStable}, New: ${totalRun}.`);