bulltrackers-module 1.0.287 → 1.0.288
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,9 +2,10 @@
  * @fileoverview Dynamic Manifest Builder - Handles Topological Sort and Auto-Discovery.
  * UPDATED: Generates Granular Hash Composition for Audit Trails.
  * UPGRADE: Implements Tarjan's Algorithm for Precise Cycle Detection.
+ * FIXED: Now incorporates System Infrastructure Hash into Calculation Hashes.
  */
-const { generateCodeHash, LEGACY_MAPPING } = require('../topology/HashManager.js');
-const { normalizeName }
+const { generateCodeHash, getInfrastructureHash, LEGACY_MAPPING } = require('../topology/HashManager.js');
+const { normalizeName } = require('../utils/utils');

 const SYSTEM_EPOCH = require('../system_epoch');

@@ -138,7 +139,6 @@ function detectCircularDependencies(manifestMap) {
   }

   if (cycles.length > 0) {
-    // Format the first cycle for display
     const cycle = cycles[0];
     return cycle.join(' -> ') + ' -> ' + cycle[0];
   }
@@ -148,6 +148,10 @@ function detectCircularDependencies(manifestMap) {
 function buildManifest(productLinesToRun = [], calculations) {
   log.divider('Building Dynamic Manifest');

+  // [CRITICAL FIX] Calculate Infrastructure Hash once per build
+  const INFRA_HASH = getInfrastructureHash();
+  log.info(`[ManifestBuilder] System Infrastructure Hash: ${INFRA_HASH.substring(0, 8)}`);
+
   const requestedLog = (!productLinesToRun || productLinesToRun.length === 0)
     ? "ALL (Wildcard/Empty)"
     : productLinesToRun.join(', ');
@@ -171,7 +175,9 @@ function buildManifest(productLinesToRun = [], calculations) {
   const codeStr = Class.toString();
   const selfCodeHash = generateCodeHash(codeStr);

-
+  // [CRITICAL FIX] Include INFRA_HASH in the composite signature
+  // This ensures that if the system platform changes, ALL calculations are considered "changed"
+  let compositeHashString = selfCodeHash + `|EPOCH:${SYSTEM_EPOCH}|INFRA:${INFRA_HASH}`;

   const usedDeps = [];
   const usedLayerHashes = {};
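The hunk above does not show how compositeHashString is folded into the entry's final hash, but the intent of the fix is visible in the string format itself: the infrastructure hash becomes a component of every calculation's signature. A minimal, illustrative sketch using stand-in values (not the package's actual pipeline):

    // Illustrative only: shows why a changed INFRA_HASH flips every composite signature.
    const crypto = require('crypto');
    const sha256 = (s) => crypto.createHash('sha256').update(s).digest('hex');

    // Stand-ins for a calculation's own code hash and the system epoch
    const selfCodeHash = sha256('function calc() { return 1; }');
    const SYSTEM_EPOCH = 3;

    // Same calculation code, two different infrastructure states
    const sigBefore = sha256(selfCodeHash + `|EPOCH:${SYSTEM_EPOCH}|INFRA:${'a'.repeat(64)}`);
    const sigAfter  = sha256(selfCodeHash + `|EPOCH:${SYSTEM_EPOCH}|INFRA:${'b'.repeat(64)}`);

    console.log(sigBefore === sigAfter); // false: an infra-only change still marks the calculation as changed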
@@ -221,6 +227,7 @@ function buildManifest(productLinesToRun = [], calculations) {
     composition: {
       epoch: SYSTEM_EPOCH,
       code: selfCodeHash,
+      infra: INFRA_HASH, // Stored in composition for audit
       layers: layerComposition,
       deps: {}
     },
@@ -235,9 +242,9 @@ function buildManifest(productLinesToRun = [], calculations) {

   for (const folderName in calculations) {
     if (folderName === 'legacy') continue;
-    const
-    for (const key in
-    const entry =
+    const calculationGroup = calculations[folderName];
+    for (const key in calculationGroup) {
+      const entry = calculationGroup[key];
       if (typeof entry === 'function') { processCalc(entry, key, folderName); }
     }
   }
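For context, the loop above implies a two-level shape for calculations: a folder name mapping to named entries, where only function (class) entries are registered. An assumed, illustrative shape (names are hypothetical, not from the package):

    // Hypothetical input shape, inferred from the loop above; names are illustrative only.
    const calculations = {
      analytics: {
        RevenueCalc: class RevenueCalc {},   // entry is a function/class: passed to processCalc
        metadata: { owner: 'team-a' },       // not a function: skipped
      },
      legacy: {},                            // skipped entirely by the 'legacy' guard
    };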
@@ -312,7 +319,6 @@ function buildManifest(productLinesToRun = [], calculations) {
     queue.sort();
   }

-  // UPGRADE: Use Tarjan's for precise cycle reporting
   if (sortedManifest.length !== filteredManifestMap.size) {
     const cycle = detectCircularDependencies(filteredManifestMap);
     if (cycle) {
@@ -1,33 +1,107 @@
+const { analyzeDateExecution } = require('../WorkflowOrchestrator');
+const { fetchComputationStatus, updateComputationStatus } = require('../persistence/StatusRepository');
+const { normalizeName, getExpectedDateStrings, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils');
+const { checkRootDataAvailability } = require('../data/AvailabilityChecker');
+const SimRunner = require('../simulation/SimRunner');
+const pLimit = require('p-limit');
+const path = require('path');
+const crypto = require('crypto');
+const fs = require('fs'); // [NEW] Required for file reading
+const packageJson = require(path.join(__dirname, '..', '..', '..', 'package.json'));
+const packageVersion = packageJson.version;
+const { generateCodeHash } = require('../utils/utils'); // Reuse your standard logic
+
+// Persistent Registry for SimHashes
+const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';
+
+// --- [NEW] RECURSIVE SYSTEM HASHING ---
+
+// 1. define the Root of the system (one level up from 'tools')
+const SYSTEM_ROOT = path.resolve(__dirname, '..');
+
+// 2. Define what to ignore to prevent noise or infinite loops
+const IGNORED_DIRS = new Set(['node_modules', '.git', '.idea', 'coverage', 'logs', 'tests']);
+const IGNORED_FILES = new Set(['package-lock.json', '.DS_Store', '.env']);
+
+/**
+ * Recursively walks a directory and returns a list of file paths.
+ */
+function walkSync(dir, fileList = []) {
+  const files = fs.readdirSync(dir);
+
+  files.forEach(file => {
+    if (IGNORED_FILES.has(file)) return;
+
+    const filePath = path.join(dir, file);
+    const stat = fs.statSync(filePath);
+
+    if (stat.isDirectory()) {
+      if (!IGNORED_DIRS.has(file)) {
+        walkSync(filePath, fileList);
+      }
+    } else {
+      // Only hash code files (add .yaml if you want workflows included)
+      if (file.endsWith('.js') || file.endsWith('.json') || file.endsWith('.yaml')) {
+        fileList.push(filePath);
+      }
+    }
+  });
+  return fileList;
+}
+
 /**
- *
- * Generates a "Pre-Flight" report of what the computation system WILL do.
- * UPGRADED: Implements Behavioral Hashing (SimHash) to detect Cosmetic vs Logic changes.
- * OPTIMIZED: Caches SimHashes and actively updates status for Stable items to prevent re-runs.
- * OPTIMIZED (V2): Implements System Fingerprinting to skip 90-day scan if manifest is identical.
+ * Generates a single hash representing the entire infrastructure code state.
  */
+function getInfrastructureHash() {
+  try {
+    const allFiles = walkSync(SYSTEM_ROOT);
+    allFiles.sort(); // Crucial for determinism
+
+    const bigHash = crypto.createHash('sha256');
+
+    for (const filePath of allFiles) {
+      const content = fs.readFileSync(filePath, 'utf8');
+      const relativePath = path.relative(SYSTEM_ROOT, filePath);
+
+      // DECISION: How to clean?
+      let cleanContent = content;
+
+      // 1. If it's JS, use your system standard for code hashing
+      if (filePath.endsWith('.js')) {
+        // This strips comments and whitespace consistently with ManifestBuilder
+        // Note: generateCodeHash returns a hash, we can just use that hash
+        cleanContent = generateCodeHash(content);
+      }
+      // 2. If it's JSON/YAML, just strip basic whitespace to ignore indent changes
+      else {
+        cleanContent = content.replace(/\s+/g, '');
+      }

-
-
-
-
-
-
-
-
-
-
-
-// Persistent Registry for SimHashes (so Workers don't have to recalc)
-const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';
+      // Feed the PATH and the CLEAN CONTENT into the master hash
+      bigHash.update(`${relativePath}:${cleanContent}|`);
+    }
+
+    return bigHash.digest('hex');
+  } catch (e) {
+    console.warn(`[BuildReporter] ⚠️ Failed to generate infra hash: ${e.message}`);
+    return 'infra_hash_error'; // Fail safe so we run the report rather than crash
+  }
+}

 /**
- *
- * If ANY calculation logic or dependency changes, this hash changes.
+ * UPDATED: System Fingerprint = Manifest Hash + Infrastructure Hash
  */
 function getSystemFingerprint(manifest) {
-  //
-  const
-
+  // 1. Business Logic Hash (The Calculations)
+  const sortedManifestHashes = manifest.map(c => c.hash).sort().join('|');
+
+  // 2. Infrastructure Hash (The System Code)
+  const infraHash = getInfrastructureHash();
+
+  // 3. Combine
+  return crypto.createHash('sha256')
+    .update(sortedManifestHashes + infraHash)
+    .digest('hex');
 }

 /**
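The replaced file header mentioned skipping the 90-day scan when nothing has changed; this hunk shows how the fingerprint is computed but not where it is stored or compared. A hedged sketch of that comparison, with fetchStoredFingerprint and storeFingerprint as hypothetical persistence helpers that are not part of this diff:

    // Hypothetical wiring around getSystemFingerprint(); the storage helpers are assumptions.
    async function shouldRunFullScan(manifest, { fetchStoredFingerprint, storeFingerprint }) {
      const current = getSystemFingerprint(manifest); // manifest hashes + infrastructure hash
      const previous = await fetchStoredFingerprint();

      if (previous === current) {
        return false; // no calculation or infrastructure change: the expensive scan can be skipped
      }

      await storeFingerprint(current);
      return true;
    }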
@@ -1,7 +1,10 @@
 /**
  * @fileoverview Manages code versioning, hashing, and legacy mappings.
+ * UPDATED: Includes Centralized Infrastructure Hashing to track system-level changes.
  */
 const crypto = require('crypto');
+const fs = require('fs');
+const path = require('path');

 // Legacy Keys Mapping (Ensures backward compatibility)
 const LEGACY_MAPPING = {
@@ -24,6 +27,10 @@ const LEGACY_MAPPING = {
   AdaptiveAnalytics: 'adaptive'
 };

+/**
+ * Generates a SHA-256 hash of a code string.
+ * Strips comments and whitespace for loose equality.
+ */
 function generateCodeHash(codeString) {
   if (!codeString) return 'unknown';
   let clean = codeString.replace(/\/\/.*$/gm, '');
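As an illustration of the "loose equality" described in the new doc comment (assuming the ../topology/HashManager.js path used by ManifestBuilder, and that the normalization not shown in this hunk strips whitespace as the comment states), two snippets differing only in // comments and formatting should hash identically:

    const { generateCodeHash } = require('../topology/HashManager.js'); // path assumed from ManifestBuilder's require

    const a = generateCodeHash('function f(x) { return x + 1; } // bump');
    const b = generateCodeHash('function f(x) {\n  return x + 1;\n}');

    console.log(a === b); // expected true: comment and whitespace differences are cosmetic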
@@ -32,4 +39,66 @@ function generateCodeHash(codeString) {
   return crypto.createHash('sha256').update(clean).digest('hex');
 }

-
+// --- INFRASTRUCTURE HASHING (The "System Fingerprint") ---
+
+const SYSTEM_ROOT = path.resolve(__dirname, '..');
+const IGNORED_DIRS = new Set(['node_modules', '.git', '.idea', 'coverage', 'logs', 'tests', 'docs']);
+const IGNORED_FILES = new Set(['package-lock.json', '.DS_Store', '.env', 'README.md']);
+
+function walkSync(dir, fileList = []) {
+  const files = fs.readdirSync(dir);
+  files.forEach(file => {
+    if (IGNORED_FILES.has(file)) return;
+    const filePath = path.join(dir, file);
+    const stat = fs.statSync(filePath);
+    if (stat.isDirectory()) {
+      if (!IGNORED_DIRS.has(file)) {
+        walkSync(filePath, fileList);
+      }
+    } else {
+      // Hash JS, JSON, and YAML (Workflows)
+      if (file.endsWith('.js') || file.endsWith('.json') || file.endsWith('.yaml')) {
+        fileList.push(filePath);
+      }
+    }
+  });
+  return fileList;
+}
+
+/**
+ * Generates a hash representing the state of the entire computation-system codebase.
+ * This ensures that changes to infrastructure (like ContextFactory or Executors)
+ * trigger a re-run of calculations even if the calculation logic itself didn't change.
+ */
+function getInfrastructureHash() {
+  try {
+    const allFiles = walkSync(SYSTEM_ROOT);
+    allFiles.sort(); // Crucial for determinism
+
+    const bigHash = crypto.createHash('sha256');
+
+    for (const filePath of allFiles) {
+      const content = fs.readFileSync(filePath, 'utf8');
+      const relativePath = path.relative(SYSTEM_ROOT, filePath);
+
+      let cleanContent = content;
+
+      // Reuse the standard code hash logic for JS files to be consistent
+      if (filePath.endsWith('.js')) {
+        cleanContent = generateCodeHash(content);
+      } else {
+        // For JSON/YAML, just strip whitespace
+        cleanContent = content.replace(/\s+/g, '');
+      }
+
+      bigHash.update(`${relativePath}:${cleanContent}|`);
+    }
+
+    return bigHash.digest('hex');
+  } catch (e) {
+    console.warn(`[HashManager] ⚠️ Failed to generate infra hash: ${e.message}`);
+    return 'infra_error';
+  }
+}
+
+module.exports = { LEGACY_MAPPING, generateCodeHash, getInfrastructureHash };
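A minimal consumer sketch follows; the relative require path mirrors the ManifestBuilder hunk above and is an assumption here. Because walkSync uses synchronous fs calls over the whole source tree, the hash is best computed once per process (as the "[CRITICAL FIX] Calculate Infrastructure Hash once per build" comment in buildManifest suggests) rather than in a hot path:

    // Minimal usage sketch; the require path is assumed, mirroring ManifestBuilder above.
    const { getInfrastructureHash } = require('../topology/HashManager.js');

    const infraHash = getInfrastructureHash();
    console.log(`System Infrastructure Hash: ${infraHash.substring(0, 8)}`);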