bulltrackers-module 1.0.732 → 1.0.733
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/orchestrator/index.js +19 -17
- package/index.js +8 -29
- package/package.json +1 -1
- package/functions/computation-system/WorkflowOrchestrator.js +0 -213
- package/functions/computation-system/config/monitoring_config.js +0 -31
- package/functions/computation-system/config/validation_overrides.js +0 -10
- package/functions/computation-system/context/ContextFactory.js +0 -143
- package/functions/computation-system/context/ManifestBuilder.js +0 -379
- package/functions/computation-system/data/AvailabilityChecker.js +0 -236
- package/functions/computation-system/data/CachedDataLoader.js +0 -325
- package/functions/computation-system/data/DependencyFetcher.js +0 -455
- package/functions/computation-system/executors/MetaExecutor.js +0 -279
- package/functions/computation-system/executors/PriceBatchExecutor.js +0 -108
- package/functions/computation-system/executors/StandardExecutor.js +0 -465
- package/functions/computation-system/helpers/computation_dispatcher.js +0 -750
- package/functions/computation-system/helpers/computation_worker.js +0 -375
- package/functions/computation-system/helpers/monitor.js +0 -64
- package/functions/computation-system/helpers/on_demand_helpers.js +0 -154
- package/functions/computation-system/layers/extractors.js +0 -1097
- package/functions/computation-system/layers/index.js +0 -40
- package/functions/computation-system/layers/mathematics.js +0 -522
- package/functions/computation-system/layers/profiling.js +0 -537
- package/functions/computation-system/layers/validators.js +0 -170
- package/functions/computation-system/legacy/AvailabilityCheckerOld.js +0 -388
- package/functions/computation-system/legacy/CachedDataLoaderOld.js +0 -357
- package/functions/computation-system/legacy/DependencyFetcherOld.js +0 -478
- package/functions/computation-system/legacy/MetaExecutorold.js +0 -364
- package/functions/computation-system/legacy/StandardExecutorold.js +0 -476
- package/functions/computation-system/legacy/computation_dispatcherold.js +0 -944
- package/functions/computation-system/logger/logger.js +0 -297
- package/functions/computation-system/persistence/ContractValidator.js +0 -81
- package/functions/computation-system/persistence/FirestoreUtils.js +0 -56
- package/functions/computation-system/persistence/ResultCommitter.js +0 -283
- package/functions/computation-system/persistence/ResultsValidator.js +0 -130
- package/functions/computation-system/persistence/RunRecorder.js +0 -142
- package/functions/computation-system/persistence/StatusRepository.js +0 -52
- package/functions/computation-system/reporter_epoch.js +0 -6
- package/functions/computation-system/scripts/UpdateContracts.js +0 -128
- package/functions/computation-system/services/SnapshotService.js +0 -148
- package/functions/computation-system/simulation/Fabricator.js +0 -285
- package/functions/computation-system/simulation/SeededRandom.js +0 -41
- package/functions/computation-system/simulation/SimRunner.js +0 -51
- package/functions/computation-system/system_epoch.js +0 -2
- package/functions/computation-system/tools/BuildReporter.js +0 -531
- package/functions/computation-system/tools/ContractDiscoverer.js +0 -144
- package/functions/computation-system/tools/DeploymentValidator.js +0 -536
- package/functions/computation-system/tools/FinalSweepReporter.js +0 -322
- package/functions/computation-system/topology/HashManager.js +0 -55
- package/functions/computation-system/topology/ManifestLoader.js +0 -47
- package/functions/computation-system/utils/data_loader.js +0 -675
- package/functions/computation-system/utils/schema_capture.js +0 -121
- package/functions/computation-system/utils/utils.js +0 -188
|
@@ -1,322 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* FILENAME: computation-system/tools/FinalSweepReporter.js
|
|
3
|
-
* PURPOSE: Comprehensive forensic tool to diagnose why computations failed.
|
|
4
|
-
* EXECUTION: Triggered by Dispatcher (via Workflow) after execution passes complete.
|
|
5
|
-
*/
|
|
6
|
-
|
|
7
|
-
const { normalizeName, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils');
|
|
8
|
-
const { fetchComputationStatus } = require('../persistence/StatusRepository');
|
|
9
|
-
const { checkRootDataAvailability } = require('../data/AvailabilityChecker');
|
|
10
|
-
const { analyzeDateExecution } = require('../WorkflowOrchestrator');
|
|
11
|
-
|
|
12
|
-
const REPORT_COLLECTION = 'Final_sweep_check_report';
|
|
13
|
-
const LEDGER_BASE = 'computation_audit_ledger';
|
|
14
|
-
const STALE_THRESHOLD_MS = 1000 * 60 * 20; // 20 minutes
|
|
15
|
-
|
|
16
|
-
/**
 * Forensic reporter that explains why computations for a given date/pass
 * did not complete. It cross-references the orchestrator's execution
 * analysis, the audit ledger, root-data availability and the results
 * store, then writes one report document per detected issue under
 * Final_sweep_check_report/{date}/{computation}/document.
 */
class FinalSweepReporter {
    /**
     * @param {Object} config - System configuration (collection names, etc.).
     * @param {Object} dependencies - Injected services: { db, logger }.
     */
    constructor(config, dependencies) {
        this.config = config;
        this.db = dependencies.db;
        this.logger = dependencies.logger;
    }

    /**
     * Main Entry Point
     * @param {string} dateStr - Target date (YYYY-MM-DD)
     * @param {string} passId - The pass to analyze ("1", "2"...)
     * @param {Array} manifest - Full computation manifest
     * @returns {Promise<{issuesCount: number}>} Count of forensic reports written.
     */
    async runSweep(dateStr, passId, manifest) {
        this.logger.log('INFO', `[FinalSweep] 🕵️ Starting forensic analysis for ${dateStr} (Pass ${passId})`);

        // 1. Filter Manifest for Target Pass
        const passCalcs = manifest.filter(c => String(c.pass) === passId);
        if (passCalcs.length === 0) {
            this.logger.log('WARN', `[FinalSweep] No calculations found for Pass ${passId}.`);
            return { issuesCount: 0 };
        }

        const manifestMap = new Map(manifest.map(c => [normalizeName(c.name), c]));

        // 2. Fetch Global State (today's status index + root data availability, in parallel)
        const [dailyStatus, availability] = await Promise.all([
            fetchComputationStatus(dateStr, this.config, { db: this.db }),
            checkRootDataAvailability(dateStr, this.config, { db: this.db, logger: this.logger }, DEFINITIVE_EARLIEST_DATES)
        ]);

        // 3. Fetch Yesterday (if needed for historical checks)
        let prevDailyStatus = null;
        if (passCalcs.some(c => c.isHistorical)) {
            const prevDate = new Date(dateStr + 'T00:00:00Z');
            prevDate.setUTCDate(prevDate.getUTCDate() - 1);
            // Never look back past the earliest date for which data can exist.
            if (prevDate >= DEFINITIVE_EARLIEST_DATES.absoluteEarliest) {
                prevDailyStatus = await fetchComputationStatus(prevDate.toISOString().slice(0, 10), this.config, { db: this.db });
            }
        }

        const rootDataStatus = availability ? availability.status : {};

        // 4. Run Logic Analysis
        // We use the Orchestrator's logic to see who SHOULD have run but didn't
        const analysis = analyzeDateExecution(
            dateStr,
            passCalcs,
            rootDataStatus,
            dailyStatus,
            manifestMap,
            prevDailyStatus
        );

        // Collect all potential issues
        const problematicItems = [
            ...analysis.impossible.map(x => ({ ...x, category: 'IMPOSSIBLE' })),
            ...analysis.blocked.map(x => ({ ...x, category: 'BLOCKED' })),
            ...analysis.failedDependency.map(x => ({ ...x, category: 'DEPENDENCY_FAILURE' })),
            // If it's still 'RUNNABLE' after the final pass, it means it was skipped/missed/failed silently
            ...analysis.runnable.map(x => ({ ...x, category: 'STUCK_RUNNABLE' })),
            ...analysis.reRuns.map(x => ({ ...x, category: 'STUCK_RERUN' }))
        ];

        // 5. Deep Investigation & Reporting
        let issuesCount = 0;
        let batch = this.db.batch();
        let opCount = 0;
        // FIX: Firestore caps a WriteBatch at 500 operations. Flush before the
        // cap so a sweep with many issues cannot make the final commit() throw
        // (which would previously have lost every queued report).
        const MAX_BATCH_OPS = 450;
        const flushIfFull = async () => {
            if (opCount >= MAX_BATCH_OPS) {
                await batch.commit();
                batch = this.db.batch();
                opCount = 0;
            }
        };

        // A. Analyze Standard Problems
        for (const item of problematicItems) {
            const forensics = await this.investigateComputation(
                item.name, dateStr, passId, item.category, manifestMap,
                dailyStatus, prevDailyStatus, rootDataStatus
            );

            if (forensics) {
                this._queueReportWrite(batch, dateStr, item.name, forensics);
                opCount++;
                issuesCount++;
                await flushIfFull();
            }
        }

        // B. Analyze "Ghost" Completions (Marked Complete but Empty)
        for (const calc of passCalcs) {
            const name = normalizeName(calc.name);
            const status = dailyStatus[name];

            // Status index claims this exact code version completed — verify the data landed.
            if (status && status.hash === calc.hash) {
                const hasData = await this.verifyDataExists(name, calc.category, dateStr);
                if (!hasData) {
                    const forensics = {
                        computation: name,
                        date: dateStr,
                        category: 'GHOST_COMPLETION',
                        rootCause: 'DATA_WRITE_FAILURE',
                        reason: 'Status index says COMPLETE, but storage document is missing.',
                        severity: 'HIGH',
                        recommendations: ['Force Re-run', 'Check Write Permissions']
                    };
                    this._queueReportWrite(batch, dateStr, name, forensics);
                    opCount++;
                    issuesCount++;
                    await flushIfFull();
                }
            }
        }

        if (opCount > 0) await batch.commit();

        this.logger.log('SUCCESS', `[FinalSweep] Pass ${passId}: Generated ${issuesCount} forensic reports.`);
        return { issuesCount };
    }

    /**
     * Queues a single forensic report write onto the given batch.
     * @param {Object} batch - Firestore WriteBatch to append to.
     * @param {string} dateStr - Target date (YYYY-MM-DD).
     * @param {string} calcName - Computation name (normalized before use as a path segment).
     * @param {Object} forensics - Report payload.
     */
    _queueReportWrite(batch, dateStr, calcName, forensics) {
        // Path: Final_sweep_check_report/YYYY-MM-DD/Computation_Name/document
        const reportRef = this.db.collection(REPORT_COLLECTION)
            .doc(dateStr)
            .collection(normalizeName(calcName))
            .doc('document');

        batch.set(reportRef, {
            ...forensics,
            generatedAt: new Date().toISOString()
        });
    }

    /**
     * Deep dive investigation logic: inspects the audit ledger, root-data
     * availability and the dependency chain to attribute a root cause.
     * @returns {Promise<Object|null>} Forensics record, or null if the
     *   computation is unknown to the manifest.
     */
    async investigateComputation(calcName, dateStr, passId, category, manifestMap, dailyStatus, prevDailyStatus, rootDataStatus) {
        const manifest = manifestMap.get(calcName);
        if (!manifest) return null;

        const forensics = {
            computation: calcName,
            pass: passId,
            date: dateStr,
            category: category,
            rootCause: null,
            reason: null,
            ledgerState: null,
            recommendations: [],
            severity: 'MEDIUM'
        };

        // 1. CHECK AUDIT LEDGER
        const ledgerPath = `${LEDGER_BASE}/${dateStr}/passes/${passId}/tasks/${normalizeName(calcName)}`;
        const ledgerSnap = await this.db.doc(ledgerPath).get();

        if (ledgerSnap.exists) {
            const data = ledgerSnap.data();
            // Filter out undefined values to prevent Firestore errors
            const ledgerState = {
                status: data.status,
                workerId: data.workerId
            };
            // Only include error if it's defined
            if (data.error !== undefined && data.error !== null) {
                ledgerState.error = data.error;
            }
            forensics.ledgerState = ledgerState;

            if (['PENDING', 'IN_PROGRESS'].includes(data.status)) {
                // CRITICAL FIX: Handle Firestore Timestamp objects correctly
                const getMillis = (field) => {
                    if (!field) return 0;
                    if (field.toDate && typeof field.toDate === 'function') {
                        return field.toDate().getTime();
                    }
                    const date = new Date(field);
                    return isNaN(date.getTime()) ? 0 : date.getTime();
                };
                // Prefer the latest heartbeat; fall back to task start time.
                const lastHb = getMillis(data.telemetry?.lastHeartbeat) || getMillis(data.startedAt);
                if (Date.now() - lastHb > STALE_THRESHOLD_MS) {
                    forensics.rootCause = 'ZOMBIE_PROCESS';
                    forensics.reason = `Worker ${data.workerId} stopped heartbeating. Likely crashed/timeout.`;
                    forensics.severity = 'CRITICAL';
                } else {
                    forensics.rootCause = 'STILL_RUNNING'; // Rare if sweep runs after everything
                }
            } else if (data.status === 'FAILED') {
                if (data.resourceTier === 'high-mem') {
                    forensics.rootCause = 'CRASH_HIGH_MEM';
                    forensics.reason = 'Failed even on High-Memory tier. Code optimization required.';
                    forensics.severity = 'CRITICAL';
                } else {
                    forensics.rootCause = 'FAILED_STANDARD';
                    forensics.reason = data.error?.message || 'Unknown Error';
                }

                // Memory failures override the generic classification above.
                if (data.error?.stage === 'OOM' || (data.error?.message || '').includes('memory')) {
                    forensics.rootCause = 'OUT_OF_MEMORY';
                }
            }
        } else {
            forensics.ledgerState = 'NEVER_DISPATCHED';
            if (category === 'STUCK_RUNNABLE') {
                forensics.rootCause = 'DISPATCHER_MISS';
                forensics.reason = 'Logic says runnable, but Dispatcher never queued it.';
                forensics.severity = 'HIGH';
            }
        }

        // 2. ROOT DATA ANALYSIS
        if (category === 'IMPOSSIBLE') {
            const missing = [];
            const userType = manifest.userType || 'all';
            const deps = manifest.rootDataDependencies || [];

            if (deps.includes('portfolio')) {
                if (userType === 'speculator' && !rootDataStatus.speculatorPortfolio) missing.push('speculatorPortfolio');
                else if (userType === 'normal' && !rootDataStatus.normalPortfolio) missing.push('normalPortfolio');
                else if (userType === 'all' && !rootDataStatus.hasPortfolio) missing.push('portfolio');
            }
            if (deps.includes('history')) {
                if (userType === 'speculator' && !rootDataStatus.speculatorHistory) missing.push('speculatorHistory');
                else if (userType === 'normal' && !rootDataStatus.normalHistory) missing.push('normalHistory');
                else if (userType === 'all' && !rootDataStatus.hasHistory) missing.push('history');
            }
            if (deps.includes('price') && !rootDataStatus.hasPrices) missing.push('price');
            if (deps.includes('insights') && !rootDataStatus.hasInsights) missing.push('insights');

            forensics.rootCause = 'MISSING_ROOT_DATA';
            forensics.reason = `Missing: ${missing.join(', ')}`;
            forensics.severity = 'LOW'; // Expected behavior
        }

        // 3. DEPENDENCY ANALYSIS
        if (category === 'DEPENDENCY_FAILURE' || category === 'BLOCKED') {
            const chain = await this.traceDependencyChain(calcName, manifestMap, dailyStatus, prevDailyStatus, manifest.isHistorical);
            if (chain.length > 0) {
                // The deepest entry in the chain is the original blocker.
                const root = chain[chain.length - 1];
                forensics.rootCause = 'UPSTREAM_FAILURE';
                forensics.reason = `Blocked by ${root.name} (${root.reason})`;
                forensics.chain = chain;
            }
        }

        return forensics;
    }

    /**
     * Walks the dependency graph of a computation and records every
     * dependency whose status is missing, IMPOSSIBLE, or stale
     * (hash mismatch). Cycles are guarded via a visited set.
     * @returns {Promise<Array<{name: string, reason: string, isPrev: boolean}>>}
     */
    async traceDependencyChain(calcName, manifestMap, dailyStatus, prevDailyStatus, isHistorical) {
        const chain = [];
        const visited = new Set();

        const trace = (name, isPrev) => {
            if (visited.has(name)) return;
            visited.add(name);

            const m = manifestMap.get(name);
            if (!m) return; // Unknown calc

            const status = isPrev ? (prevDailyStatus?.[name]) : (dailyStatus?.[name]);

            if (!status) {
                chain.push({ name, reason: 'MISSING_STATUS', isPrev });
                return;
            }
            if (String(status.hash).startsWith('IMPOSSIBLE')) {
                chain.push({ name, reason: 'IMPOSSIBLE', isPrev });
                return;
            }
            if (m.hash !== status.hash) {
                chain.push({ name, reason: 'VERSION_MISMATCH', isPrev });
                return;
            }

            // This dependency looks healthy — recurse into its own deps.
            if (m.dependencies) {
                for (const dep of m.dependencies) trace(normalizeName(dep), false);
            }
        };

        const m = manifestMap.get(calcName);
        if (m) {
            if (m.dependencies) m.dependencies.forEach(d => trace(normalizeName(d), false));
            // Historical calcs also depend on their own previous-day result.
            if (isHistorical) trace(calcName, true);
        }
        return chain;
    }

    /**
     * Verifies that the result document for a computation actually exists
     * and is flagged complete. Best-effort: any read error is treated as
     * "data missing" rather than propagated.
     * @returns {Promise<boolean>}
     */
    async verifyDataExists(calcName, category, dateStr) {
        try {
            const docRef = this.db.collection(this.config.resultsCollection)
                .doc(dateStr)
                .collection(this.config.resultsSubcollection)
                .doc(category)
                .collection(this.config.computationsSubcollection)
                .doc(calcName);

            const snap = await docRef.get();
            if (!snap.exists) return false;

            const data = snap.data();
            if (data._completed === true) return true;
            return false;
        } catch (e) {
            return false;
        }
    }
}
|
|
316
|
-
|
|
317
|
-
/**
 * Convenience wrapper: constructs a FinalSweepReporter and runs one sweep.
 * @param {Object} config - System configuration.
 * @param {Object} dependencies - Injected services ({ db, logger }).
 * @param {string} dateStr - Target date (YYYY-MM-DD).
 * @param {string} pass - Pass identifier to analyze.
 * @param {Array} manifest - Full computation manifest.
 * @returns {Promise<{issuesCount: number}>} Sweep summary.
 */
async function runFinalSweepCheck(config, dependencies, dateStr, pass, manifest) {
    const sweep = new FinalSweepReporter(config, dependencies);
    return sweep.runSweep(dateStr, pass, manifest);
}
|
|
321
|
-
|
|
322
|
-
// Public API: single-shot forensic sweep entry point.
module.exports = { runFinalSweepCheck };
|
|
@@ -1,55 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* {
|
|
3
|
-
* type: uploaded file
|
|
4
|
-
* fileName: computation-system/topology/HashManager.js
|
|
5
|
-
* }
|
|
6
|
-
*/
|
|
7
|
-
/**
|
|
8
|
-
* @fileoverview Manages code versioning and legacy mappings.
|
|
9
|
-
* UPDATED: Removed global infrastructure scanning. Now relies on Manual Epochs.
|
|
10
|
-
*/
|
|
11
|
-
const crypto = require('crypto');
|
|
12
|
-
|
|
13
|
-
// Legacy Keys Mapping (Ensures backward compatibility)
// Maps modern module/class names to the short keys used historically,
// so consumers keyed on the legacy names still resolve.
// NOTE(review): 'TimeSeries' and 'priceExtractor' map to themselves —
// presumably they never had a distinct legacy key; confirm with consumers.
const LEGACY_MAPPING = {
    DataExtractor: 'extract',
    HistoryExtractor: 'history',
    MathPrimitives: 'compute',
    Aggregators: 'aggregate',
    Validators: 'validate',
    SignalPrimitives: 'signals',
    SCHEMAS: 'schemas',
    DistributionAnalytics: 'distribution',
    TimeSeries: 'TimeSeries',
    priceExtractor: 'priceExtractor',
    InsightsExtractor: 'insights',
    UserClassifier: 'classifier',
    Psychometrics: 'psychometrics',
    CognitiveBiases: 'bias',
    SkillAttribution: 'skill',
    ExecutionAnalytics: 'execution',
    AdaptiveAnalytics: 'adaptive'
};
|
|
33
|
-
|
|
34
|
-
/**
 * Generates a SHA-256 hex digest of a code string, after stripping line
 * comments, block comments, and all whitespace — so cosmetically different
 * but token-identical code hashes equal ("loose equality").
 * @param {string} codeString - Source text to fingerprint.
 * @returns {string} 64-char hex digest, or the sentinel 'unknown' for
 *   empty/missing input.
 */
function generateCodeHash(codeString) {
    if (!codeString) return 'unknown';
    // Normalization passes, applied in order: line comments, block
    // comments, then every run of whitespace.
    const normalized = [
        [/\/\/.*$/gm, ''],
        [/\/\*[\s\S]*?\*\//g, ''],
        [/\s+/g, '']
    ].reduce((text, [pattern, replacement]) => text.replace(pattern, replacement), codeString);
    return crypto.createHash('sha256').update(normalized).digest('hex');
}
|
|
45
|
-
|
|
46
|
-
/**
 * DEPRECATED: Previously walked the file system to fingerprint shared
 * infrastructure. Global versioning now relies on SYSTEM_EPOCH, so this
 * simply returns a fixed sentinel. Kept for backward compatibility with
 * older consumers.
 * @returns {string} The constant sentinel 'MANUAL_EPOCH_MODE'.
 */
function getInfrastructureHash() {
    const sentinel = 'MANUAL_EPOCH_MODE';
    return sentinel;
}
|
|
54
|
-
|
|
55
|
-
// Public API: legacy key mapping plus hashing helpers.
module.exports = { LEGACY_MAPPING, generateCodeHash, getInfrastructureHash };
|
|
@@ -1,47 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @fileoverview Singleton Loader for the Manifest.
|
|
3
|
-
* Prevents expensive manifest rebuilding on every function invocation if not needed.
|
|
4
|
-
*/
|
|
5
|
-
const { build } = require('../context/ManifestBuilder');
|
|
6
|
-
const { StructuredLogger, PROCESS_TYPES, generateProcessId } = require('../logger/logger');
|
|
7
|
-
|
|
8
|
-
// [FIX] Cache using a Map to handle different productLine combinations
// Keyed by the JSON-serialized, sorted productLines array (see getManifest).
const manifestCache = new Map();
|
|
10
|
-
|
|
11
|
-
/**
 * Returns the computation manifest for the requested product lines,
 * building it at most once per distinct productLines combination and
 * serving subsequent calls from an in-memory cache.
 * @param {Array<string>} [productLines] - Product lines to scope the build.
 * @param {string} calculationsDir - Directory containing calculation modules.
 * @param {Object} [dependencies] - Optional injected services ({ logger }).
 * @returns {Array} The (possibly cached) manifest.
 * @throws Re-throws any error raised by the underlying build.
 */
function getManifest(productLines = [], calculationsDir, dependencies = {}) {
    // Cache key: the sorted product-line set; a nullish scope maps to 'ALL'.
    const cacheKey = JSON.stringify(productLines ? productLines.slice().sort() : ['ALL']);

    if (manifestCache.has(cacheKey)) {
        return manifestCache.get(cacheKey);
    }

    const logger = dependencies.logger || new StructuredLogger();
    const pid = generateProcessId(PROCESS_TYPES.MANIFEST, 'build', new Date().toISOString().slice(0,10));

    logger.log('INFO', 'Starting Manifest Build...', { processId: pid, scope: cacheKey });

    const startTime = Date.now();
    try {
        const manifest = build(productLines, calculationsDir);

        // Summarize topology: how many calculations land in each pass.
        const passCounts = {};
        for (const calc of manifest) {
            passCounts[calc.pass] = (passCounts[calc.pass] || 0) + 1;
        }

        logger.log('INFO', 'Manifest Build Success', {
            processId: pid,
            durationMs: Date.now() - startTime,
            totalCalculations: manifest.length,
            topology: passCounts
        });

        manifestCache.set(cacheKey, manifest);
        return manifest;
    } catch (e) {
        logger.log('FATAL', 'Manifest Build Failed', { processId: pid, error: e.message });
        throw e;
    }
}
|
|
46
|
-
|
|
47
|
-
// Public API: cached manifest accessor.
module.exports = { getManifest };
|