bulltrackers-module 1.0.732 → 1.0.733
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/orchestrator/index.js +19 -17
- package/index.js +8 -29
- package/package.json +1 -1
- package/functions/computation-system/WorkflowOrchestrator.js +0 -213
- package/functions/computation-system/config/monitoring_config.js +0 -31
- package/functions/computation-system/config/validation_overrides.js +0 -10
- package/functions/computation-system/context/ContextFactory.js +0 -143
- package/functions/computation-system/context/ManifestBuilder.js +0 -379
- package/functions/computation-system/data/AvailabilityChecker.js +0 -236
- package/functions/computation-system/data/CachedDataLoader.js +0 -325
- package/functions/computation-system/data/DependencyFetcher.js +0 -455
- package/functions/computation-system/executors/MetaExecutor.js +0 -279
- package/functions/computation-system/executors/PriceBatchExecutor.js +0 -108
- package/functions/computation-system/executors/StandardExecutor.js +0 -465
- package/functions/computation-system/helpers/computation_dispatcher.js +0 -750
- package/functions/computation-system/helpers/computation_worker.js +0 -375
- package/functions/computation-system/helpers/monitor.js +0 -64
- package/functions/computation-system/helpers/on_demand_helpers.js +0 -154
- package/functions/computation-system/layers/extractors.js +0 -1097
- package/functions/computation-system/layers/index.js +0 -40
- package/functions/computation-system/layers/mathematics.js +0 -522
- package/functions/computation-system/layers/profiling.js +0 -537
- package/functions/computation-system/layers/validators.js +0 -170
- package/functions/computation-system/legacy/AvailabilityCheckerOld.js +0 -388
- package/functions/computation-system/legacy/CachedDataLoaderOld.js +0 -357
- package/functions/computation-system/legacy/DependencyFetcherOld.js +0 -478
- package/functions/computation-system/legacy/MetaExecutorold.js +0 -364
- package/functions/computation-system/legacy/StandardExecutorold.js +0 -476
- package/functions/computation-system/legacy/computation_dispatcherold.js +0 -944
- package/functions/computation-system/logger/logger.js +0 -297
- package/functions/computation-system/persistence/ContractValidator.js +0 -81
- package/functions/computation-system/persistence/FirestoreUtils.js +0 -56
- package/functions/computation-system/persistence/ResultCommitter.js +0 -283
- package/functions/computation-system/persistence/ResultsValidator.js +0 -130
- package/functions/computation-system/persistence/RunRecorder.js +0 -142
- package/functions/computation-system/persistence/StatusRepository.js +0 -52
- package/functions/computation-system/reporter_epoch.js +0 -6
- package/functions/computation-system/scripts/UpdateContracts.js +0 -128
- package/functions/computation-system/services/SnapshotService.js +0 -148
- package/functions/computation-system/simulation/Fabricator.js +0 -285
- package/functions/computation-system/simulation/SeededRandom.js +0 -41
- package/functions/computation-system/simulation/SimRunner.js +0 -51
- package/functions/computation-system/system_epoch.js +0 -2
- package/functions/computation-system/tools/BuildReporter.js +0 -531
- package/functions/computation-system/tools/ContractDiscoverer.js +0 -144
- package/functions/computation-system/tools/DeploymentValidator.js +0 -536
- package/functions/computation-system/tools/FinalSweepReporter.js +0 -322
- package/functions/computation-system/topology/HashManager.js +0 -55
- package/functions/computation-system/topology/ManifestLoader.js +0 -47
- package/functions/computation-system/utils/data_loader.js +0 -675
- package/functions/computation-system/utils/schema_capture.js +0 -121
- package/functions/computation-system/utils/utils.js +0 -188
package/functions/computation-system/tools/BuildReporter.js
@@ -1,531 +0,0 @@
-/**
- * FILENAME: computation-system/tools/BuildReporter.js
- * PURPOSE: Generates the "Build Report" - a comprehensive audit of the computation system's state.
- * UPDATED: Trigger logic now strictly depends on Package Version changes (Calculations OR Module).
- * UPDATED: Added Algorithmic "Dynamic Warnings" for impossibility analysis.
- */
-
-const { analyzeDateExecution } = require('../WorkflowOrchestrator');
-const { fetchComputationStatus, updateComputationStatus } = require('../persistence/StatusRepository');
-const { normalizeName, getExpectedDateStrings, DEFINITIVE_EARLIEST_DATES, getEarliestDataDates } = require('../utils/utils');
-const { checkRootDataAvailability } = require('../data/AvailabilityChecker');
-const SimRunner = require('../simulation/SimRunner');
-const SYSTEM_EPOCH = require('../system_epoch');
-const REPORTER_EPOCH = require('../reporter_epoch');
-const pLimit = require('p-limit');
-const path = require('path');
-const fs = require('fs');
-const crypto = require('crypto');
-
-const BUILD_RECORDS_COLLECTION = 'computation_build_records';
-const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';
-
-/**
- * Resolves version numbers for both the Infrastructure (Module) and the Logic (Calculations).
- */
-function getPackageVersions() {
-  let moduleVersion = 'unknown';
-  let calcVersion = 'unknown';
-
-  try {
-    const modulePkgPath = path.join(__dirname, '../../../../bulltrackers-module/package.json');
-    const modulepkg = JSON.parse(fs.readFileSync(modulePkgPath, 'utf8'));
-    moduleVersion = modulepkg.version;
-
-  } catch (e) {
-    moduleVersion = "1"
-    console.warn('[BuildReporter] Could not resolve Module version', e.message);
-  }
-
-  try {
-    const calcPkgPath = path.join(__dirname, '../../../../calculations/package.json');
-    const calcpkg = JSON.parse(fs.readFileSync(calcPkgPath, 'utf8'));
-    calcVersion = calcpkg.version;
-
-  } catch (e2) {
-    calcVersion = "1"
-    console.warn('[BuildReporter] Could not resolve Calculations version', e2.message);
-  }
-
-
-  return { moduleVersion, calcVersion };
-}
-
-/**
- * Publishes a message to trigger the dedicated Build Reporter Cloud Function.
- * Fire-and-forget to avoid blocking initialization.
- */
-async function requestBuildReport(config, dependencies) {
-  const { pubsubUtils, logger } = dependencies;
-
-  // Get versions (synchronous but fast, wrapped in try-catch)
-  let moduleVersion = 'unknown';
-  let calcVersion = 'unknown';
-  try {
-    const versions = getPackageVersions();
-    moduleVersion = versions.moduleVersion;
-    calcVersion = versions.calcVersion;
-  } catch (e) {
-    // If version resolution fails, use defaults
-    logger.log('WARN', `[BuildReporter] Version resolution failed, using defaults: ${e.message}`);
-  }
-
-  // Fire-and-forget: don't await, just log errors
-  pubsubUtils.publish(config.buildReporterTopic, {
-    requestedAt: new Date().toISOString(),
-    moduleVersion,
-    calcVersion
-  }).then(() => {
-    logger.log('INFO', `[BuildReporter] 🛰️ Trigger message sent to ${config.buildReporterTopic}`);
-  }).catch(e => {
-    logger.log('ERROR', `[BuildReporter] Failed to publish trigger: ${e.message}`);
-  });
-
-  return { success: true, status: 'PENDING' };
-}
-
-/**
- * Cloud Function Entry Point for the Build Reporter.
- */
-async function handleBuildReportTrigger(message, context, config, dependencies, manifest) {
-  const { logger } = dependencies;
-  logger.log('INFO', `[BuildReporter] 📥 Trigger received. Starting build analysis...`);
-  try {
-    return await generateBuildReport(config, dependencies, manifest);
-  } catch (e) {
-    logger.log('ERROR', `[BuildReporter] Fatal error in execution: ${e.message}`);
-    throw e;
-  }
-}
-
-function getSystemFingerprint(manifest) {
-  const sortedManifestHashes = manifest.map(c => c.hash).sort().join('|');
-  return crypto.createHash('sha256')
-    .update(sortedManifestHashes + SYSTEM_EPOCH + REPORTER_EPOCH)
-    .digest('hex');
-}
-
-function isDateBeforeAvailability(dateStr, calcManifest) {
-  const targetDate = new Date(dateStr + 'T00:00:00Z');
-  const deps = calcManifest.rootDataDependencies || [];
-  if (deps.length === 0) return false;
-  for (const dep of deps) {
-    let startDate = null;
-    if (dep === 'portfolio') startDate = DEFINITIVE_EARLIEST_DATES.portfolio;
-    else if (dep === 'history') startDate = DEFINITIVE_EARLIEST_DATES.history;
-    else if (dep === 'social') startDate = DEFINITIVE_EARLIEST_DATES.social;
-    else if (dep === 'insights') startDate = DEFINITIVE_EARLIEST_DATES.insights;
-    else if (dep === 'price') startDate = DEFINITIVE_EARLIEST_DATES.price;
-    if (startDate && targetDate < startDate) { return true; }
-  }
-  return false;
-}
-
-function calculateBlastRadius(targetCalcName, reverseGraph) {
-  const impactSet = new Set();
-  const queue = [targetCalcName];
-  while(queue.length > 0) {
-    const current = queue.shift();
-    const dependents = reverseGraph.get(current) || [];
-    dependents.forEach(child => {
-      if (!impactSet.has(child)) {
-        impactSet.add(child);
-        queue.push(child);
-      }
-    });
-  }
-  return {
-    directDependents: (reverseGraph.get(targetCalcName) || []).length,
-    totalCascadingDependents: impactSet.size,
-    affectedCalculations: Array.from(impactSet).slice(0, 50)
-  };
-}
-
-async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, logger, simHashCache, db) {
-  const trueReRuns = [];
-  const stableUpdates = [];
-  const limit = pLimit(10);
-
-  const checks = candidates.map(item => limit(async () => {
-    try {
-      const manifest = manifestMap.get(item.name);
-      const stored = dailyStatus[item.name];
-
-      if (!stored || !stored.simHash || !manifest) {
-        trueReRuns.push(item);
-        return;
-      }
-
-      let newSimHash = simHashCache.get(manifest.hash);
-      if (!newSimHash) {
-        newSimHash = await SimRunner.run(manifest, manifestMap);
-        simHashCache.set(manifest.hash, newSimHash);
-        db.collection(SIMHASH_REGISTRY_COLLECTION).doc(manifest.hash).set({
-          simHash: newSimHash,
-          createdAt: new Date(),
-          calcName: manifest.name
-        }).catch(err => logger.log('WARN', `Failed to write SimHash registry for ${manifest.name}: ${err.message}`));
-      }
-
-      if (newSimHash === stored.simHash) {
-        stableUpdates.push({
-          ...item,
-          reason: "Code Updated (Logic Stable)",
-          simHash: newSimHash,
-          newHash: manifest.hash
-        });
-      } else {
-        trueReRuns.push({
-          ...item,
-          reason: item.reason + ` [SimHash Mismatch]`,
-          oldSimHash: stored.simHash,
-          newSimHash: newSimHash
-        });
-      }
-    } catch (e) {
-      logger.log('WARN', `[BuildReporter] SimHash check failed for ${item.name}: ${e.message}`);
-      trueReRuns.push(item);
-    }
-  }));
-
-  await Promise.all(checks);
-  return { trueReRuns, stableUpdates };
-}
-
-/**
- * Algorithmic Analysis for Dynamic Warnings.
- * Detects patterns in "Impossible" states (e.g., Weekends, Category-Wide).
- */
-function generateDynamicWarnings(impossibleStats, categoryStats, totalDates) {
-  const warnings = [];
-
-  for (const [name, stats] of impossibleStats.entries()) {
-    const failureRate = stats.count / totalDates;
-    const categoryData = categoryStats.get(stats.category) || { total: 0, impossible: 0 };
-    const catTotal = Math.max(1, categoryData.total);
-    const catFailureRate = categoryData.impossible / catTotal;
-
-    // Skip negligible errors
-    if (failureRate < 0.05) continue;
-
-    let type = 'INTERMITTENT_FAILURE';
-    let contextMsg = `Fails on ${stats.count}/${totalDates} dates.`;
-
-    // 1. Weekend Analysis
-    const isWeekendOnly = stats.dates.every(d => {
-      const day = new Date(d + 'T00:00:00Z').getUTCDay();
-      return day === 0 || day === 6; // Sunday or Saturday
-    });
-
-    if (isWeekendOnly && failureRate < 0.35) {
-      type = 'WEEKEND_GAP';
-      contextMsg = 'Computation consistently fails only on weekends (likely price/market data gaps).';
-    } else if (failureRate > 0.95) {
-      type = 'PERMANENT_FAILURE';
-      if (catFailureRate > 0.8) {
-        contextMsg = `Entire category '${stats.category}' is failing (${(catFailureRate*100).toFixed(0)}% fail rate). Systemic issue.`;
-      } else {
-        contextMsg = `Category '${stats.category}' is healthy, but this calculation is 100% impossible. Outlier.`;
-      }
-    }
-
-    warnings.push({
-      computation: name,
-      category: stats.category,
-      pass: stats.pass,
-      type: type,
-      failureRate: (failureRate * 100).toFixed(1) + '%',
-      message: contextMsg,
-      reasons: Array.from(stats.reasons).slice(0, 3)
-    });
-  }
-
-  return warnings;
-}
-
-/**
- * The main reporter logic.
- */
-async function generateBuildReport(config, dependencies, manifest) {
-  const { db, logger } = dependencies;
-  const { moduleVersion, calcVersion } = getPackageVersions();
-
-  // 1. Version-Based Trigger Check
-  // We create a composite ID for this exact code state
-  const buildId = `build_mod-${moduleVersion}_calc-${calcVersion}_${REPORTER_EPOCH}`;
-
-  const existingDoc = await db.collection(BUILD_RECORDS_COLLECTION).doc(buildId).get();
-  if (existingDoc.exists) {
-    const data = existingDoc.data();
-    if (data.status === 'COMPLETED') {
-      logger.log('INFO', `[BuildReporter] ⚡ Report already exists for ${buildId}. Skipping execution.`);
-      return { success: true, status: 'SKIPPED_EXISTING', buildId };
-    }
-  }
-
-  logger.log('INFO', `[BuildReporter] 🚀 Generating new report for ${buildId}.`);
-
-  // 2. Initialize Data
-  await getEarliestDataDates(config, dependencies); // Ensure dates are loaded
-  const currentFingerprint = getSystemFingerprint(manifest);
-  const { absoluteEarliest } = DEFINITIVE_EARLIEST_DATES;
-
-  // Dynamic Window
-  const today = new Date();
-  let dynamicDaysBack = 90;
-  if (absoluteEarliest) {
-    const diffTime = Math.abs(today.getTime() - absoluteEarliest.getTime());
-    dynamicDaysBack = Math.ceil(diffTime / (1000 * 60 * 60 * 24)) + 2;
-  }
-  const startDate = new Date();
-  startDate.setDate(today.getDate() - dynamicDaysBack);
-  const datesToCheck = getExpectedDateStrings(startDate, today);
-
-  // 3. Stats Accumulators
-  const manifestMap = new Map(manifest.map(c => [normalizeName(c.name), c]));
-  const simHashCache = new Map();
-  const reverseGraph = new Map();
-
-  // Graph for Blast Radius
-  manifest.forEach(c => {
-    const parentName = normalizeName(c.name);
-    if (c.dependencies) {
-      c.dependencies.forEach(dep => {
-        const depName = normalizeName(dep);
-        if (!reverseGraph.has(depName)) reverseGraph.set(depName, []);
-        reverseGraph.get(depName).push(parentName);
-      });
-    }
-  });
-
-  // Global Accumulators for Report
-  let totalRun = 0, totalReRun = 0, totalStable = 0, totalErrors = 0;
-  const globalMismatchStats = new Map(); // { stored: 0, mismatch: 0 }
-  const impossibleAnalysis = new Map(); // { count: 0, dates: [], reasons: Set, category, pass }
-  const categoryStats = new Map(); // { total: 0, impossible: 0 }
-  const runnablePerDate = {};
-  const impossiblePerDate = {};
-
-  const reportHeader = {
-    buildId,
-    sharded: true,
-    generatedAt: new Date().toISOString(),
-    calculationPackageVersion: calcVersion,
-    bulltrackersModulePackageVersion: moduleVersion,
-    reporterEpoch: REPORTER_EPOCH,
-    status: 'IN_PROGRESS',
-    systemFingerprint: currentFingerprint,
-    earliestWindow: absoluteEarliest ? absoluteEarliest.toISOString().slice(0, 10) : 'UNKNOWN',
-    scanRange: `${datesToCheck[0]} to ${datesToCheck[datesToCheck.length-1]}`,
-    summary: {}
-  };
-
-  await db.collection(BUILD_RECORDS_COLLECTION).doc(buildId).set(reportHeader);
-
-  // 4. Execution Loop
-  const limit = pLimit(10);
-
-  for (let i = 0; i < datesToCheck.length; i += 5) {
-    const dateBatch = datesToCheck.slice(i, i + 5);
-
-    const results = await Promise.all(dateBatch.map(dateStr => limit(async () => {
-      try {
-        // Fetch State
-        const fetchPromises = [
-          fetchComputationStatus(dateStr, config, dependencies),
-          checkRootDataAvailability(dateStr, config, dependencies, DEFINITIVE_EARLIEST_DATES)
-        ];
-
-        let prevDateStr = null;
-        if (manifest.some(c => c.isHistorical)) {
-          const prevDate = new Date(dateStr + 'T00:00:00Z');
-          prevDate.setUTCDate(prevDate.getUTCDate() - 1);
-          prevDateStr = prevDate.toISOString().slice(0, 10);
-          if (prevDate >= DEFINITIVE_EARLIEST_DATES.absoluteEarliest) {
-            fetchPromises.push(fetchComputationStatus(prevDateStr, config, dependencies));
-          }
-        }
-
-        const [dailyStatus, availability, prevRes] = await Promise.all(fetchPromises);
-        const prevDailyStatus = (prevDateStr && prevRes) ? prevRes : (prevDateStr ? {} : null);
-        const rootDataStatus = availability ? availability.status : { hasPortfolio: false, hasHistory: false };
-
-        // Analyze
-        const analysis = analyzeDateExecution(dateStr, manifest, rootDataStatus, dailyStatus, manifestMap, prevDailyStatus);
-
-        const dateSummary = {
-          run: [], rerun: [], stable: [], blocked: [], impossible: [], uptodate: []
-        };
-
-        // --- Metric Aggregation Logic ---
-        const processItem = (targetArray, item, extraReason) => {
-          const calcManifest = manifestMap.get(item.name);
-          if (calcManifest && isDateBeforeAvailability(dateStr, calcManifest)) return; // Skip invalid dates
-
-          const entry = { name: item.name, reason: item.reason || extraReason, pass: calcManifest?.pass || '?' };
-
-          // Stats for Mismatch Rate
-          if (!globalMismatchStats.has(item.name)) globalMismatchStats.set(item.name, { stored: 0, mismatch: 0 });
-          const stats = globalMismatchStats.get(item.name);
-
-          // "Stored" implies we have a result or are about to run one.
-          // We count Stable + Rerun as "Stored History comparisons"
-          // Runnable is "New", not "Stored".
-          if (targetArray === dateSummary.rerun) {
-            stats.stored++;
-            stats.mismatch++;
-            entry.impact = calculateBlastRadius(item.name, reverseGraph);
-          } else if (targetArray === dateSummary.stable) {
-            stats.stored++;
-          }
-
-          targetArray.push(entry);
-        };
-
-        // Track Impossible Stats
-        analysis.impossible.forEach(item => {
-          const m = manifestMap.get(item.name);
-          if (!m) return;
-          if (!impossibleAnalysis.has(item.name)) {
-            impossibleAnalysis.set(item.name, {
-              count: 0, dates: [], reasons: new Set(), category: m.category, pass: m.pass
-            });
-          }
-          const imp = impossibleAnalysis.get(item.name);
-          imp.count++;
-          imp.dates.push(dateStr);
-          imp.reasons.add(item.reason);
-        });
-
-        // Track Category Stats (Total vs Impossible)
-        const allEvaluated = [...analysis.runnable, ...analysis.reRuns, ...analysis.skipped, ...analysis.impossible]; // Stable often in skipped
-        allEvaluated.forEach(item => {
-          const m = manifestMap.get(item.name);
-          if (!m) return;
-          if (!categoryStats.has(m.category)) categoryStats.set(m.category, { total: 0, impossible: 0 });
-          const cat = categoryStats.get(m.category);
-          cat.total++;
-          if (analysis.impossible.find(x => x.name === item.name)) cat.impossible++;
-        });
-
-        analysis.runnable.forEach(item => processItem(dateSummary.run, item, "New Calculation"));
-
-        // Handle Re-Runs (SimHash Verification)
-        if (analysis.reRuns.length > 0) {
-          const { trueReRuns, stableUpdates } = await verifyBehavioralStability(analysis.reRuns, manifestMap, dailyStatus, logger, simHashCache, db);
-          trueReRuns.forEach(item => processItem(dateSummary.rerun, item, "Logic Changed"));
-          stableUpdates.forEach(item => processItem(dateSummary.stable, item, "Logic Stable"));
-
-          // Auto-Heal Status if Stable
-          if (stableUpdates.length > 0) {
-            const updatesPayload = {};
-            for (const stable of stableUpdates) {
-              const m = manifestMap.get(stable.name);
-              const stored = dailyStatus[stable.name];
-
-              // [FIX] Only auto-heal if we have valid result data
-              // Otherwise, force re-run to regenerate everything properly
-              const hasValidResults = stored?.resultHash &&
-                stored.resultHash !== 'empty' &&
-                stored.dependencyResultHashes &&
-                Object.keys(stored.dependencyResultHashes).length > 0;
-
-              if (m && stored && hasValidResults) {
-                updatesPayload[stable.name] = {
-                  hash: m.hash, simHash: stable.simHash, resultHash: stored.resultHash,
-                  dependencyResultHashes: stored.dependencyResultHashes,
-                  category: m.category, composition: m.composition, lastUpdated: new Date()
-                };
-              } else {
-                // No valid results - treat as true re-run
-                processItem(dateSummary.rerun, stable, "Epoch Change - Rebuilding");
-              }
-            }
-            if (Object.keys(updatesPayload).length > 0) {
-              await updateComputationStatus(dateStr, updatesPayload, config, dependencies);
-            }
-        }}
-
-        // Add skipped items to Stable count for metrics
-        analysis.skipped.forEach(item => processItem(dateSummary.stable, item, "Up To Date"));
-
-        analysis.blocked.forEach(item => processItem(dateSummary.blocked, item));
-        analysis.failedDependency.forEach(item => processItem(dateSummary.blocked, item, "Dependency Missing"));
-        analysis.impossible.forEach(item => processItem(dateSummary.impossible, item));
-
-        // Per-Date Metrics
-        const runnableCount = dateSummary.run.length + dateSummary.rerun.length;
-        const impossibleCount = dateSummary.impossible.length;
-
-        return {
-          date: dateStr,
-          run: dateSummary.run.length,
-          rerun: dateSummary.rerun.length,
-          stable: dateSummary.stable.length,
-          runnableCount,
-          impossibleCount,
-          error: false,
-          summaryPayload: dateSummary
-        };
-
-      } catch (err) {
-        logger.log('ERROR', `[BuildReporter] Analysis failed for ${dateStr}: ${err.message}`);
-        return { run: 0, rerun: 0, stable: 0, error: true };
-      }
-    })));
-
-    // Accumulate Batch Results
-    for (const res of results) {
-      if (res.error) {
-        totalErrors++;
-      } else {
-        totalRun += res.run;
-        totalReRun += res.rerun;
-        totalStable += res.stable;
-        runnablePerDate[res.date] = res.runnableCount;
-        impossiblePerDate[res.date] = res.impossibleCount;
-
-        // Write detailed record
-        await db.collection(BUILD_RECORDS_COLLECTION).doc(buildId).collection('details').doc(res.date).set(res.summaryPayload);
-      }
-    }
-
-    await db.collection(BUILD_RECORDS_COLLECTION).doc(buildId).update({
-      checkpoint: `Processed ${Math.min(i + dateBatch.length, datesToCheck.length)}/${datesToCheck.length} dates`
-    });
-  }
-
-  // 5. Final Synthesis
-  const hashMismatchMetrics = {};
-  for (const [name, stats] of globalMismatchStats) {
-    if (stats.stored > 0) {
-      hashMismatchMetrics[name] = `${stats.mismatch}/${stats.stored}`;
-    }
-  }
-
-  const dynamicWarnings = generateDynamicWarnings(impossibleAnalysis, categoryStats, datesToCheck.length);
-
-  reportHeader.status = 'COMPLETED';
-  reportHeader.summary = {
-    totalReRuns: totalReRun,
-    totalNew: totalRun,
-    totalStable: totalStable,
-    totalErrors: totalErrors,
-
-    // Expanded Metrics
-    runnablePerDate,
-    impossiblePerDate,
-    hashMismatches: hashMismatchMetrics,
-    dynamicWarnings: dynamicWarnings
-  };
-
-  // Save
-  await db.collection(BUILD_RECORDS_COLLECTION).doc(buildId).set(reportHeader);
-  await db.collection(BUILD_RECORDS_COLLECTION).doc('latest').set({ ...reportHeader, note: "Latest completed build report." });
-
-  logger.log('SUCCESS', `[BuildReporter] Build ${buildId} completed. Warnings: ${dynamicWarnings.length}`);
-
-  return { success: true, buildId, summary: reportHeader.summary };
-}
-
-module.exports = { requestBuildReport, handleBuildReportTrigger, generateBuildReport };
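Editor's note: in the removed BuildReporter.js above, the "blast radius" attached to each re-run is computed by inverting the calculation dependency graph and walking it breadth-first. The snippet below is an editor-added, standalone sketch of that traversal with a hypothetical three-calculation manifest; it is illustrative only and not part of the published package.

// Standalone sketch (not part of the package): invert "A depends on B" into
// "B is depended on by A" and walk the inverted graph breadth-first, mirroring
// the removed calculateBlastRadius() helper. Manifest and names are hypothetical.
const manifest = [
  { name: 'price_returns', dependencies: [] },
  { name: 'volatility', dependencies: ['price_returns'] },
  { name: 'risk_score', dependencies: ['volatility'] },
];

const reverseGraph = new Map();
for (const calc of manifest) {
  for (const dep of calc.dependencies || []) {
    if (!reverseGraph.has(dep)) reverseGraph.set(dep, []);
    reverseGraph.get(dep).push(calc.name);
  }
}

// BFS from the changed calculation to collect every downstream dependent.
function blastRadius(target) {
  const impacted = new Set();
  const queue = [target];
  while (queue.length > 0) {
    for (const child of reverseGraph.get(queue.shift()) || []) {
      if (!impacted.has(child)) { impacted.add(child); queue.push(child); }
    }
  }
  return {
    directDependents: (reverseGraph.get(target) || []).length,
    totalCascadingDependents: impacted.size,
    affected: [...impacted],
  };
}

console.log(blastRadius('price_returns'));
// -> { directDependents: 1, totalCascadingDependents: 2, affected: ['volatility', 'risk_score'] }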
package/functions/computation-system/tools/ContractDiscoverer.js
@@ -1,144 +0,0 @@
-/**
- * @fileoverview Analyzes calculation behavior via Monte Carlo simulation
- * to generate "Loose" but mathematically sound contracts.
- */
-const SimRunner = require('../simulation/SimRunner');
-const { MathPrimitives } = require('../layers/mathematics');
-const { normalizeName } = require('../utils/utils');
-
-class ContractDiscoverer {
-
-  static async generateContract(calcManifest, fullManifestMap, iterations = 50) {
-    console.log(`[ContractDiscoverer] 🕵️♀️ Learning behavior for: ${calcManifest.name}`);
-
-    const samples = [];
-    const errors = [];
-
-    // 1. Monte Carlo Simulation
-    // Run the code against 50 different "universes" of data to see how it behaves.
-    for (let i = 0; i < iterations; i++) {
-      try {
-        // We use your existing SimRunner, which uses Fabricator
-        // The SimRunner needs to return the RAW result, not the hash.
-        // You might need a small helper in SimRunner or just instantiate directly here:
-        const result = await this._runSimulationRaw(calcManifest, fullManifestMap, i);
-        if (result) samples.push(result);
-      } catch (e) {
-        errors.push(e.message);
-      }
-    }
-
-    if (samples.length < 5) {
-      console.warn(`[ContractDiscoverer] ⚠️ Insufficient samples for ${calcManifest.name}. Skipping.`);
-      return null;
-    }
-
-    // 2. Statistical Inference
-    // We now analyze the 50 outputs to find "Invariants"
-    return this._inferContractFromSamples(samples, calcManifest.type);
-  }
-
-  // Helper to bypass the hashing logic of SimRunner and get raw object
-  static async _runSimulationRaw(manifest, map, seed) {
-    const Fabricator = require('../simulation/Fabricator');
-    const fabricator = new Fabricator(manifest.name + '_seed_' + seed);
-    const context = await fabricator.generateContext(manifest, map, seed);
-    const instance = new manifest.class();
-    await instance.process(context);
-    return instance.getResult ? await instance.getResult() : (instance.results || {});
-  }
-
-  static _inferContractFromSamples(samples, type) {
-    // Flatten samples if it's a Standard (Batch) calculation
-    // We want to analyze "What does A USER result look like?"
-    let flattened = samples;
-    if (type === 'standard') {
-      flattened = [];
-      samples.forEach(batch => {
-        Object.values(batch).forEach(userResult => flattened.push(userResult));
-      });
-    }
-
-    // Initialize Rule Set
-    const contract = {
-      requiredKeys: new Set(),
-      numericBounds: {}, // { min, max, isInteger }
-      distributions: {}, // { mean, stdDev }
-      enums: {}, // { allowedValues }
-      dataTypes: {} // { key: 'number' | 'string' | 'object' }
-    };
-
-    if (flattened.length === 0) return null;
-
-    // A. Structural Analysis (Keys & Types)
-    const first = flattened[0];
-    if (typeof first === 'object' && first !== null) {
-      Object.keys(first).forEach(key => contract.requiredKeys.add(key));
-
-      Object.keys(first).forEach(key => {
-        contract.dataTypes[key] = typeof first[key];
-
-        // Track all values for this key to find bounds
-        const values = flattened.map(item => item[key]).filter(v => v !== null && v !== undefined);
-
-        // B. Numeric Analysis (The "Power" part)
-        if (typeof first[key] === 'number') {
-          this._analyzeNumericField(key, values, contract);
-        }
-
-        // C. Categorical Analysis
-        if (typeof first[key] === 'string') {
-          const unique = new Set(values);
-          if (unique.size < 10) { // If only a few distinct strings, it's an Enum
-            contract.enums[key] = Array.from(unique);
-          }
-        }
-      });
-    }
-
-    return {
-      ...contract,
-      requiredKeys: Array.from(contract.requiredKeys)
-    };
-  }
-
-  static _analyzeNumericField(key, values, contract) {
-    if (values.length === 0) return;
-
-    const min = Math.min(...values);
-    const max = Math.max(...values);
-    const avg = MathPrimitives.average(values);
-    const dev = MathPrimitives.standardDeviation(values);
-
-    // 1. Detect "Hard" Physics Limits (Probability, Ratios)
-    // If the value NEVER goes below 0 or above 1 in 50 runs, assume it's a Ratio.
-    // We assume "Financial Volatility" creates large numbers, but "Ratios" stay small.
-    const isRatio = (min >= 0 && max <= 1.0);
-    const isPercentage = (min >= 0 && max <= 100.0 && max > 1.0); // e.g. RSI
-    const isPositive = (min >= 0);
-
-    contract.numericBounds[key] = {
-      // We do NOT set strict upper bounds for financial values (Price, Vol, PnL)
-      // because crypto/finance can do 1000x.
-      // We ONLY set strict bounds for Ratios/Percentages.
-      min: isPositive ? 0 : -Infinity,
-      max: (isRatio ? 1.0 : (isPercentage ? 100.0 : Infinity))
-    };
-
-    // 2. Detect "Soft" Statistical Envelopes (6 Sigma)
-    // This handles the "Ridiculously Volatile" case.
-    // 6 Sigma covers 99.9999998% of cases even in non-normal distributions (Chebyshev's inequality).
-    // If a value is 20 Sigma away, it's likely a bug (e.g., Unix Timestamp interpreted as Price).
-    if (dev > 0) {
-      contract.distributions[key] = {
-        mean: avg,
-        stdDev: dev,
-        // "Loose" Envelope: 10 Standard Deviations allowed.
-        // This allows for massive volatility but catches data corruption.
-        sigmaLimit: 10
-      };
-    }
-  }
-}
-
-module.exports = ContractDiscoverer;
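Editor's note: the contracts that the removed ContractDiscoverer.js emits (requiredKeys, numericBounds, and distributions carrying a sigmaLimit) are presumably consumed by a validator such as the also-removed persistence/ContractValidator.js. The snippet below is an editor-added, hypothetical sketch of how a contract of that shape could be checked against a single result object; it does not reproduce the package's own validator, and the checkAgainstContract function and sample data are illustrative assumptions.

// Standalone sketch (not part of the package): apply hard bounds plus a
// soft sigma envelope, the two invariants ContractDiscoverer learns.
function checkAgainstContract(result, contract) {
  const violations = [];

  // Structural check: every learned key must be present.
  for (const key of contract.requiredKeys) {
    if (!(key in result)) violations.push(`missing key: ${key}`);
  }

  // Hard bounds (ratios/percentages) and soft statistical envelope.
  for (const [key, bounds] of Object.entries(contract.numericBounds || {})) {
    const v = result[key];
    if (typeof v !== 'number') continue;
    if (v < bounds.min || v > bounds.max) {
      violations.push(`${key}=${v} outside [${bounds.min}, ${bounds.max}]`);
    }
    const dist = (contract.distributions || {})[key];
    if (dist && dist.stdDev > 0) {
      const sigmas = Math.abs(v - dist.mean) / dist.stdDev;
      if (sigmas > dist.sigmaLimit) {
        violations.push(`${key} is ${sigmas.toFixed(1)} sigma from the learned mean`);
      }
    }
  }
  return { ok: violations.length === 0, violations };
}

// Example: a learned "win rate" ratio stays in [0, 1]; a timestamp leaking into
// that field would violate both the hard bound and the sigma envelope.
const contract = {
  requiredKeys: ['winRate'],
  numericBounds: { winRate: { min: 0, max: 1.0 } },
  distributions: { winRate: { mean: 0.55, stdDev: 0.05, sigmaLimit: 10 } },
};
console.log(checkAgainstContract({ winRate: 1700000000 }, contract).violations.length); // 2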