bulltrackers-module 1.0.732 → 1.0.733

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/functions/orchestrator/index.js +19 -17
  2. package/index.js +8 -29
  3. package/package.json +1 -1
  4. package/functions/computation-system/WorkflowOrchestrator.js +0 -213
  5. package/functions/computation-system/config/monitoring_config.js +0 -31
  6. package/functions/computation-system/config/validation_overrides.js +0 -10
  7. package/functions/computation-system/context/ContextFactory.js +0 -143
  8. package/functions/computation-system/context/ManifestBuilder.js +0 -379
  9. package/functions/computation-system/data/AvailabilityChecker.js +0 -236
  10. package/functions/computation-system/data/CachedDataLoader.js +0 -325
  11. package/functions/computation-system/data/DependencyFetcher.js +0 -455
  12. package/functions/computation-system/executors/MetaExecutor.js +0 -279
  13. package/functions/computation-system/executors/PriceBatchExecutor.js +0 -108
  14. package/functions/computation-system/executors/StandardExecutor.js +0 -465
  15. package/functions/computation-system/helpers/computation_dispatcher.js +0 -750
  16. package/functions/computation-system/helpers/computation_worker.js +0 -375
  17. package/functions/computation-system/helpers/monitor.js +0 -64
  18. package/functions/computation-system/helpers/on_demand_helpers.js +0 -154
  19. package/functions/computation-system/layers/extractors.js +0 -1097
  20. package/functions/computation-system/layers/index.js +0 -40
  21. package/functions/computation-system/layers/mathematics.js +0 -522
  22. package/functions/computation-system/layers/profiling.js +0 -537
  23. package/functions/computation-system/layers/validators.js +0 -170
  24. package/functions/computation-system/legacy/AvailabilityCheckerOld.js +0 -388
  25. package/functions/computation-system/legacy/CachedDataLoaderOld.js +0 -357
  26. package/functions/computation-system/legacy/DependencyFetcherOld.js +0 -478
  27. package/functions/computation-system/legacy/MetaExecutorold.js +0 -364
  28. package/functions/computation-system/legacy/StandardExecutorold.js +0 -476
  29. package/functions/computation-system/legacy/computation_dispatcherold.js +0 -944
  30. package/functions/computation-system/logger/logger.js +0 -297
  31. package/functions/computation-system/persistence/ContractValidator.js +0 -81
  32. package/functions/computation-system/persistence/FirestoreUtils.js +0 -56
  33. package/functions/computation-system/persistence/ResultCommitter.js +0 -283
  34. package/functions/computation-system/persistence/ResultsValidator.js +0 -130
  35. package/functions/computation-system/persistence/RunRecorder.js +0 -142
  36. package/functions/computation-system/persistence/StatusRepository.js +0 -52
  37. package/functions/computation-system/reporter_epoch.js +0 -6
  38. package/functions/computation-system/scripts/UpdateContracts.js +0 -128
  39. package/functions/computation-system/services/SnapshotService.js +0 -148
  40. package/functions/computation-system/simulation/Fabricator.js +0 -285
  41. package/functions/computation-system/simulation/SeededRandom.js +0 -41
  42. package/functions/computation-system/simulation/SimRunner.js +0 -51
  43. package/functions/computation-system/system_epoch.js +0 -2
  44. package/functions/computation-system/tools/BuildReporter.js +0 -531
  45. package/functions/computation-system/tools/ContractDiscoverer.js +0 -144
  46. package/functions/computation-system/tools/DeploymentValidator.js +0 -536
  47. package/functions/computation-system/tools/FinalSweepReporter.js +0 -322
  48. package/functions/computation-system/topology/HashManager.js +0 -55
  49. package/functions/computation-system/topology/ManifestLoader.js +0 -47
  50. package/functions/computation-system/utils/data_loader.js +0 -675
  51. package/functions/computation-system/utils/schema_capture.js +0 -121
  52. package/functions/computation-system/utils/utils.js +0 -188
@@ -1,283 +0,0 @@
1
- /**
2
- * @fileoverview Handles saving computation results.
3
- * REFACTORED:
4
- * 1. Writes ALL data to BigQuery (Source of Truth).
5
- * 2. Writes to Firestore ONLY for 'Page' (Fan-out) and 'Alert' computations.
6
- * 3. Removes GCS/Compression complexity for standard data (now BQ-only).
7
- */
8
- const { commitBatchInChunks, generateDataHash, FieldValue } = require('../utils/utils');
9
- const { updateComputationStatus } = require('./StatusRepository');
10
- const { batchStoreSchemas } = require('../utils/schema_capture');
11
- const { generateProcessId, PROCESS_TYPES } = require('../logger/logger');
12
- const { HeuristicValidator } = require('./ResultsValidator');
13
- const ContractValidator = require('./ContractValidator');
14
- const validationOverrides = require('../config/validation_overrides');
15
- const pLimit = require('p-limit');
16
-
17
- const DEFAULT_TTL_DAYS = 90;
18
- const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';
19
- const CONTRACTS_COLLECTION = 'system_contracts';
20
-
21
/**
 * Commits the results of one computation pass for a single date.
 *
 * Write strategy (per the file header): every non-empty result is written to
 * BigQuery (source of truth); Firestore is written only for Page (fan-out)
 * and Alert computations. Contract and heuristic validation gates run before
 * any write, and a failing gate aborts the commit for that computation only.
 *
 * @param {Object} stateObj - Map of computation name -> calc object exposing
 *   `manifest`, async `getResult()`, and optional `_executionStats`.
 * @param {string} dStr - Target date key (used as Firestore doc id; appears to
 *   be YYYY-MM-DD — TODO confirm against callers).
 * @param {string} passName - Pass identifier, fed into process-id generation.
 * @param {Object} config - Collection names (resultsCollection, resultsSubcollection,
 *   computationsSubcollection, ...).
 * @param {Object} deps - Injected dependencies: { logger, db, calculationUtils }.
 * @param {boolean} [skipStatusWrite=false] - When true, skip the status-document update.
 * @param {Object} [options] - { flushMode: 'STANDARD'|'INTERMEDIATE', isInitialWrite }.
 *   INTERMEDIATE flushes skip status/simHash/schema bookkeeping.
 * @returns {Promise<{successUpdates: Object, failureReport: Array}>} Per-computation
 *   success metadata and failure details; never throws for a single bad computation.
 */
async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false, options = {}) {
  const successUpdates = {};
  const failureReport = [];
  const schemas = [];
  const alertTriggers = [];
  // NOTE(review): calculationUtils is destructured but not used below — confirm
  // whether it is still needed or a leftover.
  const { logger, db, calculationUtils } = deps;

  // NOTE(review): pid is generated but never referenced afterwards; presumably
  // generateProcessId has useful side effects (log correlation) — confirm.
  const pid = generateProcessId(PROCESS_TYPES.STORAGE, passName, dStr);
  const flushMode = options.flushMode || 'STANDARD';
  const isInitialWrite = options.isInitialWrite === true;

  // Pre-fetch contracts and hashes (one batched round-trip each, in parallel).
  const calcNames = Object.keys(stateObj);
  const hashKeys = calcNames.map(n => stateObj[n].manifest?.hash).filter(Boolean);
  const [contractMap, simHashMap] = await Promise.all([
    fetchContracts(db, calcNames),
    batchFetchSimHashes(db, hashKeys)
  ]);

  for (const name in stateObj) {
    const calc = stateObj[name];
    const execStats = calc._executionStats || { processedUsers: 0, skippedUsers: 0 };

    // Metrics accumulator for this computation; attached to both the success
    // update and (on error) the failure report.
    const runMetrics = {
      storage: { sizeBytes: 0, location: 'BIGQUERY', keys: 0 },
      validation: { isValid: true, anomalies: [] },
      execution: execStats,
      io: { writes: 0, deletes: 0 }
    };

    const manifest = calc.manifest;
    const isAlert = manifest.isAlertComputation === true;
    const isPage = manifest.isPage === true;
    const ttlDays = manifest.ttlDays !== undefined ? manifest.ttlDays : DEFAULT_TTL_DAYS;

    try {
      const result = await calc.getResult();

      // --- 1. VALIDATION ---
      const configOverrides = validationOverrides[manifest.name] || {};
      const dataDeps = manifest.rootDataDependencies || [];
      // Relax validation for price-only computations (markets closed on
      // weekends/holidays legitimately produce zeros/nulls/flatlines).
      if (dataDeps.length === 1 && dataDeps[0] === 'price') {
        Object.assign(configOverrides, { maxZeroPct: 100, maxFlatlinePct: 100, maxNullPct: 100, maxNanPct: 100 });
        delete configOverrides.weekend;
      }

      // Contract Validation (hard gate: shape/semantics must match the
      // registered contract, otherwise the commit is aborted).
      const contract = contractMap[name];
      if (contract) {
        const contractCheck = ContractValidator.validate(result, contract);
        if (!contractCheck.valid) throw new Error(`[SEMANTIC_GATE] ${contractCheck.reason}`);
      }

      // Heuristic Validation (Circuit Breaker): statistical sanity check on
      // non-empty results; a failure is recorded in metrics, then thrown.
      if (result && Object.keys(result).length > 0) {
        const healthCheck = HeuristicValidator.analyze(manifest.name, result, dStr, configOverrides);
        if (!healthCheck.valid) {
          runMetrics.validation.isValid = false;
          runMetrics.validation.anomalies.push(healthCheck.reason);
          throw new Error(`[QUALITY_CIRCUIT_BREAKER] ${healthCheck.reason}`);
        }
      }

      const isEmpty = !result || (typeof result === 'object' && Object.keys(result).length === 0);
      const resultHash = isEmpty ? 'empty' : generateDataHash(result);
      // simHash lookup is skipped for INTERMEDIATE flushes.
      const simHash = (flushMode !== 'INTERMEDIATE') ? (simHashMap[manifest.hash] || null) : null;

      // --- 2. HANDLE EMPTY RESULTS ---
      // Empty results still record a status update (so downstream knows the
      // computation ran), but nothing is written to BigQuery/Firestore.
      if (isEmpty) {
        if (flushMode === 'INTERMEDIATE') continue;
        if (manifest.hash) {
          successUpdates[name] = {
            hash: manifest.hash, simHash, resultHash,
            dependencyResultHashes: manifest.dependencyResultHashes || {},
            category: manifest.category, composition: manifest.composition, metrics: runMetrics
          };
        }
        continue;
      }

      // --- 3. WRITE TO BIGQUERY (UNIVERSAL) ---
      // ALL data goes to BigQuery first. This is the primary storage.
      // Using a fire-and-forget approach or await based on critical need.
      // We await here to ensure data safety before reporting success.
      await writeToBigQuery(result, name, dStr, manifest.category, logger, isAlert).catch(err => {
        logger.log('WARN', `[BigQuery] Write warning for ${name}: ${err.message}`);
      });

      // If it's NOT Page or Alert, we are done (No Firestore write)
      if (!isPage && !isAlert) {
        if (manifest.hash) {
          successUpdates[name] = {
            hash: manifest.hash, simHash, resultHash,
            dependencyResultHashes: manifest.dependencyResultHashes || {},
            category: manifest.category, composition: manifest.composition, metrics: runMetrics
          };
        }
        continue; // Skip Firestore logic
      }

      // --- 4. FIRESTORE WRITES (SELECTIVE) ---
      const expireAt = calculateExpirationDate(dStr, ttlDays);

      // A. PAGE COMPUTATIONS (Fan-Out)
      if (isPage) {
        const mainDocRef = db.collection(config.resultsCollection).doc(dStr)
          .collection(config.resultsSubcollection).doc(manifest.category)
          .collection(config.computationsSubcollection).doc(name);

        // Fan-out writes: One document per User ID
        const pageWrites = [];
        for (const [cid, userData] of Object.entries(result)) {
          // Unique document for each user ID
          const userDocRef = mainDocRef.collection('pages').doc(cid);

          // Scalar results get wrapped under `value` so every page doc is an object.
          const payload = (typeof userData === 'object' && userData !== null)
            ? { ...userData, _expireAt: expireAt }
            : { value: userData, _expireAt: expireAt };

          pageWrites.push({ ref: userDocRef, data: payload, options: { merge: false } });
        }

        if (pageWrites.length > 0) {
          await commitBatchInChunks(config, deps, pageWrites, `${name}::PageFanOut`);
          runMetrics.io.writes += pageWrites.length;
          runMetrics.storage.location = 'FIRESTORE_PAGES';
        }

        // Write Header Document (Metadata for frontend/indexing).
        // On the first write the count is absolute; later flushes increment it.
        const headerData = {
          _isPageMode: true,
          _pageCount: isInitialWrite ? pageWrites.length : FieldValue.increment(pageWrites.length),
          _lastUpdated: new Date().toISOString(),
          _expireAt: expireAt,
          _completed: flushMode !== 'INTERMEDIATE'
        };
        await mainDocRef.set(headerData, { merge: !isInitialWrite });

        logger.log('INFO', `[ResultCommitter] ${name}: Wrote ${pageWrites.length} user pages to Firestore.`);
      }

      // B. ALERT COMPUTATIONS (Single Doc for Triggers)
      if (isAlert) {
        // Alerts are written to a single document to trigger the listener
        const mainDocRef = db.collection(config.resultsCollection).doc(dStr)
          .collection(config.resultsSubcollection).doc(manifest.category)
          .collection(config.computationsSubcollection).doc(name);

        const alertPayload = {
          ...result,
          _isAlert: true,
          _lastUpdated: new Date().toISOString(),
          _expireAt: expireAt
        };

        await mainDocRef.set(alertPayload);
        runMetrics.io.writes += 1;
        runMetrics.storage.location = 'FIRESTORE_ALERT';

        // Add to triggers list for logging
        if (flushMode !== 'INTERMEDIATE') {
          alertTriggers.push({ date: dStr, computationName: name, documentPath: mainDocRef.path });
        }
      }

      // --- 5. FINALIZE ---
      if (manifest.hash) {
        successUpdates[name] = {
          hash: manifest.hash, simHash, resultHash,
          dependencyResultHashes: manifest.dependencyResultHashes || {},
          category: manifest.category, composition: manifest.composition, metrics: runMetrics
        };
      }

      // Store Schema (skipped on intermediate flushes; the class object itself
      // is stripped from the metadata before persisting).
      if (manifest.class.getSchema && flushMode !== 'INTERMEDIATE') {
        const { class: _cls, ...safeMetadata } = manifest;
        safeMetadata.ttlDays = ttlDays;
        schemas.push({ name, category: manifest.category, schema: manifest.class.getSchema(), metadata: safeMetadata });
      }

    } catch (e) {
      // Per-computation isolation: one failure never aborts the whole pass.
      logger.log('ERROR', `Commit failed for ${name}`, { error: e });
      failureReport.push({ name, error: { message: e.message, stack: e.stack }, metrics: runMetrics });
    }
  }

  // Schema storage is deliberately fire-and-forget (best-effort).
  if (schemas.length) batchStoreSchemas(deps, config, schemas).catch(() => {});
  if (!skipStatusWrite && Object.keys(successUpdates).length > 0 && flushMode !== 'INTERMEDIATE') {
    await updateComputationStatus(dStr, successUpdates, config, deps);
  }

  if (alertTriggers.length > 0) {
    logger.log('INFO', `[ResultCommitter] ${alertTriggers.length} alert computations updated in Firestore.`);
  }

  return { successUpdates, failureReport };
}
220
-
221
- // --- HELPERS ---
222
-
223
/**
 * Persists one computation result into the BigQuery `computation_results` table.
 *
 * Honors the BIGQUERY_ENABLED kill-switch, ensures the destination table
 * exists, and uses a merge keyed on (date, computation_name, category) so
 * reruns for the same date/calc overwrite instead of duplicating.
 * Never throws: any failure is downgraded to a WARN log so the computation
 * pipeline keeps running.
 *
 * @param {Object} result - The computed result payload (stored as JSON).
 * @param {string} name - Computation name.
 * @param {string} dateContext - Target date key.
 * @param {string} category - Computation category.
 * @param {Object|null} logger - Optional logger with a `.log(level, msg)` method.
 * @param {boolean} [isAlertComputation=false] - Currently unused flag.
 */
async function writeToBigQuery(result, name, dateContext, category, logger, isAlertComputation = false) {
  // Global kill-switch for environments without BigQuery access.
  if (process.env.BIGQUERY_ENABLED === 'false') return;

  try {
    const { ensureComputationResultsTable, insertRowsWithMerge } = require('../../core/utils/bigquery_utils');
    await ensureComputationResultsTable(logger);

    // Surface cid lists (when present) as queryable metadata.
    const metadata = (result.cids && Array.isArray(result.cids)) ? { cids: result.cids } : null;

    const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';

    const row = {
      date: dateContext,
      computation_name: name,
      category: category,
      result_data: result, // BigQuery handles JSON wrapping
      metadata: metadata,
      created_at: new Date().toISOString()
    };

    // Always use merge to ensure idempotency (overwrite previous run for same date/calc)
    await insertRowsWithMerge(datasetId, 'computation_results', [row], ['date', 'computation_name', 'category'], logger);
  } catch (error) {
    // Deliberate swallow: analytics persistence must not crash the pipeline.
    if (logger) logger.log('WARN', `[BigQuery] Write failed for ${name}: ${error.message}`);
  }
}
252
-
253
/**
 * Bulk-loads previously recorded simHashes for the given manifest hashes.
 *
 * @param {Object} db - Firestore client (uses `collection` and `getAll`).
 * @param {string[]} hashes - Manifest hash document ids to look up.
 * @returns {Promise<Object>} Map of hash -> simHash for documents that exist.
 *   Lookup failures degrade to whatever was collected (best-effort: simHash
 *   data is advisory, not required for a commit).
 */
async function batchFetchSimHashes(db, hashes) {
  const result = {};
  if (!hashes || hashes.length === 0) {
    return result;
  }
  const refs = hashes.map(h => db.collection(SIMHASH_REGISTRY_COLLECTION).doc(h));
  try {
    const snapshots = await db.getAll(...refs);
    for (const snap of snapshots) {
      if (snap.exists) {
        result[snap.id] = snap.data().simHash;
      }
    }
  } catch (e) {
    // Best-effort: a failed registry read is tolerated silently.
  }
  return result;
}
263
-
264
/**
 * Bulk-loads validation contracts for the given computation names.
 *
 * @param {Object} db - Firestore client (uses `collection` and `getAll`).
 * @param {string[]} calcNames - Computation names (contract document ids).
 * @returns {Promise<Object>} Map of name -> contract data for documents that
 *   exist. Read failures degrade to whatever was collected (best-effort:
 *   a missing contract simply skips contract validation upstream).
 */
async function fetchContracts(db, calcNames) {
  const result = {};
  if (!calcNames || calcNames.length === 0) {
    return result;
  }
  const refs = calcNames.map(name => db.collection(CONTRACTS_COLLECTION).doc(name));
  try {
    const snapshots = await db.getAll(...refs);
    for (const snap of snapshots) {
      if (snap.exists) {
        result[snap.id] = snap.data();
      }
    }
  } catch (e) {
    // Best-effort: a failed contract read is tolerated silently.
  }
  return result;
}
274
-
275
/**
 * Computes an expiration timestamp: `dateStr` plus `ttlDays` days.
 *
 * @param {string} dateStr - Base date (anything `new Date` can parse).
 * @param {number|string} ttlDays - Days to live; coerced with Number().
 * @returns {Date|null} The expiration Date, or null (meaning "no expiry")
 *   when dateStr is missing/unparseable or ttlDays is falsy/non-numeric.
 */
function calculateExpirationDate(dateStr, ttlDays) {
  const days = Number(ttlDays);
  if (!dateStr || !ttlDays || isNaN(days)) {
    return null;
  }
  const expires = new Date(dateStr);
  if (isNaN(expires.getTime())) {
    return null;
  }
  expires.setDate(expires.getDate() + days);
  return expires;
}
282
-
283
// Public API: only commitResults is exported; the helpers above stay module-private.
module.exports = { commitResults };
@@ -1,130 +0,0 @@
1
- /**
2
- * @fileoverview HeuristicValidator.js
3
- * "Grey Box" validation that infers health using statistical analysis and structural sanity checks.
4
- * UPDATED: Added "Weekend Mode" to allow higher zero/null tolerance on Saturdays/Sundays.
5
- */
6
-
7
class HeuristicValidator {
  /**
   * Runs "grey box" statistical sanity checks over a computation result map.
   *
   * Samples up to 100 evenly-spaced entries and measures zero/null/NaN rates,
   * flatline (all-identical non-zero outputs), and empty distribution vectors,
   * then compares them against (override-able) thresholds. Weekend dates may
   * relax thresholds via `overrides.weekend`.
   *
   * @param {string} calcName - Name for logging; names containing 'profile' or
   *   'distribution' additionally get the empty-vector check.
   * @param {Object} data - The result data to inspect (map of id -> value).
   * @param {string} dateStr - The computation date (YYYY-MM-DD or ISO).
   * @param {Object} [overrides] - Optional central config overrides
   *   ({ maxZeroPct, maxNullPct, maxNanPct, maxFlatlinePct, weekend }).
   * @returns {{valid: boolean, reason?: string}}
   */
  static analyze(calcName, data, dateStr, overrides = {}) {
    // 1. Structure Check — non-object results are not analyzable; pass them.
    if (!data || typeof data !== 'object') return { valid: true };

    const keys = Object.keys(data);
    const totalItems = keys.length;

    // Too few items for percentages to be meaningful.
    if (totalItems < 5) return { valid: true };

    // 2. Sampling Configuration — at most 100 evenly spaced entries (step >= 1).
    const sampleSize = Math.min(totalItems, 100);
    const step = Math.floor(totalItems / sampleSize);

    let zeroCount = 0;
    let nullCount = 0;
    let nanCount = 0;
    let emptyVectorCount = 0;
    let analyzedCount = 0;

    const numericValues = [];

    for (let i = 0; i < totalItems; i += step) {
      const key = keys[i];
      const val = data[key];
      // FIX: only null/undefined count as "empty". The previous `!val` guard
      // also swallowed scalar 0 and NaN, which made the zero-count and the
      // scalar NaN branch below unreachable — zeros and NaNs were misreported
      // against the null threshold with the wrong failure reason.
      if (val === null || val === undefined) {
        nullCount++;
        analyzedCount++;
        continue;
      }
      analyzedCount++;

      // --- TYPE A: Object / Complex Result ---
      if (typeof val === 'object') {
        const subValues = Object.values(val);

        // All-zero/null objects count as "dead" (null-equivalent).
        const isDeadObject = subValues.every(v => v === 0 || v === null || v === undefined);
        if (isDeadObject) nullCount++;

        const hasNan = subValues.some(v => typeof v === 'number' && (isNaN(v) || !isFinite(v)));
        if (hasNan) nanCount++;

        // Known vector-shaped properties that should not be empty.
        const arrayProps = ['profile', 'history', 'sparkline', 'buckets', 'prices'];
        for (const prop of arrayProps) { if (Array.isArray(val[prop]) && val[prop].length === 0) { emptyVectorCount++; } }

        // One representative non-zero numeric per object feeds the flatline check.
        const numericProp = subValues.find(v => typeof v === 'number' && v !== 0);
        if (numericProp !== undefined) numericValues.push(numericProp);
      }
      // --- TYPE B: Scalar / Primitive Result ---
      if (typeof val === 'number') {
        if (isNaN(val) || !isFinite(val)) { nanCount++; }
        else {
          numericValues.push(val);
          if (val === 0) zeroCount++;
        }
      }
    }

    // 3. Weekend Detection & Threshold Resolution
    let isWeekend = false;
    if (dateStr) {
      try {
        // Force UTC interpretation to align with system dates
        const safeDate = dateStr.includes('T') ? dateStr : `${dateStr}T00:00:00Z`;
        const day = new Date(safeDate).getUTCDay();
        // 0 = Sunday, 6 = Saturday
        isWeekend = (day === 0 || day === 6);
      } catch (e) { /* Fallback to standard validation if date is invalid */ }
    }

    // Default Thresholds (percentages of sampled items).
    let thresholds = {
      maxZeroPct: overrides.maxZeroPct ?? 99,
      maxNullPct: overrides.maxNullPct ?? 90,
      maxNanPct: overrides.maxNanPct ?? 0,
      maxFlatlinePct: overrides.maxFlatlinePct ?? 95
    };

    // Apply Weekend Overrides if applicable
    if (isWeekend && overrides.weekend) {
      thresholds = { ...thresholds, ...overrides.weekend };
    }

    // 4. Calculate Stats
    const zeroPct = (zeroCount / analyzedCount) * 100;
    const nullPct = (nullCount / analyzedCount) * 100;
    const nanPct = (nanCount / analyzedCount) * 100;

    // 5. Variance / Flatline Analysis — flag when nearly all non-zero numeric
    // outputs are identical (an all-zero "flatline" is handled by zeroPct).
    let isFlatline = false;
    if (numericValues.length > 5) {
      const first = numericValues[0];
      const identicalCount = numericValues.filter(v => Math.abs(v - first) < 0.000001).length;
      const flatlinePct = (identicalCount / numericValues.length) * 100;

      if (flatlinePct > thresholds.maxFlatlinePct && Math.abs(first) > 0.0001) { isFlatline = true; }
    }

    // 6. Evaluations — order matters: NaN (math error) trumps zero/null gates.
    // Note: We include the applied thresholds in the error message for clarity
    if (nanPct > thresholds.maxNanPct) { return { valid: false, reason: `Mathematical Error: ${nanPct.toFixed(1)}% of sampled results contain NaN (Limit: ${thresholds.maxNanPct}%).` }; }
    if (zeroPct > thresholds.maxZeroPct) { return { valid: false, reason: `Data Integrity: ${zeroPct.toFixed(1)}% of results are 0 (Limit: ${thresholds.maxZeroPct}%${isWeekend ? ' [Weekend Mode]' : ''}).` }; }
    if (nullPct > thresholds.maxNullPct) { return { valid: false, reason: `Data Integrity: ${nullPct.toFixed(1)}% of results are Empty/Null (Limit: ${thresholds.maxNullPct}%${isWeekend ? ' [Weekend Mode]' : ''}).` }; }

    if (isFlatline) { return { valid: false, reason: `Anomaly: Detected Result Flatline. >${thresholds.maxFlatlinePct}% of outputs are identical (non-zero).` }; }

    // Distribution-style computations must not be overwhelmingly empty vectors.
    if (calcName.includes('profile') || calcName.includes('distribution')) {
      const vectorEmptyPct = (emptyVectorCount / analyzedCount) * 100;
      if (vectorEmptyPct > 90) {
        return { valid: false, reason: `Data Integrity: ${vectorEmptyPct.toFixed(1)}% of distribution profiles are empty.` };
      }
    }

    return { valid: true };
  }
}
129
-
130
// Named export so callers can destructure: const { HeuristicValidator } = require(...)
module.exports = { HeuristicValidator };
@@ -1,142 +0,0 @@
1
- /**
2
- * @fileoverview Utility for recording computation run attempts (The Audit Logger).
3
- * UPDATED: Stores 'trigger', 'execution' stats, 'cost' metrics, and 'forensics'.
4
- * UPDATED: Emits Structured Logs for GCP Observability.
5
- */
6
-
7
- const { FieldValue } = require('../utils/utils');
8
- const os = require('os');
9
-
10
- const AUDIT_COLLECTION = 'computation_audit_logs';
11
-
12
/**
 * Normalizes an error message into a safe Firestore map-field key.
 *
 * Falsy input yields 'Unknown_Error'. Otherwise the message is truncated to
 * 100 characters, characters that are illegal in Firestore field paths
 * (. / [ ] * `) are replaced with underscores, and the result is trimmed.
 *
 * @param {*} message - Error message (anything with a toString()).
 * @returns {string} Sanitized key suitable for `errorCounts.<key>` updates.
 */
function sanitizeErrorKey(message) {
  if (!message) {
    return 'Unknown_Error';
  }
  const truncated = message.toString().substring(0, 100);
  return truncated.replace(/[./\[\]*`]/g, '_').trim();
}
17
-
18
/**
 * Records a run attempt with detailed metrics and aggregated stats.
 *
 * Writes two documents in one Firestore batch: an immutable per-run history
 * entry (`computation_audit_logs/<name>/history/<runId>`) and a merged
 * aggregate stats update on the computation's root audit document. Also emits
 * a structured JSON log line for GCP Cloud Logging. Never throws: a failed
 * audit write is logged to console.error and swallowed.
 *
 * @param {Object} db - Firestore client.
 * @param {Object} context - { date, computation, pass } identifying the run.
 * @param {string} status - Run outcome, e.g. 'SUCCESS' (anything else counts as failure).
 * @param {Error|null} [error] - Optional failure; may carry `stage` and `code`.
 * @param {Object} [detailedMetrics] - Metrics bundle: durationMs, peakMemoryMB,
 *   storage, validation, execution, io, composition (all optional).
 * @param {string} [triggerReason='Unknown'] - Free text; 'Layer'/'New' substrings
 *   classify the trigger type as CASCADE/INIT.
 * @param {string} [resourceTier='standard'] - Resource tier label for the run.
 */
async function recordRunAttempt(db, context, status, error = null, detailedMetrics = { durationMs: 0 }, triggerReason = 'Unknown', resourceTier = 'standard') {
  // Silently no-op when prerequisites are missing (audit is best-effort).
  if (!db || !context) return;

  const { date: targetDate, computation, pass } = context;
  const now = new Date();
  const triggerTimestamp = now.getTime();

  const computationDocRef = db.collection(AUDIT_COLLECTION).doc(computation);
  // runId combines date + epoch millis, so repeat runs for a date stay distinct.
  const runId = `${targetDate}_${triggerTimestamp}`;
  const runDocRef = computationDocRef.collection('history').doc(runId);

  // Identify the executing worker: Cloud Functions target, Cloud Run revision,
  // or host name as a last resort.
  const workerId = process.env.FUNCTION_TARGET || process.env.K_REVISION || os.hostname();

  let sizeMB = 0;
  if (detailedMetrics.storage && detailedMetrics.storage.sizeBytes) { sizeMB = Number((detailedMetrics.storage.sizeBytes / (1024 * 1024)).toFixed(4)); }

  // Data-integrity failures are also surfaced as anomalies on the run entry.
  const anomalies = detailedMetrics.validation?.anomalies || [];
  if (error && error.message && error.message.includes('Data Integrity')) { anomalies.push(error.message); }

  const rawExecStats = detailedMetrics.execution || {};
  const timings = rawExecStats.timings || {};

  // Immutable per-run audit record (schema v2.3).
  const runEntry = {
    runId: runId,
    computationName: computation,
    pass: String(pass),
    workerId: workerId,
    targetDate: targetDate,
    triggerTime: now.toISOString(),
    durationMs: detailedMetrics.durationMs || 0,
    status: status,

    resourceTier: resourceTier,
    peakMemoryMB: detailedMetrics.peakMemoryMB || 0,

    firestoreOps: {
      reads: detailedMetrics.io?.reads || 0,
      writes: detailedMetrics.io?.writes || 0,
      deletes: detailedMetrics.io?.deletes || 0
    },

    composition: detailedMetrics.composition || null,

    // Trigger classification is substring-based on the free-text reason.
    trigger: {
      reason: triggerReason || 'Unknown',
      type: (triggerReason && triggerReason.includes('Layer')) ? 'CASCADE' : ((triggerReason && triggerReason.includes('New')) ? 'INIT' : 'UPDATE')
    },

    executionStats: {
      processedUsers: rawExecStats.processedUsers || 0,
      skippedUsers: rawExecStats.skippedUsers || 0,
      timings: {
        setupMs: Math.round(timings.setup || 0),
        streamMs: Math.round(timings.stream || 0),
        processingMs: Math.round(timings.processing || 0)
      }
    },

    outputStats: {
      sizeMB: sizeMB,
      isSharded: !!detailedMetrics.storage?.isSharded,
      shardCount: detailedMetrics.storage?.shardCount || 1,
      keysWritten: detailedMetrics.storage?.keys || 0
    },

    anomalies: anomalies,
    _schemaVersion: '2.3'
  };

  if (error) {
    runEntry.error = {
      message: error.message || 'Unknown Error',
      stage: error.stage || 'UNKNOWN',
      // Stack traces are capped at 1000 chars to bound document size.
      stack: error.stack ? error.stack.substring(0, 1000) : null,
      code: error.code || null
    };
  }

  // 1. [NEW] Emit Structured Log for GCP Observability
  // This allows you to query "jsonPayload.event = 'AUDIT_COMPLETE'" in Cloud Logging
  // independent of Firestore status.
  console.log(JSON.stringify({
    event: 'AUDIT_COMPLETE',
    severity: status === 'SUCCESS' ? 'INFO' : 'ERROR',
    computation: computation,
    runId: runId,
    status: status,
    metrics: {
      duration: runEntry.durationMs,
      memory: runEntry.peakMemoryMB,
      writes: runEntry.firestoreOps.writes
    },
    error: runEntry.error ? runEntry.error.message : null
  }));

  // 2. Existing Firestore Aggregation Logic
  // NOTE(review): totalCostAccumulated is incremented by 0 — placeholder for a
  // future cost metric; confirm before removing.
  const statsUpdate = {
    lastRunAt: now,
    lastRunStatus: status,
    totalRuns: FieldValue.increment(1),
    totalCostAccumulated: FieldValue.increment(0)
  };

  if (status === 'SUCCESS') {
    statsUpdate.successCount = FieldValue.increment(1);
  } else {
    statsUpdate.failureCount = FieldValue.increment(1);
    if (error) {
      // Bucket failures by sanitized message for per-error counters.
      const safeKey = sanitizeErrorKey(error.message);
      statsUpdate[`errorCounts.${safeKey}`] = FieldValue.increment(1);
    }
  }

  try {
    // Atomic: the history entry and the aggregate update land together.
    const batch = db.batch();
    batch.set(runDocRef, runEntry);
    batch.set(computationDocRef, statsUpdate, { merge: true });
    await batch.commit();
  } catch (e) {
    // Audit logging must never crash the caller; surface loudly and move on.
    console.error(`[RunRecorder] ❌ CRITICAL: Failed to write audit log for ${computation}`, e);
  }
}
141
-
142
// Public API: sanitizeErrorKey stays module-private.
module.exports = { recordRunAttempt };
@@ -1,52 +0,0 @@
1
- /**
2
- * @fileoverview Manages computation status tracking in Firestore.
3
- * UPDATED: Supports Schema V2 (Object with Category & Composition) for deep auditing.
4
- */
5
-
6
/**
 * Loads the per-date computation status document and normalizes legacy
 * V1 entries (bare hash strings) into the V2 object shape
 * ({ hash, category, composition }).
 *
 * @param {string} dateStr - Status document id (the target date key).
 * @param {Object} config - May supply `computationStatusCollection`
 *   (defaults to 'computation_status').
 * @param {Object} deps - Destructured for the Firestore client { db }.
 * @returns {Promise<Object>} Map of computation name -> V2 status entry;
 *   empty object when the document does not exist.
 * @throws {Error} When dateStr is missing.
 */
async function fetchComputationStatus(dateStr, config, { db }) {
  if (!dateStr) throw new Error('fetchStatus requires a key');
  const collection = config.computationStatusCollection || 'computation_status';
  const snap = await db.collection(collection).doc(dateStr).get();
  if (!snap.exists) return {};

  const normalized = {};
  // Normalize V1 (String) to V2 (Object)
  for (const [name, value] of Object.entries(snap.data())) {
    normalized[name] = (typeof value === 'string')
      ? { hash: value, category: null, composition: null } // Legacy entry
      : value;
  }
  return normalized;
}
26
-
27
/**
 * Merges status updates into the per-date computation status document.
 *
 * Accepts either V2 entries ({ hash, category, composition, ... }) or legacy
 * bare hash strings; both are stamped with `lastUpdated` before writing.
 *
 * @param {string} dateStr - Status document id (the target date key).
 * @param {Object} updates - Map of computation name -> entry (string or object).
 * @param {Object} config - May supply `computationStatusCollection`
 *   (defaults to 'computation_status').
 * @param {Object} deps - Destructured for the Firestore client { db }.
 * @returns {Promise<true|undefined>} true on write; undefined when there was
 *   nothing to write.
 * @throws {Error} When dateStr is missing.
 */
async function updateComputationStatus(dateStr, updates, config, { db }) {
  if (!dateStr) throw new Error('updateStatus requires a key');
  if (!updates || Object.keys(updates).length === 0) return;

  const collection = config.computationStatusCollection || 'computation_status';
  const docRef = db.collection(collection).doc(dateStr);

  const safeUpdates = {};
  for (const [key, val] of Object.entries(updates)) {
    safeUpdates[key] = (typeof val === 'string')
      // Legacy Call Fallback
      ? { hash: val, category: 'unknown', lastUpdated: new Date() }
      // V2 Call: val should contain { hash, category, composition }
      : { ...val, lastUpdated: new Date() };
  }

  await docRef.set(safeUpdates, { merge: true });
  return true;
}
51
-
52
// Public API: read + merge-write access to the per-date status document.
module.exports = { fetchComputationStatus, updateComputationStatus };
@@ -1,6 +0,0 @@
1
/**
 * FILENAME: computation-system/reporter_epoch.js
 * PURPOSE: Master override for BuildReporter.
 * Increment this string to force a full re-analysis regardless of code or data stability.
 *
 * NOTE: consumers treat this as an opaque version token — only equality with
 * the previously recorded value matters, not the numeric suffix itself.
 */
module.exports = "reporter-epoch-11";