bulltrackers-module 1.0.732 → 1.0.733

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/functions/orchestrator/index.js +19 -17
  2. package/index.js +8 -29
  3. package/package.json +1 -1
  4. package/functions/computation-system/WorkflowOrchestrator.js +0 -213
  5. package/functions/computation-system/config/monitoring_config.js +0 -31
  6. package/functions/computation-system/config/validation_overrides.js +0 -10
  7. package/functions/computation-system/context/ContextFactory.js +0 -143
  8. package/functions/computation-system/context/ManifestBuilder.js +0 -379
  9. package/functions/computation-system/data/AvailabilityChecker.js +0 -236
  10. package/functions/computation-system/data/CachedDataLoader.js +0 -325
  11. package/functions/computation-system/data/DependencyFetcher.js +0 -455
  12. package/functions/computation-system/executors/MetaExecutor.js +0 -279
  13. package/functions/computation-system/executors/PriceBatchExecutor.js +0 -108
  14. package/functions/computation-system/executors/StandardExecutor.js +0 -465
  15. package/functions/computation-system/helpers/computation_dispatcher.js +0 -750
  16. package/functions/computation-system/helpers/computation_worker.js +0 -375
  17. package/functions/computation-system/helpers/monitor.js +0 -64
  18. package/functions/computation-system/helpers/on_demand_helpers.js +0 -154
  19. package/functions/computation-system/layers/extractors.js +0 -1097
  20. package/functions/computation-system/layers/index.js +0 -40
  21. package/functions/computation-system/layers/mathematics.js +0 -522
  22. package/functions/computation-system/layers/profiling.js +0 -537
  23. package/functions/computation-system/layers/validators.js +0 -170
  24. package/functions/computation-system/legacy/AvailabilityCheckerOld.js +0 -388
  25. package/functions/computation-system/legacy/CachedDataLoaderOld.js +0 -357
  26. package/functions/computation-system/legacy/DependencyFetcherOld.js +0 -478
  27. package/functions/computation-system/legacy/MetaExecutorold.js +0 -364
  28. package/functions/computation-system/legacy/StandardExecutorold.js +0 -476
  29. package/functions/computation-system/legacy/computation_dispatcherold.js +0 -944
  30. package/functions/computation-system/logger/logger.js +0 -297
  31. package/functions/computation-system/persistence/ContractValidator.js +0 -81
  32. package/functions/computation-system/persistence/FirestoreUtils.js +0 -56
  33. package/functions/computation-system/persistence/ResultCommitter.js +0 -283
  34. package/functions/computation-system/persistence/ResultsValidator.js +0 -130
  35. package/functions/computation-system/persistence/RunRecorder.js +0 -142
  36. package/functions/computation-system/persistence/StatusRepository.js +0 -52
  37. package/functions/computation-system/reporter_epoch.js +0 -6
  38. package/functions/computation-system/scripts/UpdateContracts.js +0 -128
  39. package/functions/computation-system/services/SnapshotService.js +0 -148
  40. package/functions/computation-system/simulation/Fabricator.js +0 -285
  41. package/functions/computation-system/simulation/SeededRandom.js +0 -41
  42. package/functions/computation-system/simulation/SimRunner.js +0 -51
  43. package/functions/computation-system/system_epoch.js +0 -2
  44. package/functions/computation-system/tools/BuildReporter.js +0 -531
  45. package/functions/computation-system/tools/ContractDiscoverer.js +0 -144
  46. package/functions/computation-system/tools/DeploymentValidator.js +0 -536
  47. package/functions/computation-system/tools/FinalSweepReporter.js +0 -322
  48. package/functions/computation-system/topology/HashManager.js +0 -55
  49. package/functions/computation-system/topology/ManifestLoader.js +0 -47
  50. package/functions/computation-system/utils/data_loader.js +0 -675
  51. package/functions/computation-system/utils/schema_capture.js +0 -121
  52. package/functions/computation-system/utils/utils.js +0 -188
package/functions/computation-system/logger/logger.js
@@ -1,297 +0,0 @@
- /**
-  * @fileoverview Structured Logging System for Computation Engine
-  * UPDATED: Added support for Google Cloud Trace Context injection.
-  */
-
- const crypto = require('crypto');
-
- const LOG_LEVELS = {
-   TRACE: 0, DEBUG: 1, INFO: 2, WARN: 3, ERROR: 4, FATAL: 5
- };
-
- const PROCESS_TYPES = {
-   MANIFEST: 'manifest',
-   ORCHESTRATOR: 'orchestrator',
-   EXECUTOR: 'executor',
-   STORAGE: 'storage',
-   WORKER: 'worker',
-   ANALYSIS: 'analysis',
-   DISPATCH: 'dispatch'
- };
-
- function generateProcessId(type, identifier, date = '') {
-   const input = `${type}|${identifier}|${date}`;
-   return crypto.createHash('sha256').update(input).digest('hex').substring(0, 16);
- }
-
- function formatLogEntry(entry) {
-   return JSON.stringify({
-     timestamp: entry.timestamp,
-     level: entry.level, // Google uses 'severity', mapped below
-     severity: entry.severity,
-     processType: entry.processType,
-     processId: entry.processId,
-     computationName: entry.computationName,
-     date: entry.date,
-     message: entry.message,
-     context: entry.context,
-     metadata: entry.metadata,
-     stats: entry.stats,
-     storage: entry.storage,
-     details: entry.details,
-     // Spread the global Trace fields here
-     ...entry.globalMetadata
-   });
- }
-
- class StructuredLogger {
-   constructor(config = {}) {
-     this.config = {
-       minLevel: config.minLevel || LOG_LEVELS.INFO,
-       enableConsole: config.enableConsole !== false,
-       enableStructured: config.enableStructured !== false,
-       includeStackTrace: config.includeStackTrace !== false,
-       ...config
-     };
-     this.activeProcesses = new Map();
-
-     // [NEW] Store global metadata (like Trace IDs) for this logger instance
-     this.globalMetadata = config.globalMetadata || {};
-   }
-
-   startProcess(processType, computationName = null, date = null) {
-     const processId = (computationName || date)
-       ? generateProcessId(processType, computationName || 'general', date)
-       : crypto.randomBytes(8).toString('hex');
-
-     const processLogger = new ProcessLogger(this, processType, processId, computationName, date);
-     this.activeProcesses.set(processId, {
-       type: processType,
-       computationName,
-       date,
-       startTime: Date.now(),
-       logger: processLogger
-     });
-     return processLogger;
-   }
-
-   endProcess(processId) {
-     const process = this.activeProcesses.get(processId);
-     if (process) {
-       const duration = Date.now() - process.startTime;
-       this.activeProcesses.delete(processId);
-       return duration;
-     }
-     return null;
-   }
-
-   /**
-    * The Main Date Analysis Logger (UPDATED)
-    * Handles 'impossible' category for permanent failures.
-    */
-   logDateAnalysis(dateStr, analysisReport) {
-     const {
-       runnable = [],
-       blocked = [],
-       impossible = [], // New Category
-       reRuns = [],
-       failedDependency = [],
-       skipped = []
-     } = analysisReport;
-
-     // 1. Structured Output
-     if (this.config.enableStructured) {
-       console.log(JSON.stringify({
-         timestamp: new Date().toISOString(),
-         level: 'INFO',
-         severity: 'INFO',
-         processType: PROCESS_TYPES.ANALYSIS,
-         date: dateStr,
-         message: `Date Analysis for ${dateStr}`,
-         stats: {
-           runnable: runnable.length,
-           blocked: blocked.length,
-           impossible: impossible.length,
-           reRuns: reRuns.length,
-           failedDependency: failedDependency.length,
-           skipped: skipped.length
-         },
-         details: analysisReport,
-         ...this.globalMetadata
-       }));
-     }
-
-     // 2. Human Readable Output
-     if (this.config.enableConsole) {
-       const symbols = {
-         info: 'ℹ️', warn: '⚠️', check: '✅', block: '⛔',
-         cycle: '🔄', skip: '⏭️', dead: '💀'
-       };
-
-       console.log(`\n🔍 === DATE ANALYSIS REPORT: ${dateStr} ===`);
-
-       if (reRuns.length) {
-         console.log(`\n${symbols.cycle} [HASH MISMATCH / RE-RUNS]`);
-         reRuns.forEach(item => {
-           console.log(` • ${item.name}: Hash changed. (Old: ${item.oldHash?.substring(0,6)}... New: ${item.newHash?.substring(0,6)}...)`);
-         });
-       }
-
-       if (failedDependency.length) {
-         console.log(`\n${symbols.block} [FAILED DEPENDENCIES] (Upstream failure)`);
-         failedDependency.forEach(item => {
-           console.log(` • ${item.name}: Missing ${item.missing ? item.missing.join(', ') : 'dependencies'}`);
-         });
-       }
-
-       // NEW: Impossible items
-       if (impossible.length) {
-         console.log(`\n${symbols.dead} [IMPOSSIBLE] (Permanent Failure - Will not retry)`);
-         impossible.forEach(item => {
-           console.log(` • ${item.name}: ${item.reason}`);
-         });
-       }
-
-       if (runnable.length) {
-         console.log(`\n${symbols.check} [READY TO RUN]`);
-         runnable.forEach(item => console.log(` • ${item.name}`));
-       }
-
-       if (blocked.length) {
-         console.log(`\n${symbols.warn} [BLOCKED] (Waiting for data - Retriable)`);
-         blocked.forEach(item => {
-           console.log(` • ${item.name}: ${item.reason}`);
-         });
-       }
-
-       if (skipped.length) {
-         if (skipped.length > 10) {
-           console.log(`\n${symbols.skip} [SKIPPED] (${skipped.length} calculations up-to-date)`);
-         } else {
-           console.log(`\n${symbols.skip} [SKIPPED]`);
-           skipped.forEach(item => console.log(` • ${item.name}`));
-         }
-       }
-
-       console.log(`\n=============================================\n`);
-     }
-   }
-
-   logStorage(processId, calcName, date, path, sizeBytes, isSharded) {
-     this.log(LOG_LEVELS.INFO, `Results stored for ${calcName}`, {
-       storage: {
-         path,
-         sizeBytes,
-         isSharded,
-         sizeMB: (sizeBytes / 1024 / 1024).toFixed(2)
-       }
-     }, PROCESS_TYPES.STORAGE, processId, calcName, date);
-   }
-
-   log(level, message, context = {}, processType = null, processId = null, computationName = null, date = null) {
-     const numericLevel = typeof level === 'string' ? LOG_LEVELS[level] : level;
-     if (numericLevel < this.config.minLevel) return;
-
-     let finalContext = context;
-     let finalMetadata = {};
-     let finalStats = undefined;
-     let finalStorage = undefined;
-
-     if (context && (context.processId || context.storage || context.stats)) {
-       if (context.processId) processId = context.processId;
-       if (context.processType) processType = context.processType;
-       if (context.computationName) computationName = context.computationName;
-       if (context.date) date = context.date;
-       if (context.stats) finalStats = context.stats;
-       if (context.storage) finalStorage = context.storage;
-       finalContext = { ...context };
-       delete finalContext.processId; delete finalContext.processType;
-       delete finalContext.computationName; delete finalContext.date;
-       delete finalContext.stats; delete finalContext.storage;
-     }
-
-     // Map Level to Google Severity
-     let severity = 'INFO';
-     if (numericLevel === LOG_LEVELS.WARN) severity = 'WARNING';
-     else if (numericLevel === LOG_LEVELS.ERROR) severity = 'ERROR';
-     else if (numericLevel === LOG_LEVELS.FATAL) severity = 'CRITICAL';
-     else if (numericLevel === LOG_LEVELS.DEBUG) severity = 'DEBUG';
-
-     const entry = {
-       timestamp: new Date().toISOString(),
-       level: Object.keys(LOG_LEVELS).find(k => LOG_LEVELS[k] === numericLevel) || 'INFO',
-       severity: severity,
-       processType,
-       processId,
-       computationName,
-       date,
-       message,
-       context: typeof finalContext === 'string' ? { error: finalContext } : finalContext,
-       metadata: finalMetadata,
-       stats: finalStats,
-       storage: finalStorage,
-       globalMetadata: this.globalMetadata // [NEW] Pass trace context
-     };
-
-     if (numericLevel >= LOG_LEVELS.ERROR && this.config.includeStackTrace && finalContext.stack) {
-       entry.metadata.stackTrace = finalContext.stack;
-     }
-
-     if (this.config.enableConsole) {
-       this._consoleLog(entry);
-     }
-
-     if (this.config.enableStructured) {
-       // [CRITICAL] This is what Google Cloud Logging picks up
-       console.log(formatLogEntry(entry));
-     }
-   }
-
-   _consoleLog(entry) {
-     const symbols = { TRACE: '🔍', DEBUG: '🐛', INFO: 'ℹ️', WARN: '⚠️', ERROR: '❌', FATAL: '💀' };
-     const colors = {
-       TRACE: '\x1b[90m', DEBUG: '\x1b[36m', INFO: '\x1b[32m',
-       WARN: '\x1b[33m', ERROR: '\x1b[31m', FATAL: '\x1b[35m'
-     };
-     const reset = '\x1b[0m';
-     const color = colors[entry.level] || '';
-     const symbol = symbols[entry.level] || 'ℹ️';
-
-     let output = `${color}${symbol} [${entry.level}]${reset}`;
-     if (entry.processType) output += ` [${entry.processType}]`;
-     if (entry.computationName) output += ` [${entry.computationName}]`;
-
-     output += ` ${entry.message}`;
-
-     console.log(output);
-     if (entry.context && Object.keys(entry.context).length > 0) {
-       console.log(` Context:`, entry.context);
-     }
-   }
- }
-
- class ProcessLogger {
-   constructor(parent, processType, processId, computationName, date) {
-     this.parent = parent;
-     this.processType = processType;
-     this.processId = processId;
-     this.computationName = computationName;
-     this.date = date;
-     this.startTime = Date.now();
-     this.metrics = { operations: 0, errors: 0, warnings: 0 };
-   }
-   log(level, message, context = {}) {
-     this.metrics.operations++;
-     this.parent.log(level, message, context, this.processType, this.processId, this.computationName, this.date);
-   }
-   complete(success = true, finalMessage = null) {
-     const duration = Date.now() - this.startTime;
-     this.log(success ? LOG_LEVELS.INFO : LOG_LEVELS.ERROR,
-       finalMessage || (success ? 'Process completed' : 'Process failed'),
-       { stats: { durationMs: duration, ...this.metrics, success } });
-     this.parent.endProcess(this.processId);
-     return duration;
-   }
- }
-
- module.exports = { StructuredLogger, ProcessLogger, LOG_LEVELS, PROCESS_TYPES, generateProcessId };
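For orientation, here is a minimal sketch of how the removed StructuredLogger was typically driven, reconstructed only from the API visible in the deleted file above (constructor config, startProcess, log, complete). The computation name and the trace field placed in globalMetadata are illustrative assumptions, not values taken from this package.

  // Minimal sketch, assuming the pre-1.0.733 path to logger.js still resolved.
  const { StructuredLogger, LOG_LEVELS } = require('./functions/computation-system/logger/logger.js');

  // globalMetadata is spread into every structured line, so Cloud Logging can correlate
  // entries; the trace field name below is an assumption for illustration only.
  const logger = new StructuredLogger({
    minLevel: LOG_LEVELS.INFO,
    globalMetadata: { 'logging.googleapis.com/trace': 'projects/example/traces/abc123' }
  });

  // startProcess returns a ProcessLogger scoped to one computation and date.
  const proc = logger.startProcess('executor', 'dailyPnl', '2024-01-01'); // hypothetical computation name
  proc.log(LOG_LEVELS.INFO, 'Loaded inputs', { rows: 1200 });
  proc.complete(true, 'Computation finished'); // logs duration + operation metrics, then unregisters the process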
package/functions/computation-system/persistence/ContractValidator.js
@@ -1,81 +0,0 @@
- /**
-  * @fileoverview Enforces the contracts discovered by the offline tool.
-  * Designed to be permissive with volatility ("Anomalies") but strict with logic ("Violations").
-  */
- class ContractValidator {
-
-   /**
-    * @param {Object} result - The production output (single item or batch).
-    * @param {Object} contract - The loaded contract JSON.
-    * @returns {Object} { valid: boolean, reason: string }
-    */
-   static validate(result, contract) {
-     if (!result || !contract) return { valid: true };
-
-     // Handle Batches (StandardExecutor produces map of User -> Result)
-     const items = Object.values(result);
-     if (items.length === 0) return { valid: true };
-
-     // We check a sample to save CPU, or check all if critical
-     // For "Cohort" logic, we usually check all because one bad apple spoils the average.
-     for (const item of items) {
-       const check = this._validateItem(item, contract);
-       if (!check.valid) return check;
-     }
-
-     return { valid: true };
-   }
-
-   static _validateItem(item, contract) {
-     // 1. Structure Check
-     if (contract.requiredKeys) {
-       for (const key of contract.requiredKeys) {
-         if (item[key] === undefined) {
-           return { valid: false, reason: `Schema Violation: Missing key '${key}'` };
-         }
-       }
-     }
-
-     // 2. Numeric Physics Check (Hard Bounds)
-     if (contract.numericBounds) {
-       for (const [key, bounds] of Object.entries(contract.numericBounds)) {
-         const val = item[key];
-         if (typeof val !== 'number') continue;
-
-         if (val < bounds.min) {
-           return { valid: false, reason: `Physics Violation: ${key} (${val}) is below hard limit ${bounds.min}` };
-         }
-         if (val > bounds.max) {
-           return { valid: false, reason: `Physics Violation: ${key} (${val}) is above hard limit ${bounds.max}` };
-         }
-       }
-     }
-
-     // 3. Statistical Sanity Check (Soft Bounds)
-     // We generally DO NOT BLOCK on this for financial data, unless it's egregious.
-     // We block if it's "Mathematically Impossible" based on the distribution.
-     if (contract.distributions) {
-       for (const [key, dist] of Object.entries(contract.distributions)) {
-         const val = item[key];
-         if (typeof val !== 'number') continue;
-
-         const diff = Math.abs(val - dist.mean);
-         const sigmas = diff / dist.stdDev;
-
-         // 15 Sigma is our "Ridiculousness Threshold".
-         // Even crypto doesn't move 15 standard deviations in one calculation step
-         // unless the data is corrupt (e.g. integer overflow, or bad scraping).
-         if (sigmas > 15 && diff > 1.0) { // Ensure diff is material
-           return {
-             valid: false,
-             reason: `Statistical Impossibility: ${key} is ${sigmas.toFixed(1)} sigmas from mean. Value: ${val}, Mean: ${dist.mean}`
-           };
-         }
-       }
-     }
-
-     return { valid: true };
-   }
- }
-
- module.exports = ContractValidator;
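A minimal sketch of how ContractValidator.validate was called, based solely on the keys the deleted code reads (requiredKeys, numericBounds.min/max, distributions.mean/stdDev); the contract values and result field names below are hypothetical.

  const ContractValidator = require('./functions/computation-system/persistence/ContractValidator.js');

  // Hypothetical contract: hard "physics" bounds block outright, while distributions
  // only block when a value sits more than 15 sigmas from the recorded mean.
  const contract = {
    requiredKeys: ['pnl', 'exposure'],
    numericBounds: { exposure: { min: 0, max: 1 } },
    distributions: { pnl: { mean: 0.002, stdDev: 0.01 } }
  };

  // validate treats the result as a batch (map of user -> item) and checks every item.
  const result = {
    userA: { pnl: 0.004, exposure: 0.5 },
    userB: { pnl: 9.99, exposure: 0.4 } // ~1000 sigmas from the mean, so this item fails
  };

  const { valid, reason } = ContractValidator.validate(result, contract);
  if (!valid) console.warn('Contract violation:', reason);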
package/functions/computation-system/persistence/FirestoreUtils.js
@@ -1,56 +0,0 @@
- /**
-  * @fileoverview Low-level Firestore interactions.
-  */
- const { withRetry } = require('../utils/utils.js');
-
- async function commitBatchInChunks(config, deps, writes, operationName) {
-   const { db, logger, calculationUtils } = deps;
-   const retryFn = calculationUtils ? calculationUtils.withRetry : (fn) => fn();
-
-   if (!writes || !writes.length) {
-     logger.log('WARN', `[${operationName}] No writes to commit.`);
-     return;
-   }
-
-   const MAX_BATCH_OPS = 300;
-   const MAX_BATCH_BYTES = 9 * 1024 * 1024;
-
-   let currentBatch = db.batch();
-   let currentOpsCount = 0;
-   let currentBytesEst = 0;
-   let batchIndex = 1;
-
-   const commitAndReset = async () => {
-     if (currentOpsCount > 0) {
-       try {
-         await retryFn(
-           () => currentBatch.commit(),
-           `${operationName} (Chunk ${batchIndex})`
-         );
-         logger.log('INFO', `[${operationName}] Committed chunk ${batchIndex} (${currentOpsCount} ops, ~${(currentBytesEst / 1024 / 1024).toFixed(2)} MB).`);
-         batchIndex++;
-       } catch (err) {
-         logger.log('ERROR', `[${operationName}] Failed to commit chunk ${batchIndex}. Size: ${(currentBytesEst / 1024 / 1024).toFixed(2)} MB.`, { error: err.message });
-         throw err;
-       }
-     }
-     currentBatch = db.batch();
-     currentOpsCount = 0;
-     currentBytesEst = 0;
-   };
-
-   for (const write of writes) {
-     let docSize = 100;
-     try { if (write.data) docSize = JSON.stringify(write.data).length; } catch (e) { }
-     if (docSize > 900 * 1024) { logger.log('WARN', `[${operationName}] Large document detected (~${(docSize / 1024).toFixed(2)} KB).`); }
-     if ((currentOpsCount + 1 > MAX_BATCH_OPS) || (currentBytesEst + docSize > MAX_BATCH_BYTES)) { await commitAndReset(); }
-     const options = write.options || { merge: true };
-     currentBatch.set(write.ref, write.data, options);
-     currentOpsCount++;
-     currentBytesEst += docSize;
-   }
-
-   await commitAndReset();
- }
-
- module.exports = { commitBatchInChunks };
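Finally, a sketch of how commitBatchInChunks was invoked, using only the shapes visible in the deleted code (deps = { db, logger, calculationUtils } and writes items of { ref, data, options }); the collection name and results shape are assumptions for illustration.

  const { commitBatchInChunks } = require('./functions/computation-system/persistence/FirestoreUtils.js');

  // db is assumed to be a Firestore instance (e.g. admin.firestore()); logger needs a
  // log(level, message, context) method, such as the StructuredLogger removed above.
  async function saveResults(db, logger, resultsById) {
    const writes = Object.entries(resultsById).map(([id, data]) => ({
      ref: db.collection('computation_results').doc(id), // hypothetical collection name
      data,
      options: { merge: true }
    }));

    // Passing calculationUtils: null means each chunk commits once with no retry wrapper.
    await commitBatchInChunks({}, { db, logger, calculationUtils: null }, writes, 'SaveResults');
  }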