bulltrackers-module 1.0.732 → 1.0.733

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registry.
Files changed (52)
  1. package/functions/orchestrator/index.js +19 -17
  2. package/index.js +8 -29
  3. package/package.json +1 -1
  4. package/functions/computation-system/WorkflowOrchestrator.js +0 -213
  5. package/functions/computation-system/config/monitoring_config.js +0 -31
  6. package/functions/computation-system/config/validation_overrides.js +0 -10
  7. package/functions/computation-system/context/ContextFactory.js +0 -143
  8. package/functions/computation-system/context/ManifestBuilder.js +0 -379
  9. package/functions/computation-system/data/AvailabilityChecker.js +0 -236
  10. package/functions/computation-system/data/CachedDataLoader.js +0 -325
  11. package/functions/computation-system/data/DependencyFetcher.js +0 -455
  12. package/functions/computation-system/executors/MetaExecutor.js +0 -279
  13. package/functions/computation-system/executors/PriceBatchExecutor.js +0 -108
  14. package/functions/computation-system/executors/StandardExecutor.js +0 -465
  15. package/functions/computation-system/helpers/computation_dispatcher.js +0 -750
  16. package/functions/computation-system/helpers/computation_worker.js +0 -375
  17. package/functions/computation-system/helpers/monitor.js +0 -64
  18. package/functions/computation-system/helpers/on_demand_helpers.js +0 -154
  19. package/functions/computation-system/layers/extractors.js +0 -1097
  20. package/functions/computation-system/layers/index.js +0 -40
  21. package/functions/computation-system/layers/mathematics.js +0 -522
  22. package/functions/computation-system/layers/profiling.js +0 -537
  23. package/functions/computation-system/layers/validators.js +0 -170
  24. package/functions/computation-system/legacy/AvailabilityCheckerOld.js +0 -388
  25. package/functions/computation-system/legacy/CachedDataLoaderOld.js +0 -357
  26. package/functions/computation-system/legacy/DependencyFetcherOld.js +0 -478
  27. package/functions/computation-system/legacy/MetaExecutorold.js +0 -364
  28. package/functions/computation-system/legacy/StandardExecutorold.js +0 -476
  29. package/functions/computation-system/legacy/computation_dispatcherold.js +0 -944
  30. package/functions/computation-system/logger/logger.js +0 -297
  31. package/functions/computation-system/persistence/ContractValidator.js +0 -81
  32. package/functions/computation-system/persistence/FirestoreUtils.js +0 -56
  33. package/functions/computation-system/persistence/ResultCommitter.js +0 -283
  34. package/functions/computation-system/persistence/ResultsValidator.js +0 -130
  35. package/functions/computation-system/persistence/RunRecorder.js +0 -142
  36. package/functions/computation-system/persistence/StatusRepository.js +0 -52
  37. package/functions/computation-system/reporter_epoch.js +0 -6
  38. package/functions/computation-system/scripts/UpdateContracts.js +0 -128
  39. package/functions/computation-system/services/SnapshotService.js +0 -148
  40. package/functions/computation-system/simulation/Fabricator.js +0 -285
  41. package/functions/computation-system/simulation/SeededRandom.js +0 -41
  42. package/functions/computation-system/simulation/SimRunner.js +0 -51
  43. package/functions/computation-system/system_epoch.js +0 -2
  44. package/functions/computation-system/tools/BuildReporter.js +0 -531
  45. package/functions/computation-system/tools/ContractDiscoverer.js +0 -144
  46. package/functions/computation-system/tools/DeploymentValidator.js +0 -536
  47. package/functions/computation-system/tools/FinalSweepReporter.js +0 -322
  48. package/functions/computation-system/topology/HashManager.js +0 -55
  49. package/functions/computation-system/topology/ManifestLoader.js +0 -47
  50. package/functions/computation-system/utils/data_loader.js +0 -675
  51. package/functions/computation-system/utils/schema_capture.js +0 -121
  52. package/functions/computation-system/utils/utils.js +0 -188
package/functions/computation-system/utils/schema_capture.js
@@ -1,121 +0,0 @@
- /**
-  * @fileoverview Schema capture utility for computation outputs
-  * This module batches and stores pre-defined static schemas in Firestore.
-  * UPDATED: Added schema validation to prevent silent batch failures.
-  */
-
- /**
-  * Recursively removes undefined values from an object.
-  * Firestore doesn't allow undefined values, so we filter them out entirely.
-  * @param {any} data - Data to sanitize
-  * @returns {any} Sanitized data with undefined values removed
-  */
- function removeUndefinedValues(data) {
-   if (data === undefined) return undefined; // Will be filtered out
-   if (data === null) return null;
-   if (data instanceof Date) return data;
-
-   if (Array.isArray(data)) {
-     return data.map(item => removeUndefinedValues(item)).filter(item => item !== undefined);
-   }
-
-   if (typeof data === 'object') {
-     const sanitized = {};
-     for (const [key, value] of Object.entries(data)) {
-       const sanitizedValue = removeUndefinedValues(value);
-       if (sanitizedValue !== undefined) {
-         sanitized[key] = sanitizedValue;
-       }
-     }
-     return sanitized;
-   }
-
-   return data;
- }
-
- /**
-  * Validates a schema object before storage.
-  * Checks for circular references and size limits.
-  * @param {object} schema
-  * @returns {object} { valid: boolean, reason: string }
-  */
- function validateSchema(schema) {
-   try {
-     // 1. Detect circular references
-     const jsonStr = JSON.stringify(schema);
-
-     // 2. Ensure it's not too large (Firestore limit: 1MB, reserve 100KB for metadata)
-     const size = Buffer.byteLength(jsonStr);
-     if (size > 900 * 1024) { return { valid: false, reason: `Schema exceeds 900KB limit (${(size/1024).toFixed(2)} KB)` }; }
-     return { valid: true };
-   } catch (e) { return { valid: false, reason: `Serialization failed: ${e.message}` }; }
- }
-
- /**
-  * Batch store schemas for multiple computations.
-  *
-  * @param {object} dependencies - Contains db, logger
-  * @param {object} config - Configuration object
-  * @param {Array} schemas - Array of {name, category, schema, metadata} objects
-  */
- async function batchStoreSchemas(dependencies, config, schemas) {
-   const { db, logger } = dependencies;
-
-   if (config.captureSchemas === false) {
-     logger.log('INFO', '[SchemaCapture] Schema capture is disabled. Skipping.');
-     return;
-   }
-
-   const batch = db.batch();
-   const schemaCollection = config.schemaCollection || 'computation_schemas';
-   let validCount = 0;
-
-   for (const item of schemas) {
-     try {
-       if (!item.schema) {
-         logger.log('WARN', `[SchemaCapture] No schema provided for ${item.name}. Skipping.`);
-         continue;
-       }
-
-       // [IMPROVED] Validate before adding to batch
-       const validation = validateSchema(item.schema);
-       if (!validation.valid) {
-         logger.log('WARN', `[SchemaCapture] Invalid schema for ${item.name}: ${validation.reason}`);
-         continue;
-       }
-
-       const docRef = db.collection(schemaCollection).doc(item.name);
-
-       // Critical: Always overwrite 'lastUpdated' to now
-       // Sanitize metadata to remove undefined values (Firestore doesn't allow undefined)
-       const sanitizedMetadata = item.metadata ? removeUndefinedValues(item.metadata) : {};
-       const docData = removeUndefinedValues({
-         computationName: item.name,
-         category: item.category,
-         schema: item.schema,
-         metadata: sanitizedMetadata,
-         lastUpdated: new Date()
-       });
-
-       batch.set(docRef, docData, { merge: true });
-
-       validCount++;
-
-     } catch (error) {
-       logger.log('WARN', `[SchemaCapture] Failed to add schema to batch for ${item.name}`, { errorMessage: error.message });
-     }
-   }
-
-   if (validCount > 0) {
-     try {
-       await batch.commit();
-       logger.log('INFO', `[SchemaCapture] Batch stored ${validCount} computation schemas`);
-     } catch (error) {
-       logger.log('ERROR', '[SchemaCapture] Failed to commit schema batch', { errorMessage: error.message });
-     }
-   }
- }
-
- module.exports = {
-   batchStoreSchemas
- };
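Removed in this release: the schema-capture utility above exposed a single entry point, batchStoreSchemas(dependencies, config, schemas). For reference, a minimal usage sketch against the pre-1.0.733 layout; the require path mirrors the deleted file, while the Firestore client setup, logger shape, and the 'daily-pnl' schema entry are illustrative assumptions, not part of the package:

const { Firestore } = require('@google-cloud/firestore');
// Path as it existed before this release; hypothetical caller code.
const { batchStoreSchemas } = require('bulltrackers-module/functions/computation-system/utils/schema_capture');

const db = new Firestore(); // assumes default application credentials
const logger = { log: (level, msg, meta) => console.log(level, msg, meta || '') };

batchStoreSchemas(
  { db, logger },
  { captureSchemas: true, schemaCollection: 'computation_schemas' },
  [{ name: 'daily-pnl', category: 'portfolio', schema: { type: 'object' }, metadata: { version: 1 } }]
).catch(err => console.error('[SchemaCapture] batch failed', err));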
package/functions/computation-system/utils/utils.js
@@ -1,188 +0,0 @@
- /**
-  * FILENAME: computation-system/utils/utils.js
-  * UPDATED: Added undefined sanitization to prevent Firestore write errors.
-  */
-
- const { FieldValue, FieldPath } = require('@google-cloud/firestore');
- const crypto = require('crypto');
-
-
- // [UPDATED] Registry for Data Availability.
- // Populated dynamically by getEarliestDataDates().
- const DEFINITIVE_EARLIEST_DATES = {
-   portfolio: null,
-   history: null,
-   social: null,
-   insights: null,
-   price: null,
-   absoluteEarliest: null
- };
-
- /** Normalizes a calculation name to kebab-case */
- function normalizeName(name) { return name.replace(/_/g, '-'); }
-
- /** Generates a SHA-256 hash of a code string. */
- function generateCodeHash(codeString) {
-   if (!codeString) return 'unknown';
-   let clean = codeString.replace(/\/\/.*$/gm, '').replace(/\/\*[\s\S]*?\*\//g, '').replace(/\s+/g, '');
-   return crypto.createHash('sha256').update(clean).digest('hex');
- }
-
- /** Generates a stable SHA-256 hash of a data object. */
- function generateDataHash(data) {
-   if (data === undefined) return 'undefined';
-   const stableStringify = (obj) => {
-     if (typeof obj !== 'object' || obj === null) return JSON.stringify(obj);
-     if (Array.isArray(obj)) return '[' + obj.map(stableStringify).join(',') + ']';
-     return '{' + Object.keys(obj).sort().map(k => JSON.stringify(k) + ':' + stableStringify(obj[k])).join(',') + '}';
-   };
-   try {
-     return crypto.createHash('sha256').update(stableStringify(data)).digest('hex');
-   } catch (e) { return 'hash_error'; }
- }
-
- /** Exponential backoff retry logic. */
- async function withRetry(fn, operationName, maxRetries = 3) {
-   let attempt = 0;
-   while (attempt < maxRetries) {
-     try { return await fn(); } catch (error) {
-       attempt++;
-       if (attempt >= maxRetries) throw error;
-       await new Promise(resolve => setTimeout(resolve, 1000 * Math.pow(2, attempt - 1)));
-     }
-   }
- }
-
- /** Recursively sanitizes data for Firestore compatibility.
-  * Converts 'undefined' to 'null' to prevent validation errors.
-  * Converts invalid Dates to null to prevent Timestamp serialization errors.
-  * Converts NaN numbers to null to prevent serialization errors.
-  */
- function sanitizeForFirestore(data) {
-   if (data === undefined) return null;
-   if (data === null) return null;
-
-   // Handle Date objects - validate they're not Invalid Date (NaN timestamp)
-   if (data instanceof Date) {
-     if (isNaN(data.getTime())) {
-       // Invalid Date - return null instead of crashing Firestore
-       return null;
-     }
-     return data;
-   }
-
-   // Handle NaN numbers (would cause serialization issues)
-   if (typeof data === 'number' && isNaN(data)) {
-     return null;
-   }
-
-   // Handle Infinity (also not valid for Firestore)
-   if (data === Infinity || data === -Infinity) {
-     return null;
-   }
-
-   // Handle Firestore Types if necessary (e.g. GeoPoint, Timestamp), passed through as objects usually.
-
-   if (Array.isArray(data)) {
-     return data.map(item => sanitizeForFirestore(item));
-   }
-
-   if (typeof data === 'object') {
-     // Check for specific Firestore types that shouldn't be traversed like plain objects
-     if (data.constructor && (data.constructor.name === 'DocumentReference' || data.constructor.name === 'Timestamp')) {
-       return data;
-     }
-
-     const sanitized = {};
-     for (const [key, value] of Object.entries(data)) {
-       sanitized[key] = sanitizeForFirestore(value);
-     }
-     return sanitized;
-   }
-
-   return data;
- }
-
- /** Commit a batch of writes in chunks. */
- async function commitBatchInChunks(config, deps, writes, operationName) {
-   const { db, logger } = deps;
-   const retryFn = (deps.calculationUtils?.withRetry) || withRetry;
-   if (!writes?.length) return;
-
-   const MAX_BATCH_OPS = 300, MAX_BATCH_BYTES = 9 * 1024 * 1024;
-   let currentBatch = db.batch(), currentOpsCount = 0, currentBytesEst = 0, batchIndex = 1;
-
-   const commitAndReset = async () => {
-     if (currentOpsCount > 0) {
-       await retryFn(() => currentBatch.commit(), `${operationName} (Chunk ${batchIndex})`);
-       batchIndex++;
-     }
-     currentBatch = db.batch(); currentOpsCount = 0; currentBytesEst = 0;
-   };
-
-   for (const write of writes) {
-     if (write.type === 'DELETE') {
-       if (currentOpsCount + 1 > MAX_BATCH_OPS) await commitAndReset();
-       currentBatch.delete(write.ref); currentOpsCount++; continue;
-     }
-
-     // [FIX] Sanitize data to remove 'undefined' values before calculating size or setting
-     const safeData = sanitizeForFirestore(write.data);
-
-     let docSize = 100;
-     try { if (safeData) docSize = JSON.stringify(safeData).length; } catch (e) { }
-
-     if ((currentOpsCount + 1 > MAX_BATCH_OPS) || (currentBytesEst + docSize > MAX_BATCH_BYTES)) await commitAndReset();
-
-     // Use safeData instead of write.data
-     currentBatch.set(write.ref, safeData, write.options || { merge: true });
-     currentOpsCount++; currentBytesEst += docSize;
-   }
-   await commitAndReset();
- }
-
- /** Generate array of date strings between two dates. */
- function getExpectedDateStrings(startDate, endDate) {
-   const dateStrings = [];
-   if (startDate && endDate && startDate <= endDate) {
-     const startUTC = new Date(Date.UTC(startDate.getUTCFullYear(), startDate.getUTCMonth(), startDate.getUTCDate()));
-     for (let d = startUTC; d <= endDate; d.setUTCDate(d.getUTCDate() + 1)) { dateStrings.push(new Date(d).toISOString().slice(0, 10)); }
-   }
-   return dateStrings;
- }
-
- /**
-  * [UPDATED] Single Source of Truth for Data Availability.
-  * Updates the global DEFINITIVE_EARLIEST_DATES object.
-  */
- async function getEarliestDataDates(config, deps) {
-   const { db, logger } = deps;
-   const indexCollection = process.env.ROOT_DATA_AVAILABILITY_COLLECTION || 'system_root_data_index';
-
-   const getEarliestForType = async (flagName) => {
-     try {
-       const snap = await db.collection(indexCollection).where(flagName, '==', true).orderBy(FieldPath.documentId(), 'asc').limit(1).get();
-       if (!snap.empty) return new Date(snap.docs[0].id + 'T00:00:00Z');
-     } catch (e) { logger.log('WARN', `[Utils] Index query failed for ${flagName}`); }
-     return null;
-   };
-
-   const [portfolio, history, social, insights, price] = await Promise.all([
-     getEarliestForType('hasPortfolio'), getEarliestForType('hasHistory'),
-     getEarliestForType('hasSocial'), getEarliestForType('hasInsights'), getEarliestForType('hasPrices')
-   ]);
-
-   const found = [portfolio, history, social, insights, price].filter(d => d !== null);
-   const absoluteEarliest = found.length > 0 ? new Date(Math.min(...found)) : new Date('2023-01-01T00:00:00Z');
-
-   // Sync the global registry
-   Object.assign(DEFINITIVE_EARLIEST_DATES, { portfolio, history, social, insights, price, absoluteEarliest });
-
-   return DEFINITIVE_EARLIEST_DATES;
- }
-
- module.exports = {
-   FieldValue, FieldPath, normalizeName, commitBatchInChunks,
-   getExpectedDateStrings, getEarliestDataDates, generateCodeHash,
-   generateDataHash, withRetry, DEFINITIVE_EARLIEST_DATES, sanitizeForFirestore
- };
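The utils module removed above centered on two write-path helpers: sanitizeForFirestore, which nulls out undefined, NaN, Infinity, and invalid Date values, and commitBatchInChunks, which splits writes into Firestore batches of at most 300 operations or roughly 9 MB each. A minimal sketch of the write objects it expected ({ ref, data, options? } or { type: 'DELETE', ref }), again assuming the pre-removal path and a default-credential Firestore client; the collection and field names are illustrative:

const { Firestore } = require('@google-cloud/firestore');
// Path as it existed before this release; hypothetical caller code.
const { commitBatchInChunks } = require('bulltrackers-module/functions/computation-system/utils/utils');

const db = new Firestore();
const logger = { log: (level, msg, meta) => console.log(level, msg, meta || '') };

const writes = [
  // sanitizeForFirestore runs inside commitBatchInChunks, so pnl: NaN is written as null.
  { ref: db.collection('computation_results').doc('2024-01-01'), data: { pnl: NaN, updatedAt: new Date() } },
  { type: 'DELETE', ref: db.collection('computation_results').doc('stale-doc') }
];

commitBatchInChunks({}, { db, logger }, writes, 'example-commit').catch(console.error);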