bulltrackers-module 1.0.290 → 1.0.292
computation-system/helpers/computation_worker.js CHANGED
@@ -1,14 +1,15 @@
 /**
  * FILENAME: computation-system/helpers/computation_worker.js
  * PURPOSE: Consumes tasks, executes logic, and signals Workflow upon Batch Completion.
- * UPDATED: Implements
+ * UPDATED: Implements IAM Auth for Workflow Callbacks.
  */
 
 const { executeDispatchTask } = require('../WorkflowOrchestrator.js');
 const { getManifest } = require('../topology/ManifestLoader');
 const { StructuredLogger } = require('../logger/logger');
 const { recordRunAttempt } = require('../persistence/RunRecorder');
-const https = require('https');
+const https = require('https');
+const { GoogleAuth } = require('google-auth-library'); // [NEW] Required for Auth
 
 let calculationPackage;
 try { calculationPackage = require('aiden-shared-calculations-unified');
@@ -19,40 +20,56 @@ const MAX_RETRIES = 3;
 
 /**
  * [NEW] Helper: Fires the webhook back to Google Cloud Workflows.
+ * UPDATED: Now generates an IAM Bearer Token to authenticate the request.
  */
-function triggerWorkflowCallback(url, status, logger) {
-  if (!url) return
+async function triggerWorkflowCallback(url, status, logger) {
+  if (!url) return;
   logger.log('INFO', `[Worker] 🔔 BATCH COMPLETE! Triggering Workflow Callback: ${status}`);
 
-
-
-
-
-  });
-
-  const req = https.request(url, {
-    method: 'POST',
-    headers: {
-      'Content-Type': 'application/json',
-      'Content-Length': Buffer.byteLength(body)
-    }
-  }, (res) => {
-    if (res.statusCode >= 200 && res.statusCode < 300) {
-      resolve();
-    } else {
-      logger.log('WARN', `Callback responded with ${res.statusCode}`);
-      resolve(); // Don't crash the worker if callback fails, logic is done.
-    }
+  try {
+    // 1. Get OAuth2 Access Token (Required for Workflows Callbacks)
+    const auth = new GoogleAuth({
+      scopes: ['https://www.googleapis.com/auth/cloud-platform']
     });
+    const client = await auth.getClient();
+    const accessToken = await client.getAccessToken();
+    const token = accessToken.token;
 
-
-
-
+    // 2. Send Authenticated Request
+    return new Promise((resolve, reject) => {
+      const body = JSON.stringify({
+        status: status,
+        timestamp: new Date().toISOString()
+      });
+
+      const req = https.request(url, {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+          'Content-Length': Buffer.byteLength(body),
+          'Authorization': `Bearer ${token}` // <--- CRITICAL FIX
+        }
+      }, (res) => {
+        if (res.statusCode >= 200 && res.statusCode < 300) {
+          resolve();
+        } else {
+          logger.log('WARN', `Callback responded with ${res.statusCode}`);
+          // We resolve anyway to avoid crashing the worker logic
+          resolve();
+        }
+      });
+
+      req.on('error', (e) => {
+        logger.log('ERROR', `Failed to trigger callback: ${e.message}`);
+        resolve();
+      });
+
+      req.write(body);
+      req.end();
     });
-
-
-
-});
+  } catch (e) {
+    logger.log('ERROR', `Failed to generate auth token for callback: ${e.message}`);
+  }
 }
 
 /**
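Note: google-auth-library clients can also sign requests themselves. A minimal alternative sketch (not the package's code; `triggerCallbackViaClient` is a hypothetical name) that collapses the manual https.request plumbing above into the client's built-in request():

    const { GoogleAuth } = require('google-auth-library');

    async function triggerCallbackViaClient(url, status, logger) {
      if (!url) return;
      try {
        const auth = new GoogleAuth({ scopes: ['https://www.googleapis.com/auth/cloud-platform'] });
        const client = await auth.getClient();
        // client.request() attaches the Bearer token and serializes the JSON body;
        // it throws on non-2xx responses, which the catch below absorbs.
        await client.request({
          url,
          method: 'POST',
          data: { status, timestamp: new Date().toISOString() },
        });
      } catch (e) {
        // Mirror the diff's behavior: a failed callback never crashes the worker.
        logger.log('WARN', `Callback failed: ${e.message}`);
      }
    }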
@@ -133,12 +150,8 @@ async function handleComputationTask(message, config, dependencies) {
         failedAt: new Date()
       }, { merge: true });
 
-      // [CRITICAL] Even if it failed, we MUST decrement the counter.
-      // Otherwise the workflow waits 24h for a task that will never finish.
       const callbackUrl = await decrementAndCheck(db, metaStatePath, logger);
       if (callbackUrl) {
-        // We signal SUCCESS to the workflow because the *Batch* is finished processing (even if this task failed).
-        // The "monitor" or next pass can handle data gaps.
         await triggerWorkflowCallback(callbackUrl, 'SUCCESS', logger);
       }
 
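The diff never shows decrementAndCheck itself, but the pattern it implies is a transactional batch counter: every terminal path (success or permanent failure) decrements once, and only the worker that performs the final decrement receives the callback URL. A hypothetical sketch under those assumptions (the field names `pendingTasks` and `callbackUrl` are invented for illustration):

    // Hypothetical sketch; not the package's implementation.
    async function decrementAndCheck(db, metaStatePath, logger) {
      return db.runTransaction(async (tx) => {
        const ref = db.doc(metaStatePath);
        const snap = await tx.get(ref);
        if (!snap.exists) return null;

        const remaining = (snap.data().pendingTasks || 0) - 1;
        tx.update(ref, { pendingTasks: remaining });

        // Only the final decrement returns the URL, so the callback fires exactly once.
        return remaining <= 0 ? snap.data().callbackUrl : null;
      });
    }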
@@ -162,7 +175,6 @@ async function handleComputationTask(message, config, dependencies) {
   try { computationManifest = getManifest(config.activeProductLines || [], calculations, runDependencies);
   } catch (manifestError) {
     logger.log('FATAL', `[Worker] Failed to load Manifest: ${manifestError.message}`);
-    // Do NOT decrement here, let PubSub retry the task.
     return;
   }
 
@@ -178,7 +190,6 @@ async function handleComputationTask(message, config, dependencies) {
     const successUpdates = result?.updates?.successUpdates || {};
 
     if (failureReport.length > 0) {
-      // Logic/Storage failure (handled internally by executor)
       const failReason = failureReport[0];
       throw new Error(failReason.error.message || 'Computation Logic Failed');
     }
@@ -189,7 +200,6 @@ async function handleComputationTask(message, config, dependencies) {
       logger.log('WARN', `[Worker] ⚠️ Empty Result: ${computation}`);
     }
 
-    // 1. Mark Ledger as COMPLETED
     await db.collection(`computation_audit_ledger/${date}/passes/${pass}/tasks`).doc(computation).update({
       status: 'COMPLETED',
       completedAt: new Date()
@@ -197,18 +207,14 @@ async function handleComputationTask(message, config, dependencies) {
 
     await recordRunAttempt(db, { date, computation, pass }, 'SUCCESS', null, { durationMs: duration }, triggerReason);
 
-    //
+    // Decrement & Callback
     const callbackUrl = await decrementAndCheck(db, metaStatePath, logger);
-
-    // 3. [NEW] If last one, fire callback
     if (callbackUrl) {
       await triggerWorkflowCallback(callbackUrl, 'SUCCESS', logger);
     }
   }
 } catch (err) {
   // --- ERROR HANDLING ---
-
-  // Check for Permanent/Deterministic Errors
   const isDeterministicError = err.stage === 'SHARDING_LIMIT_EXCEEDED' ||
     err.stage === 'QUALITY_CIRCUIT_BREAKER' ||
     err.stage === 'SEMANTIC_GATE' ||
@@ -233,28 +239,23 @@ async function handleComputationTask(message, config, dependencies) {
 
       await recordRunAttempt(db, { date, computation, pass }, 'FAILURE', { message: err.message, stage: err.stage || 'PERMANENT_FAIL' }, { durationMs: 0 }, triggerReason);
 
-      // [CRITICAL] Permanent failure -> Must decrement so workflow doesn't hang
      const callbackUrl = await decrementAndCheck(db, metaStatePath, logger);
      if (callbackUrl) {
        await triggerWorkflowCallback(callbackUrl, 'SUCCESS', logger);
      }
 
-      return;
+      return;
    } catch (dlqErr) { logger.log('FATAL', `[Worker] Failed to write to DLQ`, dlqErr); }
  }
 
-  // Standard Retryable Error (Transient)
  if (retryCount >= MAX_RETRIES) {
-    // Let the top-level poison check handle the decrement on the *next* delivery (or handle here if you prefer).
-    // Standard practice: throw so PubSub handles the backoff and redelivery.
-    // The poison logic at the top of this function will catch it on attempt N+1.
    throw err;
  }
 
  logger.log('ERROR', `[Worker] ❌ Crash: ${computation}: ${err.message}`);
  await recordRunAttempt(db, { date, computation, pass }, 'CRASH', { message: err.message, stack: err.stack, stage: 'SYSTEM_CRASH' }, { durationMs: 0 }, triggerReason);
 
-  throw err;
+  throw err;
  }
 }
 
@@ -6,41 +6,29 @@ const SimRunner = require('../simulation/SimRunner');
 const pLimit = require('p-limit');
 const path = require('path');
 const crypto = require('crypto');
-const fs = require('fs');
+const fs = require('fs');
 const packageJson = require(path.join(__dirname, '..', '..', '..', 'package.json'));
 const packageVersion = packageJson.version;
-const { generateCodeHash } = require('../utils/utils');
+const { generateCodeHash } = require('../utils/utils');
 
-// Persistent Registry for SimHashes
 const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';
 
-// ---
-
-// 1. define the Root of the system (one level up from 'tools')
+// --- RECURSIVE SYSTEM HASHING ---
 const SYSTEM_ROOT = path.resolve(__dirname, '..');
-
-// 2. Define what to ignore to prevent noise or infinite loops
 const IGNORED_DIRS = new Set(['node_modules', '.git', '.idea', 'coverage', 'logs', 'tests']);
 const IGNORED_FILES = new Set(['package-lock.json', '.DS_Store', '.env']);
 
-/**
- * Recursively walks a directory and returns a list of file paths.
- */
 function walkSync(dir, fileList = []) {
   const files = fs.readdirSync(dir);
-
   files.forEach(file => {
     if (IGNORED_FILES.has(file)) return;
-
     const filePath = path.join(dir, file);
     const stat = fs.statSync(filePath);
-
     if (stat.isDirectory()) {
       if (!IGNORED_DIRS.has(file)) {
         walkSync(filePath, fileList);
       }
     } else {
-      // Only hash code files (add .yaml if you want workflows included)
       if (file.endsWith('.js') || file.endsWith('.json') || file.endsWith('.yaml')) {
         fileList.push(filePath);
       }
@@ -49,69 +37,41 @@ function walkSync(dir, fileList = []) {
   return fileList;
 }
 
-/**
- * Generates a single hash representing the entire infrastructure code state.
- */
 function getInfrastructureHash() {
   try {
     const allFiles = walkSync(SYSTEM_ROOT);
-    allFiles.sort();
-
+    allFiles.sort();
     const bigHash = crypto.createHash('sha256');
-
     for (const filePath of allFiles) {
       const content = fs.readFileSync(filePath, 'utf8');
       const relativePath = path.relative(SYSTEM_ROOT, filePath);
-
-      // DECISION: How to clean?
       let cleanContent = content;
-
-      // 1. If it's JS, use your system standard for code hashing
       if (filePath.endsWith('.js')) {
-        // This strips comments and whitespace consistently with ManifestBuilder
-        // Note: generateCodeHash returns a hash, we can just use that hash
         cleanContent = generateCodeHash(content);
-      }
-      // 2. If it's JSON/YAML, just strip basic whitespace to ignore indent changes
-      else {
+      } else {
        cleanContent = content.replace(/\s+/g, '');
      }
-
-      // Feed the PATH and the CLEAN CONTENT into the master hash
      bigHash.update(`${relativePath}:${cleanContent}|`);
    }
-
    return bigHash.digest('hex');
  } catch (e) {
    console.warn(`[BuildReporter] ⚠️ Failed to generate infra hash: ${e.message}`);
-    return 'infra_hash_error';
+    return 'infra_hash_error';
  }
 }
 
-/**
- * UPDATED: System Fingerprint = Manifest Hash + Infrastructure Hash
- */
 function getSystemFingerprint(manifest) {
-  // 1. Business Logic Hash (The Calculations)
  const sortedManifestHashes = manifest.map(c => c.hash).sort().join('|');
-
-  // 2. Infrastructure Hash (The System Code)
  const infraHash = getInfrastructureHash();
-
-  // 3. Combine
  return crypto.createHash('sha256')
    .update(sortedManifestHashes + infraHash)
    .digest('hex');
 }
 
-/**
- * Helper: Determines if a calculation should be excluded from the report.
- */
 function isDateBeforeAvailability(dateStr, calcManifest) {
   const targetDate = new Date(dateStr + 'T00:00:00Z');
   const deps = calcManifest.rootDataDependencies || [];
   if (deps.length === 0) return false;
-
   for (const dep of deps) {
     let startDate = null;
     if (dep === 'portfolio') startDate = DEFINITIVE_EARLIEST_DATES.portfolio;
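The fingerprint is deterministic because the file list is sorted and each update mixes in the relative path alongside the cleaned content, so renaming or moving a file changes the hash even when its bytes do not. A small illustrative check (hypothetical standalone usage, assuming these helpers are callable directly):

    // Two runs over an unchanged tree must agree; any file edit, add,
    // remove, or rename under SYSTEM_ROOT changes the digest.
    const a = getInfrastructureHash();
    const b = getInfrastructureHash();
    console.assert(a === b, 'infrastructure hash should be stable');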
@@ -119,19 +79,14 @@ function isDateBeforeAvailability(dateStr, calcManifest) {
     else if (dep === 'social') startDate = DEFINITIVE_EARLIEST_DATES.social;
     else if (dep === 'insights') startDate = DEFINITIVE_EARLIEST_DATES.insights;
     else if (dep === 'price') startDate = DEFINITIVE_EARLIEST_DATES.price;
-
     if (startDate && targetDate < startDate) { return true; }
   }
   return false;
 }
 
-/**
- * Helper: Calculates the transitive closure of dependents (Blast Radius).
- */
 function calculateBlastRadius(targetCalcName, reverseGraph) {
   const impactSet = new Set();
   const queue = [targetCalcName];
-
   while(queue.length > 0) {
     const current = queue.shift();
     const dependents = reverseGraph.get(current) || [];
@@ -142,7 +97,6 @@ function calculateBlastRadius(targetCalcName, reverseGraph) {
       }
     });
   }
-
   return {
     directDependents: (reverseGraph.get(targetCalcName) || []).length,
     totalCascadingDependents: impactSet.size,
@@ -150,14 +104,11 @@ function calculateBlastRadius(targetCalcName, reverseGraph) {
   };
 }
 
-/**
- * [NEW] Helper: Runs SimHash check with Caching and Registry Persistence.
- */
 async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, logger, simHashCache, db) {
   const trueReRuns = [];
   const stableUpdates = [];
 
-  //
+  // Concurrency for simulations
   const limit = pLimit(10);
 
   const checks = candidates.map(item => limit(async () => {
@@ -170,16 +121,10 @@ async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, l
       return;
     }
 
-    // 1. Check Cache first (Avoid re-simulating the same code for 100 different dates)
     let newSimHash = simHashCache.get(manifest.hash);
-
-    // 2. If Miss, Run Simulation & Persist to Registry
     if (!newSimHash) {
       newSimHash = await SimRunner.run(manifest, manifestMap);
       simHashCache.set(manifest.hash, newSimHash);
-
-      // Write to Registry so Production Workers can find it without running SimRunner
-      // Fire-and-forget write to reduce latency
       db.collection(SIMHASH_REGISTRY_COLLECTION).doc(manifest.hash).set({
         simHash: newSimHash,
         createdAt: new Date(),
@@ -187,17 +132,14 @@ async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, l
       }).catch(err => logger.log('WARN', `Failed to write SimHash registry for ${manifest.name}: ${err.message}`));
     }
 
-    // 3. Compare
     if (newSimHash === stored.simHash) {
-      // BEHAVIORAL MATCH: Code changed, but output is identical.
       stableUpdates.push({
         ...item,
         reason: "Code Updated (Logic Stable)",
-        simHash: newSimHash,
-        newHash: manifest.hash
+        simHash: newSimHash,
+        newHash: manifest.hash
       });
     } else {
-      // BEHAVIORAL MISMATCH: Logic changed.
       trueReRuns.push({
         ...item,
         reason: item.reason + ` [SimHash Mismatch]`,
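Because the registry write above is fire-and-forget, consumers read it opportunistically. A hypothetical reader-side sketch (not shown in the diff) for a production worker that wants a SimHash without invoking SimRunner:

    // Hypothetical lookup; returns null when no registry entry exists yet.
    async function lookupSimHash(db, codeHash) {
      const snap = await db.collection(SIMHASH_REGISTRY_COLLECTION).doc(codeHash).get();
      return snap.exists ? snap.data().simHash : null;
    }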
@@ -215,9 +157,6 @@ async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, l
   return { trueReRuns, stableUpdates };
 }
 
-/**
- * AUTO-RUN ENTRY POINT
- */
 async function ensureBuildReport(config, dependencies, manifest) {
   const { db, logger } = dependencies;
   const now = new Date();
@@ -234,40 +173,28 @@ async function ensureBuildReport(config, dependencies, manifest) {
 
   if (!shouldRun) { logger.log('INFO', `[BuildReporter] 🔒 Report for v${packageVersion} locked. Skipping.`); return; }
 
-  // [NEW] 1. Calculate Current System Fingerprint
   const currentSystemHash = getSystemFingerprint(manifest);
-
-  // [NEW] 2. Fetch Last Build's Fingerprint
   const latestBuildDoc = await db.collection('computation_build_records').doc('latest').get();
 
   if (latestBuildDoc.exists) {
     const latestData = latestBuildDoc.data();
-
-    // [OPTIMIZATION] If signatures match, we can clone the report or just skip
     if (latestData.systemFingerprint === currentSystemHash) {
       logger.log('INFO', `[BuildReporter] ⚡ System Fingerprint (${currentSystemHash.substring(0,8)}) matches latest build. Skipping Report.`);
-
-      // Create a "Skipped" record for the new version so we know it deployed
       await db.collection('computation_build_records').doc(buildId).set({
         buildId,
         packageVersion,
         systemFingerprint: currentSystemHash,
         status: 'SKIPPED_IDENTICAL',
-        referenceBuild: latestData.buildId,
+        referenceBuild: latestData.buildId,
         generatedAt: new Date().toISOString()
       });
-
-      // Release lock and exit
       lockRef.update({ status: 'SKIPPED', completedAt: new Date() }).catch(() => {});
       return;
     }
   }
 
   logger.log('INFO', `[BuildReporter] 🚀 Change Detected. Running Pre-flight Report for v${packageVersion}...`);
-
-  // Pass the fingerprint to generateBuildReport so it can save it
   await generateBuildReport(config, dependencies, manifest, 90, buildId, currentSystemHash);
-
   lockRef.update({ status: 'COMPLETED', completedAt: new Date() }).catch(() => {});
 
 } catch (e) {
@@ -275,14 +202,9 @@ async function ensureBuildReport(config, dependencies, manifest) {
   }
 }
 
-/**
- * Generates the report, writes to Firestore, AND FIXES STABLE UPDATES.
- */
 async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null, systemFingerprint = null) {
   const { db, logger } = dependencies;
   const buildId = customBuildId || `manual_${Date.now()}`;
-
-  // Calculate fingerprint if not provided (for manual runs)
   const finalFingerprint = systemFingerprint || getSystemFingerprint(manifest);
 
   logger.log('INFO', `[BuildReporter] Generating Build Report: ${buildId} (Scope: ${daysBack} days, Fingerprint: ${finalFingerprint.substring(0,8)})...`);
@@ -293,8 +215,6 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
 
   const datesToCheck = getExpectedDateStrings(startDate, today);
   const manifestMap = new Map(manifest.map(c => [normalizeName(c.name), c]));
-
-  // [OPTIMIZATION] Cache SimHashes across dates so we only calculate once per code version
   const simHashCache = new Map();
 
   const reverseGraph = new Map();
@@ -312,16 +232,16 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
   const reportHeader = {
     buildId,
     packageVersion,
-    systemFingerprint: finalFingerprint,
+    systemFingerprint: finalFingerprint,
     generatedAt: new Date().toISOString(),
     summary: {},
     _sharded: true
   };
 
   let totalRun = 0, totalReRun = 0, totalStable = 0;
-  const detailWrites = [];
 
-
+  // [FIX] Reduced concurrency from 20 to 5 to avoid Firestore DEADLINE_EXCEEDED
+  const limit = pLimit(5);
 
   const processingPromises = datesToCheck.map(dateStr => limit(async () => {
     try {
@@ -374,29 +294,22 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
       // 1. RUN
       analysis.runnable.forEach(item => pushIfValid(dateSummary.run, item, "New Calculation"));
 
-      // 2. RE-RUN & STABLE Analysis
+      // 2. RE-RUN & STABLE Analysis
       if (analysis.reRuns.length > 0) {
-        // Pass simHashCache and db for registry writes
         const { trueReRuns, stableUpdates } = await verifyBehavioralStability(analysis.reRuns, manifestMap, dailyStatus, logger, simHashCache, db);
 
         trueReRuns.forEach(item => pushIfValid(dateSummary.rerun, item, "Logic Changed"));
         stableUpdates.forEach(item => pushIfValid(dateSummary.stable, item, "Cosmetic Change"));
 
-        // [CRITICAL FIX] "Fix the Blast Radius"
-        // If updates are STABLE, we update the status NOW.
-        // This implies: Code Hash changes, but Sim Hash stays same.
-        // The Dispatcher will see the new Code Hash in status matches the Manifest, so it won't dispatch.
         if (stableUpdates.length > 0) {
           const updatesPayload = {};
           for (const stable of stableUpdates) {
             const m = manifestMap.get(stable.name);
-            // We preserve the *existing* resultHash because the logic is proven stable.
-            // We update the 'hash' to the NEW code hash.
             if (m && dailyStatus[stable.name]) {
               updatesPayload[stable.name] = {
-                hash: m.hash,
-                simHash: stable.simHash,
-                resultHash: dailyStatus[stable.name].resultHash,
+                hash: m.hash,
+                simHash: stable.simHash,
+                resultHash: dailyStatus[stable.name].resultHash,
                 dependencyResultHashes: dailyStatus[stable.name].dependencyResultHashes || {},
                 category: m.category,
                 composition: m.composition,
@@ -404,7 +317,6 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
               };
             }
           }
-          // Perform the "Fix"
           if (Object.keys(updatesPayload).length > 0) {
             await updateComputationStatus(dateStr, updatesPayload, config, dependencies);
             logger.log('INFO', `[BuildReporter] 🩹 Fixed ${Object.keys(updatesPayload).length} stable items for ${dateStr}. They will NOT re-run.`);
@@ -424,11 +336,8 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
       dateSummary.meta.totalIncluded = includedCount;
       dateSummary.meta.match = (includedCount === expectedCount);
 
-      //
-
-
-      // ADD THIS (Write immediately):
-      await db.collection('computation_build_records')
+      // Write Immediately
+      await db.collection('computation_build_records')
         .doc(buildId)
         .collection('details')
         .doc(dateStr)
@@ -450,10 +359,6 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
 
   await db.collection('computation_build_records').doc(buildId).set(reportHeader);
 
-  // Parallel write details ---> Now redundant.
-  //const writeLimit = pLimit(15);
-  //await Promise.all(detailWrites.map(w => writeLimit(() => w.ref.set(w.data))));
-
   await db.collection('computation_build_records').doc('latest').set({ ...reportHeader, note: "Latest build report pointer." });
 
   logger.log('SUCCESS', `[BuildReporter] Report ${buildId} saved. Re-runs: ${totalReRun}, Stable (Fixed): ${totalStable}, New: ${totalRun}.`);
@@ -1,6 +1,7 @@
 # Cloud Workflows Definition for BullTrackers Computation Pipeline
 # Orchestrates 5 sequential passes using Event-Driven Callbacks (Zero Polling).
-#
+# FIXED: Replaced invalid 'sys' callback functions with 'events' library functions.
+# FIXED: Proper extraction of 'callback_details.url' for the dispatcher.
 
 main:
   params: [input]
@@ -40,11 +41,17 @@ main:
         assign:
           - attempt_count: ${attempt_count + 1}
 
-    # 1. GENERATE CALLBACK
-    #
+    # 1. GENERATE CALLBACK ENDPOINT
+    # We use the 'events' library. This returns an object containing the URL.
     - create_callback:
-        call:
-
+        call: events.create_callback_endpoint
+        args:
+          http_callback_method: "POST"
+        result: callback_details
+
+    - extract_callback_url:
+        assign:
+          - callback_url: ${callback_details.url}
 
     - log_start:
         call: sys.log
@@ -53,21 +60,20 @@ main:
           severity: "INFO"
 
     # 2. TRIGGER DISPATCHER
-    # We pass the 'callback_url' to the dispatcher
+    # We pass the extracted 'callback_url' string to the dispatcher.
     - trigger_dispatcher:
         call: http.post
         args:
           url: ${dispatcher_url}
           body:
             date: ${date_to_run}
-            callbackUrl: ${callback_url}
+            callbackUrl: ${callback_url}
           auth:
             type: OIDC
           timeout: 1800 # 30 mins max for dispatch analysis
         result: dispatch_response
 
     # 3. CHECK FOR "NOTHING TO DO"
-    # If the dispatcher found 0 tasks, it returns immediate success. We skip waiting.
     - check_immediate_completion:
         switch:
           - condition: ${dispatch_response.body.dispatched == 0}
@@ -83,19 +89,20 @@ main:
             next: pass_retry_loop
 
     # 4. WAIT FOR WORKER SIGNAL
-    #
-    # This eliminates the need for polling logic.
+    # We must pass the original 'callback_details' object here, not the URL string.
     - wait_for_completion:
-        call:
+        call: events.await_callback
         args:
-
-          timeout: 86400 # Wait up to 24 hours
+          callback: ${callback_details}
+          timeout: 86400 # Wait up to 24 hours
         result: callback_request
 
     # 5. PROCESS SIGNAL
-    -
+    - parse_signal:
         assign:
           - signal_data: ${callback_request.http_request.body}
+
+    - evaluate_signal:
         switch:
           - condition: ${signal_data.status == "SUCCESS"}
             steps:
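For reference, the JSON the worker posts (triggerWorkflowCallback above) is exactly what events.await_callback hands back under callback_request.http_request.body, which is why the switch can read signal_data.status directly:

    // Shape of the worker's callback POST body, as consumed by evaluate_signal:
    const body = JSON.stringify({
      status: 'SUCCESS',                    // matched against "SUCCESS" in the switch
      timestamp: new Date().toISOString()
    });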
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "bulltrackers-module",
-  "version": "1.0.290",
+  "version": "1.0.292",
   "description": "Helper Functions for Bulltrackers.",
   "main": "index.js",
   "files": [
@@ -35,11 +35,12 @@
     "cors": "^2.8.5",
     "dotenv": "latest",
     "express": "^4.19.2",
+    "google-auth-library": "^10.5.0",
+    "graphviz": "latest",
     "node-graphviz": "^0.1.1",
     "p-limit": "^3.1.0",
     "require-all": "^3.0.0",
-    "sharedsetup": "latest"
-    "graphviz": "latest"
+    "sharedsetup": "latest"
   },
   "devDependencies": {
     "bulltracker-deployer": "file:../bulltracker-deployer"