bulltrackers-module 1.0.731 → 1.0.733
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/orchestrator/index.js +19 -17
- package/index.js +8 -29
- package/package.json +6 -5
- package/functions/computation-system/WorkflowOrchestrator.js +0 -213
- package/functions/computation-system/config/monitoring_config.js +0 -31
- package/functions/computation-system/config/validation_overrides.js +0 -10
- package/functions/computation-system/context/ContextFactory.js +0 -132
- package/functions/computation-system/context/ManifestBuilder.js +0 -379
- package/functions/computation-system/data/AvailabilityChecker.js +0 -236
- package/functions/computation-system/data/CachedDataLoader.js +0 -325
- package/functions/computation-system/data/DependencyFetcher.js +0 -455
- package/functions/computation-system/executors/MetaExecutor.js +0 -279
- package/functions/computation-system/executors/PriceBatchExecutor.js +0 -108
- package/functions/computation-system/executors/StandardExecutor.js +0 -465
- package/functions/computation-system/helpers/computation_dispatcher.js +0 -750
- package/functions/computation-system/helpers/computation_worker.js +0 -375
- package/functions/computation-system/helpers/monitor.js +0 -64
- package/functions/computation-system/helpers/on_demand_helpers.js +0 -154
- package/functions/computation-system/layers/extractors.js +0 -1097
- package/functions/computation-system/layers/index.js +0 -40
- package/functions/computation-system/layers/mathematics.js +0 -522
- package/functions/computation-system/layers/profiling.js +0 -537
- package/functions/computation-system/layers/validators.js +0 -170
- package/functions/computation-system/legacy/AvailabilityCheckerOld.js +0 -388
- package/functions/computation-system/legacy/CachedDataLoaderOld.js +0 -357
- package/functions/computation-system/legacy/DependencyFetcherOld.js +0 -478
- package/functions/computation-system/legacy/MetaExecutorold.js +0 -364
- package/functions/computation-system/legacy/StandardExecutorold.js +0 -476
- package/functions/computation-system/legacy/computation_dispatcherold.js +0 -944
- package/functions/computation-system/logger/logger.js +0 -297
- package/functions/computation-system/persistence/ContractValidator.js +0 -81
- package/functions/computation-system/persistence/FirestoreUtils.js +0 -56
- package/functions/computation-system/persistence/ResultCommitter.js +0 -283
- package/functions/computation-system/persistence/ResultsValidator.js +0 -130
- package/functions/computation-system/persistence/RunRecorder.js +0 -142
- package/functions/computation-system/persistence/StatusRepository.js +0 -52
- package/functions/computation-system/reporter_epoch.js +0 -6
- package/functions/computation-system/scripts/UpdateContracts.js +0 -128
- package/functions/computation-system/services/SnapshotService.js +0 -148
- package/functions/computation-system/simulation/Fabricator.js +0 -285
- package/functions/computation-system/simulation/SeededRandom.js +0 -41
- package/functions/computation-system/simulation/SimRunner.js +0 -51
- package/functions/computation-system/system_epoch.js +0 -2
- package/functions/computation-system/tools/BuildReporter.js +0 -531
- package/functions/computation-system/tools/ContractDiscoverer.js +0 -144
- package/functions/computation-system/tools/DeploymentValidator.js +0 -536
- package/functions/computation-system/tools/FinalSweepReporter.js +0 -322
- package/functions/computation-system/topology/HashManager.js +0 -55
- package/functions/computation-system/topology/ManifestLoader.js +0 -47
- package/functions/computation-system/utils/data_loader.js +0 -597
- package/functions/computation-system/utils/schema_capture.js +0 -121
- package/functions/computation-system/utils/utils.js +0 -188
|
@@ -1,750 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* V2 TO REPLACE THE OLD.
|
|
3
|
-
* FILENAME: computation-system/helpers/computation_dispatcher.js
|
|
4
|
-
* PURPOSE: Sequential Cursor-Based Dispatcher (Refactored & Condensed).
|
|
5
|
-
* UPDATED: Added SNAPSHOT handling.
|
|
6
|
-
*/
|
|
7
|
-
|
|
8
|
-
const { getExpectedDateStrings, getEarliestDataDates, normalizeName, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils.js');
|
|
9
|
-
const { groupByPass, analyzeDateExecution } = require('../WorkflowOrchestrator.js');
|
|
10
|
-
const { PubSubUtils } = require('../../core/utils/pubsub_utils');
|
|
11
|
-
const { fetchComputationStatus } = require('../persistence/StatusRepository');
|
|
12
|
-
const { checkRootDataAvailability } = require('../data/AvailabilityChecker');
|
|
13
|
-
const { runFinalSweepCheck } = require('../tools/FinalSweepReporter');
|
|
14
|
-
const { resolveDependencyChain } = require('./on_demand_helpers');
|
|
15
|
-
const { checkRootDependencies } = require('../data/AvailabilityChecker');
|
|
16
|
-
// 1. IMPORT SNAPSHOT SERVICE
|
|
17
|
-
const { generateDailySnapshots } = require('../services/SnapshotService');
|
|
18
|
-
const crypto = require('crypto');
|
|
19
|
-
// Import Google Auth Library for OAuth 2.0 authentication
|
|
20
|
-
let GoogleAuth = null;
|
|
21
|
-
try {
|
|
22
|
-
GoogleAuth = require('google-auth-library').GoogleAuth;
|
|
23
|
-
} catch (e) {
|
|
24
|
-
// google-auth-library might not be installed, will handle gracefully
|
|
25
|
-
}
|
|
26
|
-
|
|
27
|
-
const BASE_SECONDS_PER_WEIGHT_UNIT = 3;
|
|
28
|
-
const SESSION_CACHE_DURATION_MS = 1000 * 60 * 30; // 30 Minutes
|
|
29
|
-
const STALE_LOCK_THRESHOLD_MS = 1000 * 60 * 15;
|
|
30
|
-
|
|
31
|
-
/**
 * Sends an authenticated POST callback to Cloud Workflows.
 * Uses an OAuth 2.0 access token obtained from the default service account
 * (via google-auth-library) and races the request against a 30s timeout.
 *
 * FIX: the timeout timer is now cleared once the race settles. Previously the
 * dangling setTimeout kept the event loop alive for up to 30 seconds after a
 * fast callback completed, which delays shutdown in serverless runtimes.
 *
 * @param {string} callbackUrl - Workflow callback endpoint to POST to.
 * @param {object} payload - JSON-serializable body.
 * @param {object} logger - Logger exposing .log(level, message).
 * @param {Function} directFetch - fetch implementation (native fetch or node-fetch).
 * @returns {Promise<*>} the successful fetch response.
 * @throws {Error} when google-auth-library is missing, the token cannot be
 *   obtained, the request times out, or the endpoint returns a non-OK status.
 */
async function sendAuthenticatedCallback(callbackUrl, payload, logger, directFetch) {
  if (!GoogleAuth) {
    throw new Error('google-auth-library is required for authenticated callbacks. Please install: npm install google-auth-library');
  }

  let timeoutHandle = null;
  try {
    // Get OAuth 2.0 access token using default service account credentials
    const auth = new GoogleAuth({
      scopes: ['https://www.googleapis.com/auth/cloud-platform']
    });
    const accessToken = await auth.getAccessToken();

    if (!accessToken) {
      throw new Error('Failed to obtain OAuth 2.0 access token');
    }

    logger.log('INFO', '[Dispatcher] ✅ Obtained OAuth 2.0 access token for callback');

    // Add timeout to prevent hanging (30 seconds should be plenty for a callback)
    const timeoutPromise = new Promise((_, reject) => {
      timeoutHandle = setTimeout(() => reject(new Error('Callback timeout after 30s')), 30000);
    });

    const fetchPromise = directFetch(callbackUrl, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${accessToken}`
      },
      body: JSON.stringify(payload)
    });

    const response = await Promise.race([fetchPromise, timeoutPromise]);

    if (!response.ok) {
      const errorText = await response.text().catch(() => 'Unable to read response');
      throw new Error(`Callback returned non-OK status: ${response.status} ${response.statusText}. Response: ${errorText.substring(0, 200)}`);
    }

    logger.log('INFO', `[Dispatcher] ✅ Callback sent successfully (status: ${response.status})`);
    return response;
  } catch (err) {
    logger.log('ERROR', `[Dispatcher] Authenticated callback failed: ${err.message}. Stack: ${err.stack?.substring(0, 300)}`);
    throw err;
  } finally {
    // Release the timer so it cannot keep the process alive after settling.
    if (timeoutHandle) clearTimeout(timeoutHandle);
  }
}
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
/**
 * Normalizes a Firestore Timestamp, Date, date string, or epoch number into
 * milliseconds since the epoch. Falsy or unparseable input yields 0.
 */
function getMillis(field) {
  if (!field) return 0;
  // Firestore Timestamps expose toDate(); prefer that conversion path.
  if (typeof field.toDate === 'function') {
    return field.toDate().getTime();
  }
  const millis = new Date(field).getTime();
  return isNaN(millis) ? 0 : millis;
}
|
|
89
|
-
|
|
90
|
-
/**
 * Decides whether a computation is scheduled to run on a given UTC date.
 * No config, a missing type, or DAILY always runs; WEEKLY matches the UTC
 * weekday against `days` (or single `day`); MONTHLY matches the UTC
 * day-of-month; unknown types run unconditionally.
 * @param {string} dateStr - 'YYYY-MM-DD' date string.
 * @param {{type?: string, day?: number, days?: number[]}|null} scheduleConfig
 * @returns {boolean}
 */
function isComputationScheduled(dateStr, scheduleConfig) {
  if (!scheduleConfig?.type || scheduleConfig.type === 'DAILY') return true;

  const utcDate = new Date(`${dateStr}T00:00:00Z`);
  const allowedDays = Array.isArray(scheduleConfig.days)
    ? scheduleConfig.days
    : [scheduleConfig.day];

  switch (scheduleConfig.type) {
    case 'WEEKLY':
      return allowedDays.includes(utcDate.getUTCDay());
    case 'MONTHLY':
      return allowedDays.includes(utcDate.getUTCDate());
    default:
      return true;
  }
}
|
|
104
|
-
|
|
105
|
-
/**
 * Builds the Pub/Sub message payload for one computation/date task.
 * Explicit `resources` / `triggerReason` arguments win; otherwise the task's
 * own fields apply, falling back to 'standard' / 'DISPATCH'. A fresh random
 * trace context is minted for every payload.
 */
function createTaskPayload(task, date, pass, dispatchId, resources, triggerReason) {
  const traceContext = {
    traceId: crypto.randomBytes(16).toString('hex'),
    spanId: crypto.randomBytes(8).toString('hex'),
    sampled: true
  };

  return {
    action: 'RUN_COMPUTATION_DATE',
    computation: task.name || task.computation,
    date,
    pass,
    dispatchId,
    triggerReason: triggerReason || task.reason || 'DISPATCH',
    resources: resources || task.resources || 'standard',
    hash: task.hash,
    traceContext
  };
}
|
|
122
|
-
|
|
123
|
-
/**
 * Determines what work is executable for a single date.
 * Loads root data availability, the day's computation status, and — when any
 * computation is historical — the previous day's status (only if that day is
 * within the known data range), then delegates to analyzeDateExecution.
 * @returns {Promise<{report: object, dailyStatus: object}|null>} null when
 *   the availability index is missing for the date.
 */
async function assessDateRunnability(date, computations, config, dependencies, manifestMap) {
  const availability = await checkRootDataAvailability(date, config, dependencies, DEFINITIVE_EARLIEST_DATES) || {};
  const rootStatus = availability.status;
  if (!rootStatus) return null;

  const dailyStatus = await fetchComputationStatus(date, config, dependencies);

  // Historical computations also need yesterday's status when it is in range.
  let prevDailyStatus = null;
  if (computations.some(c => c.isHistorical)) {
    const prevDay = new Date(date + 'T00:00:00Z');
    prevDay.setUTCDate(prevDay.getUTCDate() - 1);
    const { absoluteEarliest } = await getEarliestDataDates(config, dependencies);
    if (prevDay >= absoluteEarliest) {
      prevDailyStatus = await fetchComputationStatus(prevDay.toISOString().slice(0, 10), config, dependencies);
    }
  }

  const report = analyzeDateExecution(date, computations, rootStatus, dailyStatus, manifestMap, prevDailyStatus);
  return { report, dailyStatus };
}
|
|
142
|
-
|
|
143
|
-
/**
 * Publishes task payloads to the appropriate Pub/Sub topic in chunks of 250.
 * Routing: an explicit topicOverride wins; otherwise the first task's
 * `resources` field selects the high-mem or standard topic, with hard-coded
 * topic-name fallbacks when config does not provide them.
 * @returns {Promise<number>} the number of tasks published.
 */
async function publishTaskBatch(dependencies, config, tasks, pass, topicOverride = null) {
  if (tasks.length === 0) return 0;

  const { logger } = dependencies;
  const pubsubUtils = new PubSubUtils(dependencies);

  const isHighMem = topicOverride
    ? topicOverride.includes('highmem')
    : tasks[0].resources === 'high-mem';
  const configuredTopic = isHighMem ? config.computationTopicHighMem : config.computationTopicStandard;
  const fallbackTopic = isHighMem ? 'computation-tasks-highmem' : 'computation-tasks';
  const topic = topicOverride || configuredTopic || fallbackTopic;

  const names = tasks.map(t => t.computation).join(', ');
  logger.log('INFO', `[Dispatcher] 📤 Dispatching ${tasks.length} tasks to ${topic}: ${names.slice(0, 100)}...`);

  // Publish in fixed-size chunks to stay within batch limits.
  const CHUNK_SIZE = 250;
  for (let offset = 0; offset < tasks.length; offset += CHUNK_SIZE) {
    await pubsubUtils.batchPublishTasks(dependencies, {
      topicName: topic,
      tasks: tasks.slice(offset, offset + CHUNK_SIZE),
      taskType: `pass-${pass}-${isHighMem ? 'high' : 'std'}`
    });
  }
  return tasks.length;
}
|
|
168
|
-
|
|
169
|
-
/**
 * Returns the cached list of dispatch dates for a pass, rebuilding it when
 * the cached session is absent, older than the 30-minute TTL, or when
 * forceRebuild is set. The rebuilt list is persisted back to Firestore.
 */
async function getStableDateSession(config, dependencies, pass, dateLimitStr, forceRebuild) {
  const { db, logger } = dependencies;
  const sessionId = `pass_${pass}_${dateLimitStr.replace(/-/g, '')}`;
  const sessionRef = db.collection('dispatcher_sessions').doc(sessionId);

  if (!forceRebuild) {
    const snap = await sessionRef.get();
    if (snap.exists) {
      const ageMs = Date.now() - new Date(snap.data().createdAt).getTime();
      if (ageMs < SESSION_CACHE_DURATION_MS) {
        return snap.data().dates;
      }
    }
  }

  logger.log('INFO', `[Session] 🔄 Rebuilding dispatch session for Pass ${pass}...`);
  const earliest = await getEarliestDataDates(config, dependencies);
  const dates = getExpectedDateStrings(earliest.absoluteEarliest, new Date(dateLimitStr + 'T00:00:00Z'));
  await sessionRef.set({ dates, createdAt: new Date().toISOString(), configHash: dateLimitStr });
  return dates;
}
|
|
187
|
-
|
|
188
|
-
// =============================================================================
|
|
189
|
-
// HANDLERS
|
|
190
|
-
// =============================================================================
|
|
191
|
-
|
|
192
|
-
// 2. NEW SNAPSHOT HANDLER (Asynchronous with Callback)
/**
 * Generates daily snapshots from the earliest available root-data date up to
 * reqBody.date (or today), in parallel batches of 5, then notifies the
 * calling Cloud Workflow via its callback URL — on success, empty range, or
 * failure (otherwise the workflow would hang waiting for the callback).
 *
 * REFACTOR: the callback-notification logic was duplicated verbatim in three
 * places (success, empty-range, and error paths); it is now a single local
 * helper that preserves the exact log messages of each path.
 *
 * @param {object} config - System configuration.
 * @param {object} dependencies - Injected services (logger, db, ...).
 * @param {{date?: string, callback_url?: string}} reqBody - Request body.
 * @returns {Promise<object>} summary result ({status, processed, skipped, ...}
 *   or {status: 'ERROR', error}).
 */
async function handleSnapshot(config, dependencies, reqBody) {
  const { logger } = dependencies;
  const targetDate = reqBody.date; // Optional: if provided, only process up to this date
  const callbackUrl = reqBody.callback_url; // Callback URL from workflow

  logger.log('INFO', `[Dispatcher] 📸 Snapshot request received. Date: ${targetDate || 'all'}, Callback URL: ${callbackUrl || 'NOT PROVIDED'}`);

  // Use native fetch if available (Node 18+), otherwise fall back to node-fetch
  const directFetch = typeof fetch !== 'undefined' ? fetch : require('node-fetch');

  // Notifies the workflow (success or error path). Never throws: callback
  // failures are logged so the handler can still return its result.
  const notifyWorkflow = async (result, isErrorPath) => {
    if (!callbackUrl) {
      logger.log('WARN', isErrorPath
        ? '[Dispatcher] No callback URL provided, workflow will not be notified of error'
        : '[Dispatcher] No callback URL provided, workflow will not be notified');
      return;
    }
    logger.log('INFO', isErrorPath
      ? `[Dispatcher] 📞 Calling back Workflow with error at: ${callbackUrl}`
      : `[Dispatcher] 📞 Calling back Workflow at: ${callbackUrl}`);
    try {
      await sendAuthenticatedCallback(callbackUrl, result, logger, directFetch);
    } catch (err) {
      logger.log('ERROR', isErrorPath
        ? `[Dispatcher] Error callback failed: ${err.message}`
        : `[Dispatcher] Callback failed: ${err.message}`);
    }
  };

  try {
    // Get earliest available root data date
    const earliestDates = await getEarliestDataDates(config, dependencies);
    const earliestDate = earliestDates.absoluteEarliest;

    if (!earliestDate) {
      throw new Error('Could not determine earliest available root data date');
    }

    // Determine end date: use targetDate if provided, otherwise use today
    const endDate = targetDate ? new Date(targetDate + 'T00:00:00Z') : new Date();
    endDate.setUTCHours(0, 0, 0, 0);

    // Generate all dates from earliest to end date
    const startDate = new Date(earliestDate);
    startDate.setUTCHours(0, 0, 0, 0);

    const dateStrings = getExpectedDateStrings(startDate, endDate);

    if (dateStrings.length === 0) {
      logger.log('WARN', '[Dispatcher] No dates to process for snapshot');
      const finalResult = { status: 'OK', processed: 0, skipped: 0 };
      await notifyWorkflow(finalResult, false);
      return finalResult;
    }

    logger.log('INFO', `[Dispatcher] 📸 Processing snapshots for ${dateStrings.length} dates from ${dateStrings[0]} to ${dateStrings[dateStrings.length - 1]}`);

    // Process each date (snapshot service will skip if already exists)
    const results = [];
    const BATCH_SIZE = 5; // Process 5 dates in parallel to avoid overwhelming the system

    for (let i = 0; i < dateStrings.length; i += BATCH_SIZE) {
      const batch = dateStrings.slice(i, i + BATCH_SIZE);
      const batchResults = await Promise.allSettled(
        batch.map(dateStr => generateDailySnapshots(dateStr, config, dependencies))
      );

      batchResults.forEach((result, idx) => {
        const dateStr = batch[idx];
        if (result.status === 'fulfilled') {
          results.push({ date: dateStr, status: result.value.status || 'OK' });
        } else {
          logger.log('ERROR', `[Dispatcher] Snapshot failed for ${dateStr}: ${result.reason?.message || result.reason}`);
          results.push({ date: dateStr, status: 'ERROR', error: result.reason?.message || String(result.reason) });
        }
      });
    }

    const successful = results.filter(r => r.status === 'OK').length;
    const skipped = results.filter(r => r.status === 'SKIPPED').length;
    const failed = results.filter(r => r.status === 'ERROR').length;

    logger.log('INFO', `[Dispatcher] 📸 Snapshot batch complete: ${successful} processed, ${skipped} skipped, ${failed} failed out of ${results.length} total`);

    const finalResult = {
      status: failed === 0 ? 'OK' : 'PARTIAL',
      processed: successful,
      skipped: skipped,
      failed: failed,
      total: results.length,
      results: results
    };

    // Send callback to workflow if provided
    await notifyWorkflow(finalResult, false);
    return finalResult;
  } catch (e) {
    logger.log('ERROR', `[Dispatcher] Snapshot failed: ${e.message}`);
    const errorResult = { status: 'ERROR', error: e.message };

    // Notify workflow of failure too, otherwise it hangs!
    await notifyWorkflow(errorResult, true);
    return errorResult;
  }
}
|
|
312
|
-
|
|
313
|
-
/**
 * Manually force-runs a computation across one date or every candidate date.
 * Performs a "deep check": resolves the target's full dependency chain and
 * skips any date where an ancestor's root data can never exist, then uses the
 * standard runnability assessment (with schedule constraints removed) before
 * dispatching tasks to the requested Pub/Sub topic.
 *
 * FIX: uses the module-level `crypto` import for randomUUID() instead of
 * re-requiring the module inline — consistent with handleSweepDispatch.
 *
 * @param {object} config - System configuration (topic names, etc.).
 * @param {object} dependencies - Injected services (logger, db, ...).
 * @param {Array<object>} computationManifest - All computation definitions.
 * @param {{computation: string, date?: string, resources?: string}} reqBody
 * @returns {Promise<object>} dispatch summary or NO_RUNNABLE_DATES report.
 * @throws {Error} when the computation name is missing or unknown.
 */
async function handleForceRun(config, dependencies, computationManifest, reqBody) {
  const { logger } = dependencies;
  const computationName = reqBody.computation;
  const dateInput = reqBody.date;

  if (!computationName) throw new Error('Force Run requires "computation" name.');

  const manifestItem = computationManifest.find(c => normalizeName(c.name) === normalizeName(computationName));
  if (!manifestItem) throw new Error(`Computation '${computationName}' not found.`);

  // --- STEP 1: RESOLVE FULL ANCESTRY ---
  // We get the full chain of dependencies (ancestors) for the target.
  // This includes the target itself and all upstream computations.
  const chainPasses = resolveDependencyChain(computationName, computationManifest);
  const allAncestors = chainPasses.flatMap(p => p.computations); // Flat list of all required names

  // Create a map for quick lookup of ancestor manifests
  const manifestMap = new Map(computationManifest.map(c => [normalizeName(c.name), c]));

  // --- STEP 2: DETERMINE CANDIDATE DATES ---
  let candidateDates = [];
  if (dateInput) {
    candidateDates = [dateInput];
  } else {
    logger.log('INFO', `[ForceRun] No date provided. Calculating date range for ${computationName}...`);
    const earliest = await getEarliestDataDates(config, dependencies);
    candidateDates = getExpectedDateStrings(earliest.absoluteEarliest, new Date());
  }

  logger.log('INFO', `[ForceRun] Checking ${candidateDates.length} candidate dates for runnability (Deep Check)...`);

  const runnableDates = [];
  const skippedDates = [];
  const targetComputationNormalized = normalizeName(computationName);
  // Remove schedule constraints for the force run assessment
  const targetComp = { ...manifestItem, schedule: null };

  for (const date of candidateDates) {
    // --- STEP 3: DEEP ROOT CHECK ---
    // Before even asking if the *target* is runnable, we ask:
    // "Are the raw ingredients available for the ENTIRE chain?"

    // Fetch Root Data Status for this date once
    const availability = await checkRootDataAvailability(date, config, dependencies, DEFINITIVE_EARLIEST_DATES);
    const rootStatus = availability ? availability.status : null;

    if (!rootStatus) {
      skippedDates.push({ date, reason: 'Availability Index Missing' });
      continue;
    }

    let deepImpossibleReason = null;

    // Check EVERY ancestor's root requirements
    for (const ancName of allAncestors) {
      const ancManifest = manifestMap.get(normalizeName(ancName));
      if (!ancManifest) continue;

      // Re-use the standard checker for each ancestor
      const ancCheck = checkRootDependencies(ancManifest, rootStatus);

      if (!ancCheck.canRun) {
        // If an ancestor cannot exist, the target cannot exist.
        deepImpossibleReason = `Ancestor '${ancName}' is missing roots: ${ancCheck.missing.join(', ')}`;
        break;
      }
    }

    if (deepImpossibleReason) {
      // Skip this date entirely - it is strictly impossible.
      skippedDates.push({ date, reason: deepImpossibleReason });
      continue;
    }

    // --- STEP 4: STANDARD RUNNABILITY ---
    // If deep roots are fine, we proceed to the standard check.
    // This handles logic like "Waiting for yesterday" or "Already Complete"
    const result = await assessDateRunnability(date, [targetComp], config, dependencies, manifestMap);

    if (!result) {
      skippedDates.push({ date, reason: 'Assessment Failed' });
      continue;
    }

    const { report } = result;
    const isRunnable = report.runnable.some(t => normalizeName(t.name) === targetComputationNormalized);
    const needsReRun = report.reRuns.some(t => normalizeName(t.name) === targetComputationNormalized);
    const hasFailedDep = report.failedDependency.some(t => normalizeName(t.name) === targetComputationNormalized);
    const isSkipped = report.skipped.some(t => normalizeName(t.name) === targetComputationNormalized);

    // NOTE: hasFailedDep is ALLOWED here because we are "Forcing" it.
    // We know the roots exist (checked above), so the missing dependency is likely just
    // "Not Computed Yet", which is exactly what the user wants to fix manually.
    if (isRunnable || needsReRun || hasFailedDep || isSkipped) {
      runnableDates.push(date);
    } else if (report.blocked.length > 0) {
      // Blocked usually means "Waiting for yesterday"
      // For force runs, we often want to override this, but if it's strictly blocked
      // by logic, we might still count it. Usually, we treat it as runnable.
      runnableDates.push(date);
    } else {
      const imp = report.impossible.find(t => normalizeName(t.name) === targetComputationNormalized);
      skippedDates.push({ date, reason: imp ? imp.reason : 'Unknown State' });
    }
  }

  logger.log('INFO', `[ForceRun] ✅ Found ${runnableDates.length} runnable dates out of ${candidateDates.length} candidates`);

  if (runnableDates.length === 0) {
    return {
      status: 'NO_RUNNABLE_DATES',
      computation: computationName,
      mode: dateInput ? 'SINGLE_DATE' : 'ALL_DATES',
      datesChecked: candidateDates.length,
      datesRunnable: 0,
      skippedReasons: skippedDates.slice(0, 10)
    };
  }

  // Dispatch Logic
  const topic = (reqBody.resources === 'high-mem')
    ? (config.computationTopicHighMem || 'computation-tasks-highmem')
    : (config.computationTopicStandard || 'computation-tasks');

  // Use the module-level crypto import (no inline require).
  const dispatchId = crypto.randomUUID();
  const tasks = runnableDates.map(date =>
    createTaskPayload(manifestItem, date, manifestItem.pass || "1", dispatchId, reqBody.resources, 'MANUAL_FORCE_API')
  );

  const dispatchedCount = await publishTaskBatch(dependencies, config, tasks, manifestItem.pass || "1", topic);

  return {
    status: 'FORCED',
    computation: computationName,
    mode: dateInput ? 'SINGLE_DATE' : 'ALL_DATES',
    datesChecked: candidateDates.length,
    datesRunnable: runnableDates.length,
    datesTriggered: dispatchedCount,
    skippedCount: skippedDates.length,
    targetTopic: topic
  };
}
|
|
457
|
-
|
|
458
|
-
/**
 * Verification sweep: scans every session date for a pass and reports the
 * dates that still have runnable or re-runnable work, with a rough ETA
 * (total manifest weight × BASE_SECONDS_PER_WEIGHT_UNIT, floored at 30s).
 */
async function handlePassVerification(config, dependencies, computationManifest, reqBody) {
  const { logger } = dependencies;
  const pass = String(reqBody.pass || "1");
  const dateLimit = reqBody.date || "2025-01-01";

  logger.log('INFO', `[Verify] 🧹 Sweeping Pass ${pass} for unfinished work...`);

  const sessionDates = await getStableDateSession(config, dependencies, pass, dateLimit, false);
  const passComputations = groupByPass(computationManifest)[pass] || [];
  const manifestMap = new Map(computationManifest.map(c => [normalizeName(c.name), c]));
  const weightMap = new Map(computationManifest.map(c => [normalizeName(c.name), c.weight || 1.0]));

  const missingTasks = [];

  for (const date of sessionDates) {
    const scheduled = passComputations.filter(c => isComputationScheduled(date, c.schedule));
    if (scheduled.length === 0) continue;

    const assessment = await assessDateRunnability(date, scheduled, config, dependencies, manifestMap);
    if (!assessment) continue;

    const pending = [...assessment.report.runnable, ...assessment.report.reRuns];
    if (pending.length === 0) continue;

    const totalWeight = pending.reduce(
      (sum, t) => sum + (weightMap.get(normalizeName(t.name)) || 1.0),
      0
    );
    const eta = Math.max(30, Math.ceil(totalWeight * BASE_SECONDS_PER_WEIGHT_UNIT));
    missingTasks.push({ date, taskCount: pending.length, eta });
  }

  logger.log('INFO', `[Verify] Found ${missingTasks.length} dates with pending work.`);
  return { missingTasks };
}
|
|
492
|
-
|
|
493
|
-
/**
 * Sweep recovery for a single date/pass: re-dispatches unfinished tasks to
 * the high-mem topic, honoring fresh in-progress locks and permanently gated
 * failures (same hash + quality/semantic gate) while breaking stale zombies.
 */
async function handleSweepDispatch(config, dependencies, computationManifest, reqBody) {
  const { logger, db } = dependencies;
  const pass = String(reqBody.pass || "1");
  const date = reqBody.date;
  if (!date) throw new Error('Sweep dispatch requires date');

  const manifestMap = new Map(computationManifest.map(c => [normalizeName(c.name), c]));
  const calculations = groupByPass(computationManifest)[pass] || [];
  const scheduled = calculations.filter(c => isComputationScheduled(date, c.schedule));

  if (!scheduled.length) return { dispatched: 0 };

  const result = await assessDateRunnability(date, scheduled, config, dependencies, manifestMap);
  if (!result) return { dispatched: 0 };

  const pending = [...result.report.runnable, ...result.report.reRuns];
  const validTasks = [];

  for (const task of pending) {
    const ledgerPath = `computation_audit_ledger/${date}/passes/${pass}/tasks/${normalizeName(task.name)}`;
    const ledgerDoc = await db.doc(ledgerPath).get();

    if (ledgerDoc.exists) {
      const ledger = ledgerDoc.data();

      // Active tasks hold a lock; honor it only while the heartbeat is fresh.
      if (['PENDING', 'IN_PROGRESS'].includes(ledger.status)) {
        const lastActivity = getMillis(ledger.telemetry?.lastHeartbeat) || getMillis(ledger.startedAt);
        const lockIsFresh = (Date.now() - lastActivity) < STALE_LOCK_THRESHOLD_MS;
        if (lockIsFresh) continue;
        logger.log('WARN', `[Sweep] 🧟 Breaking ZOMBIE lock for ${task.name}`);
      }

      // Gated failures for the same hash are never retried by the sweep.
      const isGated = ['QUALITY_CIRCUIT_BREAKER', 'SEMANTIC_GATE'].includes(ledger.error?.stage);
      if (isGated && ledger.hash === task.hash) {
        continue;
      }
    }
    validTasks.push(task);
  }

  const dispatchId = crypto.randomUUID();
  const tasksPayload = validTasks.map(t =>
    createTaskPayload(t, date, pass, dispatchId, 'high-mem', 'SWEEP_RECOVERY')
  );

  return { dispatched: await publishTaskBatch(dependencies, config, tasksPayload, pass, config.computationTopicHighMem) };
}
|
|
538
|
-
|
|
539
|
-
/**
 * Standard per-tick dispatch: walks the stable date session starting at the
 * caller's cursor, fast-forwarding over dates with nothing runnable, and
 * publishes the first non-empty batch of tasks (split by resource tier).
 *
 * @param {object} config - System configuration (topic names, thresholds).
 * @param {object} dependencies - Injected services ({ logger, db, ... }).
 * @param {Array<object>} computationManifest - Full computation manifest.
 * @param {object} reqBody - { pass, date, cursorIndex, forceRebuild }.
 * @returns {Promise<object>} Cursor envelope for the orchestrator:
 *   { status, dateProcessed, dispatched, nextCursor, remainingDates, ... }.
 */
async function handleStandardDispatch(config, dependencies, computationManifest, reqBody) {
  const { logger, db } = dependencies;
  const pass = String(reqBody.pass || "1");
  const dateLimit = reqBody.date || "2025-01-01";
  // FIX: radix-10 parse with a positive-integer guard. Previously
  // `parseInt(reqBody.cursorIndex || 1)` had no radix and no NaN check, so a
  // non-numeric cursorIndex propagated NaN through every cursor comparison
  // below (NaN comparisons are all false) and returned nextCursor: NaN.
  const parsedCursor = Number.parseInt(reqBody.cursorIndex, 10);
  const targetCursorN = Number.isInteger(parsedCursor) && parsedCursor > 0 ? parsedCursor : 1;

  const manifestMap = new Map(computationManifest.map(c => [normalizeName(c.name), c]));
  const passComputations = groupByPass(computationManifest)[pass] || [];
  const manifestWeightMap = new Map(computationManifest.map(c => [normalizeName(c.name), c.weight || 1.0]));
  const sessionDates = await getStableDateSession(config, dependencies, pass, dateLimit, reqBody.forceRebuild);

  // Nothing in this pass, no session, or cursor already past the last date.
  if (!passComputations.length || !sessionDates || targetCursorN > sessionDates.length) {
    return { status: 'MOVE_TO_NEXT_PASS', dispatched: 0 };
  }

  const MAX_SCAN = 50;       // Max dates examined in one invocation.
  const TIME_LIMIT = 40000;  // ms budget before yielding back to the orchestrator.
  const startT = Date.now();

  let currentCursor = targetCursorN;
  let tasksToDispatch = [];
  let processedDate = null;
  let scanned = 0;

  // Fast-forward: scan forward until a date yields dispatchable tasks, the
  // scan/time budget is exhausted, or the session is exhausted.
  while (currentCursor <= sessionDates.length) {
    scanned++;
    processedDate = sessionDates[currentCursor - 1];

    if ((Date.now() - startT) > TIME_LIMIT || scanned > MAX_SCAN) {
      logger.log('INFO', `[Dispatcher] ⏩ Fast-forward paused at ${processedDate} after scanning ${scanned} dates.`);
      break;
    }

    const scheduled = passComputations.filter(c => isComputationScheduled(processedDate, c.schedule));
    if (scheduled.length === 0) {
      currentCursor++;
      continue;
    }

    const result = await assessDateRunnability(processedDate, scheduled, config, dependencies, manifestMap);
    if (result && (result.report.runnable.length > 0 || result.report.reRuns.length > 0)) {
      // SimHash auto-resolution may settle some candidates without dispatch.
      let candidates = await attemptSimHashResolution(dependencies, processedDate, [...result.report.runnable, ...result.report.reRuns], result.dailyStatus, manifestMap);
      const { standard, highMem } = await resolveRoutes(db, processedDate, pass, candidates, logger);
      tasksToDispatch = [...standard, ...highMem];

      if (tasksToDispatch.length > 0) break;
    }
    currentCursor++;
  }

  // Session fully scanned and nothing to do: report completion of this pass.
  if (currentCursor > sessionDates.length && tasksToDispatch.length === 0) {
    return {
      status: 'CONTINUE_PASS',
      dateProcessed: processedDate,
      dispatched: 0,
      n_cursor_ignored: false,
      remainingDates: 0,
      nextCursor: currentCursor
    };
  }

  // Budget exhausted mid-scan: resume at the same cursor next tick.
  if (tasksToDispatch.length === 0) {
    return {
      status: 'CONTINUE_PASS',
      dateProcessed: processedDate,
      dispatched: 0,
      n_cursor_ignored: false,
      remainingDates: sessionDates.length - currentCursor + 1,
      nextCursor: currentCursor
    };
  }

  // ETA is proportional to total manifest weight, floored at 20 seconds.
  const totalWeight = tasksToDispatch.reduce((sum, t) => sum + (manifestWeightMap.get(normalizeName(t.name)) || 1.0), 0);
  const etaSeconds = Math.max(20, Math.ceil(totalWeight * BASE_SECONDS_PER_WEIGHT_UNIT));

  if (scanned > 1) {
    logger.log('INFO', `[Dispatcher] ⏩ Fast-forwarded ${scanned - 1} empty dates. Dispatching ${tasksToDispatch.length} tasks for ${processedDate}.`);
  } else {
    logger.log('INFO', `[Dispatcher] ✅ Dispatching ${tasksToDispatch.length} tasks for ${processedDate}.`);
  }

  const dispatchId = crypto.randomUUID();
  const standardPayload = tasksToDispatch.filter(t => t.resources !== 'high-mem').map(t => createTaskPayload(t, processedDate, pass, dispatchId, 'standard', t.reason));
  const highMemPayload = tasksToDispatch.filter(t => t.resources === 'high-mem').map(t => createTaskPayload(t, processedDate, pass, dispatchId, 'high-mem', t.reason));

  // Publish both tiers concurrently; each batch goes to its own topic.
  await Promise.all([
    publishTaskBatch(dependencies, config, standardPayload, pass),
    publishTaskBatch(dependencies, config, highMemPayload, pass)
  ]);

  return {
    status: 'CONTINUE_PASS',
    dateProcessed: processedDate,
    dispatched: tasksToDispatch.length,
    n_cursor_ignored: false,
    etaSeconds: etaSeconds,
    // NOTE(review): remainingDates is computed from the ORIGINAL cursor while
    // nextCursor uses the fast-forwarded one — looks inconsistent; confirm
    // downstream consumers before unifying.
    remainingDates: sessionDates.length - targetCursorN,
    nextCursor: currentCursor + 1
  };
}
|
|
639
|
-
|
|
640
|
-
// =============================================================================
|
|
641
|
-
// LOGIC: Resolution & Routing
|
|
642
|
-
// =============================================================================
|
|
643
|
-
|
|
644
|
-
/**
 * Auto-resolves tasks whose previous run's simHash matches the simHash
 * registered for the manifest's current code hash: such tasks are marked
 * resolved in `computation_status` instead of being re-dispatched.
 *
 * @param {object} dependencies - { db, logger }.
 * @param {string} date - Status document id (date being processed).
 * @param {Array<object>} tasks - Candidate tasks ({ name, ... }).
 * @param {object|null} dailyStatus - Per-task status map for `date`.
 * @param {Map<string, object>} manifestMap - normalized name -> manifest entry.
 * @returns {Promise<Array<object>>} Tasks still requiring dispatch.
 */
async function attemptSimHashResolution(dependencies, date, tasks, dailyStatus, manifestMap) {
  const { db, logger } = dependencies;
  const resolved = [], remaining = [];
  // Code hash -> registered simHash (null when the registry has no entry).
  // FIX: misses are now cached too; previously a manifest hash absent from
  // the registry was re-fetched once per task sharing that hash.
  const hashCache = new Map();

  for (const task of tasks) {
    const status = dailyStatus ? dailyStatus[task.name] : null;
    const manifest = manifestMap.get(normalizeName(task.name));

    if (status?.simHash && manifest) {
      if (!hashCache.has(manifest.hash)) {
        const doc = await db.collection('system_simhash_registry').doc(manifest.hash).get();
        hashCache.set(manifest.hash, doc.exists ? doc.data().simHash : null);
      }
      const knownSimHash = hashCache.get(manifest.hash);
      if (knownSimHash !== null && knownSimHash === status.simHash) {
        resolved.push({ name: task.name, hash: manifest.hash, simHash: knownSimHash, prevStatus: status });
        continue;
      }
    }
    remaining.push(task);
  }

  if (resolved.length) {
    // Persist resolutions in one merged write against the date's status doc.
    const updates = {};
    resolved.forEach(t => updates[t.name] = { ...t.prevStatus, hash: t.hash, simHash: t.simHash, reason: 'SimHash Auto-Resolve', lastUpdated: new Date().toISOString() });
    await db.collection('computation_status').doc(date).set(updates, { merge: true });
    logger.log('INFO', `[SimHash] ⏩ Resolved ${resolved.length} tasks for ${date}.`);
  }
  return remaining;
}
|
|
678
|
-
|
|
679
|
-
/**
 * Splits candidate tasks into standard and high-memory queues by inspecting
 * each task's audit-ledger record for the given date/pass. Tasks holding a
 * fresh PENDING/IN_PROGRESS lock, or FAILED with unchanged code at a quality
 * gate or on the high-mem tier, are dropped entirely.
 *
 * @param {object} db - Firestore-like handle ({ doc(path).get() }).
 * @param {string} date - Processing date.
 * @param {string} pass - Pass identifier.
 * @param {Array<object>} tasks - Candidate tasks ({ name, hash, ... }).
 * @param {object} logger - Logger ({ log(level, msg) }).
 * @returns {Promise<{standard: Array<object>, highMem: Array<object>}>}
 */
async function resolveRoutes(db, date, pass, tasks, logger) {
  // Classify one task; null means "skip this task entirely".
  const classify = async (task) => {
    const name = normalizeName(task.name);
    const snapshot = await db.doc(`computation_audit_ledger/${date}/passes/${pass}/tasks/${name}`).get();

    // No ledger entry yet: first attempt, route to the standard tier.
    if (!snapshot.exists) return { task, type: 'std' };

    const record = snapshot.data();

    if (['PENDING', 'IN_PROGRESS'].includes(record.status)) {
      const lastActive = getMillis(record.telemetry?.lastHeartbeat) || getMillis(record.startedAt);
      const isFresh = (Date.now() - lastActive) < STALE_LOCK_THRESHOLD_MS;
      if (isFresh) return null; // Another worker still owns it.
      logger.log('WARN', `[Dispatcher] 🧟 Breaking stale lock for ${name}`);
    }

    if (record.status !== 'FAILED') return { task, type: 'std' };

    const codeChanged = record.hash !== task.hash;

    // Quality-gate failures only rerun after the code version changes.
    if (['QUALITY_CIRCUIT_BREAKER', 'SEMANTIC_GATE'].includes(record.error?.stage)) {
      if (!codeChanged) return null;
      return { task: { ...task, reason: 'Retry: Code Version Changed' }, type: 'std' };
    }

    // Already failed on high-mem: only retry (back on standard) if code changed.
    if (record.resourceTier === 'high-mem') {
      if (!codeChanged) return null;
      return { task: { ...task, reason: 'Retry: Code Changed (HighMem Reset)' }, type: 'std' };
    }

    // Generic failure: escalate to the high-memory tier.
    return { task: { ...task, reason: `Retry: ${record.error?.message}`, resources: 'high-mem' }, type: 'high' };
  };

  const outcomes = await Promise.all(tasks.map(classify));

  const standard = [];
  const highMem = [];
  for (const outcome of outcomes) {
    if (outcome === null) continue;
    (outcome.type === 'high' ? highMem : standard).push(outcome.task);
  }

  return { standard, highMem };
}
|
|
722
|
-
|
|
723
|
-
// =============================================================================
|
|
724
|
-
// MAIN ENTRY
|
|
725
|
-
// =============================================================================
|
|
726
|
-
|
|
727
|
-
/**
 * Main entry point: routes the request to the handler matching its action.
 * Requests with no recognized action (plain scheduler ticks) fall through
 * to the standard dispatcher.
 *
 * @param {object} config - System configuration.
 * @param {object} dependencies - Injected services.
 * @param {Array<object>} computationManifest - Full computation manifest.
 * @param {object} [reqBody={}] - Request body; `action` selects the handler.
 * @returns {Promise<object>} The selected handler's result envelope.
 */
async function dispatchComputationPass(config, dependencies, computationManifest, reqBody = {}) {
  const action = reqBody.action;
  if (action === 'VERIFY') return handlePassVerification(config, dependencies, computationManifest, reqBody);
  if (action === 'SWEEP') return handleSweepDispatch(config, dependencies, computationManifest, reqBody);
  if (action === 'REPORT') return handleFinalSweepReporting(config, dependencies, computationManifest, reqBody);
  if (action === 'FORCE_RUN') return handleForceRun(config, dependencies, computationManifest, reqBody);
  if (action === 'SNAPSHOT') return handleSnapshot(config, dependencies, reqBody);
  return handleStandardDispatch(config, dependencies, computationManifest, reqBody);
}
|
|
737
|
-
|
|
738
|
-
/**
 * REPORT action: runs the final sweep check for a date/pass and summarizes
 * the outcome. Errors are logged and returned as a status envelope rather
 * than thrown.
 *
 * @param {object} config - System configuration.
 * @param {object} dependencies - { logger, ... }.
 * @param {Array<object>} computationManifest - Full computation manifest.
 * @param {object} reqBody - { date?, pass? }.
 * @returns {Promise<object>} { status: 'COMPLETED', date, issues } on success,
 *   { status: 'ERROR', error } on failure.
 */
async function handleFinalSweepReporting(config, dependencies, computationManifest, reqBody) {
  const { logger } = dependencies;
  // Default to today's UTC date (YYYY-MM-DD) when the caller supplies none.
  const targetDate = reqBody.date || new Date().toISOString().slice(0, 10);
  const targetPass = String(reqBody.pass || "1");
  try {
    const report = await runFinalSweepCheck(config, dependencies, targetDate, targetPass, computationManifest);
    return { status: 'COMPLETED', date: targetDate, issues: report.issuesCount };
  } catch (err) {
    logger.log('ERROR', `[Dispatcher] Report failed: ${err.message}`);
    return { status: 'ERROR', error: err.message };
  }
}
|
|
749
|
-
|
|
750
|
-
// Public API: single action-routing entry point consumed by the orchestrator.
module.exports = { dispatchComputationPass };
|