bulltrackers-module 1.0.316 → 1.0.318
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/tools/BuildReporter.js +194 -178
- package/index.js +9 -4
- package/package.json +1 -1
package/functions/computation-system/tools/BuildReporter.js
CHANGED

@@ -1,24 +1,60 @@
 /**
- *
- *
- *
- * }
+ * FILENAME: bulltrackers-module/functions/computation-system/tools/BuildReporter.js
+ * UPGRADED: Offloads heavy logic to a dedicated Cloud Function via Pub/Sub.
+ * FEATURES: Patch versioning, data-drift detection (window changes), and checkpointed writes.
  */
+
 const { analyzeDateExecution } = require('../WorkflowOrchestrator');
 const { fetchComputationStatus, updateComputationStatus } = require('../persistence/StatusRepository');
-const { normalizeName, getExpectedDateStrings, DEFINITIVE_EARLIEST_DATES
+const { normalizeName, getExpectedDateStrings, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils');
 const { checkRootDataAvailability } = require('../data/AvailabilityChecker');
 const SimRunner = require('../simulation/SimRunner');
-const SYSTEM_EPOCH = require('../system_epoch');
+const SYSTEM_EPOCH = require('../system_epoch');
 const pLimit = require('p-limit');
 const path = require('path');
 const crypto = require('crypto');
-
+
+// Load package info for versioning
 const packageJson = require(path.join(__dirname, '..', '..', '..', 'package.json'));
 const packageVersion = packageJson.version;
 
+const BUILD_RECORDS_COLLECTION = 'computation_build_records';
+const BUILD_METADATA_DOC = 'system_build_metadata';
 const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';
 
+/**
+ * Publishes a message to trigger the dedicated Build Reporter Cloud Function.
+ * Replaces the old ensureBuildReport that ran locally on module load.
+ */
+async function requestBuildReport(config, dependencies) {
+  const { pubsubUtils, logger } = dependencies;
+  try {
+    await pubsubUtils.publish(config.buildReporterTopic, {
+      requestedAt: new Date().toISOString(),
+      packageVersion: packageVersion
+    });
+    logger.log('INFO', `[BuildReporter] 🛰️ Trigger message sent to ${config.buildReporterTopic}`);
+    return { success: true };
+  } catch (e) {
+    logger.log('ERROR', `[BuildReporter] Failed to publish trigger: ${e.message}`);
+    throw e;
+  }
+}
+
+/**
+ * Cloud Function Entry Point for the Build Reporter.
+ */
+async function handleBuildReportTrigger(message, context, config, dependencies, manifest) {
+  const { logger } = dependencies;
+  logger.log('INFO', `[BuildReporter] 📥 Trigger received. Starting build analysis...`);
+  try {
+    return await generateBuildReport(config, dependencies, manifest);
+  } catch (e) {
+    logger.log('ERROR', `[BuildReporter] Fatal error in execution: ${e.message}`);
+    throw e;
+  }
+}
+
 /**
  * Replaces expensive file walking with System Epoch + Manifest Hash.
  */
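The upgrade splits the reporter into a lightweight producer and a heavy consumer connected by Pub/Sub: requestBuildReport only publishes a trigger message, while handleBuildReportTrigger runs the actual analysis inside a dedicated Cloud Function. A minimal wiring sketch follows; it is not part of this diff, and the firebase-functions v1 entry points, the topic name, and the stubbed config/dependencies objects are all assumptions about how a deployment might glue these exports together.

    // Sketch only: deployment glue is not included in the package diff above.
    const functions = require('firebase-functions/v1');
    const admin = require('firebase-admin');
    const { PubSub } = require('@google-cloud/pubsub');
    const {
      requestBuildReport,
      handleBuildReportTrigger
    } = require('bulltrackers-module/functions/computation-system/tools/BuildReporter');

    admin.initializeApp();
    const pubsub = new PubSub();

    // Hypothetical shapes; the real config/dependencies are assembled elsewhere in the system.
    const config = { buildReporterTopic: 'build-reporter-requests' };
    const dependencies = {
      db: admin.firestore(),
      logger: { log: (level, msg) => console.log(`[${level}] ${msg}`) },
      pubsubUtils: { publish: (topic, json) => pubsub.topic(topic).publishMessage({ json }) }
    };
    const manifest = []; // placeholder; a real deployment would pass ManifestBuilder's output here

    // Lightweight producer: cheap to call from anywhere that previously ran the report inline.
    exports.triggerBuildReport = functions.https.onRequest(async (req, res) => {
      await requestBuildReport(config, dependencies);
      res.status(202).send('Build report requested');
    });

    // Heavy consumer: the dedicated Cloud Function that does the analysis.
    exports.buildReporter = functions.pubsub
      .topic(config.buildReporterTopic)
      .onPublish((message, context) =>
        handleBuildReportTrigger(message, context, config, dependencies, manifest));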
@@ -29,6 +65,20 @@ function getSystemFingerprint(manifest) {
     .digest('hex');
 }
 
+/**
+ * Increments the patch number for the current package version in Firestore.
+ */
+async function getNextBuildId(db, version) {
+  const metaRef = db.collection(BUILD_RECORDS_COLLECTION).doc(BUILD_METADATA_DOC);
+  return await db.runTransaction(async (t) => {
+    const doc = await t.get(metaRef);
+    const data = doc.exists ? doc.data() : {};
+    const currentPatch = (data[version] || 0) + 1;
+    t.set(metaRef, { [version]: currentPatch }, { merge: true });
+    return `v${version}_p${currentPatch}`;
+  });
+}
+
 function isDateBeforeAvailability(dateStr, calcManifest) {
   const targetDate = new Date(dateStr + 'T00:00:00Z');
   const deps = calcManifest.rootDataDependencies || [];
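getNextBuildId keeps one patch counter per package version in a single metadata document and bumps it inside a Firestore transaction, so concurrent triggers cannot mint the same build ID. The function is internal to this module; the sketch below, with illustrative document contents, only shows the behaviour of the counter.

    // computation_build_records/system_build_metadata (illustrative contents):
    //   { "1.0.317": 4, "1.0.318": 2 }
    // One monotonically increasing patch counter per package version.
    async function demo(db) {
      const first = await getNextBuildId(db, '1.0.318');  // "v1.0.318_p3"
      const second = await getNextBuildId(db, '1.0.318'); // "v1.0.318_p4"
      return { first, second };
    }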
@@ -65,11 +115,6 @@ function calculateBlastRadius(targetCalcName, reverseGraph) {
   };
 }
 
-/**
- * [OPTIMIZED] Logic Stability Check
- * We trust SimHash here. If the code logic (behavior) is stable, we mark it stable.
- * We do NOT check dependency drift here. The WorkflowOrchestrator handles that at runtime.
- */
 async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, logger, simHashCache, db) {
   const trueReRuns = [];
   const stableUpdates = [];
@@ -80,13 +125,11 @@ async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, l
     const manifest = manifestMap.get(item.name);
     const stored = dailyStatus[item.name];
 
-    // 1. If no history exists, it must run.
     if (!stored || !stored.simHash || !manifest) {
       trueReRuns.push(item);
       return;
     }
 
-    // 2. Fetch or Compute SimHash
     let newSimHash = simHashCache.get(manifest.hash);
     if (!newSimHash) {
       newSimHash = await SimRunner.run(manifest, manifestMap);
@@ -98,11 +141,7 @@ async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, l
       }).catch(err => logger.log('WARN', `Failed to write SimHash registry for ${manifest.name}: ${err.message}`));
     }
 
-    // 3. Behavioral Comparison
     if (newSimHash === stored.simHash) {
-      // STABLE: The logic is identical for mocked data.
-      // We mark this as stable, allowing the Orchestrator to skip it
-      // UNLESS the actual production input data has changed.
       stableUpdates.push({
         ...item,
         reason: "Code Updated (Logic Stable)",
@@ -110,7 +149,6 @@ async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, l
         newHash: manifest.hash
       });
     } else {
-      // UNSTABLE: The logic actually produces different results.
       trueReRuns.push({
         ...item,
         reason: item.reason + ` [SimHash Mismatch]`,
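For re-run candidates the stability check reduces to a three-way decision, and the SimHash cache passed in is keyed by the manifest's code hash, so each distinct piece of logic is simulated at most once per report run even when it is a candidate on many dates. A compact summary (the cache is presumably a plain Map created once in generateBuildReport, which is not shown in these hunks):

    // Per candidate, verifyBehavioralStability routes the item as follows:
    //   no stored simHash / no manifest  -> trueReRuns   (no history to compare against)
    //   newSimHash === stored.simHash    -> stableUpdates ("Code Updated (Logic Stable)")
    //   newSimHash !== stored.simHash    -> trueReRuns   (reason + " [SimHash Mismatch]")
    const simHashCache = new Map(); // assumed shape: manifest.hash -> simHash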
@@ -128,61 +166,45 @@ async function verifyBehavioralStability(candidates, manifestMap, dailyStatus, l
   return { trueReRuns, stableUpdates };
 }
 
-
+/**
+ * The main reporter logic. Handles drift detection, minor versioning,
+ * and checkpointed batch writes to Firestore.
+ */
+async function generateBuildReport(config, dependencies, manifest) {
   const { db, logger } = dependencies;
-
-  const
-  const
-
-
-
-
-
-
-
-
-
-
-
-
-  const latestBuildDoc = await db.collection('computation_build_records').doc('latest').get();
-
-  if (latestBuildDoc.exists) {
-    const latestData = latestBuildDoc.data();
-    if (latestData.systemFingerprint === currentSystemHash) {
-      logger.log('INFO', `[BuildReporter] ⚡ System Fingerprint (${currentSystemHash.substring(0,8)}) matches latest build. Skipping Report.`);
-      await db.collection('computation_build_records').doc(buildId).set({
-        buildId,
-        packageVersion,
-        systemFingerprint: currentSystemHash,
-        status: 'SKIPPED_IDENTICAL',
-        referenceBuild: latestData.buildId,
-        generatedAt: new Date().toISOString()
-      });
-      lockRef.update({ status: 'SKIPPED', completedAt: new Date() }).catch(() => {});
-      return;
-    }
-  }
-
-  logger.log('INFO', `[BuildReporter] 🚀 Change Detected. Running Pre-flight Report for v${packageVersion}...`);
-  await generateBuildReport(config, dependencies, manifest, 90, buildId, currentSystemHash);
-  lockRef.update({ status: 'COMPLETED', completedAt: new Date() }).catch(() => {});
-
-  } catch (e) {
-    logger.log('ERROR', `[BuildReporter] Auto-run check failed: ${e.message}`);
+
+  const currentFingerprint = getSystemFingerprint(manifest);
+  const latestBuildDoc = await db.collection(BUILD_RECORDS_COLLECTION).doc('latest').get();
+  const latest = latestBuildDoc.exists ? latestBuildDoc.data() : null;
+
+  // [DATA DRIFT DETECTION]
+  // Force a run if the definitive earliest data date has changed (e.g. new history backfilled)
+  const currentEarliestStr = DEFINITIVE_EARLIEST_DATES.absoluteEarliest?.toISOString() || 'NONE';
+  const lastEarliestStr = latest?.windowEarliest || 'NONE';
+  const windowChanged = currentEarliestStr !== lastEarliestStr;
+
+  // If fingerprints match AND the window is the same, we can truly skip.
+  if (latest && latest.systemFingerprint === currentFingerprint && !windowChanged) {
+    logger.log('INFO', `[BuildReporter] ⚡ System fingerprint and window stable. Skipping report.`);
+    return { success: true, status: 'SKIPPED_IDENTICAL' };
   }
-  }
 
-
-  const
-
-  const finalFingerprint = systemFingerprint || getSystemFingerprint(manifest);
+  // Increment patch version (e.g., v1.0.0_p1, v1.0.0_p2)
+  const buildId = await getNextBuildId(db, packageVersion);
+  logger.log('INFO', `[BuildReporter] 🚀 Change Detected. Generating Build ${buildId}. Reason: ${windowChanged ? 'Data Window Drift' : 'Code Change'}`);
 
-
+  const today = new Date();
+  const { absoluteEarliest } = DEFINITIVE_EARLIEST_DATES;
+
+  // Dynamic Window calculation
+  let dynamicDaysBack = 90;
+  if (absoluteEarliest) {
+    const diffTime = Math.abs(today - absoluteEarliest);
+    dynamicDaysBack = Math.ceil(diffTime / (1000 * 60 * 60 * 24)) + 2;
+  }
 
-  const today = new Date();
   const startDate = new Date();
-  startDate.setDate(today.getDate() -
+  startDate.setDate(today.getDate() - dynamicDaysBack);
 
   const datesToCheck = getExpectedDateStrings(startDate, today);
   const manifestMap = new Map(manifest.map(c => [normalizeName(c.name), c]));
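The scan window is no longer a fixed daysBack argument: it stretches back to the definitive earliest data date (plus two days of slack) whenever that date is known. A worked example of the arithmetic, with illustrative dates:

    // absoluteEarliest = 2025-01-01, today = 2025-03-12  ->  70 days apart
    const today = new Date('2025-03-12T00:00:00Z');
    const absoluteEarliest = new Date('2025-01-01T00:00:00Z');
    const diffTime = Math.abs(today - absoluteEarliest);                     // 6,048,000,000 ms
    const dynamicDaysBack = Math.ceil(diffTime / (1000 * 60 * 60 * 24)) + 2; // 70 + 2 = 72
    // The report then scans the last 72 days instead of the fixed 90.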
@@ -203,137 +225,131 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
   const reportHeader = {
     buildId,
     packageVersion,
-    systemFingerprint:
+    systemFingerprint: currentFingerprint,
+    windowEarliest: currentEarliestStr,
     generatedAt: new Date().toISOString(),
+    status: 'IN_PROGRESS',
     summary: {},
     _sharded: true
   };
 
-
+  // Initialize the build record
+  await db.collection(BUILD_RECORDS_COLLECTION).doc(buildId).set(reportHeader);
 
-
+  let totalRun = 0, totalReRun = 0, totalStable = 0;
+  const limit = pLimit(10); // Concurrency for fetching statuses
 
-
-
-
-
-
-
-
-
-
-
-
-      prevDateStr =
-      if (
-
+  // Process dates in chunks of 5 for checkpointed writing
+  for (let i = 0; i < datesToCheck.length; i += 5) {
+    const dateBatch = datesToCheck.slice(i, i + 5);
+
+    const results = await Promise.all(dateBatch.map(dateStr => limit(async () => {
+      try {
+        const fetchPromises = [
+          fetchComputationStatus(dateStr, config, dependencies),
+          checkRootDataAvailability(dateStr, config, dependencies, DEFINITIVE_EARLIEST_DATES)
+        ];
+
+        let prevDateStr = null;
+        if (manifest.some(c => c.isHistorical)) {
+          const prevDate = new Date(dateStr + 'T00:00:00Z');
+          prevDate.setUTCDate(prevDate.getUTCDate() - 1);
+          prevDateStr = prevDate.toISOString().slice(0, 10);
+          if (prevDate >= DEFINITIVE_EARLIEST_DATES.absoluteEarliest) {
+            fetchPromises.push(fetchComputationStatus(prevDateStr, config, dependencies));
+          }
         }
-    }
 
-
-
-
-
-
-
-
-
-
-      run: [], rerun: [], stable: [], blocked: [], impossible: [], uptodate: [],
-      meta: { totalIncluded: 0, totalExpected: 0, match: false }
-    };
-
-    const expectedCount = manifest.filter(c => !isDateBeforeAvailability(dateStr, c)).length;
-    dateSummary.meta.totalExpected = expectedCount;
-
-    const pushIfValid = (targetArray, item, extraReason = null) => {
-      const calcManifest = manifestMap.get(item.name);
-      if (calcManifest && isDateBeforeAvailability(dateStr, calcManifest)) return;
-
-      const entry = {
-        name: item.name,
-        reason: item.reason || extraReason,
-        pass: calcManifest ? calcManifest.pass : '?'
+        const [dailyStatus, availability, prevRes] = await Promise.all(fetchPromises);
+        const prevDailyStatus = (prevDateStr && prevRes) ? prevRes : (prevDateStr ? {} : null);
+        const rootDataStatus = availability ? availability.status : { hasPortfolio: false, hasHistory: false };
+
+        const analysis = analyzeDateExecution(dateStr, manifest, rootDataStatus, dailyStatus, manifestMap, prevDailyStatus);
+
+        const dateSummary = {
+          run: [], rerun: [], stable: [], blocked: [], impossible: [], uptodate: [],
+          meta: { totalIncluded: 0, totalExpected: 0, match: false }
         };
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+        const expectedCount = manifest.filter(c => !isDateBeforeAvailability(dateStr, c)).length;
+        dateSummary.meta.totalExpected = expectedCount;
+
+        const pushIfValid = (targetArray, item, extraReason = null) => {
+          const calcManifest = manifestMap.get(item.name);
+          if (calcManifest && isDateBeforeAvailability(dateStr, calcManifest)) return;
+          const entry = { name: item.name, reason: item.reason || extraReason, pass: calcManifest?.pass || '?' };
+          if (targetArray === dateSummary.rerun) entry.impact = calculateBlastRadius(item.name, reverseGraph);
+          targetArray.push(entry);
+        };
+
+        analysis.runnable.forEach(item => pushIfValid(dateSummary.run, item, "New Calculation"));
+
+        if (analysis.reRuns.length > 0) {
+          const { trueReRuns, stableUpdates } = await verifyBehavioralStability(analysis.reRuns, manifestMap, dailyStatus, logger, simHashCache, db);
+          trueReRuns.forEach(item => pushIfValid(dateSummary.rerun, item, "Logic Changed"));
+          stableUpdates.forEach(item => pushIfValid(dateSummary.stable, item, "Logic Stable"));
+
+          if (stableUpdates.length > 0) {
+            const updatesPayload = {};
+            for (const stable of stableUpdates) {
+              const m = manifestMap.get(stable.name);
+              const stored = dailyStatus[stable.name];
+              if (m && stored) {
+                updatesPayload[stable.name] = {
+                  hash: m.hash, simHash: stable.simHash, resultHash: stored.resultHash,
+                  dependencyResultHashes: stored.dependencyResultHashes || {},
+                  category: m.category, composition: m.composition, lastUpdated: new Date()
+                };
+              }
+            }
+            if (Object.keys(updatesPayload).length > 0) {
+              await updateComputationStatus(dateStr, updatesPayload, config, dependencies);
             }
-  }
-  if (Object.keys(updatesPayload).length > 0) {
-    await updateComputationStatus(dateStr, updatesPayload, config, dependencies);
-    logger.log('INFO', `[BuildReporter] 🩹 Fixed ${Object.keys(updatesPayload).length} stable items for ${dateStr}. They will NOT re-run.`);
          }
        }
-}
-
-  analysis.blocked.forEach (item => pushIfValid(dateSummary.blocked, item));
-  analysis.failedDependency.forEach (item => pushIfValid(dateSummary.blocked, item, "Dependency Missing"));
-  analysis.impossible.forEach (item => pushIfValid(dateSummary.impossible, item));
-  analysis.skipped.forEach (item => pushIfValid(dateSummary.uptodate, item, "Up To Date"));
 
-
-
-
-
+        analysis.blocked.forEach(item => pushIfValid(dateSummary.blocked, item));
+        analysis.failedDependency.forEach(item => pushIfValid(dateSummary.blocked, item, "Dependency Missing"));
+        analysis.impossible.forEach(item => pushIfValid(dateSummary.impossible, item));
+        analysis.skipped.forEach(item => pushIfValid(dateSummary.uptodate, item, "Up To Date"));
 
-
-
-
-
-          .set(dateSummary);
+        const includedCount = dateSummary.run.length + dateSummary.rerun.length + dateSummary.stable.length +
+          dateSummary.blocked.length + dateSummary.impossible.length + dateSummary.uptodate.length;
+        dateSummary.meta.totalIncluded = includedCount;
+        dateSummary.meta.match = (includedCount === expectedCount);
 
-
+        // Write detailed date record
+        await db.collection(BUILD_RECORDS_COLLECTION).doc(buildId).collection('details').doc(dateStr).set(dateSummary);
 
-
-
-
-
-
-
-  const results = await Promise.all(processingPromises);
+        return { run: dateSummary.run.length, rerun: dateSummary.rerun.length, stable: dateSummary.stable.length };
+      } catch (err) {
+        logger.log('ERROR', `[BuildReporter] Analysis failed for ${dateStr}: ${err.message}`);
+        return { run: 0, rerun: 0, stable: 0 };
+      }
+    })));
 
-
+    // Accumulate stats and write a progress checkpoint
+    results.forEach(res => { totalRun += res.run; totalReRun += res.rerun; totalStable += res.stable; });
+    await db.collection(BUILD_RECORDS_COLLECTION).doc(buildId).update({
+      checkpoint: `Processed ${i + dateBatch.length}/${datesToCheck.length} dates`
+    });
+  }
 
-  reportHeader.
+  reportHeader.status = 'COMPLETED';
+  reportHeader.summary = {
+    totalReRuns: totalReRun,
+    totalNew: totalRun,
+    totalStable: totalStable,
+    scanRange: `${datesToCheck[0]} to ${datesToCheck[datesToCheck.length-1]}`
+  };
 
-
-
-  await db.collection(
+  // Finalize build record and update the 'latest' pointer
+  await db.collection(BUILD_RECORDS_COLLECTION).doc(buildId).set(reportHeader);
+  await db.collection(BUILD_RECORDS_COLLECTION).doc('latest').set({ ...reportHeader, note: "Latest completed build report." });
 
-  logger.log('SUCCESS', `[BuildReporter]
+  logger.log('SUCCESS', `[BuildReporter] Build ${buildId} completed. Re-runs: ${totalReRun}, Stable: ${totalStable}, New: ${totalRun}.`);
 
-  return { success: true,
+  return { success: true, buildId, summary: reportHeader.summary };
 }
 
-module.exports = {
+module.exports = { requestBuildReport, handleBuildReportTrigger, generateBuildReport };
package/index.js
CHANGED
@@ -29,8 +29,12 @@ const { handleUpdate } = require('./functions
 const { build: buildManifest } = require('./functions/computation-system/context/ManifestBuilder');
 const { dispatchComputationPass } = require('./functions/computation-system/helpers/computation_dispatcher');
 const { handleComputationTask } = require('./functions/computation-system/helpers/computation_worker');
-const {
-
+const {
+  requestBuildReport,
+  handleBuildReportTrigger,
+  generateBuildReport
+} = require('./functions/computation-system/tools/BuildReporter');// [NEW] Import Monitor
+
 const { checkPassStatus } = require('./functions/computation-system/helpers/monitor');
 
 const dataLoader = require('./functions/computation-system/utils/data_loader');

@@ -92,9 +96,10 @@ const computationSystem = {
   dataLoader,
   computationUtils,
   buildManifest,
-
+  // [UPDATED] Refactored Reporter Pipes
+  requestBuildReport,
+  handleBuildReportTrigger,
   generateBuildReport,
-  // [NEW] Export Monitor Pipe
   checkPassStatus
 };
 