bulltrackers-module 1.0.277 → 1.0.278
@@ -2,6 +2,7 @@
  * @fileoverview Handles saving computation results with observability and Smart Cleanup.
  * UPDATED: Implements GZIP Compression for efficient storage.
  * UPDATED: Implements Content-Based Hashing (ResultHash) for dependency short-circuiting.
+ * UPDATED: Auto-enforces Weekend Mode validation for 'Price-Only' computations.
  */
 const { commitBatchInChunks, generateDataHash } = require('../utils/utils');
 const { updateComputationStatus } = require('./StatusRepository');
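The file header above mentions GZIP compression and a content-based ResultHash, but that code is not part of this diff. As a minimal sketch of the general pattern only, using Node's built-in zlib and crypto modules (the packResult helper and its return shape are illustrative assumptions, not the package's generateDataHash implementation):

const zlib = require('zlib');
const crypto = require('crypto');

// Illustrative only: compress a result payload and derive a stable content hash
// so dependents can short-circuit when the hash is unchanged.
function packResult(resultObject) {
  const json = JSON.stringify(resultObject);

  // Content-based hash: identical results always yield the same digest.
  const resultHash = crypto.createHash('sha256').update(json).digest('hex');

  // GZIP the JSON before storage to reduce document size.
  const compressed = zlib.gzipSync(Buffer.from(json, 'utf8'));

  return { resultHash, compressed, sizeBytes: compressed.length };
}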
@@ -28,30 +29,49 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
   const pid = generateProcessId(PROCESS_TYPES.STORAGE, passName, dStr);
 
   // Options defaults
-  const flushMode
-  const shardIndexes
+  const flushMode = options.flushMode || 'STANDARD';
+  const shardIndexes = options.shardIndexes || {};
   const nextShardIndexes = {};
 
   const fanOutLimit = pLimit(10);
 
   for (const name in stateObj) {
-    const calc
+    const calc = stateObj[name];
     const execStats = calc._executionStats || { processedUsers: 0, skippedUsers: 0 };
     const currentShardIndex = shardIndexes[name] || 0;
 
     const runMetrics = {
-      storage:
+      storage: { sizeBytes: 0, isSharded: false, shardCount: 1, keys: 0 },
       validation: { isValid: true, anomalies: [] },
       execution: execStats
     };
 
     try {
       const result = await calc.getResult();
-      const
+      const configOverrides = validationOverrides[calc.manifest.name] || {};
 
+      // --- [NEW] AUTO-ENFORCE WEEKEND MODE FOR PRICE-ONLY CALCS ---
+      // If a calculation depends SOLELY on 'price', we assume market closures
+      // will cause 0s/Flatlines on weekends, so we enforce lenient validation.
+      const dataDeps = calc.manifest.rootDataDependencies || [];
+      const isPriceOnly = (dataDeps.length === 1 && dataDeps[0] === 'price');
+
+      let effectiveOverrides = { ...configOverrides };
+
+      if (isPriceOnly && !effectiveOverrides.weekend) {
+        effectiveOverrides.weekend = {
+          maxZeroPct: 100,
+          maxFlatlinePct: 100,
+          maxNullPct: 100 // Allow full nulls (e.g. holidays)
+        };
+      }
+      // -----------------------------------------------------------
+
       // Validation
       if (result && Object.keys(result).length > 0) {
-
+        // [FIX] Added 'dStr' as 3rd argument to match HeuristicValidator signature
+        const healthCheck = HeuristicValidator.analyze(calc.manifest.name, result, dStr, effectiveOverrides);
+
         if (!healthCheck.valid) {
           runMetrics.validation.isValid = false;
           runMetrics.validation.anomalies.push(healthCheck.reason);
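The hunk above derives isPriceOnly from calc.manifest.rootDataDependencies and only injects the lenient weekend block when the config does not already supply one. As a rough standalone illustration of that behavior (the manifest below is hypothetical, not taken from the package):

// Hypothetical manifest for illustration only.
const manifest = {
  name: 'dailyPriceChange',
  rootDataDependencies: ['price'] // depends solely on 'price'
};

const configOverrides = {}; // no explicit weekend overrides configured

const dataDeps = manifest.rootDataDependencies || [];
const isPriceOnly = (dataDeps.length === 1 && dataDeps[0] === 'price');

const effectiveOverrides = { ...configOverrides };
if (isPriceOnly && !effectiveOverrides.weekend) {
  // Markets are closed on weekends, so 100% zeros/flatlines/nulls are tolerated.
  effectiveOverrides.weekend = { maxZeroPct: 100, maxFlatlinePct: 100, maxNullPct: 100 };
}
// effectiveOverrides.weekend → { maxZeroPct: 100, maxFlatlinePct: 100, maxNullPct: 100 }
// Had configOverrides.weekend been set, it would be left untouched.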
@@ -91,7 +111,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
       const isMultiDate = resultKeys.length > 0 && resultKeys.every(k => /^\d{4}-\d{2}-\d{2}$/.test(k));
 
       if (isMultiDate) {
-        const datePromises
+        const datePromises = resultKeys.map((historicalDate) => fanOutLimit(async () => {
           const dailyData = result[historicalDate];
           if (!dailyData || Object.keys(dailyData).length === 0) return;
 
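The multi-date branch fans each per-date write through fanOutLimit = pLimit(10), so at most ten dates are written concurrently. A self-contained sketch of that concurrency pattern (writeDay is a hypothetical writer, and the require assumes a CommonJS-compatible p-limit release, v3 or earlier):

const pLimit = require('p-limit');

// At most 10 per-date writes run at the same time.
const fanOutLimit = pLimit(10);

async function writeAllDates(result, writeDay /* hypothetical writer */) {
  const resultKeys = Object.keys(result)
    .filter((k) => /^\d{4}-\d{2}-\d{2}$/.test(k));

  const datePromises = resultKeys.map((historicalDate) =>
    fanOutLimit(async () => {
      const dailyData = result[historicalDate];
      if (!dailyData || Object.keys(dailyData).length === 0) return;
      await writeDay(historicalDate, dailyData);
    })
  );

  await Promise.all(datePromises);
}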
@@ -24,11 +24,11 @@ class HeuristicValidator {
     const sampleSize = Math.min(totalItems, 100);
     const step = Math.floor(totalItems / sampleSize);
 
-    let zeroCount
-    let nullCount
-    let nanCount
+    let zeroCount = 0;
+    let nullCount = 0;
+    let nanCount = 0;
     let emptyVectorCount = 0;
-    let analyzedCount
+    let analyzedCount = 0;
 
     const numericValues = [];
 
@@ -82,9 +82,9 @@ class HeuristicValidator {
 
     // Default Thresholds
     let thresholds = {
-      maxZeroPct:
-      maxNullPct:
-      maxNanPct:
+      maxZeroPct: overrides.maxZeroPct ?? 99,
+      maxNullPct: overrides.maxNullPct ?? 90,
+      maxNanPct: overrides.maxNanPct ?? 0,
       maxFlatlinePct: overrides.maxFlatlinePct ?? 95
     };
 
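The threshold block uses the nullish-coalescing operator, so an explicitly configured 0 is respected instead of falling back to the default, which the || operator would not do. A small standalone illustration of that distinction:

const overrides = { maxZeroPct: 0 }; // caller explicitly forbids zero values

const thresholds = {
  maxZeroPct:     overrides.maxZeroPct     ?? 99, // 0  (explicit value kept)
  maxNullPct:     overrides.maxNullPct     ?? 90, // 90 (default, key missing)
  maxNanPct:      overrides.maxNanPct      ?? 0,  // 0  (default)
  maxFlatlinePct: overrides.maxFlatlinePct ?? 95  // 95 (default)
};

// With `||` instead of `??`, maxZeroPct would incorrectly become 99,
// because 0 is falsy but not nullish.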
@@ -4,6 +4,7 @@
  * REFACTORED: Strict 5-category reporting with date-based exclusion logic.
  * UPDATED: Added transactional locking to prevent duplicate reports on concurrent cold starts.
  * UPDATED: Adds 'pass' number to detail records for better waterfall visibility.
+ * FIXED: Ensures 'latest' pointer updates even if detail writes fail.
  */
 
 const { analyzeDateExecution } = require('../WorkflowOrchestrator');
@@ -242,15 +243,34 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
     scanRange: `${datesToCheck[0]} to ${datesToCheck[datesToCheck.length-1]}`
   };
 
+  // 1. Write the main report header (Specific Version)
   const reportRef = db.collection('computation_build_records').doc(buildId);
   await reportRef.set(reportHeader);
 
+  // 2. Write Details (Protected)
+  // [FIX] We wrap this in try-catch so that if the massive detail write fails,
+  // we still update the 'latest' pointer to the new version.
+  let detailsSuccess = true;
   if (detailWrites.length > 0) {
     logger.log('INFO', `[BuildReporter] Writing ${detailWrites.length} detail records...`);
-
+    try {
+      await commitBatchInChunks(config, dependencies, detailWrites, 'BuildReportDetails');
+    } catch (detailErr) {
+      detailsSuccess = false;
+      logger.log('ERROR', `[BuildReporter] ⚠️ Failed to write all details, but Report Header is saved.`, detailErr);
+    }
   }
 
-
+  // 3. Update 'latest' pointer
+  // This now runs even if details failed, preventing the version mismatch bug.
+  const latestMetadata = {
+    ...reportHeader,
+    note: detailsSuccess
+      ? "Latest build report pointer (See subcollection for details)."
+      : "Latest build report pointer (WARNING: Partial detail records due to write error)."
+  };
+
+  await db.collection('computation_build_records').doc('latest').set(latestMetadata);
 
   logger.log('SUCCESS', `[BuildReporter] Report ${buildId} saved. Re-runs: ${totalReRun}, New: ${totalRun}.`);
 
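The change above guarantees the 'latest' pointer document is written even when the bulk detail write throws, which is what previously caused the version mismatch between the header and the pointer. Reduced to its essentials, a sketch of the pattern under the same Firestore-style API (the saveReport wrapper and writeDetails callback are illustrative, not names from the package):

// Sketch: header first, details protected, pointer always.
async function saveReport(db, buildId, reportHeader, writeDetails) {
  // 1. Header write must succeed, so it is deliberately not caught here.
  await db.collection('computation_build_records').doc(buildId).set(reportHeader);

  // 2. Detail writes: a failure is recorded but does not abort the report.
  let detailsSuccess = true;
  try {
    await writeDetails(); // e.g. a chunked batch commit
  } catch (err) {
    detailsSuccess = false;
  }

  // 3. 'latest' pointer: always updated, so readers never see a stale version.
  await db.collection('computation_build_records').doc('latest').set({
    ...reportHeader,
    note: detailsSuccess ? 'Complete' : 'Partial detail records due to write error'
  });

  return detailsSuccess;
}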