bulltrackers-module 1.0.278 → 1.0.279

This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
@@ -11,7 +11,7 @@ const { generateProcessId, PROCESS_TYPES } = require('../logger/logger');
  const { HeuristicValidator } = require('./ResultsValidator');
  const validationOverrides = require('../config/validation_overrides');
  const pLimit = require('p-limit');
- const zlib = require('zlib'); // [NEW] Compression Lib
+ const zlib = require('zlib');

  const NON_RETRYABLE_ERRORS = [
    'PERMISSION_DENIED', 'DATA_LOSS', 'FAILED_PRECONDITION'
@@ -59,10 +59,11 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
    let effectiveOverrides = { ...configOverrides };

    if (isPriceOnly && !effectiveOverrides.weekend) {
+     // Apply strict leniency for weekend/holiday price actions
      effectiveOverrides.weekend = {
        maxZeroPct: 100,
        maxFlatlinePct: 100,
-       maxNullPct: 100 // Allow full nulls (e.g. holidays)
+       maxNullPct: 100
      };
    }
    // -----------------------------------------------------------
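Note on the hunk above: because `effectiveOverrides` is built by spreading `configOverrides` first, a weekend block supplied via config always wins; the 100% thresholds are injected only when no override exists. A minimal standalone sketch of that precedence (the `isPriceOnly` and `configOverrides` values here are hypothetical):

// Sketch: config-supplied weekend overrides take precedence over the injected defaults.
const isPriceOnly = true; // hypothetical flag value
const configOverrides = { weekend: { maxNullPct: 20 } }; // hypothetical config input
const effectiveOverrides = { ...configOverrides };
if (isPriceOnly && !effectiveOverrides.weekend) {
  effectiveOverrides.weekend = { maxZeroPct: 100, maxFlatlinePct: 100, maxNullPct: 100 };
}
console.log(effectiveOverrides.weekend); // { maxNullPct: 20 } (config wins)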
@@ -193,17 +194,14 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW

  async function writeSingleResult(result, docRef, name, dateContext, logger, config, deps, startShardIndex = 0, flushMode = 'STANDARD') {

-   // --- [NEW] COMPRESSION STRATEGY ---
-   // Try to compress before falling back to complex sharding
+   // --- COMPRESSION STRATEGY ---
    try {
      const jsonString = JSON.stringify(result);
      const rawBuffer = Buffer.from(jsonString);

-     // Only attempt if meaningful size (> 50KB)
      if (rawBuffer.length > 50 * 1024) {
        const compressedBuffer = zlib.gzipSync(rawBuffer);

-       // If compressed fits in one document (< 900KB safety limit)
        if (compressedBuffer.length < 900 * 1024) {
          logger.log('INFO', `[Compression] ${name}: Compressed ${(rawBuffer.length/1024).toFixed(0)}KB -> ${(compressedBuffer.length/1024).toFixed(0)}KB. Saved as Blob.`);

@@ -214,7 +212,6 @@ async function writeSingleResult(result, docRef, name, dateContext, logger, conf
            payload: compressedBuffer
          };

-         // Write immediately
          await docRef.set(compressedPayload, { merge: true });

          return {
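The hunks above cover only the write path: payloads over 50KB are gzipped, and if the result fits under a 900KB safety margin (Firestore caps documents at roughly 1MiB) it is stored as a single blob instead of going through sharding. The read side is not part of this diff, so the following is only a sketch, assuming the document carries the gzipped JSON under `payload` plus a marker flag (the `compressed` field name is an assumption):

const zlib = require('zlib');

// Sketch of a matching reader (assumed shape; the package's real reader is not in this diff).
async function readSingleResult(docRef) {
  const snap = await docRef.get();
  if (!snap.exists) return null;
  const data = snap.data();
  if (data.compressed && data.payload) {
    // The Admin SDK returns Bytes as a Buffer; Buffer.from also accepts Uint8Array.
    const raw = zlib.gunzipSync(Buffer.from(data.payload));
    return JSON.parse(raw.toString('utf8'));
  }
  return data; // Uncompressed document
}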
@@ -2,8 +2,7 @@
  * @fileoverview Build Reporter & Auto-Runner.
  * Generates a "Pre-Flight" report of what the computation system WILL do.
  * REFACTORED: Strict 5-category reporting with date-based exclusion logic.
- * UPDATED: Added transactional locking to prevent duplicate reports on concurrent cold starts.
- * UPDATED: Adds 'pass' number to detail records for better waterfall visibility.
+ * UPDATED: Replaced Batch Writes with Parallel Writes to prevent DEADLINE_EXCEEDED timeouts.
  * FIXED: Ensures 'latest' pointer updates even if detail writes fail.
  */

@@ -11,7 +10,6 @@ const { analyzeDateExecution } = req
  const { fetchComputationStatus } = require('../persistence/StatusRepository');
  const { normalizeName, getExpectedDateStrings, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils');
  const { checkRootDataAvailability } = require('../data/AvailabilityChecker');
- const { commitBatchInChunks } = require('../persistence/FirestoreUtils');
  const pLimit = require('p-limit');
  const path = require('path');
  const packageJson = require(path.join(__dirname, '..', '..', '..', 'package.json'));
@@ -45,8 +43,7 @@ function isDateBeforeAvailability(dateStr, calcManifest) {

  /**
   * AUTO-RUN ENTRY POINT
-  * UPDATED: Uses transactional locking to prevent race conditions.
-  * If we deploy multiple computation pass nodes simultaneously, only one should run the report.
+  * Uses transactional locking to prevent race conditions.
   */
  async function ensureBuildReport(config, dependencies, manifest) {
    const { db, logger } = dependencies;
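`ensureBuildReport` is documented as using transactional locking, but the lock itself falls outside these hunks. For orientation, a minimal sketch of the common Firestore create-if-absent pattern the comment describes (the lock document path and fields are assumptions, not the package's actual ones):

// Sketch: only one concurrent cold start acquires the lock; the rest skip the report.
async function acquireReportLock(db, buildId) {
  const lockRef = db.collection('computation_build_records').doc(`${buildId}_lock`); // hypothetical doc
  return db.runTransaction(async (tx) => {
    const snap = await tx.get(lockRef);
    if (snap.exists) return false; // Another node already claimed it
    tx.set(lockRef, { lockedAt: Date.now() });
    return true; // This node runs the report
  });
}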
@@ -88,7 +85,7 @@ async function ensureBuildReport(config, dependencies, manifest) {
  }

  /**
-  * Generates the report and saves to Firestore (Sharded).
+  * Generates the report and saves to Firestore.
   */
  async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null) {
    const { db, logger } = dependencies;
@@ -153,7 +150,7 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
      impossible: [], // Missing Data (Historical) / Impossible Dependency
      uptodate: [], // Hash Match (Previously "Skipped")

-     // [NEW] Metadata for Verification
+     // Metadata for Verification
      meta: {
        totalIncluded: 0,
        totalExpected: 0,
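Pieced together across hunks, the per-date summary carries five category arrays plus verification metadata. A reconstruction of the shape (hedged: only `impossible`, `uptodate`, and `meta` are visible here; `rerun` and `blocked` appear in later hunks, `run` is inferred from the "5-category" fileoverview, and any further fields are unknown):

// Sketch of the dateSummary shape implied by this diff's hunks.
const dateSummary = {
  run: [],        // New work to execute (inferred category)
  rerun: [],      // Hash Mismatch
  blocked: [],    // Temporary issues, incl. missing dependencies
  impossible: [], // Missing Data (Historical) / Impossible Dependency
  uptodate: [],   // Hash Match (Previously "Skipped")
  meta: { totalIncluded: 0, totalExpected: 0 },
};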
@@ -166,7 +163,6 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
    dateSummary.meta.totalExpected = expectedCount;

    // Helper to push only if date is valid for this specific calc
-   // [UPDATED] Adds 'pass' number to the record
    const pushIfValid = (targetArray, item, extraReason = null) => {
      const calcManifest = manifestMap.get(item.name);
      if (calcManifest && isDateBeforeAvailability(dateStr, calcManifest)) {
@@ -187,7 +183,6 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
    analysis.reRuns.forEach(item => pushIfValid(dateSummary.rerun, item, "Hash Mismatch"));

    // 3. BLOCKED (Temporary Issues)
-   // Merging 'blocked' and 'failedDependency' as both are temporary blocks
    analysis.blocked.forEach(item => pushIfValid(dateSummary.blocked, item));
    analysis.failedDependency.forEach(item => pushIfValid(dateSummary.blocked, item, "Dependency Missing"));

@@ -211,7 +206,7 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
      logger.log('WARN', `[BuildReporter] ⚠️ Mismatch on ${dateStr}: Expected ${expectedCount} but got ${includedCount}.`);
    }

-   // ALWAYS WRITE THE REPORT (No filtering based on activity)
+   // QUEUE THE WRITE (Don't write yet)
    const detailRef = db.collection('computation_build_records').doc(buildId).collection('details').doc(dateStr);
    detailWrites.push({
      ref: detailRef,
@@ -247,22 +242,33 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
    const reportRef = db.collection('computation_build_records').doc(buildId);
    await reportRef.set(reportHeader);

-   // 2. Write Details (Protected)
-   // [FIX] We wrap this in try-catch so that if the massive detail write fails,
-   // we still update the 'latest' pointer to the new version.
+   // 2. Write Details (Protected & Parallelized)
+   // FIX: Using parallel individual writes instead of Batch to avoid DEADLINE_EXCEEDED
    let detailsSuccess = true;
    if (detailWrites.length > 0) {
-     logger.log('INFO', `[BuildReporter] Writing ${detailWrites.length} detail records...`);
+     logger.log('INFO', `[BuildReporter] Writing ${detailWrites.length} detail records (Parallel Strategy)...`);
+
      try {
-       await commitBatchInChunks(config, dependencies, detailWrites, 'BuildReportDetails');
+       // Concurrency limit of 15 to be safe
+       const writeLimit = pLimit(15);
+       const writePromises = detailWrites.map(w => writeLimit(() =>
+         w.ref.set(w.data).catch(e => {
+           logger.log('WARN', `[BuildReporter] Failed to write detail for ${w.ref.path}: ${e.message}`);
+           throw e;
+         })
+       ));
+
+       await Promise.all(writePromises);
+       logger.log('INFO', `[BuildReporter] Successfully wrote all detail records.`);
+
      } catch (detailErr) {
        detailsSuccess = false;
-       logger.log('ERROR', `[BuildReporter] ⚠️ Failed to write all details, but Report Header is saved.`, detailErr);
+       logger.log('ERROR', `[BuildReporter] ⚠️ Failed to write some details. Report Header is preserved.`, detailErr);
      }
    }

    // 3. Update 'latest' pointer
-   // This now runs even if details failed, preventing the version mismatch bug.
+   // This runs regardless of detail write success/failure
    const latestMetadata = {
      ...reportHeader,
      note: detailsSuccess
@@ -270,9 +276,12 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
        : "Latest build report pointer (WARNING: Partial detail records due to write error)."
    };

-   await db.collection('computation_build_records').doc('latest').set(latestMetadata);
-
-   logger.log('SUCCESS', `[BuildReporter] Report ${buildId} saved. Re-runs: ${totalReRun}, New: ${totalRun}.`);
+   try {
+     await db.collection('computation_build_records').doc('latest').set(latestMetadata);
+     logger.log('SUCCESS', `[BuildReporter] Report ${buildId} saved. Re-runs: ${totalReRun}, New: ${totalRun}. Pointer Updated.`);
+   } catch (pointerErr) {
+     logger.log('FATAL', `[BuildReporter] Failed to update 'latest' pointer!`, pointerErr);
+   }

    return {
      success: true,
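A note on the new write strategy above: `Promise.all` rejects as soon as any write fails, but because each write carries its own `.catch` that logs before rethrowing, every individual failure still shows up in the logs, and `detailsSuccess` flips to false so the 'latest' pointer records the partial state. If tallying partial progress mattered more than fail-fast behavior, `Promise.allSettled` would be the drop-in variant; a sketch of that alternative (not what the package ships):

const pLimit = require('p-limit');

// Sketch: allSettled variant of the parallel detail-write strategy.
// Always waits for every write, then reports a success tally instead of failing fast.
async function writeAllSettled(detailWrites, logger) {
  const limit = pLimit(15); // Same concurrency cap as the shipped code
  const results = await Promise.allSettled(
    detailWrites.map((w) => limit(() => w.ref.set(w.data)))
  );
  const failed = results.filter((r) => r.status === 'rejected').length;
  logger.log(failed ? 'WARN' : 'INFO',
    `[BuildReporter] ${results.length - failed}/${results.length} detail writes succeeded.`);
  return failed === 0;
}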
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "bulltrackers-module",
-   "version": "1.0.278",
+   "version": "1.0.279",
    "description": "Helper Functions for Bulltrackers.",
    "main": "index.js",
    "files": [