bulltrackers-module 1.0.301 → 1.0.302

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -114,7 +114,7 @@ class StandardExecutor {
 
         usersSinceLastFlush += chunkSize;
         const heapStats = v8.getHeapStatistics();
-        if (usersSinceLastFlush >= 5000 || (heapStats.used_heap_size / heapStats.heap_size_limit) > 0.70) {
+        if (usersSinceLastFlush >= 500 || (heapStats.used_heap_size / heapStats.heap_size_limit) > 0.70) {
           const flushResult = await StandardExecutor.flushBuffer(state, dateStr, passName, config, deps, shardIndexMap, executionStats, 'INTERMEDIATE', true, !hasFlushed);
           hasFlushed = true;
           StandardExecutor.mergeReports(aggregatedSuccess, aggregatedFailures, flushResult);
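
This hunk lowers the user-count flush threshold from 5000 to 500 while keeping the 70% heap-pressure guard, so intermediate flushes fire far more often and buffered state stays smaller. A minimal sketch of the same trigger condition, assuming only the v8.getHeapStatistics() call shown in the diff (the constant and function names below are illustrative, not taken from the package):

const v8 = require('v8');

// Illustrative constants; the values mirror the diff above.
const FLUSH_USER_THRESHOLD = 500;   // was 5000 prior to 1.0.302
const HEAP_PRESSURE_RATIO = 0.70;   // flush once the heap crosses 70% of its limit

function shouldFlushBuffer(usersSinceLastFlush) {
  const { used_heap_size, heap_size_limit } = v8.getHeapStatistics();
  return usersSinceLastFlush >= FLUSH_USER_THRESHOLD
      || (used_heap_size / heap_size_limit) > HEAP_PRESSURE_RATIO;
}
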
@@ -1,8 +1,7 @@
 /**
  * FILENAME: computation-system/helpers/computation_worker.js
  * PURPOSE: Consumes tasks, executes logic, and signals Workflow upon Batch Completion.
- * UPDATED: Implements IAM Auth for Workflow Callbacks.
- * UPDATED: Implements Peak Memory Heartbeat and Resource Tier tracking.
+ * UPDATED: Added "Contention-Aware Retry" for the Batch Counter to fix ABORTED errors.
  */
 
 const { executeDispatchTask } = require('../WorkflowOrchestrator.js');
@@ -99,24 +98,56 @@ async function triggerWorkflowCallback(url, status, logger) {
 }
 
 /**
- * Helper: Decrements 'remainingTasks' in Firestore.
+ * [UPDATED] Helper: Decrements 'remainingTasks' in Firestore.
+ * NOW INCLUDES CONTENTION RETRY LOGIC (The "Sentinel" Fix)
  */
 async function decrementAndCheck(db, metaStatePath, logger) {
   if (!metaStatePath) return null;
-  try {
-    const result = await db.runTransaction(async (t) => {
-      const ref = db.doc(metaStatePath);
-      const doc = await t.get(ref);
-      if (!doc.exists) return null;
-      const data = doc.data();
-      const newRemaining = (data.remainingTasks || 0) - 1;
-      t.update(ref, { remainingTasks: newRemaining, lastUpdated: new Date() });
-      return { remaining: newRemaining, callbackUrl: data.callbackUrl };
-    });
-    if (result && result.remaining <= 0) return result.callbackUrl;
-  } catch (e) {
-    logger.log('ERROR', `[Worker] Failed to decrement batch counter: ${e.message}`);
+
+  const MAX_CONTENTION_RETRIES = 10;
+  let attempt = 0;
+
+  while (attempt < MAX_CONTENTION_RETRIES) {
+    try {
+      const result = await db.runTransaction(async (t) => {
+        const ref = db.doc(metaStatePath);
+        const doc = await t.get(ref);
+        if (!doc.exists) return null;
+
+        const data = doc.data();
+        // Safety: Don't decrement below zero
+        const currentRemaining = data.remainingTasks || 0;
+        if (currentRemaining <= 0) return { remaining: 0, callbackUrl: data.callbackUrl };
+
+        const newRemaining = currentRemaining - 1;
+        t.update(ref, { remainingTasks: newRemaining, lastUpdated: new Date() });
+
+        return { remaining: newRemaining, callbackUrl: data.callbackUrl };
+      });
+
+      // Success! Check if we are the "Sentinel" (the last one)
+      if (result && result.remaining <= 0) return result.callbackUrl;
+      return null; // We decremented successfully, but weren't the last one.
+
+    } catch (e) {
+      // Check if it's a contention error (ABORTED/10 or DEADLINE_EXCEEDED/4)
+      const isContention = e.code === 10 || e.code === 4 || (e.message && e.message.includes('contention'));
+
+      if (isContention) {
+        attempt++;
+        // JITTER: Random delay between 50ms and 500ms to desynchronize the herd
+        const delay = Math.floor(Math.random() * 450) + 50;
+        logger.log('WARN', `[Worker] Batch counter contention (Attempt ${attempt}/${MAX_CONTENTION_RETRIES}). Retrying in ${delay}ms...`);
+        await new Promise(r => setTimeout(r, delay));
+      } else {
+        // Fatal error (permission, etc)
+        logger.log('ERROR', `[Worker] Fatal error decrementing batch counter: ${e.message}`);
+        return null;
+      }
+    }
   }
+
+  logger.log('ERROR', `[Worker] Failed to decrement batch counter after ${MAX_CONTENTION_RETRIES} attempts. The count will be inaccurate.`);
   return null;
 }
 
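
The rewritten decrementAndCheck treats gRPC ABORTED (code 10) and DEADLINE_EXCEEDED (code 4) as transaction contention, retries up to 10 times with 50-500ms of random jitter, fails fast on any other error, and lets the worker whose decrement reaches zero act as the "sentinel" that returns the callback URL. The same retry-with-jitter pattern can be factored into a generic wrapper; the sketch below is an illustration only (the helper name and options are not part of the package), with just the error codes and jitter bounds taken from the diff:

// Hypothetical generic wrapper around Firestore transactions that retries on
// contention with random jitter. Not part of bulltrackers-module.
async function runTransactionWithContentionRetry(db, txnFn, { maxRetries = 10, logger = console } = {}) {
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await db.runTransaction(txnFn);
    } catch (e) {
      // ABORTED (10) and DEADLINE_EXCEEDED (4) indicate contention; rethrow anything else.
      const isContention = e.code === 10 || e.code === 4 || (e.message && e.message.includes('contention'));
      if (!isContention || attempt === maxRetries) throw e;
      const delay = Math.floor(Math.random() * 450) + 50; // 50-500ms jitter
      logger.log('WARN', `Transaction contention (attempt ${attempt}/${maxRetries}); retrying in ${delay}ms`);
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }
}

A caller such as decrementAndCheck could pass its transaction body to a wrapper like this instead of carrying its own while loop; the trade-off is losing the function-specific "count will be inaccurate" log message on final failure.
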
@@ -48,7 +48,7 @@ async function loadDataByRefs(config, deps, refs) {
   const { withRetry } = calculationUtils;
   if (!refs || !refs.length) return {};
   const mergedPortfolios = {};
-  const batchSize = config.partRefBatchSize || 50;
+  const batchSize = config.partRefBatchSize || 10;
   for (let i = 0; i < refs.length; i += batchSize) {
     const batchRefs = refs.slice(i, i + batchSize);
     const snapshots = await withRetry(() => db.getAll(...batchRefs), `getAll(batch ${Math.floor(i / batchSize)})`);
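
This hunk, together with the two streaming hunks below, drops the default Firestore getAll batch size from 50 document references to 10, trading more round trips for smaller responses and a lower peak memory footprint per batch. A minimal sketch of the same batching loop, assuming the withRetry wrapper and config.partRefBatchSize shown in the diff (the surrounding function name is illustrative):

// Illustrative helper; only db.getAll, withRetry, and config.partRefBatchSize
// appear in the package itself.
async function loadSnapshotsInBatches(db, refs, config, withRetry) {
  const batchSize = config.partRefBatchSize || 10; // new default as of 1.0.302
  const snapshots = [];
  for (let i = 0; i < refs.length; i += batchSize) {
    const batchRefs = refs.slice(i, i + batchSize);
    const batch = await withRetry(
      () => db.getAll(...batchRefs),
      `getAll(batch ${Math.floor(i / batchSize)})`
    );
    snapshots.push(...batch);
  }
  return snapshots;
}
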
@@ -145,7 +145,7 @@ async function* streamPortfolioData(config, deps, dateString, providedRefs = nul
   const { logger } = deps;
   const refs = providedRefs || (await getPortfolioPartRefs(config, deps, dateString));
   if (refs.length === 0) { logger.log('WARN', `[streamPortfolioData] No portfolio refs found for ${dateString}. Stream is empty.`); return; }
-  const batchSize = config.partRefBatchSize || 50;
+  const batchSize = config.partRefBatchSize || 10;
   logger.log('INFO', `[streamPortfolioData] Streaming ${refs.length} portfolio parts in chunks of ${batchSize}...`);
   for (let i = 0; i < refs.length; i += batchSize) {
     const batchRefs = refs.slice(i, i + batchSize);
@@ -160,7 +160,7 @@ async function* streamHistoryData(config, deps, dateString, providedRefs = null)
   const { logger } = deps;
   const refs = providedRefs || (await getHistoryPartRefs(config, deps, dateString));
   if (refs.length === 0) { logger.log('WARN', `[streamHistoryData] No history refs found for ${dateString}. Stream is empty.`); return; }
-  const batchSize = config.partRefBatchSize || 50;
+  const batchSize = config.partRefBatchSize || 10;
   logger.log('INFO', `[streamHistoryData] Streaming ${refs.length} history parts in chunks of ${batchSize}...`);
   for (let i = 0; i < refs.length; i += batchSize) {
     const batchRefs = refs.slice(i, i + batchSize);
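
streamPortfolioData and streamHistoryData receive the same default reduction from 50 to 10 refs per getAll call; because both are async generators, each smaller batch can be processed and released before the next one is fetched. A rough sketch of that streaming shape, assuming the Firestore Admin SDK's db.getAll (the function and parameter names are illustrative, not from the package):

// Hypothetical streaming reader: yields one batch of document data at a time
// instead of accumulating every snapshot in memory.
async function* streamDocsInBatches(db, refs, batchSize = 10) {
  for (let i = 0; i < refs.length; i += batchSize) {
    const batchRefs = refs.slice(i, i + batchSize);
    const snapshots = await db.getAll(...batchRefs);
    yield snapshots.filter((snap) => snap.exists).map((snap) => snap.data());
  }
}

// Example consumption with for-await-of:
// for await (const docs of streamDocsInBatches(db, refs)) { /* process docs */ }
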
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "bulltrackers-module",
-  "version": "1.0.301",
+  "version": "1.0.302",
   "description": "Helper Functions for Bulltrackers.",
   "main": "index.js",
   "files": [