bulltrackers-module 1.0.136 → 1.0.138

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -26,11 +26,52 @@ async function runComputationPass(config, dependencies, computationManifest) {
  const yesterday = new Date(); yesterday.setUTCDate(yesterday.getUTCDate()-1);
  const endDateUTC = new Date(Date.UTC(yesterday.getUTCFullYear(), yesterday.getUTCMonth(), yesterday.getUTCDate()));
  const earliestDates = await getEarliestDataDates(config, dependencies);
- const firstDate = earliestDates.absoluteEarliest;
- const startDateUTC = firstDate ? new Date(Date.UTC(firstDate.getUTCFullYear(), firstDate.getUTCMonth(), firstDate.getUTCDate())) : new Date(config.earliestComputationDate+'T00:00:00Z');
- const allExpectedDates = getExpectedDateStrings(startDateUTC, endDateUTC);
+
+ // --- REMOVE OLD/REPLACE WITH NEW LOGIC FOR PROBLEM 3 & 4 ---
+ // const firstDate = earliestDates.absoluteEarliest; // <-- REMOVE
+ // const startDateUTC = firstDate ? new Date(Date.UTC(firstDate.getUTCFullYear(), firstDate.getUTCMonth(), firstDate.getUTCDate())) : new Date(config.earliestComputationDate+'T00:00:00Z'); // <-- REMOVE
+
  const passes = groupByPass(computationManifest);
  const calcsInThisPass = passes[passToRun] || []; if (!calcsInThisPass.length) return logger.log('WARN', `[PassRunner] No calcs for Pass ${passToRun}. Exiting.`);
+
+ // --- START: NEW LOGIC FOR PROBLEM 3 & 4 ---
+ // Determine the earliest start date based *only* on data required for THIS pass.
+ const requiredRootData = new Set();
+ calcsInThisPass.forEach(c => {
+ (c.rootDataDependencies || []).forEach(dep => requiredRootData.add(dep));
+ });
+
+ let earliestStartDateForPass = null;
+ if (requiredRootData.size > 0) {
+ // Find the LATEST of the earliest dates for all required data types.
+ // e.g., If portfolio starts 09-25 and social starts 10-30, a calc
+ // needing both must start on 10-30.
+ let latestOfEarliestDates = new Date(0); // Start at epoch
+ requiredRootData.forEach(dep => {
+ const earliestDateForDep = earliestDates[dep]; // e.g., earliestDates.portfolio
+ if (earliestDateForDep && earliestDateForDep > latestOfEarliestDates) {
+ latestOfEarliestDates = earliestDateForDep;
+ }
+ });
+
+ if (latestOfEarliestDates.getTime() > 0) {
+ earliestStartDateForPass = latestOfEarliestDates;
+ }
+ }
+
+ // Use the pass-specific date. Fall back to absolute earliest, then config.
+ const firstDate = earliestStartDateForPass || earliestDates.absoluteEarliest;
+ const startDateUTC = firstDate
+ ? new Date(Date.UTC(firstDate.getUTCFullYear(), firstDate.getUTCMonth(), firstDate.getUTCDate()))
+ : new Date(config.earliestComputationDate+'T00:00:00Z');
+
+ logger.log('INFO', `[PassRunner] Pass ${passToRun} requires data: [${Array.from(requiredRootData).join(', ')}].`);
+ logger.log('INFO', `[PassRunner] Determined start date for this pass: ${startDateUTC.toISOString().slice(0, 10)}`);
+ // --- END: NEW LOGIC ---
+
+ const allExpectedDates = getExpectedDateStrings(startDateUTC, endDateUTC);
+ const firstDayOfBackfill = allExpectedDates.length > 0 ? allExpectedDates[0] : null; // --- MOVED FROM ABOVE ---
+
  const standardCalcs = calcsInThisPass.filter(c => c.type==='standard');
  const metaCalcs = calcsInThisPass.filter(c => c.type==='meta');
  const processDate = async (dateStr) => {
@@ -39,7 +80,7 @@ async function runComputationPass(config, dependencies, computationManifest) {
  const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
  if (!rootData) { logger.log('WARN', `[PassRunner] Skipping ${dateStr} for Pass ${passToRun}: No root data.`);return;}
  const existingResults = await fetchExistingResults(dateStr, calcsInThisPass, computationManifest, config, dependencies);
- const { standardCalcsToRun, metaCalcsToRun } = filterCalculations(standardCalcs, metaCalcs, rootData.status, existingResults, passToRun, dateStr, logger);
+ const { standardCalcsToRun, metaCalcsToRun } = filterCalculations(standardCalcs, metaCalcs, rootData.status, existingResults, passToRun, dateStr, logger, dateStr === firstDayOfBackfill);
  if (standardCalcsToRun.length === 0 && metaCalcsToRun.length === 0) {logger.log('INFO', `[PassRunner] All calcs for ${dateStr} Pass ${passToRun} are already complete. Skipping.`);return;}
  if (standardCalcsToRun.length) await runStandardComputationPass(dateToProcess, standardCalcsToRun, `Pass ${passToRun} (Standard)`, config, dependencies, rootData);
  if (metaCalcsToRun.length) await runMetaComputationPass(dateToProcess, metaCalcsToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, rootData);
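
Note on the new pass-runner logic above: the start date for a pass is now the latest of the per-dependency earliest dates, so a calc needing several root data types only starts once all of them exist. A minimal standalone sketch of that selection, with hypothetical earliestDates values (not taken from the package):

// Sketch of the "latest of the earliest dates" selection, hypothetical inputs only.
const earliestDates = {
  portfolio: new Date(Date.UTC(2024, 8, 25)), // data available from 2024-09-25
  social:    new Date(Date.UTC(2024, 9, 30)), // data available from 2024-10-30
};
const requiredRootData = new Set(['portfolio', 'social']);

let start = null;
requiredRootData.forEach(dep => {
  const d = earliestDates[dep];
  if (d && (!start || d > start)) start = d; // keep the LATEST of the earliest dates
});
console.log(start.toISOString().slice(0, 10)); // "2024-10-30": both data types exist from here on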
@@ -72,20 +72,26 @@ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config,
 
  /** --- MODIFIED: Stage 5: Filter calculations to skip completed work ---
  */
- function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, existingResults, passToRun, dateStr, logger) {
+ function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, existingResults, passToRun, dateStr, logger, isFirstDayOfBackfill = false) {
  const skipped = new Set();
- // Filter Standard Calcs
  const standardCalcsToRun = standardCalcs.filter(c => {
  if (existingResults[c.name]) {logger.log('TRACE', `[Pass ${passToRun}] Skipping ${c.name} for ${dateStr}. Result already exists.`);return false;}
- const { canRun, missing } = checkRootDependencies(c, rootDataStatus);
- if (canRun) return true;
- logger.log('INFO', `[Pass ${passToRun}] Skipping ${c.name} for ${dateStr}. Missing root data: [${missing.join(', ')}]`);
- skipped.add(c.name);
- return false;
+ if (isFirstDayOfBackfill && c.isHistorical) {
+ logger.log('INFO', `[Pass ${passToRun}] Skipping ${c.name} for ${dateStr}. Historical calc on the first day of backfill (no yesterday).`);
+ skipped.add(c.name);
+ return false;
+ }
  });
  // Filter Meta Calcs
  const metaCalcsToRun = metaCalcs.filter(c => {
  if (existingResults[c.name]) {logger.log('TRACE', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Result already exists.`);skipped.add(c.name);return false;}
+ // --- START: RECOMMENDED ADDITION ---
+ if (isFirstDayOfBackfill && c.isHistorical) {
+ logger.log('INFO', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Historical calc on the first day of backfill (no yesterday).`);
+ skipped.add(c.name);
+ return false;
+ }
+ // --- END: RECOMMENDED ADDITION ---
  // 1. Check root data
  const { canRun, missing: missingRoot } = checkRootDependencies(c, rootDataStatus);
  if (!canRun) {logger.log('INFO', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Missing root data: [${missingRoot.join(', ')}]`);skipped.add(c.name);return false;}
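
Note on isFirstDayOfBackfill: calcs flagged isHistorical are skipped on the first expected date of a backfill because there is no prior day to compare against; on every later date they pass this check as usual. A rough sketch of the intended filter behaviour, using hypothetical manifest entries (not the package's real calc names):

// Hypothetical calc entries to illustrate the isHistorical skip on day one.
const calcs = [
  { name: 'dailyValue',    isHistorical: false },
  { name: 'dayOverDayPnl', isHistorical: true }, // needs "yesterday", so it cannot run on day one
];

const runnable = (c, isFirstDayOfBackfill) => !(isFirstDayOfBackfill && c.isHistorical);

console.log(calcs.filter(c => runnable(c, true)).map(c => c.name));  // [ 'dailyValue' ]
console.log(calcs.filter(c => runnable(c, false)).map(c => c.name)); // [ 'dailyValue', 'dayOverDayPnl' ]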
@@ -59,14 +59,30 @@ async function handleVerify(task, taskId, { db, logger, ...dependencies }, confi
  }
  }
 
+ // --- START FIX: MODIFIED FOR SHARDING ---
  if (Object.keys(usernameMap).length) {
- const mapRef = db.collection(config.FIRESTORE_COLLECTION_USERNAME_MAP).doc('cid_map_shard_1');
- batch.set(mapRef, usernameMap, { merge: true });
- logger.log('INFO', `[VERIFY] Staging ${Object.keys(usernameMap).length} username updates.`);
+ // Group updates by shard
+ const shardedUpdates = {};
+ for (const cid in usernameMap) {
+ // Re-implement the sharding logic here
+ const shardId = `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;
+ if (!shardedUpdates[shardId]) {
+ shardedUpdates[shardId] = {};
+ }
+ shardedUpdates[shardId][cid] = usernameMap[cid];
+ }
+
+ // Write each shard to its own document
+ for (const shardId in shardedUpdates) {
+ const mapRef = db.collection(config.FIRESTORE_COLLECTION_USERNAME_MAP).doc(shardId);
+ batch.set(mapRef, shardedUpdates[shardId], { merge: true });
+ }
+ logger.log('INFO', `[VERIFY] Staging username updates across ${Object.keys(shardedUpdates).length} shards.`);
  }
+ // --- END FIX ---
 
  await batch.commit();
  if (validUserCount) logger.log('INFO', `[VERIFY] Verified and stored ${validUserCount} new ${userType} users.`);
  }
 
- module.exports = { handleVerify };
+ module.exports = { handleVerify };
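
Note on the sharded write above: updates are first bucketed by shard ID, then each bucket is staged as one batch.set against its own document instead of everything landing in 'cid_map_shard_1'. A small sketch of the grouping step in isolation, using the same shard formula and illustrative cids only:

// Sketch of the shard-grouping step with sample data (not real cids).
const shardIdFor = (cid) => `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;

const usernameMap = { '10001': 'carol', '12345': 'alice', '987654': 'bob' }; // sample data
const shardedUpdates = {};
for (const cid in usernameMap) {
  const shardId = shardIdFor(cid);
  if (!shardedUpdates[shardId]) shardedUpdates[shardId] = {};
  shardedUpdates[shardId][cid] = usernameMap[cid];
}

console.log(shardedUpdates);
// { cid_map_shard_1: { '10001': 'carol', '12345': 'alice' },
//   cid_map_shard_8: { '987654': 'bob' } }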
@@ -31,6 +31,13 @@ class FirestoreBatchManager {
  logger.log('INFO', 'FirestoreBatchManager initialized.');
  }
 
+ // --- START FIX: ADDED SHARDING HELPER ---
+ _getUsernameShardId(cid) {
+ // Shard across 10 documents (supports ~200k users assuming 50 bytes/user)
+ return `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;
+ }
+ // --- END FIX ---
+
  _scheduleFlush() {
  if (!this.batchTimeout) this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS);
  }
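
The "~200k users" estimate in the helper's comment follows from Firestore's 1 MiB per-document limit: at roughly 50 bytes per map entry, each shard document holds about 20k entries, and 10 shards give about 200k in total. A back-of-envelope check under those assumed sizes:

// Capacity check for the 10-shard layout (assumed entry size, not measured).
const FIRESTORE_DOC_LIMIT_BYTES = 1048576; // 1 MiB per document
const APPROX_BYTES_PER_ENTRY = 50;         // assumption carried over from the comment above
const SHARD_COUNT = 10;

const entriesPerShard = Math.floor(FIRESTORE_DOC_LIMIT_BYTES / APPROX_BYTES_PER_ENTRY);
console.log(entriesPerShard);               // 20971 entries per shard
console.log(entriesPerShard * SHARD_COUNT); // 209710 total, consistent with "~200k users"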
@@ -40,6 +47,7 @@ class FirestoreBatchManager {
  this.usernameMap.clear();
  this.logger.log('INFO', '[BATCH] Refreshing username map from Firestore...');
  try {
+ // This correctly gets ALL documents (shards) in the collection
  const snapshot = await this.db.collection(this.usernameMapCollectionName).get();
  snapshot.forEach(doc => {
  const data = doc.data();
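
Because the refresh path does a whole-collection get(), it reads every shard document in one query and needs no shard-aware lookup of its own. A minimal sketch of that rebuild, assuming each shard document stores entries shaped like { [cid]: { username } } as queued by addUsernameMapUpdate below:

// Sketch: rebuild the in-memory map from all shard documents (assumed doc shape).
async function loadUsernameMap(db, collectionName) {
  const map = new Map();
  const snapshot = await db.collection(collectionName).get(); // every shard doc in the collection
  snapshot.forEach(doc => {
    const data = doc.data(); // e.g. { '12345': { username: 'alice' }, ... }
    for (const cid in data) {
      if (data[cid] && data[cid].username) map.set(cid, data[cid].username);
    }
  });
  return map;
}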
@@ -54,14 +62,23 @@ class FirestoreBatchManager {
 
  getUsername(cid) { return this.usernameMap.get(String(cid)); }
 
+ // --- START FIX: MODIFIED FOR SHARDING ---
  addUsernameMapUpdate(cid, username) {
  if (!username) return;
  const cidStr = String(cid);
  this.usernameMap.set(cidStr, username);
- this.usernameMapUpdates[cidStr] = { username };
- this.logger.log('TRACE', `[BATCH] Queued username update for ${cidStr}.`);
+
+ // Organize updates by shard ID
+ const shardId = this._getUsernameShardId(cidStr);
+ if (!this.usernameMapUpdates[shardId]) {
+ this.usernameMapUpdates[shardId] = {};
+ }
+ this.usernameMapUpdates[shardId][cidStr] = { username };
+
+ this.logger.log('TRACE', `[BATCH] Queued username update for ${cidStr} in ${shardId}.`);
  this._scheduleFlush();
  }
+ // --- END FIX ---
 
  async addToTradingHistoryBatch(userId, blockId, date, historyData, userType) {
  const collection = userType === 'speculator' ? this.speculatorHistoryCollectionName : this.normalHistoryCollectionName;
@@ -151,12 +168,22 @@ class FirestoreBatchManager {
  delete this.speculatorTimestampFixBatch[docPath];
  }
 
- if (Object.keys(this.usernameMapUpdates).length) {
- firestoreBatch.set(this.db.collection(this.usernameMapCollectionName).doc('cid_map_shard_1'), this.usernameMapUpdates, { merge: true });
- batchOps++;
- this.logger.log('INFO', `[BATCH] Flushing ${Object.keys(this.usernameMapUpdates).length} username map updates.`);
- this.usernameMapUpdates = {};
+ // --- START FIX: MODIFIED FOR SHARDING ---
+ // Loop over each shardId in the updates object
+ for (const shardId in this.usernameMapUpdates) {
+ const updates = this.usernameMapUpdates[shardId];
+ if (updates && Object.keys(updates).length > 0) {
+ firestoreBatch.set(
+ this.db.collection(this.usernameMapCollectionName).doc(shardId),
+ updates,
+ { merge: true }
+ );
+ batchOps++;
+ this.logger.log('INFO', `[BATCH] Flushing ${Object.keys(updates).length} username updates to ${shardId}.`);
+ }
  }
+ this.usernameMapUpdates = {}; // Clear updates after staging them
+ // --- END FIX ---
 
  if (this.processedSpeculatorCids.size) {
  const cids = Array.from(this.processedSpeculatorCids);
@@ -180,4 +207,4 @@ class FirestoreBatchManager {
  }
  }
 
- module.exports = { FirestoreBatchManager };
+ module.exports = { FirestoreBatchManager };
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bulltrackers-module",
- "version": "1.0.136",
+ "version": "1.0.138",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [