bulltrackers-module 1.0.180 → 1.0.181

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -1,251 +1,245 @@
1
- /**
2
- @fileoverview Utility class to manage stateful Firestore write batches.
3
-
1
+ /** @fileoverview Utility class to manage stateful Firestore write batches.
4
2
  REFACTORED: Renamed 'firestore' to 'db' for consistency.
5
-
6
3
  OPTIMIZED: Added logic to handle speculator timestamp fixes within the batch.
7
-
8
4
  --- MODIFIED: Added username map caching and trading history batching. ---
9
-
10
- --- MODIFIED: Added cross-invocation cache for history fetches. ---
11
-
12
- --- FIXED: Implemented deterministic sharding (part_N) to prevent document fragmentation. --- **/
5
+ --- FIXED: Implemented Modulo Sharding to pack sparse IDs into dense documents. ---
6
+ --- FIXED: Removed aggressive auto-flush timeout to prevent cost explosion. ---
7
+ **/
13
8
 
14
9
  const { FieldValue } = require('@google-cloud/firestore');
15
10
 
16
- class FirestoreBatchManager { constructor(db, headerManager, logger, config) { this.db = db; this.headerManager = headerManager; this.logger = logger; this.config = config; this.portfolioBatch = {}; this.timestampBatch = {}; this.tradingHistoryBatch = {}; this.speculatorTimestampFixBatch = {};
17
-
18
- // Username map cache
19
- this.usernameMap = new Map();
20
- this.usernameMapUpdates = {};
21
- this.usernameMapLastLoaded = 0;
22
-
23
- // History fetch cache (NEW)
24
- this.historyFetchedUserIds = new Set();
25
- this.historyCacheTimestamp = Date.now();
26
- // Set a 10-minute TTL on this cache (600,000 ms)
27
- this.HISTORY_CACHE_TTL_MS = config.HISTORY_CACHE_TTL_MS || 600000;
28
-
29
- this.processedSpeculatorCids = new Set();
30
- this.usernameMapCollectionName = config.FIRESTORE_COLLECTION_USERNAME_MAP;
31
- this.normalHistoryCollectionName = config.FIRESTORE_COLLECTION_NORMAL_HISTORY;
32
- this.speculatorHistoryCollectionName = config.FIRESTORE_COLLECTION_SPECULATOR_HISTORY;
33
- this.batchTimeout = null;
34
- logger.log('INFO', 'FirestoreBatchManager initialized.');
35
- }
36
-
37
- /* * NEW: Checks if a user's history has been fetched in the last 10 minutes.
38
- * If not, it logs them as fetched and returns false (to trigger a fetch).
39
- * @param {string} userId
40
- * @returns {boolean} True if already fetched, false if not.
41
- */
42
- checkAndSetHistoryFetched(userId) {
43
- // Check if the cache is stale
44
- if (Date.now() - this.historyCacheTimestamp > this.HISTORY_CACHE_TTL_MS) {
45
- this.logger.log('INFO', '[BATCH] History fetch cache (10m TTL) expired. Clearing set.');
46
- this.historyFetchedUserIds.clear();
11
+ class FirestoreBatchManager {
12
+ constructor(db, headerManager, logger, config) {
13
+ this.db = db;
14
+ this.headerManager = headerManager;
15
+ this.logger = logger;
16
+ this.config = config;
17
+ this.portfolioBatch = {};
18
+ this.timestampBatch = {};
19
+ this.tradingHistoryBatch = {};
20
+ this.speculatorTimestampFixBatch = {};
21
+
22
+ // Username map cache
23
+ this.usernameMap = new Map();
24
+ this.usernameMapUpdates = {};
25
+ this.usernameMapLastLoaded = 0;
26
+
27
+ // History fetch cache (NEW)
28
+ this.historyFetchedUserIds = new Set();
47
29
  this.historyCacheTimestamp = Date.now();
30
+ this.HISTORY_CACHE_TTL_MS = config.HISTORY_CACHE_TTL_MS || 600000;
31
+
32
+ this.processedSpeculatorCids = new Set();
33
+ this.usernameMapCollectionName = config.FIRESTORE_COLLECTION_USERNAME_MAP;
34
+ this.normalHistoryCollectionName = config.FIRESTORE_COLLECTION_NORMAL_HISTORY;
35
+ this.speculatorHistoryCollectionName = config.FIRESTORE_COLLECTION_SPECULATOR_HISTORY;
36
+ this.batchTimeout = null;
37
+ logger.log('INFO', 'FirestoreBatchManager initialized.');
48
38
  }
49
39
 
50
- if (this.historyFetchedUserIds.has(userId)) {
51
- return true; // Yes, already fetched
40
+ checkAndSetHistoryFetched(userId) {
41
+ if (Date.now() - this.historyCacheTimestamp > this.HISTORY_CACHE_TTL_MS) {
42
+ this.logger.log('INFO', '[BATCH] History fetch cache (10m TTL) expired. Clearing set.');
43
+ this.historyFetchedUserIds.clear();
44
+ this.historyCacheTimestamp = Date.now();
45
+ }
46
+ if (this.historyFetchedUserIds.has(userId)) { return true; }
47
+ this.historyFetchedUserIds.add(userId);
48
+ return false;
52
49
  }
53
50
 
54
- // Not fetched yet. Mark as fetched and return false.
55
- this.historyFetchedUserIds.add(userId);
56
- return false;
57
- }
58
-
59
- _getUsernameShardId(cid) { return `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`; }
60
-
61
- // _scheduleFlush() { if (!this.batchTimeout) this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS); } Old version
51
+ _getUsernameShardId(cid) { return `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`; }
52
+
53
+ // --- CRITICAL FIX: Removed aggressive timeout flush ---
54
+ // With sequential processing, the timer was firing too often, causing 1 write per user (expensive).
55
+ // Now we only flush if we hit the memory limit (MAX_BATCH_SIZE) or when explicitly called at the end.
56
+ _scheduleFlush() {
57
+ const maxBatch = this.config.TASK_ENGINE_MAX_BATCH_SIZE ? Number(this.config.TASK_ENGINE_MAX_BATCH_SIZE) : 400;
58
+ const totalOps = this._estimateBatchSize();
59
+ if (totalOps >= maxBatch) {
60
+ this.flushBatches();
61
+ return;
62
+ }
63
+ }
62
64
 
63
- _scheduleFlush() {
64
- const totalOps = this._estimateBatchSize();
65
- if (totalOps >= 400) { this.flushBatches(); return; }
66
- if (!this.batchTimeout) { this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS); }
67
- }
65
+ _estimateBatchSize() {
66
+ let ops = 0;
67
+ ops += Object.keys(this.portfolioBatch).length;
68
+ ops += Object.keys(this.tradingHistoryBatch).length;
69
+ ops += Object.keys(this.timestampBatch).length;
70
+ ops += Object.keys(this.speculatorTimestampFixBatch).length;
71
+ return ops;
72
+ }
68
73
 
69
- _estimateBatchSize() {
70
- let ops = 0;
71
- ops += Object.keys(this.portfolioBatch).length;
72
- ops += Object.keys(this.tradingHistoryBatch).length;
73
- ops += Object.keys(this.timestampBatch).length;
74
- ops += Object.keys(this.speculatorTimestampFixBatch).length;
75
- return ops;
76
- }
74
+ async loadUsernameMap() {
75
+ if (Date.now() - this.usernameMapLastLoaded < 3600000) return;
76
+ this.usernameMap.clear();
77
+ this.logger.log('INFO', '[BATCH] Refreshing username map from Firestore...');
78
+ try {
79
+ const snapshot = await this.db.collection(this.usernameMapCollectionName).get();
80
+ snapshot.forEach(doc => { const data = doc.data(); for (const cid in data) if (data[cid]?.username) this.usernameMap.set(String(cid), data[cid].username); });
81
+ this.usernameMapLastLoaded = Date.now();
82
+ this.logger.log('INFO', `[BATCH] Loaded ${this.usernameMap.size} usernames.`);
83
+ } catch (e) { this.logger.log('ERROR', '[BATCH] Failed to load username map.', { errorMessage: e.message }); }
84
+ }
77
85
 
78
- async loadUsernameMap() {
79
- if (Date.now() - this.usernameMapLastLoaded < 3600000) return;
80
- this.usernameMap.clear();
81
- this.logger.log('INFO', '[BATCH] Refreshing username map from Firestore...');
82
- try {
83
- const snapshot = await this.db.collection(this.usernameMapCollectionName).get();
84
- snapshot.forEach(doc => { const data = doc.data(); for (const cid in data) if (data[cid]?.username) this.usernameMap.set(String(cid), data[cid].username); });
85
- this.usernameMapLastLoaded = Date.now();
86
- this.logger.log('INFO', `[BATCH] Loaded ${this.usernameMap.size} usernames.`);
87
- } catch (e) { this.logger.log('ERROR', '[BATCH] Failed to load username map.', { errorMessage: e.message }); }
88
- }
86
+ getUsername(cid) { return this.usernameMap.get(String(cid)); }
89
87
 
90
- getUsername(cid) { return this.usernameMap.get(String(cid)); }
91
-
92
- addUsernameMapUpdate(cid, username) {
93
- if (!username) return;
94
- const cidStr = String(cid);
95
- this.usernameMap.set(cidStr, username);
96
- const shardId = this._getUsernameShardId(cidStr);
97
- if (!this.usernameMapUpdates[shardId]) { this.usernameMapUpdates[shardId] = {}; }
98
- this.usernameMapUpdates[shardId][cidStr] = { username };
99
- this.logger.log('TRACE', `[BATCH] Queued username update for ${cidStr} in ${shardId}.`);
100
- this._scheduleFlush();
101
- }
88
+ addUsernameMapUpdate(cid, username) {
89
+ if (!username) return;
90
+ const cidStr = String(cid);
91
+ this.usernameMap.set(cidStr, username);
92
+ const shardId = this._getUsernameShardId(cidStr);
93
+ if (!this.usernameMapUpdates[shardId]) { this.usernameMapUpdates[shardId] = {}; }
94
+ this.usernameMapUpdates[shardId][cidStr] = { username };
95
+ this._scheduleFlush();
96
+ }
102
97
 
103
- async addToTradingHistoryBatch(userId, blockId, date, historyData, userType) {
104
- const collection = userType === 'speculator' ? this.speculatorHistoryCollectionName : this.normalHistoryCollectionName;
105
- const path = `${collection}/${blockId}/snapshots/${date}`;
106
- this.tradingHistoryBatch[path] ??= {};
107
- this.tradingHistoryBatch[path][userId] = historyData;
108
- this._scheduleFlush();
109
- }
98
+ async addToTradingHistoryBatch(userId, blockId, date, historyData, userType) {
99
+ const collection = userType === 'speculator' ? this.speculatorHistoryCollectionName : this.normalHistoryCollectionName;
100
+ const path = `${collection}/${blockId}/snapshots/${date}`;
101
+ this.tradingHistoryBatch[path] ??= {};
102
+ this.tradingHistoryBatch[path][userId] = historyData;
103
+ this._scheduleFlush();
104
+ }
110
105
 
111
- async addToPortfolioBatch(userId, blockId, date, portfolioData, userType) {
112
- const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
113
- const path = `${collection}/${blockId}/snapshots/${date}`;
114
- this.portfolioBatch[path] ??= {};
115
- this.portfolioBatch[path][userId] = portfolioData;
116
- this._scheduleFlush();
117
- }
106
+ async addToPortfolioBatch(userId, blockId, date, portfolioData, userType) {
107
+ const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
108
+ const path = `${collection}/${blockId}/snapshots/${date}`;
109
+ this.portfolioBatch[path] ??= {};
110
+ this.portfolioBatch[path][userId] = portfolioData;
111
+ this._scheduleFlush();
112
+ }
118
113
 
119
- async updateUserTimestamp(userId, userType, instrumentId = null) {
120
- const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
121
- const docPath = `${collection}/${userType === 'speculator' ? 'speculators' : 'normal'}`;
122
- this.timestampBatch[docPath] ??= {};
123
- const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId;
124
- this.timestampBatch[docPath][key] = new Date();
125
- this._scheduleFlush();
126
- }
114
+ async updateUserTimestamp(userId, userType, instrumentId = null) {
115
+ const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
116
+ const docPath = `${collection}/${userType === 'speculator' ? 'speculators' : 'normal'}`;
117
+ this.timestampBatch[docPath] ??= {};
118
+ const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId;
119
+ this.timestampBatch[docPath][key] = new Date();
120
+ this._scheduleFlush();
121
+ }
127
122
 
128
- deleteFromTimestampBatch(userId, userType, instrumentId) {
129
- const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
130
- const docPath = `${collection}/${userType === 'speculator' ? 'speculators' : 'normal'}`;
131
- if (this.timestampBatch[docPath]) { const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId; delete this.timestampBatch[docPath][key]; }
132
- }
123
+ deleteFromTimestampBatch(userId, userType, instrumentId) {
124
+ const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
125
+ const docPath = `${collection}/${userType === 'speculator' ? 'speculators' : 'normal'}`;
126
+ if (this.timestampBatch[docPath]) { const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId; delete this.timestampBatch[docPath][key]; }
127
+ }
133
128
 
134
- addProcessedSpeculatorCids(cids) { cids.forEach(cid => this.processedSpeculatorCids.add(cid)); }
129
+ addProcessedSpeculatorCids(cids) { cids.forEach(cid => this.processedSpeculatorCids.add(cid)); }
135
130
 
136
- async addSpeculatorTimestampFix(userId, orchestratorBlockId) {
137
- const docPath = `${this.config.FIRESTORE_COLLECTION_SPECULATOR_BLOCKS}/${orchestratorBlockId}`;
138
- this.speculatorTimestampFixBatch[docPath] ??= {};
139
- this.speculatorTimestampFixBatch[docPath][`users.${userId}.lastVerified`] = new Date();
140
- this.speculatorTimestampFixBatch[docPath][`users.${userId}.lastHeldSpeculatorAsset`] = new Date();
141
- this.logger.log('TRACE', `[BATCH] Queued speculator timestamp fix for ${userId} in block ${orchestratorBlockId}`);
142
- this._scheduleFlush();
143
- }
131
+ async addSpeculatorTimestampFix(userId, orchestratorBlockId) {
132
+ const docPath = `${this.config.FIRESTORE_COLLECTION_SPECULATOR_BLOCKS}/${orchestratorBlockId}`;
133
+ this.speculatorTimestampFixBatch[docPath] ??= {};
134
+ this.speculatorTimestampFixBatch[docPath][`users.${userId}.lastVerified`] = new Date();
135
+ this.speculatorTimestampFixBatch[docPath][`users.${userId}.lastHeldSpeculatorAsset`] = new Date();
136
+ this._scheduleFlush();
137
+ }
144
138
 
145
- /**
146
- * --- REFACTORED: Deterministic Sharding ---
147
- * Groups users into 'part_N' shards based on their CID.
148
- * Uses { merge: true } to allow concurrent/sequential writes to fill documents
149
- * instead of creating fragmented random docs.
150
- */
151
- _flushDataBatch(batchData, firestoreBatch, logName) {
152
- let count = 0;
153
-
154
- // Use 200 as requested, or fall back to config if higher/defined.
155
- // This ensures we fit ~200 users per document to save space/reads.
156
- const SHARD_CAPACITY = this.config.TASK_ENGINE_MAX_USERS_PER_SHARD || 200;
157
-
158
- for (const basePath in batchData) {
159
- const users = batchData[basePath];
160
- const userIds = Object.keys(users);
161
- if (!userIds.length) continue;
162
-
163
- // 1. Group updates by Deterministic Shard ID
164
- const updatesByShard = {};
165
-
166
- for (const userId of userIds) {
167
- const cid = parseInt(userId, 10);
168
- let shardId;
169
-
170
- if (!isNaN(cid)) {
171
- // Block Logic:
172
- // Users are already partitioned into 1M blocks (basePath has blockId).
173
- // Within a block (0-999,999), we want shards of size SHARD_CAPACITY.
174
- // shardIndex = floor((cid % 1,000,000) / SHARD_CAPACITY)
175
- const blockOffset = cid % 1000000;
176
- const shardIndex = Math.floor(blockOffset / SHARD_CAPACITY);
177
- shardId = `part_${shardIndex}`;
178
- } else {
179
- // Fallback for non-numeric IDs (unlikely for CIDs)
180
- shardId = 'part_misc';
139
+ /**
140
+ * --- REFACTORED: Modulo Sharding ---
141
+ * Fixes the issue where sparse IDs created fragmented documents.
142
+ * We now calculate the number of shards needed to hold the target population
143
+ * and bucket users into them using modulo arithmetic.
144
+ */
145
+ _flushDataBatch(batchData, firestoreBatch, logName) {
146
+ let count = 0;
147
+
148
+ // 1. Determine Shard Strategy
149
+ // If we expect ~1500 users in a block and want 200 users per shard:
150
+ // We need ceil(1500 / 200) = 8 shards total (part_0 to part_7).
151
+ // Any ID, no matter how random, will map to one of these 8 buckets.
152
+ const TARGET_USERS = this.config.DISCOVERY_ORCHESTRATOR_TARGET_USERS_PER_BLOCK ? Number(this.config.DISCOVERY_ORCHESTRATOR_TARGET_USERS_PER_BLOCK) : 1500;
153
+ const SHARD_CAPACITY = this.config.TASK_ENGINE_MAX_USERS_PER_SHARD ? Number(this.config.TASK_ENGINE_MAX_USERS_PER_SHARD) : 200;
154
+
155
+ // Ensure at least 1 shard exists
156
+ const TOTAL_SHARDS = Math.max(1, Math.ceil(TARGET_USERS / SHARD_CAPACITY));
157
+
158
+ for (const basePath in batchData) {
159
+ const users = batchData[basePath];
160
+ const userIds = Object.keys(users);
161
+ if (!userIds.length) continue;
162
+
163
+ const updatesByShard = {};
164
+
165
+ for (const userId of userIds) {
166
+ const cid = parseInt(userId, 10);
167
+ let shardId;
168
+
169
+ if (!isNaN(cid)) {
170
+ // --- MODULO SHARDING ---
171
+ // Even if IDs are 10, 1000000, 500... they will round-robin into
172
+ // the fixed set of shards (e.g. 8 shards), ensuring density.
173
+ const shardIndex = cid % TOTAL_SHARDS;
174
+ shardId = `part_${shardIndex}`;
175
+ } else {
176
+ shardId = 'part_misc';
177
+ }
178
+
179
+ if (!updatesByShard[shardId]) {
180
+ updatesByShard[shardId] = {};
181
+ }
182
+ updatesByShard[shardId][userId] = users[userId];
181
183
  }
182
184
 
183
- if (!updatesByShard[shardId]) {
184
- updatesByShard[shardId] = {};
185
+ for (const shardId in updatesByShard) {
186
+ const chunkData = updatesByShard[shardId];
187
+ const docRef = this.db.collection(`${basePath}/parts`).doc(shardId);
188
+ // merge: true ensures we append to the doc if it was started in a previous batch
189
+ firestoreBatch.set(docRef, chunkData, { merge: true });
190
+ count++;
185
191
  }
186
- updatesByShard[shardId][userId] = users[userId];
187
- }
188
192
 
189
- // 2. Queue Writes with Merge
190
- for (const shardId in updatesByShard) {
191
- const chunkData = updatesByShard[shardId];
192
- const docRef = this.db.collection(`${basePath}/parts`).doc(shardId);
193
+ this.logger.log('INFO', `[BATCH] Staged ${userIds.length} ${logName} users into ${Object.keys(updatesByShard).length} buckets (Modulo ${TOTAL_SHARDS}) for ${basePath}.`);
193
194
 
194
- // CRITICAL: Use merge: true to append to existing shards
195
- firestoreBatch.set(docRef, chunkData, { merge: true });
196
- count++;
195
+ delete batchData[basePath];
197
196
  }
198
-
199
- this.logger.log('INFO', `[BATCH] Staged ${userIds.length} ${logName} users into ${Object.keys(updatesByShard).length} deterministic shards for ${basePath}.`);
200
-
201
- delete batchData[basePath];
197
+ return count;
202
198
  }
203
- return count;
204
- }
205
199
 
206
- async flushBatches() {
207
- if (this.batchTimeout) { clearTimeout(this.batchTimeout); this.batchTimeout = null; }
208
- const firestoreBatch = this.db.batch();
209
- let batchOps = 0;
210
- batchOps += this._flushDataBatch(this.portfolioBatch, firestoreBatch, 'Portfolio');
211
- batchOps += this._flushDataBatch(this.tradingHistoryBatch, firestoreBatch, 'Trade History');
212
- for (const docPath in this.timestampBatch) {
213
- const timestamps = this.timestampBatch[docPath];
214
- if (!Object.keys(timestamps).length) continue;
215
- const docRef = this.db.collection(docPath.split('/')[0]).doc('timestamps').collection('users').doc('normal');
216
- firestoreBatch.set(docRef, { users: timestamps }, { merge: true });
217
- batchOps++;
218
- delete this.timestampBatch[docPath];
219
- }
200
+ async flushBatches() {
201
+ if (this.batchTimeout) { clearTimeout(this.batchTimeout); this.batchTimeout = null; }
202
+ const firestoreBatch = this.db.batch();
203
+ let batchOps = 0;
204
+ batchOps += this._flushDataBatch(this.portfolioBatch, firestoreBatch, 'Portfolio');
205
+ batchOps += this._flushDataBatch(this.tradingHistoryBatch, firestoreBatch, 'Trade History');
206
+ for (const docPath in this.timestampBatch) {
207
+ const timestamps = this.timestampBatch[docPath];
208
+ if (!Object.keys(timestamps).length) continue;
209
+ const docRef = this.db.collection(docPath.split('/')[0]).doc('timestamps').collection('users').doc('normal');
210
+ firestoreBatch.set(docRef, { users: timestamps }, { merge: true });
211
+ batchOps++;
212
+ delete this.timestampBatch[docPath];
213
+ }
220
214
 
221
- for (const docPath in this.speculatorTimestampFixBatch) {
222
- const updates = this.speculatorTimestampFixBatch[docPath];
223
- if (!Object.keys(updates).length) continue;
224
- firestoreBatch.set(this.db.doc(docPath), updates, { merge: true });
225
- batchOps++;
226
- delete this.speculatorTimestampFixBatch[docPath];
227
- }
215
+ for (const docPath in this.speculatorTimestampFixBatch) {
216
+ const updates = this.speculatorTimestampFixBatch[docPath];
217
+ if (!Object.keys(updates).length) continue;
218
+ firestoreBatch.set(this.db.doc(docPath), updates, { merge: true });
219
+ batchOps++;
220
+ delete this.speculatorTimestampFixBatch[docPath];
221
+ }
228
222
 
229
- for (const shardId in this.usernameMapUpdates) {
230
- const updates = this.usernameMapUpdates[shardId];
231
- if (updates && Object.keys(updates).length > 0) { firestoreBatch.set( this.db.collection(this.usernameMapCollectionName).doc(shardId), updates, { merge: true } ); batchOps++; this.logger.log('INFO', `[BATCH] Flushing ${Object.keys(updates).length} username updates to ${shardId}.`); } }
232
- this.usernameMapUpdates = {};
233
-
234
- if (this.processedSpeculatorCids.size) {
235
- const cids = Array.from(this.processedSpeculatorCids);
236
- this.processedSpeculatorCids.clear();
237
- const snapshot = await this.db.collection(this.config.PENDING_SPECULATORS_COLLECTION).get();
238
- snapshot.forEach(doc => { const docData = doc.data().users || {}; const cidsInDoc = cids.filter(cid => docData[cid]); if (!cidsInDoc.length) return;
239
- const delBatch = this.db.batch();
240
- const updates = Object.fromEntries(cidsInDoc.map(cid => [`users.${cid}`, FieldValue.delete()]));
241
- delBatch.update(doc.ref, updates);
242
- delBatch.commit();
243
- this.logger.log('INFO', `[BATCH] Deleted ${cidsInDoc.length} CIDs from ${doc.id}`); }); }
244
-
245
- if (batchOps) await firestoreBatch.commit();
246
- await this.headerManager.flushPerformanceUpdates();
247
- this.logger.log('INFO', '[BATCH] All batches flushed successfully.');
248
- }
223
+ for (const shardId in this.usernameMapUpdates) {
224
+ const updates = this.usernameMapUpdates[shardId];
225
+ if (updates && Object.keys(updates).length > 0) { firestoreBatch.set( this.db.collection(this.usernameMapCollectionName).doc(shardId), updates, { merge: true } ); batchOps++; this.logger.log('INFO', `[BATCH] Flushing ${Object.keys(updates).length} username updates to ${shardId}.`); } }
226
+ this.usernameMapUpdates = {};
227
+
228
+ if (this.processedSpeculatorCids.size) {
229
+ const cids = Array.from(this.processedSpeculatorCids);
230
+ this.processedSpeculatorCids.clear();
231
+ const snapshot = await this.db.collection(this.config.PENDING_SPECULATORS_COLLECTION).get();
232
+ snapshot.forEach(doc => { const docData = doc.data().users || {}; const cidsInDoc = cids.filter(cid => docData[cid]); if (!cidsInDoc.length) return;
233
+ const delBatch = this.db.batch();
234
+ const updates = Object.fromEntries(cidsInDoc.map(cid => [`users.${cid}`, FieldValue.delete()]));
235
+ delBatch.update(doc.ref, updates);
236
+ delBatch.commit();
237
+ this.logger.log('INFO', `[BATCH] Deleted ${cidsInDoc.length} CIDs from ${doc.id}`); }); }
238
+
239
+ if (batchOps) await firestoreBatch.commit();
240
+ await this.headerManager.flushPerformanceUpdates();
241
+ this.logger.log('INFO', '[BATCH] All batches flushed successfully.');
242
+ }
249
243
  }
250
244
 
251
- module.exports = { FirestoreBatchManager };
245
+ module.exports = { FirestoreBatchManager };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bulltrackers-module",
3
- "version": "1.0.180",
3
+ "version": "1.0.181",
4
4
  "description": "Helper Functions for Bulltrackers.",
5
5
  "main": "index.js",
6
6
  "files": [