bulltrackers-module 1.0.126 → 1.0.128
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/helpers/computation_pass_runner.js +20 -773
- package/functions/computation-system/helpers/orchestration_helpers.js +88 -867
- package/functions/computation-system/utils/data_loader.js +58 -147
- package/functions/computation-system/utils/utils.js +55 -98
- package/functions/orchestrator/helpers/discovery_helpers.js +40 -188
- package/functions/orchestrator/helpers/update_helpers.js +21 -61
- package/functions/orchestrator/index.js +42 -121
- package/functions/task-engine/handler_creator.js +22 -143
- package/functions/task-engine/helpers/discover_helpers.js +20 -90
- package/functions/task-engine/helpers/update_helpers.js +90 -185
- package/functions/task-engine/helpers/verify_helpers.js +43 -159
- package/functions/task-engine/utils/firestore_batch_manager.js +97 -290
- package/functions/task-engine/utils/task_engine_utils.js +99 -0
- package/package.json +1 -1
- package/functions/task-engine/utils/api_calls.js +0 -0
- package/functions/task-engine/utils/firestore_ops.js +0 -0
|
@@ -4,373 +4,180 @@
|
|
|
4
4
|
* OPTIMIZED: Added logic to handle speculator timestamp fixes within the batch.
|
|
5
5
|
* --- MODIFIED: Added username map caching and trading history batching. ---
|
|
6
6
|
*/
|
|
7
|
-
|
|
8
7
|
const { FieldValue } = require('@google-cloud/firestore');
|
|
9
|
-
// No longer requires logger from sharedsetup, it's passed in.
|
|
10
8
|
|
|
11
9
|
/**
 * Accumulates Firestore writes in memory (portfolio snapshots, trading
 * history, verification timestamps, username-map entries, and speculator
 * timestamp fixes) and flushes them together, either on a deferred timer
 * or when flushBatches() is invoked directly.
 */
class FirestoreBatchManager {
  /**
   * @param {Firestore} db - A Firestore instance.
   * @param {object} headerManager - Manager whose flushPerformanceUpdates() is awaited at the end of every flush.
   * @param {object} logger - Logger exposing log(level, message, meta).
   * @param {object} config - Collection names, flush interval, and shard-size settings.
   */
  constructor(db, headerManager, logger, config) {
    this.db = db;
    this.headerManager = headerManager;
    this.logger = logger;
    this.config = config;

    // Pending writes, keyed by document/base path.
    this.portfolioBatch = {};
    this.timestampBatch = {};
    this.tradingHistoryBatch = {};
    this.speculatorTimestampFixBatch = {};

    // In-memory CID -> username cache plus its pending Firestore writes.
    this.usernameMap = new Map();
    this.usernameMapUpdates = {};
    this.usernameMapLastLoaded = 0;

    // Speculator CIDs queued for removal from the pending-speculators docs.
    this.processedSpeculatorCids = new Set();

    this.usernameMapCollectionName = config.FIRESTORE_COLLECTION_USERNAME_MAP;
    this.normalHistoryCollectionName = config.FIRESTORE_COLLECTION_NORMAL_HISTORY;
    this.speculatorHistoryCollectionName = config.FIRESTORE_COLLECTION_SPECULATOR_HISTORY;

    // Handle of the deferred flush timer; null when no flush is scheduled.
    this.batchTimeout = null;

    logger.log('INFO', 'FirestoreBatchManager initialized.');
  }

  /** Schedules a deferred flushBatches() if one is not already pending. */
  _scheduleFlush() {
    if (!this.batchTimeout) this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS);
  }

  /**
   * Loads the CID -> username map from Firestore into memory.
   * No-op if the map was refreshed within the last hour. Load failures are
   * logged and swallowed (deliberate best-effort: callers fall back to lookups).
   */
  async loadUsernameMap() {
    if (Date.now() - this.usernameMapLastLoaded < 3600000) return;
    this.usernameMap.clear();
    this.logger.log('INFO', '[BATCH] Refreshing username map from Firestore...');
    try {
      const snapshot = await this.db.collection(this.usernameMapCollectionName).get();
      snapshot.forEach(doc => {
        // Each doc holds { [cid]: { username } } entries.
        const data = doc.data();
        for (const cid in data) if (data[cid]?.username) this.usernameMap.set(String(cid), data[cid].username);
      });
      this.usernameMapLastLoaded = Date.now();
      this.logger.log('INFO', `[BATCH] Loaded ${this.usernameMap.size} usernames.`);
    } catch (e) {
      this.logger.log('ERROR', '[BATCH] Failed to load username map.', { errorMessage: e.message });
    }
  }

  /** Returns the cached username for a CID (undefined if unknown). */
  getUsername(cid) { return this.usernameMap.get(String(cid)); }

  /**
   * Caches a username in memory immediately and queues it for a batched
   * Firestore write. Falsy usernames are ignored.
   */
  addUsernameMapUpdate(cid, username) {
    if (!username) return;
    const cidStr = String(cid);
    this.usernameMap.set(cidStr, username);
    this.usernameMapUpdates[cidStr] = { username };
    this.logger.log('TRACE', `[BATCH] Queued username update for ${cidStr}.`);
    this._scheduleFlush();
  }

  /**
   * Queues one user's trading-history snapshot under
   * `<historyCollection>/<blockId>/snapshots/<date>`.
   */
  async addToTradingHistoryBatch(userId, blockId, date, historyData, userType) {
    const collection = userType === 'speculator' ? this.speculatorHistoryCollectionName : this.normalHistoryCollectionName;
    const path = `${collection}/${blockId}/snapshots/${date}`;
    this.tradingHistoryBatch[path] ??= {};
    this.tradingHistoryBatch[path][userId] = historyData;
    this._scheduleFlush();
  }

  /**
   * Queues one user's portfolio snapshot under
   * `<portfolioCollection>/<blockId>/snapshots/<date>`.
   */
  async addToPortfolioBatch(userId, blockId, date, portfolioData, userType) {
    const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
    const path = `${collection}/${blockId}/snapshots/${date}`;
    this.portfolioBatch[path] ??= {};
    this.portfolioBatch[path][userId] = portfolioData;
    this._scheduleFlush();
  }

  /**
   * Queues a "last seen" timestamp for a user. Speculator keys are
   * `<userId>_<instrumentId>`; normal keys are just the userId.
   */
  async updateUserTimestamp(userId, userType, instrumentId = null) {
    const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
    const docPath = `${collection}/${userType === 'speculator' ? 'speculators' : 'normal'}`;
    this.timestampBatch[docPath] ??= {};
    const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId;
    this.timestampBatch[docPath][key] = new Date();
    this._scheduleFlush();
  }

  /**
   * Removes a queued timestamp (e.g. when a user turns out to be private).
   * Key construction mirrors updateUserTimestamp.
   */
  deleteFromTimestampBatch(userId, userType, instrumentId) {
    const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
    const docPath = `${collection}/${userType === 'speculator' ? 'speculators' : 'normal'}`;
    if (this.timestampBatch[docPath]) {
      const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId;
      delete this.timestampBatch[docPath][key];
    }
  }

  /** Records processed speculator CIDs for later deletion from pending docs. */
  addProcessedSpeculatorCids(cids) { cids.forEach(cid => this.processedSpeculatorCids.add(cid)); }

  /**
   * Queues the two per-user fields the orchestrator reads (lastVerified,
   * lastHeldSpeculatorAsset) on the speculator block document.
   */
  async addSpeculatorTimestampFix(userId, orchestratorBlockId) {
    const docPath = `${this.config.FIRESTORE_COLLECTION_SPECULATOR_BLOCKS}/${orchestratorBlockId}`;
    this.speculatorTimestampFixBatch[docPath] ??= {};
    this.speculatorTimestampFixBatch[docPath][`users.${userId}.lastVerified`] = new Date();
    this.speculatorTimestampFixBatch[docPath][`users.${userId}.lastHeldSpeculatorAsset`] = new Date();
    this.logger.log('TRACE', `[BATCH] Queued speculator timestamp fix for ${userId} in block ${orchestratorBlockId}`);
    this._scheduleFlush();
  }

  /**
   * Stages a sharded data batch (portfolio or history) onto the Firestore
   * batch, splitting users into `<basePath>/parts` docs of at most
   * TASK_ENGINE_MAX_USERS_PER_SHARD users each.
   * @returns {number} Number of batch set() operations added.
   */
  _flushDataBatch(batchData, firestoreBatch, logName) {
    let count = 0;
    for (const basePath in batchData) {
      const users = batchData[basePath];
      const userIds = Object.keys(users);
      // BUGFIX: consume the entry unconditionally; previously an empty entry
      // was never deleted and lingered across flushes.
      delete batchData[basePath];
      if (!userIds.length) continue;
      const shardSize = this.config.TASK_ENGINE_MAX_USERS_PER_SHARD;
      for (let i = 0; i < userIds.length; i += shardSize) {
        const chunkData = Object.fromEntries(userIds.slice(i, i + shardSize).map(id => [id, users[id]]));
        firestoreBatch.set(this.db.collection(`${basePath}/parts`).doc(), chunkData);
        count++;
      }
      this.logger.log('INFO', `[BATCH] Staged ${userIds.length} ${logName} users in ${Math.ceil(userIds.length / shardSize)} shards for ${basePath}.`);
    }
    return count;
  }

  /**
   * Flushes every pending batch to Firestore, then flushes header
   * performance updates. Cancels any scheduled deferred flush first.
   */
  async flushBatches() {
    if (this.batchTimeout) { clearTimeout(this.batchTimeout); this.batchTimeout = null; }

    const firestoreBatch = this.db.batch();
    let batchOps = 0;

    batchOps += this._flushDataBatch(this.portfolioBatch, firestoreBatch, 'Portfolio');
    batchOps += this._flushDataBatch(this.tradingHistoryBatch, firestoreBatch, 'Trade History');

    for (const docPath in this.timestampBatch) {
      const timestamps = this.timestampBatch[docPath];
      delete this.timestampBatch[docPath];
      if (!Object.keys(timestamps).length) continue;
      // BUGFIX: derive the leaf doc from docPath (`<collection>/<speculators|normal>`)
      // instead of hard-coding 'normal', which routed speculator timestamps into
      // the normal users doc. NOTE(review): confirm the timestamps subcollection
      // layout against the readers of these documents.
      const [collectionName, docId] = docPath.split('/');
      const docRef = this.db.collection(collectionName).doc('timestamps').collection('users').doc(docId);
      firestoreBatch.set(docRef, { users: timestamps }, { merge: true });
      batchOps++;
    }

    for (const docPath in this.speculatorTimestampFixBatch) {
      const updates = this.speculatorTimestampFixBatch[docPath];
      delete this.speculatorTimestampFixBatch[docPath];
      if (!Object.keys(updates).length) continue;
      firestoreBatch.set(this.db.doc(docPath), updates, { merge: true });
      batchOps++;
    }

    if (Object.keys(this.usernameMapUpdates).length) {
      firestoreBatch.set(this.db.collection(this.usernameMapCollectionName).doc('cid_map_shard_1'), this.usernameMapUpdates, { merge: true });
      batchOps++;
      this.logger.log('INFO', `[BATCH] Flushing ${Object.keys(this.usernameMapUpdates).length} username map updates.`);
      this.usernameMapUpdates = {};
    }

    if (this.processedSpeculatorCids.size) {
      const cids = Array.from(this.processedSpeculatorCids);
      this.processedSpeculatorCids.clear();
      const snapshot = await this.db.collection(this.config.PENDING_SPECULATORS_COLLECTION).get();
      const deletePromises = [];
      snapshot.forEach(doc => {
        const docData = doc.data().users || {};
        // BUGFIX: key-presence check rather than truthiness, so falsy stored
        // values are still deleted.
        const cidsInDoc = cids.filter(cid => Object.hasOwn(docData, cid));
        if (!cidsInDoc.length) return;
        const delBatch = this.db.batch();
        delBatch.update(doc.ref, Object.fromEntries(cidsInDoc.map(cid => [`users.${cid}`, FieldValue.delete()])));
        // BUGFIX: the commit was previously fire-and-forget; collect it so the
        // flush awaits completion and surfaces rejections.
        deletePromises.push(delBatch.commit());
        this.logger.log('INFO', `[BATCH] Deleted ${cidsInDoc.length} CIDs from ${doc.id}`);
      });
      await Promise.all(deletePromises);
    }

    if (batchOps) await firestoreBatch.commit();
    await this.headerManager.flushPerformanceUpdates();
    this.logger.log('INFO', '[BATCH] All batches flushed successfully.');
  }
}
|
|
375
182
|
|
|
376
|
-
module.exports = { FirestoreBatchManager };
|
|
183
|
+
module.exports = { FirestoreBatchManager };
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
/*
|
|
2
|
+
* FILENAME: CloudFunctions/NpmWrappers/bulltrackers-module/functions/task-engine/utils/task_engine_utils.js
|
|
3
|
+
* (MODIFIED: To pass down a Set to track history fetches)
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* @fileoverview Utility functions for the Task Engine handler.
|
|
8
|
+
* This file contains the "how" - the logic for parsing, sorting,
|
|
9
|
+
* and orchestrating task execution.
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
const { handleDiscover } = require('../helpers/discover_helpers');
|
|
13
|
+
const { handleVerify } = require('../helpers/verify_helpers');
|
|
14
|
+
const { handleUpdate, lookupUsernames } = require('../helpers/update_helpers');
|
|
15
|
+
|
|
16
|
+
/**
 * Parses a base64-encoded Pub/Sub message into an array of tasks.
 *
 * @param {object} message - Pub/Sub message; `message.data` is base64 JSON.
 * @param {object} logger - Logger exposing log(level, message, meta).
 * @returns {Array<object>|null} The tasks array, or null if the message is
 *   missing, unparseable, or has no non-empty `tasks` array.
 */
function parseTaskPayload(message, logger) {
  if (!message?.data) {
    // BUGFIX: previously `return logger.log(...) || null`, which returned the
    // logger's return value whenever it was truthy. Always return null here.
    logger.log('ERROR', '[TaskEngine] Invalid message structure.', { message });
    return null;
  }
  let payload;
  try { payload = JSON.parse(Buffer.from(message.data, 'base64').toString()); }
  catch (e) { logger.log('ERROR', '[TaskEngine] Failed to parse message', { error: e.message }); return null; }
  if (!Array.isArray(payload.tasks) || payload.tasks.length === 0) {
    // WARN when something task-like with a length is present, ERROR otherwise.
    logger.log(payload.tasks?.length ? 'WARN' : 'ERROR', '[TaskEngine] Invalid or empty tasks array.', { payload });
    return null;
  }
  return payload.tasks;
}
|
|
30
|
+
|
|
31
|
+
/**
 * Splits incoming tasks into three buckets: update tasks whose username is
 * already cached, update tasks needing a username lookup, and everything
 * else (discover/verify). Refreshes the username map before sorting.
 *
 * @returns {Promise<{tasksToRun: Array, cidsToLookup: Map, otherTasks: Array}>}
 */
async function prepareTaskBatches(tasks, batchManager, logger) {
  const tasksToRun = [];
  const cidsToLookup = new Map();
  const otherTasks = [];
  await batchManager.loadUsernameMap();
  for (const task of tasks) {
    if (task.type !== 'update') {
      otherTasks.push(task);
      continue;
    }
    const username = batchManager.getUsername(task.userId);
    if (username) {
      tasksToRun.push({ task, username });
    } else {
      cidsToLookup.set(String(task.userId), task);
    }
  }
  logger.log('INFO', `[TaskEngine] Sorting complete. Known: ${tasksToRun.length}, Lookup: ${cidsToLookup.size}, Other: ${otherTasks.length}`);
  return { tasksToRun, cidsToLookup, otherTasks };
}
|
|
46
|
+
|
|
47
|
+
/**
 * Resolves usernames for CIDs that were missing from the cache, records each
 * resolved name in the batch manager, and promotes the matching task into
 * tasksToRun. CIDs still unresolved afterwards (likely private accounts) are
 * logged as a warning and left in cidsToLookup.
 *
 * @param {Array} tasksToRun - Mutated in place: resolved tasks are appended.
 * @param {Map} cidsToLookup - Mutated in place: resolved CIDs are removed.
 */
async function runUsernameLookups(tasksToRun, cidsToLookup, dependencies, config, batchManager, logger) {
  if (!cidsToLookup.size) return;
  logger.log('INFO', `[TaskEngine] Looking up ${cidsToLookup.size} usernames...`);
  const missingCids = [...cidsToLookup.keys()];
  const foundUsers = await lookupUsernames(missingCids, dependencies, config);
  for (const user of foundUsers) {
    const cid = String(user.CID);
    const username = user.Value.UserName;
    batchManager.addUsernameMapUpdate(cid, username);
    const pendingTask = cidsToLookup.get(cid);
    if (!pendingTask) continue;
    tasksToRun.push({ task: pendingTask, username });
    cidsToLookup.delete(cid);
  }
  if (cidsToLookup.size) logger.log('WARN', `[TaskEngine] Could not find ${cidsToLookup.size} usernames (likely private).`, { skippedCids: [...cidsToLookup.keys()] });
}
|
|
62
|
+
|
|
63
|
+
/**
 * Executes all sorted tasks: discover/verify tasks first, then update tasks
 * (each of which already carries a resolved username). Per-task failures are
 * logged and do not abort the batch.
 *
 * @param {Array<{task: object, username: string}>} tasksToRun - Update tasks.
 * @param {Array<object>} otherTasks - Discover/verify tasks.
 * @param {string} taskId - Identifier used for log correlation.
 */
async function executeTasks(tasksToRun, otherTasks, dependencies, config, taskId) {
  const { logger } = dependencies;

  // Tracks which users already had history fetched, scoped to this batch only.
  const historyFetchedForUser = new Set();

  const handlersByType = { discover: handleDiscover, verify: handleVerify };
  for (const task of otherTasks) {
    const subTaskId = `${task.type}-${task.userType || 'unknown'}-${task.userId || task.cids?.[0] || 'sub'}`;
    const handler = handlersByType[task.type];
    if (!handler) {
      logger.log('ERROR', `[TaskEngine/${taskId}] Unknown task type: ${task.type}`);
      continue;
    }
    try {
      await handler(task, subTaskId, dependencies, config);
    } catch (err) {
      logger.log('ERROR', `[TaskEngine/${taskId}] Error in ${task.type} for ${subTaskId}`, { errorMessage: err.message });
    }
  }

  for (const { task, username } of tasksToRun) {
    const subTaskId = `${task.type}-${task.userType || 'unknown'}-${task.userId}`;
    try {
      // The Set is shared across all update tasks in this batch so history
      // is fetched at most once per user.
      await handleUpdate(task, subTaskId, dependencies, config, username, historyFetchedForUser);
    } catch (err) {
      logger.log('ERROR', `[TaskEngine/${taskId}] Error in handleUpdate for ${task.userId}`, { errorMessage: err.message });
    }
  }

  logger.log('SUCCESS', `[TaskEngine/${taskId}] Processed all tasks.`);
}
|
|
98
|
+
|
|
99
|
+
module.exports = { parseTaskPayload, prepareTaskBatches, runUsernameLookups, executeTasks };
|
package/package.json
CHANGED
|
File without changes
|
|
File without changes
|