bulltrackers-module 1.0.175 → 1.0.176
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.MD +1901 -2
- package/functions/computation-system/controllers/computation_controller.js +1 -1
- package/functions/computation-system/helpers/computation_manifest_builder.js +9 -39
- package/functions/computation-system/helpers/computation_pass_runner.js +72 -66
- package/functions/computation-system/helpers/orchestration_helpers.js +136 -99
- package/functions/task-engine/utils/firestore_batch_manager.js +224 -180
- package/package.json +1 -1
package/functions/task-engine/utils/firestore_batch_manager.js (+224 -180)

@@ -1,207 +1,251 @@
-/**
-
- * REFACTORED: Renamed 'firestore' to 'db' for consistency.
- * OPTIMIZED: Added logic to handle speculator timestamp fixes within the batch.
- * --- MODIFIED: Added username map caching and trading history batching. ---
- * --- MODIFIED: Added cross-invocation cache for history fetches. ---
- */
-const { FieldValue } = require('@google-cloud/firestore');
+/**
+ @fileoverview Utility class to manage stateful Firestore write batches.

-
-  constructor(db, headerManager, logger, config) {
-    this.db = db;
-    this.headerManager = headerManager;
-    this.logger = logger;
-    this.config = config;
-    this.portfolioBatch = {};
-    this.timestampBatch = {};
-    this.tradingHistoryBatch = {};
-    this.speculatorTimestampFixBatch = {};
-
-    // Username map cache
-    this.usernameMap = new Map();
-    this.usernameMapUpdates = {};
-    this.usernameMapLastLoaded = 0;
-
-    // History fetch cache (NEW)
-    this.historyFetchedUserIds = new Set();
-    this.historyCacheTimestamp = Date.now();
-    // Set a 10-minute TTL on this cache (600,000 ms)
-    this.HISTORY_CACHE_TTL_MS = config.HISTORY_CACHE_TTL_MS || 600000;
-
-    this.processedSpeculatorCids = new Set();
-    this.usernameMapCollectionName = config.FIRESTORE_COLLECTION_USERNAME_MAP;
-    this.normalHistoryCollectionName = config.FIRESTORE_COLLECTION_NORMAL_HISTORY;
-    this.speculatorHistoryCollectionName = config.FIRESTORE_COLLECTION_SPECULATOR_HISTORY;
-    this.batchTimeout = null;
-    logger.log('INFO', 'FirestoreBatchManager initialized.');
-  }
+ REFACTORED: Renamed 'firestore' to 'db' for consistency.

-
-   * NEW: Checks if a user's history has been fetched in the last 10 minutes.
-   * If not, it logs them as fetched and returns false (to trigger a fetch).
-   * @param {string} userId
-   * @returns {boolean} True if already fetched, false if not.
-   */
-  checkAndSetHistoryFetched(userId) {
-    // Check if the cache is stale
-    if (Date.now() - this.historyCacheTimestamp > this.HISTORY_CACHE_TTL_MS) {
-      this.logger.log('INFO', '[BATCH] History fetch cache (10m TTL) expired. Clearing set.');
-      this.historyFetchedUserIds.clear();
-      this.historyCacheTimestamp = Date.now();
-    }
+ OPTIMIZED: Added logic to handle speculator timestamp fixes within the batch.

-
-      return true; // Yes, already fetched
-    }
+ --- MODIFIED: Added username map caching and trading history batching. ---

-
-    this.historyFetchedUserIds.add(userId);
-    return false;
-  }
+ --- MODIFIED: Added cross-invocation cache for history fetches. ---

-
+ --- FIXED: Implemented deterministic sharding (part_N) to prevent document fragmentation. --- **/

-
+const { FieldValue } = require('@google-cloud/firestore');

-
-
-
-
-  }
+class FirestoreBatchManager { constructor(db, headerManager, logger, config) { this.db = db; this.headerManager = headerManager; this.logger = logger; this.config = config; this.portfolioBatch = {}; this.timestampBatch = {}; this.tradingHistoryBatch = {}; this.speculatorTimestampFixBatch = {};
+
+    // Username map cache
+    this.usernameMap = new Map();
+    this.usernameMapUpdates = {};
+    this.usernameMapLastLoaded = 0;
+
+    // History fetch cache (NEW)
+    this.historyFetchedUserIds = new Set();
+    this.historyCacheTimestamp = Date.now();
+    // Set a 10-minute TTL on this cache (600,000 ms)
+    this.HISTORY_CACHE_TTL_MS = config.HISTORY_CACHE_TTL_MS || 600000;
+
+    this.processedSpeculatorCids = new Set();
+    this.usernameMapCollectionName = config.FIRESTORE_COLLECTION_USERNAME_MAP;
+    this.normalHistoryCollectionName = config.FIRESTORE_COLLECTION_NORMAL_HISTORY;
+    this.speculatorHistoryCollectionName = config.FIRESTORE_COLLECTION_SPECULATOR_HISTORY;
+    this.batchTimeout = null;
+    logger.log('INFO', 'FirestoreBatchManager initialized.');
+  }

-
-
-
-
-
-
-
+  /* * NEW: Checks if a user's history has been fetched in the last 10 minutes.
+   * If not, it logs them as fetched and returns false (to trigger a fetch).
+   * @param {string} userId
+   * @returns {boolean} True if already fetched, false if not.
+   */
+  checkAndSetHistoryFetched(userId) {
+    // Check if the cache is stale
+    if (Date.now() - this.historyCacheTimestamp > this.HISTORY_CACHE_TTL_MS) {
+      this.logger.log('INFO', '[BATCH] History fetch cache (10m TTL) expired. Clearing set.');
+      this.historyFetchedUserIds.clear();
+      this.historyCacheTimestamp = Date.now();
     }

-
-
-    this.usernameMap.clear();
-    this.logger.log('INFO', '[BATCH] Refreshing username map from Firestore...');
-    try {
-      const snapshot = await this.db.collection(this.usernameMapCollectionName).get();
-      snapshot.forEach(doc => { const data = doc.data(); for (const cid in data) if (data[cid]?.username) this.usernameMap.set(String(cid), data[cid].username); });
-      this.usernameMapLastLoaded = Date.now();
-      this.logger.log('INFO', `[BATCH] Loaded ${this.usernameMap.size} usernames.`);
-    } catch (e) { this.logger.log('ERROR', '[BATCH] Failed to load username map.', { errorMessage: e.message }); }
+    if (this.historyFetchedUserIds.has(userId)) {
+      return true; // Yes, already fetched
     }

-
-
-
-
-    const cidStr = String(cid);
-    this.usernameMap.set(cidStr, username);
-    const shardId = this._getUsernameShardId(cidStr);
-    if (!this.usernameMapUpdates[shardId]) { this.usernameMapUpdates[shardId] = {}; }
-    this.usernameMapUpdates[shardId][cidStr] = { username };
-    this.logger.log('TRACE', `[BATCH] Queued username update for ${cidStr} in ${shardId}.`);
-    this._scheduleFlush();
-  }
+    // Not fetched yet. Mark as fetched and return false.
+    this.historyFetchedUserIds.add(userId);
+    return false;
+  }

-
-    const collection = userType === 'speculator' ? this.speculatorHistoryCollectionName : this.normalHistoryCollectionName;
-    const path = `${collection}/${blockId}/snapshots/${date}`;
-    this.tradingHistoryBatch[path] ??= {};
-    this.tradingHistoryBatch[path][userId] = historyData;
-    this._scheduleFlush();
-  }
+  _getUsernameShardId(cid) { return `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`; }

-
-    const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
-    const path = `${collection}/${blockId}/snapshots/${date}`;
-    this.portfolioBatch[path] ??= {};
-    this.portfolioBatch[path][userId] = portfolioData;
-    this._scheduleFlush();
-  }
+  // _scheduleFlush() { if (!this.batchTimeout) this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS); } Old version

-
-
-
-
-
-    this.timestampBatch[docPath][key] = new Date();
-    this._scheduleFlush();
-  }
+  _scheduleFlush() {
+    const totalOps = this._estimateBatchSize();
+    if (totalOps >= 400) { this.flushBatches(); return; }
+    if (!this.batchTimeout) { this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS); }
+  }

-
-
-
-
-
+  _estimateBatchSize() {
+    let ops = 0;
+    ops += Object.keys(this.portfolioBatch).length;
+    ops += Object.keys(this.tradingHistoryBatch).length;
+    ops += Object.keys(this.timestampBatch).length;
+    ops += Object.keys(this.speculatorTimestampFixBatch).length;
+    return ops;
+  }

-
+  async loadUsernameMap() {
+    if (Date.now() - this.usernameMapLastLoaded < 3600000) return;
+    this.usernameMap.clear();
+    this.logger.log('INFO', '[BATCH] Refreshing username map from Firestore...');
+    try {
+      const snapshot = await this.db.collection(this.usernameMapCollectionName).get();
+      snapshot.forEach(doc => { const data = doc.data(); for (const cid in data) if (data[cid]?.username) this.usernameMap.set(String(cid), data[cid].username); });
+      this.usernameMapLastLoaded = Date.now();
+      this.logger.log('INFO', `[BATCH] Loaded ${this.usernameMap.size} usernames.`);
+    } catch (e) { this.logger.log('ERROR', '[BATCH] Failed to load username map.', { errorMessage: e.message }); }
+  }

-
-
-
-
-
-
-
-  }
+  getUsername(cid) { return this.usernameMap.get(String(cid)); }
+
+  addUsernameMapUpdate(cid, username) {
+    if (!username) return;
+    const cidStr = String(cid);
+    this.usernameMap.set(cidStr, username);
+    const shardId = this._getUsernameShardId(cidStr);
+    if (!this.usernameMapUpdates[shardId]) { this.usernameMapUpdates[shardId] = {}; }
+    this.usernameMapUpdates[shardId][cidStr] = { username };
+    this.logger.log('TRACE', `[BATCH] Queued username update for ${cidStr} in ${shardId}.`);
+    this._scheduleFlush();
+  }

-
-
-
-
-
-
-
-      this.logger.log('INFO', `[BATCH] Staged ${userIds.length} ${logName} users in ${Math.ceil(userIds.length / this.config.TASK_ENGINE_MAX_USERS_PER_SHARD)} shards for ${basePath}.`);
-      delete batchData[basePath];
-    }
-    return count;
-  }
+  async addToTradingHistoryBatch(userId, blockId, date, historyData, userType) {
+    const collection = userType === 'speculator' ? this.speculatorHistoryCollectionName : this.normalHistoryCollectionName;
+    const path = `${collection}/${blockId}/snapshots/${date}`;
+    this.tradingHistoryBatch[path] ??= {};
+    this.tradingHistoryBatch[path][userId] = historyData;
+    this._scheduleFlush();
+  }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  async addToPortfolioBatch(userId, blockId, date, portfolioData, userType) {
+    const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
+    const path = `${collection}/${blockId}/snapshots/${date}`;
+    this.portfolioBatch[path] ??= {};
+    this.portfolioBatch[path][userId] = portfolioData;
+    this._scheduleFlush();
+  }
+
+  async updateUserTimestamp(userId, userType, instrumentId = null) {
+    const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
+    const docPath = `${collection}/${userType === 'speculator' ? 'speculators' : 'normal'}`;
+    this.timestampBatch[docPath] ??= {};
+    const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId;
+    this.timestampBatch[docPath][key] = new Date();
+    this._scheduleFlush();
+  }

-
-
-
-
-
-
+  deleteFromTimestampBatch(userId, userType, instrumentId) {
+    const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
+    const docPath = `${collection}/${userType === 'speculator' ? 'speculators' : 'normal'}`;
+    if (this.timestampBatch[docPath]) { const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId; delete this.timestampBatch[docPath][key]; }
+  }
+
+  addProcessedSpeculatorCids(cids) { cids.forEach(cid => this.processedSpeculatorCids.add(cid)); }
+
+  async addSpeculatorTimestampFix(userId, orchestratorBlockId) {
+    const docPath = `${this.config.FIRESTORE_COLLECTION_SPECULATOR_BLOCKS}/${orchestratorBlockId}`;
+    this.speculatorTimestampFixBatch[docPath] ??= {};
+    this.speculatorTimestampFixBatch[docPath][`users.${userId}.lastVerified`] = new Date();
+    this.speculatorTimestampFixBatch[docPath][`users.${userId}.lastHeldSpeculatorAsset`] = new Date();
+    this.logger.log('TRACE', `[BATCH] Queued speculator timestamp fix for ${userId} in block ${orchestratorBlockId}`);
+    this._scheduleFlush();
+  }
+
+  /**
+   * --- REFACTORED: Deterministic Sharding ---
+   * Groups users into 'part_N' shards based on their CID.
+   * Uses { merge: true } to allow concurrent/sequential writes to fill documents
+   * instead of creating fragmented random docs.
+   */
+  _flushDataBatch(batchData, firestoreBatch, logName) {
+    let count = 0;
+
+    // Use 200 as requested, or fall back to config if higher/defined.
+    // This ensures we fit ~200 users per document to save space/reads.
+    const SHARD_CAPACITY = this.config.TASK_ENGINE_MAX_USERS_PER_SHARD || 200;
+
+    for (const basePath in batchData) {
+      const users = batchData[basePath];
+      const userIds = Object.keys(users);
+      if (!userIds.length) continue;
+
+      // 1. Group updates by Deterministic Shard ID
+      const updatesByShard = {};
+
+      for (const userId of userIds) {
+        const cid = parseInt(userId, 10);
+        let shardId;
+
+        if (!isNaN(cid)) {
+          // Block Logic:
+          // Users are already partitioned into 1M blocks (basePath has blockId).
+          // Within a block (0-999,999), we want shards of size SHARD_CAPACITY.
+          // shardIndex = floor((cid % 1,000,000) / SHARD_CAPACITY)
+          const blockOffset = cid % 1000000;
+          const shardIndex = Math.floor(blockOffset / SHARD_CAPACITY);
+          shardId = `part_${shardIndex}`;
+        } else {
+          // Fallback for non-numeric IDs (unlikely for CIDs)
+          shardId = 'part_misc';
+        }
+
+        if (!updatesByShard[shardId]) {
+          updatesByShard[shardId] = {};
+        }
+        updatesByShard[shardId][userId] = users[userId];
       }

-
-
-
-
-
-    if (this.processedSpeculatorCids.size) {
-      const cids = Array.from(this.processedSpeculatorCids);
-      this.processedSpeculatorCids.clear();
-      const snapshot = await this.db.collection(this.config.PENDING_SPECULATORS_COLLECTION).get();
-      snapshot.forEach(doc => { const docData = doc.data().users || {}; const cidsInDoc = cids.filter(cid => docData[cid]); if (!cidsInDoc.length) return;
-      const delBatch = this.db.batch();
-      const updates = Object.fromEntries(cidsInDoc.map(cid => [`users.${cid}`, FieldValue.delete()]));
-      delBatch.update(doc.ref, updates);
-      delBatch.commit();
-      this.logger.log('INFO', `[BATCH] Deleted ${cidsInDoc.length} CIDs from ${doc.id}`); }); }
+      // 2. Queue Writes with Merge
+      for (const shardId in updatesByShard) {
+        const chunkData = updatesByShard[shardId];
+        const docRef = this.db.collection(`${basePath}/parts`).doc(shardId);

-
-
-
+        // CRITICAL: Use merge: true to append to existing shards
+        firestoreBatch.set(docRef, chunkData, { merge: true });
+        count++;
+      }
+
+      this.logger.log('INFO', `[BATCH] Staged ${userIds.length} ${logName} users into ${Object.keys(updatesByShard).length} deterministic shards for ${basePath}.`);
+
+      delete batchData[basePath];
+    }
+    return count;
+  }
+
+  async flushBatches() {
+    if (this.batchTimeout) { clearTimeout(this.batchTimeout); this.batchTimeout = null; }
+    const firestoreBatch = this.db.batch();
+    let batchOps = 0;
+    batchOps += this._flushDataBatch(this.portfolioBatch, firestoreBatch, 'Portfolio');
+    batchOps += this._flushDataBatch(this.tradingHistoryBatch, firestoreBatch, 'Trade History');
+    for (const docPath in this.timestampBatch) {
+      const timestamps = this.timestampBatch[docPath];
+      if (!Object.keys(timestamps).length) continue;
+      const docRef = this.db.collection(docPath.split('/')[0]).doc('timestamps').collection('users').doc('normal');
+      firestoreBatch.set(docRef, { users: timestamps }, { merge: true });
+      batchOps++;
+      delete this.timestampBatch[docPath];
+    }
+
+    for (const docPath in this.speculatorTimestampFixBatch) {
+      const updates = this.speculatorTimestampFixBatch[docPath];
+      if (!Object.keys(updates).length) continue;
+      firestoreBatch.set(this.db.doc(docPath), updates, { merge: true });
+      batchOps++;
+      delete this.speculatorTimestampFixBatch[docPath];
     }
+
+    for (const shardId in this.usernameMapUpdates) {
+      const updates = this.usernameMapUpdates[shardId];
+      if (updates && Object.keys(updates).length > 0) { firestoreBatch.set( this.db.collection(this.usernameMapCollectionName).doc(shardId), updates, { merge: true } ); batchOps++; this.logger.log('INFO', `[BATCH] Flushing ${Object.keys(updates).length} username updates to ${shardId}.`); } }
+    this.usernameMapUpdates = {};
+
+    if (this.processedSpeculatorCids.size) {
+      const cids = Array.from(this.processedSpeculatorCids);
+      this.processedSpeculatorCids.clear();
+      const snapshot = await this.db.collection(this.config.PENDING_SPECULATORS_COLLECTION).get();
+      snapshot.forEach(doc => { const docData = doc.data().users || {}; const cidsInDoc = cids.filter(cid => docData[cid]); if (!cidsInDoc.length) return;
+      const delBatch = this.db.batch();
+      const updates = Object.fromEntries(cidsInDoc.map(cid => [`users.${cid}`, FieldValue.delete()]));
+      delBatch.update(doc.ref, updates);
+      delBatch.commit();
+      this.logger.log('INFO', `[BATCH] Deleted ${cidsInDoc.length} CIDs from ${doc.id}`); }); }
+
+    if (batchOps) await firestoreBatch.commit();
+    await this.headerManager.flushPerformanceUpdates();
+    this.logger.log('INFO', '[BATCH] All batches flushed successfully.');
+  }
 }

-module.exports = { FirestoreBatchManager };
+module.exports = { FirestoreBatchManager };
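Notes on the new sharding scheme: `_flushDataBatch` now derives the target shard document from the CID itself instead of writing to randomly fragmented documents. The sketch below is illustrative only; `shardIdFor` and the sample CIDs are not part of the package, but the arithmetic mirrors the diff above (shard index = floor((cid mod 1,000,000) / SHARD_CAPACITY), with `part_misc` as the fallback for non-numeric IDs).

    // Illustrative sketch of the shard selection used in _flushDataBatch,
    // assuming the default SHARD_CAPACITY of 200 users per document.
    function shardIdFor(userId, shardCapacity = 200) {
      const cid = parseInt(userId, 10);
      if (isNaN(cid)) return 'part_misc';        // fallback used for non-numeric IDs
      const blockOffset = cid % 1000000;         // position inside the 1M-user block
      return `part_${Math.floor(blockOffset / shardCapacity)}`;
    }

    shardIdFor('1000199'); // 'part_0'    (offset 199)
    shardIdFor('1000200'); // 'part_1'    (offset 200)
    shardIdFor('1999999'); // 'part_4999' (last shard of the block)

Because the same CID always maps to the same `part_N` document and writes use `{ merge: true }`, repeated flushes keep filling existing shard documents rather than creating new ones, which is the fragmentation fix named in the file header. The username map uses a separate, coarser split of ten documents (`cid_map_shard_0` through `cid_map_shard_9`, via `Math.floor(parseInt(cid) / 10000) % 10` in `_getUsernameShardId`).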
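`_scheduleFlush` no longer relies only on the `TASK_ENGINE_FLUSH_INTERVAL_MS` timer: once `_estimateBatchSize()` reports roughly 400 staged document writes, it flushes immediately. A plausible motivation, not stated in the diff, is staying under Firestore's 500-writes-per-batch limit with headroom for the timestamp, username-map, and speculator-fix writes added during the same flush. A minimal usage sketch follows; the collection names, the require path, and the `logger`/`headerManager` stubs are assumptions for illustration, not values shipped with the package.

    // Hypothetical wiring; only the config keys are taken from the code above.
    const { Firestore } = require('@google-cloud/firestore');
    // Path follows the file listed in the summary; adjust to how the module is actually exposed.
    const { FirestoreBatchManager } = require('bulltrackers-module/functions/task-engine/utils/firestore_batch_manager');

    const db = new Firestore();
    const logger = { log: (level, msg, meta) => console.log(level, msg, meta ?? '') }; // minimal stand-in
    const headerManager = { flushPerformanceUpdates: async () => {} };                 // minimal stand-in

    const manager = new FirestoreBatchManager(db, headerManager, logger, {
      TASK_ENGINE_FLUSH_INTERVAL_MS: 5000,       // timer fallback used by _scheduleFlush
      TASK_ENGINE_MAX_USERS_PER_SHARD: 200,      // shard capacity used by _flushDataBatch
      HISTORY_CACHE_TTL_MS: 600000,              // 10-minute history fetch cache
      FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS: 'portfolios_normal',        // illustrative names
      FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS: 'portfolios_speculator',
      FIRESTORE_COLLECTION_NORMAL_HISTORY: 'history_normal',
      FIRESTORE_COLLECTION_SPECULATOR_HISTORY: 'history_speculator',
      FIRESTORE_COLLECTION_USERNAME_MAP: 'username_map',
      FIRESTORE_COLLECTION_SPECULATOR_BLOCKS: 'speculator_blocks',
      PENDING_SPECULATORS_COLLECTION: 'pending_speculators',
    });

    (async () => {
      // Each add* call goes through _scheduleFlush(): the batch commits once ~400 staged
      // writes accumulate or when the interval timer fires, whichever comes first.
      await manager.addToPortfolioBatch('1000201', 'block_1', '2024-06-01', { cash: 100 }, 'normal');
      await manager.flushBatches(); // explicit flush at the end of an invocation
    })();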
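`checkAndSetHistoryFetched` is a check-and-mark gate over the cross-invocation cache: the first call for a user inside the 10-minute `HISTORY_CACHE_TTL_MS` window returns `false` (the caller should fetch), and subsequent calls return `true` (skip the fetch). A sketch of the intended call pattern; `fetchTradingHistory` is a hypothetical stand-in for whatever the task engine uses to pull history, not an export of this package.

    // Hypothetical caller showing the check-then-fetch pattern.
    async function ensureTradingHistory(manager, userId, blockId, date, userType) {
      if (manager.checkAndSetHistoryFetched(userId)) {
        return; // already fetched within HISTORY_CACHE_TTL_MS; skip the external call
      }
      const history = await fetchTradingHistory(userId); // assumed external fetch
      await manager.addToTradingHistoryBatch(userId, blockId, date, history, userType);
    }

Note that the method marks the user as fetched before the caller's fetch actually succeeds, so a failed fetch is not retried until the TTL expires; callers needing stronger guarantees have to handle that themselves.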