bulltrackers-module 1.0.135 → 1.0.137
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -260,25 +260,48 @@ async function getSpeculatorsToUpdate(dependencies, config) {
   const { dateThreshold, gracePeriodThreshold, speculatorBlocksCollectionName } = config;
   logger.log('INFO','[Core Utils] Getting speculators to update...');
   const updates = [];
+
+  // ⚠️ NEW: Collect per user first
+  const userMap = new Map(); // userId -> { instruments: Set }
+
   try {
     const blocksRef = db.collection(speculatorBlocksCollectionName);
     const snapshot = await blocksRef.get();
-    if (snapshot.empty) {
+    if (snapshot.empty) {
+      logger.log('INFO','[Core Utils] No speculator blocks found.');
+      return [];
+    }
+
     snapshot.forEach(doc => {
       const blockData = doc.data();
       for (const key in blockData) {
-
-
-
-
-
-
-
-
-
+        if (!key.startsWith('users.')) continue;
+        const userId = key.split('.')[1];
+        if (!userId) continue;
+        const userData = blockData[key];
+        const lastVerified = userData.lastVerified?.toDate ? userData.lastVerified.toDate() : new Date(0);
+        const lastHeld = userData.lastHeldSpeculatorAsset?.toDate ? userData.lastHeldSpeculatorAsset.toDate() : new Date(0);
+
+        if (lastVerified < dateThreshold && lastHeld > gracePeriodThreshold) {
+          if (!userMap.has(userId)) {
+            userMap.set(userId, new Set());
+          }
+          if (userData.instruments && Array.isArray(userData.instruments)) {
+            userData.instruments.forEach(id => userMap.get(userId).add(id));
+          }
+        }
       }
     });
-
+
+    // ⚠️ NEW: Return one task per user with ALL instruments
+    for (const [userId, instrumentSet] of userMap) {
+      updates.push({
+        userId,
+        instruments: Array.from(instrumentSet) // ⚠️ Array of all instruments
+      });
+    }
+
+    logger.log('INFO',`[Core Utils] Found ${updates.length} speculator users to update (covering ${[...userMap.values()].reduce((sum, set) => sum + set.size, 0)} total instruments).`);
     return updates;
   } catch (error) {
     logger.log('ERROR','[Core Utils] Error getting speculators to update', { errorMessage: error.message });
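For illustration, a minimal standalone sketch (user and instrument IDs invented for this example, not taken from the package) of how the new userMap aggregation collapses multiple block entries for the same user into one task with a de-duplicated instruments array:

// Hypothetical block entries; real data comes from the speculator blocks collection.
const blockEntries = [
  { userId: '123', instruments: [1001, 1002] },
  { userId: '123', instruments: [1002, 1003] }, // same user seen in another block
  { userId: '456', instruments: [2001] },
];

const userMap = new Map(); // userId -> Set of instrument IDs
for (const { userId, instruments } of blockEntries) {
  if (!userMap.has(userId)) userMap.set(userId, new Set());
  instruments.forEach(id => userMap.get(userId).add(id));
}

const updates = [...userMap].map(([userId, set]) => ({ userId, instruments: Array.from(set) }));
console.log(updates);
// [ { userId: '123', instruments: [ 1001, 1002, 1003 ] },
//   { userId: '456', instruments: [ 2001 ] } ]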
@@ -40,100 +40,104 @@ async function lookupUsernames(cids, { logger, headerManager, proxyManager }, {
 
 // --- START MODIFICATION: Added historyFetchedForUser argument ---
 async function handleUpdate(task, taskId, { logger, headerManager, proxyManager, db, batchManager }, config, username, historyFetchedForUser) {
-  //
-
-
-  const
-    ?
-    :
+  const { userId, instruments, instrumentId, userType } = task; // ⚠️ Now supports both
+
+  // ⚠️ Support both old (instrumentId) and new (instruments array) format
+  const instrumentsToProcess = userType === 'speculator'
+    ? (instruments || [instrumentId]) // New format or fallback to old
+    : [undefined]; // Normal users don't have instruments
 
-  // --- MODIFICATION: Moved historyUrl definition inside conditional ---
   const today = new Date().toISOString().slice(0, 10);
   const portfolioBlockId = `${Math.floor(parseInt(userId) / 1000000)}M`;
-
-  // --- MODIFICATION: Select history header only if needed ---
+
   let portfolioHeader = await headerManager.selectHeader();
-  let historyHeader = null;
+  let historyHeader = null;
   if (!portfolioHeader) throw new Error("Could not select portfolio header.");
-
-  let
-
+
+  let wasHistorySuccess = false, isPrivate = false;
+
   try {
-    //
+    // Fetch history ONCE per user
     const promisesToRun = [];
-
-
-    promisesToRun.push(proxyManager.fetch(portfolioUrl, { headers: portfolioHeader.header }));
-
-    // 2. Conditionally fetch history
-    // let fetchHistory = false; // <-- FIX: This line is removed
+    let fetchHistory = false;
+
     if (!historyFetchedForUser.has(userId)) {
-      historyHeader = await headerManager.selectHeader();
-      if (
-
-      historyFetchedForUser.add(userId);
-      } else {
-        fetchHistory = true; // <-- FIX: Assigns to outer scope variable
-        historyFetchedForUser.add(userId); // Mark as fetched for this batch
+      historyHeader = await headerManager.selectHeader();
+      if (historyHeader) {
+        fetchHistory = true;
+        historyFetchedForUser.add(userId);
         const historyUrl = `${config.ETORO_API_USERSTATS_URL}${username}/trades/oneYearAgo?CopyAsAsset=true`;
         promisesToRun.push(proxyManager.fetch(historyUrl, { headers: historyHeader.header }));
       }
     }
-
-
-
-
-
-    const historyRes = fetchHistory ? results[1] : null; // History is only index 1 if we fetched it
-
-    // --- Process Portfolio (results[0]) ---
-    if (portfolioRes.status === 'fulfilled' && portfolioRes.value.ok) {
-      const body = await portfolioRes.value.text();
-      if (body.includes("user is PRIVATE")) isPrivate = true;
-      else { wasPortfolioSuccess = true; await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, JSON.parse(body), userType, instrumentId); }
-    } else {
-      let errMsg = portfolioRes.status === 'rejected' ? portfolioRes.reason.message : `API status ${portfolioRes.value.status}`;
-      let rawText = portfolioRes.value?.text ? await portfolioRes.value.text() : 'N/A';
-      logger.log('WARN', `[UPDATE] Portfolio fetch failed for ${userId}`, { error: errMsg, proxyResponse: rawText });
-    }
-
-    // --- Process History (results[1], if it exists) ---
-    if (fetchHistory && historyRes) { // Check if we ran this promise
+
+    // Process history result (if fetched)
+    if (fetchHistory) {
+      const results = await Promise.allSettled(promisesToRun);
+      const historyRes = results[0];
       if (historyRes.status === 'fulfilled' && historyRes.value.ok) {
         const data = await historyRes.value.json();
         wasHistorySuccess = true;
         await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType);
-      } else {
-        // History fetch failed
-        let errMsg = historyRes.status === 'rejected' ? historyRes.reason.message : `API status ${historyRes.value.status}`;
-        let rawText = historyRes.value?.text ? await historyRes.value.text() : 'N/A';
-        logger.log('WARN', `[UPDATE] History fetch failed for ${userId} (${username})`, { error: errMsg, proxyResponse: rawText });
       }
     }
-
-
-
-
+
+    // Now fetch portfolio for EACH instrument (speculators) or once (normal)
+    for (const instrumentId of instrumentsToProcess) {
+      const portfolioUrl = userType === 'speculator'
+        ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instrumentId}`
+        : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
+
+      let wasPortfolioSuccess = false;
+
+      const portfolioRes = await proxyManager.fetch(portfolioUrl, { headers: portfolioHeader.header });
+
+      if (portfolioRes.ok) {
+        const body = await portfolioRes.text();
+        if (body.includes("user is PRIVATE")) {
+          isPrivate = true;
+          break; // Stop processing this user
+        } else {
+          wasPortfolioSuccess = true;
+          await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, JSON.parse(body), userType, instrumentId);
+        }
+      }
+
+      headerManager.updatePerformance(portfolioHeader.id, wasPortfolioSuccess);
+
+      // Re-select header for next instrument
+      if (instrumentsToProcess.length > 1 && instrumentId !== instrumentsToProcess[instrumentsToProcess.length - 1]) {
+        portfolioHeader = await headerManager.selectHeader();
+      }
+    }
+
+    // Handle private user
     if (isPrivate) {
       logger.log('WARN', `User ${userId} is private. Removing from updates.`);
-
-
-
-
-
+      // Delete for ALL instruments
+      for (const instrumentId of instrumentsToProcess) {
+        await batchManager.deleteFromTimestampBatch(userId, userType, instrumentId);
+      }
+      const blockCountsRef = db.doc(config.FIRESTORE_DOC_SPECULATOR_BLOCK_COUNTS);
+      for (const instrumentId of instrumentsToProcess) {
+        const incrementField = `counts.${instrumentId}_${Math.floor(userId/1e6)*1e6}`;
+        await blockCountsRef.set({ [incrementField]: FieldValue.increment(-1) }, { merge: true });
+      }
       return;
     }
-
-
+
+    // Update timestamps
+    for (const instrumentId of instrumentsToProcess) {
       await batchManager.updateUserTimestamp(userId, userType, instrumentId);
-    if (userType === 'speculator') await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6));
     }
-
-
+    if (userType === 'speculator') {
+      await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6));
+    }
 
-
-    if (
-
+  } finally {
+    if (historyHeader && fetchHistory) {
+      headerManager.updatePerformance(historyHeader.id, wasHistorySuccess);
+    }
   }
 }
 
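For reference, a minimal sketch (task objects invented for illustration) of how the new instrumentsToProcess resolution keeps the old single-instrumentId task format working alongside the new instruments array:

// Mirrors the destructuring at the top of handleUpdate; these task shapes are hypothetical.
const resolveInstruments = ({ instruments, instrumentId, userType }) =>
  userType === 'speculator'
    ? (instruments || [instrumentId]) // new format, or fall back to the old one
    : [undefined];                    // normal users carry no instrument

console.log(resolveInstruments({ userType: 'speculator', instruments: [1001, 1002] })); // [ 1001, 1002 ]
console.log(resolveInstruments({ userType: 'speculator', instrumentId: 1001 }));        // [ 1001 ]
console.log(resolveInstruments({ userType: 'normal' }));                                // [ undefined ]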
@@ -59,14 +59,30 @@ async function handleVerify(task, taskId, { db, logger, ...dependencies }, confi
     }
   }
 
+  // --- START FIX: MODIFIED FOR SHARDING ---
   if (Object.keys(usernameMap).length) {
-
-
-
+    // Group updates by shard
+    const shardedUpdates = {};
+    for (const cid in usernameMap) {
+      // Re-implement the sharding logic here
+      const shardId = `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;
+      if (!shardedUpdates[shardId]) {
+        shardedUpdates[shardId] = {};
+      }
+      shardedUpdates[shardId][cid] = usernameMap[cid];
+    }
+
+    // Write each shard to its own document
+    for (const shardId in shardedUpdates) {
+      const mapRef = db.collection(config.FIRESTORE_COLLECTION_USERNAME_MAP).doc(shardId);
+      batch.set(mapRef, shardedUpdates[shardId], { merge: true });
+    }
+    logger.log('INFO', `[VERIFY] Staging username updates across ${Object.keys(shardedUpdates).length} shards.`);
   }
+  // --- END FIX ---
 
   await batch.commit();
   if (validUserCount) logger.log('INFO', `[VERIFY] Verified and stored ${validUserCount} new ${userType} users.`);
 }
 
-module.exports = { handleVerify };
+module.exports = { handleVerify };
@@ -31,6 +31,13 @@ class FirestoreBatchManager {
     logger.log('INFO', 'FirestoreBatchManager initialized.');
   }
 
+  // --- START FIX: ADDED SHARDING HELPER ---
+  _getUsernameShardId(cid) {
+    // Shard across 10 documents (supports ~200k users assuming 50 bytes/user)
+    return `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;
+  }
+  // --- END FIX ---
+
   _scheduleFlush() {
     if (!this.batchTimeout) this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS);
   }
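For a feel of the shard layout, a small standalone sketch (cids invented for illustration) of the formula used both by _getUsernameShardId and inline in handleVerify: successive 10,000-cid blocks rotate through ten fixed shard documents.

// Same expression as _getUsernameShardId; the cids below are made up.
const getUsernameShardId = cid => `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;

console.log(getUsernameShardId('12345678')); // cid_map_shard_4
console.log(getUsernameShardId('12355678')); // cid_map_shard_5  (next 10k block -> next shard)
console.log(getUsernameShardId('12445678')); // cid_map_shard_4  (wraps around after 10 blocks)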
@@ -40,6 +47,7 @@ class FirestoreBatchManager {
     this.usernameMap.clear();
     this.logger.log('INFO', '[BATCH] Refreshing username map from Firestore...');
     try {
+      // This correctly gets ALL documents (shards) in the collection
       const snapshot = await this.db.collection(this.usernameMapCollectionName).get();
       snapshot.forEach(doc => {
         const data = doc.data();
@@ -54,14 +62,23 @@ class FirestoreBatchManager {
 
   getUsername(cid) { return this.usernameMap.get(String(cid)); }
 
+  // --- START FIX: MODIFIED FOR SHARDING ---
   addUsernameMapUpdate(cid, username) {
     if (!username) return;
     const cidStr = String(cid);
     this.usernameMap.set(cidStr, username);
-
-
+
+    // Organize updates by shard ID
+    const shardId = this._getUsernameShardId(cidStr);
+    if (!this.usernameMapUpdates[shardId]) {
+      this.usernameMapUpdates[shardId] = {};
+    }
+    this.usernameMapUpdates[shardId][cidStr] = { username };
+
+    this.logger.log('TRACE', `[BATCH] Queued username update for ${cidStr} in ${shardId}.`);
     this._scheduleFlush();
   }
+  // --- END FIX ---
 
   async addToTradingHistoryBatch(userId, blockId, date, historyData, userType) {
     const collection = userType === 'speculator' ? this.speculatorHistoryCollectionName : this.normalHistoryCollectionName;
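For illustration, a minimal sketch (made-up cids and usernames) of the per-shard shape that usernameMapUpdates takes after a few addUsernameMapUpdate calls; this is the structure flushBatches later writes with one merge-set per shard document:

// Standalone version of the queuing logic; not part of the package.
const getUsernameShardId = cid => `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;

const usernameMapUpdates = {};
for (const [cid, username] of [['12345678', 'alice'], ['98765432', 'bob']]) {
  const shardId = getUsernameShardId(cid);
  if (!usernameMapUpdates[shardId]) usernameMapUpdates[shardId] = {};
  usernameMapUpdates[shardId][cid] = { username };
}

console.log(usernameMapUpdates);
// { cid_map_shard_4: { '12345678': { username: 'alice' } },
//   cid_map_shard_6: { '98765432': { username: 'bob' } } }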
@@ -151,12 +168,22 @@ class FirestoreBatchManager {
       delete this.speculatorTimestampFixBatch[docPath];
     }
 
-
-
-
-
-
+    // --- START FIX: MODIFIED FOR SHARDING ---
+    // Loop over each shardId in the updates object
+    for (const shardId in this.usernameMapUpdates) {
+      const updates = this.usernameMapUpdates[shardId];
+      if (updates && Object.keys(updates).length > 0) {
+        firestoreBatch.set(
+          this.db.collection(this.usernameMapCollectionName).doc(shardId),
+          updates,
+          { merge: true }
+        );
+        batchOps++;
+        this.logger.log('INFO', `[BATCH] Flushing ${Object.keys(updates).length} username updates to ${shardId}.`);
+      }
     }
+    this.usernameMapUpdates = {}; // Clear updates after staging them
+    // --- END FIX ---
 
     if (this.processedSpeculatorCids.size) {
       const cids = Array.from(this.processedSpeculatorCids);
@@ -180,4 +207,4 @@ class FirestoreBatchManager {
   }
 }
 
-module.exports = { FirestoreBatchManager };
+module.exports = { FirestoreBatchManager };