bulltrackers-module 1.0.152 → 1.0.154

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/functions/appscript-api/index.js +8 -38
  2. package/functions/computation-system/helpers/computation_pass_runner.js +38 -183
  3. package/functions/computation-system/helpers/orchestration_helpers.js +105 -326
  4. package/functions/computation-system/utils/data_loader.js +38 -133
  5. package/functions/computation-system/utils/schema_capture.js +7 -41
  6. package/functions/computation-system/utils/utils.js +37 -124
  7. package/functions/core/utils/firestore_utils.js +8 -46
  8. package/functions/core/utils/intelligent_header_manager.js +26 -128
  9. package/functions/core/utils/intelligent_proxy_manager.js +33 -171
  10. package/functions/core/utils/pubsub_utils.js +7 -24
  11. package/functions/dispatcher/helpers/dispatch_helpers.js +9 -30
  12. package/functions/dispatcher/index.js +7 -30
  13. package/functions/etoro-price-fetcher/helpers/handler_helpers.js +12 -80
  14. package/functions/fetch-insights/helpers/handler_helpers.js +18 -70
  15. package/functions/generic-api/helpers/api_helpers.js +28 -167
  16. package/functions/generic-api/index.js +49 -188
  17. package/functions/invalid-speculator-handler/helpers/handler_helpers.js +10 -47
  18. package/functions/orchestrator/helpers/discovery_helpers.js +1 -5
  19. package/functions/orchestrator/index.js +1 -6
  20. package/functions/price-backfill/helpers/handler_helpers.js +13 -69
  21. package/functions/social-orchestrator/helpers/orchestrator_helpers.js +5 -37
  22. package/functions/social-task-handler/helpers/handler_helpers.js +29 -186
  23. package/functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers.js +19 -78
  24. package/functions/task-engine/handler_creator.js +2 -8
  25. package/functions/task-engine/helpers/update_helpers.js +74 -100
  26. package/functions/task-engine/helpers/verify_helpers.js +11 -56
  27. package/functions/task-engine/utils/firestore_batch_manager.js +29 -65
  28. package/functions/task-engine/utils/task_engine_utils.js +14 -37
  29. package/index.js +45 -43
  30. package/package.json +1 -1
@@ -1,6 +1,8 @@
1
1
  /*
2
2
  * FILENAME: CloudFunctions/NpmWrappers/bulltrackers-module/functions/task-engine/helpers/update_helpers.js
3
3
  * (MODIFIED: To conditionally fetch history API once per user per batch)
4
+ * (MODIFIED: `lookupUsernames` runs batches in parallel)
5
+ * (MODIFIED: `handleUpdate` fetches history and all portfolios in parallel)
4
6
  */
5
7
 
6
8
  /**
@@ -12,136 +14,108 @@
12
14
  * --- MODIFIED: Conditionally fetches history only once per user per batch. ---
13
15
  */
14
16
  const { FieldValue } = require('@google-cloud/firestore');
17
+ const pLimit = require('p-limit'); // <--- IMPORT p-limit
15
18
 
16
- async function lookupUsernames(cids, { logger, headerManager, proxyManager }, { USERNAME_LOOKUP_BATCH_SIZE, ETORO_API_RANKINGS_URL }) {
19
+ /**
20
+ * (MODIFIED: Runs lookup batches in parallel)
21
+ */
22
+ async function lookupUsernames(cids, { logger, headerManager, proxyManager }, config) {
17
23
  if (!cids?.length) return [];
18
24
  logger.log('INFO', `[lookupUsernames] Looking up usernames for ${cids.length} CIDs.`);
19
- const allUsers = [];
20
- for (let i = 0; i < cids.length; i += USERNAME_LOOKUP_BATCH_SIZE) {
21
- const batch = cids.slice(i, i + USERNAME_LOOKUP_BATCH_SIZE).map(Number);
25
+ const limit = pLimit(config.USERNAME_LOOKUP_CONCURRENCY || 5);
26
+ const { USERNAME_LOOKUP_BATCH_SIZE, ETORO_API_RANKINGS_URL } = config;
27
+ const batches = [];
28
+ for (let i = 0; i < cids.length; i += USERNAME_LOOKUP_BATCH_SIZE) { batches.push(cids.slice(i, i + USERNAME_LOOKUP_BATCH_SIZE).map(Number)); }
29
+
30
+ const batchPromises = batches.map(batch => limit(async () => {
22
31
  const header = await headerManager.selectHeader();
23
- if (!header) { logger.log('ERROR', '[lookupUsernames] Could not select a header.'); continue; }
32
+ if (!header) { logger.log('ERROR', '[lookupUsernames] Could not select a header.'); return null; }
24
33
  let success = false;
25
34
  try {
26
- const res = await proxyManager.fetch(`${ETORO_API_RANKINGS_URL}?Period=LastTwoYears`, {
27
- method: 'POST', headers: { ...header.header, 'Content-Type': 'application/json' }, body: JSON.stringify(batch)
28
- });
35
+ const res = await proxyManager.fetch(`${ETORO_API_RANKINGS_URL}?Period=LastTwoYears`, { method: 'POST', headers: { ...header.header, 'Content-Type': 'application/json' }, body: JSON.stringify(batch) });
29
36
  if (!res.ok) throw new Error(`API status ${res.status}`);
30
37
  const data = await res.json();
31
- if (Array.isArray(data)) allUsers.push(...data);
32
38
  success = true;
33
- } catch (err) {
34
- logger.log('WARN', `[lookupUsernames] Failed batch`, { error: err.message });
35
- } finally { headerManager.updatePerformance(header.id, success); }
36
- }
39
+ logger.log('DEBUG', 'Looked up usernames', { batch: batch.slice(0, 5) });
40
+ return data;
41
+ } catch (err) { logger.log('WARN', `[lookupUsernames] Failed batch`, { error: err.message }); return null;
42
+ } finally { headerManager.updatePerformance(header.id, success); } }));
43
+ const results = await Promise.allSettled(batchPromises);
44
+ const allUsers = results.filter(r => r.status === 'fulfilled' && r.value && Array.isArray(r.value)).flatMap(r => r.value);
37
45
  logger.log('INFO', `[lookupUsernames] Found ${allUsers.length} public users out of ${cids.length}.`);
38
46
  return allUsers;
39
47
  }
40
48
 
41
- // --- START MODIFICATION: Added historyFetchedForUser argument ---
49
+
50
+ /**
51
+ * (MODIFIED: Fetches history and all portfolios in parallel)
52
+ */
42
53
  async function handleUpdate(task, taskId, { logger, headerManager, proxyManager, db, batchManager }, config, username, historyFetchedForUser) {
43
- const { userId, instruments, instrumentId, userType } = task; // ⚠️ Now supports both
44
-
45
- // ⚠️ Support both old (instrumentId) and new (instruments array) format
46
- const instrumentsToProcess = userType === 'speculator'
47
- ? (instruments || [instrumentId]) // New format or fallback to old
48
- : [undefined]; // Normal users don't have instruments
49
-
54
+ const { userId, instruments, instrumentId, userType } = task;
55
+ const instrumentsToProcess = userType === 'speculator' ? (instruments || [instrumentId]) : [undefined];
50
56
  const today = new Date().toISOString().slice(0, 10);
51
57
  const portfolioBlockId = `${Math.floor(parseInt(userId) / 1000000)}M`;
52
-
53
- let portfolioHeader = await headerManager.selectHeader();
58
+
54
59
  let historyHeader = null;
55
- if (!portfolioHeader) throw new Error("Could not select portfolio header.");
56
-
57
- let wasHistorySuccess = false, isPrivate = false;
58
-
59
- // --- FIX: 'fetchHistory' MUST be declared here, outside the 'try' block ---
60
- let fetchHistory = false;
61
-
60
+ let wasHistorySuccess = false;
61
+ let historyFetchPromise = null;
62
+ let isPrivate = false;
63
+
62
64
  try {
63
- // Fetch history ONCE per user
64
- const promisesToRun = [];
65
- // --- 'fetchHistory' is no longer declared here ---
66
-
65
+ // --- 1. Prepare History Fetch (if needed) ---
67
66
  if (!historyFetchedForUser.has(userId)) {
68
67
  historyHeader = await headerManager.selectHeader();
69
- if (historyHeader) {
70
- fetchHistory = true; // This now sets the outer variable
71
- historyFetchedForUser.add(userId);
68
+ if (historyHeader) { historyFetchedForUser.add(userId);
72
69
  const historyUrl = `${config.ETORO_API_USERSTATS_URL}${username}/trades/oneYearAgo?CopyAsAsset=true`;
73
- promisesToRun.push(proxyManager.fetch(historyUrl, { headers: historyHeader.header }));
74
- }
75
- }
76
-
77
- // Process history result (if fetched)
78
- if (fetchHistory) {
79
- const results = await Promise.allSettled(promisesToRun);
80
- const historyRes = results[0];
81
- if (historyRes.status === 'fulfilled' && historyRes.value.ok) {
82
- const data = await historyRes.value.json();
83
- wasHistorySuccess = true;
84
- await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType);
85
- }
86
- }
70
+ historyFetchPromise = proxyManager.fetch(historyUrl, { headers: historyHeader.header }); } }
71
+
72
+ // --- 2. Prepare All Portfolio Fetches ---
73
+ const portfolioRequests = [];
74
+ for (const instId of instrumentsToProcess) {
75
+ const portfolioHeader = await headerManager.selectHeader();
76
+ if (!portfolioHeader) throw new Error(`Could not select portfolio header for ${userId}`);
77
+ const portfolioUrl = userType === 'speculator' ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instId}` : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
78
+ portfolioRequests.push({ instrumentId: instId, url: portfolioUrl, header: portfolioHeader, promise: proxyManager.fetch(portfolioUrl, { headers: portfolioHeader.header }) }); }
87
79
 
88
- // Now fetch portfolio for EACH instrument (speculators) or once (normal)
89
- for (const instrumentId of instrumentsToProcess) {
90
- const portfolioUrl = userType === 'speculator'
91
- ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instrumentId}`
92
- : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
93
-
80
+ // --- 3. Execute All API Calls in Parallel ---
81
+ const allPromises = [ ...(historyFetchPromise ? [historyFetchPromise] : []), ...portfolioRequests.map(r => r.promise) ];
82
+ const allResults = await Promise.allSettled(allPromises);
83
+
84
+ // --- 4. Process History Result ---
85
+ let resultIndex = 0;
86
+ if (historyFetchPromise) {
87
+ const historyRes = allResults[resultIndex++];
88
+ if (historyRes.status === 'fulfilled' && historyRes.value.ok) { const data = await historyRes.value.json(); wasHistorySuccess = true; await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType); } }
89
+
90
+ // --- 5. Process Portfolio Results ---
91
+ for (let i = 0; i < portfolioRequests.length; i++) {
92
+ const requestInfo = portfolioRequests[i];
93
+ const portfolioRes = allResults[resultIndex++];
94
94
  let wasPortfolioSuccess = false;
95
-
96
- const portfolioRes = await proxyManager.fetch(portfolioUrl, { headers: portfolioHeader.header });
97
-
98
- if (portfolioRes.ok) {
99
- const body = await portfolioRes.text();
100
- if (body.includes("user is PRIVATE")) {
101
- isPrivate = true;
102
- break; // Stop processing this user
103
- } else {
104
- wasPortfolioSuccess = true;
105
- await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, JSON.parse(body), userType, instrumentId);
106
- }
107
- }
108
-
109
- headerManager.updatePerformance(portfolioHeader.id, wasPortfolioSuccess);
110
-
111
- // Re-select header for next instrument
112
- if (instrumentsToProcess.length > 1 && instrumentId !== instrumentsToProcess[instrumentsToProcess.length - 1]) {
113
- portfolioHeader = await headerManager.selectHeader();
114
- }
95
+ if (portfolioRes.status === 'fulfilled' && portfolioRes.value.ok) {
96
+ const body = await portfolioRes.value.text();
97
+ if (body.includes("user is PRIVATE")) { isPrivate = true; logger.log('WARN', `User ${userId} is private. Removing from updates.`); break;
98
+ } else { wasPortfolioSuccess = true; await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, JSON.parse(body), userType, requestInfo.instrumentId); }
99
+ logger.log('DEBUG', 'Processing portfolio for user', { userId, portfolioUrl: requestInfo.url });
100
+ } else { logger.log('WARN', `Failed to fetch portfolio`, { userId, url: requestInfo.url, error: portfolioRes.reason || `status ${portfolioRes.value?.status}` }); }
101
+ headerManager.updatePerformance(requestInfo.header.id, wasPortfolioSuccess);
115
102
  }
116
-
117
- // Handle private user
103
+
104
+ // --- 6. Handle Private Users & Timestamps ---
118
105
  if (isPrivate) {
119
106
  logger.log('WARN', `User ${userId} is private. Removing from updates.`);
120
- // Delete for ALL instruments
121
- for (const instrumentId of instrumentsToProcess) {
122
- await batchManager.deleteFromTimestampBatch(userId, userType, instrumentId);
123
- }
107
+ for (const instrumentId of instrumentsToProcess) { await batchManager.deleteFromTimestampBatch(userId, userType, instrumentId); }
124
108
  const blockCountsRef = db.doc(config.FIRESTORE_DOC_SPECULATOR_BLOCK_COUNTS);
125
109
  for (const instrumentId of instrumentsToProcess) {
126
110
  const incrementField = `counts.${instrumentId}_${Math.floor(userId/1e6)*1e6}`;
127
- await blockCountsRef.set({ [incrementField]: FieldValue.increment(-1) }, { merge: true });
128
- }
129
- return;
130
- }
131
-
132
- // Update timestamps
133
- for (const instrumentId of instrumentsToProcess) {
134
- await batchManager.updateUserTimestamp(userId, userType, instrumentId);
135
- }
136
- if (userType === 'speculator') {
137
- await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6));
138
- }
139
-
140
- } finally {
141
- if (historyHeader && fetchHistory) {
142
- headerManager.updatePerformance(historyHeader.id, wasHistorySuccess);
111
+ await blockCountsRef.set({ [incrementField]: FieldValue.increment(-1) }, { merge: true }); }
112
+ return;
143
113
  }
144
- }
114
+
115
+ for (const instrumentId of instrumentsToProcess) { await batchManager.updateUserTimestamp(userId, userType, instrumentId); }
116
+ if (userType === 'speculator') { await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6)); }
117
+
118
+ } finally { if (historyHeader) { headerManager.updatePerformance(historyHeader.id, wasHistorySuccess); } }
145
119
  }
146
120
 
147
121
  module.exports = { handleUpdate, lookupUsernames };
@@ -3,27 +3,15 @@ const { FieldValue } = require('@google-cloud/firestore');
3
3
  async function fetchAndVerifyUser(user, { logger, headerManager, proxyManager }, { userType, SPECULATOR_INSTRUMENTS_ARRAY }) {
4
4
  const selectedHeader = await headerManager.selectHeader();
5
5
  if (!selectedHeader) return null;
6
-
7
6
  let wasSuccess = false;
8
- try {
9
- const res = await proxyManager.fetch(`${process.env.ETORO_API_PORTFOLIO_URL}?cid=${user.cid}`, { headers: selectedHeader.header });
10
- if (!res.ok) return null;
11
-
7
+ try { const res = await proxyManager.fetch(`${process.env.ETORO_API_PORTFOLIO_URL}?cid=${user.cid}`, { headers: selectedHeader.header }); if (!res.ok) return null;
12
8
  wasSuccess = true;
13
9
  const portfolioData = await res.json();
14
- if (userType === 'speculator') {
15
- const instruments = portfolioData.AggregatedPositions.map(p => p.InstrumentID)
16
- .filter(id => SPECULATOR_INSTRUMENTS_ARRAY.includes(id));
17
- if (!instruments.length) return null;
18
- return { type: 'speculator', userId: user.cid, isBronze: user.isBronze, username: user.username, updateData: { instruments, lastVerified: new Date(), lastHeldSpeculatorAsset: new Date() } };
19
- }
10
+ if (userType === 'speculator') { const instruments = portfolioData.AggregatedPositions.map(p => p.InstrumentID) .filter(id => SPECULATOR_INSTRUMENTS_ARRAY.includes(id));
11
+ if (!instruments.length) return null; return { type: 'speculator', userId: user.cid, isBronze: user.isBronze, username: user.username, updateData: { instruments, lastVerified: new Date(), lastHeldSpeculatorAsset: new Date() } }; }
20
12
  return { type: 'normal', userId: user.cid, isBronze: user.isBronze, username: user.username, updateData: { lastVerified: new Date() } };
21
- } catch (err) {
22
- logger.log('WARN', `[VERIFY] Error processing user ${user.cid}`, { errorMessage: err.message });
23
- return null;
24
- } finally {
25
- if (selectedHeader) headerManager.updatePerformance(selectedHeader.id, wasSuccess);
26
- }
13
+ } catch (err) { logger.log('WARN', `[VERIFY] Error processing user ${user.cid}`, { errorMessage: err.message }); return null;
14
+ } finally { if (selectedHeader) headerManager.updatePerformance(selectedHeader.id, wasSuccess); }
27
15
  }
28
16
 
29
17
  async function handleVerify(task, taskId, { db, logger, ...dependencies }, config) {
@@ -31,56 +19,23 @@ async function handleVerify(task, taskId, { db, logger, ...dependencies }, confi
31
19
  const batch = db.batch();
32
20
  const speculatorUpdates = {}, normalUpdates = {}, bronzeStates = {}, usernameMap = {};
33
21
  const specSet = new Set(config.SPECULATOR_INSTRUMENTS_ARRAY);
34
-
35
22
  const results = await Promise.allSettled(users.map(u => fetchAndVerifyUser(u, dependencies, { ...config, userType })));
36
-
37
23
  let validUserCount = 0;
38
24
  results.forEach(r => {
39
- if (r.status === 'fulfilled' && r.value) {
40
- const d = r.value;
41
- usernameMap[d.userId] = { username: d.username };
42
- bronzeStates[d.userId] = d.isBronze;
43
- validUserCount++;
44
- if (d.type === 'speculator') speculatorUpdates[`users.${d.userId}`] = d.updateData;
45
- else normalUpdates[`users.${d.userId}`] = d.updateData;
46
- }
47
- });
48
-
25
+ if (r.status === 'fulfilled' && r.value) { const d = r.value; usernameMap[d.userId] = { username: d.username }; bronzeStates[d.userId] = d.isBronze; validUserCount++; if (d.type === 'speculator') speculatorUpdates[`users.${d.userId}`] = d.updateData; else normalUpdates[`users.${d.userId}`] = d.updateData; } });
49
26
  if (Object.keys(speculatorUpdates).length || Object.keys(normalUpdates).length) {
50
27
  const blockRef = db.collection(userType === 'speculator' ? config.FIRESTORE_COLLECTION_SPECULATOR_BLOCKS : config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS).doc(String(blockId));
51
28
  batch.set(blockRef, userType === 'speculator' ? speculatorUpdates : normalUpdates, { merge: true });
52
29
  const bronzeRef = db.collection(userType === 'speculator' ? config.FIRESTORE_COLLECTION_BRONZE_SPECULATORS : config.FIRESTORE_COLLECTION_BRONZE_NORMAL).doc(String(blockId));
53
30
  batch.set(bronzeRef, bronzeStates, { merge: true });
31
+ if (validUserCount) { const countsRef = db.doc(userType === 'speculator' ? config.FIRESTORE_DOC_SPECULATOR_BLOCK_COUNTS : config.FIRESTORE_DOC_BLOCK_COUNTS); const field = userType === 'speculator' ? `counts.${instrument}_${blockId}` : `counts.${blockId}`; batch.set(countsRef, { [field]: FieldValue.increment(validUserCount) }, { merge: true }); } }
54
32
 
55
- if (validUserCount) {
56
- const countsRef = db.doc(userType === 'speculator' ? config.FIRESTORE_DOC_SPECULATOR_BLOCK_COUNTS : config.FIRESTORE_DOC_BLOCK_COUNTS);
57
- const field = userType === 'speculator' ? `counts.${instrument}_${blockId}` : `counts.${blockId}`;
58
- batch.set(countsRef, { [field]: FieldValue.increment(validUserCount) }, { merge: true });
59
- }
60
- }
61
-
62
- // --- START FIX: MODIFIED FOR SHARDING ---
63
33
  if (Object.keys(usernameMap).length) {
64
- // Group updates by shard
65
34
  const shardedUpdates = {};
66
- for (const cid in usernameMap) {
67
- // Re-implement the sharding logic here
68
- const shardId = `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;
69
- if (!shardedUpdates[shardId]) {
70
- shardedUpdates[shardId] = {};
71
- }
72
- shardedUpdates[shardId][cid] = usernameMap[cid];
73
- }
74
-
75
- // Write each shard to its own document
76
- for (const shardId in shardedUpdates) {
77
- const mapRef = db.collection(config.FIRESTORE_COLLECTION_USERNAME_MAP).doc(shardId);
78
- batch.set(mapRef, shardedUpdates[shardId], { merge: true });
79
- }
80
- logger.log('INFO', `[VERIFY] Staging username updates across ${Object.keys(shardedUpdates).length} shards.`);
81
- }
82
- // --- END FIX ---
83
-
35
+ for (const cid in usernameMap) { const shardId = `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;
36
+ if (!shardedUpdates[shardId]) { shardedUpdates[shardId] = {}; } shardedUpdates[shardId][cid] = usernameMap[cid]; }
37
+ for (const shardId in shardedUpdates) { const mapRef = db.collection(config.FIRESTORE_COLLECTION_USERNAME_MAP).doc(shardId); batch.set(mapRef, shardedUpdates[shardId], { merge: true }); }
38
+ logger.log('INFO', `[VERIFY] Staging username updates across ${Object.keys(shardedUpdates).length} shards.`); }
84
39
  await batch.commit();
85
40
  if (validUserCount) logger.log('INFO', `[VERIFY] Verified and stored ${validUserCount} new ${userType} users.`);
86
41
  }
@@ -12,7 +12,6 @@ class FirestoreBatchManager {
12
12
  this.headerManager = headerManager;
13
13
  this.logger = logger;
14
14
  this.config = config;
15
-
16
15
  this.portfolioBatch = {};
17
16
  this.timestampBatch = {};
18
17
  this.tradingHistoryBatch = {};
@@ -21,25 +20,30 @@ class FirestoreBatchManager {
21
20
  this.usernameMapUpdates = {};
22
21
  this.usernameMapLastLoaded = 0;
23
22
  this.processedSpeculatorCids = new Set();
24
-
25
23
  this.usernameMapCollectionName = config.FIRESTORE_COLLECTION_USERNAME_MAP;
26
24
  this.normalHistoryCollectionName = config.FIRESTORE_COLLECTION_NORMAL_HISTORY;
27
25
  this.speculatorHistoryCollectionName = config.FIRESTORE_COLLECTION_SPECULATOR_HISTORY;
28
-
29
26
  this.batchTimeout = null;
30
-
31
27
  logger.log('INFO', 'FirestoreBatchManager initialized.');
32
28
  }
33
29
 
34
- // --- START FIX: ADDED SHARDING HELPER ---
35
- _getUsernameShardId(cid) {
36
- // Shard across 10 documents (supports ~200k users assuming 50 bytes/user)
37
- return `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;
38
- }
39
- // --- END FIX ---
30
+ _getUsernameShardId(cid) { return `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`; }
31
+
32
+ // _scheduleFlush() { if (!this.batchTimeout) this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS); } Old version
40
33
 
41
34
  _scheduleFlush() {
42
- if (!this.batchTimeout) this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS);
35
+ const totalOps = this._estimateBatchSize();
36
+ if (totalOps >= 400) { this.flushBatches(); return; }
37
+ if (!this.batchTimeout) { this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS); }
38
+ }
39
+
40
+ _estimateBatchSize() {
41
+ let ops = 0;
42
+ ops += Object.keys(this.portfolioBatch).length;
43
+ ops += Object.keys(this.tradingHistoryBatch).length;
44
+ ops += Object.keys(this.timestampBatch).length;
45
+ ops += Object.keys(this.speculatorTimestampFixBatch).length;
46
+ return ops;
43
47
  }
44
48
 
45
49
  async loadUsernameMap() {
@@ -47,38 +51,25 @@ class FirestoreBatchManager {
47
51
  this.usernameMap.clear();
48
52
  this.logger.log('INFO', '[BATCH] Refreshing username map from Firestore...');
49
53
  try {
50
- // This correctly gets ALL documents (shards) in the collection
51
54
  const snapshot = await this.db.collection(this.usernameMapCollectionName).get();
52
- snapshot.forEach(doc => {
53
- const data = doc.data();
54
- for (const cid in data) if (data[cid]?.username) this.usernameMap.set(String(cid), data[cid].username);
55
- });
55
+ snapshot.forEach(doc => { const data = doc.data(); for (const cid in data) if (data[cid]?.username) this.usernameMap.set(String(cid), data[cid].username); });
56
56
  this.usernameMapLastLoaded = Date.now();
57
57
  this.logger.log('INFO', `[BATCH] Loaded ${this.usernameMap.size} usernames.`);
58
- } catch (e) {
59
- this.logger.log('ERROR', '[BATCH] Failed to load username map.', { errorMessage: e.message });
60
- }
58
+ } catch (e) { this.logger.log('ERROR', '[BATCH] Failed to load username map.', { errorMessage: e.message }); }
61
59
  }
62
60
 
63
61
  getUsername(cid) { return this.usernameMap.get(String(cid)); }
64
62
 
65
- // --- START FIX: MODIFIED FOR SHARDING ---
66
63
  addUsernameMapUpdate(cid, username) {
67
64
  if (!username) return;
68
65
  const cidStr = String(cid);
69
66
  this.usernameMap.set(cidStr, username);
70
-
71
- // Organize updates by shard ID
72
67
  const shardId = this._getUsernameShardId(cidStr);
73
- if (!this.usernameMapUpdates[shardId]) {
74
- this.usernameMapUpdates[shardId] = {};
75
- }
68
+ if (!this.usernameMapUpdates[shardId]) { this.usernameMapUpdates[shardId] = {}; }
76
69
  this.usernameMapUpdates[shardId][cidStr] = { username };
77
-
78
70
  this.logger.log('TRACE', `[BATCH] Queued username update for ${cidStr} in ${shardId}.`);
79
71
  this._scheduleFlush();
80
72
  }
81
- // --- END FIX ---
82
73
 
83
74
  async addToTradingHistoryBatch(userId, blockId, date, historyData, userType) {
84
75
  const collection = userType === 'speculator' ? this.speculatorHistoryCollectionName : this.normalHistoryCollectionName;
@@ -108,10 +99,7 @@ class FirestoreBatchManager {
108
99
  deleteFromTimestampBatch(userId, userType, instrumentId) {
109
100
  const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
110
101
  const docPath = `${collection}/${userType === 'speculator' ? 'speculators' : 'normal'}`;
111
- if (this.timestampBatch[docPath]) {
112
- const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId;
113
- delete this.timestampBatch[docPath][key];
114
- }
102
+ if (this.timestampBatch[docPath]) { const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId; delete this.timestampBatch[docPath][key]; }
115
103
  }
116
104
 
117
105
  addProcessedSpeculatorCids(cids) { cids.forEach(cid => this.processedSpeculatorCids.add(cid)); }
@@ -131,11 +119,7 @@ class FirestoreBatchManager {
131
119
  const users = batchData[basePath];
132
120
  const userIds = Object.keys(users);
133
121
  if (!userIds.length) continue;
134
- for (let i = 0; i < userIds.length; i += this.config.TASK_ENGINE_MAX_USERS_PER_SHARD) {
135
- const chunkData = Object.fromEntries(userIds.slice(i, i + this.config.TASK_ENGINE_MAX_USERS_PER_SHARD).map(id => [id, users[id]]));
136
- firestoreBatch.set(this.db.collection(`${basePath}/parts`).doc(), chunkData);
137
- count++;
138
- }
122
+ for (let i = 0; i < userIds.length; i += this.config.TASK_ENGINE_MAX_USERS_PER_SHARD) { const chunkData = Object.fromEntries(userIds.slice(i, i + this.config.TASK_ENGINE_MAX_USERS_PER_SHARD).map(id => [id, users[id]])); firestoreBatch.set(this.db.collection(`${basePath}/parts`).doc(), chunkData); count++; }
139
123
  this.logger.log('INFO', `[BATCH] Staged ${userIds.length} ${logName} users in ${Math.ceil(userIds.length / this.config.TASK_ENGINE_MAX_USERS_PER_SHARD)} shards for ${basePath}.`);
140
124
  delete batchData[basePath];
141
125
  }
@@ -144,13 +128,10 @@ class FirestoreBatchManager {
144
128
 
145
129
  async flushBatches() {
146
130
  if (this.batchTimeout) { clearTimeout(this.batchTimeout); this.batchTimeout = null; }
147
-
148
131
  const firestoreBatch = this.db.batch();
149
132
  let batchOps = 0;
150
-
151
133
  batchOps += this._flushDataBatch(this.portfolioBatch, firestoreBatch, 'Portfolio');
152
134
  batchOps += this._flushDataBatch(this.tradingHistoryBatch, firestoreBatch, 'Trade History');
153
-
154
135
  for (const docPath in this.timestampBatch) {
155
136
  const timestamps = this.timestampBatch[docPath];
156
137
  if (!Object.keys(timestamps).length) continue;
@@ -168,39 +149,22 @@ class FirestoreBatchManager {
168
149
  delete this.speculatorTimestampFixBatch[docPath];
169
150
  }
170
151
 
171
- // --- START FIX: MODIFIED FOR SHARDING ---
172
- // Loop over each shardId in the updates object
173
152
  for (const shardId in this.usernameMapUpdates) {
174
153
  const updates = this.usernameMapUpdates[shardId];
175
- if (updates && Object.keys(updates).length > 0) {
176
- firestoreBatch.set(
177
- this.db.collection(this.usernameMapCollectionName).doc(shardId),
178
- updates,
179
- { merge: true }
180
- );
181
- batchOps++;
182
- this.logger.log('INFO', `[BATCH] Flushing ${Object.keys(updates).length} username updates to ${shardId}.`);
183
- }
184
- }
185
- this.usernameMapUpdates = {}; // Clear updates after staging them
186
- // --- END FIX ---
154
+ if (updates && Object.keys(updates).length > 0) { firestoreBatch.set( this.db.collection(this.usernameMapCollectionName).doc(shardId), updates, { merge: true } ); batchOps++; this.logger.log('INFO', `[BATCH] Flushing ${Object.keys(updates).length} username updates to ${shardId}.`); } }
155
+ this.usernameMapUpdates = {};
187
156
 
188
157
  if (this.processedSpeculatorCids.size) {
189
158
  const cids = Array.from(this.processedSpeculatorCids);
190
159
  this.processedSpeculatorCids.clear();
191
160
  const snapshot = await this.db.collection(this.config.PENDING_SPECULATORS_COLLECTION).get();
192
- snapshot.forEach(doc => {
193
- const docData = doc.data().users || {};
194
- const cidsInDoc = cids.filter(cid => docData[cid]);
195
- if (!cidsInDoc.length) return;
196
- const delBatch = this.db.batch();
197
- const updates = Object.fromEntries(cidsInDoc.map(cid => [`users.${cid}`, FieldValue.delete()]));
198
- delBatch.update(doc.ref, updates);
199
- delBatch.commit();
200
- this.logger.log('INFO', `[BATCH] Deleted ${cidsInDoc.length} CIDs from ${doc.id}`);
201
- });
202
- }
203
-
161
+ snapshot.forEach(doc => { const docData = doc.data().users || {}; const cidsInDoc = cids.filter(cid => docData[cid]); if (!cidsInDoc.length) return;
162
+ const delBatch = this.db.batch();
163
+ const updates = Object.fromEntries(cidsInDoc.map(cid => [`users.${cid}`, FieldValue.delete()]));
164
+ delBatch.update(doc.ref, updates);
165
+ delBatch.commit();
166
+ this.logger.log('INFO', `[BATCH] Deleted ${cidsInDoc.length} CIDs from ${doc.id}`); }); }
167
+
204
168
  if (batchOps) await firestoreBatch.commit();
205
169
  await this.headerManager.flushPerformanceUpdates();
206
170
  this.logger.log('INFO', '[BATCH] All batches flushed successfully.');
@@ -1,6 +1,7 @@
1
1
  /*
2
2
  * FILENAME: CloudFunctions/NpmWrappers/bulltrackers-module/functions/task-engine/utils/task_engine_utils.js
3
3
  * (MODIFIED: To pass down a Set to track history fetches)
4
+ * (MODIFIED: To run all update tasks in parallel with a concurrency limit)
4
5
  */
5
6
 
6
7
  /**
@@ -12,6 +13,7 @@
12
13
  const { handleDiscover } = require('../helpers/discover_helpers');
13
14
  const { handleVerify } = require('../helpers/verify_helpers');
14
15
  const { handleUpdate, lookupUsernames } = require('../helpers/update_helpers');
16
+ const pLimit = require('p-limit');
15
17
 
16
18
  /**
17
19
  * Parses Pub/Sub message into task array.
@@ -21,10 +23,7 @@ function parseTaskPayload(message, logger) {
21
23
  let payload;
22
24
  try { payload = JSON.parse(Buffer.from(message.data, 'base64').toString()); }
23
25
  catch (e) { logger.log('ERROR', '[TaskEngine] Failed to parse message', { error: e.message }); return null; }
24
- if (!Array.isArray(payload.tasks) || payload.tasks.length === 0) {
25
- logger.log(payload.tasks?.length ? 'WARN' : 'ERROR', '[TaskEngine] Invalid or empty tasks array.', { payload });
26
- return null;
27
- }
26
+ if (!Array.isArray(payload.tasks) || payload.tasks.length === 0) { logger.log(payload.tasks?.length ? 'WARN' : 'ERROR', '[TaskEngine] Invalid or empty tasks array.', { payload }); return null; }
28
27
  return payload.tasks;
29
28
  }
30
29
 
@@ -35,11 +34,7 @@ async function prepareTaskBatches(tasks, batchManager, logger) {
35
34
  const tasksToRun = [], cidsToLookup = new Map(), otherTasks = [];
36
35
  await batchManager.loadUsernameMap();
37
36
  for (const task of tasks) {
38
- if (task.type === 'update') {
39
- const username = batchManager.getUsername(task.userId);
40
- username ? tasksToRun.push({ task, username }) : cidsToLookup.set(String(task.userId), task);
41
- } else otherTasks.push(task);
42
- }
37
+ if (task.type === 'update') { const username = batchManager.getUsername(task.userId); username ? tasksToRun.push({ task, username }) : cidsToLookup.set(String(task.userId), task); } else otherTasks.push(task); }
43
38
  logger.log('INFO', `[TaskEngine] Sorting complete. Known: ${tasksToRun.length}, Lookup: ${cidsToLookup.size}, Other: ${otherTasks.length}`);
44
39
  return { tasksToRun, cidsToLookup, otherTasks };
45
40
  }
@@ -51,12 +46,7 @@ async function runUsernameLookups(tasksToRun, cidsToLookup, dependencies, config
51
46
  if (!cidsToLookup.size) return;
52
47
  logger.log('INFO', `[TaskEngine] Looking up ${cidsToLookup.size} usernames...`);
53
48
  const foundUsers = await lookupUsernames([...cidsToLookup.keys()], dependencies, config);
54
- for (const u of foundUsers) {
55
- const cid = String(u.CID), username = u.Value.UserName;
56
- batchManager.addUsernameMapUpdate(cid, username);
57
- const task = cidsToLookup.get(cid);
58
- if (task) { tasksToRun.push({ task, username }); cidsToLookup.delete(cid); }
59
- }
49
+ for (const u of foundUsers) { const cid = String(u.CID), username = u.Value.UserName; batchManager.addUsernameMapUpdate(cid, username); const task = cidsToLookup.get(cid); if (task) { tasksToRun.push({ task, username }); cidsToLookup.delete(cid); } }
60
50
  if (cidsToLookup.size) logger.log('WARN', `[TaskEngine] Could not find ${cidsToLookup.size} usernames (likely private).`, { skippedCids: [...cidsToLookup.keys()] });
61
51
  }
62
52
 
@@ -65,35 +55,22 @@ async function runUsernameLookups(tasksToRun, cidsToLookup, dependencies, config
65
55
  */
66
56
  async function executeTasks(tasksToRun, otherTasks, dependencies, config, taskId) {
67
57
  const { logger } = dependencies;
68
-
69
- // --- START MODIFICATION ---
70
- // This Set will track history fetches *only for this batch*.
71
58
  const historyFetchedForUser = new Set();
72
- // --- END MODIFICATION ---
73
-
74
-
75
- // Process discover/verify tasks
76
59
  for (const task of otherTasks) {
77
60
  const subTaskId = `${task.type}-${task.userType || 'unknown'}-${task.userId || task.cids?.[0] || 'sub'}`;
78
61
  const handler = { discover: handleDiscover, verify: handleVerify }[task.type];
79
62
  if (handler) try { await handler(task, subTaskId, dependencies, config); }
80
63
  catch (err) { logger.log('ERROR', `[TaskEngine/${taskId}] Error in ${task.type} for ${subTaskId}`, { errorMessage: err.message }); }
81
- else logger.log('ERROR', `[TaskEngine/${taskId}] Unknown task type: ${task.type}`);
82
- }
83
-
84
- // Process update tasks (all have usernames)
85
- for (const { task, username } of tasksToRun) {
64
+ else logger.log('ERROR', `[TaskEngine/${taskId}] Unknown task type: ${task.type}`); }
65
+ const limit = pLimit(config.TASK_ENGINE_CONCURRENCY || 10);
66
+ let successCount = 0;
67
+ let errorCount = 0;
68
+ const updatePromises = tasksToRun.map(({ task, username }) => {
86
69
  const subTaskId = `${task.type}-${task.userType || 'unknown'}-${task.userId}`;
87
- try {
88
- // --- START MODIFICATION ---
89
- // Pass the Set to the update handler
90
- await handleUpdate(task, subTaskId, dependencies, config, username, historyFetchedForUser);
91
- // --- END MODIFICATION ---
92
- }
93
- catch (err) { logger.log('ERROR', `[TaskEngine/${taskId}] Error in handleUpdate for ${task.userId}`, { errorMessage: err.message }); }
94
- }
95
-
96
- logger.log('SUCCESS', `[TaskEngine/${taskId}] Processed all tasks.`);
70
+ return limit(() => handleUpdate(task, subTaskId, dependencies, config, username, historyFetchedForUser) .catch(err => { logger.log('ERROR', `[TaskEngine/${taskId}] Error in handleUpdate for ${task.userId}`, { errorMessage: err.message }); throw err; }) ); });
71
+ const results = await Promise.allSettled(updatePromises);
72
+ results.forEach(result => { if (result.status === 'fulfilled') { successCount++; } else { errorCount++; } });
73
+ logger.log( errorCount > 0 ? 'WARN' : 'SUCCESS', `[TaskEngine/${taskId}] Processed all ${tasksToRun.length} update tasks. Success: ${successCount}, Failed: ${errorCount}.` );
97
74
  }
98
75
 
99
76
  module.exports = { parseTaskPayload, prepareTaskBatches, runUsernameLookups, executeTasks };