bulltrackers-module 1.0.151 → 1.0.153

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/functions/appscript-api/index.js +8 -38
  2. package/functions/computation-system/helpers/computation_pass_runner.js +38 -183
  3. package/functions/computation-system/helpers/orchestration_helpers.js +120 -314
  4. package/functions/computation-system/utils/data_loader.js +47 -132
  5. package/functions/computation-system/utils/schema_capture.js +7 -41
  6. package/functions/computation-system/utils/utils.js +37 -124
  7. package/functions/core/utils/firestore_utils.js +8 -46
  8. package/functions/core/utils/intelligent_header_manager.js +26 -128
  9. package/functions/core/utils/intelligent_proxy_manager.js +33 -171
  10. package/functions/core/utils/pubsub_utils.js +7 -24
  11. package/functions/dispatcher/helpers/dispatch_helpers.js +9 -30
  12. package/functions/dispatcher/index.js +7 -30
  13. package/functions/etoro-price-fetcher/helpers/handler_helpers.js +12 -80
  14. package/functions/fetch-insights/helpers/handler_helpers.js +18 -70
  15. package/functions/generic-api/helpers/api_helpers.js +28 -167
  16. package/functions/generic-api/index.js +49 -188
  17. package/functions/invalid-speculator-handler/helpers/handler_helpers.js +10 -47
  18. package/functions/orchestrator/helpers/discovery_helpers.js +1 -5
  19. package/functions/orchestrator/index.js +1 -6
  20. package/functions/price-backfill/helpers/handler_helpers.js +13 -69
  21. package/functions/social-orchestrator/helpers/orchestrator_helpers.js +5 -37
  22. package/functions/social-task-handler/helpers/handler_helpers.js +29 -186
  23. package/functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers.js +19 -78
  24. package/functions/task-engine/handler_creator.js +2 -8
  25. package/functions/task-engine/helpers/update_helpers.js +17 -83
  26. package/functions/task-engine/helpers/verify_helpers.js +11 -56
  27. package/functions/task-engine/utils/firestore_batch_manager.js +16 -67
  28. package/functions/task-engine/utils/task_engine_utils.js +6 -35
  29. package/index.js +45 -43
  30. package/package.json +1 -1
package/functions/task-engine/helpers/verify_helpers.js CHANGED
@@ -3,27 +3,15 @@ const { FieldValue } = require('@google-cloud/firestore');
  async function fetchAndVerifyUser(user, { logger, headerManager, proxyManager }, { userType, SPECULATOR_INSTRUMENTS_ARRAY }) {
  const selectedHeader = await headerManager.selectHeader();
  if (!selectedHeader) return null;
-
  let wasSuccess = false;
- try {
- const res = await proxyManager.fetch(`${process.env.ETORO_API_PORTFOLIO_URL}?cid=${user.cid}`, { headers: selectedHeader.header });
- if (!res.ok) return null;
-
+ try { const res = await proxyManager.fetch(`${process.env.ETORO_API_PORTFOLIO_URL}?cid=${user.cid}`, { headers: selectedHeader.header }); if (!res.ok) return null;
  wasSuccess = true;
  const portfolioData = await res.json();
- if (userType === 'speculator') {
- const instruments = portfolioData.AggregatedPositions.map(p => p.InstrumentID)
- .filter(id => SPECULATOR_INSTRUMENTS_ARRAY.includes(id));
- if (!instruments.length) return null;
- return { type: 'speculator', userId: user.cid, isBronze: user.isBronze, username: user.username, updateData: { instruments, lastVerified: new Date(), lastHeldSpeculatorAsset: new Date() } };
- }
+ if (userType === 'speculator') { const instruments = portfolioData.AggregatedPositions.map(p => p.InstrumentID) .filter(id => SPECULATOR_INSTRUMENTS_ARRAY.includes(id));
+ if (!instruments.length) return null; return { type: 'speculator', userId: user.cid, isBronze: user.isBronze, username: user.username, updateData: { instruments, lastVerified: new Date(), lastHeldSpeculatorAsset: new Date() } }; }
  return { type: 'normal', userId: user.cid, isBronze: user.isBronze, username: user.username, updateData: { lastVerified: new Date() } };
- } catch (err) {
- logger.log('WARN', `[VERIFY] Error processing user ${user.cid}`, { errorMessage: err.message });
- return null;
- } finally {
- if (selectedHeader) headerManager.updatePerformance(selectedHeader.id, wasSuccess);
- }
+ } catch (err) { logger.log('WARN', `[VERIFY] Error processing user ${user.cid}`, { errorMessage: err.message }); return null;
+ } finally { if (selectedHeader) headerManager.updatePerformance(selectedHeader.id, wasSuccess); }
  }

  async function handleVerify(task, taskId, { db, logger, ...dependencies }, config) {
@@ -31,56 +19,23 @@ async function handleVerify(task, taskId, { db, logger, ...dependencies }, confi
  const batch = db.batch();
  const speculatorUpdates = {}, normalUpdates = {}, bronzeStates = {}, usernameMap = {};
  const specSet = new Set(config.SPECULATOR_INSTRUMENTS_ARRAY);
-
  const results = await Promise.allSettled(users.map(u => fetchAndVerifyUser(u, dependencies, { ...config, userType })));
-
  let validUserCount = 0;
  results.forEach(r => {
- if (r.status === 'fulfilled' && r.value) {
- const d = r.value;
- usernameMap[d.userId] = { username: d.username };
- bronzeStates[d.userId] = d.isBronze;
- validUserCount++;
- if (d.type === 'speculator') speculatorUpdates[`users.${d.userId}`] = d.updateData;
- else normalUpdates[`users.${d.userId}`] = d.updateData;
- }
- });
-
+ if (r.status === 'fulfilled' && r.value) { const d = r.value; usernameMap[d.userId] = { username: d.username }; bronzeStates[d.userId] = d.isBronze; validUserCount++; if (d.type === 'speculator') speculatorUpdates[`users.${d.userId}`] = d.updateData; else normalUpdates[`users.${d.userId}`] = d.updateData; } });
  if (Object.keys(speculatorUpdates).length || Object.keys(normalUpdates).length) {
  const blockRef = db.collection(userType === 'speculator' ? config.FIRESTORE_COLLECTION_SPECULATOR_BLOCKS : config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS).doc(String(blockId));
  batch.set(blockRef, userType === 'speculator' ? speculatorUpdates : normalUpdates, { merge: true });
  const bronzeRef = db.collection(userType === 'speculator' ? config.FIRESTORE_COLLECTION_BRONZE_SPECULATORS : config.FIRESTORE_COLLECTION_BRONZE_NORMAL).doc(String(blockId));
  batch.set(bronzeRef, bronzeStates, { merge: true });
+ if (validUserCount) { const countsRef = db.doc(userType === 'speculator' ? config.FIRESTORE_DOC_SPECULATOR_BLOCK_COUNTS : config.FIRESTORE_DOC_BLOCK_COUNTS); const field = userType === 'speculator' ? `counts.${instrument}_${blockId}` : `counts.${blockId}`; batch.set(countsRef, { [field]: FieldValue.increment(validUserCount) }, { merge: true }); } }

- if (validUserCount) {
- const countsRef = db.doc(userType === 'speculator' ? config.FIRESTORE_DOC_SPECULATOR_BLOCK_COUNTS : config.FIRESTORE_DOC_BLOCK_COUNTS);
- const field = userType === 'speculator' ? `counts.${instrument}_${blockId}` : `counts.${blockId}`;
- batch.set(countsRef, { [field]: FieldValue.increment(validUserCount) }, { merge: true });
- }
- }
-
- // --- START FIX: MODIFIED FOR SHARDING ---
  if (Object.keys(usernameMap).length) {
- // Group updates by shard
  const shardedUpdates = {};
- for (const cid in usernameMap) {
- // Re-implement the sharding logic here
- const shardId = `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;
- if (!shardedUpdates[shardId]) {
- shardedUpdates[shardId] = {};
- }
- shardedUpdates[shardId][cid] = usernameMap[cid];
- }
-
- // Write each shard to its own document
- for (const shardId in shardedUpdates) {
- const mapRef = db.collection(config.FIRESTORE_COLLECTION_USERNAME_MAP).doc(shardId);
- batch.set(mapRef, shardedUpdates[shardId], { merge: true });
- }
- logger.log('INFO', `[VERIFY] Staging username updates across ${Object.keys(shardedUpdates).length} shards.`);
- }
- // --- END FIX ---
-
+ for (const cid in usernameMap) { const shardId = `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;
+ if (!shardedUpdates[shardId]) { shardedUpdates[shardId] = {}; } shardedUpdates[shardId][cid] = usernameMap[cid]; }
+ for (const shardId in shardedUpdates) { const mapRef = db.collection(config.FIRESTORE_COLLECTION_USERNAME_MAP).doc(shardId); batch.set(mapRef, shardedUpdates[shardId], { merge: true }); }
+ logger.log('INFO', `[VERIFY] Staging username updates across ${Object.keys(shardedUpdates).length} shards.`); }
  await batch.commit();
  if (validUserCount) logger.log('INFO', `[VERIFY] Verified and stored ${validUserCount} new ${userType} users.`);
  }
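
The username-map writes above are sharded: each CID is mapped to one of ten fixed documents in the username-map collection, and the staged updates are grouped per shard before being merged. A minimal standalone sketch of that mapping, mirroring the expression visible in the diff (function and variable names here are illustrative, not taken from the package):

```js
// Shard selection as used above: floor(cid / 10000) % 10 spreads CIDs across
// ten documents named cid_map_shard_0 ... cid_map_shard_9.
function getUsernameShardId(cid) {
  return `cid_map_shard_${Math.floor(parseInt(cid, 10) / 10000) % 10}`;
}

// Group queued username updates by shard so each shard document receives one merge write.
function groupUpdatesByShard(usernameMap) {
  const sharded = {};
  for (const cid of Object.keys(usernameMap)) {
    const shardId = getUsernameShardId(cid);
    if (!sharded[shardId]) sharded[shardId] = {};
    sharded[shardId][cid] = usernameMap[cid];
  }
  return sharded;
}

console.log(getUsernameShardId('1234567')); // "cid_map_shard_3"
```

Grouping per shard keeps each `batch.set(..., { merge: true })` to one write per shard document rather than one per user.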
package/functions/task-engine/utils/firestore_batch_manager.js CHANGED
@@ -12,7 +12,6 @@ class FirestoreBatchManager {
  this.headerManager = headerManager;
  this.logger = logger;
  this.config = config;
-
  this.portfolioBatch = {};
  this.timestampBatch = {};
  this.tradingHistoryBatch = {};
@@ -21,64 +20,41 @@ class FirestoreBatchManager {
  this.usernameMapUpdates = {};
  this.usernameMapLastLoaded = 0;
  this.processedSpeculatorCids = new Set();
-
  this.usernameMapCollectionName = config.FIRESTORE_COLLECTION_USERNAME_MAP;
  this.normalHistoryCollectionName = config.FIRESTORE_COLLECTION_NORMAL_HISTORY;
  this.speculatorHistoryCollectionName = config.FIRESTORE_COLLECTION_SPECULATOR_HISTORY;
-
  this.batchTimeout = null;
-
  logger.log('INFO', 'FirestoreBatchManager initialized.');
  }

- // --- START FIX: ADDED SHARDING HELPER ---
- _getUsernameShardId(cid) {
- // Shard across 10 documents (supports ~200k users assuming 50 bytes/user)
- return `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`;
- }
- // --- END FIX ---
+ _getUsernameShardId(cid) { return `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`; }

- _scheduleFlush() {
- if (!this.batchTimeout) this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS);
- }
+ _scheduleFlush() { if (!this.batchTimeout) this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS); }

  async loadUsernameMap() {
  if (Date.now() - this.usernameMapLastLoaded < 3600000) return;
  this.usernameMap.clear();
  this.logger.log('INFO', '[BATCH] Refreshing username map from Firestore...');
  try {
- // This correctly gets ALL documents (shards) in the collection
  const snapshot = await this.db.collection(this.usernameMapCollectionName).get();
- snapshot.forEach(doc => {
- const data = doc.data();
- for (const cid in data) if (data[cid]?.username) this.usernameMap.set(String(cid), data[cid].username);
- });
+ snapshot.forEach(doc => { const data = doc.data(); for (const cid in data) if (data[cid]?.username) this.usernameMap.set(String(cid), data[cid].username); });
  this.usernameMapLastLoaded = Date.now();
  this.logger.log('INFO', `[BATCH] Loaded ${this.usernameMap.size} usernames.`);
- } catch (e) {
- this.logger.log('ERROR', '[BATCH] Failed to load username map.', { errorMessage: e.message });
- }
+ } catch (e) { this.logger.log('ERROR', '[BATCH] Failed to load username map.', { errorMessage: e.message }); }
  }

  getUsername(cid) { return this.usernameMap.get(String(cid)); }

- // --- START FIX: MODIFIED FOR SHARDING ---
  addUsernameMapUpdate(cid, username) {
  if (!username) return;
  const cidStr = String(cid);
  this.usernameMap.set(cidStr, username);
-
- // Organize updates by shard ID
  const shardId = this._getUsernameShardId(cidStr);
- if (!this.usernameMapUpdates[shardId]) {
- this.usernameMapUpdates[shardId] = {};
- }
+ if (!this.usernameMapUpdates[shardId]) { this.usernameMapUpdates[shardId] = {}; }
  this.usernameMapUpdates[shardId][cidStr] = { username };
-
  this.logger.log('TRACE', `[BATCH] Queued username update for ${cidStr} in ${shardId}.`);
  this._scheduleFlush();
  }
- // --- END FIX ---

  async addToTradingHistoryBatch(userId, blockId, date, historyData, userType) {
  const collection = userType === 'speculator' ? this.speculatorHistoryCollectionName : this.normalHistoryCollectionName;
@@ -108,10 +84,7 @@ class FirestoreBatchManager {
  deleteFromTimestampBatch(userId, userType, instrumentId) {
  const collection = userType === 'speculator' ? this.config.FIRESTORE_COLLECTION_SPECULATOR_PORTFOLIOS : this.config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS;
  const docPath = `${collection}/${userType === 'speculator' ? 'speculators' : 'normal'}`;
- if (this.timestampBatch[docPath]) {
- const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId;
- delete this.timestampBatch[docPath][key];
- }
+ if (this.timestampBatch[docPath]) { const key = userType === 'speculator' ? `${userId}_${instrumentId}` : userId; delete this.timestampBatch[docPath][key]; }
  }

  addProcessedSpeculatorCids(cids) { cids.forEach(cid => this.processedSpeculatorCids.add(cid)); }
@@ -131,11 +104,7 @@ class FirestoreBatchManager {
  const users = batchData[basePath];
  const userIds = Object.keys(users);
  if (!userIds.length) continue;
- for (let i = 0; i < userIds.length; i += this.config.TASK_ENGINE_MAX_USERS_PER_SHARD) {
- const chunkData = Object.fromEntries(userIds.slice(i, i + this.config.TASK_ENGINE_MAX_USERS_PER_SHARD).map(id => [id, users[id]]));
- firestoreBatch.set(this.db.collection(`${basePath}/parts`).doc(), chunkData);
- count++;
- }
+ for (let i = 0; i < userIds.length; i += this.config.TASK_ENGINE_MAX_USERS_PER_SHARD) { const chunkData = Object.fromEntries(userIds.slice(i, i + this.config.TASK_ENGINE_MAX_USERS_PER_SHARD).map(id => [id, users[id]])); firestoreBatch.set(this.db.collection(`${basePath}/parts`).doc(), chunkData); count++; }
  this.logger.log('INFO', `[BATCH] Staged ${userIds.length} ${logName} users in ${Math.ceil(userIds.length / this.config.TASK_ENGINE_MAX_USERS_PER_SHARD)} shards for ${basePath}.`);
  delete batchData[basePath];
  }
@@ -144,13 +113,10 @@ class FirestoreBatchManager {

  async flushBatches() {
  if (this.batchTimeout) { clearTimeout(this.batchTimeout); this.batchTimeout = null; }
-
  const firestoreBatch = this.db.batch();
  let batchOps = 0;
-
  batchOps += this._flushDataBatch(this.portfolioBatch, firestoreBatch, 'Portfolio');
  batchOps += this._flushDataBatch(this.tradingHistoryBatch, firestoreBatch, 'Trade History');
-
  for (const docPath in this.timestampBatch) {
  const timestamps = this.timestampBatch[docPath];
  if (!Object.keys(timestamps).length) continue;
@@ -168,39 +134,22 @@ class FirestoreBatchManager {
  delete this.speculatorTimestampFixBatch[docPath];
  }

- // --- START FIX: MODIFIED FOR SHARDING ---
- // Loop over each shardId in the updates object
  for (const shardId in this.usernameMapUpdates) {
  const updates = this.usernameMapUpdates[shardId];
- if (updates && Object.keys(updates).length > 0) {
- firestoreBatch.set(
- this.db.collection(this.usernameMapCollectionName).doc(shardId),
- updates,
- { merge: true }
- );
- batchOps++;
- this.logger.log('INFO', `[BATCH] Flushing ${Object.keys(updates).length} username updates to ${shardId}.`);
- }
- }
- this.usernameMapUpdates = {}; // Clear updates after staging them
- // --- END FIX ---
+ if (updates && Object.keys(updates).length > 0) { firestoreBatch.set( this.db.collection(this.usernameMapCollectionName).doc(shardId), updates, { merge: true } ); batchOps++; this.logger.log('INFO', `[BATCH] Flushing ${Object.keys(updates).length} username updates to ${shardId}.`); } }
+ this.usernameMapUpdates = {};

  if (this.processedSpeculatorCids.size) {
  const cids = Array.from(this.processedSpeculatorCids);
  this.processedSpeculatorCids.clear();
  const snapshot = await this.db.collection(this.config.PENDING_SPECULATORS_COLLECTION).get();
- snapshot.forEach(doc => {
- const docData = doc.data().users || {};
- const cidsInDoc = cids.filter(cid => docData[cid]);
- if (!cidsInDoc.length) return;
- const delBatch = this.db.batch();
- const updates = Object.fromEntries(cidsInDoc.map(cid => [`users.${cid}`, FieldValue.delete()]));
- delBatch.update(doc.ref, updates);
- delBatch.commit();
- this.logger.log('INFO', `[BATCH] Deleted ${cidsInDoc.length} CIDs from ${doc.id}`);
- });
- }
-
+ snapshot.forEach(doc => { const docData = doc.data().users || {}; const cidsInDoc = cids.filter(cid => docData[cid]); if (!cidsInDoc.length) return;
+ const delBatch = this.db.batch();
+ const updates = Object.fromEntries(cidsInDoc.map(cid => [`users.${cid}`, FieldValue.delete()]));
+ delBatch.update(doc.ref, updates);
+ delBatch.commit();
+ this.logger.log('INFO', `[BATCH] Deleted ${cidsInDoc.length} CIDs from ${doc.id}`); }); }
+
  if (batchOps) await firestoreBatch.commit();
  await this.headerManager.flushPerformanceUpdates();
  this.logger.log('INFO', '[BATCH] All batches flushed successfully.');
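
FirestoreBatchManager keeps its accumulate-then-flush behaviour in this release: updates are queued in memory, `_scheduleFlush()` arms a single timer, and `flushBatches()` stages everything into one Firestore write batch. A minimal sketch of that debounce pattern, with assumed class name, interval, and document paths (not code from the package):

```js
const { Firestore } = require('@google-cloud/firestore');

class DebouncedBatchWriter {
  constructor(db, flushIntervalMs = 2000) {   // interval is an assumption for the sketch
    this.db = db;
    this.flushIntervalMs = flushIntervalMs;
    this.pending = {};                        // docPath -> fields to merge
    this.flushTimer = null;
  }

  queue(docPath, fields) {
    this.pending[docPath] = { ...(this.pending[docPath] || {}), ...fields };
    // Arm exactly one timer; later queue() calls before it fires just accumulate.
    if (!this.flushTimer) {
      this.flushTimer = setTimeout(() => this.flush().catch(console.error), this.flushIntervalMs);
    }
  }

  async flush() {
    if (this.flushTimer) { clearTimeout(this.flushTimer); this.flushTimer = null; }
    const entries = Object.entries(this.pending);
    this.pending = {};
    if (!entries.length) return;
    const batch = this.db.batch();
    for (const [docPath, fields] of entries) batch.set(this.db.doc(docPath), fields, { merge: true });
    await batch.commit();                     // note: a Firestore batch is capped at 500 writes
  }
}

// Usage sketch (hypothetical collection/document names):
// const writer = new DebouncedBatchWriter(new Firestore());
// writer.queue('username_map/cid_map_shard_3', { '1234567': { username: 'example' } });
```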
package/functions/task-engine/utils/task_engine_utils.js CHANGED
@@ -21,10 +21,7 @@ function parseTaskPayload(message, logger) {
  let payload;
  try { payload = JSON.parse(Buffer.from(message.data, 'base64').toString()); }
  catch (e) { logger.log('ERROR', '[TaskEngine] Failed to parse message', { error: e.message }); return null; }
- if (!Array.isArray(payload.tasks) || payload.tasks.length === 0) {
- logger.log(payload.tasks?.length ? 'WARN' : 'ERROR', '[TaskEngine] Invalid or empty tasks array.', { payload });
- return null;
- }
+ if (!Array.isArray(payload.tasks) || payload.tasks.length === 0) { logger.log(payload.tasks?.length ? 'WARN' : 'ERROR', '[TaskEngine] Invalid or empty tasks array.', { payload }); return null; }
  return payload.tasks;
  }

@@ -35,11 +32,7 @@ async function prepareTaskBatches(tasks, batchManager, logger) {
  const tasksToRun = [], cidsToLookup = new Map(), otherTasks = [];
  await batchManager.loadUsernameMap();
  for (const task of tasks) {
- if (task.type === 'update') {
- const username = batchManager.getUsername(task.userId);
- username ? tasksToRun.push({ task, username }) : cidsToLookup.set(String(task.userId), task);
- } else otherTasks.push(task);
- }
+ if (task.type === 'update') { const username = batchManager.getUsername(task.userId); username ? tasksToRun.push({ task, username }) : cidsToLookup.set(String(task.userId), task); } else otherTasks.push(task); }
  logger.log('INFO', `[TaskEngine] Sorting complete. Known: ${tasksToRun.length}, Lookup: ${cidsToLookup.size}, Other: ${otherTasks.length}`);
  return { tasksToRun, cidsToLookup, otherTasks };
  }
@@ -51,12 +44,7 @@ async function runUsernameLookups(tasksToRun, cidsToLookup, dependencies, config
  if (!cidsToLookup.size) return;
  logger.log('INFO', `[TaskEngine] Looking up ${cidsToLookup.size} usernames...`);
  const foundUsers = await lookupUsernames([...cidsToLookup.keys()], dependencies, config);
- for (const u of foundUsers) {
- const cid = String(u.CID), username = u.Value.UserName;
- batchManager.addUsernameMapUpdate(cid, username);
- const task = cidsToLookup.get(cid);
- if (task) { tasksToRun.push({ task, username }); cidsToLookup.delete(cid); }
- }
+ for (const u of foundUsers) { const cid = String(u.CID), username = u.Value.UserName; batchManager.addUsernameMapUpdate(cid, username); const task = cidsToLookup.get(cid); if (task) { tasksToRun.push({ task, username }); cidsToLookup.delete(cid); } }
  if (cidsToLookup.size) logger.log('WARN', `[TaskEngine] Could not find ${cidsToLookup.size} usernames (likely private).`, { skippedCids: [...cidsToLookup.keys()] });
  }

@@ -65,34 +53,17 @@ async function runUsernameLookups(tasksToRun, cidsToLookup, dependencies, config
  */
  async function executeTasks(tasksToRun, otherTasks, dependencies, config, taskId) {
  const { logger } = dependencies;
-
- // --- START MODIFICATION ---
- // This Set will track history fetches *only for this batch*.
  const historyFetchedForUser = new Set();
- // --- END MODIFICATION ---
-
-
- // Process discover/verify tasks
  for (const task of otherTasks) {
  const subTaskId = `${task.type}-${task.userType || 'unknown'}-${task.userId || task.cids?.[0] || 'sub'}`;
  const handler = { discover: handleDiscover, verify: handleVerify }[task.type];
  if (handler) try { await handler(task, subTaskId, dependencies, config); }
  catch (err) { logger.log('ERROR', `[TaskEngine/${taskId}] Error in ${task.type} for ${subTaskId}`, { errorMessage: err.message }); }
- else logger.log('ERROR', `[TaskEngine/${taskId}] Unknown task type: ${task.type}`);
- }
-
- // Process update tasks (all have usernames)
+ else logger.log('ERROR', `[TaskEngine/${taskId}] Unknown task type: ${task.type}`); }
  for (const { task, username } of tasksToRun) {
  const subTaskId = `${task.type}-${task.userType || 'unknown'}-${task.userId}`;
- try {
- // --- START MODIFICATION ---
- // Pass the Set to the update handler
- await handleUpdate(task, subTaskId, dependencies, config, username, historyFetchedForUser);
- // --- END MODIFICATION ---
- }
- catch (err) { logger.log('ERROR', `[TaskEngine/${taskId}] Error in handleUpdate for ${task.userId}`, { errorMessage: err.message }); }
- }
-
+ try { await handleUpdate(task, subTaskId, dependencies, config, username, historyFetchedForUser);}
+ catch (err) { logger.log('ERROR', `[TaskEngine/${taskId}] Error in handleUpdate for ${task.userId}`, { errorMessage: err.message }); } }
  logger.log('SUCCESS', `[TaskEngine/${taskId}] Processed all tasks.`);
  }

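The `parseTaskPayload` helper above expects a Pub/Sub message whose `data` field is base64-encoded JSON containing a non-empty `tasks` array. A small sketch of that round trip (the field values are made up for illustration):

```js
// Encode a payload the way a publisher would, then decode it the way parseTaskPayload does.
const payload = { tasks: [{ type: 'update', userType: 'speculator', userId: 1234567 }] };
const message = { data: Buffer.from(JSON.stringify(payload)).toString('base64') };

const decoded = JSON.parse(Buffer.from(message.data, 'base64').toString());
console.log(Array.isArray(decoded.tasks) && decoded.tasks.length > 0); // true -> the tasks array is returned
```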
package/index.js CHANGED
@@ -7,84 +7,86 @@
  // --- Core Utilities (Classes and Stateless Helpers) ---

  const core = {
- IntelligentHeaderManager: require('./functions/core/utils/intelligent_header_manager').IntelligentHeaderManager,
- IntelligentProxyManager: require('./functions/core/utils/intelligent_proxy_manager').IntelligentProxyManager,
- FirestoreBatchManager: require('./functions/task-engine/utils/firestore_batch_manager').FirestoreBatchManager,
- firestoreUtils: require('./functions/core/utils/firestore_utils'),
- pubsubUtils: require('./functions/core/utils/pubsub_utils'),
+ IntelligentHeaderManager : require('./functions/core/utils/intelligent_header_manager') .IntelligentHeaderManager,
+ IntelligentProxyManager : require('./functions/core/utils/intelligent_proxy_manager') .IntelligentProxyManager,
+ FirestoreBatchManager : require('./functions/task-engine/utils/firestore_batch_manager').FirestoreBatchManager,
+ firestoreUtils : require('./functions/core/utils/firestore_utils'),
+ pubsubUtils : require('./functions/core/utils/pubsub_utils'),
  };

  // --- Pipe 1: Orchestrator ---

  const orchestrator = {
  // Main Pipes (Entry points for Cloud Functions)
- runDiscoveryOrchestrator: require('./functions/orchestrator/index').runDiscoveryOrchestrator,
- runUpdateOrchestrator: require('./functions/orchestrator/index').runUpdateOrchestrator,
+ runDiscoveryOrchestrator : require('./functions/orchestrator/index').runDiscoveryOrchestrator,
+ runUpdateOrchestrator : require('./functions/orchestrator/index').runUpdateOrchestrator,

  // Sub-Pipes (Discovery)
- checkDiscoveryNeed: require('./functions/orchestrator/helpers/discovery_helpers').checkDiscoveryNeed,
- getDiscoveryCandidates: require('./functions/orchestrator/helpers/discovery_helpers').getDiscoveryCandidates,
- dispatchDiscovery: require('./functions/orchestrator/helpers/discovery_helpers').dispatchDiscovery,
+ checkDiscoveryNeed : require('./functions/orchestrator/helpers/discovery_helpers').checkDiscoveryNeed,
+ getDiscoveryCandidates : require('./functions/orchestrator/helpers/discovery_helpers').getDiscoveryCandidates,
+ dispatchDiscovery : require('./functions/orchestrator/helpers/discovery_helpers').dispatchDiscovery,

  // Sub-Pipes (Updates)
- getUpdateTargets: require('./functions/orchestrator/helpers/update_helpers').getUpdateTargets,
- dispatchUpdates: require('./functions/orchestrator/helpers/update_helpers').dispatchUpdates,
+ getUpdateTargets : require('./functions/orchestrator/helpers/update_helpers').getUpdateTargets,
+ dispatchUpdates : require('./functions/orchestrator/helpers/update_helpers').dispatchUpdates,
  };

+
  // --- Pipe 2: Dispatcher ---
- // ... (identical to original file)
+
  const dispatcher = {
- handleRequest: require('./functions/dispatcher/index').handleRequest,
- dispatchTasksInBatches: require('./functions/dispatcher/helpers/dispatch_helpers').dispatchTasksInBatches,
+ handleRequest : require('./functions/dispatcher/index').handleRequest,
+ dispatchTasksInBatches : require('./functions/dispatcher/helpers/dispatch_helpers').dispatchTasksInBatches,
  };

+
  // --- Pipe 3: Task Engine ---
- // ... (identical to original file)
+
  const taskEngine = {
- handleRequest: require('./functions/task-engine/handler_creator').handleRequest,
- handleDiscover: require('./functions/task-engine/helpers/discover_helpers').handleDiscover,
- handleVerify: require('./functions/task-engine/helpers/verify_helpers').handleVerify,
- handleUpdate: require('./functions/task-engine/helpers/update_helpers').handleUpdate,
+ handleRequest : require('./functions/task-engine/handler_creator').handleRequest,
+ handleDiscover : require('./functions/task-engine/helpers/discover_helpers').handleDiscover,
+ handleVerify : require('./functions/task-engine/helpers/verify_helpers').handleVerify,
+ handleUpdate : require('./functions/task-engine/helpers/update_helpers').handleUpdate,
  };

+
  // --- Pipe 4: Computation System ---
+
  const computationSystem = {
- // --- (MODIFICATION) ---
- // The main pipe is now the new pass runner
- runComputationPass: require('./functions/computation-system/helpers/computation_pass_runner').runComputationPass,
- // --- (END MODIFICATION) ---
-
- // Sub-Pipes (Exposing utils for potential external use)
- dataLoader: require('./functions/computation-system/utils/data_loader'),
- computationUtils: require('./functions/computation-system/utils/utils'),
+ runComputationPass : require('./functions/computation-system/helpers/computation_pass_runner').runComputationPass,
+ dataLoader : require('./functions/computation-system/utils/data_loader'),
+ computationUtils : require('./functions/computation-system/utils/utils'),
  };

+
  // --- Pipe 5: API ---
- // ... (identical to original file)
+
  const api = {
- createApiApp: require('./functions/generic-api/index').createApiApp,
- helpers: require('./functions/generic-api/helpers/api_helpers'),
+ createApiApp : require('./functions/generic-api/index').createApiApp,
+ helpers : require('./functions/generic-api/helpers/api_helpers'),
  };

+
  // --- Pipe 6: Maintenance ---
- // ... (identical to original file)
+
  const maintenance = {
- runSpeculatorCleanup: require('./functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers').runCleanup,
- handleInvalidSpeculator: require('./functions/invalid-speculator-handler/helpers/handler_helpers').handleInvalidSpeculator,
- runFetchInsights: require('./functions/fetch-insights/helpers/handler_helpers').fetchAndStoreInsights,
- runFetchPrices: require('./functions/etoro-price-fetcher/helpers/handler_helpers').fetchAndStorePrices,
- runSocialOrchestrator: require('./functions/social-orchestrator/helpers/orchestrator_helpers').runSocialOrchestrator,
- handleSocialTask: require('./functions/social-task-handler/helpers/handler_helpers').handleSocialTask,
- runBackfillAssetPrices: require('./functions/price-backfill/helpers/handler_helpers').runBackfillAssetPrices,
+ runSpeculatorCleanup : require('./functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers') .runCleanup,
+ handleInvalidSpeculator : require('./functions/invalid-speculator-handler/helpers/handler_helpers') .handleInvalidSpeculator,
+ runFetchInsights : require('./functions/fetch-insights/helpers/handler_helpers').fetchAndStoreInsights,
+ runFetchPrices : require('./functions/etoro-price-fetcher/helpers/handler_helpers').fetchAndStorePrices,
+ runSocialOrchestrator : require('./functions/social-orchestrator/helpers/orchestrator_helpers') .runSocialOrchestrator,
+ handleSocialTask : require('./functions/social-task-handler/helpers/handler_helpers') .handleSocialTask,
+ runBackfillAssetPrices : require('./functions/price-backfill/helpers/handler_helpers') .runBackfillAssetPrices,
  };

+
  // --- Pipe 7: Proxy ---
- // ... (identical to original file)
+
  const proxy = {
- handlePost: require('./functions/appscript-api/index').handlePost,
+ handlePost : require('./functions/appscript-api/index').handlePost,
  };

- // --- EXPORT THE FINAL PIPE OBJECT ---
+
  module.exports = {
  pipe: {
  core,
@@ -96,4 +98,4 @@ module.exports = {
  maintenance,
  proxy,
  }
- };
+ };
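
The index.js changes above appear to be formatting only: the `require` lines are re-aligned, the "(identical to original file)" placeholders and the export comment are dropped, and blank lines are added, so the exported `pipe` surface is unchanged. A consumer would still use it along these lines (illustrative sketch, not taken from the package):

```js
const { pipe } = require('bulltrackers-module');

// Entry points are grouped by pipe, as exported above.
const { FirestoreBatchManager, IntelligentHeaderManager } = pipe.core;
const { handleVerify, handleUpdate } = pipe.taskEngine;
const { runComputationPass } = pipe.computationSystem;
```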
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bulltrackers-module",
- "version": "1.0.151",
+ "version": "1.0.153",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [