bulltrackers-module 1.0.492 → 1.0.494

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -345,7 +345,13 @@ async function handleSweepDispatch(config, dependencies, computationManifest, re
   };
   });
 
-  logger.log('WARN', `[Sweep] 🧹 Forcing ${tasksPayload.length} tasks to HIGH-MEM for ${date}.`);
+  const taskNames = tasksPayload.map(t => t.computation || t.name).join(', ');
+  logger.log('WARN', `[Sweep] 🧹 Forcing ${tasksPayload.length} tasks to HIGH-MEM for ${date}.`, {
+    date: date,
+    pass: passToRun,
+    tasks: tasksPayload.map(t => ({ name: t.computation || t.name, reason: 'sweep' })),
+    topic: config.computationTopicHighMem || 'computation-tasks-highmem'
+  });
 
   await pubsubUtils.batchPublishTasks(dependencies, {
     topicName: config.computationTopicHighMem || 'computation-tasks-highmem',
@@ -503,6 +509,13 @@ async function handleStandardDispatch(config, dependencies, computationManifest,
 
   const pubPromises = [];
   if (standardTasks.length > 0) {
+    const taskNames = standardTasks.map(t => t.computation || t.name).join(', ');
+    logger.log('INFO', `[Dispatcher] 📤 Dispatching ${standardTasks.length} standard tasks: ${taskNames}`, {
+      date: selectedDate,
+      pass: passToRun,
+      tasks: standardTasks.map(t => ({ name: t.computation || t.name, reason: t.triggerReason || 'new' })),
+      topic: config.computationTopicStandard || 'computation-tasks'
+    });
     pubPromises.push(pubsubUtils.batchPublishTasks(dependencies, {
       topicName: config.computationTopicStandard || 'computation-tasks',
       tasks: standardTasks,
@@ -510,6 +523,13 @@ async function handleStandardDispatch(config, dependencies, computationManifest,
     }));
   }
   if (highMemTasks.length > 0) {
+    const taskNames = highMemTasks.map(t => t.computation || t.name).join(', ');
+    logger.log('INFO', `[Dispatcher] 📤 Dispatching ${highMemTasks.length} high-memory tasks: ${taskNames}`, {
+      date: selectedDate,
+      pass: passToRun,
+      tasks: highMemTasks.map(t => ({ name: t.computation || t.name, reason: t.triggerReason || 'retry' })),
+      topic: config.computationTopicHighMem || 'computation-tasks-highmem'
+    });
     pubPromises.push(pubsubUtils.batchPublishTasks(dependencies, {
       topicName: config.computationTopicHighMem || 'computation-tasks-highmem',
       tasks: highMemTasks,
@@ -164,7 +164,16 @@ class FinalSweepReporter {
 
   if (ledgerSnap.exists) {
     const data = ledgerSnap.data();
-    forensics.ledgerState = { status: data.status, workerId: data.workerId, error: data.error };
+    // Filter out undefined values to prevent Firestore errors
+    const ledgerState = {
+      status: data.status,
+      workerId: data.workerId
+    };
+    // Only include error if it's defined
+    if (data.error !== undefined && data.error !== null) {
+      ledgerState.error = data.error;
+    }
+    forensics.ledgerState = ledgerState;
 
     if (['PENDING', 'IN_PROGRESS'].includes(data.status)) {
       const lastHb = data.telemetry?.lastHeartbeat ? new Date(data.telemetry.lastHeartbeat).getTime() : 0;
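Context for the hunk above: the Firestore Node.js client rejects writes containing `undefined` field values unless it was initialized with `ignoreUndefinedProperties: true`, which is why `error` is now added conditionally. The same fix can be written as a reusable sanitizer; a minimal sketch, where `stripUndefined` is a hypothetical helper and not part of this package:

```js
// Hypothetical helper: drop undefined (and, matching the hunk, null) values
// so the resulting object is always safe to write to Firestore.
function stripUndefined(obj) {
  return Object.fromEntries(
    Object.entries(obj).filter(([, v]) => v !== undefined && v !== null)
  );
}

// Equivalent to the conditional assembly above:
// forensics.ledgerState = stripUndefined({
//   status: data.status,
//   workerId: data.workerId,
//   error: data.error
// });
```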
@@ -1,32 +1,30 @@
-Below is a quick-reference guide for testing the **Data Feeder Pipeline** using the Google Cloud Console UI.
-
----
-
 # 🧪 Workflow Testing Guide
 
-When triggering a manual execution in the **GCP Workflows Console**, use the following JSON objects in the **Input** field to bypass the schedule and test specific components.
+Below is a quick-reference guide for manually triggering the **Data Feeder Pipeline** using the Google Cloud Console UI.
 
-### How to Run a Test
+## How to Run a Test
 
-1. Go to the **Workflows** page in your GCP Console.
-2. Click on `data-feeder-pipeline`.
+1. Navigate to **Workflows** in your GCP Console.
+2. Select `data-feeder-pipeline`.
 3. Click the **Execute** button at the top.
-4. Paste the relevant **JSON Input** from the table below into the input box.
+4. Paste the specific **JSON Input** from the table below into the input box to bypass schedules and target specific phases.
 
 ### Test Commands
 
-| Component to Test | JSON Input | Description |
-| --- | --- | --- |
-| **Market Data** | `{"target_step": "market"}` | Runs `price-fetcher`, `insights-fetcher`, and `market-data-indexer`. |
-| **Rankings** | `{"target_step": "rankings"}` | Runs the `fetch-popular-investors` function specifically. |
-| **Social Orchestrator** | `{"target_step": "social"}` | Runs the midnight social task and the midnight data indexer. |
-| **Global Sync** | `{"target_step": "global"}` | Triggers the `root-data-indexer` with no target date (Full Scan). |
-| **Full Pipeline** | `{}` | Runs the entire 24-hour cycle from the beginning (Standard Run). |
+| Phase to Test | JSON Input | Action & Description |
+| :--- | :--- | :--- |
+| **Market Data (Phase 1)** | `{"target_step": "market"}` | **Runs:** Price Fetcher & Insights Fetcher.<br>**Note:** Automatically triggers indexing after each fetch. Workflow will pause at "Wait for Midnight" upon completion. |
+| **Midnight Phase (Phase 3)** | `{"target_step": "midnight"}` | **Runs:** Popular Investor Rankings, Midnight Social Orchestrator, and Global Index Verification.<br>**Note:** Use this to test the critical 00:00 UTC logic without waiting for the daily schedule. |
+| **Social Loop (Phase 4)** | `{"target_step": "social"}` | **Runs:** Enters the recurring 3-hour social fetch loop.<br>**Warning:** Triggers `social_loop_start`, which begins with a 3-hour sleep (`wait_3_hours`) before the first execution. |
+| **Standard Run** | `{}` | **Runs:** The full 24-hour cycle starting from 22:00 UTC (Market Close). |
 
 ---
 
-### 💡 Pro-Tips for Testing
+### 💡 Testing Notes & Pro-Tips
 
-* **Variable Checking:** After an execution finishes, click the **Details** tab in the execution logs to see the final values of variables like `sleep_midnight` or `sleep_loop` to verify the UTC alignment math worked correctly.
-* **Permissions:** Ensure your user account or the service account running the workflow has the `roles/workflows.invoker` role to trigger these tests.
-* **Timeouts:** If testing the full loop, remember that the workflow will "pause" at the `sys.sleep` steps. You can see the status as **Active** while it waits for the next 3-hour window.
+* **Automatic Indexing:** As of V3.2, you will not see explicit "Index" steps in the workflow visualization for Price, Insights, or Rankings. These functions now trigger the `root-data-indexer` automatically upon completion. The only visible index step is the **Global Verification** in the Midnight Phase.
+* **Verification:** To confirm data was indexed during a test:
+  * Check the logs of the individual Cloud Functions (`price-fetcher`, etc.).
+  * Or, run the **Midnight Phase** test, which ends with the explicit `global_index_midnight` step.
+* **Variable Checking:** After execution, check the **Variables** tab to view `sleep_midnight` calculations to ensure UTC alignment is functioning correctly.
+* **Permissions:** Ensure the executor has `roles/workflows.invoker`.
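The rewritten guide covers the Console UI; the same test inputs can also be sent programmatically. A minimal sketch using the `@google-cloud/workflows` executions client, where the project ID (`my-project`) and region (`us-central1`) are placeholder assumptions the guide does not specify:

```js
// Sketch: trigger a data-feeder-pipeline test run without the Console UI.
const { ExecutionsClient } = require('@google-cloud/workflows');

async function runTest(targetStep) {
  const client = new ExecutionsClient();
  const [execution] = await client.createExecution({
    parent: client.workflowPath('my-project', 'us-central1', 'data-feeder-pipeline'),
    execution: {
      // Same JSON input as the table above, e.g. {"target_step": "midnight"}
      argument: JSON.stringify(targetStep ? { target_step: targetStep } : {})
    }
  });
  console.log(`Started execution: ${execution.name}`);
}

runTest('midnight').catch(console.error);
```

The caller still needs `roles/workflows.invoker`, as noted in the guide.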
@@ -5,6 +5,7 @@
 
 const { FieldValue } = require('@google-cloud/firestore');
 const crypto = require('crypto');
+const { tryDecompress } = require('./data_helpers');
 
 const RATE_LIMIT_HOURS = 1;
 const RATE_LIMIT_MS = RATE_LIMIT_HOURS * 60 * 60 * 1000;
@@ -253,7 +254,6 @@ async function getPiFetchStatus(req, res, dependencies, config) {
 
   const doc = await docRef.get();
   if (doc.exists) {
-    const { tryDecompress } = require('./data_helpers');
     const data = tryDecompress(doc.data());
 
     if (data && data[String(piCidNum)]) {
@@ -316,7 +316,6 @@ async function getPiFetchStatus(req, res, dependencies, config) {
     }
   } else {
     // Data is in the main document (compressed or not)
-    const { tryDecompress } = require('./data_helpers');
     mergedData = tryDecompress(docData);
 
     // Handle string decompression result
@@ -0,0 +1,213 @@
+/**
+ * @fileoverview Test Alert Helpers
+ * Allows developers to send test alerts for testing the alert system
+ */
+
+const { FieldValue } = require('@google-cloud/firestore');
+const { getAllAlertTypes, getAlertTypeByComputation } = require('../../../alert-system/helpers/alert_type_registry');
+const { isDeveloperAccount, getDevOverride } = require('./dev_helpers');
+
+/**
+ * POST /user/dev/test-alert
+ * Send a test alert to users
+ *
+ * Request body:
+ * {
+ *   userCid: number (required) - Developer account CID
+ *   alertTypeId: string (optional) - Alert type ID, defaults to first available
+ *   targetUsers: 'all' | 'dev' | number[] (optional) - Who to send to, defaults to 'dev'
+ *   piCid: number (optional) - PI CID for the alert, defaults to 1
+ *   piUsername: string (optional) - PI username, defaults to 'TestPI'
+ *   metadata: object (optional) - Additional metadata for the alert
+ * }
+ */
+async function sendTestAlert(req, res, dependencies, config) {
+  const { db, logger } = dependencies;
+  const { userCid, alertTypeId, targetUsers = 'dev', piCid = 1, piUsername = 'TestPI', metadata = {} } = req.body;
+
+  if (!userCid) {
+    return res.status(400).json({ error: "Missing userCid" });
+  }
+
+  // SECURITY CHECK: Only allow developer accounts
+  if (!isDeveloperAccount(userCid)) {
+    logger.log('WARN', `[sendTestAlert] Unauthorized attempt by user ${userCid}`);
+    return res.status(403).json({
+      error: "Forbidden",
+      message: "Test alerts are only available for authorized developer accounts"
+    });
+  }
+
+  try {
+    // Get alert type
+    let alertType;
+    if (alertTypeId) {
+      alertType = getAlertTypeByComputation(alertTypeId);
+      if (!alertType) {
+        const allTypes = getAllAlertTypes();
+        alertType = allTypes.find(t => t.id === alertTypeId);
+      }
+    }
+
+    // Default to first available alert type if not specified
+    if (!alertType) {
+      const allTypes = getAllAlertTypes();
+      if (allTypes.length === 0) {
+        return res.status(400).json({ error: "No alert types available" });
+      }
+      alertType = allTypes[0];
+      logger.log('INFO', `[sendTestAlert] Using default alert type: ${alertType.id}`);
+    }
+
+    // Determine target user CIDs
+    let targetCids = [];
+
+    if (targetUsers === 'all') {
+      // Get all users from user_notifications collection
+      const notificationsSnapshot = await db.collection('user_notifications').get();
+      targetCids = notificationsSnapshot.docs.map(doc => Number(doc.id));
+      logger.log('INFO', `[sendTestAlert] Sending to all ${targetCids.length} users`);
+    } else if (targetUsers === 'dev') {
+      // Get all developer accounts with dev override enabled
+      const devOverridesCollection = config.devOverridesCollection || 'dev_overrides';
+      const devOverridesSnapshot = await db.collection(devOverridesCollection).get();
+
+      for (const doc of devOverridesSnapshot.docs) {
+        const data = doc.data();
+        if (data.enabled === true) {
+          targetCids.push(Number(doc.id));
+        }
+      }
+
+      // Also include the requesting developer
+      if (!targetCids.includes(Number(userCid))) {
+        targetCids.push(Number(userCid));
+      }
+
+      logger.log('INFO', `[sendTestAlert] Sending to ${targetCids.length} developer accounts`);
+    } else if (Array.isArray(targetUsers)) {
+      // Specific user CIDs
+      targetCids = targetUsers.map(cid => Number(cid)).filter(cid => !isNaN(cid) && cid > 0);
+      logger.log('INFO', `[sendTestAlert] Sending to ${targetCids.length} specific users`);
+    } else {
+      return res.status(400).json({
+        error: "Invalid targetUsers",
+        message: "targetUsers must be 'all', 'dev', or an array of user CIDs"
+      });
+    }
+
+    if (targetCids.length === 0) {
+      return res.status(400).json({
+        error: "No target users",
+        message: "No users found matching the target criteria"
+      });
+    }
+
+    // Generate alert message
+    const { generateAlertMessage } = require('../../../alert-system/helpers/alert_type_registry');
+    const alertMessage = generateAlertMessage(alertType, piUsername, {
+      ...metadata,
+      isTest: true,
+      testSentBy: Number(userCid),
+      testSentAt: new Date().toISOString()
+    });
+
+    // Create notifications for each target user
+    const batch = db.batch();
+    const notificationRefs = [];
+    const counterUpdates = {};
+    const today = new Date().toISOString().split('T')[0];
+
+    for (const targetCid of targetCids) {
+      const notificationId = `test_alert_${Date.now()}_${targetCid}_${piCid}_${Math.random().toString(36).substring(2, 9)}`;
+      const notificationRef = db.collection('user_notifications')
+        .doc(String(targetCid))
+        .collection('notifications')
+        .doc(notificationId);
+
+      const notificationData = {
+        id: notificationId,
+        type: 'alert',
+        title: `[TEST] ${alertType.name}`,
+        message: alertMessage,
+        read: false,
+        createdAt: FieldValue.serverTimestamp(),
+        metadata: {
+          piCid: Number(piCid),
+          piUsername: piUsername,
+          alertType: alertType.id,
+          alertTypeName: alertType.name,
+          computationName: alertType.computationName,
+          computationDate: today,
+          severity: alertType.severity,
+          isTest: true,
+          testSentBy: Number(userCid),
+          testSentAt: new Date().toISOString(),
+          ...metadata
+        }
+      };
+
+      batch.set(notificationRef, notificationData);
+      notificationRefs.push(notificationRef);
+
+      // Track counter updates
+      if (!counterUpdates[targetCid]) {
+        counterUpdates[targetCid] = {
+          date: today,
+          unreadCount: 0,
+          totalCount: 0,
+          byType: {}
+        };
+      }
+      counterUpdates[targetCid].unreadCount += 1;
+      counterUpdates[targetCid].totalCount += 1;
+      counterUpdates[targetCid].byType[alertType.id] =
+        (counterUpdates[targetCid].byType[alertType.id] || 0) + 1;
+    }
+
+    // Update notification counters
+    for (const [targetCid, counter] of Object.entries(counterUpdates)) {
+      const counterRef = db.collection('user_notifications')
+        .doc(String(targetCid))
+        .collection('counters')
+        .doc(counter.date);
+
+      batch.set(counterRef, {
+        date: counter.date,
+        unreadCount: FieldValue.increment(counter.unreadCount),
+        totalCount: FieldValue.increment(counter.totalCount),
+        [`byType.${alertType.id}`]: FieldValue.increment(counter.byType[alertType.id] || 0),
+        lastUpdated: FieldValue.serverTimestamp()
+      }, { merge: true });
+    }
+
+    // Commit batch
+    await batch.commit();
+
+    logger.log('SUCCESS', `[sendTestAlert] Created ${notificationRefs.length} test notifications for alert type ${alertType.id}`);
+
+    return res.status(200).json({
+      success: true,
+      message: `Test alert sent to ${targetCids.length} users`,
+      alertType: {
+        id: alertType.id,
+        name: alertType.name,
+        computationName: alertType.computationName
+      },
+      targetUsers: {
+        count: targetCids.length,
+        cids: targetCids
+      },
+      piCid: Number(piCid),
+      piUsername: piUsername,
+      notificationsCreated: notificationRefs.length
+    });
+
+  } catch (error) {
+    logger.log('ERROR', `[sendTestAlert] Error sending test alert:`, error);
+    return res.status(500).json({ error: error.message });
+  }
+}
+
+module.exports = { sendTestAlert };
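Once the route is registered (see the router changes below), the endpoint can be exercised with a plain HTTP call. A minimal sketch; the base URL is a deployment-specific placeholder, and the CID must belong to a whitelisted developer account:

```js
// Sketch: calling POST /user/dev/test-alert (Node 18+, global fetch).
const BASE_URL = 'https://api.example.com'; // placeholder, deployment-specific

async function sendTest() {
  const res = await fetch(`${BASE_URL}/user/dev/test-alert`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      userCid: 12345,       // must pass the isDeveloperAccount check
      targetUsers: 'dev',   // or 'all', or an array of CIDs
      piUsername: 'TestPI'  // optional; defaults shown in the handler above
    })
  });
  console.log(res.status, await res.json());
}

sendTest().catch(console.error);
```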
@@ -12,6 +12,7 @@ const { setDevOverride, getDevOverrideStatus } = require('./helpers/dev_helpers'
 const { getAlertTypes, getDynamicWatchlistComputations, getUserAlerts, getAlertCount, markAlertRead, markAllAlertsRead, deleteAlert } = require('./helpers/alert_helpers');
 const { requestPiFetch, getPiFetchStatus } = require('./helpers/on_demand_fetch_helpers');
 const { requestUserSync, getUserSyncStatus } = require('./helpers/user_sync_helpers');
+const { sendTestAlert } = require('./helpers/test_alert_helpers');
 
 module.exports = (dependencies, config) => {
   const router = express.Router();
@@ -87,6 +88,7 @@ module.exports = (dependencies, config) => {
   // --- Developer Mode (only for whitelisted developer accounts) ---
   router.post('/dev/override', (req, res) => setDevOverride(req, res, dependencies, config));
   router.get('/dev/override', (req, res) => getDevOverrideStatus(req, res, dependencies, config));
+  router.post('/dev/test-alert', (req, res) => sendTestAlert(req, res, dependencies, config));
 
   // --- Alert Management ---
   router.get('/me/alert-types', (req, res) => getAlertTypes(req, res, dependencies, config));
@@ -270,19 +270,33 @@ exports.runRootDataIndexer = async (config, dependencies) => {
   ]);
 
   // Check if date exists in tracking documents
+  // The _dates document uses dot notation: fetchedDates.2025-12-29: true
+  // When read, this becomes: { fetchedDates: { "2025-12-29": true } }
   let foundPISocial = false;
   let foundSignedInSocial = false;
 
   if (piSocialTrackingDoc.exists) {
     const data = piSocialTrackingDoc.data();
-    if (data.fetchedDates && data.fetchedDates[dateStr] === true) {
+    // Check both nested structure and flat dot-notation structure
+    if (data.fetchedDates && typeof data.fetchedDates === 'object') {
+      if (data.fetchedDates[dateStr] === true) {
+        foundPISocial = true;
+      }
+    } else if (data[`fetchedDates.${dateStr}`] === true) {
+      // Handle flat dot-notation structure (if Firestore stores it that way)
       foundPISocial = true;
     }
   }
 
   if (signedInSocialTrackingDoc.exists) {
     const data = signedInSocialTrackingDoc.data();
-    if (data.fetchedDates && data.fetchedDates[dateStr] === true) {
+    // Check both nested structure and flat dot-notation structure
+    if (data.fetchedDates && typeof data.fetchedDates === 'object') {
+      if (data.fetchedDates[dateStr] === true) {
+        foundSignedInSocial = true;
+      }
+    } else if (data[`fetchedDates.${dateStr}`] === true) {
+      // Handle flat dot-notation structure (if Firestore stores it that way)
       foundSignedInSocial = true;
     }
   }
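For context on the two shapes this hunk defends against: in the Firestore Node.js client, `update()` interprets dotted string keys as field paths and writes a nested map, while `set()` treats the same key as a literal field name. A minimal sketch of the distinction (document path illustrative):

```js
// update() parses the dot as a field path -> nested map on read:
await docRef.update({ 'fetchedDates.2025-12-29': true });
// docRef.get() -> { fetchedDates: { '2025-12-29': true } }

// set() keeps the dot as part of the field name -> flat key on read:
await docRef.set({ 'fetchedDates.2025-12-29': true }, { merge: true });
// docRef.get() -> { 'fetchedDates.2025-12-29': true }
```

So the nested branch is the normal case, and the flat `else if` branch only matters if some writer used `set()` with a pre-joined key.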
@@ -235,12 +235,27 @@ async function handleRequest(message, context, configObj, dependencies) {
       await handlePopularInvestorUpdate(taskData, configObj, dependencies);
       break;
     case 'ON_DEMAND_USER_UPDATE':
-      const onDemandData = data || payload;
-      if (!onDemandData.cid || !onDemandData.username) {
-        logger.log('ERROR', `[TaskEngine] ON_DEMAND_USER_UPDATE missing required fields`, { data: onDemandData });
+      // For ON_DEMAND_USER_UPDATE, the entire payload IS the task data
+      // (not wrapped in a 'data' field like other task types)
+      // Extract task data from payload, excluding 'type'
+      const onDemandTaskData = data || {
+        cid: payload.cid,
+        username: payload.username,
+        requestId: payload.requestId,
+        source: payload.source,
+        requestedBy: payload.requestedBy,
+        effectiveRequestedBy: payload.effectiveRequestedBy,
+        metadata: payload.metadata,
+        priority: payload.priority,
+        data: payload.data // Include nested data object (includeSocial, since, etc.)
+      };
+
+      if (!onDemandTaskData.cid || !onDemandTaskData.username) {
+        logger.log('ERROR', `[TaskEngine] ON_DEMAND_USER_UPDATE missing required fields (cid or username)`, { payload, onDemandTaskData });
         return;
       }
-      await handleOnDemandUserUpdate(onDemandData, configObj, dependencies);
+
+      await handleOnDemandUserUpdate(onDemandTaskData, configObj, dependencies);
       break;
     case 'SOCIAL_INSTRUMENT_FETCH':
     case 'SOCIAL_PI_FETCH':
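The two payload shapes being distinguished above, side by side. Field values are illustrative, not taken from real traffic:

```js
// Most task types wrap their task data in a 'data' field:
const wrapped = {
  type: 'SOCIAL_PI_FETCH',          // example type from the switch above
  data: { cid: 111, username: 'somePI' }
};

// ON_DEMAND_USER_UPDATE publishes the task data at the top level,
// with an optional nested 'data' object for options:
const flat = {
  type: 'ON_DEMAND_USER_UPDATE',
  cid: 222,
  username: 'someUser',
  requestId: 'req_abc',
  source: 'on_demand_sync',
  data: { includeSocial: true }     // options such as includeSocial, since
};
```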
@@ -500,6 +500,9 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
   // [FIX] Destructure dependencies first
   const { logger, proxyManager, batchManager, headerManager, db } = dependencies;
 
+  // Import notification helper once at the top
+  const { notifyTaskEngineComplete } = require('../../generic-api/user-api/helpers/notification_helpers');
+
   // Validate and set API URLs with defaults and fallbacks
   const ETORO_API_PORTFOLIO_URL = config.ETORO_API_PORTFOLIO_URL || process.env.ETORO_API_PORTFOLIO_URL || 'https://www.etoro.com/sapi/portfolios/portfolio';
   const ETORO_API_HISTORY_URL = config.ETORO_API_HISTORY_URL || process.env.ETORO_API_HISTORY_URL || 'https://www.etoro.com/sapi/trade-data-real/history/public/credit/flat';
@@ -558,6 +561,48 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
   let fetchSuccess = false;
   let portfolioFetched = false;
   let historyFetched = false;
+  let criticalError = null; // Track critical errors that should fail the sync
+
+  // Helper function to mark sync as failed and send notification
+  const markSyncFailed = async (errorMessage, stage) => {
+    if (requestId && source === 'on_demand_sync' && db) {
+      try {
+        const requestRef = db.collection('user_sync_requests')
+          .doc(String(cid))
+          .collection('requests')
+          .doc(requestId);
+
+        await requestRef.update({
+          status: 'failed',
+          error: errorMessage,
+          failedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp(),
+          updatedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp(),
+          failedStage: stage
+        });
+
+        // Send error notification
+        if (metadata?.requestingUserCid) {
+          try {
+            await notifyTaskEngineComplete(
+              db,
+              logger,
+              metadata.requestingUserCid,
+              requestId,
+              username,
+              false,
+              errorMessage
+            );
+          } catch (notifErr) {
+            logger.log('WARN', `[On-Demand Update] Failed to send error notification`, notifErr);
+          }
+        }
+
+        logger.log('ERROR', `[On-Demand Update] Marked sync ${requestId} as failed at stage: ${stage}. Error: ${errorMessage}`);
+      } catch (updateErr) {
+        logger.log('ERROR', `[On-Demand Update] Failed to mark sync as failed for ${requestId}`, updateErr);
+      }
+    }
+  };
 
   try {
     // Portfolio Fetch (only if portfolioOnly is true)
@@ -597,7 +642,10 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
       portfolioSuccess = true;
     } catch (fetchErr) {
       logger.log('ERROR', `[On-Demand] Direct fetch also failed for ${username}`, fetchErr);
-      throw new Error(`Failed to fetch portfolio for ${cid}. Direct: ${fetchErr.message}`);
+      const errorMsg = `Failed to fetch portfolio for ${cid}. Direct: ${fetchErr.message}`;
+      criticalError = new Error(errorMsg);
+      await markSyncFailed(errorMsg, 'portfolio_fetch');
+      throw criticalError;
     }
   }
 
@@ -608,7 +656,15 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
       fetchSuccess = true;
       portfolioFetched = true;
 
-      await batchManager.addToPortfolioBatch(String(cid), blockId, today, portfolioData, 'signed_in_user');
+      try {
+        await batchManager.addToPortfolioBatch(String(cid), blockId, today, portfolioData, 'signed_in_user');
+      } catch (batchErr) {
+        const errorMsg = `Failed to store portfolio data: ${batchErr.message}`;
+        logger.log('ERROR', `[On-Demand] ${errorMsg}`, batchErr);
+        criticalError = new Error(errorMsg);
+        await markSyncFailed(errorMsg, 'portfolio_storage');
+        throw criticalError;
+      }
     } else {
       logger.log('INFO', `[On-Demand Update] Skipping portfolio fetch (portfolioOnly=false) for ${username}`);
     }
@@ -660,23 +716,49 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
     }
 
     if (historySuccess && historyRes.ok) {
-      const historyData = await historyRes.json();
-      historyData.fetchedAt = new Date();
-
-      const VALID_REASONS = [0, 1, 5];
-      if (historyData.PublicHistoryPositions) {
-        historyData.PublicHistoryPositions = historyData.PublicHistoryPositions.filter(
-          p => VALID_REASONS.includes(p.CloseReason)
-        );
+      try {
+        const historyData = await historyRes.json();
+        historyData.fetchedAt = new Date();
+
+        const VALID_REASONS = [0, 1, 5];
+        if (historyData.PublicHistoryPositions) {
+          historyData.PublicHistoryPositions = historyData.PublicHistoryPositions.filter(
+            p => VALID_REASONS.includes(p.CloseReason)
+          );
+        }
+
+        try {
+          await batchManager.addToTradingHistoryBatch(String(cid), blockId, today, historyData, 'signed_in_user');
+          historyFetched = true;
+        } catch (batchErr) {
+          const errorMsg = `Failed to store history data: ${batchErr.message}`;
+          logger.log('ERROR', `[On-Demand] ${errorMsg}`, batchErr);
+          criticalError = new Error(errorMsg);
+          await markSyncFailed(errorMsg, 'history_storage');
+          throw criticalError;
+        }
+      } catch (parseErr) {
+        const errorMsg = `Failed to parse history data: ${parseErr.message}`;
+        logger.log('ERROR', `[On-Demand] ${errorMsg}`, parseErr);
+        criticalError = new Error(errorMsg);
+        await markSyncFailed(errorMsg, 'history_parse');
+        throw criticalError;
       }
-      await batchManager.addToTradingHistoryBatch(String(cid), blockId, today, historyData, 'signed_in_user');
-      historyFetched = true;
+    } else {
+      // History fetch failed - this is critical if portfolioOnly is true
+      if (portfolioOnly) {
+        const errorMsg = `History fetch failed for ${username} (status: ${historyRes?.status || 'unknown'})`;
+        logger.log('ERROR', `[On-Demand Update] ${errorMsg}`);
+        criticalError = new Error(errorMsg);
+        await markSyncFailed(errorMsg, 'history_fetch');
+        throw criticalError;
       } else {
-        logger.log('WARN', `[On-Demand Update] History fetch failed for ${username} (${historyRes.status})`);
+        logger.log('WARN', `[On-Demand Update] History fetch failed for ${username} (${historyRes?.status || 'unknown'})`);
       }
-    } else {
-      logger.log('INFO', `[On-Demand Update] Skipping history fetch (portfolioOnly=false) for ${username}`);
     }
+  } else {
+    logger.log('INFO', `[On-Demand Update] Skipping history fetch (portfolioOnly=false) for ${username}`);
+  }
 
   // Fetch social data if requested (for user signup or explicit request)
   let socialFetched = false;
@@ -764,7 +846,6 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
   // Send notification to requesting user if this is an on-demand sync
   if (requestId && source === 'on_demand_sync' && metadata?.requestingUserCid) {
     try {
-      const { notifyTaskEngineComplete } = require('../../generic-api/user-api/helpers/notification_helpers');
       const success = portfolioFetched || historyFetched; // At least one should succeed
       await notifyTaskEngineComplete(
         db,
@@ -816,14 +897,12 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
         updatedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp()
       });
     } catch (indexerError) {
-      logger.log('ERROR', `[On-Demand Update] Failed to run root data indexer for ${today}`, indexerError);
-      // Continue anyway - computation might still work if index already exists
-      // But update status to indicate we tried
-      await requestRef.update({
-        status: 'computing',
-        indexerError: indexerError.message,
-        updatedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp()
-      });
+      const errorMsg = `Failed to run root data indexer: ${indexerError.message}`;
+      logger.log('ERROR', `[On-Demand Update] ${errorMsg}`, indexerError);
+      // Root data indexing is critical - if it fails, computations won't know data exists
+      criticalError = new Error(errorMsg);
+      await markSyncFailed(errorMsg, 'root_data_indexing');
+      throw criticalError;
     }
 
     // Trigger computations with dependency chain resolution
@@ -870,22 +949,30 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
       // Don't mark as completed yet - wait for computation to finish
       // The status will remain 'computing' until computation completes
     } else {
-      logger.log('WARN', `[On-Demand Update] PubSub not available, cannot trigger computation`);
-      // Mark as completed if we can't trigger computation
-      await requestRef.update({
-        status: 'completed',
-        completedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp(),
-        updatedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp()
-      });
+      const errorMsg = 'PubSub not available, cannot trigger computation';
+      logger.log('ERROR', `[On-Demand Update] ${errorMsg}`);
+      criticalError = new Error(errorMsg);
+      await markSyncFailed(errorMsg, 'computation_trigger');
+      throw criticalError;
     }
   } catch (err) {
-    logger.log('WARN', `[On-Demand Update] Failed to update request status or trigger computation for ${requestId}`, err);
+    // If this is a critical error we already handled, re-throw it
+    if (criticalError) {
+      throw criticalError;
+    }
+    // Otherwise, this is an unexpected error in the computation triggering block
+    const errorMsg = `Failed to trigger computations: ${err.message}`;
+    logger.log('ERROR', `[On-Demand Update] ${errorMsg}`, err);
+    criticalError = new Error(errorMsg);
+    await markSyncFailed(errorMsg, 'computation_trigger');
+    throw criticalError;
   }
 }
 } catch (error) {
   logger.log('ERROR', `[On-Demand Update] Failed for ${username}`, error);
 
   // Update request status to failed if this is a sync request
+  // Only update if we haven't already marked it as failed (to avoid duplicate updates)
   if (requestId && source === 'on_demand_sync' && db) {
     try {
       const requestRef = db.collection('user_sync_requests')
@@ -893,14 +980,39 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
         .collection('requests')
         .doc(requestId);
 
-      const errorMessage = error.message || 'Unknown error occurred';
-      await requestRef.update({
-        status: 'failed',
-        error: errorMessage,
-        failedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp(),
-        updatedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp()
-      });
-      logger.log('INFO', `[On-Demand Update] Updated sync request ${requestId} to failed: ${errorMessage}`);
+      // Check current status to avoid overwriting if already marked as failed
+      const currentRequest = await requestRef.get();
+      if (currentRequest.exists && currentRequest.data().status !== 'failed') {
+        const errorMessage = error.message || 'Unknown error occurred';
+        const failedStage = error.failedStage || 'unknown';
+
+        await requestRef.update({
+          status: 'failed',
+          error: errorMessage,
+          failedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp(),
+          updatedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp(),
+          failedStage: failedStage
+        });
+
+        // Send error notification if not already sent
+        if (metadata?.requestingUserCid) {
+          try {
+            await notifyTaskEngineComplete(
+              db,
+              logger,
+              metadata.requestingUserCid,
+              requestId,
+              username,
+              false,
+              errorMessage
+            );
+          } catch (notifErr) {
+            logger.log('WARN', `[On-Demand Update] Failed to send error notification`, notifErr);
+          }
+        }
+
+        logger.log('INFO', `[On-Demand Update] Updated sync request ${requestId} to failed: ${errorMessage} (stage: ${failedStage})`);
+      }
     } catch (err) {
       logger.log('WARN', `[On-Demand Update] Failed to update request status to failed for ${requestId}`, err);
     }
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "bulltrackers-module",
-  "version": "1.0.492",
+  "version": "1.0.494",
   "description": "Helper Functions for Bulltrackers.",
   "main": "index.js",
   "files": [