bulltrackers-module 1.0.724 → 1.0.726

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,9 @@
1
1
  /**
2
2
  * @fileoverview Alert Generation Helpers
3
3
  * Handles creating alerts from computation results
4
+ *
5
+ * UPDATED: Now sends FCM push notifications in addition to Firestore writes.
6
+ * This enables background notifications even when the user is offline.
4
7
  */
5
8
 
6
9
  const { FieldValue } = require('@google-cloud/firestore');
@@ -8,7 +11,7 @@ const zlib = require('zlib');
8
11
  const { Storage } = require('@google-cloud/storage');
9
12
  const { generateAlertMessage } = require('./alert_manifest_loader');
10
13
  const { evaluateDynamicConditions } = require('./dynamic_evaluator');
11
- // [UPDATED] Now uses dynamic manifest loading and condition evaluation
14
+ const { sendAlertPushNotification } = require('../../core/utils/fcm_utils');
12
15
 
13
16
  const storage = new Storage(); // Singleton GCS Client
14
17
 
@@ -174,20 +177,34 @@ async function processAlertForPI(db, logger, piCid, alertType, computationMetada
174
177
  ...(computationMetadata || {})
175
178
  };
176
179
 
177
- const writePromise = db.collection('SignedInUsers')
178
- .doc(String(userCid))
179
- .collection('alerts')
180
- .doc(notificationId)
181
- .set(alertData)
182
- .catch(err => {
180
+ // Combined promise: write to Firestore AND send FCM push notification
181
+ const writeAndPushPromise = (async () => {
182
+ // 1. Write to Firestore first (this is the source of truth)
183
+ await db.collection('SignedInUsers')
184
+ .doc(String(userCid))
185
+ .collection('alerts')
186
+ .doc(notificationId)
187
+ .set(alertData);
188
+
189
+ // 2. Send FCM push notification (non-blocking, don't fail if push fails)
190
+ // This enables notifications even when user is offline
191
+ try {
192
+ await sendAlertPushNotification(db, userCid, alertData, logger);
193
+ } catch (fcmError) {
194
+ // Log but don't throw - FCM failure shouldn't fail the alert
195
+ logger.log('WARN', `[processAlertForPI] FCM push failed for CID ${userCid}: ${fcmError.message}`);
196
+ }
197
+
198
+ return { userCid, success: true };
199
+ })().catch(err => {
183
200
  logger.log('ERROR', `[processAlertForPI] Failed to write alert for CID ${userCid}: ${err.message}`, err);
184
201
  throw err; // Re-throw so we know if writes are failing
185
202
  });
186
203
 
187
- notificationPromises.push(writePromise);
204
+ notificationPromises.push(writeAndPushPromise);
188
205
  }
189
206
 
190
- // Wait for all notifications to be written
207
+ // Wait for all notifications to be written and push notifications sent
191
208
  await Promise.all(notificationPromises);
192
209
 
193
210
  // 5. Notify the PI themselves if they are a signed-in user (Optional feature)
@@ -2,10 +2,12 @@
2
2
  * @fileoverview Alert System Trigger Handler
3
3
  * Can be triggered via Pub/Sub when computation results are written
4
4
  * [UPDATED] Now uses dynamic manifest loading instead of hardcoded registry
5
+ * [UPDATED] Now sends FCM push notifications for all alert types
5
6
  */
6
7
 
7
8
  const { loadAlertTypesFromManifest, getAlertTypeByComputation, isAlertComputation } = require('./helpers/alert_manifest_loader');
8
9
  const { processAlertForPI, readComputationResults, readComputationResultsWithShards } = require('./helpers/alert_helpers');
10
+ const { sendAlertPushNotification } = require('../core/utils/fcm_utils');
9
11
 
10
12
  // Cache for loaded alert types (loaded once per function instance)
11
13
  let cachedAlertTypes = null;
@@ -463,12 +465,21 @@ async function sendAllClearNotification(db, logger, userCid, piCid, piUsername,
463
465
  computationDate: date
464
466
  };
465
467
 
468
+ // 1. Write to Firestore (source of truth)
466
469
  await db.collection('SignedInUsers')
467
470
  .doc(String(userCid))
468
471
  .collection('alerts')
469
472
  .doc(notificationId)
470
473
  .set(alertData);
471
474
 
475
+ // 2. Send FCM push notification (non-blocking)
476
+ try {
477
+ await sendAlertPushNotification(db, userCid, alertData, logger);
478
+ } catch (fcmError) {
479
+ // Log but don't throw - FCM failure shouldn't fail the alert
480
+ logger.log('WARN', `[sendAllClearNotification] FCM push failed for user ${userCid}: ${fcmError.message}`);
481
+ }
482
+
472
483
  logger.log('INFO', `[sendAllClearNotification] Sent all-clear notification to user ${userCid} for PI ${piCid}`);
473
484
 
474
485
  } catch (error) {
@@ -0,0 +1,261 @@
1
/**
 * @fileoverview FCM (Firebase Cloud Messaging) Token Management Routes
 *
 * Handles registration and unregistration of FCM tokens for push notifications.
 * Tokens are associated with user CIDs and stored in Firestore.
 *
 * Storage: SignedInUsers/{cid}/fcm_tokens/{tokenId}
 */

const express = require('express');
const { z } = require('zod');
const {
  registerFCMToken,
  unregisterFCMToken,
  getUserFCMTokens,
  touchToken // NOTE(review): imported but not referenced anywhere in this file — confirm before removing
} = require('../../core/utils/fcm_utils');

const router = express.Router();

// Request-body validation schemas.
// FCM tokens are typically 150-180 chars; the bounds reject obvious junk input.
const registerTokenSchema = z.object({
  token: z.string().min(100).max(500),
  platform: z.enum(['web', 'ios', 'android']).optional().default('web'),
  userAgent: z.string().max(500).optional()
});

const unregisterTokenSchema = z.object({
  token: z.string().min(100).max(500)
});
31
+
32
/**
 * POST /fcm/register-token
 *
 * Register an FCM token for the authenticated user.
 * Called by the frontend after login when the user grants notification permissions.
 *
 * Body:
 *  - token     {string} required — the FCM token from getToken()
 *  - platform  {string} optional — 'web' | 'ios' | 'android' (default 'web')
 *  - userAgent {string} optional — browser/device user agent
 *
 * Responds 200 with { success, tokenId, message }, 401 when unauthenticated,
 * 400 with Zod details on invalid input; other errors go to next().
 */
router.post('/register-token', async (req, res, next) => {
  try {
    const body = registerTokenSchema.parse(req.body);

    // CID is attached upstream by the auth middleware.
    const userCid = req.targetUserId;
    if (!userCid) {
      return res.status(401).json({
        success: false,
        error: 'Authentication required',
        message: 'You must be logged in to register for push notifications'
      });
    }

    const { db, logger } = req.dependencies;

    // Fall back to the request's User-Agent header when the client didn't send one.
    const metadata = {
      platform: body.platform,
      userAgent: body.userAgent || req.headers['user-agent']
    };

    const result = await registerFCMToken(db, userCid, body.token, metadata, logger);

    logger.log('INFO', `[FCM API] Token registered for user ${userCid}`);

    res.json({
      success: true,
      tokenId: result.tokenId,
      message: 'Push notifications enabled'
    });

  } catch (error) {
    if (error instanceof z.ZodError) {
      return res.status(400).json({
        success: false,
        error: 'Invalid input',
        details: error.errors
      });
    }
    next(error);
  }
});
95
+
96
/**
 * POST /fcm/unregister-token
 *
 * Unregister an FCM token (e.g. on logout or when the user disables notifications).
 *
 * Body:
 *  - token {string} required — the FCM token to unregister
 *
 * Responds 200 with { success, message }, 401 when unauthenticated,
 * 400 with Zod details on invalid input.
 */
router.post('/unregister-token', async (req, res, next) => {
  try {
    const body = unregisterTokenSchema.parse(req.body);

    const userCid = req.targetUserId;
    if (!userCid) {
      return res.status(401).json({
        success: false,
        error: 'Authentication required'
      });
    }

    const { db, logger } = req.dependencies;

    // Result intentionally ignored: unregistration is best-effort in fcm_utils.
    await unregisterFCMToken(db, userCid, body.token, logger);

    logger.log('INFO', `[FCM API] Token unregistered for user ${userCid}`);

    res.json({
      success: true,
      message: 'Push notifications disabled for this device'
    });

  } catch (error) {
    if (error instanceof z.ZodError) {
      return res.status(400).json({
        success: false,
        error: 'Invalid input',
        details: error.errors
      });
    }
    next(error);
  }
});
144
+
145
/**
 * GET /fcm/tokens
 *
 * List the authenticated user's registered FCM tokens, sanitized — the raw
 * token values are never returned to the client. Useful for debugging and
 * for showing which devices have notifications enabled.
 *
 * Responds 200 with:
 *  - success: boolean
 *  - count:   number
 *  - tokens:  Array<{tokenId, platform}>
 */
router.get('/tokens', async (req, res, next) => {
  try {
    const userCid = req.targetUserId;
    if (!userCid) {
      return res.status(401).json({
        success: false,
        error: 'Authentication required'
      });
    }

    const { db, logger } = req.dependencies;

    const tokens = await getUserFCMTokens(db, userCid, logger);

    // Strip the raw token value before anything leaves the server.
    const sanitizedTokens = tokens.map(({ tokenId, platform }) => ({ tokenId, platform }));

    res.json({
      success: true,
      count: sanitizedTokens.length,
      tokens: sanitizedTokens
    });

  } catch (error) {
    next(error);
  }
});
188
+
189
// Hoisted to module scope: previously this schema was rebuilt on every request
// inside the handler, which is wasted work for an invariant object.
const refreshTokenSchema = z.object({
  oldToken: z.string().min(100).max(500).optional(),
  newToken: z.string().min(100).max(500),
  platform: z.enum(['web', 'ios', 'android']).optional().default('web')
});

/**
 * POST /fcm/refresh-token
 *
 * Called when an FCM token is refreshed by the client. Optionally replaces
 * the old token with the new one, then registers the new token (which also
 * bumps its lastUsedAt timestamp).
 *
 * Body:
 *  - oldToken {string} optional — previous token to replace
 *  - newToken {string} required — new FCM token
 *  - platform {string} optional — platform identifier (default 'web')
 *
 * Responds 200 with { success, tokenId, message }, 401 when unauthenticated,
 * 400 with Zod details on invalid input.
 */
router.post('/refresh-token', async (req, res, next) => {
  try {
    const validated = refreshTokenSchema.parse(req.body);

    const userCid = req.targetUserId;
    if (!userCid) {
      return res.status(401).json({
        success: false,
        error: 'Authentication required'
      });
    }

    const { db, logger } = req.dependencies;

    // Replace semantics: drop the old token first (best-effort inside
    // unregisterFCMToken), then register the new one.
    if (validated.oldToken) {
      await unregisterFCMToken(db, userCid, validated.oldToken, logger);
    }

    const result = await registerFCMToken(
      db,
      userCid,
      validated.newToken,
      {
        platform: validated.platform,
        userAgent: req.headers['user-agent']
      },
      logger
    );

    logger.log('INFO', `[FCM API] Token refreshed for user ${userCid}`);

    res.json({
      success: true,
      tokenId: result.tokenId,
      message: 'Token refreshed successfully'
    });

  } catch (error) {
    if (error instanceof z.ZodError) {
      return res.status(400).json({
        success: false,
        error: 'Invalid input',
        details: error.errors
      });
    }
    next(error);
  }
});

module.exports = router;
@@ -4,7 +4,8 @@ const { verifyFirebaseToken } = require('../middleware/firebase_auth_middleware.
4
4
  const { handleSyncRequest } = require('./sync.js');
5
5
 
6
6
  const notificationRoutes = require('./notifications.js');
7
- const alertsRoutes = require('./alerts.js'); // <--- NEW
7
+ const alertsRoutes = require('./alerts.js');
8
+ const fcmRoutes = require('./fcm.js'); // FCM push notification token management
8
9
  const verificationRoutes = require('./verification.js');
9
10
  const profileRoutes = require('./profile.js');
10
11
  const piRoutes = require('./popular_investors.js');
@@ -29,7 +30,8 @@ module.exports = (dependencies) => {
29
30
  router.use(resolveUserIdentity);
30
31
 
31
32
  router.use('/notifications', notificationRoutes);
32
- router.use('/alerts', alertsRoutes); // <--- NEW
33
+ router.use('/alerts', alertsRoutes);
34
+ router.use('/fcm', fcmRoutes); // FCM push notification token management
33
35
  router.use('/verification', verificationRoutes);
34
36
  router.use('/profile', profileRoutes);
35
37
  router.use('/popular-investors', piRoutes);
@@ -231,7 +231,6 @@ function buildManifest(productLinesToRun = [], calculations) {
231
231
  const manifestEntry = {
232
232
  name: normalizedName,
233
233
  class: Class,
234
- // [CHANGED] Strictly use the folderName as the category.
235
234
  category: folderName,
236
235
  sourcePackage: folderName,
237
236
  type: metadata.type,
@@ -247,7 +246,6 @@ function buildManifest(productLinesToRun = [], calculations) {
247
246
  userType: metadata.userType,
248
247
  dependencies: dependencies,
249
248
  schedule: metadata.schedule || null,
250
- // [NEW] Added TTL Policy to Manifest
251
249
  ttlDays: metadata.ttlDays,
252
250
  pass: 0,
253
251
  hash: intrinsicHash,
@@ -130,7 +130,7 @@ class MetaExecutor {
130
130
  previousComputedDependencies: prevDeps, config, deps,
131
131
  allRankings: rankings,
132
132
  allRankingsYesterday: rankingsYesterday,
133
- piMasterList, // <--- INJECTED HERE
133
+ piMasterList,
134
134
  ...variableRoots,
135
135
  seriesData
136
136
  };
@@ -0,0 +1,352 @@
1
/**
 * @fileoverview Firebase Cloud Messaging (FCM) Utilities
 *
 * Push-notification delivery via FCM. Works in the background even when the
 * user is offline. Supports web push today; mobile platforms share the same
 * token storage.
 *
 * Token Storage: SignedInUsers/{cid}/fcm_tokens/{tokenId}
 */

const admin = require('firebase-admin');

// Subcollection under SignedInUsers/{cid} holding one document per device token.
const FCM_TOKENS_SUBCOLLECTION = 'fcm_tokens';
15
+
16
/**
 * Register (or re-register) an FCM token for a user.
 *
 * The document ID is a hash of the token, so registering the same token twice
 * updates the existing document instead of creating a duplicate.
 *
 * @param {Firestore} db - Firestore instance
 * @param {string|number} userCid - User CID
 * @param {string} token - FCM token from the client
 * @param {object} [metadata] - Optional { platform, userAgent }
 * @param {object} [logger] - Logger instance (defaults to console)
 * @returns {Promise<{success: boolean, tokenId: string}>}
 * @throws {Error} If token is missing or not a string, or the write fails.
 */
async function registerFCMToken(db, userCid, token, metadata = {}, logger = console) {
  if (!token || typeof token !== 'string') {
    throw new Error('Invalid FCM token');
  }

  const cid = String(userCid);
  const tokenId = hashToken(token);

  // NOTE(review): with { merge: true }, re-registration overwrites createdAt
  // with a fresh server timestamp — confirm this is intended.
  const tokenDoc = {
    token,
    platform: metadata.platform || 'web',
    userAgent: metadata.userAgent || null,
    createdAt: admin.firestore.FieldValue.serverTimestamp(),
    lastUsedAt: admin.firestore.FieldValue.serverTimestamp(),
    active: true
  };

  try {
    await db.collection('SignedInUsers')
      .doc(cid)
      .collection(FCM_TOKENS_SUBCOLLECTION)
      .doc(tokenId)
      .set(tokenDoc, { merge: true });

    logger.log('INFO', `[FCM] Registered token for user ${cid} (tokenId: ${tokenId.substring(0, 8)}...)`);

    return { success: true, tokenId };
  } catch (error) {
    logger.log('ERROR', `[FCM] Failed to register token for user ${cid}: ${error.message}`);
    throw error;
  }
}
59
+
60
/**
 * Remove a user's FCM token document (e.g. on logout).
 *
 * Deletion failure is non-critical: it is logged and reported via the return
 * value rather than thrown.
 *
 * @param {Firestore} db - Firestore instance
 * @param {string|number} userCid - User CID
 * @param {string} token - FCM token to remove
 * @param {object} [logger] - Logger instance (defaults to console)
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function unregisterFCMToken(db, userCid, token, logger = console) {
  const cid = String(userCid);
  // Kept outside the try: an invalid token argument should surface to the caller.
  const tokenId = hashToken(token);

  try {
    await db.collection('SignedInUsers')
      .doc(cid)
      .collection(FCM_TOKENS_SUBCOLLECTION)
      .doc(tokenId)
      .delete();

    logger.log('INFO', `[FCM] Unregistered token for user ${cid}`);
    return { success: true };
  } catch (error) {
    logger.log('WARN', `[FCM] Failed to unregister token for user ${cid}: ${error.message}`);
    return { success: false, error: error.message };
  }
}
88
+
89
/**
 * Fetch all active FCM tokens for a user.
 *
 * Errors are swallowed (logged at WARN) and an empty array is returned, so
 * callers in the notification path never fail because of a token lookup.
 *
 * @param {Firestore} db - Firestore instance
 * @param {string|number} userCid - User CID
 * @param {object} [logger] - Logger instance (defaults to console)
 * @returns {Promise<Array<{tokenId: string, token: string, platform: string}>>}
 */
async function getUserFCMTokens(db, userCid, logger = console) {
  const cid = String(userCid);

  try {
    const snapshot = await db.collection('SignedInUsers')
      .doc(cid)
      .collection(FCM_TOKENS_SUBCOLLECTION)
      .where('active', '==', true)
      .get();

    if (snapshot.empty) {
      return [];
    }

    return snapshot.docs.map((doc) => {
      const { token, platform } = doc.data();
      return { tokenId: doc.id, token, platform };
    });
  } catch (error) {
    logger.log('WARN', `[FCM] Failed to get tokens for user ${cid}: ${error.message}`);
    return [];
  }
}
120
+
121
/**
 * Send a push notification to every registered device of one user.
 *
 * Silently succeeds (sent: 0) when the user has no registered tokens.
 * Tokens that FCM reports as invalid/unregistered are cleaned up
 * asynchronously; other per-device failures are logged at WARN.
 *
 * @param {Firestore} db - Firestore instance
 * @param {string|number} userCid - User CID
 * @param {object} notification - Payload
 * @param {string} notification.title - Notification title
 * @param {string} notification.body - Notification body
 * @param {string} [notification.icon] - Icon URL
 * @param {string} [notification.clickAction] - URL to open on click
 * @param {boolean} [notification.requireInteraction] - Web push sticky flag
 * @param {object} [data] - Additional data payload
 * @param {object} [logger] - Logger instance (defaults to console)
 * @returns {Promise<{success: boolean, sent: number, failed: number, reason?: string, error?: string}>}
 */
async function sendPushNotification(db, userCid, notification, data = {}, logger = console) {
  const cid = String(userCid);

  const tokenData = await getUserFCMTokens(db, cid, logger);
  if (tokenData.length === 0) {
    logger.log('DEBUG', `[FCM] No FCM tokens registered for user ${cid}, skipping push`);
    return { success: true, sent: 0, failed: 0, reason: 'no_tokens' };
  }

  const tokens = tokenData.map((t) => t.token);
  const link = notification.clickAction || '/';

  // Multicast message: base notification + data + web-push specific options.
  const message = {
    notification: {
      title: notification.title,
      body: notification.body,
      ...(notification.icon && { imageUrl: notification.icon })
    },
    data: {
      ...data,
      clickAction: link,
      timestamp: new Date().toISOString()
    },
    webpush: {
      notification: {
        icon: notification.icon || '/icons/notification-icon.png',
        badge: '/icons/badge-icon.png',
        requireInteraction: notification.requireInteraction || false
      },
      fcmOptions: {
        link
      }
    },
    tokens
  };

  try {
    const response = await admin.messaging().sendEachForMulticast(message);

    const sent = response.successCount;
    const failed = response.failureCount;

    if (response.failureCount > 0) {
      const staleTokens = [];

      response.responses.forEach((resp, idx) => {
        if (resp.success) return;

        // These error codes mean the token is permanently dead.
        const errorCode = resp.error?.code;
        if (errorCode === 'messaging/invalid-registration-token' ||
            errorCode === 'messaging/registration-token-not-registered') {
          staleTokens.push(tokenData[idx]);
        } else {
          logger.log('WARN', `[FCM] Send failed for user ${cid}: ${resp.error?.message}`);
        }
      });

      // Fire-and-forget cleanup; its failure must not affect the send result.
      if (staleTokens.length > 0) {
        cleanupStaleTokens(db, cid, staleTokens, logger).catch(() => {});
      }
    }

    logger.log('INFO', `[FCM] Sent push to user ${cid}: ${sent} sent, ${failed} failed`);

    return { success: true, sent, failed };
  } catch (error) {
    logger.log('ERROR', `[FCM] Failed to send push to user ${cid}: ${error.message}`);
    return { success: false, sent: 0, failed: tokens.length, error: error.message };
  }
}
210
+
211
/**
 * Broadcast a push notification to many users, in batches.
 *
 * @param {Firestore} db - Firestore instance
 * @param {Array<string|number>} userCids - Recipient user CIDs
 * @param {object} notification - Same payload shape as sendPushNotification
 * @param {object} [data] - Additional data payload
 * @param {object} [logger] - Logger instance (defaults to console)
 * @returns {Promise<{success: boolean, results: {total: number, sent: number, failed: number, noTokens: number}}>}
 */
async function sendPushNotificationToMany(db, userCids, notification, data = {}, logger = console) {
  // Bound concurrent per-user sends so we don't overwhelm FCM.
  const BATCH_SIZE = 50;

  const results = {
    total: userCids.length,
    sent: 0,
    failed: 0,
    noTokens: 0
  };

  for (let offset = 0; offset < userCids.length; offset += BATCH_SIZE) {
    const slice = userCids.slice(offset, offset + BATCH_SIZE);

    const batchOutcomes = await Promise.all(
      slice.map(async (cid) => {
        const outcome = await sendPushNotification(db, cid, notification, data, logger);
        return { cid, ...outcome };
      })
    );

    for (const outcome of batchOutcomes) {
      if (outcome.reason === 'no_tokens') {
        results.noTokens += 1;
      } else {
        results.sent += outcome.sent;
        results.failed += outcome.failed;
      }
    }
  }

  logger.log('INFO', `[FCM] Batch send complete: ${results.sent} sent, ${results.failed} failed, ${results.noTokens} users without tokens`);

  return { success: true, results };
}
255
+
256
/**
 * Convenience wrapper for the alert system: build a push payload from an
 * alert document and deliver it via sendPushNotification.
 *
 * @param {Firestore} db - Firestore instance
 * @param {string|number} userCid - User CID
 * @param {object} alertData - Alert document produced by the alert system
 * @param {object} [logger] - Logger instance (defaults to console)
 * @returns {Promise<{success: boolean}>} Result from sendPushNotification
 */
async function sendAlertPushNotification(db, userCid, alertData, logger = console) {
  // High-severity alerts stay on screen until dismissed (web push only).
  const notification = {
    title: alertData.alertTypeName || 'BullTrackers Alert',
    body: alertData.message || `New alert for ${alertData.piUsername}`,
    icon: '/icons/alert-icon.png',
    clickAction: `/alerts/${alertData.alertId}`,
    requireInteraction: alertData.severity === 'high'
  };

  // FCM data values must be strings; missing fields default to '' and the
  // CID is coerced explicitly.
  const data = {
    type: 'alert',
    alertId: alertData.alertId || '',
    alertType: alertData.alertType || '',
    piCid: String(alertData.piCid || ''),
    piUsername: alertData.piUsername || '',
    severity: alertData.severity || 'medium',
    watchlistId: alertData.watchlistId || ''
  };

  return sendPushNotification(db, userCid, notification, data, logger);
}
285
+
286
/**
 * Delete Firestore documents for tokens FCM reported as invalid.
 *
 * Best-effort: a failed commit is logged at WARN, never thrown, since this
 * runs fire-and-forget from the send path.
 *
 * @param {Firestore} db - Firestore instance
 * @param {string} userCid - User CID
 * @param {Array<{tokenId: string}>} staleTokens - Token docs to remove
 * @param {object} [logger] - Logger instance (defaults to console)
 */
async function cleanupStaleTokens(db, userCid, staleTokens, logger = console) {
  const tokensCollection = db.collection('SignedInUsers')
    .doc(userCid)
    .collection(FCM_TOKENS_SUBCOLLECTION);

  // One batched write for all deletions.
  const batch = db.batch();
  for (const { tokenId } of staleTokens) {
    batch.delete(tokensCollection.doc(tokenId));
  }

  try {
    await batch.commit();
    logger.log('INFO', `[FCM] Cleaned up ${staleTokens.length} stale tokens for user ${userCid}`);
  } catch (error) {
    logger.log('WARN', `[FCM] Failed to cleanup stale tokens: ${error.message}`);
  }
}
312
+
313
/**
 * Bump lastUsedAt on a token document.
 *
 * Update errors are deliberately ignored (the document may not exist);
 * this is usage telemetry, not authoritative state.
 *
 * @param {Firestore} db - Firestore instance
 * @param {string|number} userCid - User CID
 * @param {string} token - FCM token (hashed to locate the document)
 */
async function touchToken(db, userCid, token) {
  const tokenRef = db.collection('SignedInUsers')
    .doc(String(userCid))
    .collection(FCM_TOKENS_SUBCOLLECTION)
    .doc(hashToken(token));

  await tokenRef
    .update({ lastUsedAt: admin.firestore.FieldValue.serverTimestamp() })
    .catch(() => {}); // best-effort by design
}
332
+
333
/**
 * Derive a stable document ID from an FCM token.
 *
 * Hashing keeps the raw token out of document IDs and makes re-registration
 * of the same token idempotent.
 *
 * @param {string} token - FCM token
 * @returns {string} First 32 hex characters of the SHA-256 digest
 */
function hashToken(token) {
  const { createHash } = require('crypto');
  return createHash('sha256').update(token).digest('hex').substring(0, 32);
}
342
+
343
+ module.exports = {
344
+ registerFCMToken,
345
+ unregisterFCMToken,
346
+ getUserFCMTokens,
347
+ sendPushNotification,
348
+ sendPushNotificationToMany,
349
+ sendAlertPushNotification,
350
+ touchToken,
351
+ FCM_TOKENS_SUBCOLLECTION
352
+ };
@@ -112,16 +112,17 @@ exports.fetchAndStorePrices = async (config, dependencies) => {
112
112
  }
113
113
  }
114
114
 
115
- // Write to BigQuery using load jobs (free, batched)
115
+ // Write to BigQuery using MERGE (prevents duplicates if fetcher runs twice on same day)
116
116
  if (process.env.BIGQUERY_ENABLED !== 'false' && bigqueryRows.length > 0) {
117
117
  try {
118
- const { insertRows, ensureAssetPricesTable } = require('../../core/utils/bigquery_utils');
118
+ const { insertRowsWithMerge, ensureAssetPricesTable } = require('../../core/utils/bigquery_utils');
119
119
  await ensureAssetPricesTable(logger);
120
120
 
121
121
  const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
122
- await insertRows(datasetId, 'asset_prices', bigqueryRows, logger);
122
+ // Use MERGE with dedup keys to prevent duplicates on re-runs
123
+ await insertRowsWithMerge(datasetId, 'asset_prices', bigqueryRows, ['date', 'instrument_id'], logger);
123
124
 
124
- logger.log('INFO', `[PriceFetcherHelpers] Successfully stored ${bigqueryRows.length} daily price records to BigQuery`);
125
+ logger.log('INFO', `[PriceFetcherHelpers] Successfully stored ${bigqueryRows.length} daily price records to BigQuery [MERGE]`);
125
126
  } catch (bqError) {
126
127
  logger.log('ERROR', `[PriceFetcherHelpers] BigQuery write failed: ${bqError.message}`);
127
128
  // Continue - don't fail the entire fetch for BigQuery errors
@@ -90,10 +90,10 @@ exports.fetchAndStoreInsights = async (config, dependencies) => {
90
90
  let storageSuccess = false;
91
91
  let storageMethod = 'NONE';
92
92
 
93
- // 1. Attempt BigQuery Write
93
+ // 1. Attempt BigQuery Write (using MERGE to prevent duplicates on re-runs)
94
94
  if (process.env.BIGQUERY_ENABLED !== 'false') {
95
95
  try {
96
- const { insertRows, ensureInstrumentInsightsTable } = require('../../core/utils/bigquery_utils');
96
+ const { insertRowsWithMerge, ensureInstrumentInsightsTable } = require('../../core/utils/bigquery_utils');
97
97
  await ensureInstrumentInsightsTable(logger);
98
98
 
99
99
  const fetchedAt = new Date().toISOString();
@@ -107,9 +107,10 @@ exports.fetchAndStoreInsights = async (config, dependencies) => {
107
107
  });
108
108
 
109
109
  const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
110
- await insertRows(datasetId, 'instrument_insights', bigqueryRows, logger);
110
+ // Use MERGE with dedup keys to prevent duplicates if fetch runs twice on same day
111
+ await insertRowsWithMerge(datasetId, 'instrument_insights', bigqueryRows, ['date', 'instrument_id'], logger);
111
112
 
112
- logger.log('INFO', `[FetchInsightsHelpers] ✅ Successfully stored ${bigqueryRows.length} insight records to BigQuery.`);
113
+ logger.log('INFO', `[FetchInsightsHelpers] ✅ Successfully stored ${bigqueryRows.length} insight records to BigQuery [MERGE].`);
113
114
  storageSuccess = true;
114
115
  storageMethod = 'BIGQUERY';
115
116
  } catch (bqError) {
@@ -124,19 +124,48 @@ async function fetchAndStorePopularInvestors(config, dependencies) {
124
124
  const fetchedCids = new Set(data.Items.map(item => String(item.CustomerId)));
125
125
  const knownCids = Object.keys(bqMasterList);
126
126
  const missingCids = knownCids.filter(cid => !fetchedCids.has(cid));
127
+
128
+ logger.log('INFO', `[PopularInvestorFetch] Comparison: API returned ${fetchedCids.size} PIs, Master List has ${knownCids.length} known PIs, ${missingCids.length} missing from API response`);
127
129
 
128
130
  if (missingCids.length > 0) {
131
+ // Log the missing users with their usernames for visibility
132
+ const missingUsernames = missingCids.map(cid => {
133
+ const userData = bqMasterList[cid];
134
+ return userData?.username || `unknown_${cid}`;
135
+ });
129
136
  logger.log('INFO', `[PopularInvestorFetch] Found ${missingCids.length} missing users. Fetching individually...`);
137
+ logger.log('INFO', `[PopularInvestorFetch] Missing users: ${missingUsernames.slice(0, 20).join(', ')}${missingCids.length > 20 ? ` ... and ${missingCids.length - 20} more` : ''}`);
138
+
130
139
  const { header } = await headerManager.selectHeader();
131
140
 
132
- for (const cid of missingCids) {
141
+ let successCount = 0;
142
+ let failCount = 0;
143
+
144
+ for (let i = 0; i < missingCids.length; i++) {
145
+ const cid = missingCids[i];
146
+ const username = bqMasterList[cid]?.username || `unknown_${cid}`;
147
+
148
+ // Log progress every 10 users or at start
149
+ if (i === 0 || (i + 1) % 10 === 0) {
150
+ logger.log('INFO', `[PopularInvestorFetch] Fetching missing user ${i + 1}/${missingCids.length}: ${username} (CID: ${cid})`);
151
+ }
152
+
133
153
  const userData = await fetchIndividualUserRankings(cid, { ...header, 'Referer': 'https://www.etoro.com/' }, proxyManager, logger);
134
154
  if (userData) {
135
155
  data.Items.push(userData);
136
156
  data.TotalRows++;
157
+ successCount++;
158
+ logger.log('TRACE', `[PopularInvestorFetch] ✅ Successfully fetched ${username} (CID: ${cid})`);
159
+ } else {
160
+ failCount++;
161
+ logger.log('WARN', `[PopularInvestorFetch] ❌ Failed to fetch ${username} (CID: ${cid})`);
137
162
  }
138
163
  await new Promise(r => setTimeout(r, 200)); // Rate limit
139
164
  }
165
+
166
+ logger.log('INFO', `[PopularInvestorFetch] Missing user fetch complete: ${successCount} recovered, ${failCount} failed`);
167
+ } else {
168
+ logger.log('INFO', `[PopularInvestorFetch] No missing users - all ${knownCids.length} known PIs were returned by the API`);
140
169
  }
141
170
  } catch (e) {
142
171
  logger.log('WARN', `[PopularInvestorFetch] Failed to check master list: ${e.message}`);
@@ -191,9 +191,10 @@ async function backfillRankings(startDate = null, endDate = null, logger = conso
191
191
  };
192
192
  });
193
193
 
194
- // Write to BigQuery using load jobs (free, batched)
194
+ // Write to BigQuery using MERGE (prevents duplicates if backfill is re-run)
195
195
  const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
196
- await insertRows(datasetId, 'pi_rankings', bigqueryRows, logger);
196
+ // Use MERGE with dedup keys to prevent duplicates on re-runs
197
+ await insertRowsWithMerge(datasetId, 'pi_rankings', bigqueryRows, ['date', 'pi_id'], logger);
197
198
 
198
199
  totalRows += bigqueryRows.length;
199
200
  processedDates++;
@@ -68,16 +68,17 @@ exports.runBackfillAssetPrices = async (config, dependencies) => {
68
68
  };
69
69
  });
70
70
 
71
- // Write to BigQuery using load jobs (free, batched)
71
+ // Write to BigQuery using MERGE (prevents duplicates if backfill is re-run)
72
72
  if (process.env.BIGQUERY_ENABLED !== 'false') {
73
73
  try {
74
- const { insertRows, ensureAssetPricesTable } = require('../../core/utils/bigquery_utils');
74
+ const { insertRowsWithMerge, ensureAssetPricesTable } = require('../../core/utils/bigquery_utils');
75
75
  await ensureAssetPricesTable(logger);
76
76
 
77
77
  const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
78
- await insertRows(datasetId, 'asset_prices', bigqueryRows, logger);
78
+ // Use MERGE with dedup keys to prevent duplicates on re-runs
79
+ await insertRowsWithMerge(datasetId, 'asset_prices', bigqueryRows, ['date', 'instrument_id'], logger);
79
80
 
80
- logger.log('TRACE', `[PriceBackfill] Successfully stored ${bigqueryRows.length} price records for ${ticker} (${instrumentId}) to BigQuery`);
81
+ logger.log('TRACE', `[PriceBackfill] Successfully stored ${bigqueryRows.length} price records for ${ticker} (${instrumentId}) to BigQuery [MERGE]`);
81
82
  } catch (bqError) {
82
83
  logger.log('ERROR', `[PriceBackfill] BigQuery write failed for ${ticker} (${instrumentId}): ${bqError.message}`);
83
84
  // Continue - don't fail the entire backfill for one instrument
@@ -14,9 +14,14 @@ const {
14
14
  ensurePortfolioSnapshotsTable,
15
15
  ensureTradeHistorySnapshotsTable,
16
16
  ensureSocialPostSnapshotsTable,
17
- insertRows
17
+ insertRowsWithMerge
18
18
  } = require('../../core/utils/bigquery_utils');
19
19
 
20
+ // Deduplication keys for BigQuery MERGE operations
21
+ const PORTFOLIO_DEDUP_KEYS = ['date', 'user_id', 'user_type'];
22
+ const HISTORY_DEDUP_KEYS = ['date', 'user_id', 'user_type'];
23
+ const SOCIAL_DEDUP_KEYS = ['date', 'user_id', 'user_type'];
24
+
20
25
  /**
21
26
  * Store portfolio data for a signed-in user
22
27
  * @param {object} params - Parameters
@@ -47,9 +52,10 @@ async function storeSignedInUserPortfolio({ db, logger, collectionRegistry, cid,
47
52
  await bigqueryBatchManager.addPortfolioRow(row);
48
53
  } else {
49
54
  // Direct write (fallback for when batch manager not available)
55
+ // Uses MERGE to prevent duplicates if user is processed twice on same day
50
56
  await ensurePortfolioSnapshotsTable(logger);
51
- await insertRows(datasetId, 'portfolio_snapshots', [row], logger);
52
- logger.log('INFO', `[DataStorage] ✅ Wrote portfolio to BigQuery for signed-in user ${cid} (date: ${date})`);
57
+ await insertRowsWithMerge(datasetId, 'portfolio_snapshots', [row], PORTFOLIO_DEDUP_KEYS, logger);
58
+ logger.log('INFO', `[DataStorage] ✅ Wrote portfolio to BigQuery for signed-in user ${cid} (date: ${date}) [MERGE]`);
53
59
  }
54
60
  } catch (bqError) {
55
61
  logger.log('WARN', `[DataStorage] BigQuery write failed for signed-in user ${cid} (${date}): ${bqError.message}`);
@@ -106,9 +112,10 @@ async function storeSignedInUserTradeHistory({ db, logger, collectionRegistry, c
106
112
  await bigqueryBatchManager.addHistoryRow(row);
107
113
  } else {
108
114
  // Direct write (fallback for when batch manager not available)
115
+ // Uses MERGE to prevent duplicates if user is processed twice on same day
109
116
  await ensureTradeHistorySnapshotsTable(logger);
110
- await insertRows(datasetId, 'trade_history_snapshots', [row], logger);
111
- logger.log('INFO', `[DataStorage] ✅ Wrote trade history to BigQuery for signed-in user ${cid} (date: ${date})`);
117
+ await insertRowsWithMerge(datasetId, 'trade_history_snapshots', [row], HISTORY_DEDUP_KEYS, logger);
118
+ logger.log('INFO', `[DataStorage] ✅ Wrote trade history to BigQuery for signed-in user ${cid} (date: ${date}) [MERGE]`);
112
119
  }
113
120
  } catch (bqError) {
114
121
  logger.log('WARN', `[DataStorage] BigQuery write failed for signed-in user ${cid} (${date}): ${bqError.message}`);
@@ -169,8 +176,9 @@ async function storeSignedInUserSocialPosts({ db, logger, collectionRegistry, ci
169
176
  fetched_at: new Date().toISOString()
170
177
  };
171
178
 
172
- await insertRows(datasetId, 'social_post_snapshots', [row], logger);
173
- logger.log('INFO', `[DataStorage] ✅ Wrote social posts to BigQuery for signed-in user ${cid} (date: ${date}, ${posts.length} posts)`);
179
+ // Uses MERGE to prevent duplicates if user is processed twice on same day
180
+ await insertRowsWithMerge(datasetId, 'social_post_snapshots', [row], SOCIAL_DEDUP_KEYS, logger);
181
+ logger.log('INFO', `[DataStorage] ✅ Wrote social posts to BigQuery for signed-in user ${cid} (date: ${date}, ${posts.length} posts) [MERGE]`);
174
182
  } catch (bqError) {
175
183
  logger.log('WARN', `[DataStorage] BigQuery write failed for signed-in user ${cid} (${date}): ${bqError.message}`);
176
184
  // Continue to Firestore write (fallback)
@@ -258,9 +266,10 @@ async function storePopularInvestorPortfolio({ db, logger, collectionRegistry, c
258
266
  await bigqueryBatchManager.addPortfolioRow(row);
259
267
  } else {
260
268
  // Direct write (fallback for when batch manager not available)
269
+ // Uses MERGE to prevent duplicates if PI is processed twice on same day
261
270
  await ensurePortfolioSnapshotsTable(logger);
262
- await insertRows(datasetId, 'portfolio_snapshots', [row], logger);
263
- logger.log('INFO', `[DataStorage] ✅ Wrote portfolio to BigQuery for PI ${cid} (date: ${date})`);
271
+ await insertRowsWithMerge(datasetId, 'portfolio_snapshots', [row], PORTFOLIO_DEDUP_KEYS, logger);
272
+ logger.log('INFO', `[DataStorage] ✅ Wrote portfolio to BigQuery for PI ${cid} (date: ${date}) [MERGE]`);
264
273
  }
265
274
  } catch (bqError) {
266
275
  logger.log('WARN', `[DataStorage] BigQuery write failed for PI ${cid} (${date}): ${bqError.message}`);
@@ -316,9 +325,10 @@ async function storePopularInvestorTradeHistory({ db, logger, collectionRegistry
316
325
  await bigqueryBatchManager.addHistoryRow(row);
317
326
  } else {
318
327
  // Direct write (fallback for when batch manager not available)
328
+ // Uses MERGE to prevent duplicates if PI is processed twice on same day
319
329
  await ensureTradeHistorySnapshotsTable(logger);
320
- await insertRows(datasetId, 'trade_history_snapshots', [row], logger);
321
- logger.log('INFO', `[DataStorage] ✅ Wrote trade history to BigQuery for PI ${cid} (date: ${date})`);
330
+ await insertRowsWithMerge(datasetId, 'trade_history_snapshots', [row], HISTORY_DEDUP_KEYS, logger);
331
+ logger.log('INFO', `[DataStorage] ✅ Wrote trade history to BigQuery for PI ${cid} (date: ${date}) [MERGE]`);
322
332
  }
323
333
  } catch (bqError) {
324
334
  logger.log('WARN', `[DataStorage] BigQuery write failed for PI ${cid} (${date}): ${bqError.message}`);
@@ -382,9 +392,10 @@ async function storePopularInvestorSocialPosts({ db, logger, collectionRegistry,
382
392
  await bigqueryBatchManager.addSocialRow(row);
383
393
  } else {
384
394
  // Direct write (fallback for when batch manager not available)
395
+ // Uses MERGE to prevent duplicates if PI is processed twice on same day
385
396
  await ensureSocialPostSnapshotsTable(logger);
386
- await insertRows(datasetId, 'social_post_snapshots', [row], logger);
387
- logger.log('INFO', `[DataStorage] ✅ Wrote social posts to BigQuery for PI ${cid} (date: ${date}, ${posts.length} posts)`);
397
+ await insertRowsWithMerge(datasetId, 'social_post_snapshots', [row], SOCIAL_DEDUP_KEYS, logger);
398
+ logger.log('INFO', `[DataStorage] ✅ Wrote social posts to BigQuery for PI ${cid} (date: ${date}, ${posts.length} posts) [MERGE]`);
388
399
  }
389
400
  } catch (bqError) {
390
401
  logger.log('WARN', `[DataStorage] BigQuery write failed for PI ${cid} (${date}): ${bqError.message}`);
@@ -1,27 +1,30 @@
1
1
  /**
2
2
  * @fileoverview BigQuery Batch Manager for Task Engine
3
3
  *
4
- * Batches BigQuery writes and flushes them using LOAD JOBS (FREE).
4
+ * Batches BigQuery writes and flushes them using MERGE operations.
5
5
  * Integrates with FirestoreBatchManager to flush together.
6
6
  *
7
- * Lessons learned from backfill script:
8
- * - Use createLoadJob() not table.load()
9
- * - Use temp files for load jobs
10
- * - Wait for job completion with getMetadata() polling
11
- * - Handle errors gracefully
12
- * - Clean up temp files properly
7
+ * UPDATED: Now uses insertRowsWithMerge to prevent duplicate rows
8
+ * when a user is processed twice on the same day.
9
+ *
10
+ * Deduplication keys:
11
+ * - portfolio_snapshots: ['date', 'user_id', 'user_type']
12
+ * - trade_history_snapshots: ['date', 'user_id', 'user_type']
13
+ * - social_post_snapshots: ['date', 'user_id', 'user_type']
13
14
  */
14
15
 
15
- const fs = require('fs');
16
- const path = require('path');
17
- const os = require('os');
18
16
  const {
19
- getOrCreateDataset,
20
17
  ensurePortfolioSnapshotsTable,
21
18
  ensureTradeHistorySnapshotsTable,
22
- ensureSocialPostSnapshotsTable
19
+ ensureSocialPostSnapshotsTable,
20
+ insertRowsWithMerge
23
21
  } = require('../../core/utils/bigquery_utils');
24
22
 
23
+ // Deduplication keys for each table type
24
+ const PORTFOLIO_DEDUP_KEYS = ['date', 'user_id', 'user_type'];
25
+ const HISTORY_DEDUP_KEYS = ['date', 'user_id', 'user_type'];
26
+ const SOCIAL_DEDUP_KEYS = ['date', 'user_id', 'user_type'];
27
+
25
28
  class BigQueryBatchManager {
26
29
  constructor(logger) {
27
30
  this.logger = logger;
@@ -74,83 +77,31 @@ class BigQueryBatchManager {
74
77
  }
75
78
 
76
79
  /**
77
- * Flush a buffer to BigQuery using load job
78
- * Uses lessons from backfill: createLoadJob, temp files, proper polling
80
+ * Flush a buffer to BigQuery using MERGE operation
81
+ * Uses insertRowsWithMerge to prevent duplicates when a user is processed twice on same day
79
82
  */
80
- async _flushBuffer(buffer, tableId, tableName) {
83
+ async _flushBuffer(buffer, tableId, tableName, dedupKeys) {
81
84
  if (buffer.length === 0) return 0;
82
85
 
83
86
  const rows = [...buffer]; // Copy buffer
84
87
  buffer.length = 0; // Clear buffer
85
88
 
86
89
  try {
87
- const dataset = await getOrCreateDataset(this.datasetId, this.logger);
88
- const table = dataset.table(tableId);
89
-
90
- // Write to temporary file (load jobs require a file, not in-memory data)
91
- const tempFile = path.join(os.tmpdir(), `bigquery_${tableId}_${Date.now()}_${Math.random().toString(36).substring(7)}.ndjson`);
92
- const ndjson = rows.map(r => JSON.stringify(r)).join('\n');
90
+ // Use insertRowsWithMerge for deduplication
91
+ // This prevents duplicate rows if a user is processed twice on the same day
92
+ const rowsInserted = await insertRowsWithMerge(
93
+ this.datasetId,
94
+ tableId,
95
+ rows,
96
+ dedupKeys,
97
+ this.logger
98
+ );
93
99
 
94
- try {
95
- fs.writeFileSync(tempFile, ndjson, 'utf8');
96
-
97
- // Create load job (FREE) - using createLoadJob as learned from backfill
98
- const [job] = await table.createLoadJob(tempFile, {
99
- sourceFormat: 'NEWLINE_DELIMITED_JSON',
100
- writeDisposition: 'WRITE_APPEND',
101
- autodetect: false // Use existing table schema
102
- });
103
-
104
- // Wait for job to complete using polling (as learned from backfill)
105
- let jobMetadata;
106
- const maxAttempts = 60; // 5 minutes max (5 second intervals)
107
- const pollInterval = 5000; // 5 seconds
108
-
109
- for (let attempt = 0; attempt < maxAttempts; attempt++) {
110
- [jobMetadata] = await job.getMetadata();
111
- const state = jobMetadata.status?.state;
112
-
113
- if (state === 'DONE') {
114
- break;
115
- }
116
-
117
- if (state === 'PENDING' || state === 'RUNNING') {
118
- // Wait before next poll
119
- await new Promise(resolve => setTimeout(resolve, pollInterval));
120
- } else {
121
- throw new Error(`Unexpected job state: ${state}`);
122
- }
123
- }
124
-
125
- // Check if we timed out
126
- if (jobMetadata.status?.state !== 'DONE') {
127
- throw new Error(`Load job did not complete within ${maxAttempts * pollInterval / 1000} seconds`);
128
- }
129
-
130
- // Check for errors
131
- if (jobMetadata.status?.errorResult) {
132
- throw new Error(`Load job failed: ${jobMetadata.status.errorResult.message}`);
133
- }
134
-
135
- const rowsLoaded = jobMetadata.statistics?.load?.outputRows || rows.length;
136
-
137
- if (this.logger) {
138
- this.logger.log('INFO', `[BigQueryBatch] ✅ Flushed ${rowsLoaded} ${tableName} rows to BigQuery using LOAD JOB (free)`);
139
- }
140
-
141
- return rowsLoaded;
142
- } finally {
143
- // Clean up temp file (as learned from backfill)
144
- try {
145
- if (fs.existsSync(tempFile)) {
146
- fs.unlinkSync(tempFile);
147
- }
148
- } catch (cleanupError) {
149
- if (this.logger) {
150
- this.logger.log('WARN', `[BigQueryBatch] Failed to delete temp file ${tempFile}: ${cleanupError.message}`);
151
- }
152
- }
100
+ if (this.logger) {
101
+ this.logger.log('INFO', `[BigQueryBatch] ✅ Flushed ${rows.length} ${tableName} rows to BigQuery using MERGE (${rowsInserted} new, ${rows.length - rowsInserted} updated)`);
153
102
  }
103
+
104
+ return rows.length;
154
105
  } catch (error) {
155
106
  // Log error but don't throw - allow Firestore writes to continue
156
107
  if (this.logger) {
@@ -172,9 +123,9 @@ class BigQueryBatchManager {
172
123
  }
173
124
 
174
125
  const results = await Promise.allSettled([
175
- this._flushBuffer(this.portfolioBuffer, 'portfolio_snapshots', 'portfolio'),
176
- this._flushBuffer(this.historyBuffer, 'trade_history_snapshots', 'history'),
177
- this._flushBuffer(this.socialBuffer, 'social_post_snapshots', 'social')
126
+ this._flushBuffer(this.portfolioBuffer, 'portfolio_snapshots', 'portfolio', PORTFOLIO_DEDUP_KEYS),
127
+ this._flushBuffer(this.historyBuffer, 'trade_history_snapshots', 'history', HISTORY_DEDUP_KEYS),
128
+ this._flushBuffer(this.socialBuffer, 'social_post_snapshots', 'social', SOCIAL_DEDUP_KEYS)
178
129
  ]);
179
130
 
180
131
  const totalFlushed = results
package/index.js CHANGED
@@ -11,7 +11,6 @@ const { FirestoreBatchManager } = require('./functions/task-engine/utils/fire
11
11
  const firestoreUtils = require('./functions/core/utils/firestore_utils');
12
12
 
13
13
  // Orchestrator
14
- // [UPDATED] Imported handleOrchestratorHttp
15
14
  const {
16
15
  runDiscoveryOrchestrator,
17
16
  runUpdateOrchestrator,
@@ -47,19 +46,15 @@ const dataLoader = require('./functions
47
46
  const computationUtils = require('./functions/computation-system/utils/utils');
48
47
 
49
48
 
50
- // API v2 (CommonJS)
51
49
  const { createApiV2App } = require('./functions/api-v2/index');
52
50
 
53
- // Maintenance & Data Acquisition
54
51
  const { fetchAndStoreInsights } = require('./functions/fetch-insights/helpers/handler_helpers');
55
52
  const { fetchAndStorePrices } = require('./functions/etoro-price-fetcher/helpers/handler_helpers');
56
53
  const { runSocialOrchestrator } = require('./functions/social-orchestrator/helpers/orchestrator_helpers');
57
54
  const { handleSocialTask } = require('./functions/social-task-handler/helpers/handler_helpers');
58
55
  const { runBackfillAssetPrices } = require('./functions/price-backfill/helpers/handler_helpers');
59
56
  const { runRootDataIndexer } = require('./functions/root-data-indexer/index');
60
- // [NEW] Popular Investor Fetcher
61
57
  const { runPopularInvestorFetch } = require('./functions/fetch-popular-investors/index');
62
- // [NEW] Backfill Task Engine Data
63
58
  const { backfillTaskEngineData } = require('./functions/maintenance/backfill-task-engine-data/index');
64
59
  const { backfillPIMasterListRankings } = require('./functions/maintenance/backfill-pi-master-list-rankings/index');
65
60
  const { backfillInstrumentInsights } = require('./functions/maintenance/backfill-instrument-insights/index');
@@ -86,7 +81,6 @@ const core = {
86
81
  };
87
82
 
88
83
  const orchestrator = {
89
- // [UPDATED] Exported handleOrchestratorHttp so it can be mapped in Cloud Functions
90
84
  handleOrchestratorHttp,
91
85
  runDiscoveryOrchestrator,
92
86
  runUpdateOrchestrator,
@@ -122,7 +116,7 @@ const computationSystem = {
122
116
  };
123
117
 
124
118
  const api = {
125
- createApiV2App, // New API v2 entry point
119
+ createApiV2App,
126
120
  };
127
121
 
128
122
  const maintenance = {
@@ -132,9 +126,7 @@ const maintenance = {
132
126
  handleSocialTask,
133
127
  runBackfillAssetPrices,
134
128
  runRootDataIndexer,
135
- // [NEW] Added to maintenance pipe
136
129
  runPopularInvestorFetch,
137
- // [NEW] BigQuery backfills
138
130
  backfillTaskEngineData,
139
131
  backfillPIMasterListRankings,
140
132
  backfillInstrumentInsights,
@@ -149,8 +141,8 @@ const maintenance = {
149
141
  const proxy = { handlePost };
150
142
 
151
143
  const alertSystem = {
152
- handleAlertTrigger, // Pub/Sub handler (kept for backward compatibility, but not used)
153
- handleComputationResultWrite, // Firestore trigger handler - main entry point
144
+ handleAlertTrigger,
145
+ handleComputationResultWrite,
154
146
  checkAndSendAllClearNotifications
155
147
  };
156
148
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bulltrackers-module",
3
- "version": "1.0.724",
3
+ "version": "1.0.726",
4
4
  "description": "Helper Functions for Bulltrackers.",
5
5
  "main": "index.js",
6
6
  "files": [