bulltrackers-module 1.0.467 → 1.0.469

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -112,6 +112,13 @@ async function handleAlertTrigger(message, context, config, dependencies) {
 
     logger.log('SUCCESS', `[AlertTrigger] Completed processing ${computationName}: ${processedCount} successful, ${errorCount} errors`);
 
+    // After processing alerts, check for "all clear" notifications for static watchlists
+    // This runs after all alert computations are processed
+    if (processedCount > 0 || results.cids.length > 0) {
+      // Check if any PIs were processed but didn't trigger alerts
+      await checkAndSendAllClearNotifications(db, logger, results.cids, date, config);
+    }
+
   } catch (error) {
     logger.log('ERROR', '[AlertTrigger] Fatal error processing alert trigger', error);
     throw error; // Re-throw for Pub/Sub retry
@@ -134,12 +141,23 @@ async function handleComputationResultWrite(change, context, config, dependencie
   }
 
   try {
-    // 1. Check if this is an alert computation
-    if (!isAlertComputation(computationName)) {
-      logger.log('DEBUG', `[AlertTrigger] Not an alert computation: ${computationName}`);
+    // 1. Check if this is an alert computation OR PopularInvestorProfileMetrics
+    const isProfileMetrics = computationName === 'PopularInvestorProfileMetrics';
+    if (!isAlertComputation(computationName) && !isProfileMetrics) {
+      logger.log('DEBUG', `[AlertTrigger] Not an alert computation or profile metrics: ${computationName}`);
      return;
    }
 
+    // If it's PopularInvestorProfileMetrics, check for all-clear notifications only
+    if (isProfileMetrics) {
+      const docData = change.after.data();
+      const results = readComputationResults(docData);
+      if (results.cids && results.cids.length > 0) {
+        await checkAndSendAllClearNotifications(db, logger, results.cids, date, config);
+      }
+      return; // Don't process as alert computation
+    }
+
    const alertType = getAlertTypeByComputation(computationName);
    if (!alertType) {
      logger.log('WARN', `[AlertTrigger] Alert type not found for computation: ${computationName}`);
@@ -188,17 +206,189 @@ async function handleComputationResultWrite(change, context, config, dependencie
 
     logger.log('SUCCESS', `[AlertTrigger] Completed processing ${computationName}: ${processedCount} successful, ${errorCount} errors`);
 
+    // After processing alerts, check for "all clear" notifications for static watchlists
+    if (processedCount > 0 || results.cids.length > 0) {
+      // Check if any PIs were processed but didn't trigger alerts
+      await checkAndSendAllClearNotifications(db, logger, results.cids, date, config);
+    }
+
   } catch (error) {
     logger.log('ERROR', `[AlertTrigger] Fatal error processing ${computationName}`, error);
     // Don't throw - we don't want to retry the entire computation write
   }
 }
 
+/**
+ * Check for PIs in static watchlists that were processed but didn't trigger alerts
+ * Send "all clear" notifications to users who have these PIs in their static watchlists
+ * This should be called after PopularInvestorProfileMetrics is computed (not just alert computations)
+ */
+async function checkAndSendAllClearNotifications(db, logger, processedPICids, computationDate, config) {
+  try {
+    const today = computationDate || new Date().toISOString().split('T')[0];
+
+    if (!processedPICids || processedPICids.length === 0) {
+      return; // No PIs to check
+    }
+
+    // Get all static watchlists that contain any of the processed PIs
+    const watchlistsCollection = config.watchlistsCollection || 'watchlists';
+    const watchlistsSnapshot = await db.collection(watchlistsCollection).get();
+
+    const usersToNotify = new Map(); // userCid -> Map of piCid -> {username, ...}
+
+    for (const userDoc of watchlistsSnapshot.docs) {
+      const userCid = userDoc.id;
+      const userListsSnapshot = await userDoc.ref.collection('lists').get();
+
+      for (const listDoc of userListsSnapshot.docs) {
+        const listData = listDoc.data();
+
+        // Only check static watchlists
+        if (listData.type !== 'static' || !listData.items || !Array.isArray(listData.items)) {
+          continue;
+        }
+
+        // Check if any processed PI is in this watchlist
+        for (const item of listData.items) {
+          const piCid = Number(item.cid);
+          if (processedPICids.includes(piCid)) {
+            // Check if this PI triggered any alerts today
+            const hasAlerts = await checkIfPIHasAlertsToday(db, userCid, piCid, today);
+
+            if (!hasAlerts) {
+              // No alerts triggered - send "all clear" notification
+              if (!usersToNotify.has(userCid)) {
+                usersToNotify.set(userCid, new Map());
+              }
+              usersToNotify.get(userCid).set(piCid, {
+                cid: piCid,
+                username: item.username || `PI-${piCid}`
+              });
+            }
+          }
+        }
+      }
+    }
+
+    // Send notifications to all users
+    let totalNotifications = 0;
+    for (const [userCid, pisMap] of usersToNotify.entries()) {
+      for (const [piCid, pi] of pisMap.entries()) {
+        await sendAllClearNotification(db, logger, userCid, pi.cid, pi.username, today);
+        totalNotifications++;
+      }
+    }
+
+    if (totalNotifications > 0) {
+      logger.log('INFO', `[checkAndSendAllClearNotifications] Sent ${totalNotifications} all-clear notifications to ${usersToNotify.size} users`);
+    }
+
+  } catch (error) {
+    logger.log('ERROR', `[checkAndSendAllClearNotifications] Error checking all-clear notifications`, error);
+    // Don't throw - this is a non-critical feature
+  }
+}
+
+/**
+ * Check if a PI has any alerts for a user today
+ */
+async function checkIfPIHasAlertsToday(db, userCid, piCid, date) {
+  try {
+    const alertsRef = db.collection('user_alerts')
+      .doc(String(userCid))
+      .collection('alerts');
+
+    // Check if there are any alerts for this PI today
+    const todayStart = new Date(date);
+    todayStart.setHours(0, 0, 0, 0);
+    const todayEnd = new Date(date);
+    todayEnd.setHours(23, 59, 59, 999);
+
+    const snapshot = await alertsRef
+      .where('piCid', '==', Number(piCid))
+      .where('computationDate', '==', date)
+      .limit(1)
+      .get();
+
+    return !snapshot.empty;
+  } catch (error) {
+    // If we can't check, assume there are alerts (safer)
+    return true;
+  }
+}
+
+/**
+ * Send an "all clear" notification to a user
+ */
+async function sendAllClearNotification(db, logger, userCid, piCid, piUsername, date) {
+  try {
+    const { FieldValue } = require('@google-cloud/firestore');
+
+    // Check if we already sent an all-clear notification for this PI today
+    const existingRef = db.collection('user_alerts')
+      .doc(String(userCid))
+      .collection('alerts')
+      .where('piCid', '==', Number(piCid))
+      .where('alertType', '==', 'all_clear')
+      .where('computationDate', '==', date)
+      .limit(1);
+
+    const existingSnapshot = await existingRef.get();
+    if (!existingSnapshot.empty) {
+      // Already sent notification for this PI today
+      return;
+    }
+
+    // Create the notification using the notification system
+    const notificationRef = db.collection('user_notifications')
+      .doc(String(userCid))
+      .collection('notifications')
+      .doc();
+
+    await notificationRef.set({
+      id: notificationRef.id,
+      type: 'alert',
+      title: 'All Clear',
+      message: `${piUsername} was processed, all clear today!`,
+      piCid: Number(piCid),
+      piUsername: piUsername,
+      alertType: 'all_clear',
+      computationDate: date,
+      read: false,
+      createdAt: FieldValue.serverTimestamp(),
+      metadata: {
+        message: 'User was processed, all clear today!'
+      }
+    });
+
+    // Update notification counter
+    const counterRef = db.collection('user_notifications')
+      .doc(String(userCid))
+      .collection('counters')
+      .doc(date);
+
+    await counterRef.set({
+      date: date,
+      unreadCount: FieldValue.increment(1),
+      totalCount: FieldValue.increment(1),
+      lastUpdated: FieldValue.serverTimestamp()
+    }, { merge: true });
+
+    logger.log('INFO', `[sendAllClearNotification] Sent all-clear notification to user ${userCid} for PI ${piCid}`);
+
+  } catch (error) {
+    logger.log('ERROR', `[sendAllClearNotification] Error sending all-clear notification`, error);
+    // Don't throw - this is non-critical
+  }
+}
+
 /**
  * Export the trigger handlers
  */
 module.exports = {
   handleAlertTrigger,
-  handleComputationResultWrite
+  handleComputationResultWrite,
+  checkAndSendAllClearNotifications
 };
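Since checkAndSendAllClearNotifications is now exported alongside the trigger handlers, it can also be invoked directly. A minimal usage sketch, assuming a caller that already has a Firestore client and a compatible logger (the module path, logger shape, cid values, and date below are illustrative, not part of the published API):

    // Illustrative wiring only - module path, logger shape and cid values are assumptions.
    const { Firestore } = require('@google-cloud/firestore');
    const { checkAndSendAllClearNotifications } = require('./alertTriggers'); // hypothetical path

    const db = new Firestore();
    const logger = { log: (level, msg, extra) => console.log(`[${level}]`, msg, extra ?? '') };

    // cids that PopularInvestorProfileMetrics processed for the given date (sample values)
    checkAndSendAllClearNotifications(db, logger, [1234567, 7654321], '2025-08-15', {
      watchlistsCollection: 'watchlists' // optional; the function falls back to 'watchlists'
    }).catch(console.error);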
 
@@ -397,10 +397,42 @@ async function getSignedInUsersToUpdate(dependencies, config) {
     targets.push({ cid: String(cid), username });
   });
 
-  // Now filter out users who were updated in the last 18 hours
-  // Check user_sync_requests for last completed sync
+  // Now filter out users who were updated today
+  // Check actual portfolio/history snapshot dates, not just sync request timestamps
   const filteredTargets = [];
+  const today = new Date().toISOString().split('T')[0];
   const checkPromises = targets.map(async (target) => {
+    // Check if user has portfolio or history data for today
+    const blockId = `${Math.floor(parseInt(target.cid) / 1000000)}M`;
+
+    // Check portfolio snapshot for today
+    const portfolioSnapshotRef = db.collection(collectionName)
+      .doc(blockId)
+      .collection('snapshots')
+      .doc(today)
+      .collection('parts')
+      .limit(1);
+
+    const portfolioSnapshot = await portfolioSnapshotRef.get();
+
+    // Check history snapshot for today
+    const historyCollection = config.signedInUserHistoryCollection || process.env.FIRESTORE_COLLECTION_SIGNED_IN_USER_HISTORY || 'signed_in_user_history';
+    const historySnapshotRef = db.collection(historyCollection)
+      .doc(blockId)
+      .collection('snapshots')
+      .doc(today)
+      .collection('parts')
+      .limit(1);
+
+    const historySnapshot = await historySnapshotRef.get();
+
+    // If user has data for today, skip them (they're already up-to-date)
+    if (!portfolioSnapshot.empty || !historySnapshot.empty) {
+      skippedCount++;
+      return null; // Skip this user - already updated today
+    }
+
+    // Also check sync requests as a fallback (in case data exists but snapshot check failed)
     const syncRef = db.collection('user_sync_requests')
       .doc(target.cid)
       .collection('global')
@@ -418,7 +450,7 @@ async function getSignedInUsersToUpdate(dependencies, config) {
      }
    }
 
-    return target; // Include this user
+    return target; // Include this user - needs update
   });
 
   const results = await Promise.all(checkPromises);
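The snapshot checks above key off a million-cid block document. A small sketch of the blockId derivation they rely on (the helper name is illustrative; the formula is taken from the hunk above):

    // Illustrative helper - mirrors the blockId expression used in the snapshot paths above.
    const cidToBlockId = (cid) => `${Math.floor(parseInt(cid, 10) / 1000000)}M`;

    cidToBlockId('12345678'); // "12M" -> {collectionName}/12M/snapshots/{YYYY-MM-DD}/parts/...
    cidToBlockId('987654');   // "0M"  -> cids below one million share the "0M" block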
@@ -49,6 +49,51 @@ exports.fetchAndStorePrices = async (config, dependencies) => {
   const batchPromises = [];
   for (const shardId in shardUpdates) { const docRef = db.collection(priceCollectionName).doc(shardId); const payload = shardUpdates[shardId]; batchPromises.push(docRef.update(payload)); }
   await Promise.all(batchPromises);
+
+  // Extract all dates from the price data and create a date tracking document
+  const priceDatesSet = new Set();
+  const AUGUST_2025 = '2025-08-01';
+
+  for (const instrumentData of results) {
+    const dailyData = instrumentData?.ClosingPrices?.Daily;
+    const weeklyData = instrumentData?.ClosingPrices?.Weekly;
+    const monthlyData = instrumentData?.ClosingPrices?.Monthly;
+
+    if (dailyData?.Date) {
+      const dateKey = dailyData.Date.substring(0, 10);
+      if (dateKey >= AUGUST_2025) {
+        priceDatesSet.add(dateKey);
+      }
+    }
+    if (weeklyData?.Date) {
+      const dateKey = weeklyData.Date.substring(0, 10);
+      if (dateKey >= AUGUST_2025) {
+        priceDatesSet.add(dateKey);
+      }
+    }
+    if (monthlyData?.Date) {
+      const dateKey = monthlyData.Date.substring(0, 10);
+      if (dateKey >= AUGUST_2025) {
+        priceDatesSet.add(dateKey);
+      }
+    }
+  }
+
+  // Write date tracking document
+  const today = new Date().toISOString().split('T')[0];
+  const dateTrackingRef = db.collection('pricedatastoreddates').doc(today);
+  const priceDatesArray = Array.from(priceDatesSet).sort();
+
+  await dateTrackingRef.set({
+    fetchDate: today,
+    datesAvailable: priceDatesArray,
+    dateCount: priceDatesArray.length,
+    instrumentsProcessed: results.length,
+    storedAt: FieldValue.serverTimestamp()
+  });
+
+  logger.log('INFO', `[PriceFetcherHelpers] Wrote price date tracking document for ${today} with ${priceDatesArray.length} dates (from August 2025 onwards)`);
+
   const successMessage = `Successfully processed and saved daily prices for ${results.length} instruments to ${batchPromises.length} shards.`;
   logger.log('SUCCESS', `[PriceFetcherHelpers] ${successMessage}`);
   return { success: true, message: successMessage, instrumentsProcessed: results.length };
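The tracking document written here is what runRootDataIndexer consumes in the next file. A sketch of its shape, with made-up values for a single fetch day:

    // Illustrative contents of pricedatastoreddates/2025-08-15 (values are examples only).
    const exampleTrackingDoc = {
      fetchDate: '2025-08-15',
      datesAvailable: ['2025-08-13', '2025-08-14', '2025-08-15'], // YYYY-MM-DD keys, August 2025 onward
      dateCount: 3,
      instrumentsProcessed: 4200,
      storedAt: '<Firestore server timestamp>'
    };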
@@ -75,35 +75,61 @@ exports.runRootDataIndexer = async (config, dependencies) => {
   const scanMode = targetDate ? 'SINGLE_DATE' : 'FULL_SCAN';
   logger.log('INFO', `[RootDataIndexer] Starting Root Data Availability Scan... Mode: ${scanMode}`, { targetDate });
 
-  // 1. Price Availability (Global Scan Mode Only)
-  // If running for a single targetDate, we skip the global shard sampling to save time.
+  // 1. Price Availability - Read from date tracking documents
+  // Find the latest pricedatastoreddates document and extract available dates
   const priceAvailabilitySet = new Set();
 
-  if (!targetDate) {
-    try {
-      // Path: asset_prices/shard_*
-      const priceCollectionRef = db.collection(PRICE_COLLECTION_NAME);
-      const priceShardsSnapshot = await priceCollectionRef.limit(10).get();
+  try {
+    // Get the latest price date tracking document
+    const dateTrackingRef = db.collection('pricedatastoreddates')
+      .orderBy('fetchDate', 'desc')
+      .limit(1);
+
+    const dateTrackingSnapshot = await dateTrackingRef.get();
+
+    if (!dateTrackingSnapshot.empty) {
+      const latestTrackingDoc = dateTrackingSnapshot.docs[0].data();
+      const datesAvailable = latestTrackingDoc.datesAvailable || [];
 
-      if (!priceShardsSnapshot.empty) {
-        // Sample up to 10 shards and extract date keys from them
-        for (const shardDoc of priceShardsSnapshot.docs) {
-          if (shardDoc.id.startsWith('shard_')) {
-            const data = shardDoc.data();
-            Object.values(data).forEach(instrument => {
-              if (instrument && instrument.prices) {
-                Object.keys(instrument.prices).forEach(dateKey => {
-                  if (/^\d{4}-\d{2}-\d{2}$/.test(dateKey)) {
-                    priceAvailabilitySet.add(dateKey);
-                  }
-                });
-              }
-            });
+      // Add all dates from the tracking document
+      datesAvailable.forEach(dateKey => {
+        if (/^\d{4}-\d{2}-\d{2}$/.test(dateKey)) {
+          priceAvailabilitySet.add(dateKey);
+        }
+      });
+
+      logger.log('INFO', `[RootDataIndexer] Loaded ${priceAvailabilitySet.size} price dates from tracking document (fetchDate: ${latestTrackingDoc.fetchDate})`);
+    } else {
+      logger.log('WARN', '[RootDataIndexer] No price date tracking documents found. Falling back to empty set.');
+    }
+  } catch (e) {
+    logger.log('ERROR', '[RootDataIndexer] Failed to load price date tracking document.', { error: e.message });
+    // Fallback: try to sample shards if tracking document fails
+    if (!targetDate) {
+      try {
+        const priceCollectionRef = db.collection(PRICE_COLLECTION_NAME);
+        const priceShardsSnapshot = await priceCollectionRef.limit(10).get();
+
+        if (!priceShardsSnapshot.empty) {
+          for (const shardDoc of priceShardsSnapshot.docs) {
+            if (shardDoc.id.startsWith('shard_')) {
+              const data = shardDoc.data();
+              Object.values(data).forEach(instrument => {
+                if (instrument && instrument.prices) {
+                  Object.keys(instrument.prices).forEach(dateKey => {
+                    if (/^\d{4}-\d{2}-\d{2}$/.test(dateKey)) {
+                      priceAvailabilitySet.add(dateKey);
+                    }
+                  });
+                }
+              });
+            }
           }
+          logger.log('INFO', `[RootDataIndexer] Fallback: Loaded ${priceAvailabilitySet.size} price dates from shard sampling.`);
         }
+      } catch (fallbackError) {
+        logger.log('ERROR', '[RootDataIndexer] Fallback shard sampling also failed.', { error: fallbackError.message });
       }
-    } catch (e) {
-      logger.log('ERROR', '[RootDataIndexer] Failed to sample price shards.', { error: e.message });
     }
   }
 
@@ -130,9 +156,9 @@ exports.runRootDataIndexer = async (config, dependencies) => {
   const promises = datesToScan.map(dateStr => limit(async () => {
     try {
       // Define Time Range for Social Query (Full Day UTC)
-      const dayStart = new Date(dateStr);
-      const dayEnd = new Date(dateStr);
-      dayEnd.setHours(23, 59, 59, 999);
+      // Use UTC methods to ensure correct timezone handling
+      const dayStart = new Date(dateStr + 'T00:00:00.000Z');
+      const dayEnd = new Date(dateStr + 'T23:59:59.999Z');
 
       const availability = {
         date: dateStr,
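The switch to explicit UTC bounds matters because a date-only string parses as UTC midnight while setHours() operates in the server's local timezone. A short illustration (the UTC-05:00 offset is an example, not a statement about the deployment environment):

    // Old approach on a server at UTC-05:00: the query window collapses to ~5 hours.
    const dayEndOld = new Date('2025-08-15');   // 2025-08-15T00:00:00.000Z
    dayEndOld.setHours(23, 59, 59, 999);        // local Aug 14 23:59 -> 2025-08-15T04:59:59.999Z

    // New approach: both bounds pinned to UTC, covering the full day on any server.
    const dayStart = new Date('2025-08-15T00:00:00.000Z');
    const dayEnd   = new Date('2025-08-15T23:59:59.999Z');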
@@ -250,17 +276,33 @@ exports.runRootDataIndexer = async (config, dependencies) => {
       let foundSignedInSocial = false;
 
       if (!universalSocialSnap.empty) {
+        logger.log('DEBUG', `[RootDataIndexer/${dateStr}] Found ${universalSocialSnap.docs.length} social posts in query`);
         universalSocialSnap.docs.forEach(doc => {
           const path = doc.ref.path;
+          const data = doc.data();
+          const fetchedAt = data.fetchedAt;
+
           // Use includes() to match collection name anywhere in path (more robust)
           // Path format: {collectionName}/{userId}/posts/{postId}
-          if (path.includes(`/${PI_SOCIAL_COLL_NAME}/`) || path.startsWith(`${PI_SOCIAL_COLL_NAME}/`)) {
+          // Firestore paths don't have leading slash, so check both with and without
+          const piMatchPattern = `${PI_SOCIAL_COLL_NAME}/`;
+          const signedInMatchPattern = `${SIGNED_IN_SOCIAL_COLL_NAME}/`;
+
+          if (path.includes(piMatchPattern) || path.startsWith(piMatchPattern)) {
            foundPISocial = true;
+            logger.log('DEBUG', `[RootDataIndexer/${dateStr}] ✓ Found PI social: ${path}`);
           }
-          if (path.includes(`/${SIGNED_IN_SOCIAL_COLL_NAME}/`) || path.startsWith(`${SIGNED_IN_SOCIAL_COLL_NAME}/`)) {
+          if (path.includes(signedInMatchPattern) || path.startsWith(signedInMatchPattern)) {
            foundSignedInSocial = true;
+            const fetchedAtStr = fetchedAt ? (fetchedAt.toDate ? fetchedAt.toDate().toISOString() : String(fetchedAt)) : 'missing';
+            logger.log('DEBUG', `[RootDataIndexer/${dateStr}] ✓ Found signed-in social: ${path}, fetchedAt: ${fetchedAtStr}`);
+          } else if (!path.includes(piMatchPattern)) {
+            // Log paths that don't match either pattern to help debug
+            logger.log('DEBUG', `[RootDataIndexer/${dateStr}] ✗ Path doesn't match: ${path} (looking for: "${signedInMatchPattern}" or "${piMatchPattern}")`);
           }
         });
+      } else {
+        logger.log('DEBUG', `[RootDataIndexer/${dateStr}] No social posts found in query (dayStart: ${dayStart.toISOString()}, dayEnd: ${dayEnd.toISOString()})`);
       }
 
       // --- Assign to Availability ---
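The path-matching change above drops the leading slash from the search pattern; a quick illustration with a stand-in collection name (`pi_social_posts` substitutes for whatever PI_SOCIAL_COLL_NAME resolves to):

    // Firestore document paths carry no leading slash, e.g. "pi_social_posts/123456/posts/abc".
    const path = 'pi_social_posts/123456/posts/abc';

    path.includes('/pi_social_posts/');   // false - old check only matched subcollection positions
    path.startsWith('pi_social_posts/');  // true  - new prefix check also matches top-level paths
    path.includes('pi_social_posts/');    // true  - and still matches anywhere deeper in the path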
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "bulltrackers-module",
-  "version": "1.0.467",
+  "version": "1.0.469",
   "description": "Helper Functions for Bulltrackers.",
   "main": "index.js",
   "files": [