bulltrackers-module 1.0.778 → 1.0.779

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. package/functions/alert-system/helpers/alert_helpers.js +114 -90
  2. package/functions/alert-system/helpers/alert_manifest_loader.js +88 -99
  3. package/functions/alert-system/index.js +81 -138
  4. package/functions/alert-system/tests/stage1-alert-manifest.test.js +94 -0
  5. package/functions/alert-system/tests/stage2-alert-metadata.test.js +93 -0
  6. package/functions/alert-system/tests/stage3-alert-handler.test.js +79 -0
  7. package/functions/api-v2/helpers/data-fetchers/firestore.js +613 -478
  8. package/functions/api-v2/routes/popular_investors.js +7 -7
  9. package/functions/api-v2/routes/profile.js +2 -1
  10. package/functions/api-v2/tests/stage4-profile-paths.test.js +52 -0
  11. package/functions/api-v2/tests/stage5-aum-bigquery.test.js +81 -0
  12. package/functions/api-v2/tests/stage7-pi-page-views.test.js +55 -0
  13. package/functions/api-v2/tests/stage8-watchlist-membership.test.js +49 -0
  14. package/functions/api-v2/tests/stage9-user-alert-settings.test.js +81 -0
  15. package/functions/computation-system-v2/computations/BehavioralAnomaly.js +104 -81
  16. package/functions/computation-system-v2/computations/NewSectorExposure.js +7 -7
  17. package/functions/computation-system-v2/computations/NewSocialPost.js +6 -6
  18. package/functions/computation-system-v2/computations/PositionInvestedIncrease.js +11 -11
  19. package/functions/computation-system-v2/computations/SignedInUserPIProfileMetrics.js +1 -1
  20. package/functions/computation-system-v2/config/bulltrackers.config.js +8 -0
  21. package/functions/computation-system-v2/framework/core/Manifest.js +1 -0
  22. package/functions/core/utils/bigquery_utils.js +32 -0
  23. package/package.json +1 -1
@@ -83,36 +83,88 @@ const latestUserCentricSnapshot = async (firestore, userId, collectionName, data
 // Currently all of the isPage computations are stored under the same popular-investor location, so computation path isn't really needed, can hardcode for now.
 // A lookback period to return the latest data we have for a given userID within a given lookback range, otherwise returning an identifiable error
 
+// V2 profile metrics paths (one doc per entity per date)
+const PROFILE_COMPUTATION_PATHS = {
+  SignedInUserProfileMetrics: 'SignedInUserProfiles/{entityId}/metrics/{date}',
+  SignedInUserPIProfileMetrics: 'users/{entityId}/signed_in_user_pi_profile_metrics/{date}',
+  PopularInvestorProfileMetrics: 'PiProfiles/{entityId}/metrics/{date}'
+};
+
+/**
+ * Fetch profile metrics from V2 Firestore path (one doc per date).
+ * @param {Object} firestore - Firestore instance
+ * @param {string} pathTemplate - e.g. 'PiProfiles/{entityId}/metrics/{date}'
+ * @param {string} entityId - User or PI ID
+ * @param {string} dateStr - Target date (YYYY-MM-DD)
+ * @param {number} lookbackDays - Days to look back
+ * @returns {Promise<Array<{date: string, data: object}>>} Sorted by date desc (newest first)
+ */
+const getProfileMetricsFromPath = async (firestore, pathTemplate, entityId, dateStr, lookbackDays = 7) => {
+  const sanitizedEntityId = sanitizeCid(entityId);
+  const endDate = new Date(dateStr);
+  const startDate = new Date(endDate);
+  startDate.setDate(endDate.getDate() - lookbackDays);
+  const results = [];
+  for (let d = new Date(startDate); d <= endDate; d.setDate(d.getDate() + 1)) {
+    const dateKey = d.toISOString().split('T')[0];
+    const path = pathTemplate.replace(/{entityId}/g, sanitizedEntityId).replace(/{date}/g, dateKey);
+    const docRef = firestore.doc(path);
+    const docSnapshot = await docRef.get();
+    if (docSnapshot.exists) {
+      results.push({ date: dateKey, data: docSnapshot.data() });
+    }
+  }
+  if (results.length === 0) {
+    throw new Error(`No profile data found for entity ${sanitizedEntityId} within the last ${lookbackDays} days`);
+  }
+  results.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());
+  return results;
+};
+
+/**
+ * Fetch profile metrics for a computation that writes to V2 path.
+ * @param {Object} db - Firestore instance
+ * @param {string} computationName - SignedInUserProfileMetrics | SignedInUserPIProfileMetrics | PopularInvestorProfileMetrics
+ * @param {string} entityId - User or PI ID
+ * @param {string} dateStr - Target date
+ * @param {number} lookbackDays - Lookback days
+ * @returns {Promise<Array<{date: string, data: object}>>}
+ */
+const fetchProfileMetrics = async (db, computationName, entityId, dateStr, lookbackDays = 7) => {
+  const pathTemplate = PROFILE_COMPUTATION_PATHS[computationName];
+  if (!pathTemplate) {
+    throw new Error(`Unknown profile computation: ${computationName}`);
+  }
+  return getProfileMetricsFromPath(db, pathTemplate, entityId, dateStr, lookbackDays);
+};
+
+/** @deprecated Use fetchProfileMetrics for V2 profile computations (SignedInUserProfileMetrics, SignedInUserPIProfileMetrics, PopularInvestorProfileMetrics). */
 const pageCollection = async (firestore, dateStr, computationName, userId, lookbackDays = 7) => {
+  if (PROFILE_COMPUTATION_PATHS[computationName]) {
+    return fetchProfileMetrics(firestore, computationName, userId, dateStr, lookbackDays);
+  }
   try {
-    // Sanitize user inputs
     const sanitizedUserId = sanitizeCid(userId);
     const sanitizedComputationName = sanitizeDocId(computationName);
-
     const endDate = new Date(dateStr);
     const startDate = new Date(endDate);
     startDate.setDate(endDate.getDate() - lookbackDays);
     const results = [];
     for (let d = new Date(startDate); d <= endDate; d.setDate(d.getDate() + 1)) {
       const dateKey = d.toISOString().split('T')[0];
-      const docRef = firestore.collection('unified_insights').doc(dateKey)
+      const docRef = firestore.collection('unified_insights').doc(dateKey)
         .collection('results').doc('popular-investor')
         .collection('computations').doc(sanitizedComputationName)
         .collection('pages').doc(sanitizedUserId);
       const docSnapshot = await docRef.get();
       if (docSnapshot.exists) {
         results.push({ date: dateKey, data: docSnapshot.data() });
-      }
+      }
     }
     if (results.length === 0) {
       throw new Error(`No page data found for User ID ${sanitizedUserId} in computation ${sanitizedComputationName} within the last ${lookbackDays} days`);
     }
-    // Sort results by date descending (newest first) so the latest data is always first
-    results.sort((a, b) => {
-      const dateA = new Date(a.date);
-      const dateB = new Date(b.date);
-      return dateB.getTime() - dateA.getTime(); // Descending order (newest first)
-    });
+    results.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());
     return results;
   } catch (error) {
     console.error(`Error fetching page collection data: ${error}`);
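A minimal usage sketch of the new routing (a sketch only, not part of the diff; assumes an initialized Firestore Admin instance and that pageCollection is reachable from this module):

    const admin = require('firebase-admin');
    admin.initializeApp();
    const db = admin.firestore();

    (async () => {
      // Listed in PROFILE_COMPUTATION_PATHS, so routed to the V2 one-doc-per-date paths:
      const v2 = await pageCollection(db, '2026-01-09', 'PopularInvestorProfileMetrics', '12345');
      // Not listed, so falls through to the legacy unified_insights pages lookup:
      const legacy = await pageCollection(db, '2026-01-09', 'SomeOtherComputation', '12345');
      console.log(v2[0].date, legacy[0].date); // both arrays are sorted newest first
    })();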
@@ -169,18 +221,18 @@ const fetchPopularInvestorMasterList = async (firestore, userId = null) => {
     const data = docSnapshot.data().investors;
     if (userId) {
       if (data[userId]) {
-        return data[userId];
+        return data[userId];
       } else {
         throw new Error(`User ID ${userId} not found in popular investor master list`);
       }
     } else {
       return data;
-    }
+    }
   } catch (error) {
     console.error(`Error fetching popular investor master list: ${error}`);
     throw error;
   }
-};
+};
 
 // 4. Fetch developer overrides data
 // GOAL : Used for allowing a given userID to have special abilities
@@ -191,12 +243,12 @@ const isDeveloper = async (firestore, userId) => {
   try {
     const docRef = firestore.collection('dev_overrides').doc(userId);
     const docSnapshot = await docRef.get();
-
+
     // Check both that document exists AND enabled === true
     if (!docSnapshot.exists) {
       return false;
     }
-
+
     const data = docSnapshot.data();
     return data?.enabled === true;
   }
@@ -388,15 +440,15 @@ const isDeveloper = async (firestore, userId) => {
 const manageUserWatchlist = async (db, userId, instruction, payload = {}) => {
   const todayStr = new Date().toISOString().split('T')[0]; // "2026-01-09"
   const batch = db.batch();
-
+
   // Sanitize user inputs
   const sanitizedUserId = sanitizeCid(userId);
-
+
   // 1. Determine Watchlist ID and Reference
   // Handle both 'id' (from frontend) and 'watchlistId' (legacy)
-  const watchlistId = payload.id ? sanitizeDocId(payload.id)
-    : payload.watchlistId ? sanitizeDocId(payload.watchlistId)
-    : `watchlist_${Date.now()}_${Math.random().toString(16).substr(2, 8)}`;
+  const watchlistId = payload.id ? sanitizeDocId(payload.id)
+    : payload.watchlistId ? sanitizeDocId(payload.watchlistId)
+    : `watchlist_${Date.now()}_${Math.random().toString(16).substr(2, 8)}`;
   const userDocRef = db.collection('SignedInUsers').doc(sanitizedUserId).collection('watchlists').doc(watchlistId);
 
   try {
@@ -406,12 +458,14 @@ const manageUserWatchlist = async (db, userId, instruction, payload = {}) => {
 
     // 2. Fetch Old State (necessary for Update and Delete)
     let oldItems = [];
+    let oldDocData = null;
     if (instruction === 'update' || instruction === 'delete') {
       const currentSnap = await userDocRef.get();
       if (currentSnap.exists) {
-        oldItems = currentSnap.data().items || [];
+        oldDocData = currentSnap.data();
+        oldItems = oldDocData.items || [];
       } else if (instruction === 'update') {
-        // Fail safe: If updating a non-existent doc, treat as create
+        // Fail safe: If updating a non-existent doc, treat as create
       }
     }
 
@@ -429,33 +483,33 @@ const manageUserWatchlist = async (db, userId, instruction, payload = {}) => {
         isAutoGenerated: false
       };
       batch.set(userDocRef, newDocData);
-    }
-
+    }
+
     else if (instruction === 'delete') {
       removedItems = oldItems; // All items are being removed
       // Delete the User Document
       batch.delete(userDocRef);
-    }
-
+    }
+
     else if (instruction === 'update') {
       const newItems = payload.items || [];
-
+
       // Map IDs for comparison
       const oldCids = new Set(oldItems.map(i => Number(i.cid)));
       const newCids = new Set(newItems.map(i => Number(i.cid)));
 
       // Calculate Added (In New, not in Old)
       addedItems = newItems.filter(i => !oldCids.has(Number(i.cid)));
-
+
       // Calculate Removed (In Old, not in New)
       removedItems = oldItems.filter(i => !newCids.has(Number(i.cid)));
 
       if (payload.visibility) isPrivate = payload.visibility === 'private';
 
       // Update the User Document
-      batch.set(userDocRef, {
-        ...payload,
-        updatedAt: FieldValue.serverTimestamp()
+      batch.set(userDocRef, {
+        ...payload,
+        updatedAt: FieldValue.serverTimestamp()
       }, { merge: true });
     }
 
@@ -480,7 +534,7 @@ const manageUserWatchlist = async (db, userId, instruction, payload = {}) => {
         .doc(piId).collection('watchlistData').doc('current');
 
       const dailyField = `dailyAdditions.${todayStr}`;
-
+
       batch.set(piCounterRef, {
         lastUpdated: FieldValue.serverTimestamp(),
         totalUsers: FieldValue.increment(1),
@@ -494,88 +548,125 @@ const manageUserWatchlist = async (db, userId, instruction, payload = {}) => {
       }, { merge: true });
     });
 
-    // 5. Handle REMOVALS (Current Counter Decrement ONLY)
+    // 5. Handle REMOVALS (Current Counter Decrement only; BQ uses in-memory delta for removed PIs)
     removedItems.forEach(item => {
       const piId = String(item.cid);
-
-      // Update PI Real-time Counter (Decrement)
       const piCounterRef = db.collection('PopularInvestors')
         .doc(piId).collection('watchlistData').doc('current');
-
-      // We do NOT touch dailyAdditions here, only global totals
       batch.set(piCounterRef, {
         lastUpdated: FieldValue.serverTimestamp(),
        totalUsers: FieldValue.increment(-1),
-        userCids: FieldValue.arrayRemove(userId)
+        userCids: FieldValue.arrayRemove(sanitizedUserId)
       }, { merge: true });
     });
 
     // 6. Commit Batch
     await batch.commit();
-
+
     // 7. Write to BigQuery (after Firestore commit to get final values)
     if (process.env.BIGQUERY_ENABLED !== 'false' && (addedItems.length > 0 || removedItems.length > 0)) {
       try {
-        const { ensureWatchlistMembershipTable, insertRowsWithMerge } = require('../../../core/utils/bigquery_utils');
+        const { ensureWatchlistMembershipTable, ensureUserAlertSettingsTable, insertRowsWithMerge } = require('../../../core/utils/bigquery_utils');
         await ensureWatchlistMembershipTable();
-
+
         const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
         const bigqueryRows = [];
-
-        // Process all affected PIs (both added and removed)
+
         const affectedPIs = new Set([
           ...addedItems.map(item => String(item.cid)),
          ...removedItems.map(item => String(item.cid))
         ]);
-
-        // Read updated documents from subcollection for each affected PI
+        const removedPIIds = new Set(removedItems.map(item => String(item.cid)));
+
        for (const piId of affectedPIs) {
          const masterRef = db.collection('WatchlistMembershipData')
            .doc(todayStr).collection('popular_investors').doc(piId);
-
          const piDoc = await masterRef.get();
+
+          let totalUsers = 0;
+          let publicWatchlistCount = 0;
+          let privateWatchlistCount = 0;
+          let users = [];
+          let lastUpdated = new Date().toISOString();
+
          if (piDoc.exists) {
            const piData = piDoc.data();
-
-            // Transform to BigQuery format
-            const lastUpdated = piData.lastUpdated
-              ? (piData.lastUpdated.toDate ? piData.lastUpdated.toDate().toISOString() : piData.lastUpdated)
-              : new Date().toISOString();
-
-            bigqueryRows.push({
-              date: todayStr,
-              pi_id: parseInt(piId, 10),
-              total_users: piData.totalUsers || 0,
-              public_watchlist_count: piData.publicWatchlistCount || 0,
-              private_watchlist_count: piData.privateWatchlistCount || 0,
-              users: Array.isArray(piData.users) ? piData.users : [],
-              last_updated: lastUpdated
-            });
+            totalUsers = piData.totalUsers || 0;
+            publicWatchlistCount = piData.publicWatchlistCount || 0;
+            privateWatchlistCount = piData.privateWatchlistCount || 0;
+            users = Array.isArray(piData.users) ? piData.users : [];
+            lastUpdated = piData.lastUpdated
+              ? (piData.lastUpdated.toDate ? piData.lastUpdated.toDate().toISOString() : String(piData.lastUpdated))
+              : lastUpdated;
+
+            if (removedPIIds.has(piId)) {
+              totalUsers = Math.max(0, totalUsers - 1);
+              users = users.filter((u) => u !== sanitizedUserId);
+              if (isPrivate) {
+                privateWatchlistCount = Math.max(0, privateWatchlistCount - 1);
+              } else {
+                publicWatchlistCount = Math.max(0, publicWatchlistCount - 1);
+              }
+            }
+          } else if (removedPIIds.has(piId)) {
+            totalUsers = 0;
+            users = [];
          }
+
+          bigqueryRows.push({
+            date: todayStr,
+            pi_id: parseInt(piId, 10),
+            total_users: totalUsers,
+            public_watchlist_count: publicWatchlistCount,
+            private_watchlist_count: privateWatchlistCount,
+            users,
+            last_updated: lastUpdated
+          });
        }
-
+
        if (bigqueryRows.length > 0) {
-          // Use MERGE to update existing rows or insert new
          await insertRowsWithMerge(
            datasetId,
            'watchlist_membership',
            bigqueryRows,
-            ['date', 'pi_id'], // Key fields for MERGE
-            console // Simple logger
+            ['date', 'pi_id'],
+            console
          );
        }
+
+        // 8. Write to user_alert_settings (one row per watchlist create/update/delete)
+        await ensureUserAlertSettingsTable();
+        const watchlistName = payload.name ?? oldDocData?.name ?? null;
+        const watchlistVisibility = payload.visibility ?? oldDocData?.visibility ?? null;
+        const finalItems = instruction === 'delete' ? [] : (payload.items || []);
+        const userAlertRow = {
+          date: todayStr,
+          user_id: sanitizedUserId,
+          watchlist_id: watchlistId,
+          watchlist_name: watchlistName,
+          visibility: watchlistVisibility,
+          items: finalItems,
+          updated_at: new Date().toISOString()
+        };
+        await insertRowsWithMerge(
+          datasetId,
+          'user_alert_settings',
+          [userAlertRow],
+          ['date', 'user_id', 'watchlist_id'],
+          console
+        );
      } catch (bqError) {
        console.error(`[manageUserWatchlist] BigQuery write failed: ${bqError.message}`);
        // Don't throw - Firestore write succeeded, BigQuery is secondary
      }
    }
-
-    return {
-      success: true,
-      id: watchlistId,
-      action: instruction,
-      added: addedItems.length,
-      removed: removedItems.length
+
+    return {
+      success: true,
+      id: watchlistId,
+      action: instruction,
+      added: addedItems.length,
+      removed: removedItems.length
    };
 
  } catch (error) {
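The key fields passed to insertRowsWithMerge make these writes idempotent: replaying the same instruction on the same day updates the existing (date, user_id, watchlist_id) row rather than appending a duplicate. A sketch of the assumed MERGE semantics (the real statement lives in core/utils/bigquery_utils.js, which this hunk only requires and does not show):

    // Assumed shape only: key fields become the MERGE ON clause,
    // remaining columns are updated on match or inserted otherwise.
    const keyFields = ['date', 'user_id', 'watchlist_id'];
    const on = keyFields.map((k) => `T.${k} = S.${k}`).join(' AND ');
    const sql = `
      MERGE \`${datasetId}.user_alert_settings\` T
      USING UNNEST(@rows) S
      ON ${on}
      WHEN MATCHED THEN UPDATE SET
        watchlist_name = S.watchlist_name,
        visibility = S.visibility,
        items = S.items,
        updated_at = S.updated_at
      WHEN NOT MATCHED THEN INSERT ROW`;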
@@ -619,7 +710,7 @@ const fetchUserVerificationData = async (firestore, userId) => {
     const docSnapshot = await docRef.get();
     if (!docSnapshot.exists) {
       throw new Error(`Verification data for User ID ${userId} not found`);
-    }
+    }
     return { id: docSnapshot.id, ...docSnapshot.data() };
   } catch (error) {
     console.error(`Error fetching user verification data: ${error}`);
@@ -644,14 +735,14 @@ const createEmailLookup = async (firestore, email, cid) => {
   try {
     const hashedEmail = hashEmail(email);
     const lookupRef = firestore.collection('sys_user_lookup').doc(hashedEmail);
-
+
     await lookupRef.set({
       cid: Number(cid),
       emailHash: hashedEmail,
       createdAt: FieldValue.serverTimestamp(),
       updatedAt: FieldValue.serverTimestamp(),
     }, { merge: true });
-
+
     console.log(`[createEmailLookup] Created lookup document for email hash ${hashedEmail} -> CID ${cid}`);
   } catch (error) {
     console.error(`[createEmailLookup] Error creating lookup for ${email}:`, error);
@@ -672,33 +763,33 @@ const lookupCidByEmail = async (firestore, userEmail, firebaseUid = null) => {
   if (!userEmail) {
     throw new Error('Email is required for lookup');
   }
-
+
   try {
     // Normalize email for comparison
     const normalizedEmail = String(userEmail).toLowerCase().trim();
     const hashedEmail = hashEmail(normalizedEmail);
-
+
     // STEP 1: Try O(1) lookup from denormalized collection
     const lookupRef = firestore.collection('sys_user_lookup').doc(hashedEmail);
     const lookupDoc = await lookupRef.get();
-
+
     if (lookupDoc.exists) {
       const lookupData = lookupDoc.data();
       const cid = lookupData.cid;
-
+
       // Fetch verification data to return full user info
       const verificationRef = firestore.collection('SignedInUsers').doc(String(cid)).collection('verification').doc('data');
       const verificationDoc = await verificationRef.get();
-
+
       if (verificationDoc.exists) {
         const verificationData = verificationDoc.data();
-
+
         // Verify email is still in the verification document (safety check)
-        const emails = Array.isArray(verificationData.email)
-          ? verificationData.email
+        const emails = Array.isArray(verificationData.email)
+          ? verificationData.email
           : (verificationData.email ? [verificationData.email] : []);
         const normalizedEmails = emails.map(e => String(e).toLowerCase().trim());
-
+
         if (normalizedEmails.includes(normalizedEmail)) {
           // Validate and enforce Firebase UID if provided
           if (firebaseUid) {
@@ -716,7 +807,7 @@ const lookupCidByEmail = async (firestore, userEmail, firebaseUid = null) => {
             console.log(`[Migration] Added Firebase UID ${firebaseUid} to CID ${cid}`);
           }
         }
-
+
         // Return result from O(1) lookup
         return {
           cid: verificationData.etoroCID || Number(cid),
@@ -733,31 +824,31 @@ const lookupCidByEmail = async (firestore, userEmail, firebaseUid = null) => {
       }
     }
   }
-
+
   // STEP 2: Fallback to O(N) search (migration path)
   // This happens when:
   // 1. Lookup document doesn't exist yet (new user or not migrated)
   // 2. Lookup document exists but email not in verification (stale data)
   console.log(`[lookupCidByEmail] Lookup document not found for ${normalizedEmail}, performing O(N) search (migration)`);
-
+
   const signedInUsersSnapshot = await firestore.collection('SignedInUsers').get();
-
+
   // Search through each CID's verification document
   for (const userDoc of signedInUsersSnapshot.docs) {
     const verificationRef = userDoc.ref.collection('verification').doc('data');
     const verificationDoc = await verificationRef.get();
-
+
     if (verificationDoc.exists) {
       const verificationData = verificationDoc.data();
-      const emails = Array.isArray(verificationData.email)
-        ? verificationData.email
+      const emails = Array.isArray(verificationData.email)
+        ? verificationData.email
        : (verificationData.email ? [verificationData.email] : []);
-
+
      // Case-insensitive email comparison
      const normalizedEmails = emails.map(e => String(e).toLowerCase().trim());
      if (normalizedEmails.includes(normalizedEmail)) {
        const cid = verificationData.etoroCID || Number(userDoc.id);
-
+
        // Validate and enforce Firebase UID if provided
        if (firebaseUid) {
          if (verificationData.firebaseUids && Array.isArray(verificationData.firebaseUids)) {
@@ -774,13 +865,13 @@ const lookupCidByEmail = async (firestore, userEmail, firebaseUid = null) => {
          console.log(`[Migration] Added Firebase UID ${firebaseUid} to CID ${cid}`);
        }
      }
-
+
      // STEP 3: Create lookup document for future O(1) access (migration)
      // Create lookup for all emails in the verification document to handle multiple accounts
      for (const email of normalizedEmails) {
        await createEmailLookup(firestore, email, cid);
      }
-
+
      // Return result
      return {
        cid: cid,
@@ -794,7 +885,7 @@ const lookupCidByEmail = async (firestore, userEmail, firebaseUid = null) => {
      }
    }
  }
-
+
  // No match found
  return null;
 } catch (error) {
@@ -828,7 +919,7 @@ const requestPopularInvestorAddition = async (firestore, userId, piId, piUsernam
     if (masterData[piId]) {
       throw new Error(`Popular Investor ID ${piId} is already in the master list`);
     }
-  }
+  }
   const ttlDate = new Date();
   ttlDate.setDate(ttlDate.getDate() + 7);
   await userRequestRef.set({
@@ -848,7 +939,7 @@ const requestPopularInvestorAddition = async (firestore, userId, piId, piUsernam
   } catch (error) {
     console.error(`Error requesting popular investor addition: ${error}`);
     throw error;
-  }
+  }
 };
 
 // 8. Verification process to check if a given userID has copied a given popular investor
@@ -1038,10 +1129,10 @@ const manageReviews = async (db, userId, action, params = {}) => {
   const sanitizedUserId = sanitizeCid(userId);
   const { piId } = params;
   const sanitizedPiId = sanitizeCid(piId);
-
+
   const batch = db.batch();
   const todayStr = new Date().toISOString().split('T')[0];
-
+
   // Paths - all user inputs sanitized
   const newReviewPath = db.collection('SignedInUsers').doc(sanitizedUserId).collection('reviews').doc(sanitizedPiId);
   const piReviewPath = db.collection('PopularInvestors').doc(sanitizedPiId).collection('reviews').doc(`${sanitizedUserId}_${sanitizedPiId}`);
@@ -1089,7 +1180,7 @@ const manageReviews = async (db, userId, action, params = {}) => {
   // 1. Verify User Copies PI (Internal Logic)
   const portfolioRef = db.collection('SignedInUsers').doc(userId).collection('portfolio').doc('latest');
   const portfolioSnap = await portfolioRef.get();
-
+
   const hasCopy = portfolioSnap.exists && (portfolioSnap.data().AggregatedMirrors || [])
     .some(m => String(m.ParentCID) === String(piId));
 
@@ -1098,14 +1189,14 @@ const manageReviews = async (db, userId, action, params = {}) => {
   // 2. Validation
   const rating = Number(params.rating);
   if (!Number.isInteger(rating) || rating < 1 || rating > 5) throw new Error("Rating must be 1-5");
-
+
   const comment = (params.comment || "").trim();
   if (comment.length > 1000) throw new Error("Comment too long");
 
   // 3. Check for existing data
   const existingNew = await newReviewPath.get();
   const existingLegacy = !existingNew.exists ? await legacyReviewPath.get() : null;
-
+
   const now = FieldValue.serverTimestamp();
   let finalCreatedAt = now;
   let isEdit = false;
@@ -1135,7 +1226,7 @@ const manageReviews = async (db, userId, action, params = {}) => {
 
   batch.set(newReviewPath, reviewPayload);
   batch.set(piReviewPath, reviewPayload);
-
+
   batch.set(globalLogPath, {
     [`reviews.${userId}_${piId}`]: {
       ...reviewPayload,
@@ -1146,15 +1237,15 @@ const manageReviews = async (db, userId, action, params = {}) => {
 
   if (!isEdit) {
     const piRef = db.collection('PopularInvestors').doc(piId.toString());
-    batch.set(piRef, {
-      reviewStats: { totalReviews: FieldValue.increment(1) }
+    batch.set(piRef, {
+      reviewStats: { totalReviews: FieldValue.increment(1) }
    }, { merge: true });
  }
 
  await batch.commit();
  return { success: true, action: isEdit ? 'updated' : 'created' };
 }
-
+
 throw new Error("Invalid Action");
 
 } catch (error) {
@@ -1287,12 +1378,12 @@ const getComputationResults = async (db, computationName, dateStr, userId = null
 try {
   const bucketName = pointerData.gcsBucket || pointerData.gcsUri.split('/')[2];
   const fileName = pointerData.gcsPath || pointerData.gcsUri.split('/').slice(3).join('/');
-
+
   console.log(`[Computation] Reading from GCS: ${fileName} for ${computationName}`);
-
+
   // Stream download is memory efficient for large files
   const [fileContent] = await storage.bucket(bucketName).file(fileName).download();
-
+
   // Assume Gzip (as writer does it), if fails try plain
   try {
     return JSON.parse(zlib.gunzipSync(fileContent).toString('utf8'));
@@ -1318,7 +1409,7 @@ const getComputationResults = async (db, computationName, dateStr, userId = null
 if (pointerData._sharded === true) {
   const shardCount = pointerData._shardCount || 0;
   console.log(`[Computation] Reassembling ${shardCount} shards for ${computationName}`);
-
+
   if (shardCount === 0) return {}; // Return empty object for object-based results
 
   // Create an array of promises to fetch all shards in parallel
@@ -1329,17 +1420,17 @@ const getComputationResults = async (db, computationName, dateStr, userId = null
   }
 
   const shardSnaps = await Promise.all(shardPromises);
-
+
   // Reassemble data by merging objects (for object-based results like GlobalAumPerAsset30D)
   // Shards contain partial objects that need to be merged together
   let reassembledData = {};
-
+
   shardSnaps.forEach((snap, index) => {
     if (snap.exists) {
       const data = snap.data();
       // If the shard itself is compressed (common in big data), decompress it
       const content = (data._compressed) ? tryDecompress(data) : data;
-
+
       // Merge shard contents, ignoring internal metadata fields
       if (content && typeof content === 'object') {
         Object.entries(content).forEach(([k, v]) => {
@@ -1375,35 +1466,91 @@ const getComputationResults = async (db, computationName, dateStr, userId = null
   }
 };
 
-// 10.4. Find most recent available GlobalAumPerAsset30D data
-// Looks back up to 7 days to find the most recent available data
+// Normalize computation name for BigQuery (matches ManifestBuilder: lowercase alphanumeric)
+const normalizeComputationName = (name) => (name || '').toLowerCase().replace(/[^a-z0-9]/g, '');
+
+const COMPUTATION_RESULTS_TABLE = 'computation_results_v3';
+
+/**
+ * Fetch computation result from BigQuery (computation_results_v3).
+ * @param {string} computationName - Config name (e.g. GlobalAumPerAsset)
+ * @param {string} dateStr - Date YYYY-MM-DD
+ * @param {string} [entityId] - Optional entity_id (e.g. _global for global computations)
+ * @param {object} [logger] - Logger
+ * @returns {Promise<object|null>} Parsed result_data or null
+ */
+const getComputationResultFromBigQuery = async (computationName, dateStr, entityId = '_global', logger = null) => {
+  if (process.env.BIGQUERY_ENABLED === 'false') return null;
+  const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+  const normalizedName = normalizeComputationName(computationName);
+  try {
+    const rows = await bigqueryQuery(
+      `SELECT result_data FROM \`${datasetId}.${COMPUTATION_RESULTS_TABLE}\`
+       WHERE date = @date AND computation_name = @name AND entity_id = @entityId
+       ORDER BY updated_at DESC LIMIT 1`,
+      { params: { date: dateStr, name: normalizedName, entityId } },
+      logger
+    );
+    if (!rows || rows.length === 0) return null;
+    const raw = rows[0].result_data;
+    if (typeof raw === 'string') {
+      try { return JSON.parse(raw); } catch (e) { return null; }
+    }
+    return raw;
+  } catch (error) {
+    if (logger) logger.log('WARN', `[BigQuery] getComputationResultFromBigQuery failed: ${error.message}`);
+    return null;
+  }
+};
+
+/**
+ * Fetch all entity results for a computation/date from BigQuery (e.g. PIDailyAssetAUM per PI).
+ * @param {string} computationName - Config name (e.g. PIDailyAssetAUM)
+ * @param {string} dateStr - Date YYYY-MM-DD
+ * @param {object} [logger] - Logger
+ * @returns {Promise<object>} Map of entity_id -> parsed result_data
+ */
+const getComputationResultsFromBigQueryForDate = async (computationName, dateStr, logger = null) => {
+  if (process.env.BIGQUERY_ENABLED === 'false') return {};
+  const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+  const normalizedName = normalizeComputationName(computationName);
+  try {
+    const rows = await bigqueryQuery(
+      `SELECT entity_id, result_data FROM \`${datasetId}.${COMPUTATION_RESULTS_TABLE}\`
+       WHERE date = @date AND computation_name = @name`,
+      { params: { date: dateStr, name: normalizedName } },
+      logger
+    );
+    const out = {};
+    if (rows) {
+      for (const row of rows) {
+        let data = row.result_data;
+        if (typeof data === 'string') {
+          try { data = JSON.parse(data); } catch (e) { continue; }
+        }
+        if (row.entity_id != null) out[String(row.entity_id)] = data || {};
+      }
+    }
+    return out;
+  } catch (error) {
+    if (logger) logger.log('WARN', `[BigQuery] getComputationResultsFromBigQueryForDate failed: ${error.message}`);
+    return {};
+  }
+};
+
+// 10.4. Find most recent available GlobalAumPerAsset data (from BigQuery)
 const findMostRecentGlobalAumData = async (db, startDateStr, maxLookbackDays = 7) => {
   try {
     const startDate = new Date(startDateStr);
-
-    // Look back through dates to find the most recent available data
     for (let i = 0; i < maxLookbackDays; i++) {
       const checkDate = new Date(startDate);
       checkDate.setDate(startDate.getDate() - i);
       const dateKey = checkDate.toISOString().split('T')[0];
-
-      try {
-        const computationData = await getComputationResults(db, 'GlobalAumPerAsset30D', dateKey);
-
-        // If we got valid data (not null/empty), return it
-        if (computationData && typeof computationData === 'object' && Object.keys(computationData).length > 0) {
-          return {
-            date: dateKey,
-            data: computationData
-          };
-        }
-      } catch (error) {
-        // Missing data for this date - continue looking back
-        console.log(`[GlobalAum] No data found for ${dateKey}, checking previous date...`);
+      const computationData = await getComputationResultFromBigQuery('GlobalAumPerAsset', dateKey, '_global');
+      if (computationData && typeof computationData === 'object' && Object.keys(computationData).length > 0) {
+        return { date: dateKey, data: computationData };
      }
    }
-
-    // No data found in the lookback period
    return null;
  } catch (error) {
    console.error(`[GlobalAum] Error finding most recent data:`, error);
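A short usage sketch for the two new fetchers (assumes bigqueryQuery is the parameterized-query helper this file takes from core/utils/bigquery_utils.js, whose implementation is not shown in this hunk):

    // Name normalization mirrors ManifestBuilder: lowercase, alphanumeric only.
    normalizeComputationName('GlobalAumPerAsset'); // 'globalaumperasset'

    (async () => {
      // One global result for a date:
      const aum = await getComputationResultFromBigQuery('GlobalAumPerAsset', '2026-01-09', '_global', console);
      if (aum) console.log('tickers:', Object.keys(aum).length);

      // Every entity's result for a computation/date, keyed by entity_id:
      const perPi = await getComputationResultsFromBigQueryForDate('PIDailyAssetAUM', '2026-01-09', console);
      console.log('PIs returned:', Object.keys(perPi).length);
    })();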
@@ -1411,55 +1558,28 @@ const findMostRecentGlobalAumData = async (db, startDateStr, maxLookbackDays = 7
   }
 };
 
-// 10.5. Fetch GlobalAumPerAsset30D with 7-day lookback
-// Returns the last 7 days of successfully stored data for a specific ticker or all assets
+// 10.5. Fetch GlobalAumPerAsset with lookback (from BigQuery)
 const fetchGlobalAumPerAssetWithLookback = async (db, dateStr, ticker = null, lookbackDays = 7) => {
   try {
     const endDate = new Date(dateStr);
     const results = [];
-
-    // Fetch data for the last 7 days, handling missing data gracefully
     for (let i = 0; i < lookbackDays; i++) {
       const checkDate = new Date(endDate);
       checkDate.setDate(endDate.getDate() - i);
       const dateKey = checkDate.toISOString().split('T')[0];
-
-      try {
-        const computationData = await getComputationResults(db, 'GlobalAumPerAsset30D', dateKey);
-
-        // If we got valid data (not null/empty), add it to results
-        if (computationData && typeof computationData === 'object' && Object.keys(computationData).length > 0) {
-          if (ticker) {
-            // If searching for a specific ticker, only include if it exists
-            const tickerUpper = ticker.toUpperCase();
-            if (computationData[tickerUpper] !== undefined) {
-              results.push({
-                date: dateKey,
-                aum: computationData[tickerUpper],
-                ticker: tickerUpper
-              });
-            }
-          } else {
-            // Return all assets for this date
-            results.push({
-              date: dateKey,
-              data: computationData
-            });
+      const computationData = await getComputationResultFromBigQuery('GlobalAumPerAsset', dateKey, '_global');
+      if (computationData && typeof computationData === 'object' && Object.keys(computationData).length > 0) {
+        if (ticker) {
+          const tickerUpper = ticker.toUpperCase();
+          if (computationData[tickerUpper] !== undefined) {
+            results.push({ date: dateKey, aum: computationData[tickerUpper], ticker: tickerUpper });
          }
+        } else {
+          results.push({ date: dateKey, data: computationData });
        }
-      } catch (error) {
-        // Missing data for this date - skip it (graceful handling)
-        console.log(`[GlobalAum] No data found for ${dateKey}, skipping`);
      }
    }
-
-    // Sort by date descending (newest first)
-    results.sort((a, b) => {
-      const dateA = new Date(a.date);
-      const dateB = new Date(b.date);
-      return dateB.getTime() - dateA.getTime();
-    });
-
+    results.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());
    return results;
  } catch (error) {
    console.error(`[GlobalAum] Error fetching with lookback:`, error);
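The two return shapes, sketched (note db stays in the signature for compatibility even though reads now come from BigQuery; the values shown are hypothetical):

    (async () => {
      // Ticker mode: rows shaped { date, aum, ticker }, newest first.
      const spy = await fetchGlobalAumPerAssetWithLookback(db, '2026-01-09', 'spy');
      // e.g. [{ date: '2026-01-09', aum: 1234567.8, ticker: 'SPY' }, ...]

      // All-assets mode: rows shaped { date, data }, where data maps ticker -> AUM.
      const all = await fetchGlobalAumPerAssetWithLookback(db, '2026-01-09');
      if (all.length > 0) console.log(all[0].date, Object.keys(all[0].data).length);
    })();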
@@ -1473,12 +1593,12 @@ const fetchNotifications = async (firestore, userId, options = {}) => {
 try {
   let query = firestore.collection('SignedInUsers').doc(userId).collection('notifications');
   if (unreadOnly) query = query.where('read', '==', false);
-
+
   const snapshot = await query.orderBy('createdAt', 'desc').limit(limit * 2).get(); // Fetch more to filter
-
+
   // Filter out excluded types (like watchlistAlerts which go to alerts bell)
   let notifications = snapshot.docs.map(doc => ({ id: doc.id, ...doc.data() }));
-
+
   if (excludeTypes && excludeTypes.length > 0) {
     notifications = notifications.filter(notif => {
       // Exclude by type field or metadata.notificationType
@@ -1486,7 +1606,7 @@ const fetchNotifications = async (firestore, userId, options = {}) => {
       return !excludeTypes.includes(notifType);
     });
   }
-
+
   // Limit after filtering
   return notifications.slice(0, limit);
 } catch (error) {
@@ -1509,16 +1629,16 @@ const markNotificationRead = async (firestore, userId, notificationIds, markAll
 const MAX_NOTIFICATIONS_TO_PROCESS = 1000; // Limit to prevent timeouts
 const snapshot = await collectionRef.where('read', '==', false).limit(MAX_NOTIFICATIONS_TO_PROCESS).get();
 console.log(`[markNotificationRead] Marking ${snapshot.size} notifications as read for user ${userId} (limited to ${MAX_NOTIFICATIONS_TO_PROCESS})`);
-
+
 if (snapshot.size === 0) {
   console.log(`[markNotificationRead] No unread notifications found for user ${userId}`);
   return;
 }
-
+
 snapshot.docs.forEach(doc => {
   currentBatch.update(doc.ref, { read: true, readAt: new Date() });
   batchCount++;
-
+
   // If batch is full, commit it and start a new one
   if (batchCount >= MAX_BATCH_SIZE) {
     batches.push(currentBatch);
@@ -1526,7 +1646,7 @@ const markNotificationRead = async (firestore, userId, notificationIds, markAll
     batchCount = 0;
   }
 });
-
+
 // Add the last batch if it has operations
 if (batchCount > 0) {
   batches.push(currentBatch);
@@ -1537,7 +1657,7 @@ const markNotificationRead = async (firestore, userId, notificationIds, markAll
   const ref = collectionRef.doc(id);
   currentBatch.update(ref, { read: true, readAt: new Date() });
   batchCount++;
-
+
   // If batch is full, commit it and start a new one
   if (batchCount >= MAX_BATCH_SIZE) {
     batches.push(currentBatch);
@@ -1545,7 +1665,7 @@ const markNotificationRead = async (firestore, userId, notificationIds, markAll
     batchCount = 0;
   }
 });
-
+
 // Add the last batch if it has operations
 if (batchCount > 0) {
   batches.push(currentBatch);
@@ -1575,16 +1695,16 @@ const trackPopularInvestorView = async (db, piId, viewerId = null, viewerType =
 // References
 // A. Global Day Aggregation
 const globalDayRef = db.collection('PIPageViewsData').doc(todayStr);
-
+
 // B. Per PI Daily Stats
 const piDailyRef = db.collection('PopularInvestors').doc(piId).collection('profileViews').doc(todayStr);
-
+
 // 1. Determine Uniqueness (Read PI Daily)
 // We read the PI Daily stats to check the 'uniqueViewers' array
 const piDailySnap = await piDailyRef.get();
 let isUnique = true;
 let currentUniqueViewers = [];
-
+
 if (piDailySnap.exists) {
   const data = piDailySnap.data();
   currentUniqueViewers = data.uniqueViewers || [];
@@ -1600,19 +1720,19 @@ const trackPopularInvestorView = async (db, piId, viewerId = null, viewerType =
   piCid: Number(piId),
   totalViews: FieldValue.increment(1)
 };
-
+
 // Add to uniqueViewers array if unique
 if (viewerId && isUnique) {
   piDailyUpdate.uniqueViewers = FieldValue.arrayUnion(String(viewerId));
 }
-
+
 batch.set(piDailyRef, piDailyUpdate, { merge: true });
 
 // 3. PI Individual View Update (PopularInvestors/{piId}/views/{viewId})
 if (viewerId) {
   const viewId = `${piId}_${viewerId}_${timestamp}`;
   const individualRef = db.collection('PopularInvestors').doc(piId).collection('views').doc(viewId);
-
+
   batch.set(individualRef, {
     date: todayStr,
     piCid: Number(piId),
@@ -1635,39 +1755,49 @@ const trackPopularInvestorView = async (db, piId, viewerId = null, viewerType =
   globalUpdate[`${piId}.viewsByUser.${viewerId}.lastViewed`] = FieldValue.serverTimestamp();
   globalUpdate[`${piId}.viewsByUser.${viewerId}.viewCount`] = FieldValue.increment(1);
 }
-
+
 // Use set with merge to create the global document if it doesn't exist for the day
 batch.set(globalDayRef, globalUpdate, { merge: true });
 
 await batch.commit();
-
+
 // 5. Write to BigQuery (after Firestore commit to get final values)
 if (process.env.BIGQUERY_ENABLED !== 'false') {
   try {
     const { ensurePIPageViewsTable, insertRowsWithMerge } = require('../../../core/utils/bigquery_utils');
     await ensurePIPageViewsTable();
-
+
     // Read the updated document to get final values (after increments)
     const updatedDoc = await globalDayRef.get();
     if (updatedDoc.exists) {
       const data = updatedDoc.data();
       const piData = data[piId];
-
+
       if (piData) {
-        // Transform to BigQuery format
-        const lastUpdated = piData.lastUpdated
-          ? (piData.lastUpdated.toDate ? piData.lastUpdated.toDate().toISOString() : piData.lastUpdated)
+        // Transform to BigQuery format (views_by_user: JSON-serializable; lastViewed as ISO string)
+        const lastUpdated = piData.lastUpdated
+          ? (piData.lastUpdated.toDate ? piData.lastUpdated.toDate().toISOString() : String(piData.lastUpdated))
          : new Date().toISOString();
-
+
+        const viewsByUser = piData.viewsByUser || {};
+        const viewsByUserForBq = {};
+        for (const [uid, rec] of Object.entries(viewsByUser)) {
+          if (!rec || typeof rec !== 'object') continue;
+          const lastViewed = rec.lastViewed
+            ? (rec.lastViewed.toDate ? rec.lastViewed.toDate().toISOString() : String(rec.lastViewed))
+            : null;
+          viewsByUserForBq[uid] = { lastViewed, viewCount: rec.viewCount ?? 0 };
+        }
+
        const bigqueryRow = {
          date: todayStr,
          pi_id: parseInt(piId, 10),
          total_views: piData.totalViews || 0,
-          unique_viewers: piData.uniqueViewers || 0,
-          views_by_user: piData.viewsByUser || {},
+          unique_viewers: piData.uniqueViewers ?? 0,
+          views_by_user: viewsByUserForBq,
          last_updated: lastUpdated
        };
-
+
        // Use MERGE to update existing row or insert new
        const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
        await insertRowsWithMerge(
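The same Timestamp-to-ISO coercion now repeats in several of these writers; a hypothetical helper (not in the diff) capturing the pattern:

    // Firestore Timestamps expose toDate(); anything else is coerced to a
    // string, and a missing value falls back to "now" (or a caller default).
    const toIsoString = (value, fallback = new Date().toISOString()) => {
      if (!value) return fallback;
      if (typeof value.toDate === 'function') return value.toDate().toISOString();
      return String(value);
    };
    // e.g. last_updated: toIsoString(piData.lastUpdated)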
@@ -1684,7 +1814,7 @@ const trackPopularInvestorView = async (db, piId, viewerId = null, viewerType =
       // Don't throw - Firestore write succeeded, BigQuery is secondary
     }
   }
-
+
   return { success: true };
 };
 // ==========================================
@@ -1706,12 +1836,12 @@ const fetchUserAlerts = async (db, userId, options = {}) => {
   const data = doc.data();
   // Convert Firestore timestamps to ISO strings
   if (data.createdAt) {
-    data.createdAt = data.createdAt.toDate ? data.createdAt.toDate().toISOString() :
-      (data.createdAt.toISOString ? data.createdAt.toISOString() : data.createdAt);
+    data.createdAt = data.createdAt.toDate ? data.createdAt.toDate().toISOString() :
+      (data.createdAt.toISOString ? data.createdAt.toISOString() : data.createdAt);
   }
   if (data.readAt) {
-    data.readAt = data.readAt.toDate ? data.readAt.toDate().toISOString() :
-      (data.readAt.toISOString ? data.readAt.toISOString() : data.readAt);
+    data.readAt = data.readAt.toDate ? data.readAt.toDate().toISOString() :
+      (data.readAt.toISOString ? data.readAt.toISOString() : data.readAt);
   }
   return { id: doc.id, ...data };
 });
@@ -1721,7 +1851,7 @@ const subscribeToWatchlistAlerts = async (db, userId, watchlistId, piId, alertCo
 // Path: watchlist_subscriptions/{userId}/alerts/{piId}
 // This allows quick lookup of "Does User X subscribe to PI Y?"
 const subRef = db.collection('watchlist_subscriptions').doc(userId).collection('alerts').doc(piId);
-
+
 await subRef.set({
   userCid: Number(userId),
   piCid: Number(piId),
@@ -1729,7 +1859,7 @@ const subscribeToWatchlistAlerts = async (db, userId, watchlistId, piId, alertCo
   alertTypes: alertConfig || { all: true }, // Default config
   subscribedAt: FieldValue.serverTimestamp()
 }, { merge: true });
-
+
 return { success: true };
};

@@ -1761,14 +1891,14 @@ const fetchPublicWatchlists = async (db, limit = 50, offset = 0) => {
 */
const publishWatchlistVersion = async (db, userId, watchlistId) => {
  const batch = db.batch();
-
+
  // 1. Fetch Source Watchlist
  const userRef = db.collection('SignedInUsers').doc(userId).collection('watchlists').doc(watchlistId);
  const userSnap = await userRef.get();
-
+
  if (!userSnap.exists) throw new Error("Watchlist not found");
  const data = userSnap.data();
-
+
  if (data.createdBy !== userId) throw new Error("Ownership required to publish");

  // 2. Get/Inc Version Number from Public Meta Doc
@@ -1779,18 +1909,18 @@ const publishWatchlistVersion = async (db, userId, watchlistId) => {
 // 3. Create Immutable Snapshot
 const versionId = `${watchlistId}_v${nextVersion}`;
 const versionRef = db.collection('public_watchlists').doc(watchlistId).collection('versions').doc(String(nextVersion));
-
+
 const snapshotData = {
   ...data,
   watchlistId,
   version: nextVersion,
   isImmutable: true, // Lock this version
   snapshotAt: new Date(),
-  copyCount: 0
+  copyCount: 0
 };
-
+
 // Cleanup internal fields before publishing
-delete snapshotData.isAutoGenerated;
+delete snapshotData.isAutoGenerated;

 batch.set(versionRef, snapshotData);

@@ -1831,7 +1961,7 @@ const copyWatchlist = async (db, userId, sourceWatchlistId, version = null) => {
     .collection('versions').doc(String(latest)).get();
   sourceData = vSnap.data();
 } else {
-  throw new Error("Public watchlist not found");
+  throw new Error("Public watchlist not found");
 }
}

@@ -1859,17 +1989,17 @@ const copyWatchlist = async (db, userId, sourceWatchlistId, version = null) => {
 // 3. Execute Copy & Increment Global Counter
 const batch = db.batch();
 batch.set(newRef, newPayload);
-
+
 // Increment copy count on the public reference
 const publicRef = db.collection('public_watchlists').doc(sourceWatchlistId);
 batch.update(publicRef, { copyCount: FieldValue.increment(1) });

 await batch.commit();
-
+
 // Note: We should technically call 'manageUserWatchlist' here to sync the PIs to the global
 // root data, but for brevity, assuming the UI will trigger a 'save' or we implement the
 // root data sync logic here similar to manageUserWatchlist.
-
+
 return { success: true, id: newWatchlistId };
};

@@ -1896,31 +2026,31 @@ const fetchAllReviewsForPI = async (db, piId, limit = 100) => {
   const data = doc.data();
   // Convert Firestore timestamps to ISO strings
   if (data.createdAt) {
-    data.createdAt = data.createdAt.toDate ? data.createdAt.toDate().toISOString() :
-      (data.createdAt.toISOString ? data.createdAt.toISOString() :
-      (data.createdAt._seconds ? new Date(data.createdAt._seconds * 1000).toISOString() : data.createdAt));
+    data.createdAt = data.createdAt.toDate ? data.createdAt.toDate().toISOString() :
+      (data.createdAt.toISOString ? data.createdAt.toISOString() :
+      (data.createdAt._seconds ? new Date(data.createdAt._seconds * 1000).toISOString() : data.createdAt));
   }
   if (data.updatedAt) {
-    data.updatedAt = data.updatedAt.toDate ? data.updatedAt.toDate().toISOString() :
-      (data.updatedAt.toISOString ? data.updatedAt.toISOString() :
-      (data.updatedAt._seconds ? new Date(data.updatedAt._seconds * 1000).toISOString() : data.updatedAt));
+    data.updatedAt = data.updatedAt.toDate ? data.updatedAt.toDate().toISOString() :
+      (data.updatedAt.toISOString ? data.updatedAt.toISOString() :
+      (data.updatedAt._seconds ? new Date(data.updatedAt._seconds * 1000).toISOString() : data.updatedAt));
   }
   return data;
 });
-
+
 // 2. Calculate Stats
 let total = 0;
 const counts = { 1: 0, 2: 0, 3: 0, 4: 0, 5: 0 };
-
+
 reviews.forEach(r => {
   const rating = r.rating || 0;
   total += rating;
-  if(counts[rating] !== undefined) counts[rating]++;
+  if (counts[rating] !== undefined) counts[rating]++;
 });
-
+
 const count = reviews.length;
 const avg = count > 0 ? Number((total / count).toFixed(2)) : 0;
-
+
 // Calculate percentages
 const percentages = {
   1: count > 0 ? Number((counts[1] / count * 100).toFixed(1)) : 0,
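A worked example of the stats computed above (the percentages object is completed in the next hunk): for four reviews rated 5, 5, 4, 2:

    // total = 16, count = 4 -> averageRating = Number((16 / 4).toFixed(2)) = 4
    // counts      = { 1: 0, 2: 1, 3: 0, 4: 1, 5: 2 }
    // percentages = { 1: 0, 2: 25, 3: 0, 4: 25, 5: 50 }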
@@ -1930,14 +2060,14 @@ const fetchAllReviewsForPI = async (db, piId, limit = 100) => {
1930
2060
  5: count > 0 ? Number((counts[5] / count * 100).toFixed(1)) : 0
1931
2061
  };
1932
2062
 
1933
- return {
1934
- count,
1935
- averageRating: avg,
2063
+ return {
2064
+ count,
2065
+ averageRating: avg,
1936
2066
  ratingDistribution: {
1937
2067
  counts,
1938
2068
  percentages
1939
2069
  },
1940
- reviews
2070
+ reviews
1941
2071
  };
1942
2072
  };
1943
2073
 
@@ -1949,15 +2079,15 @@ const checkReviewEligibility = async (db, userId, piId) => {
1949
2079
  try {
1950
2080
  await fetchPopularInvestorMasterList(db, userId);
1951
2081
  // If this succeeds, user is a PI
1952
- const targetIsPI = await fetchPopularInvestorMasterList(db, piId).then(()=>true).catch(()=>false);
1953
- if(targetIsPI) return { eligible: false, reason: "PI cannot review PI" };
1954
- } catch(e) { /* User is not PI, continue */ }
2082
+ const targetIsPI = await fetchPopularInvestorMasterList(db, piId).then(() => true).catch(() => false);
2083
+ if (targetIsPI) return { eligible: false, reason: "PI cannot review PI" };
2084
+ } catch (e) { /* User is not PI, continue */ }
1955
2085
 
1956
2086
  // 3. Copier Check
1957
2087
  const hasCopied = await hasUserCopiedPopularInvestor(db, userId, piId);
1958
- return {
1959
- eligible: hasCopied,
1960
- reason: hasCopied ? "Eligible" : "Must copy PI to review"
2088
+ return {
2089
+ eligible: hasCopied,
2090
+ reason: hasCopied ? "Eligible" : "Must copy PI to review"
1961
2091
  };
1962
2092
  };
1963
2093
 
@@ -1987,16 +2117,16 @@ const initiateVerification = async (db, username) => {
1987
2117
  expiresAt: new Date(Date.now() + 3600000) // 1 hour expiry
1988
2118
  });
1989
2119
 
1990
- return {
1991
- success: true,
2120
+ return {
2121
+ success: true,
1992
2122
  message: "Please place this code in your eToro Bio or Short Bio.",
1993
- otp
2123
+ otp
1994
2124
  };
1995
2125
  };
1996
2126
 
1997
2127
  const finalizeVerification = async (db, pubsub, userId, username) => {
1998
2128
  if (!username) throw new Error("Missing username.");
1999
-
2129
+
2000
2130
  const verificationsCollection = 'verifications';
2001
2131
  const signedInUsersCollection = 'SignedInUsers';
2002
2132
 
@@ -2027,7 +2157,7 @@ const finalizeVerification = async (db, pubsub, userId, username) => {
2027
2157
  try {
2028
2158
  // Using native fetch (Node 18+)
2029
2159
  const response = await fetch(targetUrl, { method: 'GET', headers: requestHeaders });
2030
-
2160
+
2031
2161
  if (response.ok) {
2032
2162
  profileData = await response.json();
2033
2163
  } else {
@@ -2045,7 +2175,7 @@ const finalizeVerification = async (db, pubsub, userId, username) => {
2045
2175
  // C. Validate OTP in Bio
2046
2176
  const bio = profileData.userBio?.aboutMe || profileData.aboutMe || "";
2047
2177
  const bioShort = profileData.userBio?.aboutMeShort || profileData.aboutMeShort || "";
2048
-
2178
+
2049
2179
  if (!bio.includes(otp) && !bioShort.includes(otp)) {
2050
2180
  throw new Error("OTP not found in Bio. Ensure the code is saved in your 'About Me' section.");
2051
2181
  }
@@ -2055,8 +2185,8 @@ const finalizeVerification = async (db, pubsub, userId, username) => {
2055
2185
  const isOptOut = profileData.optOut === true;
2056
2186
 
2057
2187
  // 1. Mark Request Verified
2058
- await docRef.update({
2059
- status: 'VERIFIED',
2188
+ await docRef.update({
2189
+ status: 'VERIFIED',
2060
2190
  verifiedAt: FieldValue.serverTimestamp(),
2061
2191
  cid: realCID,
2062
2192
  isOptOut
@@ -2065,7 +2195,7 @@ const finalizeVerification = async (db, pubsub, userId, username) => {
2065
2195
  // 2. Create/Update verification data in new format location: /SignedInUsers/{cid}/verification/data
2066
2196
  const verificationDataRef = db.collection('SignedInUsers').doc(String(realCID)).collection('verification').doc('data');
2067
2197
  const existingVerificationDoc = await verificationDataRef.get();
2068
-
2198
+
2069
2199
  let emails = [];
2070
2200
  if (existingVerificationDoc.exists()) {
2071
2201
  const existingData = existingVerificationDoc.data();
@@ -2073,7 +2203,7 @@ const finalizeVerification = async (db, pubsub, userId, username) => {
2073
2203
  emails = Array.isArray(existingData.email) ? existingData.email : (existingData.email ? [existingData.email] : []);
2074
2204
  }
2075
2205
  // Note: Email will be added by frontend completeAccountSetup when Firebase Auth email is available
2076
-
2206
+
2077
2207
  await verificationDataRef.set({
2078
2208
  etoroUsername: profileData.username,
2079
2209
  etoroCID: realCID,
@@ -2084,7 +2214,7 @@ const finalizeVerification = async (db, pubsub, userId, username) => {
2084
2214
  accountSetupComplete: false, // Will be set to true by frontend completeAccountSetup
2085
2215
  createdAt: FieldValue.serverTimestamp(),
2086
2216
  }, { merge: true });
2087
-
2217
+
2088
2218
  // 3. Create lookup documents for all emails (O(1) lookup optimization)
2089
2219
  // This ensures future lookups are fast even if emails are added later
2090
2220
  for (const email of emails) {
@@ -2098,7 +2228,7 @@ const finalizeVerification = async (db, pubsub, userId, username) => {
2098
2228
  try {
2099
2229
  // Replicating the 'unifiedTask' payload from generic-api
2100
2230
  const requestId = `signup-${realCID}-${Date.now()}`;
2101
-
2231
+
2102
2232
  // Create request tracking doc
2103
2233
  await db.collection('user_sync_requests').doc(String(realCID))
2104
2234
  .collection('requests').doc(requestId).set({
@@ -2127,15 +2257,15 @@ const finalizeVerification = async (db, pubsub, userId, username) => {
2127
2257
  requestingUserCid: realCID
2128
2258
  }
2129
2259
  });
2130
-
2260
+
2131
2261
  console.log(`[Verification] Triggered unified data fetch for ${username} (${realCID})`);
2132
2262
  } catch (e) {
2133
2263
  console.error(`[Verification] Failed to trigger sync: ${e.message}`);
2134
2264
  }
2135
2265
  }
2136
2266
 
2137
- return {
2138
- success: true,
2267
+ return {
2268
+ success: true,
2139
2269
  message: "Account verified successfully. Data ingestion started.",
2140
2270
  cid: realCID
2141
2271
  };
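
Read together with initiateVerification above, this completes a two-step ownership proof: issue an OTP with a one-hour expiry, have the user paste it into their eToro Bio, then re-fetch the public profile and confirm the code appears in aboutMe or aboutMeShort. A hedged sketch of the sequence; the wrapper function and logging are assumptions:

// Illustrative wiring only; error handling trimmed.
const verifyAccount = async (db, pubsub, userId, username) => {
  const { otp, message } = await initiateVerification(db, username);
  console.log(message, otp); // user saves the code into their Bio, then returns
  return finalizeVerification(db, pubsub, userId, username); // checks the Bio, marks VERIFIED, triggers sync
};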
@@ -2232,62 +2362,62 @@ const fetchPopularInvestorCategories = async (db) => {
2232
2362
  // ==========================================
2233
2363
 
2234
2364
  const ALERT_TYPES = {
2235
- increasedRisk: {
2236
- id: 'increasedRisk',
2237
- name: 'Increased Risk',
2238
- description: 'Alert when a Popular Investor\'s risk score increases',
2239
- computationName: 'RiskScoreIncrease',
2240
- severity: 'high',
2241
- enabled: true
2242
- },
2243
- volatilityChanges: {
2244
- id: 'volatilityChanges',
2245
- name: 'Significant Volatility',
2246
- description: 'Alert when a Popular Investor\'s portfolio volatility exceeds 50%',
2247
- computationName: 'SignificantVolatility',
2248
- severity: 'medium',
2249
- enabled: true
2250
- },
2251
- newSector: {
2252
- id: 'newSector',
2253
- name: 'New Sector Entry',
2254
- description: 'Alert when a Popular Investor enters a new sector',
2255
- computationName: 'NewSectorExposure',
2256
- severity: 'low',
2257
- enabled: true
2258
- },
2259
- increasedPositionSize: {
2260
- id: 'increasedPositionSize',
2261
- name: 'Increased Position Size',
2262
- description: 'Alert when a Popular Investor significantly increases a position size (>5%)',
2263
- computationName: 'PositionInvestedIncrease',
2264
- severity: 'medium',
2265
- enabled: true
2266
- },
2267
- newSocialPost: {
2268
- id: 'newSocialPost',
2269
- name: 'New Social Post',
2270
- description: 'Alert when a Popular Investor makes a new social post',
2271
- computationName: 'NewSocialPost',
2272
- severity: 'low',
2273
- enabled: true
2274
- },
2275
- behavioralAnomaly: {
2276
- id: 'behavioralAnomaly',
2277
- name: 'Behavioral Anomaly',
2278
- description: 'Alert when a Popular Investor deviates significantly from their baseline behavior',
2279
- computationName: 'BehavioralAnomaly',
2280
- severity: 'high',
2281
- enabled: true
2282
- },
2283
- testSystemProbe: {
2284
- id: 'testSystemProbe',
2285
- name: 'Test System Probe',
2286
- description: 'Always-on debug alert',
2287
- computationName: 'TestSystemProbe',
2288
- severity: 'info',
2289
- enabled: true
2290
- }
2365
+ increasedRisk: {
2366
+ id: 'increasedRisk',
2367
+ name: 'Increased Risk',
2368
+ description: 'Alert when a Popular Investor\'s risk score increases',
2369
+ computationName: 'RiskScoreIncrease',
2370
+ severity: 'high',
2371
+ enabled: true
2372
+ },
2373
+ volatilityChanges: {
2374
+ id: 'volatilityChanges',
2375
+ name: 'Significant Volatility',
2376
+ description: 'Alert when a Popular Investor\'s portfolio volatility exceeds 50%',
2377
+ computationName: 'SignificantVolatility',
2378
+ severity: 'medium',
2379
+ enabled: true
2380
+ },
2381
+ newSector: {
2382
+ id: 'newSector',
2383
+ name: 'New Sector Entry',
2384
+ description: 'Alert when a Popular Investor enters a new sector',
2385
+ computationName: 'NewSectorExposure',
2386
+ severity: 'low',
2387
+ enabled: true
2388
+ },
2389
+ increasedPositionSize: {
2390
+ id: 'increasedPositionSize',
2391
+ name: 'Increased Position Size',
2392
+ description: 'Alert when a Popular Investor significantly increases a position size (>5%)',
2393
+ computationName: 'PositionInvestedIncrease',
2394
+ severity: 'medium',
2395
+ enabled: true
2396
+ },
2397
+ newSocialPost: {
2398
+ id: 'newSocialPost',
2399
+ name: 'New Social Post',
2400
+ description: 'Alert when a Popular Investor makes a new social post',
2401
+ computationName: 'NewSocialPost',
2402
+ severity: 'low',
2403
+ enabled: true
2404
+ },
2405
+ behavioralAnomaly: {
2406
+ id: 'behavioralAnomaly',
2407
+ name: 'Behavioral Anomaly',
2408
+ description: 'Alert when a Popular Investor deviates significantly from their baseline behavior',
2409
+ computationName: 'BehavioralAnomaly',
2410
+ severity: 'high',
2411
+ enabled: true
2412
+ },
2413
+ testSystemProbe: {
2414
+ id: 'testSystemProbe',
2415
+ name: 'Test System Probe',
2416
+ description: 'Always-on debug alert',
2417
+ computationName: 'TestSystemProbe',
2418
+ severity: 'info',
2419
+ enabled: true
2420
+ }
2291
2421
  };
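
ALERT_TYPES is keyed by alert id, while alert payloads elsewhere in this diff are keyed by computation name (RiskScoreIncrease, BehavioralAnomaly, and so on), so a reverse lookup is a natural companion. A small hypothetical helper, not exported by the package:

// Map a computation name back to its alert config, or null when unregistered.
const findAlertByComputation = (computationName) =>
  Object.values(ALERT_TYPES).find(a => a.computationName === computationName) || null;
// findAlertByComputation('BehavioralAnomaly') -> { id: 'behavioralAnomaly', severity: 'high', ... }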
2292
2422
 
2293
2423
  const manageNotificationPreferences = async (db, userId, action, payload = {}) => {
@@ -2297,7 +2427,7 @@ const manageNotificationPreferences = async (db, userId, action, payload = {}) =
2297
2427
  if (action === 'get') {
2298
2428
  const snap = await docRef.get();
2299
2429
  let prefs = {};
2300
-
2430
+
2301
2431
  if (snap.exists) {
2302
2432
  prefs = snap.data();
2303
2433
  } else {
@@ -2317,7 +2447,7 @@ const manageNotificationPreferences = async (db, userId, action, payload = {}) =
2317
2447
  ...prefs
2318
2448
  };
2319
2449
  }
2320
-
2450
+
2321
2451
  if (action === 'update') {
2322
2452
  await docRef.set(payload, { merge: true });
2323
2453
  return { success: true };
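
A minimal usage sketch for the preferences helper. Only the 'get' and 'update' actions appear in these hunks, and the emailAlerts key below is invented for illustration:

const prefs = await manageNotificationPreferences(db, userId, 'get');
await manageNotificationPreferences(db, userId, 'update', { emailAlerts: false }); // merge: true keeps unrelated keys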
@@ -2335,11 +2465,11 @@ const fetchAlertTypes = async () => {
2335
2465
  const searchPopularInvestors = async (db, queryStr) => {
2336
2466
  // 1. Get Master List (Cached/Fast)
2337
2467
  const allPis = await fetchPopularInvestorMasterList(db);
2338
-
2468
+
2339
2469
  // 2. Filter In-Memory (Server-side)
2340
2470
  const term = queryStr.toLowerCase();
2341
2471
  const results = [];
2342
-
2472
+
2343
2473
  Object.values(allPis).forEach(pi => {
2344
2474
  if (pi.username && pi.username.toLowerCase().includes(term)) {
2345
2475
  results.push({
@@ -2351,10 +2481,10 @@ const searchPopularInvestors = async (db, queryStr) => {
2351
2481
  });
2352
2482
  }
2353
2483
  });
2354
-
2484
+
2355
2485
  // 3. Sort results: among substring matches, a shorter username is usually a closer match
2356
2486
  results.sort((a, b) => a.username.length - b.username.length);
2357
-
2487
+
2358
2488
  return results.slice(0, 20);
2359
2489
  };
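
A usage sketch for the search helper; the query string is illustrative:

const matches = await searchPopularInvestors(db, 'jay');
// -> at most 20 entries, shortest matching usernames first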
2360
2490
 
@@ -2373,45 +2503,45 @@ const checkSyncRateLimits = async (db, targetId, requesterId, isDev, maxRequests
2373
2503
  const DEV_PER_TARGET_LIMIT_MS = 60 * 60 * 1000; // 1 hour
2374
2504
  const DEV_PER_TARGET_MAX_REQUESTS = 10; // Requests per target per hour
2375
2505
  const now = Date.now();
2376
-
2506
+
2377
2507
  // Check global developer limit (total requests across all targets)
2378
2508
  const globalDevRef = db.collection('developer_rate_limits').doc(String(requesterId)).collection('global').doc('latest');
2379
2509
  const globalDevSnap = await globalDevRef.get();
2380
-
2510
+
2381
2511
  let globalRequestCount = 0;
2382
2512
  let globalWindowStart = now;
2383
-
2513
+
2384
2514
  if (globalDevSnap.exists) {
2385
2515
  const globalData = globalDevSnap.data();
2386
2516
  globalRequestCount = globalData.requestCount || 0;
2387
2517
  globalWindowStart = globalData.windowStart?.toMillis() || now;
2388
-
2518
+
2389
2519
  // Reset window if expired
2390
2520
  if (now - globalWindowStart >= DEV_GLOBAL_LIMIT_MS) {
2391
2521
  globalRequestCount = 0;
2392
2522
  globalWindowStart = now;
2393
2523
  }
2394
2524
  }
2395
-
2525
+
2396
2526
  // Check per-target limit (prevent spamming specific users)
2397
2527
  const sanitizedTargetId = sanitizeCid(targetId);
2398
2528
  const perTargetRef = db.collection('user_sync_requests').doc(sanitizedTargetId).collection('developer_limits').doc(String(requesterId));
2399
2529
  const perTargetSnap = await perTargetRef.get();
2400
-
2530
+
2401
2531
  let perTargetCount = 0;
2402
2532
  let perTargetWindowStart = now;
2403
-
2533
+
2404
2534
  if (perTargetSnap.exists) {
2405
2535
  const perTargetData = perTargetSnap.data();
2406
2536
  perTargetCount = perTargetData.requestCount || 0;
2407
2537
  perTargetWindowStart = perTargetData.windowStart?.toMillis() || now;
2408
-
2538
+
2409
2539
  // Reset window if expired
2410
2540
  if (now - perTargetWindowStart >= DEV_PER_TARGET_LIMIT_MS) {
2411
2541
  perTargetCount = 0;
2412
2542
  perTargetWindowStart = now;
2413
2543
  }
2414
-
2544
+
2415
2545
  // Check per-target limit
2416
2546
  if (perTargetCount >= DEV_PER_TARGET_MAX_REQUESTS) {
2417
2547
  const waitMinutes = Math.ceil((perTargetWindowStart + DEV_PER_TARGET_LIMIT_MS - now) / 60000);
@@ -2421,7 +2551,7 @@ const checkSyncRateLimits = async (db, targetId, requesterId, isDev, maxRequests
2421
2551
  };
2422
2552
  }
2423
2553
  }
2424
-
2554
+
2425
2555
  // Check global limit
2426
2556
  if (globalRequestCount >= DEV_GLOBAL_MAX_REQUESTS) {
2427
2557
  const waitMinutes = Math.ceil((globalWindowStart + DEV_GLOBAL_LIMIT_MS - now) / 60000);
@@ -2430,36 +2560,36 @@ const checkSyncRateLimits = async (db, targetId, requesterId, isDev, maxRequests
2430
2560
  message: `Developer global rate limit exceeded. Try again in ${waitMinutes} minutes.`
2431
2561
  };
2432
2562
  }
2433
-
2563
+
2434
2564
  // Update both counters
2435
2565
  await globalDevRef.set({
2436
2566
  requestCount: globalRequestCount + 1,
2437
2567
  windowStart: Timestamp.fromMillis(globalWindowStart)
2438
2568
  }, { merge: true });
2439
-
2569
+
2440
2570
  await perTargetRef.set({
2441
2571
  requestCount: perTargetCount + 1,
2442
2572
  windowStart: Timestamp.fromMillis(perTargetWindowStart)
2443
2573
  }, { merge: true });
2444
-
2574
+
2445
2575
  return { allowed: true };
2446
2576
  }
2447
2577
 
2448
2578
  // Regular users: 1 request per 6 hours
2449
2579
  const RATE_LIMIT_MS = 6 * 60 * 60 * 1000; // 6 hours
2450
2580
  const now = Date.now();
2451
-
2581
+
2452
2582
  // Check global limit for target (prevent spamming one user/PI)
2453
2583
  const globalRef = db.collection('user_sync_requests').doc(String(targetId)).collection('global').doc('latest');
2454
2584
  const globalSnap = await globalRef.get();
2455
-
2585
+
2456
2586
  if (globalSnap.exists) {
2457
2587
  const last = globalSnap.data().lastRequestedAt?.toMillis() || 0;
2458
2588
  if (now - last < RATE_LIMIT_MS) {
2459
2589
  const waitHours = Math.ceil((last + RATE_LIMIT_MS - now) / 36e5); // 36e5 ms = 1 hour
2460
- return {
2461
- allowed: false,
2462
- message: `Recently synced. Try again in ${waitHours} hours.`
2590
+ return {
2591
+ allowed: false,
2592
+ message: `Recently synced. Try again in ${waitHours} hours.`
2463
2593
  };
2464
2594
  }
2465
2595
  }
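
The developer limits above implement a fixed-window counter (read a counter doc, reset it once the window has elapsed, reject at the limit, otherwise increment), while the regular-user path is a simple 6-hour cooldown on a single timestamp. A distilled sketch of the counter pattern over an arbitrary doc, assuming the module's Timestamp import; like the original, the read-modify-write is not transactional, so concurrent requests can slightly overshoot the limit:

// Fixed-window quota over a Firestore doc shaped { requestCount, windowStart }.
const consumeQuota = async (ref, windowMs, maxRequests) => {
  const now = Date.now();
  const snap = await ref.get();
  let count = 0, windowStart = now;
  if (snap.exists) {
    const d = snap.data();
    count = d.requestCount || 0;
    windowStart = d.windowStart?.toMillis() || now;
    if (now - windowStart >= windowMs) { count = 0; windowStart = now; } // window expired, start fresh
  }
  if (count >= maxRequests) return { allowed: false };
  await ref.set({ requestCount: count + 1, windowStart: Timestamp.fromMillis(windowStart) }, { merge: true });
  return { allowed: true };
};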
@@ -2471,19 +2601,19 @@ const getSyncStatus = async (db, targetId) => {
2471
2601
  const reqRef = db.collection('user_sync_requests').doc(String(targetId))
2472
2602
  .collection('requests').orderBy('createdAt', 'desc').limit(1);
2473
2603
  const snap = await reqRef.get();
2474
-
2604
+
2475
2605
  if (snap.empty) return { status: 'never_requested' };
2476
-
2606
+
2477
2607
  const doc = snap.docs[0];
2478
2608
  const data = doc.data();
2479
2609
  const requestId = data.requestId;
2480
2610
  const currentStatus = data.status;
2481
-
2611
+
2482
2612
  // Server-side timeout: Check if request is stale (older than 2 minutes and still processing)
2483
2613
  const STALE_THRESHOLD_MS = 2 * 60 * 1000; // 2 minutes
2484
2614
  const processingStates = ['queued', 'dispatched', 'processing', 'indexing', 'computing'];
2485
2615
  const isProcessingState = processingStates.includes(currentStatus);
2486
-
2616
+
2487
2617
  if (isProcessingState) {
2488
2618
  // First, check if computation results exist (computation may have completed but status wasn't updated)
2489
2619
  // This is especially important for "computing" status
@@ -2491,7 +2621,7 @@ const getSyncStatus = async (db, targetId) => {
2491
2621
  try {
2492
2622
  const today = new Date().toISOString().split('T')[0];
2493
2623
  const userType = data.userType || 'SIGNED_IN_USER';
2494
-
2624
+
2495
2625
  // Determine which computation to check based on user type
2496
2626
  let computationNames = [];
2497
2627
  if (userType === 'POPULAR_INVESTOR') {
@@ -2513,7 +2643,7 @@ const getSyncStatus = async (db, targetId) => {
2513
2643
  // Not a PI
2514
2644
  }
2515
2645
  }
2516
-
2646
+
2517
2647
  // Check if any computation results exist for today
2518
2648
  let computationFound = false;
2519
2649
  for (const compName of computationNames) {
@@ -2526,7 +2656,7 @@ const getSyncStatus = async (db, targetId) => {
2526
2656
  .doc(compName)
2527
2657
  .collection('pages')
2528
2658
  .doc(String(targetId));
2529
-
2659
+
2530
2660
  const pageSnap = await pageRef.get();
2531
2661
  if (pageSnap.exists) {
2532
2662
  computationFound = true;
@@ -2537,7 +2667,7 @@ const getSyncStatus = async (db, targetId) => {
2537
2667
  console.error(`Error checking computation ${compName} for ${today}:`, error);
2538
2668
  }
2539
2669
  }
2540
-
2670
+
2541
2671
  // If computation results exist, mark as completed
2542
2672
  if (computationFound) {
2543
2673
  await doc.ref.update({
@@ -2545,7 +2675,7 @@ const getSyncStatus = async (db, targetId) => {
2545
2675
  completedAt: new Date(),
2546
2676
  updatedAt: new Date()
2547
2677
  });
2548
-
2678
+
2549
2679
  return {
2550
2680
  requestId: requestId,
2551
2681
  status: 'completed',
@@ -2561,17 +2691,17 @@ const getSyncStatus = async (db, targetId) => {
2561
2691
  console.error(`[getSyncStatus] Error checking computation results:`, error);
2562
2692
  }
2563
2693
  }
2564
-
2694
+
2565
2695
  // Check age of request for timeout
2566
2696
  const createdAt = data.createdAt;
2567
2697
  const updatedAt = data.updatedAt;
2568
-
2698
+
2569
2699
  // Use updatedAt if available, otherwise createdAt
2570
2700
  const checkTime = updatedAt || createdAt;
2571
2701
  if (checkTime) {
2572
2702
  const checkTimestamp = checkTime.toMillis ? checkTime.toMillis() : (checkTime.getTime ? checkTime.getTime() : new Date(checkTime).getTime());
2573
2703
  const age = Date.now() - checkTimestamp;
2574
-
2704
+
2575
2705
  if (age > STALE_THRESHOLD_MS) {
2576
2706
  // Mark as failed due to timeout
2577
2707
  const errorMessage = `Sync request timed out after ${Math.round(age / 1000 / 60)} minutes. The task may have failed.`;
@@ -2581,7 +2711,7 @@ const getSyncStatus = async (db, targetId) => {
2581
2711
  failedAt: new Date(),
2582
2712
  updatedAt: new Date()
2583
2713
  });
2584
-
2714
+
2585
2715
  return {
2586
2716
  requestId: requestId,
2587
2717
  status: 'failed',
@@ -2594,14 +2724,14 @@ const getSyncStatus = async (db, targetId) => {
2594
2724
  }
2595
2725
  }
2596
2726
  }
2597
-
2727
+
2598
2728
  // Return current status with timestamps
2599
2729
  const result = {
2600
2730
  requestId: requestId,
2601
2731
  status: currentStatus,
2602
2732
  type: data.userType
2603
2733
  };
2604
-
2734
+
2605
2735
  // Add timestamps if available
2606
2736
  if (data.createdAt) {
2607
2737
  result.createdAt = data.createdAt.toDate ? data.createdAt.toDate().toISOString() : (data.createdAt.toISOString ? data.createdAt.toISOString() : data.createdAt);
@@ -2621,10 +2751,10 @@ const getSyncStatus = async (db, targetId) => {
2621
2751
  if (data.error) {
2622
2752
  result.error = data.error;
2623
2753
  }
2624
-
2754
+
2625
2755
  // Add canRequest flag (true only when the request is neither processing nor failed)
2626
2756
  result.canRequest = !isProcessingState && currentStatus !== 'failed';
2627
-
2757
+
2628
2758
  return result;
2629
2759
  };
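
getSyncStatus accepts three timestamp shapes (Firestore Timestamp, JS Date, ISO string) and coerces them inline in several places. A hypothetical helper capturing the same coercion once, purely illustrative:

// Normalize Firestore Timestamp | Date | ISO string | millis to epoch milliseconds.
const toEpochMillis = (t) => {
  if (!t) return null;
  if (typeof t.toMillis === 'function') return t.toMillis(); // Firestore Timestamp
  if (typeof t.getTime === 'function') return t.getTime();   // JS Date
  return new Date(t).getTime();                              // string or number
};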
2630
2760
 
@@ -2636,7 +2766,7 @@ const autoGenerateWatchlist = async (db, userId) => {
2636
2766
  // 1. Fetch Copied PIs (Try Computation First, Fallback to Portfolio)
2637
2767
  let copiedPIs = [];
2638
2768
  const today = new Date().toISOString().split('T')[0];
2639
-
2769
+
2640
2770
  try {
2641
2771
  // Try Computation
2642
2772
  const compData = await getComputationResults(db, 'SignedInUserCopiedPIs', today, userId);
@@ -2654,7 +2784,7 @@ const autoGenerateWatchlist = async (db, userId) => {
2654
2784
  }
2655
2785
 
2656
2786
  const totalCopied = copiedPIs.length;
2657
-
2787
+
2658
2788
  // If no copied PIs, send notification and return early
2659
2789
  if (totalCopied === 0) {
2660
2790
  const notificationId = `watchlist_auto_gen_${Date.now()}_${userId}`;
@@ -2696,7 +2826,7 @@ const autoGenerateWatchlist = async (db, userId) => {
2696
2826
  // Check for existing auto-generated list
2697
2827
  const listsRef = db.collection('SignedInUsers').doc(userId).collection('watchlists');
2698
2828
  const autoSnap = await listsRef.where('isAutoGenerated', '==', true).limit(1).get();
2699
-
2829
+
2700
2830
  let instruction = 'create';
2701
2831
  let payload = {
2702
2832
  name: "My Copy Watchlist",
@@ -2705,7 +2835,7 @@ const autoGenerateWatchlist = async (db, userId) => {
2705
2835
  items,
2706
2836
  isAutoGenerated: true
2707
2837
  };
2708
-
2838
+
2709
2839
  let wasUpdate = false;
2710
2840
  if (!autoSnap.empty) {
2711
2841
  instruction = 'update';
@@ -2715,7 +2845,7 @@ const autoGenerateWatchlist = async (db, userId) => {
2715
2845
 
2716
2846
  const result = await manageUserWatchlist(db, userId, instruction, payload);
2717
2847
  const generated = items.length;
2718
-
2848
+
2719
2849
  // 4. Send notification with proper text
2720
2850
  const notificationId = `watchlist_auto_gen_${Date.now()}_${userId}`;
2721
2851
  let notificationMessage;
@@ -2724,7 +2854,7 @@ const autoGenerateWatchlist = async (db, userId) => {
2724
2854
  } else {
2725
2855
  notificationMessage = `Successfully generated watchlist with ${generated} Popular Investor${generated !== 1 ? 's' : ''} from ${totalCopied} copied PI${totalCopied !== 1 ? 's' : ''}.`;
2726
2856
  }
2727
-
2857
+
2728
2858
  await db.collection('SignedInUsers').doc(userId).collection('notifications').doc(notificationId).set({
2729
2859
  id: notificationId,
2730
2860
  type: 'watchlist_auto_generate',
@@ -2741,7 +2871,7 @@ const autoGenerateWatchlist = async (db, userId) => {
2741
2871
  wasUpdate: wasUpdate
2742
2872
  }
2743
2873
  });
2744
-
2874
+
2745
2875
  return { success: true, generated: generated, totalCopied: totalCopied, message: notificationMessage };
2746
2876
  };
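
The generator resolves with the same shape whether it created or updated the list, so a caller can branch on success alone; showToast below is a hypothetical UI hook:

const { success, generated, totalCopied, message } = await autoGenerateWatchlist(db, userId);
if (success) showToast(message); // e.g. "Successfully generated watchlist with 3 Popular Investors from 3 copied PIs."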
2747
2877
 
@@ -2753,7 +2883,7 @@ const checkDataStatus = async (db, userId) => {
2753
2883
  // Check freshness of key data types
2754
2884
  const types = ['portfolio', 'tradeHistory', 'posts'];
2755
2885
  const status = {};
2756
-
2886
+
2757
2887
  for (const type of types) {
2758
2888
  const snap = await db.collection('SignedInUsers').doc(userId).collection(type).doc('latest').get();
2759
2889
  status[type] = {
@@ -2764,13 +2894,13 @@ const checkDataStatus = async (db, userId) => {
2764
2894
  lastUpdated: snap.exists ? snap.updateTime.toDate().toISOString() : null
2765
2895
  };
2766
2896
  }
2767
-
2897
+
2768
2898
  // Find the latest computation date for profile metrics (7-day lookback)
2769
2899
  const lookbackDays = 7;
2770
2900
  const today = new Date();
2771
2901
  let computationDate = null;
2772
2902
  let fallbackWindowExhausted = false;
2773
-
2903
+
2774
2904
  // Determine which computations to check based on user type
2775
2905
  // Signed-in users always have SignedInUserProfileMetrics
2776
2906
  // If they're also a PI, they may have SignedInUserPIPersonalizedMetrics
@@ -2782,14 +2912,14 @@ const checkDataStatus = async (db, userId) => {
2782
2912
  } catch (e) {
2783
2913
  // User is not a PI, only check SignedInUserProfileMetrics
2784
2914
  }
2785
-
2915
+
2786
2916
  // Check for computation results in the last 7 days
2787
2917
  // Check all relevant computation names for this user
2788
2918
  for (let i = 0; i < lookbackDays; i++) {
2789
2919
  const checkDate = new Date(today);
2790
2920
  checkDate.setDate(checkDate.getDate() - i);
2791
2921
  const dateStr = checkDate.toISOString().split('T')[0];
2792
-
2922
+
2793
2923
  // Check each computation name
2794
2924
  for (const compName of computationNames) {
2795
2925
  try {
@@ -2801,7 +2931,7 @@ const checkDataStatus = async (db, userId) => {
2801
2931
  .doc(compName)
2802
2932
  .collection('pages')
2803
2933
  .doc(String(userId));
2804
-
2934
+
2805
2935
  const pageSnap = await pageRef.get();
2806
2936
  if (pageSnap.exists) {
2807
2937
  computationDate = dateStr;
@@ -2812,18 +2942,18 @@ const checkDataStatus = async (db, userId) => {
2812
2942
  console.error(`Error checking computation ${compName} for ${dateStr}:`, error);
2813
2943
  }
2814
2944
  }
2815
-
2945
+
2816
2946
  // If we found a computation date, stop checking
2817
2947
  if (computationDate) {
2818
2948
  break;
2819
2949
  }
2820
2950
  }
2821
-
2951
+
2822
2952
  // If no computation found in 7 days, mark fallback window as exhausted
2823
2953
  if (!computationDate) {
2824
2954
  fallbackWindowExhausted = true;
2825
2955
  }
2826
-
2956
+
2827
2957
  // Return frontend-compatible format
2828
2958
  return {
2829
2959
  portfolio: {
@@ -2856,15 +2986,15 @@ const checkPopularInvestorDataStatus = async (db, piId) => {
2856
2986
  const lookbackDays = 7;
2857
2987
  const today = new Date();
2858
2988
  let computationDate = null;
2859
-
2989
+
2860
2990
  const computationName = 'PopularInvestorProfileMetrics';
2861
-
2991
+
2862
2992
  // Check for computation results in the last 7 days
2863
2993
  for (let i = 0; i < lookbackDays; i++) {
2864
2994
  const checkDate = new Date(today);
2865
2995
  checkDate.setDate(checkDate.getDate() - i);
2866
2996
  const dateStr = checkDate.toISOString().split('T')[0];
2867
-
2997
+
2868
2998
  try {
2869
2999
  const pageRef = db.collection('unified_insights')
2870
3000
  .doc(dateStr)
@@ -2874,7 +3004,7 @@ const checkPopularInvestorDataStatus = async (db, piId) => {
2874
3004
  .doc(computationName)
2875
3005
  .collection('pages')
2876
3006
  .doc(String(piId));
2877
-
3007
+
2878
3008
  const pageSnap = await pageRef.get();
2879
3009
  if (pageSnap.exists) {
2880
3010
  computationDate = dateStr;
@@ -2885,7 +3015,7 @@ const checkPopularInvestorDataStatus = async (db, piId) => {
2885
3015
  console.error(`Error checking computation ${computationName} for ${dateStr}:`, error);
2886
3016
  }
2887
3017
  }
2888
-
3018
+
2889
3019
  return {
2890
3020
  computationDate: computationDate,
2891
3021
  available: computationDate !== null
@@ -2933,7 +3063,7 @@ const getUserUsername = async (db, cid) => {
2933
3063
  const data = signedInDoc.data();
2934
3064
  return data.username || null;
2935
3065
  }
2936
-
3066
+
2937
3067
  // Try PI master list
2938
3068
  try {
2939
3069
  const piData = await fetchPopularInvestorMasterList(db, String(cid));
@@ -2959,17 +3089,17 @@ const getUserSubscriptions = async (db, userId) => {
2959
3089
  const subscriptionsRef = db.collection('watchlist_subscriptions')
2960
3090
  .doc(String(userId))
2961
3091
  .collection('alerts');
2962
-
3092
+
2963
3093
  const snapshot = await subscriptionsRef.get();
2964
3094
  const subscriptions = [];
2965
-
3095
+
2966
3096
  snapshot.forEach(doc => {
2967
3097
  subscriptions.push({
2968
3098
  piCid: Number(doc.id),
2969
3099
  ...doc.data()
2970
3100
  });
2971
3101
  });
2972
-
3102
+
2973
3103
  return subscriptions;
2974
3104
  } catch (error) {
2975
3105
  console.error(`Error fetching subscriptions: ${error}`);
@@ -2986,20 +3116,20 @@ const updateSubscription = async (db, userId, piCid, updates) => {
2986
3116
  .doc(String(userId))
2987
3117
  .collection('alerts')
2988
3118
  .doc(String(piCid));
2989
-
3119
+
2990
3120
  const subscriptionDoc = await subscriptionRef.get();
2991
-
3121
+
2992
3122
  if (!subscriptionDoc.exists) {
2993
3123
  throw new Error("Subscription not found");
2994
3124
  }
2995
-
3125
+
2996
3126
  const updateData = {
2997
3127
  ...updates,
2998
3128
  updatedAt: FieldValue.serverTimestamp()
2999
3129
  };
3000
-
3130
+
3001
3131
  await subscriptionRef.update(updateData);
3002
-
3132
+
3003
3133
  const updatedDoc = await subscriptionRef.get();
3004
3134
  return {
3005
3135
  id: updatedDoc.id,
@@ -3020,13 +3150,13 @@ const unsubscribeFromAlerts = async (db, userId, piCid) => {
3020
3150
  .doc(String(userId))
3021
3151
  .collection('alerts')
3022
3152
  .doc(String(piCid));
3023
-
3153
+
3024
3154
  const subscriptionDoc = await subscriptionRef.get();
3025
-
3155
+
3026
3156
  if (!subscriptionDoc.exists) {
3027
3157
  throw new Error("Subscription not found");
3028
3158
  }
3029
-
3159
+
3030
3160
  await subscriptionRef.delete();
3031
3161
  return { success: true };
3032
3162
  } catch (error) {
@@ -3046,22 +3176,22 @@ const getWatchlistTriggerCounts = async (db, userId, watchlistId) => {
3046
3176
  try {
3047
3177
  // Fetch watchlist
3048
3178
  const watchlist = await latestUserCentricSnapshot(
3049
- db,
3050
- userId,
3051
- 'watchlists',
3052
- 'watchlist',
3053
- 'SignedInUsers',
3179
+ db,
3180
+ userId,
3181
+ 'watchlists',
3182
+ 'watchlist',
3183
+ 'SignedInUsers',
3054
3184
  watchlistId
3055
3185
  );
3056
-
3186
+
3057
3187
  if (!watchlist || !watchlist.items) {
3058
3188
  throw new Error("Watchlist not found");
3059
3189
  }
3060
-
3190
+
3061
3191
  const items = watchlist.items || [];
3062
3192
  const triggerCounts = {};
3063
3193
  const userCidStr = String(userId);
3064
-
3194
+
3065
3195
  // Calculate date 7 days ago
3066
3196
  const today = new Date();
3067
3197
  const dates = [];
@@ -3070,15 +3200,15 @@ const getWatchlistTriggerCounts = async (db, userId, watchlistId) => {
3070
3200
  date.setDate(today.getDate() - i);
3071
3201
  dates.push(date.toISOString().split('T')[0]);
3072
3202
  }
3073
-
3203
+
3074
3204
  // For each PI in the watchlist
3075
3205
  for (const item of items) {
3076
3206
  const piCid = String(item.cid);
3077
3207
  let totalCount = 0;
3078
-
3208
+
3079
3209
  // Check each of the last 7 days - PARALLELIZED to fix N+1 query problem
3080
3210
  const dayDocs = await Promise.all(
3081
- dates.map(dateStr =>
3211
+ dates.map(dateStr =>
3082
3212
  db.collection('PIAlertHistoryData')
3083
3213
  .doc(dateStr)
3084
3214
  .get()
@@ -3088,20 +3218,20 @@ const getWatchlistTriggerCounts = async (db, userId, watchlistId) => {
3088
3218
  })
3089
3219
  )
3090
3220
  );
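  // Design note (editor sketch, not in the package): this Promise.all replaces a
  // sequential per-date loop, so the seven day-docs load concurrently instead of
  // as chained reads; the `!dayDoc` guard below suggests per-date failures are
  // mapped to null so one bad day cannot reject the whole batch.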
3091
-
3221
+
3092
3222
  // Process all day documents
3093
3223
  for (const dayDoc of dayDocs) {
3094
3224
  if (!dayDoc || !dayDoc.exists) continue;
3095
-
3225
+
3096
3226
  try {
3097
3227
  const dayData = dayDoc.data();
3098
3228
  const piData = dayData[piCid];
3099
-
3229
+
3100
3230
  if (piData) {
3101
3231
  // Check all alert types for this PI
3102
3232
  for (const [alertType, alertData] of Object.entries(piData)) {
3103
3233
  if (alertType === 'lastUpdated' || typeof alertData !== 'object') continue;
3104
-
3234
+
3105
3235
  // Check if triggeredFor array contains this user
3106
3236
  const triggeredFor = alertData.triggeredFor || [];
3107
3237
  if (Array.isArray(triggeredFor) && triggeredFor.includes(userCidStr)) {
@@ -3114,10 +3244,10 @@ const getWatchlistTriggerCounts = async (db, userId, watchlistId) => {
3114
3244
  console.warn(`Error processing day data for PI ${piCid}: ${e.message}`);
3115
3245
  }
3116
3246
  }
3117
-
3247
+
3118
3248
  triggerCounts[piCid] = totalCount;
3119
3249
  }
3120
-
3250
+
3121
3251
  return {
3122
3252
  triggerCounts,
3123
3253
  watchlistId,
@@ -3145,11 +3275,11 @@ const getWatchlistTriggerCounts = async (db, userId, watchlistId) => {
3145
3275
  const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}, timeRange = 'last_7_days', limit = 100) => {
3146
3276
  try {
3147
3277
  console.log(`[queryDynamicWatchlistMatches] Aggregating ${computationName} over ${timeRange}`);
3148
-
3278
+
3149
3279
  // 1. Determine Date Range
3150
3280
  const endDate = new Date();
3151
3281
  const startDate = new Date();
3152
-
3282
+
3153
3283
  switch (timeRange) {
3154
3284
  case 'today':
3155
3285
  // Just today
@@ -3161,10 +3291,10 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3161
3291
  default:
3162
3292
  startDate.setDate(startDate.getDate() - 7);
3163
3293
  }
3164
-
3294
+
3165
3295
  const startDateStr = startDate.toISOString().split('T')[0];
3166
3296
  const endDateStr = endDate.toISOString().split('T')[0];
3167
-
3297
+
3168
3298
  // 2. Try BigQuery first (if enabled)
3169
3299
  if (process.env.BIGQUERY_ENABLED !== 'false') {
3170
3300
  try {
@@ -3185,14 +3315,14 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3185
3315
  // Fall through to Firestore logic
3186
3316
  }
3187
3317
  }
3188
-
3318
+
3189
3319
  // 3. Fallback to Firestore (original logic)
3190
3320
  // Build list of dates to check (Newest -> Oldest)
3191
3321
  const dates = [];
3192
3322
  for (let d = new Date(endDate); d >= startDate; d.setDate(d.getDate() - 1)) {
3193
3323
  dates.push(d.toISOString().split('T')[0]);
3194
3324
  }
3195
-
3325
+
3196
3326
  // 4. Fetch Data for ALL Dates in Parallel
3197
3327
  const datePromises = dates.map(async (dateStr) => {
3198
3328
  try {
@@ -3203,9 +3333,9 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3203
3333
  .doc('alerts')
3204
3334
  .collection('computations')
3205
3335
  .doc(computationName);
3206
-
3336
+
3207
3337
  let docSnapshot = await docRef.get();
3208
-
3338
+
3209
3339
  // Fallback to popular-investor path
3210
3340
  if (!docSnapshot.exists) {
3211
3341
  docRef = db.collection('unified_insights')
@@ -3241,7 +3371,7 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3241
3371
  }
3242
3372
  });
3243
3373
  }
3244
-
3374
+
3245
3375
  return { date: dateStr, data: dayData };
3246
3376
 
3247
3377
  } catch (err) {
@@ -3253,28 +3383,28 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3253
3383
  // Wait for all days to load
3254
3384
  // rawResults is sorted Newest -> Oldest (matches 'dates' order)
3255
3385
  const rawResults = (await Promise.all(datePromises)).filter(r => r !== null);
3256
-
3386
+
3257
3387
  // 5. Aggregate Matches Per User
3258
3388
  // Map: piCid -> { firstMatchedAt, lastMatchedAt, history: [], ... }
3259
3389
  const piAggregates = new Map();
3260
-
3390
+
3261
3391
  // Process dates from Oldest -> Newest to build timeline correctly
3262
3392
  // Note: .reverse() mutates the array in place, so rawResults becomes Oldest->Newest
3263
3393
  const timeline = rawResults.reverse();
3264
-
3394
+
3265
3395
  for (const dayEntry of timeline) {
3266
3396
  const { date, data } = dayEntry;
3267
3397
  const seenCidsThisDay = new Set();
3268
-
3398
+
3269
3399
  // A. Process Users Present in the Daily File
3270
3400
  for (const [piCidStr, piData] of Object.entries(data)) {
3271
3401
  if (piData.error) continue;
3272
-
3402
+
3273
3403
  const piCid = Number(piCidStr);
3274
3404
  seenCidsThisDay.add(piCid);
3275
-
3405
+
3276
3406
  const filterResult = checkPIMatchesCriteria(computationName, piData, parameters);
3277
-
3407
+
3278
3408
  if (filterResult.passes) {
3279
3409
  // Initialize if new
3280
3410
  if (!piAggregates.has(piCid)) {
@@ -3287,12 +3417,12 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3287
3417
  latestData: null
3288
3418
  });
3289
3419
  }
3290
-
3420
+
3291
3421
  const agg = piAggregates.get(piCid);
3292
3422
  agg.lastMatchedAt = date;
3293
3423
  agg.matchCount++;
3294
3424
  agg.latestData = piData;
3295
-
3425
+
3296
3426
  agg.history.push({
3297
3427
  date: date,
3298
3428
  matched: true,
@@ -3305,7 +3435,7 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3305
3435
  const agg = piAggregates.get(piCid);
3306
3436
  // Update metadata to show why they failed (current value)
3307
3437
  agg.latestData = piData;
3308
-
3438
+
3309
3439
  agg.history.push({
3310
3440
  date: date,
3311
3441
  matched: false,
@@ -3315,7 +3445,7 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3315
3445
  }
3316
3446
  }
3317
3447
  }
3318
-
3448
+
3319
3449
  // B. Process Missing Users (Implicit Drop-off)
3320
3450
  // If a user was tracked previously but is missing today, record as non-match
3321
3451
  for (const [cid, agg] of piAggregates) {
@@ -3329,21 +3459,21 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3329
3459
  }
3330
3460
  }
3331
3461
  }
3332
-
3462
+
3333
3463
  // 6. Calculate Status (Dropped Off, Current) & Fetch Usernames
3334
3464
  const results = [];
3335
3465
  const todayStr = new Date().toISOString().split('T')[0];
3336
3466
  // Since rawResults was reversed, the last element is the Newest date
3337
3467
  const lastDataDate = timeline.length > 0 ? timeline[timeline.length - 1].date : todayStr;
3338
-
3468
+
3339
3469
  for (const [cid, agg] of piAggregates) {
3340
3470
  const history = agg.history;
3341
3471
  const lastEntry = history[history.length - 1];
3342
-
3472
+
3343
3473
  // Is Currently Matching?
3344
3474
  // Must be matched=true AND on the most recent data date available
3345
3475
  const isCurrent = lastEntry.matched && lastEntry.date === lastDataDate;
3346
-
3476
+
3347
3477
  // Calculate Drop Off Dates
3348
3478
  // Find all transitions from True -> False
3349
3479
  const droppedOffAt = [];
@@ -3354,41 +3484,41 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3354
3484
  droppedOffAt.push(curr.date);
3355
3485
  }
3356
3486
  }
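  // Sketch of the transition scan above (partially elided by this hunk): for a
  // history of [{date:'2024-01-01', matched:true}, {date:'2024-01-02', matched:false},
  // {date:'2024-01-03', matched:true}], droppedOffAt collects each date where
  // matched flips true -> false, i.e. ['2024-01-02']. Dates here are illustrative.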
3357
-
3487
+
3358
3488
  // Fetch Username (Optimistic)
3359
3489
  let username = `PI-${cid}`;
3360
3490
  if (db) {
3361
3491
  try {
3362
3492
  const piProfile = await fetchPopularInvestorMasterList(db, String(cid)).catch(() => null);
3363
3493
  if (piProfile) username = piProfile.username;
3364
- } catch (e) {}
3494
+ } catch (e) { }
3365
3495
  }
3366
-
3496
+
3367
3497
  results.push({
3368
3498
  cid: cid,
3369
3499
  username: username,
3370
-
3500
+
3371
3501
  // Aggregated Stats
3372
3502
  firstMatchedAt: agg.firstMatchedAt,
3373
3503
  lastMatchedAt: agg.lastMatchedAt,
3374
-
3504
+
3375
3505
  // [UPDATED] Array of dates where they stopped matching
3376
3506
  droppedOffAt: droppedOffAt,
3377
-
3507
+
3378
3508
  isCurrentlyMatching: isCurrent,
3379
3509
  matchCount: agg.matchCount,
3380
-
3510
+
3381
3511
  // Visualization Data
3382
3512
  history: agg.history,
3383
-
3513
+
3384
3514
  // Latest Snapshot Values
3385
3515
  latestValue: history[history.length - 1]?.value,
3386
-
3516
+
3387
3517
  // Metadata
3388
3518
  metadata: agg.latestData
3389
3519
  });
3390
3520
  }
3391
-
3521
+
3392
3522
  // 7. Sort Results
3393
3523
  // Priority: Currently Matching > Recently Dropped Off
3394
3524
  // Secondary: Match Value magnitude
@@ -3399,11 +3529,11 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3399
3529
  // If both same status, sort by magnitude of value (risk, change, etc)
3400
3530
  return Math.abs(b.latestValue || 0) - Math.abs(a.latestValue || 0);
3401
3531
  });
3402
-
3532
+
3403
3533
  const limitedResults = results.slice(0, limit);
3404
-
3534
+
3405
3535
  console.log(`[queryDynamicWatchlistMatches] Found ${results.length} unique PIs matching at least once.`);
3406
-
3536
+
3407
3537
  return {
3408
3538
  success: true,
3409
3539
  matches: limitedResults,
@@ -3429,7 +3559,7 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
3429
3559
  */
3430
3560
  function checkPIMatchesCriteria(computationName, piData, parameters) {
3431
3561
  const result = { passes: false, matchValue: 0, currentValue: null, previousValue: null, change: null };
3432
-
3562
+
3433
3563
  switch (computationName) {
3434
3564
  case 'RiskScoreIncrease': {
3435
3565
  // Data format: { change, currentRisk, previousRisk, isBaselineReset }
@@ -3438,97 +3568,97 @@ function checkPIMatchesCriteria(computationName, piData, parameters) {
3438
3568
  const previousRisk = piData.previousRisk;
3439
3569
  const minChange = parameters.minChange ?? 0;
3440
3570
  const minRiskLevel = parameters.minRiskLevel ?? 0;
3441
-
3571
+
3442
3572
  result.currentValue = currentRisk;
3443
3573
  result.previousValue = previousRisk;
3444
3574
  result.change = change;
3445
3575
  result.matchValue = currentRisk; // Sort by current risk level
3446
-
3576
+
3447
3577
  // Pass if change >= minChange AND currentRisk >= minRiskLevel
3448
3578
  if (change >= minChange && currentRisk >= minRiskLevel) {
3449
3579
  result.passes = true;
3450
3580
  }
3451
3581
  break;
3452
3582
  }
3453
-
3583
+
3454
3584
  case 'SignificantVolatility': {
3455
3585
  // Data format: { volatility, threshold, samples }
3456
3586
  const volatility = piData.volatility || 0;
3457
3587
  const minVolatility = parameters.volatilityThreshold ?? 50;
3458
-
3588
+
3459
3589
  result.currentValue = volatility;
3460
3590
  result.matchValue = volatility;
3461
-
3591
+
3462
3592
  if (volatility >= minVolatility) {
3463
3593
  result.passes = true;
3464
3594
  }
3465
3595
  break;
3466
3596
  }
3467
-
3597
+
3468
3598
  case 'PositionInvestedIncrease': {
3469
3599
  // Data format: { moveCount, moves: [{symbol, prev, curr, diff}], isBaselineReset }
3470
3600
  const moveCount = piData.moveCount || 0;
3471
3601
  const moves = piData.moves || [];
3472
3602
  const minIncrease = parameters.minIncrease ?? 5;
3473
-
3603
+
3474
3604
  // Find the largest position increase
3475
3605
  const maxMove = moves.reduce((max, m) => m.diff > max ? m.diff : max, 0);
3476
-
3606
+
3477
3607
  result.currentValue = moveCount;
3478
3608
  result.matchValue = maxMove;
3479
-
3609
+
3480
3610
  if (maxMove >= minIncrease) {
3481
3611
  result.passes = true;
3482
3612
  }
3483
3613
  break;
3484
3614
  }
3485
-
3615
+
3486
3616
  case 'NewSectorExposure': {
3487
3617
  // Data format: { currentSectors, previousSectors, newExposures, isBaselineReset }
3488
3618
  const newExposures = piData.newExposures || [];
3489
-
3619
+
3490
3620
  result.currentValue = newExposures.length;
3491
3621
  result.matchValue = newExposures.length;
3492
-
3622
+
3493
3623
  if (newExposures.length > 0) {
3494
3624
  result.passes = true;
3495
3625
  }
3496
3626
  break;
3497
3627
  }
3498
-
3628
+
3499
3629
  case 'NewSocialPost': {
3500
3630
  // Data format: { hasNewPost, latestPostDate, postCount, title }
3501
3631
  const hasNewPost = piData.hasNewPost || false;
3502
-
3632
+
3503
3633
  result.currentValue = hasNewPost ? 1 : 0;
3504
3634
  result.matchValue = hasNewPost ? 1 : 0;
3505
-
3635
+
3506
3636
  if (hasNewPost) {
3507
3637
  result.passes = true;
3508
3638
  }
3509
3639
  break;
3510
3640
  }
3511
-
3641
+
3512
3642
  case 'BehavioralAnomaly': {
3513
3643
  // Data format: { anomalyScore, primaryDriver, driverSignificance }
3514
3644
  const anomalyScore = piData.anomalyScore || 0;
3515
3645
  const minScore = parameters.minAnomalyScore ?? 0;
3516
-
3646
+
3517
3647
  result.currentValue = anomalyScore;
3518
3648
  result.matchValue = anomalyScore;
3519
-
3649
+
3520
3650
  if (anomalyScore >= minScore) {
3521
3651
  result.passes = true;
3522
3652
  }
3523
3653
  break;
3524
3654
  }
3525
-
3655
+
3526
3656
  default:
3527
3657
  // Generic fallback - pass if there's any data
3528
3658
  result.passes = true;
3529
3659
  result.matchValue = 0;
3530
3660
  }
3531
-
3661
+
3532
3662
  return result;
3533
3663
  }
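
A worked example of the matcher, using the RiskScoreIncrease data format documented in its case block (values illustrative):

const sample = { change: 2, currentRisk: 7, previousRisk: 5, isBaselineReset: false };
const res = checkPIMatchesCriteria('RiskScoreIncrease', sample, { minChange: 1, minRiskLevel: 6 });
// -> { passes: true, matchValue: 7, currentValue: 7, previousValue: 5, change: 2 }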
3534
3664
 
@@ -3539,23 +3669,23 @@ const subscribeToAllWatchlistPIs = async (db, userId, watchlistId, alertTypes =
3539
3669
  try {
3540
3670
  // Fetch watchlist
3541
3671
  const watchlist = await latestUserCentricSnapshot(
3542
- db,
3543
- userId,
3544
- 'watchlists',
3545
- 'watchlist',
3546
- 'SignedInUsers',
3672
+ db,
3673
+ userId,
3674
+ 'watchlists',
3675
+ 'watchlist',
3676
+ 'SignedInUsers',
3547
3677
  watchlistId
3548
3678
  );
3549
-
3679
+
3550
3680
  if (!watchlist) {
3551
3681
  throw new Error("Watchlist not found");
3552
3682
  }
3553
-
3683
+
3554
3684
  const items = watchlist.items || [];
3555
3685
  const subscriptionsRef = db.collection('watchlist_subscriptions')
3556
3686
  .doc(String(userId))
3557
3687
  .collection('alerts');
3558
-
3688
+
3559
3689
  // Default alert types
3560
3690
  const defaultAlertTypes = alertTypes || {
3561
3691
  newPositions: true,
@@ -3565,9 +3695,9 @@ const subscribeToAllWatchlistPIs = async (db, userId, watchlistId, alertTypes =
3565
3695
  increasedPositionSize: true,
3566
3696
  newSocialPost: true
3567
3697
  };
3568
-
3698
+
3569
3699
  let subscribedCount = 0;
3570
-
3700
+
3571
3701
  // Subscribe to each PI in the watchlist
3572
3702
  for (const item of items) {
3573
3703
  const subscriptionData = {
@@ -3579,11 +3709,11 @@ const subscribeToAllWatchlistPIs = async (db, userId, watchlistId, alertTypes =
3579
3709
  subscribedAt: FieldValue.serverTimestamp(),
3580
3710
  lastAlertAt: null
3581
3711
  };
3582
-
3712
+
3583
3713
  await subscriptionsRef.doc(String(item.cid)).set(subscriptionData, { merge: true });
3584
3714
  subscribedCount++;
3585
3715
  }
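  // Design note (editor sketch, not in the package): each subscription is written
  // with its own awaited set(); for large watchlists a WriteBatch would commit
  // them in one round trip (Firestore batches allow up to 500 operations), e.g.:
  //   const batch = db.batch();
  //   for (const item of items) batch.set(subscriptionsRef.doc(String(item.cid)), buildData(item), { merge: true });
  //   await batch.commit();
  // where buildData is a hypothetical helper assembling the same per-item subscriptionData as above.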
3586
-
3716
+
3587
3717
  return {
3588
3718
  success: true,
3589
3719
  subscribed: subscribedCount,
@@ -3611,22 +3741,22 @@ const subscribeToAllWatchlistPIs = async (db, userId, watchlistId, alertTypes =
3611
3741
  async function fetchWithBigQueryCache(firestore, cacheCollection, dateStr, bigqueryFetcher, dataType = 'data') {
3612
3742
  const CACHE_TTL_MINUTES = 10;
3613
3743
  const CACHE_TTL_MS = CACHE_TTL_MINUTES * 60 * 1000;
3614
-
3744
+
3615
3745
  try {
3616
3746
  // 1. Check Firestore cache
3617
3747
  const cacheDocRef = firestore.collection(cacheCollection).doc(dateStr);
3618
3748
  const cacheDoc = await cacheDocRef.get();
3619
-
3749
+
3620
3750
  if (cacheDoc.exists) {
3621
3751
  const cacheData = cacheDoc.data();
3622
3752
  const cachedAt = cacheData.cachedAt;
3623
-
3753
+
3624
3754
  // Check if cache is still valid
3625
3755
  if (cachedAt) {
3626
3756
  const cachedTimestamp = cachedAt.toMillis ? cachedAt.toMillis() : (cachedAt._seconds * 1000);
3627
3757
  const now = Date.now();
3628
3758
  const age = now - cachedTimestamp;
3629
-
3759
+
3630
3760
  if (age < CACHE_TTL_MS) {
3631
3761
  // Cache is valid, return cached data
3632
3762
  const { cachedAt, ...data } = cacheData;
@@ -3634,19 +3764,19 @@ async function fetchWithBigQueryCache(firestore, cacheCollection, dateStr, bigqu
3634
3764
  }
3635
3765
  }
3636
3766
  }
3637
-
3767
+
3638
3768
  // 2. Cache missing or expired, fetch from BigQuery
3639
3769
  if (process.env.BIGQUERY_ENABLED !== 'false') {
3640
3770
  try {
3641
3771
  const bigqueryData = await bigqueryFetcher();
3642
-
3772
+
3643
3773
  if (bigqueryData) {
3644
3774
  // 3. Cache the result in Firestore (TTL is enforced on read via cachedAt, not a Firestore TTL policy)
3645
3775
  await cacheDocRef.set({
3646
3776
  ...bigqueryData,
3647
3777
  cachedAt: FieldValue.serverTimestamp()
3648
3778
  }, { merge: true });
3649
-
3779
+
3650
3780
  return bigqueryData;
3651
3781
  }
3652
3782
  } catch (error) {
@@ -3659,14 +3789,14 @@ async function fetchWithBigQueryCache(firestore, cacheCollection, dateStr, bigqu
3659
3789
  }
3660
3790
  }
3661
3791
  }
3662
-
3792
+
3663
3793
  // 4. Fallback: return cached data even if expired, or null
3664
3794
  if (cacheDoc.exists) {
3665
3795
  const cacheData = cacheDoc.data();
3666
3796
  const { cachedAt, ...data } = cacheData;
3667
3797
  return data;
3668
3798
  }
3669
-
3799
+
3670
3800
  return null;
3671
3801
  } catch (error) {
3672
3802
  console.error(`[API] Error in fetchWithBigQueryCache for ${dataType} (${dateStr}): ${error.message}`);
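
A hedged usage sketch for the cache-through helper: the caller passes the BigQuery query as a thunk, so the helper stays agnostic about what is fetched. The collection name and fetcher below are invented for illustration:

const aum = await fetchWithBigQueryCache(
  firestore,
  'aum_cache',                       // hypothetical cache collection
  '2024-01-15',
  () => runAumQuery('2024-01-15'),   // hypothetical BigQuery fetcher
  'aum'
);
// A cache entry younger than 10 minutes short-circuits; otherwise the BigQuery
// result is written back with a server timestamp, and an expired entry is still
// returned as a last resort when BigQuery fails.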
@@ -3745,8 +3875,13 @@ async function fetchPIAlertHistory(firestore, dateStr) {
3745
3875
  module.exports = {
3746
3876
  latestUserCentricSnapshot,
3747
3877
  pageCollection,
3878
+ fetchProfileMetrics,
3879
+ getProfileMetricsFromPath,
3880
+ PROFILE_COMPUTATION_PATHS,
3748
3881
  findMostRecentGlobalAumData,
3749
3882
  fetchGlobalAumPerAssetWithLookback,
3883
+ getComputationResultFromBigQuery,
3884
+ getComputationResultsFromBigQueryForDate,
3750
3885
  getComputationResults,
3751
3886
  fetchPopularInvestorMasterList,
3752
3887
  isDeveloper,