bulltrackers-module 1.0.717 → 1.0.719

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -513,6 +513,63 @@ const manageUserWatchlist = async (db, userId, instruction, payload = {}) => {
  // 6. Commit Batch
  await batch.commit();

+ // 7. Write to BigQuery (after Firestore commit to get final values)
+ if (process.env.BIGQUERY_ENABLED !== 'false' && (addedItems.length > 0 || removedItems.length > 0)) {
+ try {
+ const { ensureWatchlistMembershipTable, insertRowsWithMerge } = require('../../../core/utils/bigquery_utils');
+ await ensureWatchlistMembershipTable();
+
+ const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+ const bigqueryRows = [];
+
+ // Process all affected PIs (both added and removed)
+ const affectedPIs = new Set([
+ ...addedItems.map(item => String(item.cid)),
+ ...removedItems.map(item => String(item.cid))
+ ]);
+
+ // Read updated documents from subcollection for each affected PI
+ for (const piId of affectedPIs) {
+ const masterRef = db.collection('WatchlistMembershipData')
+ .doc(todayStr).collection('popular_investors').doc(piId);
+
+ const piDoc = await masterRef.get();
+ if (piDoc.exists) {
+ const piData = piDoc.data();
+
+ // Transform to BigQuery format
+ const lastUpdated = piData.lastUpdated
+ ? (piData.lastUpdated.toDate ? piData.lastUpdated.toDate().toISOString() : piData.lastUpdated)
+ : new Date().toISOString();
+
+ bigqueryRows.push({
+ date: todayStr,
+ pi_id: parseInt(piId, 10),
+ total_users: piData.totalUsers || 0,
+ public_watchlist_count: piData.publicWatchlistCount || 0,
+ private_watchlist_count: piData.privateWatchlistCount || 0,
+ users: Array.isArray(piData.users) ? piData.users : [],
+ last_updated: lastUpdated
+ });
+ }
+ }
+
+ if (bigqueryRows.length > 0) {
+ // Use MERGE to update existing rows or insert new
+ await insertRowsWithMerge(
+ datasetId,
+ 'watchlist_membership',
+ bigqueryRows,
+ ['date', 'pi_id'], // Key fields for MERGE
+ console // Simple logger
+ );
+ }
+ } catch (bqError) {
+ console.error(`[manageUserWatchlist] BigQuery write failed: ${bqError.message}`);
+ // Don't throw - Firestore write succeeded, BigQuery is secondary
+ }
+ }
+
  return {
  success: true,
  id: watchlistId,
@@ -1583,6 +1640,51 @@ const trackPopularInvestorView = async (db, piId, viewerId = null, viewerType =
  batch.set(globalDayRef, globalUpdate, { merge: true });

  await batch.commit();
+
+ // 5. Write to BigQuery (after Firestore commit to get final values)
+ if (process.env.BIGQUERY_ENABLED !== 'false') {
+ try {
+ const { ensurePIPageViewsTable, insertRowsWithMerge } = require('../../../core/utils/bigquery_utils');
+ await ensurePIPageViewsTable();
+
+ // Read the updated document to get final values (after increments)
+ const updatedDoc = await globalDayRef.get();
+ if (updatedDoc.exists) {
+ const data = updatedDoc.data();
+ const piData = data[piId];
+
+ if (piData) {
+ // Transform to BigQuery format
+ const lastUpdated = piData.lastUpdated
+ ? (piData.lastUpdated.toDate ? piData.lastUpdated.toDate().toISOString() : piData.lastUpdated)
+ : new Date().toISOString();
+
+ const bigqueryRow = {
+ date: todayStr,
+ pi_id: parseInt(piId, 10),
+ total_views: piData.totalViews || 0,
+ unique_viewers: piData.uniqueViewers || 0,
+ views_by_user: piData.viewsByUser || {},
+ last_updated: lastUpdated
+ };
+
+ // Use MERGE to update existing row or insert new
+ const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+ await insertRowsWithMerge(
+ datasetId,
+ 'pi_page_views',
+ [bigqueryRow],
+ ['date', 'pi_id'], // Key fields for MERGE
+ console // Simple logger
+ );
+ }
+ }
+ } catch (bqError) {
+ console.error(`[trackPopularInvestorView] BigQuery write failed: ${bqError.message}`);
+ // Don't throw - Firestore write succeeded, BigQuery is secondary
+ }
+ }
+
  return { success: true };
  };
  // ==========================================
@@ -3494,6 +3596,152 @@ const subscribeToAllWatchlistPIs = async (db, userId, watchlistId, alertTypes =
  }
  };

+ /**
+ * Helper function for BigQuery caching pattern:
+ * 1. Check Firestore cache (with TTL)
+ * 2. If missing/expired, fetch from BigQuery
+ * 3. Cache result in Firestore with 10-minute TTL
+ * @param {Object} firestore - Firestore instance
+ * @param {string} cacheCollection - Firestore collection for cache (e.g., 'PIRatingsData')
+ * @param {string} dateStr - Date string (YYYY-MM-DD)
+ * @param {Function} bigqueryFetcher - Async function that fetches from BigQuery
+ * @param {string} dataType - Data type name for logging
+ * @returns {Promise<Object|null>} Cached or fetched data
+ */
+ async function fetchWithBigQueryCache(firestore, cacheCollection, dateStr, bigqueryFetcher, dataType = 'data') {
+ const CACHE_TTL_MINUTES = 10;
+ const CACHE_TTL_MS = CACHE_TTL_MINUTES * 60 * 1000;
+
+ try {
+ // 1. Check Firestore cache
+ const cacheDocRef = firestore.collection(cacheCollection).doc(dateStr);
+ const cacheDoc = await cacheDocRef.get();
+
+ if (cacheDoc.exists) {
+ const cacheData = cacheDoc.data();
+ const cachedAt = cacheData.cachedAt;
+
+ // Check if cache is still valid
+ if (cachedAt) {
+ const cachedTimestamp = cachedAt.toMillis ? cachedAt.toMillis() : (cachedAt._seconds * 1000);
+ const now = Date.now();
+ const age = now - cachedTimestamp;
+
+ if (age < CACHE_TTL_MS) {
+ // Cache is valid, return cached data
+ const { cachedAt, ...data } = cacheData;
+ return data;
+ }
+ }
+ }
+
+ // 2. Cache missing or expired, fetch from BigQuery
+ if (process.env.BIGQUERY_ENABLED !== 'false') {
+ try {
+ const bigqueryData = await bigqueryFetcher();
+
+ if (bigqueryData) {
+ // 3. Cache the result in Firestore with TTL
+ await cacheDocRef.set({
+ ...bigqueryData,
+ cachedAt: FieldValue.serverTimestamp()
+ }, { merge: true });
+
+ return bigqueryData;
+ }
+ } catch (error) {
+ console.error(`[API] BigQuery fetch failed for ${dataType} (${dateStr}): ${error.message}`);
+ // If BigQuery fails, return cached data if available (even if expired)
+ if (cacheDoc.exists) {
+ const cacheData = cacheDoc.data();
+ const { cachedAt, ...data } = cacheData;
+ return data;
+ }
+ }
+ }
+
+ // 4. Fallback: return cached data even if expired, or null
+ if (cacheDoc.exists) {
+ const cacheData = cacheDoc.data();
+ const { cachedAt, ...data } = cacheData;
+ return data;
+ }
+
+ return null;
+ } catch (error) {
+ console.error(`[API] Error in fetchWithBigQueryCache for ${dataType} (${dateStr}): ${error.message}`);
+ return null;
+ }
+ }
+
+ /**
+ * Fetch PI Ratings data with BigQuery caching
+ * @param {Object} firestore - Firestore instance
+ * @param {string} dateStr - Date string (YYYY-MM-DD)
+ * @returns {Promise<Object|null>} Ratings data
+ */
+ async function fetchPIRatings(firestore, dateStr) {
+ const { queryPIRatings } = require('../../../core/utils/bigquery_utils');
+ return await fetchWithBigQueryCache(
+ firestore,
+ 'PIRatingsData',
+ dateStr,
+ () => queryPIRatings(dateStr),
+ 'PI Ratings'
+ );
+ }
+
+ /**
+ * Fetch PI Page Views data with BigQuery caching
+ * @param {Object} firestore - Firestore instance
+ * @param {string} dateStr - Date string (YYYY-MM-DD)
+ * @returns {Promise<Object|null>} Page views data
+ */
+ async function fetchPIPageViews(firestore, dateStr) {
+ const { queryPIPageViews } = require('../../../core/utils/bigquery_utils');
+ return await fetchWithBigQueryCache(
+ firestore,
+ 'PIPageViewsData',
+ dateStr,
+ () => queryPIPageViews(dateStr),
+ 'PI Page Views'
+ );
+ }
+
+ /**
+ * Fetch Watchlist Membership data with BigQuery caching
+ * @param {Object} firestore - Firestore instance
+ * @param {string} dateStr - Date string (YYYY-MM-DD)
+ * @returns {Promise<Object|null>} Watchlist membership data
+ */
+ async function fetchWatchlistMembership(firestore, dateStr) {
+ const { queryWatchlistMembership } = require('../../../core/utils/bigquery_utils');
+ return await fetchWithBigQueryCache(
+ firestore,
+ 'WatchlistMembershipData',
+ dateStr,
+ () => queryWatchlistMembership(dateStr),
+ 'Watchlist Membership'
+ );
+ }
+
+ /**
+ * Fetch PI Alert History data with BigQuery caching
+ * @param {Object} firestore - Firestore instance
+ * @param {string} dateStr - Date string (YYYY-MM-DD)
+ * @returns {Promise<Object|null>} Alert history data
+ */
+ async function fetchPIAlertHistory(firestore, dateStr) {
+ const { queryPIAlertHistory } = require('../../../core/utils/bigquery_utils');
+ return await fetchWithBigQueryCache(
+ firestore,
+ 'PIAlertHistoryData',
+ dateStr,
+ () => queryPIAlertHistory(dateStr),
+ 'PI Alert History'
+ );
+ }
+
  module.exports = {
  latestUserCentricSnapshot,
  pageCollection,
@@ -3542,5 +3790,9 @@ module.exports = {
  unsubscribeFromAlerts,
  getWatchlistTriggerCounts,
  subscribeToAllWatchlistPIs,
- queryDynamicWatchlistMatches
+ queryDynamicWatchlistMatches,
+ fetchPIRatings,
+ fetchPIPageViews,
+ fetchWatchlistMembership,
+ fetchPIAlertHistory
  };
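The four fetch* exports added above are thin wrappers over fetchWithBigQueryCache, so a caller first gets the Firestore-cached document (10-minute TTL) and only hits BigQuery on a miss. A minimal consumption sketch; the handler, route shape and require path are illustrative and not part of the package:

// Hypothetical API handler; only fetchPIRatings and its return shape come from the code above.
const { Firestore } = require('@google-cloud/firestore');
const { fetchPIRatings } = require('./handlers'); // require path illustrative

const firestore = new Firestore();

async function getPIRatingsForDate(req, res) {
  const dateStr = req.params.date; // expected as YYYY-MM-DD
  const ratings = await fetchPIRatings(firestore, dateStr);
  if (!ratings) return res.status(404).json({ error: `No ratings for ${dateStr}` });
  // Shape per queryPIRatings: { piId: { averageRating, totalRatings, ratingsByUser } }
  return res.json(ratings);
}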
@@ -376,8 +376,58 @@ async function loadDailySocialPostInsights(config, deps, dateString) {
  const cached = await tryLoadFromGCS(config, dateString, 'social', logger);
  if (cached) return cached;

- // 2. FIRESTORE FALLBACK
- logger.log('INFO', `Loading and partitioning social data for ${dateString}`);
+ // 2. BIGQUERY FIRST (if enabled)
+ if (process.env.BIGQUERY_ENABLED !== 'false') {
+ try {
+ const { querySocialData } = require('../../core/utils/bigquery_utils');
+ const bigqueryData = await querySocialData(dateString, null, null, logger);
+
+ if (bigqueryData && Object.keys(bigqueryData).length > 0) {
+ logger.log('INFO', `[DataLoader] ✅ Using BigQuery for social data (${dateString}): ${Object.keys(bigqueryData).length} users`);
+
+ // Transform BigQuery data to expected format: { generic: {}, pi: {}, signedIn: {} }
+ // BigQuery returns: { userId: { posts_data: { posts: {...}, postCount: N }, user_type: '...' } }
+ const result = { generic: {}, pi: {}, signedIn: {} };
+
+ for (const [userId, userData] of Object.entries(bigqueryData)) {
+ const userType = userData.user_type || 'UNKNOWN';
+
+ // Handle posts_data - may be object (parsed JSON) or string (needs parsing)
+ let postsData = userData.posts_data || {};
+ if (typeof postsData === 'string') {
+ try {
+ postsData = JSON.parse(postsData);
+ } catch (e) {
+ logger.log('WARN', `[DataLoader] Failed to parse posts_data for user ${userId}: ${e.message}`);
+ continue;
+ }
+ }
+
+ // Extract posts map from posts_data structure: { posts: {...}, postCount: N }
+ const posts = postsData.posts || {};
+
+ // Partition by user type
+ if (userType === 'POPULAR_INVESTOR') {
+ result.pi[userId] = posts;
+ } else if (userType === 'SIGNED_IN_USER') {
+ result.signedIn[userId] = posts;
+ } else {
+ // Generic/unknown user types go to generic
+ result.generic[userId] = posts;
+ }
+ }
+
+ logger.log('INFO', `[DataLoader] ✅ Loaded Social Data from BigQuery: ${Object.keys(result.generic).length} Generic, ${Object.keys(result.pi).length} PIs, ${Object.keys(result.signedIn).length} Signed-In`);
+ return result;
+ }
+ } catch (bqError) {
+ logger.log('WARN', `[DataLoader] BigQuery social query failed for ${dateString}, falling back to Firestore: ${bqError.message}`);
+ // Fall through to Firestore
+ }
+ }
+
+ // 3. FIRESTORE FALLBACK
+ logger.log('INFO', `Loading and partitioning social data for ${dateString} (Firestore)`);

  const result = { generic: {}, pi: {}, signedIn: {} };

@@ -874,31 +924,33 @@ async function loadPIRatings(config, deps, dateString) {
  const cached = await tryLoadFromGCS(config, dateString, 'ratings', logger);
  if (cached) return cached;

- // 2. FIRESTORE FALLBACK
- logger.log('INFO', `Loading PI Ratings (Raw Logs) for ${dateString}`);
- try {
- const shardsColRef = db.collection('PiReviews').doc(dateString).collection('shards');
- const shardDocs = await withRetry(() => shardsColRef.listDocuments(), `listRatingShards(${dateString})`);
-
- if (!shardDocs || shardDocs.length === 0) {
- logger.log('WARN', `No rating shards found for ${dateString}`);
- return {};
+ // 2. BIGQUERY FIRST (if enabled)
+ if (process.env.BIGQUERY_ENABLED !== 'false') {
+ try {
+ const { queryPIRatings } = require('../../core/utils/bigquery_utils');
+ const bigqueryData = await queryPIRatings(dateString, logger);
+ if (bigqueryData) {
+ logger.log('INFO', `[DataLoader] Loaded PI Ratings from BigQuery for ${dateString}`);
+ return bigqueryData;
+ }
+ } catch (error) {
+ logger.log('WARN', `[DataLoader] BigQuery PI Ratings query failed, falling back to Firestore: ${error.message}`);
  }
+ }

- const rawReviewsByPi = {};
- for (const docRef of shardDocs) {
- const docSnap = await docRef.get();
- if (!docSnap.exists) continue;
- const rawData = tryDecompress(docSnap.data());
- Object.values(rawData).forEach(entry => {
- if (entry && typeof entry === 'object' && entry.piCid && entry.rating !== undefined) {
- if (!rawReviewsByPi[entry.piCid]) rawReviewsByPi[entry.piCid] = [];
- rawReviewsByPi[entry.piCid].push(entry);
- }
- });
+ // 3. FIRESTORE FALLBACK
+ const collectionName = config.piRatingsCollection || 'PIRatingsData';
+ logger.log('INFO', `Loading PI Ratings from Firestore for ${dateString}`);
+ try {
+ const docRef = db.collection(collectionName).doc(dateString);
+ const docSnap = await withRetry(() => docRef.get(), `getPIRatings(${dateString})`);
+ if (!docSnap.exists) {
+ logger.log('WARN', `PI Ratings not found for ${dateString}`);
+ return {};
  }
- logger.log('INFO', `Loaded raw reviews for ${Object.keys(rawReviewsByPi).length} PIs.`);
- return rawReviewsByPi;
+ const data = tryDecompress(docSnap.data());
+ const { date, lastUpdated, ...piRatings } = data;
+ return piRatings;
  } catch (error) {
  logger.log('ERROR', `Failed to load PI Ratings: ${error.message}`);
  return {};
@@ -914,9 +966,23 @@ async function loadPIPageViews(config, deps, dateString) {
  const cached = await tryLoadFromGCS(config, dateString, 'page_views', logger);
  if (cached) return cached;

- // 2. FIRESTORE FALLBACK
+ // 2. BIGQUERY FIRST (if enabled)
+ if (process.env.BIGQUERY_ENABLED !== 'false') {
+ try {
+ const { queryPIPageViews } = require('../../core/utils/bigquery_utils');
+ const bigqueryData = await queryPIPageViews(dateString, logger);
+ if (bigqueryData) {
+ logger.log('INFO', `[DataLoader] ✅ Loaded PI Page Views from BigQuery for ${dateString}`);
+ return bigqueryData;
+ }
+ } catch (error) {
+ logger.log('WARN', `[DataLoader] BigQuery PI Page Views query failed, falling back to Firestore: ${error.message}`);
+ }
+ }
+
+ // 3. FIRESTORE FALLBACK
  const collectionName = config.piPageViewsCollection || 'PIPageViewsData';
- logger.log('INFO', `Loading PI Page Views for ${dateString}`);
+ logger.log('INFO', `Loading PI Page Views from Firestore for ${dateString}`);
  try {
  const docRef = db.collection(collectionName).doc(dateString);
  const docSnap = await withRetry(() => docRef.get(), `getPIPageViews(${dateString})`);
@@ -939,9 +1005,23 @@ async function loadWatchlistMembership(config, deps, dateString) {
  const cached = await tryLoadFromGCS(config, dateString, 'watchlist', logger);
  if (cached) return cached;

- // 2. FIRESTORE FALLBACK
+ // 2. BIGQUERY FIRST (if enabled)
+ if (process.env.BIGQUERY_ENABLED !== 'false') {
+ try {
+ const { queryWatchlistMembership } = require('../../core/utils/bigquery_utils');
+ const bigqueryData = await queryWatchlistMembership(dateString, logger);
+ if (bigqueryData) {
+ logger.log('INFO', `[DataLoader] ✅ Loaded Watchlist Membership from BigQuery for ${dateString}`);
+ return bigqueryData;
+ }
+ } catch (error) {
+ logger.log('WARN', `[DataLoader] BigQuery Watchlist Membership query failed, falling back to Firestore: ${error.message}`);
+ }
+ }
+
+ // 3. FIRESTORE FALLBACK
  const collectionName = config.watchlistMembershipCollection || 'WatchlistMembershipData';
- logger.log('INFO', `Loading Watchlist Membership for ${dateString}`);
+ logger.log('INFO', `Loading Watchlist Membership from Firestore for ${dateString}`);
  try {
  const docRef = db.collection(collectionName).doc(dateString);
  const docSnap = await withRetry(() => docRef.get(), `getWatchlistMembership(${dateString})`);
@@ -964,9 +1044,23 @@ async function loadPIAlertHistory(config, deps, dateString) {
  const cached = await tryLoadFromGCS(config, dateString, 'alerts', logger);
  if (cached) return cached;

- // 2. FIRESTORE FALLBACK
+ // 2. BIGQUERY FIRST (if enabled)
+ if (process.env.BIGQUERY_ENABLED !== 'false') {
+ try {
+ const { queryPIAlertHistory } = require('../../core/utils/bigquery_utils');
+ const bigqueryData = await queryPIAlertHistory(dateString, logger);
+ if (bigqueryData) {
+ logger.log('INFO', `[DataLoader] ✅ Loaded PI Alert History from BigQuery for ${dateString}`);
+ return bigqueryData;
+ }
+ } catch (error) {
+ logger.log('WARN', `[DataLoader] BigQuery PI Alert History query failed, falling back to Firestore: ${error.message}`);
+ }
+ }
+
+ // 3. FIRESTORE FALLBACK
  const collectionName = config.piAlertHistoryCollection || 'PIAlertHistoryData';
- logger.log('INFO', `Loading PI Alert History for ${dateString}`);
+ logger.log('INFO', `Loading PI Alert History from Firestore for ${dateString}`);
  try {
  const docRef = db.collection(collectionName).doc(dateString);
  const docSnap = await withRetry(() => docRef.get(), `getPIAlertHistory(${dateString})`);
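Each loader above now follows the same ordering: GCS cache, then BigQuery when BIGQUERY_ENABLED is not 'false', then the dated Firestore document. A generic sketch of that ordering; loadWithFallback and the three fetch callbacks are illustrative names, not package APIs:

// Illustrative only: mirrors the ordering used by loadPIRatings, loadPIPageViews,
// loadWatchlistMembership and loadPIAlertHistory in the hunks above.
async function loadWithFallback({ fetchFromGCS, fetchFromBigQuery, fetchFromFirestore, logger }) {
  const cached = await fetchFromGCS();               // 1. GCS cache
  if (cached) return cached;

  if (process.env.BIGQUERY_ENABLED !== 'false') {    // 2. BigQuery first (if enabled)
    try {
      const rows = await fetchFromBigQuery();
      if (rows) return rows;
    } catch (err) {
      logger.log('WARN', `BigQuery failed, falling back to Firestore: ${err.message}`);
    }
  }

  return fetchFromFirestore();                       // 3. Firestore fallback
}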
@@ -630,6 +630,43 @@ const SCHEMAS = {
  { name: 'instrument_id', type: 'INT64', mode: 'REQUIRED' },
  { name: 'ticker', type: 'STRING', mode: 'REQUIRED' },
  { name: 'last_updated', type: 'TIMESTAMP', mode: 'REQUIRED' }
+ ],
+ pi_ratings: [
+ { name: 'date', type: 'DATE', mode: 'REQUIRED' },
+ { name: 'pi_id', type: 'INT64', mode: 'REQUIRED' },
+ { name: 'average_rating', type: 'FLOAT64', mode: 'NULLABLE' },
+ { name: 'total_ratings', type: 'INT64', mode: 'NULLABLE' },
+ { name: 'ratings_by_user', type: 'JSON', mode: 'NULLABLE' },
+ { name: 'reviews', type: 'JSON', mode: 'NULLABLE' }, // Full review metadata array
+ { name: 'last_updated', type: 'TIMESTAMP', mode: 'REQUIRED' }
+ ],
+ pi_page_views: [
+ { name: 'date', type: 'DATE', mode: 'REQUIRED' },
+ { name: 'pi_id', type: 'INT64', mode: 'REQUIRED' },
+ { name: 'total_views', type: 'INT64', mode: 'NULLABLE' },
+ { name: 'unique_viewers', type: 'INT64', mode: 'NULLABLE' },
+ { name: 'views_by_user', type: 'JSON', mode: 'NULLABLE' },
+ { name: 'last_updated', type: 'TIMESTAMP', mode: 'REQUIRED' }
+ ],
+ watchlist_membership: [
+ { name: 'date', type: 'DATE', mode: 'REQUIRED' },
+ { name: 'pi_id', type: 'INT64', mode: 'REQUIRED' },
+ { name: 'total_users', type: 'INT64', mode: 'NULLABLE' },
+ { name: 'public_watchlist_count', type: 'INT64', mode: 'NULLABLE' },
+ { name: 'private_watchlist_count', type: 'INT64', mode: 'NULLABLE' },
+ { name: 'users', type: 'JSON', mode: 'NULLABLE' },
+ { name: 'last_updated', type: 'TIMESTAMP', mode: 'REQUIRED' }
+ ],
+ pi_alert_history: [
+ { name: 'date', type: 'DATE', mode: 'REQUIRED' },
+ { name: 'pi_id', type: 'INT64', mode: 'REQUIRED' },
+ { name: 'alert_type', type: 'STRING', mode: 'REQUIRED' },
+ { name: 'triggered', type: 'BOOLEAN', mode: 'NULLABLE' },
+ { name: 'trigger_count', type: 'INT64', mode: 'NULLABLE' },
+ { name: 'triggered_for', type: 'JSON', mode: 'NULLABLE' },
+ { name: 'metadata', type: 'JSON', mode: 'NULLABLE' },
+ { name: 'last_triggered', type: 'TIMESTAMP', mode: 'NULLABLE' },
+ { name: 'last_updated', type: 'TIMESTAMP', mode: 'REQUIRED' }
  ]
  };

@@ -838,6 +875,94 @@ async function ensureTickerMappingsTable(logger = null) {
  );
  }

+ /**
+ * Ensure pi_ratings table exists
+ * @param {object} logger - Logger instance
+ * @returns {Promise<Table>}
+ */
+ async function ensurePIRatingsTable(logger = null) {
+ const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+ const tableId = 'pi_ratings';
+ const schema = getSchema(tableId);
+
+ return await ensureTableExists(
+ datasetId,
+ tableId,
+ schema,
+ {
+ partitionField: 'date',
+ clusterFields: ['pi_id']
+ },
+ logger
+ );
+ }
+
+ /**
+ * Ensure pi_page_views table exists
+ * @param {object} logger - Logger instance
+ * @returns {Promise<Table>}
+ */
+ async function ensurePIPageViewsTable(logger = null) {
+ const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+ const tableId = 'pi_page_views';
+ const schema = getSchema(tableId);
+
+ return await ensureTableExists(
+ datasetId,
+ tableId,
+ schema,
+ {
+ partitionField: 'date',
+ clusterFields: ['pi_id']
+ },
+ logger
+ );
+ }
+
+ /**
+ * Ensure watchlist_membership table exists
+ * @param {object} logger - Logger instance
+ * @returns {Promise<Table>}
+ */
+ async function ensureWatchlistMembershipTable(logger = null) {
+ const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+ const tableId = 'watchlist_membership';
+ const schema = getSchema(tableId);
+
+ return await ensureTableExists(
+ datasetId,
+ tableId,
+ schema,
+ {
+ partitionField: 'date',
+ clusterFields: ['pi_id']
+ },
+ logger
+ );
+ }
+
+ /**
+ * Ensure pi_alert_history table exists
+ * @param {object} logger - Logger instance
+ * @returns {Promise<Table>}
+ */
+ async function ensurePIAlertHistoryTable(logger = null) {
+ const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+ const tableId = 'pi_alert_history';
+ const schema = getSchema(tableId);
+
+ return await ensureTableExists(
+ datasetId,
+ tableId,
+ schema,
+ {
+ partitionField: 'date',
+ clusterFields: ['pi_id', 'alert_type']
+ },
+ logger
+ );
+ }
+
  /**
  * Query portfolio data from BigQuery
  * @param {string} dateStr - Date string (YYYY-MM-DD)
@@ -1082,16 +1207,18 @@ async function querySocialData(dateStr, userIds = null, userTypes = null, logger
  const tablePath = `${datasetId}.social_post_snapshots`;

  try {
- const conditions = [`date = '${dateStr}'`];
+ // Build WHERE clause with parameterized queries (SQL injection safe)
+ const conditions = [`date = @dateStr`];
+ const params = { dateStr: dateStr };

  if (userIds && userIds.length > 0) {
- const userIdList = userIds.map(id => String(id)).join(',');
- conditions.push(`user_id IN (${userIdList})`);
+ conditions.push(`user_id IN UNNEST(@userIds)`);
+ params.userIds = userIds.map(id => parseInt(id, 10));
  }

  if (userTypes && userTypes.length > 0) {
- const typeList = userTypes.map(t => `'${t.toUpperCase()}'`).join(',');
- conditions.push(`user_type IN (${typeList})`);
+ conditions.push(`user_type IN UNNEST(@userTypes)`);
+ params.userTypes = userTypes.map(t => t.toUpperCase());
  }

  const whereClause = conditions.join(' AND ');
@@ -1110,7 +1237,7 @@ async function querySocialData(dateStr, userIds = null, userTypes = null, logger
  logger.log('INFO', `[BigQuery] 🔍 Querying social posts from ${tablePath} for date ${dateStr}${userTypes ? ` (types: ${userTypes.join(',')})` : ''}${userIds ? ` (${userIds.length} users)` : ''}`);
  }

- const rows = await query(sqlQuery, {}, logger);
+ const rows = await query(sqlQuery, { params }, logger);

  if (!rows || rows.length === 0) {
  if (logger) logger.log('INFO', `[BigQuery] No social data found in ${tablePath} for ${dateStr}`);
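The switch to @dateStr and UNNEST(@userIds) placeholders assumes the local query() wrapper forwards the params object to the BigQuery client's parameterized-query support. With the official @google-cloud/bigquery client, the equivalent call looks roughly like the sketch below (table, dataset and values are illustrative; BigQuery.date() binds a DATE-typed parameter, while a plain JS string binds as STRING):

const { BigQuery } = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function querySocialPostsForDate() {
  // Named parameters: scalars bind by name, arrays bind through IN UNNEST(@userIds).
  const [rows] = await bigquery.query({
    query: `SELECT user_id, user_type, posts_data
            FROM \`bulltrackers_data.social_post_snapshots\`
            WHERE date = @dateStr AND user_id IN UNNEST(@userIds)`,
    params: { dateStr: BigQuery.date('2024-01-15'), userIds: [101, 202] }
  });
  return rows;
}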
@@ -1711,6 +1838,285 @@ async function queryTickerMappings(logger = null) {
  }
  }

+ /**
+ * Query PI ratings from BigQuery for a specific date
+ * Returns data in format: { piId: { averageRating, totalRatings, ratingsByUser } }
+ * @param {string} dateStr - Date (YYYY-MM-DD)
+ * @param {object} logger - Logger instance
+ * @returns {Promise<object|null>} Ratings data map, or null if not found/error
+ */
+ async function queryPIRatings(dateStr, logger = null) {
+ if (process.env.BIGQUERY_ENABLED === 'false') {
+ if (logger) logger.log('DEBUG', '[BigQuery] PI ratings query skipped (BIGQUERY_ENABLED=false)');
+ return null;
+ }
+
+ const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+ const tablePath = `${datasetId}.pi_ratings`;
+
+ try {
+ const sqlQuery = `
+ SELECT
+ pi_id,
+ average_rating,
+ total_ratings,
+ ratings_by_user
+ FROM \`${tablePath}\`
+ WHERE date = @dateStr
+ ORDER BY pi_id ASC
+ `;
+
+ if (logger) {
+ logger.log('INFO', `[BigQuery] 🔍 Querying PI ratings from ${tablePath} for ${dateStr}`);
+ }
+
+ const rows = await query(sqlQuery, {
+ params: {
+ dateStr: dateStr
+ }
+ }, logger);
+
+ if (!rows || rows.length === 0) {
+ if (logger) logger.log('INFO', `[BigQuery] No PI ratings found for ${dateStr}`);
+ return null;
+ }
+
+ // Transform to expected format: { piId: { averageRating, totalRatings, ratingsByUser } }
+ const ratings = {};
+ for (const row of rows) {
+ const piId = String(row.pi_id);
+ ratings[piId] = {
+ averageRating: row.average_rating || 0,
+ totalRatings: row.total_ratings || 0,
+ ratingsByUser: row.ratings_by_user || {}
+ };
+ }
+
+ if (logger) {
+ logger.log('INFO', `[BigQuery] ✅ Retrieved ratings for ${Object.keys(ratings).length} PIs for ${dateStr}`);
+ }
+
+ return ratings;
+ } catch (error) {
+ if (logger) {
+ logger.log('WARN', `[BigQuery] PI ratings query failed for ${dateStr}: ${error.message}`);
+ }
+ return null;
+ }
+ }
+
+ /**
+ * Query PI page views from BigQuery for a specific date
+ * Returns data in format: { piId: { totalViews, uniqueViewers, viewsByUser } }
+ * @param {string} dateStr - Date (YYYY-MM-DD)
+ * @param {object} logger - Logger instance
+ * @returns {Promise<object|null>} Page views data map, or null if not found/error
+ */
+ async function queryPIPageViews(dateStr, logger = null) {
+ if (process.env.BIGQUERY_ENABLED === 'false') {
+ if (logger) logger.log('DEBUG', '[BigQuery] PI page views query skipped (BIGQUERY_ENABLED=false)');
+ return null;
+ }
+
+ const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+ const tablePath = `${datasetId}.pi_page_views`;
+
+ try {
+ const sqlQuery = `
+ SELECT
+ pi_id,
+ total_views,
+ unique_viewers,
+ views_by_user
+ FROM \`${tablePath}\`
+ WHERE date = @dateStr
+ ORDER BY pi_id ASC
+ `;
+
+ if (logger) {
+ logger.log('INFO', `[BigQuery] 🔍 Querying PI page views from ${tablePath} for ${dateStr}`);
+ }
+
+ const rows = await query(sqlQuery, {
+ params: {
+ dateStr: dateStr
+ }
+ }, logger);
+
+ if (!rows || rows.length === 0) {
+ if (logger) logger.log('INFO', `[BigQuery] No PI page views found for ${dateStr}`);
+ return null;
+ }
+
+ // Transform to expected format: { piId: { totalViews, uniqueViewers, viewsByUser } }
+ const pageViews = {};
+ for (const row of rows) {
+ const piId = String(row.pi_id);
+ pageViews[piId] = {
+ totalViews: row.total_views || 0,
+ uniqueViewers: row.unique_viewers || 0,
+ viewsByUser: row.views_by_user || {}
+ };
+ }
+
+ if (logger) {
+ logger.log('INFO', `[BigQuery] ✅ Retrieved page views for ${Object.keys(pageViews).length} PIs for ${dateStr}`);
+ }
+
+ return pageViews;
+ } catch (error) {
+ if (logger) {
+ logger.log('WARN', `[BigQuery] PI page views query failed for ${dateStr}: ${error.message}`);
+ }
+ return null;
+ }
+ }
+
+ /**
+ * Query watchlist membership from BigQuery for a specific date
+ * Returns data in format: { piId: { totalUsers, users, publicWatchlistCount, privateWatchlistCount } }
+ * @param {string} dateStr - Date (YYYY-MM-DD)
+ * @param {object} logger - Logger instance
+ * @returns {Promise<object|null>} Watchlist membership data map, or null if not found/error
+ */
+ async function queryWatchlistMembership(dateStr, logger = null) {
+ if (process.env.BIGQUERY_ENABLED === 'false') {
+ if (logger) logger.log('DEBUG', '[BigQuery] Watchlist membership query skipped (BIGQUERY_ENABLED=false)');
+ return null;
+ }
+
+ const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+ const tablePath = `${datasetId}.watchlist_membership`;
+
+ try {
+ const sqlQuery = `
+ SELECT
+ pi_id,
+ total_users,
+ public_watchlist_count,
+ private_watchlist_count,
+ users
+ FROM \`${tablePath}\`
+ WHERE date = @dateStr
+ ORDER BY pi_id ASC
+ `;
+
+ if (logger) {
+ logger.log('INFO', `[BigQuery] 🔍 Querying watchlist membership from ${tablePath} for ${dateStr}`);
+ }
+
+ const rows = await query(sqlQuery, {
+ params: {
+ dateStr: dateStr
+ }
+ }, logger);
+
+ if (!rows || rows.length === 0) {
+ if (logger) logger.log('INFO', `[BigQuery] No watchlist membership found for ${dateStr}`);
+ return null;
+ }
+
+ // Transform to expected format: { piId: { totalUsers, users, publicWatchlistCount, privateWatchlistCount } }
+ const membership = {};
+ for (const row of rows) {
+ const piId = String(row.pi_id);
+ membership[piId] = {
+ totalUsers: row.total_users || 0,
+ users: row.users || [],
+ publicWatchlistCount: row.public_watchlist_count || 0,
+ privateWatchlistCount: row.private_watchlist_count || 0
+ };
+ }
+
+ if (logger) {
+ logger.log('INFO', `[BigQuery] ✅ Retrieved watchlist membership for ${Object.keys(membership).length} PIs for ${dateStr}`);
+ }
+
+ return membership;
+ } catch (error) {
+ if (logger) {
+ logger.log('WARN', `[BigQuery] Watchlist membership query failed for ${dateStr}: ${error.message}`);
+ }
+ return null;
+ }
+ }
+
+ /**
+ * Query PI alert history from BigQuery for a specific date
+ * Returns data in format: { piId: { alertType: { triggered, count, triggeredFor, metadata } } }
+ * @param {string} dateStr - Date (YYYY-MM-DD)
+ * @param {object} logger - Logger instance
+ * @returns {Promise<object|null>} Alert history data map, or null if not found/error
+ */
+ async function queryPIAlertHistory(dateStr, logger = null) {
+ if (process.env.BIGQUERY_ENABLED === 'false') {
+ if (logger) logger.log('DEBUG', '[BigQuery] PI alert history query skipped (BIGQUERY_ENABLED=false)');
+ return null;
+ }
+
+ const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+ const tablePath = `${datasetId}.pi_alert_history`;
+
+ try {
+ const sqlQuery = `
+ SELECT
+ pi_id,
+ alert_type,
+ triggered,
+ trigger_count,
+ triggered_for,
+ metadata
+ FROM \`${tablePath}\`
+ WHERE date = @dateStr
+ ORDER BY pi_id ASC, alert_type ASC
+ `;
+
+ if (logger) {
+ logger.log('INFO', `[BigQuery] 🔍 Querying PI alert history from ${tablePath} for ${dateStr}`);
+ }
+
+ const rows = await query(sqlQuery, {
+ params: {
+ dateStr: dateStr
+ }
+ }, logger);
+
+ if (!rows || rows.length === 0) {
+ if (logger) logger.log('INFO', `[BigQuery] No PI alert history found for ${dateStr}`);
+ return null;
+ }
+
+ // Transform to expected format: { piId: { alertType: { triggered, count, triggeredFor, metadata } } }
+ const alertHistory = {};
+ for (const row of rows) {
+ const piId = String(row.pi_id);
+ const alertType = row.alert_type;
+
+ if (!alertHistory[piId]) {
+ alertHistory[piId] = {};
+ }
+
+ alertHistory[piId][alertType] = {
+ triggered: row.triggered || false,
+ count: row.trigger_count || 0,
+ triggeredFor: row.triggered_for || [],
+ metadata: row.metadata || {}
+ };
+ }
+
+ if (logger) {
+ logger.log('INFO', `[BigQuery] ✅ Retrieved alert history for ${Object.keys(alertHistory).length} PIs for ${dateStr}`);
+ }
+
+ return alertHistory;
+ } catch (error) {
+ if (logger) {
+ logger.log('WARN', `[BigQuery] PI alert history query failed for ${dateStr}: ${error.message}`);
+ }
+ return null;
+ }
+ }
+
  module.exports = {
  getBigQueryClient,
  getOrCreateDataset,
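Per their JSDoc, the four new query helpers resolve to per-PI maps keyed by the stringified pi_id, or null when the date has no rows. A small consumption sketch; the require path, date handling and alert-type name are illustrative:

const { queryPIPageViews, queryPIAlertHistory } = require('./core/utils/bigquery_utils'); // path illustrative

async function summarizeDay(dateStr) {
  const pageViews = await queryPIPageViews(dateStr, console);  // { '12345': { totalViews, uniqueViewers, viewsByUser } } or null
  const alerts = await queryPIAlertHistory(dateStr, console);  // { '12345': { SOME_ALERT_TYPE: { triggered, count, triggeredFor, metadata } } } or null
  if (!pageViews) return null;
  return Object.keys(pageViews).map(piId => ({
    piId,
    totalViews: pageViews[piId].totalViews,
    alertTypes: alerts && alerts[piId] ? Object.keys(alerts[piId]) : []
  }));
}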
@@ -1728,6 +2134,10 @@ module.exports = {
  ensurePIRankingsTable,
  ensureInstrumentInsightsTable,
  ensureTickerMappingsTable,
+ ensurePIRatingsTable,
+ ensurePIPageViewsTable,
+ ensureWatchlistMembershipTable,
+ ensurePIAlertHistoryTable,
  queryPortfolioData,
  queryHistoryData,
  querySocialData,
@@ -1736,6 +2146,10 @@ module.exports = {
  queryPIRankings,
  queryInstrumentInsights,
  queryTickerMappings,
+ queryPIRatings,
+ queryPIPageViews,
+ queryWatchlistMembership,
+ queryPIAlertHistory,
  queryComputationResult,
  queryComputationResultsRange,
  checkExistingRows,
@@ -0,0 +1,116 @@
+ /**
+ * @fileoverview Backfill Ticker Mappings from Firestore to BigQuery
+ *
+ * This function reads the single ticker mappings document from Firestore
+ * and writes it to BigQuery table.
+ *
+ * Usage (Local Node.js script):
+ * node index.js
+ *
+ * Features:
+ * - Reads single document from Firestore: instrument_mappings/etoro_to_ticker
+ * - Transforms to BigQuery rows (one row per instrument)
+ * - Uses insertRowsWithMerge (MERGE operation) for idempotent writes
+ * - Does NOT delete any Firestore data
+ */
+
+ const { Firestore } = require('@google-cloud/firestore');
+ const {
+ ensureTickerMappingsTable,
+ insertRowsWithMerge
+ } = require('../../core/utils/bigquery_utils');
+
+ const db = new Firestore();
+
+ /**
+ * Backfill ticker mappings from Firestore to BigQuery
+ */
+ async function backfillTickerMappings(logger = console) {
+ logger.log('INFO', '[Backfill] Starting ticker mappings backfill...');
+
+ try {
+ await ensureTickerMappingsTable(logger);
+
+ // Read the single document from Firestore
+ logger.log('INFO', '[Backfill] Fetching ticker mappings from Firestore...');
+ const docRef = db.collection('instrument_mappings').doc('etoro_to_ticker');
+ const docSnap = await docRef.get();
+
+ if (!docSnap.exists) {
+ logger.log('WARN', '[Backfill] Ticker mappings document not found in Firestore');
+ return { success: false, message: 'Document not found' };
+ }
+
+ const mappingsData = docSnap.data();
+ const instrumentIds = Object.keys(mappingsData);
+
+ if (instrumentIds.length === 0) {
+ logger.log('WARN', '[Backfill] Ticker mappings document is empty');
+ return { success: false, message: 'Document is empty' };
+ }
+
+ logger.log('INFO', `[Backfill] Found ${instrumentIds.length} ticker mappings`);
+
+ // Transform to BigQuery rows
+ const fetchedAt = new Date().toISOString();
+ const bigqueryRows = instrumentIds.map(instrumentId => {
+ return {
+ instrument_id: parseInt(instrumentId, 10),
+ ticker: String(mappingsData[instrumentId]),
+ last_updated: fetchedAt
+ };
+ });
+
+ // Write to BigQuery using MERGE (idempotent, can re-run safely)
+ const datasetId = process.env.BIGQUERY_DATASET_ID || 'bulltrackers_data';
+ const keyFields = ['instrument_id'];
+ await insertRowsWithMerge(datasetId, 'ticker_mappings', bigqueryRows, keyFields, logger);
+
+ logger.log('SUCCESS', `[Backfill] ✅ Ticker mappings backfill complete: ${bigqueryRows.length} mappings`);
+
+ return { success: true, totalRows: bigqueryRows.length };
+ } catch (error) {
+ logger.log('ERROR', `[Backfill] Ticker mappings backfill failed: ${error.message}`);
+ throw error;
+ }
+ }
+
+ /**
+ * Main entry point
+ */
+ async function backfillTickerMappingsMain() {
+ const logger = {
+ log: (level, message, ...args) => {
+ const timestamp = new Date().toISOString();
+ console.log(`[${timestamp}] [${level}] ${message}`, ...args);
+ }
+ };
+
+ logger.log('INFO', '[Backfill] Starting Ticker Mappings backfill...');
+
+ try {
+ const result = await backfillTickerMappings(logger);
+
+ logger.log('SUCCESS', '[Backfill] ✅ All backfills completed!');
+ return result;
+ } catch (error) {
+ logger.log('ERROR', `[Backfill] Fatal error: ${error.message}`);
+ throw error;
+ }
+ }
+
+ // CLI handling
+ if (require.main === module) {
+ backfillTickerMappingsMain()
+ .then(result => {
+ console.log('\n✅ Backfill completed successfully!');
+ console.log('Results:', JSON.stringify(result, null, 2));
+ process.exit(0);
+ })
+ .catch(error => {
+ console.error('\n❌ Backfill failed:', error);
+ process.exit(1);
+ });
+ }
+
+ module.exports = { backfillTickerMappings, backfillTickerMappingsMain };
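Besides running node index.js, the exported function can be invoked programmatically; a minimal sketch, assuming the require path registered in package/index.js below and the script's default dataset ('bulltrackers_data' unless BIGQUERY_DATASET_ID is set):

const { backfillTickerMappings } = require('./functions/maintenance/backfill-ticker-mappings/index');

async function runBackfill() {
  // console satisfies the logger.log(level, message) shape the script defaults to.
  const result = await backfillTickerMappings(console);
  console.log('Backfilled rows:', result.totalRows);
}

runBackfill().catch(err => {
  console.error('Backfill failed:', err.message);
  process.exit(1);
});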
package/index.js CHANGED
@@ -67,6 +67,10 @@ const { backfillPIMasterListRankings } = require('./functions
  const { backfillInstrumentInsights } = require('./functions/maintenance/backfill-instrument-insights/index');
  const { backfillTickerMappings } = require('./functions/maintenance/backfill-ticker-mappings/index');
  const { backfillPriceData } = require('./functions/maintenance/backfill-price-data-from-firestore/index');
+ const { backfillPIRatings } = require('./functions/maintenance/backfill-pi-ratings/index');
+ const { backfillPIPageViews } = require('./functions/maintenance/backfill-pi-page-views/index');
+ const { backfillWatchlistMembershipData } = require('./functions/maintenance/backfill-watchlist-membership/index');
+ const { backfillPIAlertHistory } = require('./functions/maintenance/backfill-pi-alert-history/index');

  // Alert System
  const { handleAlertTrigger, handleComputationResultWrite, checkAndSendAllClearNotifications } = require('./functions/alert-system/index');
@@ -139,7 +143,11 @@ const maintenance = {
  backfillPIMasterListRankings,
  backfillInstrumentInsights,
  backfillTickerMappings,
- backfillPriceData
+ backfillPriceData,
+ backfillPIRatings,
+ backfillPIPageViews,
+ backfillWatchlistMembershipData,
+ backfillPIAlertHistory
  };

  const proxy = { handlePost };
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bulltrackers-module",
- "version": "1.0.717",
+ "version": "1.0.719",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [
@@ -24,7 +24,8 @@
  "functions/alert-system/",
  "functions/maintenance/backfill-instrument-insights",
  "functions/maintenance/backfill-pi-master-list-rankings",
- "functions/maintenance/backfill-task-engine-data"
+ "functions/maintenance/backfill-task-engine-data",
+ "functions/maintenance/backfill-ticker-mappings"
  ],
  "keywords": [
  "bulltrackers",