bulltrackers-module 1.0.491 → 1.0.493
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/generic-api/user-api/helpers/data_helpers.js +35 -5
- package/functions/generic-api/user-api/helpers/on_demand_fetch_helpers.js +1 -2
- package/functions/task-engine/handler_creator.js +19 -4
- package/functions/task-engine/helpers/popular_investor_helpers.js +152 -40
- package/package.json +1 -1
package/functions/generic-api/user-api/helpers/data_helpers.js

@@ -891,6 +891,7 @@ async function getUserComputations(req, res, dependencies, config) {
 
     // If mode is 'latest', try to find the latest available date for the first computation
     // Use effectiveCid for computation lookup
+    // CRITICAL: Only look back 7 days as per requirements
     if (mode === 'latest') {
      const firstCompName = computationNames[0];
      const latestDate = await findLatestComputationDate(
@@ -901,7 +902,7 @@ async function getUserComputations(req, res, dependencies, config) {
        category,
        firstCompName,
        effectiveCid,
-
+        7 // Only look back 7 days as per requirements
      );
 
      if (latestDate) {
@@ -910,14 +911,15 @@ async function getUserComputations(req, res, dependencies, config) {
          logger.log('INFO', `[getUserComputations] Using fallback date ${latestDate} for effective CID ${effectiveCid} (today: ${today})`);
        }
      } else {
-        // No data found
+        // No data found after 7 days - return empty (frontend will use fallback)
+        logger.log('WARN', `[getUserComputations] No computation data found for CID ${effectiveCid} in last 7 days. Frontend will use fallback.`);
        return res.status(200).json({
          status: 'success',
          userCid: String(effectiveCid),
          mode,
          computations: computationNames,
          data: {},
-          isFallback:
+          isFallback: true, // Mark as fallback since no data found
          requestedDate: today,
          isImpersonating: isImpersonating || false,
          actualCid: Number(userCid)
@@ -955,10 +957,38 @@ async function getUserComputations(req, res, dependencies, config) {
      if (doc.exists) {
        // Decompress if needed (handles byte string storage)
        const rawData = doc.data();
-
+        let data = tryDecompress(rawData);
+
+        // Handle string decompression result
+        if (typeof data === 'string') {
+          try {
+            data = JSON.parse(data);
+          } catch (e) {
+            logger.log('WARN', `[getUserComputations] Failed to parse decompressed string for ${compName} on ${date}:`, e.message);
+            data = null;
+          }
+        }
+
+        // Check if data is sharded
+        if (data && data._sharded === true && data._shardCount) {
+          // Data is stored in shards - read all shards and merge
+          const shardsCol = docRef.collection('_shards');
+          const shardsSnapshot = await shardsCol.get();
+
+          if (!shardsSnapshot.empty) {
+            data = {};
+            for (const shardDoc of shardsSnapshot.docs) {
+              const shardData = shardDoc.data();
+              Object.assign(data, shardData);
+            }
+          } else {
+            data = null; // Sharded but no shards found
+          }
+        }
+
        // Filter by user CID - computation results are stored as { cid: result }
        // Use effectiveCid for lookup
-        let userResult = data[String(effectiveCid)];
+        let userResult = data && typeof data === 'object' ? data[String(effectiveCid)] : null;
 
        // Apply dev override for computations that include copied PIs (use actual userCid for override check)
        if (isDevOverrideActive && (compName === 'SignedInUserProfileMetrics' || compName === 'SignedInUserCopiedPIs')) {
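
For context, the read path added in the last hunk can be exercised in isolation. The sketch below is illustrative only: `tryDecompress`, the `_sharded`/`_shardCount` flags and the `_shards` subcollection come from the diff, while the helper name `readComputationDoc` and its parameter list are hypothetical.

// Illustrative sketch (not part of the package): mirrors the decompress/parse/merge
// logic added to getUserComputations. `docRef`, `tryDecompress` and `logger` are
// passed in rather than resolved from the module's own context.
async function readComputationDoc(docRef, tryDecompress, logger) {
  const doc = await docRef.get();
  if (!doc.exists) return null;

  // Decompress if needed (handles byte string storage)
  let data = tryDecompress(doc.data());

  // Decompression may yield a JSON string rather than an object
  if (typeof data === 'string') {
    try {
      data = JSON.parse(data);
    } catch (e) {
      logger.log('WARN', `Failed to parse decompressed string: ${e.message}`);
      return null;
    }
  }

  // Large results are split into a '_shards' subcollection and merged back here
  if (data && data._sharded === true && data._shardCount) {
    const shardsSnapshot = await docRef.collection('_shards').get();
    if (shardsSnapshot.empty) return null; // sharded but no shards found
    data = {};
    for (const shardDoc of shardsSnapshot.docs) {
      Object.assign(data, shardDoc.data());
    }
  }

  return data && typeof data === 'object' ? data : null;
}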
package/functions/generic-api/user-api/helpers/on_demand_fetch_helpers.js

@@ -5,6 +5,7 @@
 
 const { FieldValue } = require('@google-cloud/firestore');
 const crypto = require('crypto');
+const { tryDecompress } = require('./data_helpers');
 
 const RATE_LIMIT_HOURS = 1;
 const RATE_LIMIT_MS = RATE_LIMIT_HOURS * 60 * 60 * 1000;
@@ -253,7 +254,6 @@ async function getPiFetchStatus(req, res, dependencies, config) {
 
    const doc = await docRef.get();
    if (doc.exists) {
-      const { tryDecompress } = require('./data_helpers');
      const data = tryDecompress(doc.data());
 
      if (data && data[String(piCidNum)]) {
@@ -316,7 +316,6 @@ async function getPiFetchStatus(req, res, dependencies, config) {
      }
    } else {
      // Data is in the main document (compressed or not)
-      const { tryDecompress } = require('./data_helpers');
      mergedData = tryDecompress(docData);
 
      // Handle string decompression result
package/functions/task-engine/handler_creator.js

@@ -235,12 +235,27 @@ async function handleRequest(message, context, configObj, dependencies) {
      await handlePopularInvestorUpdate(taskData, configObj, dependencies);
      break;
    case 'ON_DEMAND_USER_UPDATE':
-
-
-
+      // For ON_DEMAND_USER_UPDATE, the entire payload IS the task data
+      // (not wrapped in a 'data' field like other task types)
+      // Extract task data from payload, excluding 'type'
+      const onDemandTaskData = data || {
+        cid: payload.cid,
+        username: payload.username,
+        requestId: payload.requestId,
+        source: payload.source,
+        requestedBy: payload.requestedBy,
+        effectiveRequestedBy: payload.effectiveRequestedBy,
+        metadata: payload.metadata,
+        priority: payload.priority,
+        data: payload.data // Include nested data object (includeSocial, since, etc.)
+      };
+
+      if (!onDemandTaskData.cid || !onDemandTaskData.username) {
+        logger.log('ERROR', `[TaskEngine] ON_DEMAND_USER_UPDATE missing required fields (cid or username)`, { payload, onDemandTaskData });
        return;
      }
-
+
+      await handleOnDemandUserUpdate(onDemandTaskData, configObj, dependencies);
      break;
    case 'SOCIAL_INSTRUMENT_FETCH':
    case 'SOCIAL_PI_FETCH':
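
To make the unwrapping above concrete, this is the kind of Pub/Sub payload the new `ON_DEMAND_USER_UPDATE` branch expects. The field names are taken from the diff; all values below are invented for illustration.

// Hypothetical example payload - values are placeholders, only the shape is meaningful
const examplePayload = {
  type: 'ON_DEMAND_USER_UPDATE',
  cid: 12345678,                 // required - the handler logs an error and returns without it
  username: 'example-user',      // required
  requestId: 'req-123',
  source: 'on_demand_sync',
  requestedBy: 'example-user',
  effectiveRequestedBy: 'example-user',
  metadata: { requestingUserCid: 12345678 },
  priority: 'high',
  data: { includeSocial: true, since: null }   // nested options (includeSocial, since, etc.)
};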
package/functions/task-engine/helpers/popular_investor_helpers.js

@@ -500,6 +500,9 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
  // [FIX] Destructure dependencies first
  const { logger, proxyManager, batchManager, headerManager, db } = dependencies;
 
+  // Import notification helper once at the top
+  const { notifyTaskEngineComplete } = require('../../generic-api/user-api/helpers/notification_helpers');
+
  // Validate and set API URLs with defaults and fallbacks
  const ETORO_API_PORTFOLIO_URL = config.ETORO_API_PORTFOLIO_URL || process.env.ETORO_API_PORTFOLIO_URL || 'https://www.etoro.com/sapi/portfolios/portfolio';
  const ETORO_API_HISTORY_URL = config.ETORO_API_HISTORY_URL || process.env.ETORO_API_HISTORY_URL || 'https://www.etoro.com/sapi/trade-data-real/history/public/credit/flat';
@@ -558,6 +561,48 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
  let fetchSuccess = false;
  let portfolioFetched = false;
  let historyFetched = false;
+  let criticalError = null; // Track critical errors that should fail the sync
+
+  // Helper function to mark sync as failed and send notification
+  const markSyncFailed = async (errorMessage, stage) => {
+    if (requestId && source === 'on_demand_sync' && db) {
+      try {
+        const requestRef = db.collection('user_sync_requests')
+          .doc(String(cid))
+          .collection('requests')
+          .doc(requestId);
+
+        await requestRef.update({
+          status: 'failed',
+          error: errorMessage,
+          failedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp(),
+          updatedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp(),
+          failedStage: stage
+        });
+
+        // Send error notification
+        if (metadata?.requestingUserCid) {
+          try {
+            await notifyTaskEngineComplete(
+              db,
+              logger,
+              metadata.requestingUserCid,
+              requestId,
+              username,
+              false,
+              errorMessage
+            );
+          } catch (notifErr) {
+            logger.log('WARN', `[On-Demand Update] Failed to send error notification`, notifErr);
+          }
+        }
+
+        logger.log('ERROR', `[On-Demand Update] Marked sync ${requestId} as failed at stage: ${stage}. Error: ${errorMessage}`);
+      } catch (updateErr) {
+        logger.log('ERROR', `[On-Demand Update] Failed to mark sync as failed for ${requestId}`, updateErr);
+      }
+    }
+  };
 
  try {
    // Portfolio Fetch (only if portfolioOnly is true)
@@ -597,7 +642,10 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
            portfolioSuccess = true;
          } catch (fetchErr) {
            logger.log('ERROR', `[On-Demand] Direct fetch also failed for ${username}`, fetchErr);
-
+            const errorMsg = `Failed to fetch portfolio for ${cid}. Direct: ${fetchErr.message}`;
+            criticalError = new Error(errorMsg);
+            await markSyncFailed(errorMsg, 'portfolio_fetch');
+            throw criticalError;
          }
        }
 
@@ -608,7 +656,15 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
        fetchSuccess = true;
        portfolioFetched = true;
 
-
+        try {
+          await batchManager.addToPortfolioBatch(String(cid), blockId, today, portfolioData, 'signed_in_user');
+        } catch (batchErr) {
+          const errorMsg = `Failed to store portfolio data: ${batchErr.message}`;
+          logger.log('ERROR', `[On-Demand] ${errorMsg}`, batchErr);
+          criticalError = new Error(errorMsg);
+          await markSyncFailed(errorMsg, 'portfolio_storage');
+          throw criticalError;
+        }
      } else {
        logger.log('INFO', `[On-Demand Update] Skipping portfolio fetch (portfolioOnly=false) for ${username}`);
      }
@@ -660,23 +716,49 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
        }
 
        if (historySuccess && historyRes.ok) {
-
-
-
-
-
-          historyData.PublicHistoryPositions
-
-
+          try {
+            const historyData = await historyRes.json();
+            historyData.fetchedAt = new Date();
+
+            const VALID_REASONS = [0, 1, 5];
+            if (historyData.PublicHistoryPositions) {
+              historyData.PublicHistoryPositions = historyData.PublicHistoryPositions.filter(
+                p => VALID_REASONS.includes(p.CloseReason)
+              );
+            }
+
+            try {
+              await batchManager.addToTradingHistoryBatch(String(cid), blockId, today, historyData, 'signed_in_user');
+              historyFetched = true;
+            } catch (batchErr) {
+              const errorMsg = `Failed to store history data: ${batchErr.message}`;
+              logger.log('ERROR', `[On-Demand] ${errorMsg}`, batchErr);
+              criticalError = new Error(errorMsg);
+              await markSyncFailed(errorMsg, 'history_storage');
+              throw criticalError;
+            }
+          } catch (parseErr) {
+            const errorMsg = `Failed to parse history data: ${parseErr.message}`;
+            logger.log('ERROR', `[On-Demand] ${errorMsg}`, parseErr);
+            criticalError = new Error(errorMsg);
+            await markSyncFailed(errorMsg, 'history_parse');
+            throw criticalError;
          }
-
-
+        } else {
+          // History fetch failed - this is critical if portfolioOnly is true
+          if (portfolioOnly) {
+            const errorMsg = `History fetch failed for ${username} (status: ${historyRes?.status || 'unknown'})`;
+            logger.log('ERROR', `[On-Demand Update] ${errorMsg}`);
+            criticalError = new Error(errorMsg);
+            await markSyncFailed(errorMsg, 'history_fetch');
+            throw criticalError;
          } else {
-            logger.log('WARN', `[On-Demand Update] History fetch failed for ${username} (${historyRes
+            logger.log('WARN', `[On-Demand Update] History fetch failed for ${username} (${historyRes?.status || 'unknown'})`);
          }
-      } else {
-        logger.log('INFO', `[On-Demand Update] Skipping history fetch (portfolioOnly=false) for ${username}`);
        }
+      } else {
+        logger.log('INFO', `[On-Demand Update] Skipping history fetch (portfolioOnly=false) for ${username}`);
+      }
 
      // Fetch social data if requested (for user signup or explicit request)
      let socialFetched = false;
@@ -764,7 +846,6 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
    // Send notification to requesting user if this is an on-demand sync
    if (requestId && source === 'on_demand_sync' && metadata?.requestingUserCid) {
      try {
-        const { notifyTaskEngineComplete } = require('../../generic-api/user-api/helpers/notification_helpers');
        const success = portfolioFetched || historyFetched; // At least one should succeed
        await notifyTaskEngineComplete(
          db,
@@ -816,14 +897,12 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
        updatedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp()
      });
    } catch (indexerError) {
-
-
-      //
-
-
-
-      updatedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp()
-      });
+      const errorMsg = `Failed to run root data indexer: ${indexerError.message}`;
+      logger.log('ERROR', `[On-Demand Update] ${errorMsg}`, indexerError);
+      // Root data indexing is critical - if it fails, computations won't know data exists
+      criticalError = new Error(errorMsg);
+      await markSyncFailed(errorMsg, 'root_data_indexing');
+      throw criticalError;
    }
 
    // Trigger computations with dependency chain resolution
@@ -870,22 +949,30 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
        // Don't mark as completed yet - wait for computation to finish
        // The status will remain 'computing' until computation completes
      } else {
-
-
-
-
-
-        updatedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp()
-        });
+        const errorMsg = 'PubSub not available, cannot trigger computation';
+        logger.log('ERROR', `[On-Demand Update] ${errorMsg}`);
+        criticalError = new Error(errorMsg);
+        await markSyncFailed(errorMsg, 'computation_trigger');
+        throw criticalError;
      }
    } catch (err) {
-
+      // If this is a critical error we already handled, re-throw it
+      if (criticalError) {
+        throw criticalError;
+      }
+      // Otherwise, this is an unexpected error in the computation triggering block
+      const errorMsg = `Failed to trigger computations: ${err.message}`;
+      logger.log('ERROR', `[On-Demand Update] ${errorMsg}`, err);
+      criticalError = new Error(errorMsg);
+      await markSyncFailed(errorMsg, 'computation_trigger');
+      throw criticalError;
    }
  }
 } catch (error) {
  logger.log('ERROR', `[On-Demand Update] Failed for ${username}`, error);
 
  // Update request status to failed if this is a sync request
+  // Only update if we haven't already marked it as failed (to avoid duplicate updates)
  if (requestId && source === 'on_demand_sync' && db) {
    try {
      const requestRef = db.collection('user_sync_requests')
@@ -893,14 +980,39 @@ async function handleOnDemandUserUpdate(taskData, config, dependencies) {
        .collection('requests')
        .doc(requestId);
 
-
-      await requestRef.
-
-      error
-
-
-
-
+      // Check current status to avoid overwriting if already marked as failed
+      const currentRequest = await requestRef.get();
+      if (currentRequest.exists && currentRequest.data().status !== 'failed') {
+        const errorMessage = error.message || 'Unknown error occurred';
+        const failedStage = error.failedStage || 'unknown';
+
+        await requestRef.update({
+          status: 'failed',
+          error: errorMessage,
+          failedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp(),
+          updatedAt: require('@google-cloud/firestore').FieldValue.serverTimestamp(),
+          failedStage: failedStage
+        });
+
+        // Send error notification if not already sent
+        if (metadata?.requestingUserCid) {
+          try {
+            await notifyTaskEngineComplete(
+              db,
+              logger,
+              metadata.requestingUserCid,
+              requestId,
+              username,
+              false,
+              errorMessage
+            );
+          } catch (notifErr) {
+            logger.log('WARN', `[On-Demand Update] Failed to send error notification`, notifErr);
+          }
+        }
+
+        logger.log('INFO', `[On-Demand Update] Updated sync request ${requestId} to failed: ${errorMessage} (stage: ${failedStage})`);
+      }
    } catch (err) {
      logger.log('WARN', `[On-Demand Update] Failed to update request status to failed for ${requestId}`, err);
    }
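
As a usage note, the failure path above writes `status`, `error`, `failedStage` and `failedAt` onto the sync request document under `user_sync_requests/{cid}/requests/{requestId}`. A minimal sketch of reading that status back, assuming default Firestore credentials; the function name and logging are hypothetical:

const { Firestore } = require('@google-cloud/firestore');

// Illustrative only: the collection path and field names come from the diff, the rest is assumed.
async function getSyncRequestStatus(cid, requestId) {
  const db = new Firestore();
  const snap = await db
    .collection('user_sync_requests')
    .doc(String(cid))
    .collection('requests')
    .doc(requestId)
    .get();

  if (!snap.exists) return null;

  const { status, error, failedStage } = snap.data();
  if (status === 'failed') {
    console.log(`Sync ${requestId} failed at stage "${failedStage}": ${error}`);
  }
  return status;
}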