bulltrackers-module 1.0.526 → 1.0.528
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/utils/data_loader.js +1 -1
- package/functions/fetch-popular-investors/helpers/fetch_helpers.js +47 -3
- package/functions/generic-api/user-api/helpers/core/path_resolution_helpers.js +1 -1
- package/functions/generic-api/user-api/helpers/core/user_status_helpers.js +4 -1
- package/functions/generic-api/user-api/helpers/fetch/on_demand_fetch_helpers.js +12 -7
- package/functions/generic-api/user-api/helpers/notifications/notification_helpers.js +62 -4
- package/functions/generic-api/user-api/helpers/search/pi_request_helpers.js +4 -1
- package/functions/generic-api/user-api/helpers/search/pi_search_helpers.js +4 -1
- package/functions/generic-api/user-api/helpers/watchlist/watchlist_generation_helpers.js +4 -1
- package/functions/task-engine/helpers/data_storage_helpers.js +69 -2
- package/functions/task-engine/helpers/popular_investor_helpers.js +144 -26
- package/package.json +1 -1

package/functions/computation-system/utils/data_loader.js
@@ -127,7 +127,7 @@ async function getPortfolioPartRefs(config, deps, dateString, requiredUserTypes
 }
 }
 
-logger.log('INFO', `Found ${allPartRefs.length} total portfolio refs for ${dateString}`);
+logger.log('INFO', `Found ${allPartRefs.length} total portfolio refs for ${dateString} for types: ${fetchAll ? 'ALL' : Array.from(types).join(',')}`);
 return allPartRefs;
 }
 

package/functions/fetch-popular-investors/helpers/fetch_helpers.js
@@ -2,9 +2,11 @@
 * @fileoverview Logic for fetching and storing Popular Investor rankings.
 * Integrates IntelligentProxyManager for IP rotation and IntelligentHeaderManager for
 * user-agent/header rotation and performance tracking.
+* UPDATED: Added compression support for large rankings data to avoid Firestore index limits.
 */
 const { IntelligentProxyManager } = require('../../core/utils/intelligent_proxy_manager');
 const { IntelligentHeaderManager } = require('../../core/utils/intelligent_header_manager');
+const zlib = require('zlib');
 
 /**
 * Fetches the top Popular Investors and stores the raw result in Firestore.
@@ -120,14 +122,56 @@ async function fetchAndStorePopularInvestors(config, dependencies) {
 try {
 const docRef = db.collection(finalRankingsCollectionName).doc(today);
 
-
+// Prepare data for storage
+const dataToStore = {
 fetchedAt: new Date(),
 totalRows: data.TotalRows,
 itemsCount: data.Items.length,
 ...data
-}
+};
+
+// Apply compression if data is large enough (similar to ResultCommitter pattern)
+let firestorePayload;
+try {
+const jsonString = JSON.stringify(dataToStore);
+const rawBuffer = Buffer.from(jsonString);
+const SIZE_THRESHOLD = 50 * 1024; // 50KB threshold (same as ResultCommitter)
+const MAX_COMPRESSED_SIZE = 900 * 1024; // 900KB max (Firestore limit is 1MB)
+
+if (rawBuffer.length > SIZE_THRESHOLD) {
+logger.log('INFO', `[PopularInvestorFetch] Rankings data size ${(rawBuffer.length / 1024).toFixed(2)}KB exceeds threshold. Compressing...`);
+
+// Gzip the JSON string
+const compressedBuffer = zlib.gzipSync(rawBuffer);
+
+if (compressedBuffer.length < MAX_COMPRESSED_SIZE) {
+// Create the compressed wrapper structure
+firestorePayload = {
+fetchedAt: new Date(), // Keep outer timestamp for indexing
+totalRows: data.TotalRows, // Keep outer metadata for indexing
+itemsCount: data.Items.length, // Keep outer metadata for indexing
+_compressed: true,
+payload: compressedBuffer
+};
+
+logger.log('INFO', `[PopularInvestorFetch] Compressed rankings data: ${(rawBuffer.length / 1024).toFixed(2)}KB -> ${(compressedBuffer.length / 1024).toFixed(2)}KB`);
+} else {
+// Compressed size still too large, store uncompressed (will need sharding in future)
+logger.log('WARN', `[PopularInvestorFetch] Compressed size ${(compressedBuffer.length / 1024).toFixed(2)}KB still exceeds limit. Storing uncompressed (may hit index limits).`);
+firestorePayload = dataToStore;
+}
+} else {
+// Data is small enough, store uncompressed
+firestorePayload = dataToStore;
+}
+} catch (compressionError) {
+logger.log('WARN', `[PopularInvestorFetch] Compression failed, storing uncompressed: ${compressionError.message}`);
+firestorePayload = dataToStore;
+}
+
+await docRef.set(firestorePayload);
 
-logger.log('SUCCESS', `[PopularInvestorFetch] Stored ${data.TotalRows} rankings into ${finalRankingsCollectionName}/${today}`);
+logger.log('SUCCESS', `[PopularInvestorFetch] Stored ${data.TotalRows} rankings into ${finalRankingsCollectionName}/${today}${firestorePayload._compressed ? ' (compressed)' : ''}`);
 
 // Update the master list of Popular Investors
 try {
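
Several of the files below require a tryDecompress helper from a core compression_helpers module that is not included in this diff. A minimal sketch of what such a helper would look like, assuming it simply reverses the _compressed / payload wrapper written above (file name, location, and exact shape are assumptions, not the package's published code):

// Hypothetical sketch only - the real compression_helpers.js is not shown in this diff.
const zlib = require('zlib');

function tryDecompress(docData) {
  // Documents stored uncompressed pass through untouched.
  if (!docData || !docData._compressed || !docData.payload) {
    return docData;
  }
  // The Firestore Node SDK returns Bytes fields as Buffers; normalise defensively.
  const buffer = Buffer.isBuffer(docData.payload) ? docData.payload : Buffer.from(docData.payload);
  return JSON.parse(zlib.gunzipSync(buffer).toString('utf8'));
}

module.exports = { tryDecompress };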

package/functions/generic-api/user-api/helpers/core/path_resolution_helpers.js
@@ -187,7 +187,7 @@ function getLegacyPath(dataType, userCid, config = {}, params = {}, category = n
 // Popular Investor data (PI CID-based)
 piFetchRequests: `pi_fetch_requests/${piCid}/requests/${requestId}`,
 piFetchStatus: `pi_fetch_requests/${piCid}/global/latest`,
-piUserFetchRequests: `
+piUserFetchRequests: `PopularInvestors/${piCid}/userFetchRequests/${cid}`,
 piReviews: `pi_reviews/${reviewId}`,
 piSocialPosts: `pi_social_posts/${piCid}/posts/${postId}`,
 piProfileViews: `profile_views/${piCid}_${date}`,

package/functions/generic-api/user-api/helpers/core/user_status_helpers.js
@@ -58,7 +58,10 @@ async function checkIfUserIsPI(db, userCid, config, logger = null) {
 return null;
 }
 
-const
+const rawRankingsData = rankingsDoc.data();
+// Decompress if needed
+const { tryDecompress } = require('../compression_helpers');
+const rankingsData = tryDecompress(rawRankingsData);
 const rankingsItems = rankingsData.Items || [];
 
 // Find user in rankings

package/functions/generic-api/user-api/helpers/fetch/on_demand_fetch_helpers.js
@@ -108,9 +108,10 @@ async function requestPiFetch(req, res, dependencies, config) {
 });
 
 // Update user rate limit (use effective CID)
-
+// Use new path: PopularInvestors/{piCid}/userFetchRequests/{userCid}
+const userRequestRef = db.collection('PopularInvestors')
 .doc(String(piCidNum))
-.collection('
+.collection('userFetchRequests')
 .doc(String(requestUserCid));
 
 await userRequestRef.set({
@@ -150,11 +151,13 @@ async function requestPiFetch(req, res, dependencies, config) {
 requestId,
 requestedBy: requestUserCid, // Use effective CID
 actualRequestedBy: Number(userCid), // Track actual developer CID
+userType: 'POPULAR_INVESTOR', // Explicitly set userType for task engine
 metadata: {
 onDemand: true,
 targetCid: piCidNum, // Target specific user for optimization
 requestedAt: now.toISOString(),
-isImpersonating: isImpersonating || false
+isImpersonating: isImpersonating || false,
+userType: 'POPULAR_INVESTOR' // Also in metadata for consistency
 }
 };
 
@@ -421,10 +424,10 @@ async function getPiFetchStatus(req, res, dependencies, config) {
 async function checkRateLimits(db, userCid, piCid, logger) {
 const now = Date.now();
 
-// Check user rate limit
-const userRequestRef = db.collection('
+// Check user rate limit - use new path: PopularInvestors/{piCid}/userFetchRequests/{userCid}
+const userRequestRef = db.collection('PopularInvestors')
 .doc(String(piCid))
-.collection('
+.collection('userFetchRequests')
 .doc(String(userCid));
 
 const userRequestDoc = await userRequestRef.get();
@@ -523,7 +526,9 @@ async function getPiUsername(db, piCid, config, logger) {
 return null;
 }
 
-const
+const rawRankingsData = rankingsDoc.data();
+// Decompress if needed
+const rankingsData = tryDecompress(rawRankingsData);
 const rankingsItems = rankingsData.Items || [];
 
 // Search for the PI in the rankings items

package/functions/generic-api/user-api/helpers/notifications/notification_helpers.js
@@ -162,6 +162,47 @@ async function sendOnDemandNotification(db, logger, userCid, type, title, messag
 }
 }
 
+/**
+* Send progress notification during task engine processing
+*/
+async function notifyTaskEngineProgress(db, logger, requestingUserCid, requestId, username, stage, dataType = null, options = {}) {
+if (!requestingUserCid) return; // No user to notify
+
+let title = 'Data Sync In Progress';
+let message = `Syncing data for ${username}...`;
+
+if (stage === 'started') {
+title = 'Data Sync Started';
+message = `Started syncing data for ${username}. This may take a few minutes.`;
+} else if (stage === 'portfolio_complete') {
+title = 'Portfolio Data Synced';
+message = `Portfolio data for ${username} has been fetched and stored.`;
+} else if (stage === 'history_complete') {
+title = 'Trade History Synced';
+message = `Trade history for ${username} has been fetched and stored.`;
+} else if (stage === 'social_complete') {
+title = 'Social Posts Synced';
+message = `Social posts for ${username} have been fetched and stored.`;
+} else if (stage === 'indexing') {
+title = 'Indexing Data';
+message = `Indexing data for ${username}...`;
+} else if (stage === 'computing') {
+title = 'Computing Metrics';
+message = `Computing metrics for ${username}...`;
+}
+
+await sendOnDemandNotification(db, logger, requestingUserCid, 'progress', title, message, {
+requestId,
+username,
+stage,
+dataType,
+notificationType: 'syncProcesses'
+}, {
+...options,
+notificationType: 'syncProcesses'
+});
+}
+
 /**
 * Send notification when task engine completes data fetch
 */
@@ -172,15 +213,31 @@ async function notifyTaskEngineComplete(db, logger, requestingUserCid, requestId
 const title = success
 ? 'Data Sync Complete'
 : 'Data Sync Failed';
-
-
-
+
+// Build a more detailed message based on what succeeded
+let message = '';
+if (success && typeof success === 'object') {
+const completed = [];
+if (success.portfolio) completed.push('portfolio');
+if (success.history) completed.push('trade history');
+if (success.social) completed.push('social posts');
+
+if (completed.length > 0) {
+message = `Your data sync for ${username} has completed. ${completed.join(', ')} data ${completed.length === 1 ? 'has' : 'have'} been stored.`;
+} else {
+message = `Your data sync for ${username} has completed, but no data was stored.`;
+}
+} else if (success) {
+message = `Your data sync for ${username} has completed. Portfolio, history, and social data have been stored.`;
+} else {
+message = error || 'An error occurred while syncing your data. Please try again.';
+}
 
 await sendOnDemandNotification(db, logger, requestingUserCid, type, title, message, {
 requestId,
 username,
 stage: 'task_engine',
-success,
+success: typeof success === 'object' ? success : { portfolio: success, history: success, social: success },
 notificationType: 'syncProcesses'
 }, {
 ...options,
@@ -482,6 +539,7 @@ async function getNotificationHistory(req, res, dependencies, config) {
 
 module.exports = {
 sendOnDemandNotification,
+notifyTaskEngineProgress,
 notifyTaskEngineComplete,
 notifyComputationComplete,
 notifyPIDataRefreshed,

package/functions/generic-api/user-api/helpers/search/pi_request_helpers.js
@@ -244,7 +244,10 @@ async function checkPisInRankingsLegacy(db, items, config, logger) {
 };
 }
 
-const
+const rawRankingsData = rankingsDoc.data();
+// Decompress if needed
+const { tryDecompress } = require('../core/compression_helpers');
+const rankingsData = tryDecompress(rawRankingsData);
 const rankingsItems = rankingsData.Items || [];
 
 // Create a set of CIDs that exist in rankings

package/functions/generic-api/user-api/helpers/search/pi_search_helpers.js
@@ -4,6 +4,7 @@
 */
 
 const { findLatestRankingsDate } = require('../core/data_lookup_helpers');
+const { tryDecompress } = require('../core/compression_helpers');
 
 /**
 * GET /user/search/pis
@@ -32,7 +33,9 @@ async function searchPopularInvestors(req, res, dependencies, config) {
 return res.status(404).json({ error: "Rankings data not available" });
 }
 
-const
+const rawRankingsData = rankingsDoc.data();
+// Decompress if needed
+const rankingsData = tryDecompress(rawRankingsData);
 const rankingsItems = rankingsData.Items || [];
 
 // Search by username (case-insensitive, partial match)

package/functions/generic-api/user-api/helpers/watchlist/watchlist_generation_helpers.js
@@ -161,7 +161,10 @@ async function autoGenerateWatchlist(req, res, dependencies, config) {
 return res.status(404).json({ error: "Rankings data not available" });
 }
 
-const
+const rawRankingsData = rankingsDoc.data();
+// Decompress if needed
+const { tryDecompress } = require('../core/compression_helpers');
+const rankingsData = tryDecompress(rawRankingsData);
 const rankingsItems = rankingsData.Items || [];
 const rankingsMap = new Map();
 for (const item of rankingsItems) {

package/functions/task-engine/helpers/data_storage_helpers.js
@@ -204,6 +204,21 @@ async function storePopularInvestorPortfolio({ db, logger, collectionRegistry, c
 
 await rootDataRef.set(portfolioDoc, { merge: false });
 
+// 3. Store latest snapshot to user-centric collection (for fallback)
+const { getCollectionPath } = collectionRegistry || {};
+if (!getCollectionPath) {
+throw new Error('collectionRegistry.getCollectionPath is required');
+}
+// Path: PopularInvestors/{cid}/portfolio/latest (4 segments)
+const userLatestPath = getCollectionPath('popularInvestors', 'portfolio', { piCid: String(cid) });
+const userLatestRef = db.doc(userLatestPath);
+
+await userLatestRef.set({
+...portfolioDoc,
+date: date,
+cid: String(cid)
+}, { merge: false });
+
 logger.log('INFO', `[DataStorage] Stored portfolio for PI ${cid} (date: ${date})`);
 }
 
@@ -219,7 +234,7 @@ async function storePopularInvestorPortfolio({ db, logger, collectionRegistry, c
 * @returns {Promise<void>}
 */
 async function storePopularInvestorTradeHistory({ db, logger, collectionRegistry, cid, date, historyData }) {
-// Store to root data collection (for computations)
+// 1. Store to root data collection (for computations)
 // Structure: PopularInvestorTradeHistoryData/{date}/{cid}/{cid}
 const rootDataRef = db.collection('PopularInvestorTradeHistoryData')
 .doc(date)
@@ -232,6 +247,22 @@ async function storePopularInvestorTradeHistory({ db, logger, collectionRegistry
 cid: String(cid)
 }, { merge: false });
 
+// 2. Store latest snapshot to user-centric collection (for fallback)
+const { getCollectionPath } = collectionRegistry || {};
+if (!getCollectionPath) {
+throw new Error('collectionRegistry.getCollectionPath is required');
+}
+// Path: PopularInvestors/{cid}/tradeHistory/latest (4 segments)
+const userLatestPath = getCollectionPath('popularInvestors', 'tradeHistory', { piCid: String(cid) });
+const userLatestRef = db.doc(userLatestPath);
+
+await userLatestRef.set({
+...historyData,
+fetchedAt: FieldValue.serverTimestamp(),
+date: date,
+cid: String(cid)
+}, { merge: false });
+
 logger.log('INFO', `[DataStorage] Stored trade history for PI ${cid} (date: ${date})`);
 }
 
@@ -247,7 +278,7 @@ async function storePopularInvestorTradeHistory({ db, logger, collectionRegistry
 * @returns {Promise<void>}
 */
 async function storePopularInvestorSocialPosts({ db, logger, collectionRegistry, cid, date, posts }) {
-// Store to root data collection (for computations)
+// 1. Store to root data collection (for computations)
 // Structure: PopularInvestorSocialPostData/{date}/{cid}/{cid}
 const rootDataRef = db.collection('PopularInvestorSocialPostData')
 .doc(date)
@@ -268,6 +299,42 @@ async function storePopularInvestorSocialPosts({ db, logger, collectionRegistry,
 postCount: posts.length
 }, { merge: false });
 
+// 2. Store latest posts to user-centric collection (for fallback)
+const { getCollectionPath } = collectionRegistry || {};
+if (!getCollectionPath) {
+throw new Error('collectionRegistry.getCollectionPath is required');
+}
+const userPostsRef = db.collection(
+getCollectionPath('popularInvestors', 'socialPosts', { piCid: String(cid) })
+);
+
+// Store each post individually in user-centric collection
+const batch = db.batch();
+let batchCount = 0;
+const MAX_BATCH_SIZE = 500;
+
+for (const post of posts) {
+const postId = post.id || post.postId;
+if (!postId) continue;
+
+const postRef = userPostsRef.doc(postId);
+batch.set(postRef, {
+...post,
+fetchedAt: FieldValue.serverTimestamp(),
+date: date
+}, { merge: false });
+
+batchCount++;
+if (batchCount >= MAX_BATCH_SIZE) {
+await batch.commit();
+batchCount = 0;
+}
+}
+
+if (batchCount > 0) {
+await batch.commit();
+}
+
 logger.log('INFO', `[DataStorage] Stored ${posts.length} social posts for PI ${cid} (date: ${date})`);
 }
 
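
The collectionRegistry.getCollectionPath lookup used by the three storage helpers above is not included in this diff. Going only by the in-code comments (PopularInvestors/{cid}/portfolio/latest, PopularInvestors/{cid}/tradeHistory/latest) and the fact that the social-posts call wraps the result in db.collection(...), a hypothetical registry entry could look like the sketch below; the names and structure are assumptions, not the package's actual registry.

// Hypothetical sketch only - the real collection registry is not shown in this diff.
const PI_PATHS = {
  portfolio:    ({ piCid }) => `PopularInvestors/${piCid}/portfolio/latest`,    // 4-segment document path
  tradeHistory: ({ piCid }) => `PopularInvestors/${piCid}/tradeHistory/latest`, // 4-segment document path
  socialPosts:  ({ piCid }) => `PopularInvestors/${piCid}/socialPosts`          // 3-segment collection path
};

function getCollectionPath(group, key, params = {}) {
  if (group !== 'popularInvestors' || !PI_PATHS[key]) {
    throw new Error(`Unknown collection path: ${group}.${key}`);
  }
  return PI_PATHS[key](params);
}

// Example: getCollectionPath('popularInvestors', 'portfolio', { piCid: '12345' })
// -> 'PopularInvestors/12345/portfolio/latest', usable with db.doc(...).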

package/functions/task-engine/helpers/popular_investor_helpers.js
@@ -7,7 +7,7 @@
 const crypto = require('crypto');
 const { shouldTryProxy, recordProxyOutcome, getFailureCount, getMaxFailures } = require('../utils/proxy_circuit_breaker');
 const { FieldValue } = require('@google-cloud/firestore');
-const { notifyTaskEngineComplete, notifyPIDataRefreshed } = require('../../generic-api/user-api/helpers/notifications/notification_helpers');
+const { notifyTaskEngineComplete, notifyTaskEngineProgress, notifyPIDataRefreshed } = require('../../generic-api/user-api/helpers/notifications/notification_helpers');
 const { conditionallyRunRootDataIndexer } = require('./root_data_indexer_helpers');
 
 const {
@@ -164,6 +164,14 @@ async function handleGenericUserUpdate(taskData, config, dependencies, isPI) {
 const reqRef = db.collection(source === 'on_demand_sync' ? 'user_sync_requests' : 'pi_fetch_requests')
 .doc(String(cid)).collection('requests').doc(requestId);
 await reqRef.update({ status: 'processing', startedAt: FieldValue.serverTimestamp() });
+
+// Send progress notification when task starts
+if (metadata?.requestingUserCid) {
+notifyTaskEngineProgress(db, logger, metadata.requestingUserCid, requestId, username, 'started', null, {
+collectionRegistry: dependencies.collectionRegistry,
+config
+}).catch(() => {});
+}
 } catch(e) {}
 }
 
@@ -173,15 +181,44 @@ async function handleGenericUserUpdate(taskData, config, dependencies, isPI) {
 
 try {
 // 2. Execute Fetches (Intelligent Skipping)
-if (components.portfolio !== false)
-
+if (components.portfolio !== false) {
+success.portfolio = await processPortfolio(context, config, runContext, isPI);
+// Send progress notification after portfolio completes
+if (requestId && metadata?.requestingUserCid && success.portfolio) {
+notifyTaskEngineProgress(db, logger, metadata.requestingUserCid, requestId, username, 'portfolio_complete', 'portfolio', {
+collectionRegistry: dependencies.collectionRegistry,
+config
+}).catch(() => {});
+}
+} else {
+logger.log('INFO', `[Update] Skipped Portfolio for ${username}`);
+}
 
-if (components.tradeHistory !== false)
-
+if (components.tradeHistory !== false) {
+success.history = await processHistory(context, config, runContext, isPI);
+// Send progress notification after history completes
+if (requestId && metadata?.requestingUserCid && success.history) {
+notifyTaskEngineProgress(db, logger, metadata.requestingUserCid, requestId, username, 'history_complete', 'tradeHistory', {
+collectionRegistry: dependencies.collectionRegistry,
+config
+}).catch(() => {});
+}
+} else {
+logger.log('INFO', `[Update] Skipped History for ${username}`);
+}
 
 // Social is usually always fetched on updates unless explicitly disabled in data
 const includeSocial = taskData.data?.includeSocial !== false;
-if (includeSocial)
+if (includeSocial) {
+success.social = await processSocial(context, config, runContext, isPI);
+// Send progress notification after social completes
+if (requestId && metadata?.requestingUserCid && success.social) {
+notifyTaskEngineProgress(db, logger, metadata.requestingUserCid, requestId, username, 'social_complete', 'socialPosts', {
+collectionRegistry: dependencies.collectionRegistry,
+config
+}).catch(() => {});
+}
+}
 
 // 3. Post-Processing (Notifications & Indexing)
 logger.log('SUCCESS', `[Update] Complete for ${username}. P:${success.portfolio} H:${success.history} S:${success.social}`);
@@ -239,49 +276,130 @@ async function finalizeOnDemandRequest(deps, config, taskData, isPI, success, to
 .doc(String(cid)).collection('requests').doc(requestId);
 
 await reqRef.update({ status: 'indexing', rawDataStoredAt: FieldValue.serverTimestamp() });
+
+// Send progress notification for indexing
+if (metadata?.requestingUserCid) {
+notifyTaskEngineProgress(db, logger, metadata.requestingUserCid, requestId, username, 'indexing', null, {
+collectionRegistry: deps.collectionRegistry,
+config
+}).catch(() => {});
+}
 
 const dataTypes = [];
 if (success.portfolio) dataTypes.push(isPI ? 'piPortfolios' : 'signedInUserPortfolio');
 if (success.history) dataTypes.push(isPI ? 'piHistory' : 'signedInUserHistory');
 if (success.social) dataTypes.push(isPI ? 'piSocial' : 'signedInUserSocial');
 
+// 1. Run Root Data Indexer and wait for completion
+let indexerCompleted = false;
 try {
-await conditionallyRunRootDataIndexer({
+const indexerResult = await conditionallyRunRootDataIndexer({
 db, logger, dateStr: today,
 rootDataIndexerConfig: config.rootDataIndexer,
 dependencies: deps, counterRef: null, dataTypesRun: dataTypes
 });
-
+
+// indexerResult is true if it ran, false if skipped (already indexed)
+// Both cases mean the indexer is "complete" for our purposes
+indexerCompleted = true;
+logger.log('INFO', `[On-Demand] Root data indexer ${indexerResult ? 'completed' : 'skipped (already indexed)'} for ${today}`);
+
+await reqRef.update({
+status: 'computing',
+indexedAt: FieldValue.serverTimestamp(),
+indexerCompleted: true,
+indexerRan: indexerResult
+});
+
+// Send progress notification for computing
+if (metadata?.requestingUserCid) {
+notifyTaskEngineProgress(db, logger, metadata.requestingUserCid, requestId, username, 'computing', null, {
+collectionRegistry: deps.collectionRegistry,
+config
+}).catch(() => {});
+}
 } catch (e) {
-logger.log('ERROR', `[
-await reqRef.update({
+logger.log('ERROR', `[On-Demand] Root data indexer failed: ${e.message}`);
+await reqRef.update({
+status: 'indexer_failed',
+indexerError: e.message,
+indexerFailedAt: FieldValue.serverTimestamp()
+});
+// Don't continue to computation if indexer failed - data might not be properly indexed
+// Log error but don't throw - we'll skip computations and let the function complete
+logger.log('WARN', `[On-Demand] Skipping computation trigger due to indexer failure`);
+indexerCompleted = false;
 }
 
-// 2. Trigger Computations
-if (pubsub && config.computationSystem) {
+// 2. Trigger Computations (only if root data indexer completed successfully)
+if (indexerCompleted && pubsub && config.computationSystem) {
 const { triggerComputationWithDependencies } = require('../../computation-system/helpers/on_demand_helpers');
-const comps = isPI
-? ['PopularInvestorProfileMetrics']
-: ['SignedInUserProfileMetrics', 'SignedInUserCopiedList', 'SignedInUserCopiedPIs', 'SignedInUserPastCopies'];
 
-//
-
-
-
+// Use userType from metadata if available, otherwise fall back to isPI
+const userType = metadata?.userType || (isPI ? 'POPULAR_INVESTOR' : 'SIGNED_IN_USER');
+
+// Determine which computations to run based on userType
+const comps = [];
+if (userType === 'POPULAR_INVESTOR') {
+comps.push('PopularInvestorProfileMetrics');
+// PIs might also be signed-in users
+const userDoc = await db.collection('signed_in_users').doc(String(cid)).get();
+if (userDoc.exists) {
+comps.push('SignedInUserPIPersonalizedMetrics');
+}
+} else if (userType === 'SIGNED_IN_USER') {
+comps.push('SignedInUserProfileMetrics', 'SignedInUserCopiedList', 'SignedInUserCopiedPIs', 'SignedInUserPastCopies');
+} else {
+// Fallback to isPI-based logic for backward compatibility
+if (isPI) {
+comps.push('PopularInvestorProfileMetrics');
+const userDoc = await db.collection('signed_in_users').doc(String(cid)).get();
+if (userDoc.exists) comps.push('SignedInUserPIPersonalizedMetrics');
+} else {
+comps.push('SignedInUserProfileMetrics', 'SignedInUserCopiedList', 'SignedInUserCopiedPIs', 'SignedInUserPastCopies');
+}
 }
 
+logger.log('INFO', `[On-Demand] Triggering ${comps.length} computations for ${userType} (${username}): ${comps.join(', ')}`);
+
 for (const comp of comps) {
-
-
-
-
+try {
+await triggerComputationWithDependencies(comp, today, deps, config.computationSystem, {
+triggerReason: source,
+requestId,
+userCid: cid,
+username,
+onDemand: true,
+requestingUserCid: metadata?.requestingUserCid,
+userType: userType // Pass userType explicitly
+});
+logger.log('INFO', `[On-Demand] Triggered computation: ${comp} for ${today}`);
+} catch (compError) {
+logger.log('ERROR', `[On-Demand] Failed to trigger computation ${comp}: ${compError.message}`);
+// Continue with other computations even if one fails
+}
 }
+
+// Update request status to indicate computations were triggered
+await reqRef.update({
+computationsTriggered: true,
+computationsTriggeredAt: FieldValue.serverTimestamp(),
+triggeredComputations: comps
+});
+} else if (!indexerCompleted) {
+logger.log('WARN', `[On-Demand] Skipping computation trigger - root data indexer did not complete`);
+} else if (!pubsub || !config.computationSystem) {
+logger.log('WARN', `[On-Demand] Skipping computation trigger - pubsub or computationSystem config missing`);
 }
 
-// 3. Notify User (if applicable)
+// 3. Notify User (if applicable) - Send success notification with detailed status
 if (metadata?.requestingUserCid) {
-
-
+// Check all data types for accurate success status
+const anySuccess = success.portfolio || success.history || success.social;
+notifyTaskEngineComplete(db, logger, metadata.requestingUserCid, requestId, username, success, anySuccess ? null : "Data fetch failed", {
+collectionRegistry: deps.collectionRegistry,
+config
+}).catch(()=>{});
 }
 }
 