bulltrackers-module 1.0.525 → 1.0.527
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/data/AvailabilityChecker.js +14 -15
- package/functions/computation-system/executors/StandardExecutor.js +24 -6
- package/functions/computation-system/utils/data_loader.js +128 -144
- package/functions/core/utils/firestore_utils.js +74 -151
- package/functions/dispatcher/helpers/dispatch_helpers.js +37 -3
- package/functions/fetch-popular-investors/helpers/fetch_helpers.js +47 -3
- package/functions/generic-api/user-api/helpers/core/path_resolution_helpers.js +1 -1
- package/functions/generic-api/user-api/helpers/core/user_status_helpers.js +4 -1
- package/functions/generic-api/user-api/helpers/fetch/on_demand_fetch_helpers.js +12 -7
- package/functions/generic-api/user-api/helpers/notifications/notification_helpers.js +62 -4
- package/functions/generic-api/user-api/helpers/search/pi_request_helpers.js +4 -1
- package/functions/generic-api/user-api/helpers/search/pi_search_helpers.js +4 -1
- package/functions/generic-api/user-api/helpers/watchlist/watchlist_generation_helpers.js +4 -1
- package/functions/orchestrator/helpers/update_helpers.js +21 -14
- package/functions/task-engine/handler_creator.js +22 -69
- package/functions/task-engine/helpers/data_storage_helpers.js +69 -2
- package/functions/task-engine/helpers/popular_investor_helpers.js +342 -1254
- package/functions/task-engine/helpers/social_helpers.js +62 -42
- package/functions/task-engine/helpers/update_helpers.js +180 -118
- package/package.json +1 -1
package/functions/computation-system/data/AvailabilityChecker.js

@@ -1,7 +1,7 @@
 /**
  * @fileoverview Checks availability of root data via the Root Data Index.
  * REFACTORED: Fully supports granular flags for PI, Signed-In Users, Rankings, and Verification.
- * FIXED:
+ * FIXED: Removed permissive fallbacks to enforce strict userType availability rules.
  * UPDATED: Added strict social data checking for Popular Investors and Signed-In Users.
  */
 const { normalizeName } = require('../utils/utils');
@@ -29,7 +29,7 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
       else if (userType === 'popular_investor' && rootDataStatus.piPortfolios) isAvailable = true;
       else if (userType === 'signed_in_user' && rootDataStatus.signedInUserPortfolio) isAvailable = true;
       else if (userType === 'all' && rootDataStatus.hasPortfolio) isAvailable = true;
-
+      // REMOVED: Unconditional fallback that bypassed strict userType checks
 
       if (!isAvailable) {
         if (userType === 'speculator') missing.push('speculatorPortfolio');
@@ -47,7 +47,7 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
       else if (userType === 'popular_investor' && rootDataStatus.piHistory) isAvailable = true;
       else if (userType === 'signed_in_user' && rootDataStatus.signedInUserHistory) isAvailable = true;
       else if (userType === 'all' && rootDataStatus.hasHistory) isAvailable = true;
-
+      // REMOVED: Unconditional fallback that bypassed strict userType checks
 
       if (!isAvailable) {
         if (userType === 'speculator') missing.push('speculatorHistory');
@@ -85,14 +85,17 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
     }
     // [UPDATED] Strict Social Data Checking
     else if (dep === 'social') {
-
-
-
-      } else if (userType === 'signed_in_user'
-        isAvailable = true;
-
-
-        isAvailable = true;
+      // Strictly check based on userType to prevent fallback to generic social data
+      if (userType === 'popular_investor') {
+        if (rootDataStatus.hasPISocial) isAvailable = true;
+      } else if (userType === 'signed_in_user') {
+        if (rootDataStatus.hasSignedInSocial) isAvailable = true;
+      } else {
+        // For 'all', 'normal', 'speculator' or undefined, accept generic social data
+        if (rootDataStatus.hasSocial) isAvailable = true;
+      }
+
+      if (isAvailable) {
         available.push('social');
       } else {
         if (userType === 'popular_investor') missing.push('piSocial');
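Taken in isolation, the new social branch reduces to a small lookup. A minimal sketch (the helper name is ours, not the package's; the rootDataStatus flags are the ones visible in this hunk):

// Sketch of the strict social-availability rule above (illustrative helper;
// the flags hasPISocial / hasSignedInSocial / hasSocial come from this diff).
function isSocialAvailable(userType, rootDataStatus) {
  if (userType === 'popular_investor') return !!rootDataStatus.hasPISocial;
  if (userType === 'signed_in_user') return !!rootDataStatus.hasSignedInSocial;
  // 'all', 'normal', 'speculator' or undefined: generic social data suffices
  return !!rootDataStatus.hasSocial;
}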
@@ -144,10 +147,7 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
   }
 
   // [FIX] Enforce canHaveMissingRoots logic:
-  // - If canHaveMissingRoots: false, ALL required rootdata must be available
-  // - If canHaveMissingRoots: true, AT LEAST ONE required rootdata must be available (not zero)
   if (canHaveMissingRoots) {
-    // Must have at least one available rootdata type
     const canRun = available.length > 0;
     return { canRun, missing, available };
   }
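Stated as code, the gating rule reads as below. Only the canHaveMissingRoots branch appears in this extract; the strict branch is inferred from the comments removed above, so treat it as an assumption:

// Sketch of the canHaveMissingRoots gate (strict branch inferred, not shown in the diff).
function gateRootData(canHaveMissingRoots, available, missing) {
  if (canHaveMissingRoots) {
    // At least one required root-data type must be available
    return { canRun: available.length > 0, missing, available };
  }
  // Assumed strict behavior: every required root-data type must be available
  return { canRun: missing.length === 0, missing, available };
}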
@@ -246,7 +246,6 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
     piRankings: false, piPortfolios: false, piDeepPortfolios: false, piHistory: false,
     signedInUserPortfolio: false, signedInUserHistory: false, signedInUserVerification: false,
     hasPISocial: false, hasSignedInSocial: false,
-    // New Root Data Types
     piRatings: false, piPageViews: false, watchlistMembership: false, piAlertHistory: false
   }
 };
package/functions/computation-system/executors/StandardExecutor.js

@@ -17,12 +17,22 @@ class StandardExecutor {
   static async run(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps, skipStatusWrite = false) {
     const dStr = date.toISOString().slice(0, 10);
     const logger = deps.logger;
+
+    // [FIX] Determine required user types for this batch of calculations
+    const requiredUserTypes = new Set();
+    calcs.forEach(c => {
+      const type = (c.userType || 'ALL').toUpperCase();
+      requiredUserTypes.add(type);
+    });
+    // If any calc requires 'ALL' (or has no userType), we fetch everything
+    const userTypeArray = requiredUserTypes.has('ALL') ? null : Array.from(requiredUserTypes);
 
     const fullRoot = { ...rootData };
     if (calcs.some(c => c.isHistorical)) {
       const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
       const prevStr = prev.toISOString().slice(0, 10);
-      fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
+      // [FIX] Pass userTypeArray to filter yesterday's data loading
+      fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr, userTypeArray);
     }
 
     const state = {};
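The new userTypeArray derivation is easiest to see with concrete inputs (the calc objects below are hypothetical):

// Hypothetical batch: one PI calc plus one calc with no userType at all.
const calcs = [{ userType: 'popular_investor' }, { name: 'genericCalc' }];
const requiredUserTypes = new Set(calcs.map(c => (c.userType || 'ALL').toUpperCase()));
// => Set { 'POPULAR_INVESTOR', 'ALL' } — 'ALL' is present, so no filter applies
const userTypeArray = requiredUserTypes.has('ALL') ? null : Array.from(requiredUserTypes);
console.log(userTypeArray); // null -> loaders fetch every user-type collection
// A batch containing only PI calcs would instead yield ['POPULAR_INVESTOR'],
// letting the loaders skip Normal, Speculator and Signed-In collections.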
@@ -36,10 +46,11 @@ class StandardExecutor {
       } catch (e) { logger.log('WARN', `Failed to init ${c.name}`); }
     }
 
-    return await StandardExecutor.streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps, skipStatusWrite);
+    return await StandardExecutor.streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps, skipStatusWrite, userTypeArray);
   }
 
-  static async streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps, skipStatusWrite) {
+  // [UPDATED] Accepts requiredUserTypes param
+  static async streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps, skipStatusWrite, requiredUserTypes = null) {
     const { logger } = deps;
     const calcs = Object.values(state).filter(c => c && c.manifest);
     const streamingCalcs = calcs.filter(c => c.manifest.rootDataDependencies.includes('portfolio') || c.manifest.rootDataDependencies.includes('history'));
@@ -80,11 +91,17 @@ class StandardExecutor {
       earliestDates = await getEarliestDataDates(config, deps);
     }
 
-    const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
+    // [FIX] Pass requiredUserTypes to streamPortfolioData
+    // Note: portfolioRefs (for today) might be null if not pre-fetched, allowing streamPortfolioData to fetch filtered refs
+    const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs, requiredUserTypes);
+
     const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
+    // Yesterday's refs were already filtered in run(), so we pass them directly
     const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
+
     const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
-    const tH_iter = (needsTradingHistory) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;
+    // [FIX] Pass requiredUserTypes to streamHistoryData
+    const tH_iter = (needsTradingHistory) ? streamHistoryData(config, deps, dateStr, historyRefs, requiredUserTypes) : null;
 
     let yP_chunk = {}, tH_chunk = {};
     let usersSinceLastFlush = 0;
@@ -139,7 +156,8 @@ class StandardExecutor {
 
     return { successUpdates: aggregatedSuccess, failureReport: aggregatedFailures };
   }
-
+
+  // ... rest of the file (flushBuffer, mergeReports, executePerUser) ...
   static async flushBuffer(state, dateStr, passName, config, deps, shardIndexMap, executionStats, mode, skipStatusWrite, isInitialWrite = false) {
     // ... (No changes to flushBuffer)
     const transformedState = {};
package/functions/computation-system/utils/data_loader.js

@@ -1,6 +1,7 @@
 /**
  * @fileoverview Data loader sub-pipes for the Computation System.
  * REFACTORED: Now stateless and receive dependencies.
+ * FIXED: Added strict userType filtering to prevent fetching unnecessary data (e.g. Normal users for PI calcs).
  * --- NEW: Updated to read PI/Signed-In data from SHARDS (Parts) instead of individual docs. ---
  * --- NEW: Logic to merge Overall and Deep PI data from corresponding shards. ---
  * --- UPDATED: Added loaders for Rankings and Verification data. ---
@@ -22,80 +23,87 @@ function tryDecompress(data) {
 
 /** --- Data Loader Sub-Pipes (Stateless, Dependency-Injection) --- */
 
-/** Stage 1: Get portfolio part document references for a given date
-
+/** * Stage 1: Get portfolio part document references for a given date
+ * [UPDATED] Accepts requiredUserTypes to filter collections.
+ */
+async function getPortfolioPartRefs(config, deps, dateString, requiredUserTypes = null) {
   const { db, logger, calculationUtils, collectionRegistry } = deps;
   const { withRetry } = calculationUtils;
-  const { getCollectionPath } = collectionRegistry || {};
 
-
+  // Normalize required types. If null/empty or contains 'ALL', fetch everything.
+  const types = requiredUserTypes ? new Set(requiredUserTypes.map(t => t.toUpperCase())) : null;
+  const fetchAll = !types || types.size === 0 || types.has('ALL');
+
+  logger.log('INFO', `Getting portfolio part references for date: ${dateString}. Filter: ${fetchAll ? 'ALL' : Array.from(types).join(',')}`);
 
   const allPartRefs = [];
 
   // NEW STRUCTURE: Read from date-based collections (per-user documents)
-  // Structure: Collection/{date}/{cid}/{cid} where {date} is document, {cid} is subcollection, {cid} is document
   try {
-    // Signed-In User Portfolios
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    // Signed-In User Portfolios
+    if (fetchAll || types.has('SIGNED_IN_USER')) {
+      const signedInPortCollectionName = 'SignedInUserPortfolioData';
+      const signedInPortDateDoc = db.collection(signedInPortCollectionName).doc(dateString);
+      const signedInPortSubcollections = await withRetry(
+        () => signedInPortDateDoc.listCollections(),
+        `listSignedInPortfolios(${dateString})`
+      );
+
+      signedInPortSubcollections.forEach(subcol => {
+        const cid = subcol.id;
+        allPartRefs.push({
+          ref: subcol.doc(cid),
+          type: 'SIGNED_IN_USER',
+          cid: cid,
+          collectionType: 'NEW_STRUCTURE'
+        });
       });
-    }
-
-    // Popular Investor Portfolios: PopularInvestorPortfolioData/{date}/{cid}/{cid}
-    const piPortCollectionName = 'PopularInvestorPortfolioData';
-    const piPortDateDoc = db.collection(piPortCollectionName).doc(dateString);
-    const piPortSubcollections = await withRetry(
-      () => piPortDateDoc.listCollections(),
-      `listPIPortfolios(${dateString})`
-    );
+    }
 
-
-
-    const
-
-
-
-
-
+    // Popular Investor Portfolios
+    if (fetchAll || types.has('POPULAR_INVESTOR')) {
+      const piPortCollectionName = 'PopularInvestorPortfolioData';
+      const piPortDateDoc = db.collection(piPortCollectionName).doc(dateString);
+      const piPortSubcollections = await withRetry(
+        () => piPortDateDoc.listCollections(),
+        `listPIPortfolios(${dateString})`
+      );
+
+      piPortSubcollections.forEach(subcol => {
+        const cid = subcol.id;
+        allPartRefs.push({
+          ref: subcol.doc(cid),
+          type: 'POPULAR_INVESTOR',
+          cid: cid,
+          collectionType: 'NEW_STRUCTURE'
+        });
      });
-    }
-
-    logger.log('INFO', `Found ${allPartRefs.length} portfolio refs from new structure for ${dateString}`);
+    }
   } catch (newStructError) {
     logger.log('WARN', `Failed to load from new structure, falling back to legacy: ${newStructError.message}`);
   }
 
-  // LEGACY STRUCTURE: Read from block-based collections
-  const collectionsToQuery = [
-
-
-    config.
-
-
+  // LEGACY STRUCTURE: Read from block-based collections
+  const collectionsToQuery = [];
+
+  if ((fetchAll || types.has('NORMAL')) && config.normalUserPortfolioCollection)
+    collectionsToQuery.push({ name: config.normalUserPortfolioCollection, type: 'NORMAL' });
+
+  if ((fetchAll || types.has('SPECULATOR')) && config.speculatorPortfolioCollection)
+    collectionsToQuery.push({ name: config.speculatorPortfolioCollection, type: 'SPECULATOR' });
+
+  if ((fetchAll || types.has('POPULAR_INVESTOR')) && config.piPortfolioCollection)
+    collectionsToQuery.push({ name: config.piPortfolioCollection, type: 'POPULAR_INVESTOR' });
+
+  if ((fetchAll || types.has('SIGNED_IN_USER')) && config.signedInUsersCollection)
+    collectionsToQuery.push({ name: config.signedInUsersCollection, type: 'SIGNED_IN_USER' });
 
-  for (const collectionName of collectionsToQuery) {
+  for (const { name: collectionName, type: collectionType } of collectionsToQuery) {
     try {
-      // Assume standard structure: Collection -> Block(e.g. 19M) -> snapshots -> date -> parts
       const blockDocsQuery = db.collection(collectionName);
       const blockDocRefs = await withRetry(() => blockDocsQuery.listDocuments(), `listDocuments(${collectionName})`);
 
-      if (!blockDocRefs.length)
-        continue;
-      }
+      if (!blockDocRefs.length) continue;
 
      const partsPromises = blockDocRefs.map(blockDocRef => {
        const partsCollectionRef = blockDocRef
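getPortfolioPartRefs and getHistoryPartRefs (Stage 6 below) repeat the same filter normalization; extracted here as a standalone sketch (the helper name is ours, not the package's):

// Shared normalization pattern (illustrative helper, not part of the package).
function normalizeUserTypeFilter(requiredUserTypes) {
  const types = requiredUserTypes
    ? new Set(requiredUserTypes.map(t => t.toUpperCase()))
    : null;
  const fetchAll = !types || types.size === 0 || types.has('ALL');
  return { types, fetchAll };
}

normalizeUserTypeFilter(null);                 // fetchAll: true  (no filter given)
normalizeUserTypeFilter([]);                   // fetchAll: true  (empty filter)
normalizeUserTypeFilter(['popular_investor']); // fetchAll: false, types: Set { 'POPULAR_INVESTOR' }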
@@ -108,14 +116,9 @@ async function getPortfolioPartRefs(config, deps, dateString) {
      const partDocArrays = await Promise.all(partsPromises);
 
      partDocArrays.forEach(partDocs => {
-        // Tag them so loadDataByRefs knows how to handle them
-        let type = 'PART';
-        if (collectionName === config.piPortfolioCollection) type = 'POPULAR_INVESTOR';
-        if (collectionName === config.signedInUsersCollection) type = 'SIGNED_IN_USER';
-
        allPartRefs.push(...partDocs.map(ref => ({
          ref,
-          type,
+          type: collectionType,
          collectionType: 'LEGACY'
        })));
      });
@@ -144,9 +147,6 @@ async function loadDataByRefs(config, deps, refObjects) {
 
    const snapshots = await withRetry(() => db.getAll(...refs), `getAll(batch ${Math.floor(i/batchSize)})`);
 
-    // --- Special Handling for PI Deep Data ---
-    // If we loaded a PI Overall Shard, we try to load the matching PI Deep Shard.
-    // Logic: Same Block, Same Date, Same Part ID.
    const deepFetchPromises = [];
 
    for (let j = 0; j < snapshots.length; j++) {
@@ -158,18 +158,13 @@ async function loadDataByRefs(config, deps, refObjects) {
      const rawData = doc.data();
      let chunkData;
 
-      // NEW STRUCTURE: Single user document per CID
      if (meta.collectionType === 'NEW_STRUCTURE') {
        const cid = meta.cid || doc.id;
-        // Data is stored directly in the document, not as a map
        const userData = tryDecompress(rawData);
-        // Convert to map format: { cid: data }
        chunkData = { [cid]: userData };
 
-        // Tag user type
        if (meta.type === 'POPULAR_INVESTOR') {
          chunkData[cid]._userType = 'POPULAR_INVESTOR';
-          // Check for deep positions in the same document
          if (chunkData[cid].deepPositions) {
            chunkData[cid].DeepPositions = chunkData[cid].deepPositions;
          }
@@ -179,40 +174,28 @@ async function loadDataByRefs(config, deps, refObjects) {
 
        deepFetchPromises.push(Promise.resolve(chunkData));
      } else {
-
-        chunkData = tryDecompress(rawData); // Map: { userId: data }
+        chunkData = tryDecompress(rawData);
 
        if (meta.type === 'POPULAR_INVESTOR' && config.piDeepPortfolioCollection) {
-
-          // Current: pi_portfolios_overall/19M/snapshots/{date}/parts/{part_X}
-          // Target: pi_portfolios_deep/19M/snapshots/{date}/parts/{part_X}
-
-          const pathSegments = doc.ref.path.split('/'); // [col, block, snap, date, parts, partId]
-          // Replace collection name with deep collection name
+          const pathSegments = doc.ref.path.split('/');
          const deepCollection = config.piDeepPortfolioCollection;
          const deepPath = `${deepCollection}/${pathSegments[1]}/${pathSegments[2]}/${pathSegments[3]}/${pathSegments[4]}/${pathSegments[5]}`;
 
-          // Fetch deeply
          deepFetchPromises.push(
            db.doc(deepPath).get().then(deepSnap => {
              if (deepSnap.exists) {
                const deepChunk = tryDecompress(deepSnap.data());
-                // Merge deep positions into overall data
                for (const [uid, pData] of Object.entries(chunkData)) {
                  if (deepChunk[uid] && deepChunk[uid].positions) {
                    pData.DeepPositions = deepChunk[uid].positions;
                  }
                }
              }
-              // Tag internal type for ContextFactory
              for (const pData of Object.values(chunkData)) {
                pData._userType = 'POPULAR_INVESTOR';
              }
              return chunkData;
-            }).catch(
-              // If deep fetch fails, return chunkData as is (graceful degradation)
-              return chunkData;
-            })
+            }).catch(() => chunkData)
          );
        } else if (meta.type === 'SIGNED_IN_USER') {
          for (const pData of Object.values(chunkData)) {
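The comments removed in the hunk above documented the deep-shard path rewrite; a worked example (the date and part ID are hypothetical, the collection and block names are taken from those removed comments):

// pi_portfolios_overall/{block}/snapshots/{date}/parts/{partId}
const overallPath = 'pi_portfolios_overall/19M/snapshots/2024-01-01/parts/part_3';
const pathSegments = overallPath.split('/'); // [col, block, 'snapshots', date, 'parts', partId]
const deepPath = ['pi_portfolios_deep', ...pathSegments.slice(1)].join('/');
// => 'pi_portfolios_deep/19M/snapshots/2024-01-01/parts/part_3'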
@@ -220,14 +203,12 @@ async function loadDataByRefs(config, deps, refObjects) {
          }
          deepFetchPromises.push(Promise.resolve(chunkData));
        } else {
-          // Standard Part
          deepFetchPromises.push(Promise.resolve(chunkData));
        }
      }
    }
 
    const resolvedChunks = await Promise.all(deepFetchPromises);
-
    resolvedChunks.forEach(chunk => {
      if (chunk && typeof chunk === 'object') {
        Object.assign(mergedPortfolios, chunk);
@@ -403,76 +384,76 @@ async function loadDailySocialPostInsights(config, deps, dateString) {
  return result;
}
 
-/** Stage 6: Get history part references for a given date
-
+/** * Stage 6: Get history part references for a given date
+ * [UPDATED] Accepts requiredUserTypes to filter collections.
+ */
+async function getHistoryPartRefs(config, deps, dateString, requiredUserTypes = null) {
  const { db, logger, calculationUtils, collectionRegistry } = deps;
  const { withRetry } = calculationUtils;
-  const { getCollectionPath } = collectionRegistry || {};
 
-
+  // Normalize required types
+  const types = requiredUserTypes ? new Set(requiredUserTypes.map(t => t.toUpperCase())) : null;
+  const fetchAll = !types || types.size === 0 || types.has('ALL');
+
+  logger.log('INFO', `Getting history part references for ${dateString}. Filter: ${fetchAll ? 'ALL' : Array.from(types).join(',')}`);
 
  const allPartRefs = [];
 
-  // NEW STRUCTURE
-  // Structure: Collection/{date}/{cid}/{cid}
+  // NEW STRUCTURE
  try {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      cid: cid,
-      collectionType: 'NEW_STRUCTURE'
+    if (fetchAll || types.has('SIGNED_IN_USER')) {
+      const signedInHistCollectionName = 'SignedInUserTradeHistoryData';
+      const signedInHistDateDoc = db.collection(signedInHistCollectionName).doc(dateString);
+      const signedInHistSubcollections = await withRetry(
+        () => signedInHistDateDoc.listCollections(),
+        `listSignedInHistory(${dateString})`
+      );
+      signedInHistSubcollections.forEach(subcol => {
+        allPartRefs.push({
+          ref: subcol.doc(subcol.id),
+          type: 'SIGNED_IN_USER',
+          cid: subcol.id,
+          collectionType: 'NEW_STRUCTURE'
+        });
      });
-    }
-
-    // Popular Investor History: PopularInvestorTradeHistoryData/{date}/{cid}/{cid}
-    const piHistCollectionName = 'PopularInvestorTradeHistoryData';
-    const piHistDateDoc = db.collection(piHistCollectionName).doc(dateString);
-    const piHistSubcollections = await withRetry(
-      () => piHistDateDoc.listCollections(),
-      `listPIHistory(${dateString})`
-    );
+    }
 
-
-    const
-    const
-
-
-
-
+    if (fetchAll || types.has('POPULAR_INVESTOR')) {
+      const piHistCollectionName = 'PopularInvestorTradeHistoryData';
+      const piHistDateDoc = db.collection(piHistCollectionName).doc(dateString);
+      const piHistSubcollections = await withRetry(
+        () => piHistDateDoc.listCollections(),
+        `listPIHistory(${dateString})`
+      );
+      piHistSubcollections.forEach(subcol => {
+        allPartRefs.push({
+          ref: subcol.doc(subcol.id),
+          type: 'POPULAR_INVESTOR',
+          cid: subcol.id,
+          collectionType: 'NEW_STRUCTURE'
+        });
      });
-    }
-
-    logger.log('INFO', `Found ${allPartRefs.length} history refs from new structure for ${dateString}`);
+    }
  } catch (newStructError) {
-    logger.log('WARN', `Failed to load from new structure
+    logger.log('WARN', `Failed to load from new structure: ${newStructError.message}`);
  }
 
-  // LEGACY STRUCTURE
-  const collectionsToQuery = [
-
-    config.
-
-    config.
-
+  // LEGACY STRUCTURE
+  const collectionsToQuery = [];
+  if ((fetchAll || types.has('NORMAL')) && config.normalUserHistoryCollection)
+    collectionsToQuery.push(config.normalUserHistoryCollection);
+  if ((fetchAll || types.has('SPECULATOR')) && config.speculatorHistoryCollection)
+    collectionsToQuery.push(config.speculatorHistoryCollection);
+  if ((fetchAll || types.has('POPULAR_INVESTOR')) && config.piHistoryCollection)
+    collectionsToQuery.push(config.piHistoryCollection);
+  if ((fetchAll || types.has('SIGNED_IN_USER')) && config.signedInHistoryCollection)
+    collectionsToQuery.push(config.signedInHistoryCollection);
 
  for (const collectionName of collectionsToQuery) {
    try {
      const blockDocsQuery = db.collection(collectionName);
      const blockDocRefs = await withRetry(() => blockDocsQuery.listDocuments(), `listDocuments(${collectionName})`);
-
-      if (!blockDocRefs.length) { continue; }
+      if (!blockDocRefs.length) continue;
 
      const partsPromises = blockDocRefs.map(blockDocRef => {
        const partsCollectionRef = blockDocRef.collection(config.snapshotsSubcollection || 'snapshots')
@@ -482,7 +463,6 @@ async function getHistoryPartRefs(config, deps, dateString) {
 
      const partDocArrays = await Promise.all(partsPromises);
      partDocArrays.forEach(partDocs => {
-        // History parts are standard, no deep merge needed usually
        allPartRefs.push(...partDocs.map(ref => ({
          ref,
          type: 'PART',
@@ -498,10 +478,12 @@ async function getHistoryPartRefs(config, deps, dateString) {
  return allPartRefs;
}
 
-/** Stage 7: Stream portfolio data in chunks
-
+/** * Stage 7: Stream portfolio data in chunks
+ * [UPDATED] Passes requiredUserTypes to getPortfolioPartRefs
+ */
+async function* streamPortfolioData(config, deps, dateString, providedRefs = null, requiredUserTypes = null) {
  const { logger } = deps;
-  const refs = providedRefs || (await getPortfolioPartRefs(config, deps, dateString));
+  const refs = providedRefs || (await getPortfolioPartRefs(config, deps, dateString, requiredUserTypes));
  if (refs.length === 0) { logger.log('WARN', `[streamPortfolioData] No portfolio refs found for ${dateString}. Stream is empty.`); return; }
 
  const batchSize = config.partRefBatchSize || 10;
@@ -515,10 +497,12 @@ async function* streamPortfolioData(config, deps, dateString, providedRefs = nul
  logger.log('INFO', `[streamPortfolioData] Finished streaming for ${dateString}.`);
}
 
-/** Stage 8: Stream history data in chunks
-
+/** * Stage 8: Stream history data in chunks
+ * [UPDATED] Passes requiredUserTypes to getHistoryPartRefs
+ */
+async function* streamHistoryData(config, deps, dateString, providedRefs = null, requiredUserTypes = null) {
  const { logger } = deps;
-  const refs = providedRefs || (await getHistoryPartRefs(config, deps, dateString));
+  const refs = providedRefs || (await getHistoryPartRefs(config, deps, dateString, requiredUserTypes));
  if (refs.length === 0) { logger.log('WARN', `[streamHistoryData] No history refs found for ${dateString}. Stream is empty.`); return; }
 
  const batchSize = config.partRefBatchSize || 10;
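End to end, these changes let a caller stream a single user type. A hedged usage sketch (it assumes each yielded chunk is a { cid: data } map, as loadDataByRefs above suggests; config and deps shapes follow the signatures in this diff):

// Usage sketch: stream only Popular Investor portfolios for one day.
// providedRefs = null makes streamPortfolioData fetch its own refs,
// already filtered to the requested user types.
async function countPIPortfolios(config, deps, dateString) {
  let count = 0;
  for await (const chunk of streamPortfolioData(config, deps, dateString, null, ['POPULAR_INVESTOR'])) {
    count += Object.keys(chunk).length; // assumed { cid: data } chunk shape
  }
  return count;
}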