bulltrackers-module 1.0.152 → 1.0.153
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/appscript-api/index.js +8 -38
- package/functions/computation-system/helpers/computation_pass_runner.js +38 -183
- package/functions/computation-system/helpers/orchestration_helpers.js +105 -326
- package/functions/computation-system/utils/data_loader.js +38 -133
- package/functions/computation-system/utils/schema_capture.js +7 -41
- package/functions/computation-system/utils/utils.js +37 -124
- package/functions/core/utils/firestore_utils.js +8 -46
- package/functions/core/utils/intelligent_header_manager.js +26 -128
- package/functions/core/utils/intelligent_proxy_manager.js +33 -171
- package/functions/core/utils/pubsub_utils.js +7 -24
- package/functions/dispatcher/helpers/dispatch_helpers.js +9 -30
- package/functions/dispatcher/index.js +7 -30
- package/functions/etoro-price-fetcher/helpers/handler_helpers.js +12 -80
- package/functions/fetch-insights/helpers/handler_helpers.js +18 -70
- package/functions/generic-api/helpers/api_helpers.js +28 -167
- package/functions/generic-api/index.js +49 -188
- package/functions/invalid-speculator-handler/helpers/handler_helpers.js +10 -47
- package/functions/orchestrator/helpers/discovery_helpers.js +1 -5
- package/functions/orchestrator/index.js +1 -6
- package/functions/price-backfill/helpers/handler_helpers.js +13 -69
- package/functions/social-orchestrator/helpers/orchestrator_helpers.js +5 -37
- package/functions/social-task-handler/helpers/handler_helpers.js +29 -186
- package/functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers.js +19 -78
- package/functions/task-engine/handler_creator.js +2 -8
- package/functions/task-engine/helpers/update_helpers.js +17 -83
- package/functions/task-engine/helpers/verify_helpers.js +11 -56
- package/functions/task-engine/utils/firestore_batch_manager.js +16 -67
- package/functions/task-engine/utils/task_engine_utils.js +6 -35
- package/index.js +45 -43
- package/package.json +1 -1

package/functions/computation-system/utils/data_loader.js

@@ -5,8 +5,6 @@
   * --- FIXED: streamPortfolioData and streamHistoryData now accept optional 'providedRefs' ---
   */

- // <<< FIX: REMOVED all top-level 'require' and 'dependencies' lines >>>
-
  /**
   * Sub-pipe: pipe.computationSystem.dataLoader.getPortfolioPartRefs
   * @param {object} config - The computation system configuration object.

@@ -20,34 +18,16 @@
  async function getPortfolioPartRefs(config, deps, dateString) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  logger.log('INFO', `Getting portfolio part references for date: ${dateString}`);
  const allPartRefs = [];
  const collectionsToQuery = [config.normalUserPortfolioCollection, config.speculatorPortfolioCollection];
-
  for (const collectionName of collectionsToQuery) {
-
-
-
-
-
-
- const partsPromises = blockDocRefs.map(blockDocRef => {
- const partsCollectionRef = blockDocRef.collection(config.snapshotsSubcollection).doc(dateString).collection(config.partsSubcollection);
- // Each call is individually retried
- return withRetry(() => partsCollectionRef.listDocuments(), `listDocuments(${partsCollectionRef.path})`);
- });
-
- // Wait for all parallel queries to finish
- const partDocArrays = await Promise.all(partsPromises);
-
- // Flatten the arrays of arrays into the final list
- partDocArrays.forEach(partDocs => {
- allPartRefs.push(...partDocs);
- });
- // --- END MODIFICATION ---
- }
-
+ const blockDocsQuery = db.collection(collectionName);
+ const blockDocRefs = await withRetry(() => blockDocsQuery.listDocuments(), `listDocuments(${collectionName})`);
+ if (!blockDocRefs.length) { logger.log('WARN', `No block documents in ${collectionName}`); continue; }
+ const partsPromises = blockDocRefs.map(blockDocRef => { const partsCollectionRef = blockDocRef.collection(config.snapshotsSubcollection).doc(dateString).collection(config.partsSubcollection); return withRetry(() => partsCollectionRef.listDocuments(), `listDocuments(${partsCollectionRef.path})`); });
+ const partDocArrays = await Promise.all(partsPromises);
+ partDocArrays.forEach(partDocs => { allPartRefs.push(...partDocs); }); }
  logger.log('INFO', `Found ${allPartRefs.length} portfolio part refs for ${dateString}`);
  return allPartRefs;
  }

@@ -56,23 +36,11 @@ async function getPortfolioPartRefs(config, deps, dateString) {
  async function loadDataByRefs(config, deps, refs) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  if (!refs || !refs.length) return {};
  const mergedPortfolios = {};
  const batchSize = config.partRefBatchSize || 50;
-
- for (
- const batchRefs = refs.slice(i, i + batchSize);
- const snapshots = await withRetry(() => db.getAll(...batchRefs), `getAll(batch ${Math.floor(i / batchSize)})`);
-
- for (const doc of snapshots) {
- if (!doc.exists) continue;
- const data = doc.data();
- if (data && typeof data === 'object') Object.assign(mergedPortfolios, data);
- else logger.log('WARN', `Doc ${doc.id} exists but data is not an object`, data);
- }
- }
-
+ for (let i = 0; i < refs.length; i += batchSize) { const batchRefs = refs.slice(i, i + batchSize); const snapshots = await withRetry(() => db.getAll(...batchRefs), `getAll(batch ${Math.floor(i / batchSize)})`);
+ for (const doc of snapshots) { if (!doc.exists) continue; const data = doc.data(); if (data && typeof data === 'object') Object.assign(mergedPortfolios, data); else logger.log('WARN', `Doc ${doc.id} exists but data is not an object`, data); } }
  return mergedPortfolios;
  }

@@ -80,7 +48,6 @@ async function loadDataByRefs(config, deps, refs) {
  async function loadFullDayMap(config, deps, partRefs) {
  const { logger } = deps;
  if (!partRefs.length) return {};
-
  logger.log('TRACE', `Loading full day map from ${partRefs.length} references...`);
  const fullMap = await loadDataByRefs(config, deps, partRefs);
  logger.log('TRACE', `Full day map loaded with ${Object.keys(fullMap).length} users`);

@@ -91,19 +58,15 @@ async function loadFullDayMap(config, deps, partRefs) {
  async function loadDailyInsights(config, deps, dateString) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  const insightsCollectionName = config.insightsCollectionName || 'daily_instrument_insights';
  logger.log('INFO', `Loading daily insights for ${dateString} from ${insightsCollectionName}`);
-
-
-
-
-
-
-
- } catch (error) {
- logger.log('ERROR', `Failed to load daily insights for ${dateString}`, { errorMessage: error.message });
- return null;
+ try { const docRef = db.collection(insightsCollectionName).doc(dateString);
+ const docSnap = await withRetry(() => docRef.get(), `getInsights(${dateString})`);
+ if (!docSnap.exists) { logger.log('WARN', `Insights not found for ${dateString}`); return null; }
+ logger.log('TRACE', `Successfully loaded insights for ${dateString}`);
+ return docSnap.data();
+ } catch (error) { logger.log('ERROR', `Failed to load daily insights for ${dateString}`, { errorMessage: error.message });
+ return null;
  }
  }

@@ -111,22 +74,17 @@ async function loadDailyInsights(config, deps, dateString) {
  async function loadDailySocialPostInsights(config, deps, dateString) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  const collectionName = config.socialInsightsCollectionName || 'daily_social_insights';
  logger.log('INFO', `Loading social post insights for ${dateString} from ${collectionName}`);
-
-
-
-
-
-
-
-
-
- return postsMap;
- } catch (error) {
- logger.log('ERROR', `Failed to load social post insights for ${dateString}`, { errorMessage: error.message });
- return null;
+ try { const postsCollectionRef = db.collection(collectionName).doc(dateString).collection('posts');
+ const querySnapshot = await withRetry(() => postsCollectionRef.get(), `getSocialPosts(${dateString})`);
+ if (querySnapshot.empty) { logger.log('WARN', `No social post insights for ${dateString}`); return null; }
+ const postsMap = {};
+ querySnapshot.forEach(doc => { postsMap[doc.id] = doc.data(); });
+ logger.log('TRACE', `Loaded ${Object.keys(postsMap).length} social post insights`);
+ return postsMap;
+ } catch (error) { logger.log('ERROR', `Failed to load social post insights for ${dateString}`, { errorMessage: error.message });
+ return null;
  }
  }

@@ -134,35 +92,18 @@ async function loadDailySocialPostInsights(config, deps, dateString) {
  async function getHistoryPartRefs(config, deps, dateString) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  logger.log('INFO', `Getting history part references for ${dateString}`);
  const allPartRefs = [];
  const collectionsToQuery = [config.normalUserHistoryCollection, config.speculatorHistoryCollection];
-
  for (const collectionName of collectionsToQuery) {
-
-
-
-
-
-
-
-
- const partsCollectionRef = blockDocRef.collection(config.snapshotsSubcollection).doc(dateString).collection(config.partsSubcollection);
- // Each call is individually retried
- return withRetry(() => partsCollectionRef.listDocuments(), `listDocuments(${partsCollectionRef.path})`);
- });
-
- // Wait for all parallel queries to finish
- const partDocArrays = await Promise.all(partsPromises);
-
- // Flatten the arrays of arrays into the final list
- partDocArrays.forEach(partDocs => {
- allPartRefs.push(...partDocs);
- });
- // --- END MODIFICATION ---
- }
-
+ if (!collectionName) { logger.log('WARN', `History collection undefined. Skipping.`); continue; }
+ const blockDocsQuery = db.collection(collectionName);
+ const blockDocRefs = await withRetry(() => blockDocsQuery.listDocuments(), `listDocuments(${collectionName})`);
+ if (!blockDocRefs.length) { logger.log('WARN', `No block documents in ${collectionName}`); continue; }
+ const partsPromises = blockDocRefs.map(blockDocRef => { const partsCollectionRef = blockDocRef.collection(config.snapshotsSubcollection).doc(dateString).collection(config.partsSubcollection);
+ return withRetry(() => partsCollectionRef.listDocuments(), `listDocuments(${partsCollectionRef.path})`); });
+ const partDocArrays = await Promise.all(partsPromises);
+ partDocArrays.forEach(partDocs => { allPartRefs.push(...partDocs); }); }
  logger.log('INFO', `Found ${allPartRefs.length} history part refs for ${dateString}`);
  return allPartRefs;
  }

@@ -178,29 +119,13 @@ async function getHistoryPartRefs(config, deps, dateString) {
  */
  async function* streamPortfolioData(config, deps, dateString, providedRefs = null) {
  const { logger } = deps;
- // --- FIX: Use providedRefs if available, otherwise fetch them ---
  const refs = providedRefs || (await getPortfolioPartRefs(config, deps, dateString));
-
- if (refs.length === 0) {
- logger.log('WARN', `[streamPortfolioData] No portfolio refs found for ${dateString}. Stream is empty.`);
- return;
- }
-
- // Use the same batch size as loadDataByRefs for consistency
+ if (refs.length === 0) { logger.log('WARN', `[streamPortfolioData] No portfolio refs found for ${dateString}. Stream is empty.`); return; }
  const batchSize = config.partRefBatchSize || 50;
-
  logger.log('INFO', `[streamPortfolioData] Streaming ${refs.length} portfolio parts in chunks of ${batchSize}...`);
-
- for (let i = 0; i < refs.length; i += batchSize) {
- const batchRefs = refs.slice(i, i + batchSize);
- // Load one chunk of data
- const data = await loadDataByRefs(config, deps, batchRefs);
- // Yield it to the consumer, then release it from memory
- yield data;
- }
+ for (let i = 0; i < refs.length; i += batchSize) { const batchRefs = refs.slice(i, i + batchSize); const data = await loadDataByRefs(config, deps, batchRefs);yield data; }
  logger.log('INFO', `[streamPortfolioData] Finished streaming for ${dateString}.`);
  }
- // --- END: Stage 7 ---

  /**
  * --- NEW: Stage 8: Stream history data in chunks ---

@@ -212,32 +137,12 @@ async function* streamPortfolioData(config, deps, dateString, providedRefs = nul
  */
  async function* streamHistoryData(config, deps, dateString, providedRefs = null) {
  const { logger } = deps;
-
-
-
- if (refs.length === 0) {
- logger.log('WARN', `[streamHistoryData] No history refs found for ${dateString}. Stream is empty.`);
- return;
- }
-
+ const refs = providedRefs || (await getHistoryPartRefs(config, deps, dateString));
+ if (refs.length === 0) { logger.log('WARN', `[streamHistoryData] No history refs found for ${dateString}. Stream is empty.`); return; }
  const batchSize = config.partRefBatchSize || 50;
  logger.log('INFO', `[streamHistoryData] Streaming ${refs.length} history parts in chunks of ${batchSize}...`);
-
- for (let i = 0; i < refs.length; i += batchSize) {
- const batchRefs = refs.slice(i, i + batchSize);
- const data = await loadDataByRefs(config, deps, batchRefs);
- yield data;
- }
+ for (let i = 0; i < refs.length; i += batchSize) { const batchRefs = refs.slice(i, i + batchSize); const data = await loadDataByRefs(config, deps, batchRefs); yield data; }
  logger.log('INFO', `[streamHistoryData] Finished streaming for ${dateString}.`);
  }

- module.exports = {
- getPortfolioPartRefs,
- loadDataByRefs,
- loadFullDayMap,
- loadDailyInsights,
- loadDailySocialPostInsights,
- getHistoryPartRefs,
- streamPortfolioData, // <-- EXPORT NEW FUNCTION
- streamHistoryData // <-- EXPORT NEW FUNCTION
- };
+ module.exports = {getPortfolioPartRefs, loadDataByRefs, loadFullDayMap, loadDailyInsights, loadDailySocialPostInsights, getHistoryPartRefs, streamPortfolioData, streamHistoryData };

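
The data_loader.js changes above make streamPortfolioData and streamHistoryData accept pre-fetched part references, so a caller can list the refs once and reuse them across passes. A minimal consumer sketch, assuming the usual injected deps ({ db, logger, calculationUtils }) and a relative require path (both are assumptions, not shown in this diff):

    // Hypothetical consumer: count users for a day while streaming chunks instead of loading the full map.
    const loader = require('./functions/computation-system/utils/data_loader');

    async function countPortfolioUsers(config, deps, dateString) {
      // Fetch the part refs once, then hand them to the stream so it skips its own lookup.
      const refs = await loader.getPortfolioPartRefs(config, deps, dateString);
      let users = 0;
      for await (const chunk of loader.streamPortfolioData(config, deps, dateString, refs)) {
        users += Object.keys(chunk).length; // each yielded chunk is a merged { userId: portfolioPart } object
      }
      return users;
    }
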
package/functions/computation-system/utils/schema_capture.js

@@ -13,52 +13,18 @@
  */
  async function batchStoreSchemas(dependencies, config, schemas) {
  const { db, logger } = dependencies;
-
- // Check if schema capture is disabled
- if (config.captureSchemas === false) {
- logger.log('INFO', '[SchemaCapture] Schema capture is disabled. Skipping.');
- return;
- }
-
+ if (config.captureSchemas === false) { logger.log('INFO', '[SchemaCapture] Schema capture is disabled. Skipping.'); return; }
  const batch = db.batch();
  const schemaCollection = config.schemaCollection || 'computation_schemas';
-
  for (const item of schemas) {
-
-
-
-
-
-
-
- const docRef = db.collection(schemaCollection).doc(item.name);
-
- batch.set(docRef, {
- computationName: item.name,
- category: item.category,
- schema: item.schema, // Use the provided schema directly
- metadata: item.metadata || {},
- lastUpdated: new Date()
- // 'sampleKeys' is removed as we no longer have the output
- }, { merge: true });
- } catch (error) {
- logger.log('WARN', `[SchemaCapture] Failed to add schema to batch for ${item.name}`, {
- errorMessage: error.message
- });
- }
- }
-
- try {
- await batch.commit();
- logger.log('INFO', `[SchemaCapture] Batch stored ${schemas.length} computation schemas`);
- } catch (error) {
- logger.log('ERROR', '[SchemaCapture] Failed to commit schema batch', {
- errorMessage: error.message
- });
- }
+ try { if (!item.schema) { logger.log('WARN', `[SchemaCapture] No schema provided for ${item.name}. Skipping.`); continue; }
+ const docRef = db.collection(schemaCollection).doc(item.name);
+ batch.set(docRef, { computationName: item.name, category: item.category, schema: item.schema, metadata: item.metadata || {}, lastUpdated: new Date() }, { merge: true });
+ } catch (error) { logger.log('WARN', `[SchemaCapture] Failed to add schema to batch for ${item.name}`, { errorMessage: error.message }); } }
+ try { await batch.commit(); logger.log('INFO', `[SchemaCapture] Batch stored ${schemas.length} computation schemas`);
+ } catch (error) { logger.log('ERROR', '[SchemaCapture] Failed to commit schema batch', { errorMessage: error.message }); }
  }

  module.exports = {
- // generateSchema and storeComputationSchema are now obsolete
  batchStoreSchemas
  };

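
The compacted loop in schema_capture.js reads each schema entry's name, category, schema, and optional metadata, and the new guard skips entries without a schema. A hedged sketch of the expected call shape (the deps wiring and the example entries are illustrative only):

    // Illustrative call: batchStoreSchemas(dependencies, config, schemas) with the item shape the diff reads.
    const { batchStoreSchemas } = require('./functions/computation-system/utils/schema_capture');

    async function storeExampleSchemas(db, logger) {
      const dependencies = { db, logger };                        // injected Firestore client and logger
      const config = { captureSchemas: true, schemaCollection: 'computation_schemas' };
      await batchStoreSchemas(dependencies, config, [
        { name: 'daily-pnl', category: 'portfolio', schema: { type: 'object' }, metadata: { version: 2 } },
        { name: 'entry-without-schema', category: 'portfolio' }   // hits the new guard: logged as WARN and skipped
      ]);
    }
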
package/functions/computation-system/utils/utils.js

@@ -10,41 +10,31 @@
  const { FieldValue, FieldPath } = require('@google-cloud/firestore');

  /** Stage 1: Normalize a calculation name to kebab-case */
- function normalizeName(name) {
- return name.replace(/_/g, '-');
- }
+ function normalizeName(name) { return name.replace(/_/g, '-'); }

  /** Stage 2: Commit a batch of writes in chunks */
  async function commitBatchInChunks(config, deps, writes, operationName) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  const batchSizeLimit = config.batchSizeLimit || 450;
  if (!writes.length) { logger.log('WARN', `[${operationName}] No writes to commit.`); return; }
-
  for (let i = 0; i < writes.length; i += batchSizeLimit) {
-
-
-
-
-
-
-
-
- logger.log('INFO', `[${operationName}] Committed chunk ${chunkNum}/${totalChunks} (${chunk.length} ops).`);
- }
+ const chunk = writes.slice(i, i + batchSizeLimit);
+ const batch = db.batch();
+ chunk.forEach(write => batch.set(write.ref, write.data, { merge: true }));
+ const chunkNum = Math.floor(i / batchSizeLimit) + 1;
+ const totalChunks = Math.ceil(writes.length / batchSizeLimit);
+ await withRetry(() => batch.commit(), `${operationName} (Chunk ${chunkNum}/${totalChunks})`);
+ logger.log('INFO', `[${operationName}] Committed chunk ${chunkNum}/${totalChunks} (${chunk.length} ops).`); }
  }

  /** Stage 3: Generate an array of expected date strings between two dates */
  function getExpectedDateStrings(startDate, endDate) {
  const dateStrings = [];
  if (startDate <= endDate) {
-
-
-
- dateStrings.push(new Date(d).toISOString().slice(0, 10));
- }
- }
+ const startUTC = new Date(Date.UTC(startDate.getUTCFullYear(), startDate.getUTCMonth(), startDate.getUTCDate()));
+ const endUTC = new Date(Date.UTC(endDate.getUTCFullYear(), endDate.getUTCMonth(), endDate.getUTCDate()));
+ for (let d = startUTC; d <= endUTC; d.setUTCDate(d.getUTCDate() + 1)) { dateStrings.push(new Date(d).toISOString().slice(0, 10)); } }
  return dateStrings;
  }

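
The rewritten chunk loop above makes the expected write shape explicit: each element of writes is { ref, data }, committed in chunks of config.batchSizeLimit (default 450) with { merge: true } under withRetry. A small illustrative sketch of building such an array (the 'user_scores' collection and the wrapper function are invented for the example):

    // Illustrative: each write pairs a DocumentReference with the data to merge into it.
    async function upsertScores(config, deps, scoresByUser) {
      const { db } = deps;
      const writes = Object.entries(scoresByUser).map(([userId, score]) => ({
        ref: db.collection('user_scores').doc(userId),   // invented collection name
        data: { score, updatedAt: new Date() }
      }));
      await commitBatchInChunks(config, deps, writes, 'UpsertScores');
    }
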
@@ -55,25 +45,12 @@ function getExpectedDateStrings(startDate, endDate) {
  async function getFirstDateFromSimpleCollection(config, deps, collectionName) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  try {
-
-
-
-
-
- .where(FieldPath.documentId(), '>=', '2000-01-01')
- .orderBy(FieldPath.documentId(), 'asc')
- .limit(1);
-
- const snapshot = await withRetry(() => query.get(), `GetEarliestDoc(${collectionName})`);
-
- if (!snapshot.empty && /^\d{4}-\d{2}-\d{2}$/.test(snapshot.docs[0].id)) {
- return new Date(snapshot.docs[0].id + 'T00:00:00Z');
- }
- } catch (e) {
- logger.log('ERROR', `GetFirstDate failed for ${collectionName}`, { errorMessage: e.message });
- }
+ if (!collectionName) { logger.log('WARN', `[Core Utils] Collection name not provided for simple date query.`); return null; }
+ const query = db.collection(collectionName) .where(FieldPath.documentId(), '>=', '2000-01-01') .orderBy(FieldPath.documentId(), 'asc') .limit(1);
+ const snapshot = await withRetry(() => query.get(), `GetEarliestDoc(${collectionName})`);
+ if (!snapshot.empty && /^\d{4}-\d{2}-\d{2}$/.test(snapshot.docs[0].id)) { return new Date(snapshot.docs[0].id + 'T00:00:00Z'); }
+ } catch (e) { logger.log('ERROR', `GetFirstDate failed for ${collectionName}`, { errorMessage: e.message }); }
  return null;
  }

@@ -81,32 +58,14 @@ async function getFirstDateFromSimpleCollection(config, deps, collectionName) {
  async function getFirstDateFromCollection(config, deps, collectionName) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  let earliestDate = null;
- try {
-
-
-
-
-
-
-
- for (const blockDocRef of blockDocRefs) {
- const snapshotQuery = blockDocRef.collection(config.snapshotsSubcollection)
- .where(FieldPath.documentId(), '>=', '2000-01-01')
- .orderBy(FieldPath.documentId(), 'asc')
- .limit(1);
-
- const snapshotSnap = await withRetry(() => snapshotQuery.get(), `GetEarliestSnapshot(${blockDocRef.path})`);
- if (!snapshotSnap.empty && /^\d{4}-\d{2}-\d{2}$/.test(snapshotSnap.docs[0].id)) {
- const foundDate = new Date(snapshotSnap.docs[0].id + 'T00:00:00Z');
- if (!earliestDate || foundDate < earliestDate) earliestDate = foundDate;
- }
- }
- } catch (e) {
- logger.log('ERROR', `GetFirstDate failed for ${collectionName}`, { errorMessage: e.message });
- }
-
+ try { if (!collectionName) { logger.log('WARN', `[Core Utils] Collection name not provided for sharded date query.`); return null; }
+ const blockDocRefs = await withRetry(() => db.collection(collectionName).listDocuments(), `GetBlocks(${collectionName})`);
+ if (!blockDocRefs.length) { logger.log('WARN', `No block documents in collection: ${collectionName}`); return null; }
+ for (const blockDocRef of blockDocRefs) { const snapshotQuery = blockDocRef.collection(config.snapshotsSubcollection) .where(FieldPath.documentId(), '>=', '2000-01-01') .orderBy(FieldPath.documentId(), 'asc') .limit(1);
+ const snapshotSnap = await withRetry(() => snapshotQuery.get(), `GetEarliestSnapshot(${blockDocRef.path})`);
+ if (!snapshotSnap.empty && /^\d{4}-\d{2}-\d{2}$/.test(snapshotSnap.docs[0].id)) { const foundDate = new Date(snapshotSnap.docs[0].id + 'T00:00:00Z');
+ if (!earliestDate || foundDate < earliestDate) earliestDate = foundDate; } } } catch (e) { logger.log('ERROR', `GetFirstDate failed for ${collectionName}`, { errorMessage: e.message }); }
  return earliestDate;
  }

@@ -116,71 +75,25 @@ async function getFirstDateFromCollection(config, deps, collectionName) {
  async function getEarliestDataDates(config, deps) {
  const { logger } = deps;
  logger.log('INFO', 'Querying for earliest date from ALL source data collections...');
-
-
-
-
-
- investorHistoryDate,
- speculatorHistoryDate,
- insightsDate,
- socialDate
- ] = await Promise.all([
- getFirstDateFromCollection(config, deps, config.normalUserPortfolioCollection),
- getFirstDateFromCollection(config, deps, config.speculatorPortfolioCollection),
- getFirstDateFromCollection(config, deps, config.normalUserHistoryCollection),
- getFirstDateFromCollection(config, deps, config.speculatorHistoryCollection),
+ const [ investorDate, speculatorDate, investorHistoryDate, speculatorHistoryDate, insightsDate, socialDate ] = await Promise.all([
+ getFirstDateFromCollection (config, deps, config.normalUserPortfolioCollection),
+ getFirstDateFromCollection (config, deps, config.speculatorPortfolioCollection),
+ getFirstDateFromCollection (config, deps, config.normalUserHistoryCollection),
+ getFirstDateFromCollection (config, deps, config.speculatorHistoryCollection),
  getFirstDateFromSimpleCollection(config, deps, config.insightsCollectionName),
  getFirstDateFromSimpleCollection(config, deps, config.socialInsightsCollectionName)
  ]);

-
- const getMinDate = (...dates) => {
- const validDates = dates.filter(Boolean); // Filter out nulls
- if (validDates.length === 0) return null;
- return new Date(Math.min(...validDates));
- };
-
+ const getMinDate = (...dates) => { const validDates = dates.filter(Boolean); if (validDates.length === 0) return null; return new Date(Math.min(...validDates)); };
  const earliestPortfolioDate = getMinDate(investorDate, speculatorDate);
- const earliestHistoryDate
- const earliestInsightsDate
- const earliestSocialDate
-
- const
-
-
- earliestInsightsDate,
- earliestSocialDate
- );
-
- // Fallback date
- const fallbackDate = new Date(config.earliestComputationDate + 'T00:00:00Z' || '2023-01-01T00:00:00Z');
-
- const result = {
- portfolio: earliestPortfolioDate || new Date('2999-12-31'), // Use a 'far future' date if null
- history: earliestHistoryDate || new Date('2999-12-31'),
- insights: earliestInsightsDate || new Date('2999-12-31'),
- social: earliestSocialDate || new Date('2999-12-31'),
- absoluteEarliest: absoluteEarliest || fallbackDate // Use fallback for the main loop
- };
-
- logger.log('INFO', 'Earliest data availability map built:', {
- portfolio: result.portfolio.toISOString().slice(0, 10),
- history: result.history.toISOString().slice(0, 10),
- insights: result.insights.toISOString().slice(0, 10),
- social: result.social.toISOString().slice(0, 10),
- absoluteEarliest: result.absoluteEarliest.toISOString().slice(0, 10)
- });
-
+ const earliestHistoryDate = getMinDate(investorHistoryDate, speculatorHistoryDate);
+ const earliestInsightsDate = getMinDate(insightsDate);
+ const earliestSocialDate = getMinDate(socialDate);
+ const absoluteEarliest = getMinDate(earliestPortfolioDate, earliestHistoryDate, earliestInsightsDate, earliestSocialDate );
+ const fallbackDate = new Date(config.earliestComputationDate + 'T00:00:00Z' || '2023-01-01T00:00:00Z');
+ const result = { portfolio: earliestPortfolioDate || new Date('2999-12-31'), history: earliestHistoryDate || new Date('2999-12-31'), insights: earliestInsightsDate || new Date('2999-12-31'), social: earliestSocialDate || new Date('2999-12-31'), absoluteEarliest: absoluteEarliest || fallbackDate };
+ logger.log('INFO', 'Earliest data availability map built:', { portfolio: result.portfolio.toISOString().slice(0, 10), history: result.history.toISOString().slice(0, 10), insights: result.insights.toISOString().slice(0, 10), social: result.social.toISOString().slice(0, 10), absoluteEarliest: result.absoluteEarliest.toISOString().slice(0, 10) });
  return result;
  }

- module.exports = {
- FieldValue,
- FieldPath,
- normalizeName,
- commitBatchInChunks,
- getExpectedDateStrings,
- // getFirstDateFromSourceData, // This is replaced
- getEarliestDataDates, // <-- EXPORT NEW FUNCTION
- };
+ module.exports = { FieldValue, FieldPath, normalizeName, commitBatchInChunks, getExpectedDateStrings, getEarliestDataDates };

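
Taken together, the compacted lines above derive one minimum date per source with getMinDate, substitute a far-future sentinel (2999-12-31) for any source that returned null, and fall back to config.earliestComputationDate for the overall start. A shape-only illustration of the value getEarliestDataDates resolves to (the dates are invented):

    // Invented values, shape only: a result where no social-insights documents were found.
    {
      portfolio: new Date('2024-01-03T00:00:00Z'),
      history: new Date('2024-01-05T00:00:00Z'),
      insights: new Date('2024-02-01T00:00:00Z'),
      social: new Date('2999-12-31'),                    // null source -> far-future placeholder
      absoluteEarliest: new Date('2024-01-03T00:00:00Z') // minimum of the non-null dates
    }
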
package/functions/core/utils/firestore_utils.js

@@ -260,18 +260,9 @@ async function getSpeculatorsToUpdate(dependencies, config) {
  const { dateThreshold, gracePeriodThreshold, speculatorBlocksCollectionName } = config;
  logger.log('INFO','[Core Utils] Getting speculators to update...');
  const updates = [];
-
-
-
-
- try {
- const blocksRef = db.collection(speculatorBlocksCollectionName);
- const snapshot = await blocksRef.get();
- if (snapshot.empty) {
- logger.log('INFO','[Core Utils] No speculator blocks found.');
- return [];
- }
-
+ const userMap = new Map();
+ try { const blocksRef = db.collection(speculatorBlocksCollectionName); const snapshot = await blocksRef.get();
+ if (snapshot.empty) { logger.log('INFO','[Core Utils] No speculator blocks found.'); return []; }
  snapshot.forEach(doc => {
  const blockData = doc.data();
  for (const key in blockData) {

@@ -281,42 +272,13 @@ async function getSpeculatorsToUpdate(dependencies, config) {
  const userData = blockData[key];
  const lastVerified = userData.lastVerified?.toDate ? userData.lastVerified.toDate() : new Date(0);
  const lastHeld = userData.lastHeldSpeculatorAsset?.toDate ? userData.lastHeldSpeculatorAsset.toDate() : new Date(0);
-
  if (lastVerified < dateThreshold && lastHeld > gracePeriodThreshold) {
- if (!userMap.has(userId)) {
-
-
- if (userData.instruments && Array.isArray(userData.instruments)) {
- userData.instruments.forEach(id => userMap.get(userId).add(id));
- }
- }
- }
- });
-
- // ⚠️ NEW: Return one task per user with ALL instruments
- for (const [userId, instrumentSet] of userMap) {
- updates.push({
- userId,
- instruments: Array.from(instrumentSet) // ⚠️ Array of all instruments
- });
- }
-
+ if (!userMap.has(userId)) { userMap.set(userId, new Set()); }
+ if (userData.instruments && Array.isArray(userData.instruments)) { userData.instruments.forEach(id => userMap.get(userId).add(id)); } } } });
+ for (const [userId, instrumentSet] of userMap) { updates.push({ userId, instruments: Array.from(instrumentSet) }); }
  logger.log('INFO',`[Core Utils] Found ${updates.length} speculator users to update (covering ${[...userMap.values()].reduce((sum, set) => sum + set.size, 0)} total instruments).`);
  return updates;
- } catch (error) {
- logger.log('ERROR','[Core Utils] Error getting speculators to update', { errorMessage: error.message });
- throw error;
- }
+ } catch (error) { logger.log('ERROR','[Core Utils] Error getting speculators to update', { errorMessage: error.message }); throw error; }
  }

- module.exports = {
- getLatestNormalUserPortfolios,
- resetProxyLocks,
- getBlockCapacities,
- getExclusionIds,
- getPrioritizedSpeculators,
- clearCollection,
- batchWriteShardedIds,
- getNormalUsersToUpdate,
- getSpeculatorsToUpdate,
- };
+ module.exports = { getLatestNormalUserPortfolios, resetProxyLocks, getBlockCapacities, getExclusionIds, getPrioritizedSpeculators, clearCollection, batchWriteShardedIds, getNormalUsersToUpdate, getSpeculatorsToUpdate, };

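
The rewritten aggregation in firestore_utils.js collects instruments per user into a Map of Sets and then emits one update task per user. A shape-only sketch of the array getSpeculatorsToUpdate now returns (the IDs are invented):

    // Invented IDs, shape only: one entry per speculator user, covering all of that user's instruments.
    [
      { userId: '1234567', instruments: [1001, 1002, 1137] },
      { userId: '7654321', instruments: [1002] }
    ]
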