bulltrackers-module 1.0.151 → 1.0.153

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/functions/appscript-api/index.js +8 -38
  2. package/functions/computation-system/helpers/computation_pass_runner.js +38 -183
  3. package/functions/computation-system/helpers/orchestration_helpers.js +120 -314
  4. package/functions/computation-system/utils/data_loader.js +47 -132
  5. package/functions/computation-system/utils/schema_capture.js +7 -41
  6. package/functions/computation-system/utils/utils.js +37 -124
  7. package/functions/core/utils/firestore_utils.js +8 -46
  8. package/functions/core/utils/intelligent_header_manager.js +26 -128
  9. package/functions/core/utils/intelligent_proxy_manager.js +33 -171
  10. package/functions/core/utils/pubsub_utils.js +7 -24
  11. package/functions/dispatcher/helpers/dispatch_helpers.js +9 -30
  12. package/functions/dispatcher/index.js +7 -30
  13. package/functions/etoro-price-fetcher/helpers/handler_helpers.js +12 -80
  14. package/functions/fetch-insights/helpers/handler_helpers.js +18 -70
  15. package/functions/generic-api/helpers/api_helpers.js +28 -167
  16. package/functions/generic-api/index.js +49 -188
  17. package/functions/invalid-speculator-handler/helpers/handler_helpers.js +10 -47
  18. package/functions/orchestrator/helpers/discovery_helpers.js +1 -5
  19. package/functions/orchestrator/index.js +1 -6
  20. package/functions/price-backfill/helpers/handler_helpers.js +13 -69
  21. package/functions/social-orchestrator/helpers/orchestrator_helpers.js +5 -37
  22. package/functions/social-task-handler/helpers/handler_helpers.js +29 -186
  23. package/functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers.js +19 -78
  24. package/functions/task-engine/handler_creator.js +2 -8
  25. package/functions/task-engine/helpers/update_helpers.js +17 -83
  26. package/functions/task-engine/helpers/verify_helpers.js +11 -56
  27. package/functions/task-engine/utils/firestore_batch_manager.js +16 -67
  28. package/functions/task-engine/utils/task_engine_utils.js +6 -35
  29. package/index.js +45 -43
  30. package/package.json +1 -1
package/functions/computation-system/utils/data_loader.js
@@ -2,10 +2,9 @@
  * @fileoverview Data loader sub-pipes for the Computation System.
  * REFACTORED: Now stateless and receive dependencies.
  * --- NEW: Added streamPortfolioData async generator ---
+ * --- FIXED: streamPortfolioData and streamHistoryData now accept optional 'providedRefs' ---
  */
 
- // <<< FIX: REMOVED all top-level 'require' and 'dependencies' lines >>>
-
  /**
  * Sub-pipe: pipe.computationSystem.dataLoader.getPortfolioPartRefs
  * @param {object} config - The computation system configuration object.
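
Throughout this file, helpers receive their dependencies — `{ db, logger, calculationUtils }` — through an injected `deps` object and wrap every Firestore call in `withRetry(fn, label)`. The retry helper itself is not part of this diff; the following is a hypothetical sketch with the same call shape, for orientation only:

```js
// Hypothetical withRetry with the (fn, label) call shape used in this
// file — the real implementation lives in calculationUtils and is not
// shown in this diff.
async function withRetry(fn, label, attempts = 3, baseDelayMs = 250) {
  for (let attempt = 1; ; attempt++) {
    try {
      return await fn();
    } catch (err) {
      if (attempt >= attempts) throw err;
      console.warn(`[withRetry] ${label} failed (attempt ${attempt}): ${err.message}`);
      // Exponential backoff before retrying.
      await new Promise(res => setTimeout(res, baseDelayMs * 2 ** (attempt - 1)));
    }
  }
}
```
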
@@ -19,34 +18,16 @@
  async function getPortfolioPartRefs(config, deps, dateString) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  logger.log('INFO', `Getting portfolio part references for date: ${dateString}`);
  const allPartRefs = [];
  const collectionsToQuery = [config.normalUserPortfolioCollection, config.speculatorPortfolioCollection];
-
  for (const collectionName of collectionsToQuery) {
- const blockDocsQuery = db.collection(collectionName);
- const blockDocRefs = await withRetry(() => blockDocsQuery.listDocuments(), `listDocuments(${collectionName})`);
- if (!blockDocRefs.length) { logger.log('WARN', `No block documents in ${collectionName}`); continue; }
-
- // --- START MODIFICATION ---
- // Run all "listDocuments" calls in parallel instead of a sequential loop
- const partsPromises = blockDocRefs.map(blockDocRef => {
- const partsCollectionRef = blockDocRef.collection(config.snapshotsSubcollection).doc(dateString).collection(config.partsSubcollection);
- // Each call is individually retried
- return withRetry(() => partsCollectionRef.listDocuments(), `listDocuments(${partsCollectionRef.path})`);
- });
-
- // Wait for all parallel queries to finish
- const partDocArrays = await Promise.all(partsPromises);
-
- // Flatten the arrays of arrays into the final list
- partDocArrays.forEach(partDocs => {
- allPartRefs.push(...partDocs);
- });
- // --- END MODIFICATION ---
- }
-
+ const blockDocsQuery = db.collection(collectionName);
+ const blockDocRefs = await withRetry(() => blockDocsQuery.listDocuments(), `listDocuments(${collectionName})`);
+ if (!blockDocRefs.length) { logger.log('WARN', `No block documents in ${collectionName}`); continue; }
+ const partsPromises = blockDocRefs.map(blockDocRef => { const partsCollectionRef = blockDocRef.collection(config.snapshotsSubcollection).doc(dateString).collection(config.partsSubcollection); return withRetry(() => partsCollectionRef.listDocuments(), `listDocuments(${partsCollectionRef.path})`); });
+ const partDocArrays = await Promise.all(partsPromises);
+ partDocArrays.forEach(partDocs => { allPartRefs.push(...partDocs); }); }
  logger.log('INFO', `Found ${allPartRefs.length} portfolio part refs for ${dateString}`);
  return allPartRefs;
  }
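
The 1.0.153 version of `getPortfolioPartRefs` keeps the earlier parallel fan-out (the `START/END MODIFICATION` comments are dropped and the body is reflowed onto single lines): one `listDocuments()` call per block document, started together and flattened. A minimal standalone sketch of that pattern, assuming the Firestore Admin SDK and a `withRetry` of the shape above; the subcollection names are placeholders:

```js
// Minimal sketch of the fan-out above, assuming the Firestore Admin SDK
// and a withRetry(fn, label) helper; subcollection names are placeholders.
async function listPartsInParallel(db, withRetry, blockRefs, dateString) {
  // Start one listDocuments() call per block document, all at once...
  const promises = blockRefs.map(blockRef => {
    const partsRef = blockRef
      .collection('snapshots')   // config.snapshotsSubcollection in the real code
      .doc(dateString)
      .collection('parts');      // config.partsSubcollection in the real code
    return withRetry(() => partsRef.listDocuments(), `listDocuments(${partsRef.path})`);
  });
  // ...await them together, then flatten the array of arrays into one list.
  const arrays = await Promise.all(promises);
  return arrays.flat();
}
```
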
@@ -55,23 +36,11 @@ async function getPortfolioPartRefs(config, deps, dateString) {
  async function loadDataByRefs(config, deps, refs) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  if (!refs || !refs.length) return {};
  const mergedPortfolios = {};
  const batchSize = config.partRefBatchSize || 50;
-
- for (let i = 0; i < refs.length; i += batchSize) {
- const batchRefs = refs.slice(i, i + batchSize);
- const snapshots = await withRetry(() => db.getAll(...batchRefs), `getAll(batch ${Math.floor(i / batchSize)})`);
-
- for (const doc of snapshots) {
- if (!doc.exists) continue;
- const data = doc.data();
- if (data && typeof data === 'object') Object.assign(mergedPortfolios, data);
- else logger.log('WARN', `Doc ${doc.id} exists but data is not an object`, data);
- }
- }
-
+ for (let i = 0; i < refs.length; i += batchSize) { const batchRefs = refs.slice(i, i + batchSize); const snapshots = await withRetry(() => db.getAll(...batchRefs), `getAll(batch ${Math.floor(i / batchSize)})`);
+ for (const doc of snapshots) { if (!doc.exists) continue; const data = doc.data(); if (data && typeof data === 'object') Object.assign(mergedPortfolios, data); else logger.log('WARN', `Doc ${doc.id} exists but data is not an object`, data); } }
  return mergedPortfolios;
  }
 
@@ -79,7 +48,6 @@ async function loadDataByRefs(config, deps, refs) {
  async function loadFullDayMap(config, deps, partRefs) {
  const { logger } = deps;
  if (!partRefs.length) return {};
-
  logger.log('TRACE', `Loading full day map from ${partRefs.length} references...`);
  const fullMap = await loadDataByRefs(config, deps, partRefs);
  logger.log('TRACE', `Full day map loaded with ${Object.keys(fullMap).length} users`);
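
`loadDataByRefs` (and `loadFullDayMap` on top of it) reads document references in slices of `partRefBatchSize` (default 50) via `db.getAll()` and shallow-merges each part document into one map keyed by user. A sketch of that chunked-read pattern under the same assumptions:

```js
// Chunked batch reads via db.getAll(), mirroring loadDataByRefs above.
// The shallow Object.assign merge assumes each part doc maps user IDs
// to portfolio objects, as the surrounding code implies.
async function loadInChunks(db, refs, batchSize = 50) {
  const merged = {};
  for (let i = 0; i < refs.length; i += batchSize) {
    const slice = refs.slice(i, i + batchSize);
    const snapshots = await db.getAll(...slice); // one RPC per chunk of refs
    for (const snap of snapshots) {
      if (snap.exists) Object.assign(merged, snap.data());
    }
  }
  return merged;
}
```
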
@@ -90,19 +58,15 @@ async function loadFullDayMap(config, deps, partRefs) {
  async function loadDailyInsights(config, deps, dateString) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  const insightsCollectionName = config.insightsCollectionName || 'daily_instrument_insights';
  logger.log('INFO', `Loading daily insights for ${dateString} from ${insightsCollectionName}`);
-
- try {
- const docRef = db.collection(insightsCollectionName).doc(dateString);
- const docSnap = await withRetry(() => docRef.get(), `getInsights(${dateString})`);
- if (!docSnap.exists) { logger.log('WARN', `Insights not found for ${dateString}`); return null; }
- logger.log('TRACE', `Successfully loaded insights for ${dateString}`);
- return docSnap.data();
- } catch (error) {
- logger.log('ERROR', `Failed to load daily insights for ${dateString}`, { errorMessage: error.message });
- return null;
+ try { const docRef = db.collection(insightsCollectionName).doc(dateString);
+ const docSnap = await withRetry(() => docRef.get(), `getInsights(${dateString})`);
+ if (!docSnap.exists) { logger.log('WARN', `Insights not found for ${dateString}`); return null; }
+ logger.log('TRACE', `Successfully loaded insights for ${dateString}`);
+ return docSnap.data();
+ } catch (error) { logger.log('ERROR', `Failed to load daily insights for ${dateString}`, { errorMessage: error.message });
+ return null;
  }
  }
 
@@ -110,22 +74,17 @@ async function loadDailyInsights(config, deps, dateString) {
  async function loadDailySocialPostInsights(config, deps, dateString) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  const collectionName = config.socialInsightsCollectionName || 'daily_social_insights';
  logger.log('INFO', `Loading social post insights for ${dateString} from ${collectionName}`);
-
- try {
- const postsCollectionRef = db.collection(collectionName).doc(dateString).collection('posts');
- const querySnapshot = await withRetry(() => postsCollectionRef.get(), `getSocialPosts(${dateString})`);
- if (querySnapshot.empty) { logger.log('WARN', `No social post insights for ${dateString}`); return null; }
-
- const postsMap = {};
- querySnapshot.forEach(doc => { postsMap[doc.id] = doc.data(); });
- logger.log('TRACE', `Loaded ${Object.keys(postsMap).length} social post insights`);
- return postsMap;
- } catch (error) {
- logger.log('ERROR', `Failed to load social post insights for ${dateString}`, { errorMessage: error.message });
- return null;
+ try { const postsCollectionRef = db.collection(collectionName).doc(dateString).collection('posts');
+ const querySnapshot = await withRetry(() => postsCollectionRef.get(), `getSocialPosts(${dateString})`);
+ if (querySnapshot.empty) { logger.log('WARN', `No social post insights for ${dateString}`); return null; }
+ const postsMap = {};
+ querySnapshot.forEach(doc => { postsMap[doc.id] = doc.data(); });
+ logger.log('TRACE', `Loaded ${Object.keys(postsMap).length} social post insights`);
+ return postsMap;
+ } catch (error) { logger.log('ERROR', `Failed to load social post insights for ${dateString}`, { errorMessage: error.message });
+ return null;
  }
  }
 
@@ -133,35 +92,18 @@ async function loadDailySocialPostInsights(config, deps, dateString) {
  async function getHistoryPartRefs(config, deps, dateString) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  logger.log('INFO', `Getting history part references for ${dateString}`);
  const allPartRefs = [];
  const collectionsToQuery = [config.normalUserHistoryCollection, config.speculatorHistoryCollection];
-
  for (const collectionName of collectionsToQuery) {
- if (!collectionName) { logger.log('WARN', `History collection undefined. Skipping.`); continue; }
- const blockDocsQuery = db.collection(collectionName);
- const blockDocRefs = await withRetry(() => blockDocsQuery.listDocuments(), `listDocuments(${collectionName})`);
- if (!blockDocRefs.length) { logger.log('WARN', `No block documents in ${collectionName}`); continue; }
-
- // --- START MODIFICATION ---
- // Run all "listDocuments" calls in parallel instead of a sequential loop
- const partsPromises = blockDocRefs.map(blockDocRef => {
- const partsCollectionRef = blockDocRef.collection(config.snapshotsSubcollection).doc(dateString).collection(config.partsSubcollection);
- // Each call is individually retried
- return withRetry(() => partsCollectionRef.listDocuments(), `listDocuments(${partsCollectionRef.path})`);
- });
-
- // Wait for all parallel queries to finish
- const partDocArrays = await Promise.all(partsPromises);
-
- // Flatten the arrays of arrays into the final list
- partDocArrays.forEach(partDocs => {
- allPartRefs.push(...partDocs);
- });
- // --- END MODIFICATION ---
- }
-
+ if (!collectionName) { logger.log('WARN', `History collection undefined. Skipping.`); continue; }
+ const blockDocsQuery = db.collection(collectionName);
+ const blockDocRefs = await withRetry(() => blockDocsQuery.listDocuments(), `listDocuments(${collectionName})`);
+ if (!blockDocRefs.length) { logger.log('WARN', `No block documents in ${collectionName}`); continue; }
+ const partsPromises = blockDocRefs.map(blockDocRef => { const partsCollectionRef = blockDocRef.collection(config.snapshotsSubcollection).doc(dateString).collection(config.partsSubcollection);
+ return withRetry(() => partsCollectionRef.listDocuments(), `listDocuments(${partsCollectionRef.path})`); });
+ const partDocArrays = await Promise.all(partsPromises);
+ partDocArrays.forEach(partDocs => { allPartRefs.push(...partDocs); }); }
  logger.log('INFO', `Found ${allPartRefs.length} history part refs for ${dateString}`);
  return allPartRefs;
  }
@@ -173,61 +115,34 @@ async function getHistoryPartRefs(config, deps, dateString) {
  * @param {object} config - The computation system configuration object.
  * @param {object} deps - Contains db, logger, calculationUtils.
  * @param {string} dateString - The date in YYYY-MM-DD format.
+ * @param {Array<Firestore.DocumentReference> | null} [providedRefs=null] - Optional pre-fetched refs.
  */
- async function* streamPortfolioData(config, deps, dateString) {
+ async function* streamPortfolioData(config, deps, dateString, providedRefs = null) {
  const { logger } = deps;
- const refs = await getPortfolioPartRefs(config, deps, dateString);
- if (refs.length === 0) {
- logger.log('WARN', `[streamPortfolioData] No portfolio refs found for ${dateString}. Stream is empty.`);
- return;
- }
-
- // Use the same batch size as loadDataByRefs for consistency
+ const refs = providedRefs || (await getPortfolioPartRefs(config, deps, dateString));
+ if (refs.length === 0) { logger.log('WARN', `[streamPortfolioData] No portfolio refs found for ${dateString}. Stream is empty.`); return; }
  const batchSize = config.partRefBatchSize || 50;
-
  logger.log('INFO', `[streamPortfolioData] Streaming ${refs.length} portfolio parts in chunks of ${batchSize}...`);
-
- for (let i = 0; i < refs.length; i += batchSize) {
- const batchRefs = refs.slice(i, i + batchSize);
- // Load one chunk of data
- const data = await loadDataByRefs(config, deps, batchRefs);
- // Yield it to the consumer, then release it from memory
- yield data;
- }
+ for (let i = 0; i < refs.length; i += batchSize) { const batchRefs = refs.slice(i, i + batchSize); const data = await loadDataByRefs(config, deps, batchRefs);yield data; }
  logger.log('INFO', `[streamPortfolioData] Finished streaming for ${dateString}.`);
  }
- // --- END: Stage 7 ---
 
  /**
  * --- NEW: Stage 8: Stream history data in chunks ---
  * Streams history data in chunks for a given date.
+ * @param {object} config - The computation system configuration object.
+ * @param {object} deps - Contains db, logger, calculationUtils.
+ * @param {string} dateString - The date in YYYY-MM-DD format.
+ * @param {Array<Firestore.DocumentReference> | null} [providedRefs=null] - Optional pre-fetched refs.
  */
- async function* streamHistoryData(config, deps, dateString) {
+ async function* streamHistoryData(config, deps, dateString, providedRefs = null) {
  const { logger } = deps;
- const refs = await getHistoryPartRefs(config, deps, dateString); // <-- Uses history refs
- if (refs.length === 0) {
- logger.log('WARN', `[streamHistoryData] No history refs found for ${dateString}. Stream is empty.`);
- return;
- }
-
+ const refs = providedRefs || (await getHistoryPartRefs(config, deps, dateString));
+ if (refs.length === 0) { logger.log('WARN', `[streamHistoryData] No history refs found for ${dateString}. Stream is empty.`); return; }
  const batchSize = config.partRefBatchSize || 50;
  logger.log('INFO', `[streamHistoryData] Streaming ${refs.length} history parts in chunks of ${batchSize}...`);
-
- for (let i = 0; i < refs.length; i += batchSize) {
- const batchRefs = refs.slice(i, i + batchSize);
- const data = await loadDataByRefs(config, deps, batchRefs);
- yield data;
- }
+ for (let i = 0; i < refs.length; i += batchSize) { const batchRefs = refs.slice(i, i + batchSize); const data = await loadDataByRefs(config, deps, batchRefs); yield data; }
  logger.log('INFO', `[streamHistoryData] Finished streaming for ${dateString}.`);
  }
 
- module.exports = {
- getPortfolioPartRefs,
- loadDataByRefs,
- loadFullDayMap,
- loadDailyInsights,
- loadDailySocialPostInsights,
- getHistoryPartRefs,
- streamPortfolioData, // <-- EXPORT NEW FUNCTION
- streamHistoryData // <-- EXPORT NEW FUNCTION
- };
+ module.exports = {getPortfolioPartRefs, loadDataByRefs, loadFullDayMap, loadDailyInsights, loadDailySocialPostInsights, getHistoryPartRefs, streamPortfolioData, streamHistoryData };
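
The substantive change in data_loader.js is the optional `providedRefs` parameter on both generators: a caller can list the part refs once and reuse them across several streaming passes instead of re-running ref discovery on every pass. A minimal consumption sketch; `config`, `deps`, and `processChunk` are hypothetical stand-ins for the objects the computation system wires up:

```js
// Consuming the generators with the new optional providedRefs argument.
const { getPortfolioPartRefs, streamPortfolioData } = require('./utils/data_loader');

async function runTwoPasses(config, deps, dateString, processChunk) {
  // List the part refs once...
  const refs = await getPortfolioPartRefs(config, deps, dateString);
  // ...then reuse them across passes; ref discovery is skipped each time.
  for (let pass = 1; pass <= 2; pass++) {
    for await (const chunk of streamPortfolioData(config, deps, dateString, refs)) {
      processChunk(chunk, pass); // chunk: merged map of ~partRefBatchSize part docs
    }
  }
}
```
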
package/functions/computation-system/utils/schema_capture.js
@@ -13,52 +13,18 @@
  */
  async function batchStoreSchemas(dependencies, config, schemas) {
  const { db, logger } = dependencies;
-
- // Check if schema capture is disabled
- if (config.captureSchemas === false) {
- logger.log('INFO', '[SchemaCapture] Schema capture is disabled. Skipping.');
- return;
- }
-
+ if (config.captureSchemas === false) { logger.log('INFO', '[SchemaCapture] Schema capture is disabled. Skipping.'); return; }
  const batch = db.batch();
  const schemaCollection = config.schemaCollection || 'computation_schemas';
-
  for (const item of schemas) {
- try {
- // 'item.schema' is now the pre-built schema from static getSchema()
- if (!item.schema) {
- logger.log('WARN', `[SchemaCapture] No schema provided for ${item.name}. Skipping.`);
- continue;
- }
-
- const docRef = db.collection(schemaCollection).doc(item.name);
-
- batch.set(docRef, {
- computationName: item.name,
- category: item.category,
- schema: item.schema, // Use the provided schema directly
- metadata: item.metadata || {},
- lastUpdated: new Date()
- // 'sampleKeys' is removed as we no longer have the output
- }, { merge: true });
- } catch (error) {
- logger.log('WARN', `[SchemaCapture] Failed to add schema to batch for ${item.name}`, {
- errorMessage: error.message
- });
- }
- }
-
- try {
- await batch.commit();
- logger.log('INFO', `[SchemaCapture] Batch stored ${schemas.length} computation schemas`);
- } catch (error) {
- logger.log('ERROR', '[SchemaCapture] Failed to commit schema batch', {
- errorMessage: error.message
- });
- }
+ try { if (!item.schema) { logger.log('WARN', `[SchemaCapture] No schema provided for ${item.name}. Skipping.`); continue; }
+ const docRef = db.collection(schemaCollection).doc(item.name);
+ batch.set(docRef, { computationName: item.name, category: item.category, schema: item.schema, metadata: item.metadata || {}, lastUpdated: new Date() }, { merge: true });
+ } catch (error) { logger.log('WARN', `[SchemaCapture] Failed to add schema to batch for ${item.name}`, { errorMessage: error.message }); } }
+ try { await batch.commit(); logger.log('INFO', `[SchemaCapture] Batch stored ${schemas.length} computation schemas`);
+ } catch (error) { logger.log('ERROR', '[SchemaCapture] Failed to commit schema batch', { errorMessage: error.message }); }
  }
 
  module.exports = {
- // generateSchema and storeComputationSchema are now obsolete
  batchStoreSchemas
  };
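
`batchStoreSchemas` upserts one document per schema with `{ merge: true }` inside a single `WriteBatch`, which Firestore caps at 500 operations — the single commit therefore assumes a modest schema count. A sketch of the call shape implied by the fields written above; the schema entry is illustrative, not taken from the package:

```js
// Hypothetical call shape for batchStoreSchemas, based on the fields the
// batch writes above.
async function storeExampleSchemas(db, logger, batchStoreSchemas) {
  const schemas = [{
    name: 'portfolio-delta', // becomes the doc ID and computationName
    category: 'portfolio',
    schema: { type: 'object', properties: { delta: { type: 'number' } } },
    metadata: { version: 2 }
  }];
  await batchStoreSchemas({ db, logger }, { schemaCollection: 'computation_schemas' }, schemas);
  // Each entry is upserted with { merge: true }. A Firestore WriteBatch allows
  // at most 500 operations, so a very large schema set would need chunking.
}
```
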
package/functions/computation-system/utils/utils.js
@@ -10,41 +10,31 @@
  const { FieldValue, FieldPath } = require('@google-cloud/firestore');
 
  /** Stage 1: Normalize a calculation name to kebab-case */
- function normalizeName(name) {
- return name.replace(/_/g, '-');
- }
+ function normalizeName(name) { return name.replace(/_/g, '-'); }
 
  /** Stage 2: Commit a batch of writes in chunks */
  async function commitBatchInChunks(config, deps, writes, operationName) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  const batchSizeLimit = config.batchSizeLimit || 450;
  if (!writes.length) { logger.log('WARN', `[${operationName}] No writes to commit.`); return; }
-
  for (let i = 0; i < writes.length; i += batchSizeLimit) {
- const chunk = writes.slice(i, i + batchSizeLimit);
- const batch = db.batch();
- chunk.forEach(write => batch.set(write.ref, write.data, { merge: true }));
-
- const chunkNum = Math.floor(i / batchSizeLimit) + 1;
- const totalChunks = Math.ceil(writes.length / batchSizeLimit);
- await withRetry(() => batch.commit(), `${operationName} (Chunk ${chunkNum}/${totalChunks})`);
-
- logger.log('INFO', `[${operationName}] Committed chunk ${chunkNum}/${totalChunks} (${chunk.length} ops).`);
- }
+ const chunk = writes.slice(i, i + batchSizeLimit);
+ const batch = db.batch();
+ chunk.forEach(write => batch.set(write.ref, write.data, { merge: true }));
+ const chunkNum = Math.floor(i / batchSizeLimit) + 1;
+ const totalChunks = Math.ceil(writes.length / batchSizeLimit);
+ await withRetry(() => batch.commit(), `${operationName} (Chunk ${chunkNum}/${totalChunks})`);
+ logger.log('INFO', `[${operationName}] Committed chunk ${chunkNum}/${totalChunks} (${chunk.length} ops).`); }
  }
 
  /** Stage 3: Generate an array of expected date strings between two dates */
  function getExpectedDateStrings(startDate, endDate) {
  const dateStrings = [];
  if (startDate <= endDate) {
- const startUTC = new Date(Date.UTC(startDate.getUTCFullYear(), startDate.getUTCMonth(), startDate.getUTCDate()));
- const endUTC = new Date(Date.UTC(endDate.getUTCFullYear(), endDate.getUTCMonth(), endDate.getUTCDate()));
- for (let d = startUTC; d <= endUTC; d.setUTCDate(d.getUTCDate() + 1)) {
- dateStrings.push(new Date(d).toISOString().slice(0, 10));
- }
- }
+ const startUTC = new Date(Date.UTC(startDate.getUTCFullYear(), startDate.getUTCMonth(), startDate.getUTCDate()));
+ const endUTC = new Date(Date.UTC(endDate.getUTCFullYear(), endDate.getUTCMonth(), endDate.getUTCDate()));
+ for (let d = startUTC; d <= endUTC; d.setUTCDate(d.getUTCDate() + 1)) { dateStrings.push(new Date(d).toISOString().slice(0, 10)); } }
  return dateStrings;
  }
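
`commitBatchInChunks` slices writes into groups of `batchSizeLimit` (default 450, comfortably under Firestore's 500-operation batch cap) and retries each commit independently. A condensed sketch of the same pattern, assuming `{ ref, data }` write objects as in the diffed code:

```js
// Condensed version of commitBatchInChunks: slice, batch, retry, repeat.
// writes are { ref, data } objects, as in the diffed code.
async function commitInChunks(db, withRetry, writes, limit = 450) {
  for (let i = 0; i < writes.length; i += limit) {
    const batch = db.batch();
    for (const { ref, data } of writes.slice(i, i + limit)) {
      batch.set(ref, data, { merge: true }); // merge keeps upsert semantics
    }
    const chunkNum = Math.floor(i / limit) + 1;
    await withRetry(() => batch.commit(), `commit (chunk ${chunkNum})`);
  }
}
```
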
@@ -55,25 +45,12 @@ function getExpectedDateStrings(startDate, endDate) {
  async function getFirstDateFromSimpleCollection(config, deps, collectionName) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  try {
- if (!collectionName) {
- logger.log('WARN', `[Core Utils] Collection name not provided for simple date query.`);
- return null;
- }
- const query = db.collection(collectionName)
- .where(FieldPath.documentId(), '>=', '2000-01-01')
- .orderBy(FieldPath.documentId(), 'asc')
- .limit(1);
-
- const snapshot = await withRetry(() => query.get(), `GetEarliestDoc(${collectionName})`);
-
- if (!snapshot.empty && /^\d{4}-\d{2}-\d{2}$/.test(snapshot.docs[0].id)) {
- return new Date(snapshot.docs[0].id + 'T00:00:00Z');
- }
- } catch (e) {
- logger.log('ERROR', `GetFirstDate failed for ${collectionName}`, { errorMessage: e.message });
- }
+ if (!collectionName) { logger.log('WARN', `[Core Utils] Collection name not provided for simple date query.`); return null; }
+ const query = db.collection(collectionName) .where(FieldPath.documentId(), '>=', '2000-01-01') .orderBy(FieldPath.documentId(), 'asc') .limit(1);
+ const snapshot = await withRetry(() => query.get(), `GetEarliestDoc(${collectionName})`);
+ if (!snapshot.empty && /^\d{4}-\d{2}-\d{2}$/.test(snapshot.docs[0].id)) { return new Date(snapshot.docs[0].id + 'T00:00:00Z'); }
+ } catch (e) { logger.log('ERROR', `GetFirstDate failed for ${collectionName}`, { errorMessage: e.message }); }
  return null;
  }
 
@@ -81,32 +58,14 @@ async function getFirstDateFromSimpleCollection(config, deps, collectionName) {
  async function getFirstDateFromCollection(config, deps, collectionName) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
-
  let earliestDate = null;
- try {
- if (!collectionName) {
- logger.log('WARN', `[Core Utils] Collection name not provided for sharded date query.`);
- return null;
- }
- const blockDocRefs = await withRetry(() => db.collection(collectionName).listDocuments(), `GetBlocks(${collectionName})`);
- if (!blockDocRefs.length) { logger.log('WARN', `No block documents in collection: ${collectionName}`); return null; }
-
- for (const blockDocRef of blockDocRefs) {
- const snapshotQuery = blockDocRef.collection(config.snapshotsSubcollection)
- .where(FieldPath.documentId(), '>=', '2000-01-01')
- .orderBy(FieldPath.documentId(), 'asc')
- .limit(1);
-
- const snapshotSnap = await withRetry(() => snapshotQuery.get(), `GetEarliestSnapshot(${blockDocRef.path})`);
- if (!snapshotSnap.empty && /^\d{4}-\d{2}-\d{2}$/.test(snapshotSnap.docs[0].id)) {
- const foundDate = new Date(snapshotSnap.docs[0].id + 'T00:00:00Z');
- if (!earliestDate || foundDate < earliestDate) earliestDate = foundDate;
- }
- }
- } catch (e) {
- logger.log('ERROR', `GetFirstDate failed for ${collectionName}`, { errorMessage: e.message });
- }
-
+ try { if (!collectionName) { logger.log('WARN', `[Core Utils] Collection name not provided for sharded date query.`); return null; }
+ const blockDocRefs = await withRetry(() => db.collection(collectionName).listDocuments(), `GetBlocks(${collectionName})`);
+ if (!blockDocRefs.length) { logger.log('WARN', `No block documents in collection: ${collectionName}`); return null; }
+ for (const blockDocRef of blockDocRefs) { const snapshotQuery = blockDocRef.collection(config.snapshotsSubcollection) .where(FieldPath.documentId(), '>=', '2000-01-01') .orderBy(FieldPath.documentId(), 'asc') .limit(1);
+ const snapshotSnap = await withRetry(() => snapshotQuery.get(), `GetEarliestSnapshot(${blockDocRef.path})`);
+ if (!snapshotSnap.empty && /^\d{4}-\d{2}-\d{2}$/.test(snapshotSnap.docs[0].id)) { const foundDate = new Date(snapshotSnap.docs[0].id + 'T00:00:00Z');
+ if (!earliestDate || foundDate < earliestDate) earliestDate = foundDate; } } } catch (e) { logger.log('ERROR', `GetFirstDate failed for ${collectionName}`, { errorMessage: e.message }); }
  return earliestDate;
  }
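
Both earliest-date helpers lean on the fact that `YYYY-MM-DD` document IDs sort lexicographically in date order, so an ascending `FieldPath.documentId()` query with `limit(1)` returns the earliest dated document. A minimal sketch:

```js
// Earliest dated document by ID, as in the two helpers above. Works
// because YYYY-MM-DD IDs sort lexicographically in date order.
const { FieldPath } = require('@google-cloud/firestore');

async function earliestDatedDocId(db, collectionName) {
  const snap = await db.collection(collectionName)
    .where(FieldPath.documentId(), '>=', '2000-01-01') // skips IDs that sort before any date
    .orderBy(FieldPath.documentId(), 'asc')
    .limit(1)
    .get();
  if (snap.empty || !/^\d{4}-\d{2}-\d{2}$/.test(snap.docs[0].id)) return null;
  return snap.docs[0].id; // e.g. '2023-05-01'
}
```
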
@@ -116,71 +75,25 @@ async function getFirstDateFromCollection(config, deps, collectionName) {
  async function getEarliestDataDates(config, deps) {
  const { logger } = deps;
  logger.log('INFO', 'Querying for earliest date from ALL source data collections...');
-
- // These return null on error or if empty
- const [
- investorDate,
- speculatorDate,
- investorHistoryDate,
- speculatorHistoryDate,
- insightsDate,
- socialDate
- ] = await Promise.all([
- getFirstDateFromCollection(config, deps, config.normalUserPortfolioCollection),
- getFirstDateFromCollection(config, deps, config.speculatorPortfolioCollection),
- getFirstDateFromCollection(config, deps, config.normalUserHistoryCollection),
- getFirstDateFromCollection(config, deps, config.speculatorHistoryCollection),
+ const [ investorDate, speculatorDate, investorHistoryDate, speculatorHistoryDate, insightsDate, socialDate ] = await Promise.all([
+ getFirstDateFromCollection (config, deps, config.normalUserPortfolioCollection),
+ getFirstDateFromCollection (config, deps, config.speculatorPortfolioCollection),
+ getFirstDateFromCollection (config, deps, config.normalUserHistoryCollection),
+ getFirstDateFromCollection (config, deps, config.speculatorHistoryCollection),
  getFirstDateFromSimpleCollection(config, deps, config.insightsCollectionName),
  getFirstDateFromSimpleCollection(config, deps, config.socialInsightsCollectionName)
  ]);
 
- // Helper to find the minimum (earliest) of a set of dates
- const getMinDate = (...dates) => {
- const validDates = dates.filter(Boolean); // Filter out nulls
- if (validDates.length === 0) return null;
- return new Date(Math.min(...validDates));
- };
-
+ const getMinDate = (...dates) => { const validDates = dates.filter(Boolean); if (validDates.length === 0) return null; return new Date(Math.min(...validDates)); };
  const earliestPortfolioDate = getMinDate(investorDate, speculatorDate);
- const earliestHistoryDate = getMinDate(investorHistoryDate, speculatorHistoryDate);
- const earliestInsightsDate = getMinDate(insightsDate); // Already a single date
- const earliestSocialDate = getMinDate(socialDate); // Already a single date
-
- const absoluteEarliest = getMinDate(
- earliestPortfolioDate,
- earliestHistoryDate,
- earliestInsightsDate,
- earliestSocialDate
- );
-
- // Fallback date
- const fallbackDate = new Date(config.earliestComputationDate + 'T00:00:00Z' || '2023-01-01T00:00:00Z');
-
- const result = {
- portfolio: earliestPortfolioDate || new Date('2999-12-31'), // Use a 'far future' date if null
- history: earliestHistoryDate || new Date('2999-12-31'),
- insights: earliestInsightsDate || new Date('2999-12-31'),
- social: earliestSocialDate || new Date('2999-12-31'),
- absoluteEarliest: absoluteEarliest || fallbackDate // Use fallback for the main loop
- };
-
- logger.log('INFO', 'Earliest data availability map built:', {
- portfolio: result.portfolio.toISOString().slice(0, 10),
- history: result.history.toISOString().slice(0, 10),
- insights: result.insights.toISOString().slice(0, 10),
- social: result.social.toISOString().slice(0, 10),
- absoluteEarliest: result.absoluteEarliest.toISOString().slice(0, 10)
- });
-
+ const earliestHistoryDate = getMinDate(investorHistoryDate, speculatorHistoryDate);
+ const earliestInsightsDate = getMinDate(insightsDate);
+ const earliestSocialDate = getMinDate(socialDate);
+ const absoluteEarliest = getMinDate(earliestPortfolioDate, earliestHistoryDate, earliestInsightsDate, earliestSocialDate );
+ const fallbackDate = new Date(config.earliestComputationDate + 'T00:00:00Z' || '2023-01-01T00:00:00Z');
+ const result = { portfolio: earliestPortfolioDate || new Date('2999-12-31'), history: earliestHistoryDate || new Date('2999-12-31'), insights: earliestInsightsDate || new Date('2999-12-31'), social: earliestSocialDate || new Date('2999-12-31'), absoluteEarliest: absoluteEarliest || fallbackDate };
+ logger.log('INFO', 'Earliest data availability map built:', { portfolio: result.portfolio.toISOString().slice(0, 10), history: result.history.toISOString().slice(0, 10), insights: result.insights.toISOString().slice(0, 10), social: result.social.toISOString().slice(0, 10), absoluteEarliest: result.absoluteEarliest.toISOString().slice(0, 10) });
  return result;
  }
 
- module.exports = {
- FieldValue,
- FieldPath,
- normalizeName,
- commitBatchInChunks,
- getExpectedDateStrings,
- // getFirstDateFromSourceData, // This is replaced
- getEarliestDataDates, // <-- EXPORT NEW FUNCTION
- };
+ module.exports = { FieldValue, FieldPath, normalizeName, commitBatchInChunks, getExpectedDateStrings, getEarliestDataDates };
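
One detail carried over unchanged from 1.0.151: in `new Date(config.earliestComputationDate + 'T00:00:00Z' || '2023-01-01T00:00:00Z')`, string concatenation binds tighter than `||`, so the literal fallback is unreachable — if `earliestComputationDate` is undefined, the concatenation yields the truthy string `'undefinedT00:00:00Z'` and an Invalid Date. A sketch of the presumably intended grouping:

```js
// Precedence check: `+` evaluates before `||`, so the right-hand
// literal in the first expression can never be selected.
const config = { earliestComputationDate: undefined }; // illustrative
const broken = new Date(config.earliestComputationDate + 'T00:00:00Z' || '2023-01-01T00:00:00Z');
// broken is new Date('undefinedT00:00:00Z') → Invalid Date.
// Presumably intended: apply the fallback to the config value first.
const fixed = new Date((config.earliestComputationDate || '2023-01-01') + 'T00:00:00Z');
```
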
package/functions/core/utils/firestore_utils.js
@@ -260,18 +260,9 @@ async function getSpeculatorsToUpdate(dependencies, config) {
  const { dateThreshold, gracePeriodThreshold, speculatorBlocksCollectionName } = config;
  logger.log('INFO','[Core Utils] Getting speculators to update...');
  const updates = [];
-
- // ⚠️ NEW: Collect per user first
- const userMap = new Map(); // userId -> { instruments: Set }
-
- try {
- const blocksRef = db.collection(speculatorBlocksCollectionName);
- const snapshot = await blocksRef.get();
- if (snapshot.empty) {
- logger.log('INFO','[Core Utils] No speculator blocks found.');
- return [];
- }
-
+ const userMap = new Map();
+ try { const blocksRef = db.collection(speculatorBlocksCollectionName); const snapshot = await blocksRef.get();
+ if (snapshot.empty) { logger.log('INFO','[Core Utils] No speculator blocks found.'); return []; }
  snapshot.forEach(doc => {
  const blockData = doc.data();
  for (const key in blockData) {
@@ -281,42 +272,13 @@ async function getSpeculatorsToUpdate(dependencies, config) {
  const userData = blockData[key];
  const lastVerified = userData.lastVerified?.toDate ? userData.lastVerified.toDate() : new Date(0);
  const lastHeld = userData.lastHeldSpeculatorAsset?.toDate ? userData.lastHeldSpeculatorAsset.toDate() : new Date(0);
-
  if (lastVerified < dateThreshold && lastHeld > gracePeriodThreshold) {
- if (!userMap.has(userId)) {
- userMap.set(userId, new Set());
- }
- if (userData.instruments && Array.isArray(userData.instruments)) {
- userData.instruments.forEach(id => userMap.get(userId).add(id));
- }
- }
- }
- });
-
- // ⚠️ NEW: Return one task per user with ALL instruments
- for (const [userId, instrumentSet] of userMap) {
- updates.push({
- userId,
- instruments: Array.from(instrumentSet) // ⚠️ Array of all instruments
- });
- }
-
+ if (!userMap.has(userId)) { userMap.set(userId, new Set()); }
+ if (userData.instruments && Array.isArray(userData.instruments)) { userData.instruments.forEach(id => userMap.get(userId).add(id)); } } } });
+ for (const [userId, instrumentSet] of userMap) { updates.push({ userId, instruments: Array.from(instrumentSet) }); }
  logger.log('INFO',`[Core Utils] Found ${updates.length} speculator users to update (covering ${[...userMap.values()].reduce((sum, set) => sum + set.size, 0)} total instruments).`);
  return updates;
- } catch (error) {
- logger.log('ERROR','[Core Utils] Error getting speculators to update', { errorMessage: error.message });
- throw error;
- }
+ } catch (error) { logger.log('ERROR','[Core Utils] Error getting speculators to update', { errorMessage: error.message }); throw error; }
  }
 
- module.exports = {
- getLatestNormalUserPortfolios,
- resetProxyLocks,
- getBlockCapacities,
- getExclusionIds,
- getPrioritizedSpeculators,
- clearCollection,
- batchWriteShardedIds,
- getNormalUsersToUpdate,
- getSpeculatorsToUpdate,
- };
+ module.exports = { getLatestNormalUserPortfolios, resetProxyLocks, getBlockCapacities, getExclusionIds, getPrioritizedSpeculators, clearCollection, batchWriteShardedIds, getNormalUsersToUpdate, getSpeculatorsToUpdate, };
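
`getSpeculatorsToUpdate` (reflowed here, logic unchanged) aggregates stale block entries into a `Map` of `Set`s so each user produces a single task covering every instrument seen across all blocks. A self-contained sketch of that de-duplication step; the input rows are illustrative:

```js
// Per-user aggregation with Map<userId, Set<instrumentId>>, mirroring
// getSpeculatorsToUpdate above. Input rows are illustrative.
function groupInstrumentsByUser(rows) {
  const userMap = new Map();
  for (const { userId, instruments } of rows) {
    if (!userMap.has(userId)) userMap.set(userId, new Set());
    for (const id of instruments) userMap.get(userId).add(id); // Set dedupes repeats
  }
  // One task per user with the union of their instruments.
  return [...userMap].map(([userId, set]) => ({ userId, instruments: [...set] }));
}

// groupInstrumentsByUser([
//   { userId: 'u1', instruments: [1, 2] },
//   { userId: 'u1', instruments: [2, 3] },
// ]) → [{ userId: 'u1', instruments: [1, 2, 3] }]
```
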