bulltrackers-module 1.0.132 → 1.0.134

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -7,50 +7,50 @@
  * This file contains the high-level "manual" of steps. The "how-to" logic
  * is extracted into 'computation_system_utils.js'.
  * --- MODIFIED: To use getEarliestDataDates and pass the date map to the orchestrator helpers. ---
+ * --- MODIFIED: To run date processing in parallel batches. ---
+ * --- MODIFIED: To fetch ALL existing results to enable incremental (skip) logic. ---
  */
- const { groupByPass, checkRootDataAvailability, fetchDependenciesForPass, filterCalculations, runStandardComputationPass, runMetaComputationPass } = require('./orchestration_helpers.js');
+
+ // --- MODIFIED: Renamed fetchDependenciesForPass to fetchExistingResults ---
+ const { groupByPass, checkRootDataAvailability, fetchExistingResults, filterCalculations, runStandardComputationPass, runMetaComputationPass } = require('./orchestration_helpers.js');
  // --- MODIFIED: Import getEarliestDataDates ---
  const { getExpectedDateStrings, getEarliestDataDates } = require('../utils/utils.js');

+ // --- NEW: Parallel processing batch size ---
+ const PARALLEL_BATCH_SIZE = 7; // Process a week at a time
+
  async function runComputationPass(config, dependencies, computationManifest) {
  const { logger } = dependencies;
  const passToRun = String(config.COMPUTATION_PASS_TO_RUN); if (!passToRun) return logger.log('ERROR', '[PassRunner] No pass defined. Aborting.');
  logger.log('INFO', `🚀 Starting PASS ${passToRun}...`);
-
  const yesterday = new Date(); yesterday.setUTCDate(yesterday.getUTCDate()-1);
  const endDateUTC = new Date(Date.UTC(yesterday.getUTCFullYear(), yesterday.getUTCMonth(), yesterday.getUTCDate()));
-
- // --- MODIFIED: Call new date function ---
  const earliestDates = await getEarliestDataDates(config, dependencies);
- const firstDate = earliestDates.absoluteEarliest; // Use the absolute earliest for the loop
- // --- END MODIFICATION ---
-
+ const firstDate = earliestDates.absoluteEarliest;
  const startDateUTC = firstDate ? new Date(Date.UTC(firstDate.getUTCFullYear(), firstDate.getUTCMonth(), firstDate.getUTCDate())) : new Date(config.earliestComputationDate+'T00:00:00Z');
  const allExpectedDates = getExpectedDateStrings(startDateUTC, endDateUTC);
-
  const passes = groupByPass(computationManifest);
  const calcsInThisPass = passes[passToRun] || []; if (!calcsInThisPass.length) return logger.log('WARN', `[PassRunner] No calcs for Pass ${passToRun}. Exiting.`);
-
  const standardCalcs = calcsInThisPass.filter(c => c.type==='standard');
  const metaCalcs = calcsInThisPass.filter(c => c.type==='meta');
-
- for (const dateStr of allExpectedDates) {
+ const processDate = async (dateStr) => {
  const dateToProcess = new Date(dateStr+'T00:00:00Z');
  try {
- // --- MODIFIED: Pass earliestDates map to checkRootDataAvailability ---
- const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates); if (!rootData) continue;
- // --- END MODIFICATION ---
-
- const fetchedDeps = await fetchDependenciesForPass(dateStr, calcsInThisPass, computationManifest, config, dependencies);
- const { standardCalcsToRun, metaCalcsToRun } = filterCalculations(standardCalcs, metaCalcs, rootData.status, fetchedDeps, passToRun, dateStr, logger);
+ const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
+ if (!rootData) { logger.log('WARN', `[PassRunner] Skipping ${dateStr} for Pass ${passToRun}: No root data.`);return;}
+ const existingResults = await fetchExistingResults(dateStr, calcsInThisPass, computationManifest, config, dependencies);
+ const { standardCalcsToRun, metaCalcsToRun } = filterCalculations(standardCalcs, metaCalcs, rootData.status, existingResults, passToRun, dateStr, logger);
+ if (standardCalcsToRun.length === 0 && metaCalcsToRun.length === 0) {logger.log('INFO', `[PassRunner] All calcs for ${dateStr} Pass ${passToRun} are already complete. Skipping.`);return;}
  if (standardCalcsToRun.length) await runStandardComputationPass(dateToProcess, standardCalcsToRun, `Pass ${passToRun} (Standard)`, config, dependencies, rootData);
- if (metaCalcsToRun.length) await runMetaComputationPass(dateToProcess, metaCalcsToRun, `Pass ${passToRun} (Meta)`, config, dependencies, fetchedDeps, rootData);
- logger.log('SUCCESS', `[PassRunner] Completed Pass ${passToRun} for ${dateStr}.`);
- } catch (err) {
- logger.log('ERROR', `[PassRunner] FAILED Pass ${passToRun} for ${dateStr}`, { errorMessage: err.message, stack: err.stack });
- }
+ if (metaCalcsToRun.length) await runMetaComputationPass(dateToProcess, metaCalcsToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, rootData);
+ } catch (err) {logger.log('ERROR', `[PassRunner] FAILED Pass ${passToRun} for ${dateStr}`, { errorMessage: err.message, stack: err.stack });}
+ };
+ logger.log('INFO', `[PassRunner] Processing ${allExpectedDates.length} total dates in batches of ${PARALLEL_BATCH_SIZE}...`);
+ for (let i = 0; i < allExpectedDates.length; i += PARALLEL_BATCH_SIZE) {
+ const batch = allExpectedDates.slice(i, i + PARALLEL_BATCH_SIZE);
+ logger.log('INFO', `[PassRunner] Processing batch ${Math.floor(i / PARALLEL_BATCH_SIZE) + 1}/${Math.ceil(allExpectedDates.length / PARALLEL_BATCH_SIZE)} (Dates: ${batch[0]}...${batch[batch.length-1]})`);
+ await Promise.all(batch.map(dateStr => processDate(dateStr)));
  }
-
  logger.log('INFO', `[PassRunner] Pass ${passToRun} orchestration finished.`);
  }

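Note: the pass runner now replaces its sequential per-date loop with fixed-size parallel batches. The pattern boils down to the sketch below; runInBatches and worker are illustrative names, not part of the package:

    // Minimal sketch of the batching pattern, assuming a per-item async worker
    // (processDate above plays that role in the real code).
    const PARALLEL_BATCH_SIZE = 7; // mirrors the constant added in the diff

    async function runInBatches(items, worker, batchSize = PARALLEL_BATCH_SIZE) {
      for (let i = 0; i < items.length; i += batchSize) {
        const batch = items.slice(i, i + batchSize);
        // Items within a batch run concurrently; batches run one after another,
        // so at most `batchSize` dates are in flight at any time.
        await Promise.all(batch.map(item => worker(item)));
      }
    }

Because processDate catches its own errors, a failed date cannot reject the whole batch; with a worker that can throw, Promise.allSettled would be the safer choice.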
@@ -1,5 +1,6 @@
  const { FieldPath } = require('@google-cloud/firestore');
- const { getPortfolioPartRefs, loadFullDayMap, loadDataByRefs, loadDailyInsights, loadDailySocialPostInsights, getHistoryPartRefs } = require('../utils/data_loader.js');
+ // --- MODIFIED: Import streamPortfolioData ---
+ const { getPortfolioPartRefs, loadFullDayMap, loadDataByRefs, loadDailyInsights, loadDailySocialPostInsights, getHistoryPartRefs, streamPortfolioData } = require('../utils/data_loader.js');
  const { normalizeName, commitBatchInChunks } = require('../utils/utils.js');

  /** Stage 1: Group manifest by pass number */
@@ -11,8 +12,7 @@ function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[ca
  function checkRootDependencies(calcManifest, rootDataStatus) {
  const missing = [];
  if (!calcManifest.rootDataDependencies || !calcManifest.rootDataDependencies.length) {
- return { canRun: true, missing };
- }
+ return { canRun: true, missing };}
  for (const dep of calcManifest.rootDataDependencies) {
  if (dep === 'portfolio' && !rootDataStatus.hasPortfolio) missing.push('portfolio');
  else if (dep === 'insights' && !rootDataStatus.hasInsights) missing.push('insights');
@@ -28,81 +28,38 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
  async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
  const { logger } = dependencies;
  logger.log('INFO', `[PassRunner] Checking root data for ${dateStr}...`);
-
  const dateToProcess = new Date(dateStr + 'T00:00:00Z');
  let portfolioRefs = [], insightsData = null, socialData = null, historyRefs = [];
  let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false;
-
  try {
  const tasks = [];
-
- if (dateToProcess >= earliestDates.portfolio) {
- tasks.push(
- getPortfolioPartRefs(config, dependencies, dateStr).then(res => {
- portfolioRefs = res;
- hasPortfolio = !!(res?.length);
- })
- );
- }
-
+ if (dateToProcess >= earliestDates.portfolio)
+ {tasks.push(getPortfolioPartRefs(config, dependencies, dateStr).then(res => {portfolioRefs = res;hasPortfolio = !!(res?.length);}));}
  if (dateToProcess >= earliestDates.insights) {
- tasks.push(
- loadDailyInsights(config, dependencies, dateStr).then(res => {
- insightsData = res;
- hasInsights = !!res;
- })
- );
- }
-
+ tasks.push(loadDailyInsights(config, dependencies, dateStr).then(res => {insightsData = res;hasInsights = !!res;}));}
  if (dateToProcess >= earliestDates.social) {
- tasks.push(
- loadDailySocialPostInsights(config, dependencies, dateStr).then(res => {
- socialData = res;
- hasSocial = !!res;
- })
- );
- }
-
+ tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(res => {socialData = res;hasSocial = !!res;}));}
  if (dateToProcess >= earliestDates.history) {
- tasks.push(
- getHistoryPartRefs(config, dependencies, dateStr).then(res => {
- historyRefs = res;
- hasHistory = !!(res?.length);
- })
- );
- }
-
+ tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(res => {historyRefs = res;hasHistory = !!(res?.length);}));}
  await Promise.all(tasks);
-
- if (!(hasPortfolio || hasInsights || hasSocial || hasHistory)) {
- logger.log('WARN', `[PassRunner] No root data for ${dateStr}.`);
- return null;
- }
-
- return {
- portfolioRefs,
- insightsData,
- socialData,
- historyRefs,
- status: { hasPortfolio, hasInsights, hasSocial, hasHistory }
- };
-
- } catch (err) {
- logger.log('ERROR', `[PassRunner] Error checking data for ${dateStr}`, { errorMessage: err.message });
- return null;
- }
+ if (!(hasPortfolio || hasInsights || hasSocial || hasHistory)) {logger.log('WARN', `[PassRunner] No root data for ${dateStr}.`); return null;}
+ return {portfolioRefs, insightsData,socialData,historyRefs,status: { hasPortfolio, hasInsights, hasSocial, hasHistory }};
+ } catch (err) { logger.log('ERROR', `[PassRunner] Error checking data for ${dateStr}`, { errorMessage: err.message }); return null; }
  }



- /** Stage 4: Fetch computed dependencies from Firestore */
- async function fetchDependenciesForPass(dateStr, calcsInPass, fullManifest, config, { db, logger }) {
+ /** --- MODIFIED: Stage 4: Fetch ALL existing computed results for the pass ---
+ * This function now checks for *all* calcs in the pass, not just meta-dependencies,
+ * to enable skipping completed work.
+ */
+ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db, logger }) {
  const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
- const requiredDeps = new Set(calcsInPass.filter(c => c.type==='meta'&&c.dependencies).flatMap(c => c.dependencies.map(normalizeName)));
- if (!requiredDeps.size) return {};
- logger.log('INFO', `[PassRunner] Fetching ${requiredDeps.size} deps for ${dateStr}...`);
+ const allCalcsInPass = new Set(calcsInPass.map(c => normalizeName(c.name)));
+ if (!allCalcsInPass.size) return {};
+ logger.log('INFO', `[PassRunner] Checking for ${allCalcsInPass.size} existing results for ${dateStr}...`);
  const docRefs = [], depNames = [];
- for (const calcName of requiredDeps) {
+ for (const calcName of allCalcsInPass) {
  const calcManifest = manifestMap.get(calcName);
  if (!calcManifest) { logger.log('ERROR', `[PassRunner] Missing manifest for ${calcName}`); continue; }
  docRefs.push(db.collection(config.resultsCollection).doc(dateStr).collection(config.resultsSubcollection).doc(calcManifest.category||'unknown').collection(config.computationsSubcollection).doc(calcName));
@@ -113,43 +70,28 @@ async function fetchDependenciesForPass(dateStr, calcsInPass, fullManifest, conf
  return fetched;
  }

- /** * --- MODIFIED: Added detailed logging ---
- * Stage 5: Filter calculations based on available root data and dependencies
+ /** --- MODIFIED: Stage 5: Filter calculations to skip completed work ---
  */
- function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, fetchedDeps, passToRun, dateStr, logger) {
+ function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, existingResults, passToRun, dateStr, logger) {
  const skipped = new Set();
-
  // Filter Standard Calcs
  const standardCalcsToRun = standardCalcs.filter(c => {
+ if (existingResults[c.name]) {logger.log('TRACE', `[Pass ${passToRun}] Skipping ${c.name} for ${dateStr}. Result already exists.`);return false;}
  const { canRun, missing } = checkRootDependencies(c, rootDataStatus);
  if (canRun) return true;
-
  logger.log('INFO', `[Pass ${passToRun}] Skipping ${c.name} for ${dateStr}. Missing root data: [${missing.join(', ')}]`);
  skipped.add(c.name);
  return false;
  });
-
  // Filter Meta Calcs
  const metaCalcsToRun = metaCalcs.filter(c => {
+ if (existingResults[c.name]) {logger.log('TRACE', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Result already exists.`);skipped.add(c.name);return false;}
  // 1. Check root data
  const { canRun, missing: missingRoot } = checkRootDependencies(c, rootDataStatus);
- if (!canRun) {
- logger.log('INFO', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Missing root data: [${missingRoot.join(', ')}]`);
- skipped.add(c.name);
- return false;
- }
-
+ if (!canRun) {logger.log('INFO', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Missing root data: [${missingRoot.join(', ')}]`);skipped.add(c.name);return false;}
  // 2. Check computed dependencies
- const missingDeps = (c.dependencies || []).map(normalizeName).filter(d => !fetchedDeps[d]);
- if (missingDeps.length > 0) {
- logger.log('WARN', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Missing computed deps: [${missingDeps.join(', ')}]`);
- skipped.add(c.name);
- return false;
- }
-
- return true; // All checks passed
- });
-
+ const missingDeps = (c.dependencies || []).map(normalizeName).filter(d => !existingResults[d]);
+ if (missingDeps.length > 0) {logger.log('WARN', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Missing computed deps: [${missingDeps.join(', ')}]`);skipped.add(c.name);return false;} return true;});
  return { standardCalcsToRun, metaCalcsToRun };
  }

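Note: together, fetchExistingResults and filterCalculations give the pass its incremental behaviour: any calculation whose result document is already stored for that date is dropped before any work is scheduled. A reduced sketch of that skip filter (names are illustrative, not the module's exports):

    // `existingResults` is assumed to be the map produced by fetchExistingResults,
    // keyed by normalized calculation name.
    function selectPendingCalcs(calcs, existingResults, log = console.log) {
      return calcs.filter(calc => {
        if (existingResults[calc.name]) {
          log(`Skipping ${calc.name}: result already exists.`);
          return false; // computed on an earlier run, nothing to redo
        }
        return true;
      });
    }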
@@ -165,54 +107,60 @@ async function loadHistoricalData(date, calcs, config, deps, rootData) { const u
  await Promise.all(tasks); return updated;
  }

- /** Stage 8: Stream and process data for standard calculations */
- async function streamAndProcess(dateStr, todayRefs, state, passName, config, deps, rootData) { const { logger, calculationUtils } = deps;
+ /** * --- MODIFIED: Stage 8: Stream and process data for standard calculations ---
+ * This function now uses an async generator to stream portfolio data
+ * instead of loading it all into memory.
+ */
+ async function streamAndProcess(dateStr, state, passName, config, deps, rootData) {
+ const { logger, calculationUtils } = deps;
  const { todayInsights, yesterdayInsights, todaySocialPostInsights, yesterdaySocialPostInsights, todayHistoryData, yesterdayHistoryData, yesterdayPortfolios } = rootData;
- const batchSize=config.partRefBatchSize||10; let firstUser=true;
+ let firstUser=true;
  const context={instrumentMappings:(await calculationUtils.loadInstrumentMappings()).instrumentToTicker, sectorMapping:(await calculationUtils.loadInstrumentMappings()).instrumentToSector, todayDateStr:dateStr, dependencies:deps, config};
- for(let i=0;i<todayRefs.length;i+=batchSize){ const batch=todayRefs.slice(i,i+batchSize); const chunk=await loadDataByRefs(config,deps,batch); for(const uid in chunk){ const p=chunk[uid]; if(!p) continue; const userType=p.PublicPositions?'speculator':'normal'; context.userType=userType; for(const name in state){ const calc=state[name]; if(!calc||typeof calc.process!=='function') continue; const cat=calc.manifest.category, isSocialOrInsights=cat==='socialPosts'||cat==='insights', isHistorical=calc.manifest.isHistorical, isSpec=cat==='speculators'; let args=[p,null,uid,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,todayHistoryData,yesterdayHistoryData]; if(isSocialOrInsights&&!firstUser) continue; if(isHistorical){ const pY=yesterdayPortfolios[uid]; if(!pY) continue; args=[p,pY,uid,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,todayHistoryData,yesterdayHistoryData]; } if((userType==='normal'&&isSpec)||(userType==='speculator'&&!isSpec&&name!=='users-processed')) continue; try{ await Promise.resolve(calc.process(...args)); } catch(e){logger.log('WARN',`Process error ${name} for ${uid}`,{err:e.message});} } firstUser=false; } }
+ for await (const chunk of streamPortfolioData(config, deps, dateStr)) {
+ for(const uid in chunk){ const p=chunk[uid]; if(!p) continue;
+ const userType=p.PublicPositions?'speculator':'normal';
+ context.userType=userType;
+ for(const name in state){
+ const calc=state[name]; if(!calc||typeof calc.process!=='function') continue;
+ const cat=calc.manifest.category, isSocialOrInsights=cat==='socialPosts'||cat==='insights', isHistorical=calc.manifest.isHistorical, isSpec=cat==='speculators';
+ let args=[p,null,uid,context,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,todayHistoryData,yesterdayHistoryData];
+ if(isSocialOrInsights&&!firstUser) continue;
+ if(isHistorical){ const pY=yesterdayPortfolios[uid]; if(!pY) continue; args=[p,pY,uid,context,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,todayHistoryData,yesterdayHistoryData]; }
+ if((userType==='normal'&&isSpec)||(userType==='speculator'&&!isSpec&&name!=='users-processed')) continue;
+ try{ await Promise.resolve(calc.process(...args)); } catch(e){logger.log('WARN',`Process error ${name} for ${uid}`,{err:e.message});} }
+ firstUser=false;
+ }
+ }
  }

  /** Stage 9: Run standard computations */
  async function runStandardComputationPass(date, calcs, passName, config, deps, rootData) {
  const dStr = date.toISOString().slice(0, 10), logger = deps.logger;
+ if (calcs.length === 0) return;
  logger.log('INFO', `[${passName}] Running ${dStr} with ${calcs.length} calcs.`);
  const fullRoot = await loadHistoricalData(date, calcs, config, deps, rootData);
  const state = initializeCalculators(calcs, logger);
- await streamAndProcess(dStr, fullRoot.portfolioRefs, state, passName, config, deps, fullRoot);
-
- // --- START: FULL COMMIT LOGIC ---
+ await streamAndProcess(dStr, state, passName, config, deps, fullRoot);
  let success = 0;
  const standardWrites = [];
- const shardedWrites = {}; // Format: { [collectionName]: { [docId]: data } }
-
+ const shardedWrites = {};
  for (const name in state) {
  const calc = state[name];
  if (!calc || typeof calc.getResult !== 'function') continue;
-
- try {
- const result = await Promise.resolve(calc.getResult());
+ try {const result = await Promise.resolve(calc.getResult());
  if (result && Object.keys(result).length > 0) {
-
- // Separate sharded data from standard data
  const standardResult = {};
  for (const key in result) {
  if (key.startsWith('sharded_')) {
- // This is sharded data, e.g., sharded_user_profitability
- // The value is expected to be: { "collection_name": { "doc1": {...}, "doc2": {...} } }
  const shardedData = result[key];
  for (const collectionName in shardedData) {
  if (!shardedWrites[collectionName]) shardedWrites[collectionName] = {};
- // Merge doc data (e.g., combining data for "user_profitability_shard_1")
  Object.assign(shardedWrites[collectionName], shardedData[collectionName]);
  }
  } else {
- // This is a standard, single-doc result
  standardResult[key] = result[key];
  }
  }
-
- // Add standard result to the batch
  if (Object.keys(standardResult).length > 0) {
  const docRef = deps.db.collection(config.resultsCollection).doc(dStr)
  .collection(config.resultsSubcollection).doc(calc.manifest.category)
@@ -220,80 +168,70 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r

  standardWrites.push({ ref: docRef, data: standardResult });
  }
- success++; // Mark as success even if only sharded data was produced
+ success++;
  }
  } catch (e) {
  logger.log('ERROR', `getResult failed ${name} for ${dStr}`, { err: e.message, stack: e.stack });
  }
  }
-
- // Commit standard (non-sharded) writes in chunks
  if (standardWrites.length > 0) {
  await commitBatchInChunks(config, deps, standardWrites, `${passName} Standard ${dStr}`);
  }
-
- // Commit all sharded writes
- for (const collectionName in shardedWrites) {
- const docs = shardedWrites[collectionName];
+ for (const docPath in shardedWrites) { // 'docPath' is the key, e.g., 'user_profile_history_shard_0' or 'social_.../history'
+ const docData = shardedWrites[docPath]; // 'docData' is the object to write, e.g., { profiles: ... }
  const shardedDocWrites = [];
- for (const docId in docs) {
- // This assumes docId is the full path for sharded docs, or just the doc ID
- // Based on user_profitability_tracker, it's just the doc ID.
- const docRef = deps.db.collection(collectionName).doc(docId);
- shardedDocWrites.push({ ref: docRef, data: docs[docId] });
+ let docRef;
+ if (docPath.includes('/')) {
+ // Path is absolute, e.g., 'social_prediction_regime_state/history'
+ docRef = deps.db.doc(docPath);
+ } else {
+ // Path is a docId, e.g., 'user_profile_history_shard_0'
+ // We must infer its collection from config.
+ const collection = (docPath.startsWith('user_profile_history'))
+ ? config.shardedUserProfileCollection // 'user_profile_history'
+ : config.shardedProfitabilityCollection; // Fallback
+ docRef = deps.db.collection(collection).doc(docPath);
+ }
+ // Ensure data is a valid object before pushing
+ if (docData && typeof docData === 'object' && !Array.isArray(docData)) {
+ shardedDocWrites.push({ ref: docRef, data: docData });
+ } else {
+ logger.log('ERROR', `[${passName}] Invalid sharded document data for ${docPath}. Not an object.`, { data: docData });
  }
+ // Commit this single document write (or small batch if logic is changed later)
  if (shardedDocWrites.length > 0) {
- await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${collectionName} ${dStr}`);
+ // Use the docPath in the operation name for clearer logging
+ await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${docPath} ${dStr}`);
  }
  }
- // --- END: FULL COMMIT LOGIC ---
-
  logger.log(success === calcs.length ? 'SUCCESS' : 'WARN', `[${passName}] Completed ${dStr}. Success: ${success}/${calcs.length}`);
  }

  /** Stage 10: Run meta computations */
  async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, rootData) {
  const dStr = date.toISOString().slice(0, 10), logger = deps.logger;
+ if (calcs.length === 0) return;
  logger.log('INFO', `[${passName}] Running ${dStr} with ${calcs.length} calcs.`);
-
- // --- START: FULL COMMIT LOGIC ---
  let success = 0;
  const standardWrites = [];
- const shardedWrites = {}; // Format: { [collectionName]: { [docId]: data } }
-
+ const shardedWrites = {};
  for (const mCalc of calcs) {
  const name = normalizeName(mCalc.name), Cl = mCalc.class;
- if (typeof Cl !== 'function') {
- logger.log('ERROR', `Invalid class ${name}`);
- continue;
- }
+ if (typeof Cl !== 'function') {logger.log('ERROR', `Invalid class ${name}`);continue;}
  const inst = new Cl();
  try {
- // Pass the full dependencies object to process()
  const result = await Promise.resolve(inst.process(dStr, { ...deps, rootData }, config, fetchedDeps));
-
- if (result && Object.keys(result).length > 0) {
-
- // Separate sharded data from standard data
- const standardResult = {};
- for (const key in result) {
- if (key.startsWith('sharded_')) {
- const shardedData = result[key];
- for (const collectionName in shardedData) {
- if (!shardedWrites[collectionName]) shardedWrites[collectionName] = {};
- Object.assign(shardedWrites[collectionName], shardedData[collectionName]);
- }
+ if (result && Object.keys(result).length > 0) {const standardResult = {}; for (const key in result) {
+ if (key.startsWith('sharded_')) {const shardedData = result[key];for (const collectionName in shardedData)
+ {if (!shardedWrites[collectionName]) shardedWrites[collectionName] = {};Object.assign(shardedWrites[collectionName], shardedData[collectionName]);}
  } else {
  standardResult[key] = result[key];
  }
  }
-
- // Add standard result to the batch
  if (Object.keys(standardResult).length > 0) {
  const docRef = deps.db.collection(config.resultsCollection).doc(dStr)
  .collection(config.resultsSubcollection).doc(mCalc.category)
  .collection(config.computationsSubcollection).doc(name);
-
  standardWrites.push({ ref: docRef, data: standardResult });
  }
  success++;
@@ -302,32 +240,23 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
  logger.log('ERROR', `Meta-calc failed ${name} for ${dStr}`, { err: e.message, stack: e.stack });
  }
  }
-
- // Commit standard (non-sharded) writes in chunks
  if (standardWrites.length > 0) {
  await commitBatchInChunks(config, deps, standardWrites, `${passName} Meta ${dStr}`);
  }
-
- // Commit all sharded writes
  for (const collectionName in shardedWrites) {
  const docs = shardedWrites[collectionName];
  const shardedDocWrites = [];
  for (const docId in docs) {
- // Special case for stateful meta-calcs that write to a specific path
  const docRef = docId.includes('/')
- ? deps.db.doc(docId) // docId is a full path
- : deps.db.collection(collectionName).doc(docId); // docId is just an ID
-
+ ? deps.db.doc(docId)
+ : deps.db.collection(collectionName).doc(docId);
  shardedDocWrites.push({ ref: docRef, data: docs[docId] });
  }
  if (shardedDocWrites.length > 0) {
  await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${collectionName} ${dStr}`);
  }
- }
- // --- END: FULL COMMIT LOGIC ---
-
+ }
  logger.log(success === calcs.length ? 'SUCCESS' : 'WARN', `[${passName}] Completed ${dStr}. Success: ${success}/${calcs.length}`);
  }

-
- module.exports = { groupByPass, checkRootDataAvailability, fetchDependenciesForPass, filterCalculations, runStandardComputationPass, runMetaComputationPass };
+ module.exports = { groupByPass, checkRootDataAvailability, fetchExistingResults, filterCalculations, runStandardComputationPass, runMetaComputationPass };
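Note: the standard pass's commit step now distinguishes two shapes of sharded-write keys: an absolute document path (contains '/') and a bare shard document ID whose collection is inferred from config. A condensed sketch of that resolution, assuming a @google-cloud/firestore db handle and the config property names shown in the diff:

    // Returns a DocumentReference for one sharded-write key.
    function resolveShardedDocRef(db, config, docPath) {
      if (docPath.includes('/')) {
        // Absolute two-segment path, e.g. 'social_prediction_regime_state/history'.
        return db.doc(docPath);
      }
      // Bare doc ID, e.g. 'user_profile_history_shard_0': infer its collection.
      const collection = docPath.startsWith('user_profile_history')
        ? config.shardedUserProfileCollection
        : config.shardedProfitabilityCollection; // fallback, as in the diff
      return db.collection(collection).doc(docPath);
    }

db.doc() only accepts paths with an even number of segments, which is why the bare shard IDs have to go through db.collection(...).doc(...) instead.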
@@ -1,6 +1,7 @@
  /**
  * @fileoverview Data loader sub-pipes for the Computation System.
  * REFACTORED: Now stateless and receive dependencies.
+ * --- NEW: Added streamPortfolioData async generator ---
  */

  // <<< FIX: REMOVED all top-level 'require' and 'dependencies' lines >>>
@@ -165,6 +166,39 @@ async function getHistoryPartRefs(config, deps, dateString) {
  return allPartRefs;
  }

+ // --- NEW: Stage 7: Stream portfolio data in chunks ---
+ /**
+ * Streams portfolio data in chunks for a given date.
+ * This is an async generator.
+ * @param {object} config - The computation system configuration object.
+ * @param {object} deps - Contains db, logger, calculationUtils.
+ * @param {string} dateString - The date in YYYY-MM-DD format.
+ */
+ async function* streamPortfolioData(config, deps, dateString) {
+ const { logger } = deps;
+ const refs = await getPortfolioPartRefs(config, deps, dateString);
+ if (refs.length === 0) {
+ logger.log('WARN', `[streamPortfolioData] No portfolio refs found for ${dateString}. Stream is empty.`);
+ return;
+ }
+
+ // Use the same batch size as loadDataByRefs for consistency
+ const batchSize = config.partRefBatchSize || 50;
+
+ logger.log('INFO', `[streamPortfolioData] Streaming ${refs.length} portfolio parts in chunks of ${batchSize}...`);
+
+ for (let i = 0; i < refs.length; i += batchSize) {
+ const batchRefs = refs.slice(i, i + batchSize);
+ // Load one chunk of data
+ const data = await loadDataByRefs(config, deps, batchRefs);
+ // Yield it to the consumer, then release it from memory
+ yield data;
+ }
+ logger.log('INFO', `[streamPortfolioData] Finished streaming for ${dateString}.`);
+ }
+ // --- END: Stage 7 ---
+
+
  module.exports = {
  getPortfolioPartRefs,
  loadDataByRefs,
@@ -172,4 +206,5 @@ module.exports = {
  loadDailyInsights,
  loadDailySocialPostInsights,
  getHistoryPartRefs,
+ streamPortfolioData, // <-- EXPORT NEW FUNCTION
  };
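Note: streamPortfolioData is an async generator, so callers such as streamAndProcess pull one chunk of portfolio parts at a time with for await...of rather than holding the whole day's data in memory. A stripped-down sketch of the same contract (loadChunk stands in for loadDataByRefs(config, deps, batchRefs)):

    async function* streamInChunks(refs, loadChunk, batchSize = 50) {
      for (let i = 0; i < refs.length; i += batchSize) {
        // Only one chunk is materialized at a time; once the consumer moves on,
        // the previous chunk becomes eligible for garbage collection.
        yield await loadChunk(refs.slice(i, i + batchSize));
      }
    }

    // Hypothetical consumer, mirroring the for-await loop in streamAndProcess:
    // for await (const chunk of streamInChunks(partRefs, batch => loadDataByRefs(config, deps, batch))) {
    //   // process one chunk of portfolios
    // }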
@@ -17,28 +17,17 @@ const { FieldValue, FieldPath } = require('@google-cloud/firestore');
  async function getLatestNormalUserPortfolios(dependencies, config) {
  const { db, logger } = dependencies;
  const { normalUserCollectionName, snapshotsSubCollectionName, partsSubCollectionName } = config;
-
  logger.log('INFO', `[Core Utils] Fetching latest portfolios from ${normalUserCollectionName}...`);
  const allPortfolios = {};
-
  const yesterday = new Date();
  yesterday.setDate(yesterday.getDate() - 1);
  const dateString = yesterday.toISOString().slice(0, 10);
-
  const blockDocs = await db.collection(normalUserCollectionName).listDocuments();
-
  for (const blockDoc of blockDocs) {
  const snapshotDocRef = blockDoc.collection(snapshotsSubCollectionName).doc(dateString);
  const partsCollectionRef = snapshotDocRef.collection(partsSubCollectionName);
  const partsSnapshot = await partsCollectionRef.get();
-
- if (!partsSnapshot.empty) {
- partsSnapshot.forEach(partDoc => {
- Object.assign(allPortfolios, partDoc.data());
- });
- }
- }
-
+ if (!partsSnapshot.empty) {partsSnapshot.forEach(partDoc => {Object.assign(allPortfolios, partDoc.data());});}}
  logger.log('INFO', `[Core Utils] Found ${Object.keys(allPortfolios).length} user portfolios from ${dateString}'s snapshot.`);
  return allPortfolios;
  }
@@ -53,27 +42,14 @@ async function getLatestNormalUserPortfolios(dependencies, config) {
  async function resetProxyLocks(dependencies, config) {
  const { db, logger } = dependencies;
  const { proxyPerformanceDocPath } = config;
-
  logger.log('INFO','[Core Utils] Resetting proxy locks...');
- try {
- if (!proxyPerformanceDocPath) {
- logger.log('ERROR', '[Core Utils] Missing proxyPerformanceDocPath. Cannot reset locks.');
- return;
- }
-
+ try {if (!proxyPerformanceDocPath) {logger.log('ERROR', '[Core Utils] Missing proxyPerformanceDocPath. Cannot reset locks.');return;}
  const perfDocRef = db.doc(proxyPerformanceDocPath);
-
- await perfDocRef.update({
- locks: FieldValue.delete()
- });
-
+ await perfDocRef.update({locks: FieldValue.delete() });
  logger.log('INFO',`[Core Utils] Proxy locks map reset in ${proxyPerformanceDocPath}.`);
  } catch (error) {
- if (error.code === 5) { // 5 = NOT_FOUND
- logger.log('WARN',`[Core Utils] Proxy performance doc or 'locks' field not found at ${proxyPerformanceDocPath}. No locks to reset.`);
- } else {
- logger.log('ERROR','[Core Utils] Error resetting proxy locks', { errorMessage: error.message, path: proxyPerformanceDocPath });
- }
+ if (error.code === 5) { logger.log('WARN',`[Core Utils] Proxy performance doc or 'locks' field not found at ${proxyPerformanceDocPath}. No locks to reset.`);
+ } else {logger.log('ERROR','[Core Utils] Error resetting proxy locks', { errorMessage: error.message, path: proxyPerformanceDocPath });}
  }
  }

@@ -89,29 +65,15 @@ async function resetProxyLocks(dependencies, config) {
  async function getBlockCapacities(dependencies, config, userType) {
  const { db, logger } = dependencies;
  const { speculatorBlockCountsDocPath, normalBlockCountsDocPath } = config;
-
  logger.log('INFO',`[Core Utils] Getting block capacities for ${userType}...`);
  try {
- const docPath = userType === 'speculator'
- ? speculatorBlockCountsDocPath
- : normalBlockCountsDocPath;
-
- if (!docPath) {
- logger.log('ERROR', `[Core Utils] Missing block counts document path for ${userType}.`);
- return {};
- }
-
+ const docPath = userType === 'speculator'? speculatorBlockCountsDocPath : normalBlockCountsDocPath;
+ if (!docPath) {logger.log('ERROR', `[Core Utils] Missing block counts document path for ${userType}.`);return {};}
  const countsRef = db.doc(docPath);
  const countsDoc = await countsRef.get();
- if (!countsDoc.exists) {
- logger.log('WARN',`[Core Utils] Block counts document not found for ${userType} at ${docPath}. Returning empty.`);
- return {};
- }
+ if (!countsDoc.exists) {logger.log('WARN',`[Core Utils] Block counts document not found for ${userType} at ${docPath}. Returning empty.`);return {};}
  return countsDoc.data().counts || {};
- } catch (error) {
- logger.log('ERROR',`[Core Utils] Error getting block capacities for ${userType}`, { errorMessage: error.message });
- throw error;
- }
+ } catch (error) {logger.log('ERROR',`[Core Utils] Error getting block capacities for ${userType}`, { errorMessage: error.message });throw error;}
  }

  /**
@@ -127,63 +89,30 @@ async function getBlockCapacities(dependencies, config, userType) {
  */
  async function getExclusionIds(dependencies, config, userType) {
  const { db, logger } = dependencies;
- const {
- specBlocksCollection,
- pendingSpecCollection,
- invalidSpecCollection,
- existingNormalUserIds // Get the pre-fetched IDs
- } = config;
-
+ const { specBlocksCollection, pendingSpecCollection, invalidSpecCollection,existingNormalUserIds } = config;
  logger.log('INFO',`[Core Utils] Getting exclusion IDs for ${userType} discovery...`);
-
  const exclusionIds = new Set(existingNormalUserIds);
  logger.log('TRACE', `[Core Utils] Loaded ${exclusionIds.size} existing normal user IDs for exclusion.`);
-
  const promises = [];
-
  try {
  // 1. Existing Speculators
  const specBlocksRef = db.collection(specBlocksCollection);
- promises.push(specBlocksRef.get().then(snapshot => {
- snapshot.forEach(doc => {
- const users = doc.data().users || {};
- Object.keys(users).forEach(key => exclusionIds.add(key.split('.')[1]));
- });
- logger.log('TRACE','[Core Utils] Fetched existing speculator IDs for exclusion.');
- }));
-
+ promises.push(specBlocksRef.get().then(snapshot => {snapshot.forEach(doc => {const users = doc.data().users || {}; Object.keys(users).forEach(key => exclusionIds.add(key.split('.')[1])); });
+ logger.log('TRACE','[Core Utils] Fetched existing speculator IDs for exclusion.');}));
  // 2. Pending Speculators
- if (userType === 'speculator') {
- const pendingRef = db.collection(pendingSpecCollection);
- promises.push(pendingRef.get().then(snapshot => {
- snapshot.forEach(doc => {
- Object.keys(doc.data().users || {}).forEach(cid => exclusionIds.add(cid));
- });
- logger.log('TRACE','[Core Utils] Fetched pending speculator IDs for exclusion.');
- }));
- }
-
+ if (userType === 'speculator') {const pendingRef = db.collection(pendingSpecCollection);
+ promises.push(pendingRef.get().then(snapshot => {snapshot.forEach(doc => {Object.keys(doc.data().users || {}).forEach(cid => exclusionIds.add(cid));});
+ logger.log('TRACE','[Core Utils] Fetched pending speculator IDs for exclusion.');})); }
  // 3. Invalid Speculators
  const invalidRef = db.collection(invalidSpecCollection);
- promises.push(invalidRef.get().then(snapshot => {
- snapshot.forEach(doc => {
- const data = doc.data();
- if (data) {
- Object.keys(data.users || {}).forEach(cid => exclusionIds.add(cid));
- }
- });
- logger.log('TRACE','[Core Utils] Fetched invalid speculator IDs for exclusion.');
- }));
-
+ promises.push(invalidRef.get().then(snapshot => { snapshot.forEach(doc => {const data = doc.data();if (data) {Object.keys(data.users || {}).forEach(cid => exclusionIds.add(cid));}});
+ logger.log('TRACE','[Core Utils] Fetched invalid speculator IDs for exclusion.');}));
  await Promise.all(promises);
  logger.log('INFO',`[Core Utils] Total unique exclusion IDs found: ${exclusionIds.size}`);
  return exclusionIds;
-
- } catch (error)
- {
+ } catch (error){
  logger.log('ERROR','[Core Utils] Error getting exclusion IDs', { errorMessage: error.message });
- throw error;
- }
+ throw error; }
  }

  /**
@@ -198,20 +127,12 @@ async function getPrioritizedSpeculators(dependencies, exclusionIds, speculatorI
  const { logger } = dependencies;
  logger.log('INFO','[Core Utils] Scanning normal users for prioritized speculators...');
  const candidates = new Set();
-
  try {
  for (const userId in latestNormalPortfolios) {
  if (exclusionIds.has(userId)) continue;
-
  const portfolio = latestNormalPortfolios[userId];
- const holdsSpeculatorAsset = portfolio?.AggregatedPositions?.some(p =>
- speculatorInstrumentSet.has(p.InstrumentID)
- );
-
- if (holdsSpeculatorAsset) {
- candidates.add(userId);
- }
- }
+ const holdsSpeculatorAsset = portfolio?.AggregatedPositions?.some(p => speculatorInstrumentSet.has(p.InstrumentID));
+ if (holdsSpeculatorAsset) { candidates.add(userId);}}
  logger.log('INFO',`[Core Utils] Found ${candidates.size} potential prioritized speculators.`);
  return Array.from(candidates);
  } catch (error) {
@@ -235,23 +156,15 @@ async function clearCollection(dependencies, collectionPath, maxBatchSize = 400)
  let query = collectionRef.limit(maxBatchSize);
  let snapshot;
  let deleteCount = 0;
-
  while (true) {
  snapshot = await query.get();
- if (snapshot.size === 0) {
- break;
- }
-
+ if (snapshot.size === 0) { break; }
  const batch = db.batch();
  snapshot.docs.forEach(doc => batch.delete(doc.ref));
  await batch.commit();
  deleteCount += snapshot.size;
-
- if (snapshot.size < maxBatchSize) {
- break;
- }
- query = collectionRef.limit(maxBatchSize);
- }
+ if (snapshot.size < maxBatchSize) {break; }
+ query = collectionRef.limit(maxBatchSize); }
  logger.log('SUCCESS', `[Core Utils] Scorched earth complete. Deleted ${deleteCount} documents from ${collectionPath}.`);
  } catch (error) {
  logger.log('ERROR', `[Core Utils] Error clearing collection ${collectionPath}`, { errorMessage: error.message });
@@ -272,17 +185,9 @@ async function clearCollection(dependencies, collectionPath, maxBatchSize = 400)
  */
  async function batchWriteShardedIds(dependencies, config) {
  const { db, logger } = dependencies;
- const {
- collectionPath,
- items,
- timestamp,
- maxFieldsPerDoc,
- maxWritesPerBatch
- } = config;
-
+ const {collectionPath, items, timestamp, maxFieldsPerDoc, maxWritesPerBatch } = config;
  logger.log('INFO', `[Core Utils] Batch writing ${items.length} IDs to sharded path: ${collectionPath} (max ${maxFieldsPerDoc}/doc, ${maxWritesPerBatch} docs/batch)...`);
  if (items.length === 0) return;
-
  try {
  const collectionRef = db.collection(collectionPath);
  let batch = db.batch();
@@ -290,27 +195,19 @@ async function batchWriteShardedIds(dependencies, config) {
  let currentFieldCount = 0;
  let batchWriteCount = 0;
  let docCounter = 0;
-
  for (let i = 0; i < items.length; i++) {
  const userId = items[i];
  const key = `users.${userId}`;
  currentDocFields[key] = timestamp;
  currentFieldCount++;
-
  if (currentFieldCount >= maxFieldsPerDoc || i === items.length - 1) {
  const docRef = collectionRef.doc(`pending_${docCounter}_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`);
  batch.set(docRef, currentDocFields);
  batchWriteCount++;
-
  currentDocFields = {};
  currentFieldCount = 0;
  docCounter++;
-
- if (batchWriteCount >= maxWritesPerBatch || i === items.length - 1) {
- await batch.commit();
- batch = db.batch();
- batchWriteCount = 0;
- }
+ if (batchWriteCount >= maxWritesPerBatch || i === items.length - 1) { await batch.commit(); batch = db.batch(); batchWriteCount = 0; }
  }
  }
  logger.log('SUCCESS', `[Core Utils] Sharded write complete for ${collectionPath}. Created ${docCounter} documents.`);
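Note: batchWriteShardedIds uses two levels of chunking: IDs are packed into documents capped at maxFieldsPerDoc fields, and the Firestore WriteBatch is committed every maxWritesPerBatch document writes so a single batch never grows too large. A simplified sketch of that shape (document naming and default limits here are illustrative):

    async function writeIdsSharded(db, collectionPath, ids, timestamp,
                                   maxFieldsPerDoc = 450, maxWritesPerBatch = 400) {
      const collectionRef = db.collection(collectionPath);
      let batch = db.batch();
      let fields = {}, fieldCount = 0, docsInBatch = 0, docCounter = 0;
      for (let i = 0; i < ids.length; i++) {
        fields[`users.${ids[i]}`] = timestamp; // same key shape the diff uses
        fieldCount++;
        const isLast = i === ids.length - 1;
        if (fieldCount >= maxFieldsPerDoc || isLast) {
          batch.set(collectionRef.doc(`pending_${docCounter++}`), fields);
          fields = {}; fieldCount = 0; docsInBatch++;
          if (docsInBatch >= maxWritesPerBatch || isLast) {
            await batch.commit(); // flush before the batch grows too large
            batch = db.batch();
            docsInBatch = 0;
          }
        }
      }
    }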
@@ -336,24 +233,11 @@ async function getNormalUsersToUpdate(dependencies, config) {
  logger.log('INFO','[Core Utils] Getting normal users to update...');
  const usersToUpdate = [];
  try {
- const timestampDocRef = db.collection(normalUserCollectionName)
- .doc('timestamps')
- .collection('users')
- .doc(normalUserTimestampsDocId);
+ const timestampDocRef = db.collection(normalUserCollectionName) .doc('timestamps') .collection('users') .doc(normalUserTimestampsDocId);
  const timestampDoc = await timestampDocRef.get();
-
- if (!timestampDoc.exists) {
- logger.log('WARN',`[Core Utils] Normal user timestamp document not found at ${timestampDocRef.path}.`);
- return [];
- }
-
+ if (!timestampDoc.exists) { logger.log('WARN',`[Core Utils] Normal user timestamp document not found at ${timestampDocRef.path}.`); return []; }
  const timestamps = timestampDoc.data().users || {};
- for (const userId in timestamps) {
- const lastProcessed = timestamps[userId]?.toDate ? timestamps[userId].toDate() : new Date(0);
- if (lastProcessed < dateThreshold) {
- usersToUpdate.push(userId);
- }
- }
+ for (const userId in timestamps) { const lastProcessed = timestamps[userId]?.toDate ? timestamps[userId].toDate() : new Date(0); if (lastProcessed < dateThreshold) { usersToUpdate.push(userId); }}
  logger.log('INFO',`[Core Utils] Found ${usersToUpdate.length} normal users to update.`);
  return usersToUpdate;
  } catch (error) {
@@ -374,44 +258,26 @@ async function getNormalUsersToUpdate(dependencies, config) {
  async function getSpeculatorsToUpdate(dependencies, config) {
  const { db, logger } = dependencies;
  const { dateThreshold, gracePeriodThreshold, speculatorBlocksCollectionName } = config;
-
  logger.log('INFO','[Core Utils] Getting speculators to update...');
  const updates = [];
  try {
  const blocksRef = db.collection(speculatorBlocksCollectionName);
  const snapshot = await blocksRef.get();
-
- if (snapshot.empty) {
- logger.log('INFO','[Core Utils] No speculator blocks found.');
- return [];
- }
-
+ if (snapshot.empty) { logger.log('INFO','[Core Utils] No speculator blocks found.'); return []; }
  snapshot.forEach(doc => {
  const blockData = doc.data();
-
- // Iterate over the document's top-level keys
  for (const key in blockData) {
- // Filter for keys that match the 'users.CID' format
  if (!key.startsWith('users.')) continue;
-
  const userId = key.split('.')[1];
- if (!userId) continue; // Safety check
-
- const userData = blockData[key]; // Get the user's map
-
+ if (!userId) continue;
+ const userData = blockData[key];
  const lastVerified = userData.lastVerified?.toDate ? userData.lastVerified.toDate() : new Date(0);
  const lastHeld = userData.lastHeldSpeculatorAsset?.toDate ? userData.lastHeldSpeculatorAsset.toDate() : new Date(0);
-
- if (lastVerified < dateThreshold && lastHeld > gracePeriodThreshold) {
- if (userData.instruments && Array.isArray(userData.instruments)) {
- userData.instruments.forEach(instrumentId => {
- updates.push({ userId, instrumentId });
- });
+ if (lastVerified < dateThreshold && lastHeld > gracePeriodThreshold) { if (userData.instruments && Array.isArray(userData.instruments)) { userData.instruments.forEach(instrumentId => { updates.push({ userId, instrumentId }); });
  }
  }
  }
  });
-
  logger.log('INFO',`[Core Utils] Found ${updates.length} speculator user/instrument pairs to update.`);
  return updates;
  } catch (error) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bulltrackers-module",
- "version": "1.0.132",
+ "version": "1.0.134",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [