bulltrackers-module 1.0.133 → 1.0.134

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -23,69 +23,34 @@ async function runComputationPass(config, dependencies, computationManifest) {
  const { logger } = dependencies;
  const passToRun = String(config.COMPUTATION_PASS_TO_RUN); if (!passToRun) return logger.log('ERROR', '[PassRunner] No pass defined. Aborting.');
  logger.log('INFO', `🚀 Starting PASS ${passToRun}...`);
-
  const yesterday = new Date(); yesterday.setUTCDate(yesterday.getUTCDate()-1);
  const endDateUTC = new Date(Date.UTC(yesterday.getUTCFullYear(), yesterday.getUTCMonth(), yesterday.getUTCDate()));
-
- // --- MODIFIED: Call new date function ---
  const earliestDates = await getEarliestDataDates(config, dependencies);
- const firstDate = earliestDates.absoluteEarliest; // Use the absolute earliest for the loop
- // --- END MODIFICATION ---
-
+ const firstDate = earliestDates.absoluteEarliest;
  const startDateUTC = firstDate ? new Date(Date.UTC(firstDate.getUTCFullYear(), firstDate.getUTCMonth(), firstDate.getUTCDate())) : new Date(config.earliestComputationDate+'T00:00:00Z');
  const allExpectedDates = getExpectedDateStrings(startDateUTC, endDateUTC);
-
  const passes = groupByPass(computationManifest);
  const calcsInThisPass = passes[passToRun] || []; if (!calcsInThisPass.length) return logger.log('WARN', `[PassRunner] No calcs for Pass ${passToRun}. Exiting.`);
-
  const standardCalcs = calcsInThisPass.filter(c => c.type==='standard');
  const metaCalcs = calcsInThisPass.filter(c => c.type==='meta');
-
- // --- NEW: Helper function to process a single date ---
  const processDate = async (dateStr) => {
  const dateToProcess = new Date(dateStr+'T00:00:00Z');
  try {
- // --- MODIFIED: Pass earliestDates map to checkRootDataAvailability ---
  const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
- if (!rootData) {
- logger.log('WARN', `[PassRunner] Skipping ${dateStr} for Pass ${passToRun}: No root data.`);
- return; // Use 'return' instead of 'continue' for a mapped function
- }
- // --- END MODIFICATION ---
-
- // --- MODIFIED: Fetch ALL existing results for this pass, not just dependencies ---
+ if (!rootData) { logger.log('WARN', `[PassRunner] Skipping ${dateStr} for Pass ${passToRun}: No root data.`);return;}
  const existingResults = await fetchExistingResults(dateStr, calcsInThisPass, computationManifest, config, dependencies);
-
- // --- MODIFIED: Pass existingResults to filterCalculations ---
  const { standardCalcsToRun, metaCalcsToRun } = filterCalculations(standardCalcs, metaCalcs, rootData.status, existingResults, passToRun, dateStr, logger);
-
- if (standardCalcsToRun.length === 0 && metaCalcsToRun.length === 0) {
- logger.log('INFO', `[PassRunner] All calcs for ${dateStr} Pass ${passToRun} are already complete. Skipping.`);
- return;
- }
-
- // --- MODIFIED: Pass existingResults (as fetchedDeps) to meta pass ---
+ if (standardCalcsToRun.length === 0 && metaCalcsToRun.length === 0) {logger.log('INFO', `[PassRunner] All calcs for ${dateStr} Pass ${passToRun} are already complete. Skipping.`);return;}
  if (standardCalcsToRun.length) await runStandardComputationPass(dateToProcess, standardCalcsToRun, `Pass ${passToRun} (Standard)`, config, dependencies, rootData);
  if (metaCalcsToRun.length) await runMetaComputationPass(dateToProcess, metaCalcsToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, rootData);
-
- // Note: We no longer log "Completed" here, as the sub-functions do.
- // logger.log('SUCCESS', `[PassRunner] Completed Pass ${passToRun} for ${dateStr}.`);
-
- } catch (err) {
- logger.log('ERROR', `[PassRunner] FAILED Pass ${passToRun} for ${dateStr}`, { errorMessage: err.message, stack: err.stack });
- }
+ } catch (err) {logger.log('ERROR', `[PassRunner] FAILED Pass ${passToRun} for ${dateStr}`, { errorMessage: err.message, stack: err.stack });}
  };
- // --- END: Helper function ---
-
- // --- NEW: Replace sequential loop with parallel batch loop ---
  logger.log('INFO', `[PassRunner] Processing ${allExpectedDates.length} total dates in batches of ${PARALLEL_BATCH_SIZE}...`);
  for (let i = 0; i < allExpectedDates.length; i += PARALLEL_BATCH_SIZE) {
  const batch = allExpectedDates.slice(i, i + PARALLEL_BATCH_SIZE);
  logger.log('INFO', `[PassRunner] Processing batch ${Math.floor(i / PARALLEL_BATCH_SIZE) + 1}/${Math.ceil(allExpectedDates.length / PARALLEL_BATCH_SIZE)} (Dates: ${batch[0]}...${batch[batch.length-1]})`);
  await Promise.all(batch.map(dateStr => processDate(dateStr)));
  }
- // --- END: Parallel batch loop ---
-
  logger.log('INFO', `[PassRunner] Pass ${passToRun} orchestration finished.`);
  }
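Note: the change above replaces a sequential per-date loop with fixed-size concurrent batches. A minimal sketch of the pattern in isolation (the helper name is illustrative; in the package the pieces are PARALLEL_BATCH_SIZE and processDate):

    // Run `work` over `items` in concurrent batches of `size`.
    // Each batch settles fully before the next one starts, which bounds
    // peak concurrency (and memory) at `size` in-flight dates.
    async function processInBatches(items, size, work) {
      for (let i = 0; i < items.length; i += size) {
        await Promise.all(items.slice(i, i + size).map(x => work(x)));
      }
    }
    // e.g. await processInBatches(allExpectedDates, PARALLEL_BATCH_SIZE, processDate);

Because processDate catches its own errors, one failed date logs and resolves instead of rejecting the whole Promise.all batch.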
 
@@ -55,21 +55,16 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
  */
  async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db, logger }) {
  const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
-
- // --- MODIFIED: Check all calcs in this pass, not just meta-deps ---
  const allCalcsInPass = new Set(calcsInPass.map(c => normalizeName(c.name)));
  if (!allCalcsInPass.size) return {};
-
  logger.log('INFO', `[PassRunner] Checking for ${allCalcsInPass.size} existing results for ${dateStr}...`);
  const docRefs = [], depNames = [];
-
  for (const calcName of allCalcsInPass) {
  const calcManifest = manifestMap.get(calcName);
  if (!calcManifest) { logger.log('ERROR', `[PassRunner] Missing manifest for ${calcName}`); continue; }
  docRefs.push(db.collection(config.resultsCollection).doc(dateStr).collection(config.resultsSubcollection).doc(calcManifest.category||'unknown').collection(config.computationsSubcollection).doc(calcName));
  depNames.push(calcName);
  }
-
  const fetched = {};
  if (docRefs.length) (await db.getAll(...docRefs)).forEach((doc,i)=>fetched[depNames[i]]=doc.exists?doc.data():null);
  return fetched;
@@ -79,51 +74,24 @@ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config,
  */
  function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, existingResults, passToRun, dateStr, logger) {
  const skipped = new Set();
-
  // Filter Standard Calcs
  const standardCalcsToRun = standardCalcs.filter(c => {
- // --- NEW: Skip if result already exists ---
- if (existingResults[c.name]) {
- logger.log('TRACE', `[Pass ${passToRun}] Skipping ${c.name} for ${dateStr}. Result already exists.`);
- return false;
- }
-
+ if (existingResults[c.name]) {logger.log('TRACE', `[Pass ${passToRun}] Skipping ${c.name} for ${dateStr}. Result already exists.`);return false;}
  const { canRun, missing } = checkRootDependencies(c, rootDataStatus);
  if (canRun) return true;
-
  logger.log('INFO', `[Pass ${passToRun}] Skipping ${c.name} for ${dateStr}. Missing root data: [${missing.join(', ')}]`);
  skipped.add(c.name);
  return false;
  });
-
  // Filter Meta Calcs
  const metaCalcsToRun = metaCalcs.filter(c => {
- // --- NEW: Skip if result already exists ---
- if (existingResults[c.name]) {
- logger.log('TRACE', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Result already exists.`);
- skipped.add(c.name);
- return false;
- }
-
+ if (existingResults[c.name]) {logger.log('TRACE', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Result already exists.`);skipped.add(c.name);return false;}
  // 1. Check root data
  const { canRun, missing: missingRoot } = checkRootDependencies(c, rootDataStatus);
- if (!canRun) {
- logger.log('INFO', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Missing root data: [${missingRoot.join(', ')}]`);
- skipped.add(c.name);
- return false;
- }
-
+ if (!canRun) {logger.log('INFO', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Missing root data: [${missingRoot.join(', ')}]`);skipped.add(c.name);return false;}
  // 2. Check computed dependencies
- // --- MODIFIED: Check existingResults (was fetchedDeps) ---
  const missingDeps = (c.dependencies || []).map(normalizeName).filter(d => !existingResults[d]);
- if (missingDeps.length > 0) {
- logger.log('WARN', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Missing computed deps: [${missingDeps.join(', ')}]`);
- skipped.add(c.name);
- return false;
- }
- return true; // All checks passed
- });
-
+ if (missingDeps.length > 0) {logger.log('WARN', `[Pass ${passToRun} Meta] Skipping ${c.name} for ${dateStr}. Missing computed deps: [${missingDeps.join(', ')}]`);skipped.add(c.name);return false;} return true;});
  return { standardCalcsToRun, metaCalcsToRun };
  }
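Note: both filters now share the same idempotency gate (skip when existingResults already holds a result), and the meta filter layers two dependency gates on top. Reduced to a sketch (metaCalcCanRun is a hypothetical name; checkRootDependencies and normalizeName are the module's own helpers):

    // Gates for one meta calc on one date, in order:
    // 1. result already written  -> skip (makes re-runs idempotent)
    // 2. root data missing       -> skip
    // 3. any computed dep absent -> skip; otherwise run
    function metaCalcCanRun(calc, rootDataStatus, existingResults) {
      if (existingResults[calc.name]) return false;
      if (!checkRootDependencies(calc, rootDataStatus).canRun) return false;
      return (calc.dependencies || []).map(normalizeName)
        .every(d => Boolean(existingResults[d]));
    }

This works because fetchExistingResults stores null for documents that do not exist, so a plain falsy check covers both never-fetched and fetched-but-missing dependencies.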
 
@@ -146,91 +114,53 @@ async function loadHistoricalData(date, calcs, config, deps, rootData) { const u
  async function streamAndProcess(dateStr, state, passName, config, deps, rootData) {
  const { logger, calculationUtils } = deps;
  const { todayInsights, yesterdayInsights, todaySocialPostInsights, yesterdaySocialPostInsights, todayHistoryData, yesterdayHistoryData, yesterdayPortfolios } = rootData;
-
  let firstUser=true;
- // Load mappings once for all chunks
  const context={instrumentMappings:(await calculationUtils.loadInstrumentMappings()).instrumentToTicker, sectorMapping:(await calculationUtils.loadInstrumentMappings()).instrumentToSector, todayDateStr:dateStr, dependencies:deps, config};
-
- // Use the new async generator stream
  for await (const chunk of streamPortfolioData(config, deps, dateStr)) {
- // This inner loop processes one chunk of users at a time
- for(const uid in chunk){
- const p=chunk[uid]; if(!p) continue;
+ for(const uid in chunk){ const p=chunk[uid]; if(!p) continue;
  const userType=p.PublicPositions?'speculator':'normal';
  context.userType=userType;
  for(const name in state){
  const calc=state[name]; if(!calc||typeof calc.process!=='function') continue;
  const cat=calc.manifest.category, isSocialOrInsights=cat==='socialPosts'||cat==='insights', isHistorical=calc.manifest.isHistorical, isSpec=cat==='speculators';
- // --- MODIFIED: Argument list now includes context ---
  let args=[p,null,uid,context,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,todayHistoryData,yesterdayHistoryData];
-
  if(isSocialOrInsights&&!firstUser) continue;
-
- if(isHistorical){
- const pY=yesterdayPortfolios[uid]; if(!pY) continue;
- args=[p,pY,uid,context,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,todayHistoryData,yesterdayHistoryData];
- }
-
+ if(isHistorical){ const pY=yesterdayPortfolios[uid]; if(!pY) continue; args=[p,pY,uid,context,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,todayHistoryData,yesterdayHistoryData]; }
  if((userType==='normal'&&isSpec)||(userType==='speculator'&&!isSpec&&name!=='users-processed')) continue;
-
- try{
- // Pass context to all process functions.
- await Promise.resolve(calc.process(...args));
- } catch(e){logger.log('WARN',`Process error ${name} for ${uid}`,{err:e.message});}
- }
+ try{ await Promise.resolve(calc.process(...args)); } catch(e){logger.log('WARN',`Process error ${name} for ${uid}`,{err:e.message});} }
  firstUser=false;
  }
- // Memory from 'chunk' is released here before the next iteration
  }
  }
- // --- END MODIFICATION ---
 
  /** Stage 9: Run standard computations */
  async function runStandardComputationPass(date, calcs, passName, config, deps, rootData) {
  const dStr = date.toISOString().slice(0, 10), logger = deps.logger;
- // --- MODIFIED: Log only if there are calcs to run ---
  if (calcs.length === 0) return;
  logger.log('INFO', `[${passName}] Running ${dStr} with ${calcs.length} calcs.`);
-
- // --- MODIFIED: We no longer need portfolioRefs in fullRoot, as streamAndProcess handles it.
  const fullRoot = await loadHistoricalData(date, calcs, config, deps, rootData);
  const state = initializeCalculators(calcs, logger);
-
- // --- MODIFIED: Call to streamAndProcess no longer passes portfolioRefs ---
  await streamAndProcess(dStr, state, passName, config, deps, fullRoot);
-
- // --- START: FULL COMMIT LOGIC ---
  let success = 0;
  const standardWrites = [];
- const shardedWrites = {}; // Format: { [collectionName]: { [docId]: data } }
-
+ const shardedWrites = {};
  for (const name in state) {
  const calc = state[name];
  if (!calc || typeof calc.getResult !== 'function') continue;
-
- try {
- const result = await Promise.resolve(calc.getResult());
+ try {const result = await Promise.resolve(calc.getResult());
  if (result && Object.keys(result).length > 0) {
-
- // Separate sharded data from standard data
  const standardResult = {};
  for (const key in result) {
  if (key.startsWith('sharded_')) {
- // This is sharded data, e.g., sharded_user_profitability
- // The value is expected to be: { "collection_name": { "doc1": {...}, "doc2": {...} } }
  const shardedData = result[key];
  for (const collectionName in shardedData) {
  if (!shardedWrites[collectionName]) shardedWrites[collectionName] = {};
- // Merge doc data (e.g., combining data for "user_profitability_shard_1")
  Object.assign(shardedWrites[collectionName], shardedData[collectionName]);
  }
  } else {
- // This is a standard, single-doc result
  standardResult[key] = result[key];
  }
  }
-
- // Add standard result to the batch
  if (Object.keys(standardResult).length > 0) {
  const docRef = deps.db.collection(config.resultsCollection).doc(dStr)
  .collection(config.resultsSubcollection).doc(calc.manifest.category)
@@ -238,82 +168,70 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
 
  standardWrites.push({ ref: docRef, data: standardResult });
  }
- success++; // Mark as success even if only sharded data was produced
+ success++;
  }
  } catch (e) {
  logger.log('ERROR', `getResult failed ${name} for ${dStr}`, { err: e.message, stack: e.stack });
  }
  }
-
- // Commit standard (non-sharded) writes in chunks
  if (standardWrites.length > 0) {
  await commitBatchInChunks(config, deps, standardWrites, `${passName} Standard ${dStr}`);
  }
-
- // Commit all sharded writes
- for (const collectionName in shardedWrites) {
- const docs = shardedWrites[collectionName];
+ for (const docPath in shardedWrites) { // 'docPath' is the key, e.g., 'user_profile_history_shard_0' or 'social_.../history'
+ const docData = shardedWrites[docPath]; // 'docData' is the object to write, e.g., { profiles: ... }
  const shardedDocWrites = [];
- for (const docId in docs) {
- // This assumes docId is the full path for sharded docs, or just the doc ID
- // Based on user_profitability_tracker, it's just the doc ID.
- const docRef = deps.db.collection(collectionName).doc(docId);
- shardedDocWrites.push({ ref: docRef, data: docs[docId] });
+ let docRef;
+ if (docPath.includes('/')) {
+ // Path is absolute, e.g., 'social_prediction_regime_state/history'
+ docRef = deps.db.doc(docPath);
+ } else {
+ // Path is a docId, e.g., 'user_profile_history_shard_0'
+ // We must infer its collection from config.
+ const collection = (docPath.startsWith('user_profile_history'))
+ ? config.shardedUserProfileCollection // 'user_profile_history'
+ : config.shardedProfitabilityCollection; // Fallback
+ docRef = deps.db.collection(collection).doc(docPath);
+ }
+ // Ensure data is a valid object before pushing
+ if (docData && typeof docData === 'object' && !Array.isArray(docData)) {
+ shardedDocWrites.push({ ref: docRef, data: docData });
+ } else {
+ logger.log('ERROR', `[${passName}] Invalid sharded document data for ${docPath}. Not an object.`, { data: docData });
  }
+ // Commit this single document write (or small batch if logic is changed later)
  if (shardedDocWrites.length > 0) {
- await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${collectionName} ${dStr}`);
+ // Use the docPath in the operation name for clearer logging
+ await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${docPath} ${dStr}`);
  }
  }
- // --- END: FULL COMMIT LOGIC ---
-
  logger.log(success === calcs.length ? 'SUCCESS' : 'WARN', `[${passName}] Completed ${dStr}. Success: ${success}/${calcs.length}`);
  }
 
  /** Stage 10: Run meta computations */
  async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, rootData) {
  const dStr = date.toISOString().slice(0, 10), logger = deps.logger;
- // --- MODIFIED: Log only if there are calcs to run ---
  if (calcs.length === 0) return;
  logger.log('INFO', `[${passName}] Running ${dStr} with ${calcs.length} calcs.`);
-
- // --- START: FULL COMMIT LOGIC ---
  let success = 0;
  const standardWrites = [];
- const shardedWrites = {}; // Format: { [collectionName]: { [docId]: data } }
-
+ const shardedWrites = {};
  for (const mCalc of calcs) {
  const name = normalizeName(mCalc.name), Cl = mCalc.class;
- if (typeof Cl !== 'function') {
- logger.log('ERROR', `Invalid class ${name}`);
- continue;
- }
+ if (typeof Cl !== 'function') {logger.log('ERROR', `Invalid class ${name}`);continue;}
  const inst = new Cl();
  try {
- // Pass the full dependencies object to process()
  const result = await Promise.resolve(inst.process(dStr, { ...deps, rootData }, config, fetchedDeps));
-
- if (result && Object.keys(result).length > 0) {
-
- // Separate sharded data from standard data
- const standardResult = {};
- for (const key in result) {
- if (key.startsWith('sharded_')) {
- const shardedData = result[key];
- for (const collectionName in shardedData) {
- if (!shardedWrites[collectionName]) shardedWrites[collectionName] = {};
- Object.assign(shardedWrites[collectionName], shardedData[collectionName]);
- }
+ if (result && Object.keys(result).length > 0) {const standardResult = {}; for (const key in result) {
+ if (key.startsWith('sharded_')) {const shardedData = result[key];for (const collectionName in shardedData)
+ {if (!shardedWrites[collectionName]) shardedWrites[collectionName] = {};Object.assign(shardedWrites[collectionName], shardedData[collectionName]);}
  } else {
  standardResult[key] = result[key];
  }
  }
-
- // Add standard result to the batch
  if (Object.keys(standardResult).length > 0) {
  const docRef = deps.db.collection(config.resultsCollection).doc(dStr)
  .collection(config.resultsSubcollection).doc(mCalc.category)
  .collection(config.computationsSubcollection).doc(name);
-
  standardWrites.push({ ref: docRef, data: standardResult });
  }
  success++;
@@ -322,33 +240,23 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
  logger.log('ERROR', `Meta-calc failed ${name} for ${dStr}`, { err: e.message, stack: e.stack });
  }
  }
-
- // Commit standard (non-sharded) writes in chunks
  if (standardWrites.length > 0) {
  await commitBatchInChunks(config, deps, standardWrites, `${passName} Meta ${dStr}`);
  }
-
- // Commit all sharded writes
  for (const collectionName in shardedWrites) {
  const docs = shardedWrites[collectionName];
  const shardedDocWrites = [];
  for (const docId in docs) {
- // Special case for stateful meta-calcs that write to a specific path
  const docRef = docId.includes('/')
- ? deps.db.doc(docId) // docId is a full path
- : deps.db.collection(collectionName).doc(docId); // docId is just an ID
-
+ ? deps.db.doc(docId)
+ : deps.db.collection(collectionName).doc(docId);
  shardedDocWrites.push({ ref: docRef, data: docs[docId] });
  }
  if (shardedDocWrites.length > 0) {
  await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${collectionName} ${dStr}`);
  }
- }
- // --- END: FULL COMMIT LOGIC ---
-
+ }
  logger.log(success === calcs.length ? 'SUCCESS' : 'WARN', `[${passName}] Completed ${dStr}. Success: ${success}/${calcs.length}`);
  }
 
-
- // --- MODIFIED: Export new function name ---
  module.exports = { groupByPass, checkRootDataAvailability, fetchExistingResults, filterCalculations, runStandardComputationPass, runMetaComputationPass };
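Note: the commit logic in both passes hinges on the shape calculators return. A sketch of that contract as far as it can be inferred from this diff (the calc name and field values are illustrative only):

    // Keys prefixed 'sharded_' carry multi-document payloads; every other
    // key is merged into the calc's single results document.
    const exampleResult = {
      totalUsers: 1234,                                  // -> standard results doc
      sharded_user_profiles: {
        // bare doc ID: the new standard-pass code infers the collection
        // from config (user_profile_history vs. the profitability fallback)
        user_profile_history_shard_0: { profiles: {} },
        // 'collection/doc' path: written verbatim via deps.db.doc(path)
        'social_prediction_regime_state/history': { state: {} },
      },
    };

Worth noting: after this version the two passes interpret the first-level keys differently. The standard pass now treats each key under a sharded_ entry as a document path (or bare doc ID routed via config), while the meta pass keeps the older two-level nesting of collection name, then doc ID.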
@@ -17,28 +17,17 @@ const { FieldValue, FieldPath } = require('@google-cloud/firestore');
  async function getLatestNormalUserPortfolios(dependencies, config) {
  const { db, logger } = dependencies;
  const { normalUserCollectionName, snapshotsSubCollectionName, partsSubCollectionName } = config;
-
  logger.log('INFO', `[Core Utils] Fetching latest portfolios from ${normalUserCollectionName}...`);
  const allPortfolios = {};
-
  const yesterday = new Date();
  yesterday.setDate(yesterday.getDate() - 1);
  const dateString = yesterday.toISOString().slice(0, 10);
-
  const blockDocs = await db.collection(normalUserCollectionName).listDocuments();
-
  for (const blockDoc of blockDocs) {
  const snapshotDocRef = blockDoc.collection(snapshotsSubCollectionName).doc(dateString);
  const partsCollectionRef = snapshotDocRef.collection(partsSubCollectionName);
  const partsSnapshot = await partsCollectionRef.get();
-
- if (!partsSnapshot.empty) {
- partsSnapshot.forEach(partDoc => {
- Object.assign(allPortfolios, partDoc.data());
- });
- }
- }
-
+ if (!partsSnapshot.empty) {partsSnapshot.forEach(partDoc => {Object.assign(allPortfolios, partDoc.data());});}}
  logger.log('INFO', `[Core Utils] Found ${Object.keys(allPortfolios).length} user portfolios from ${dateString}'s snapshot.`);
  return allPortfolios;
  }
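Note: the document layout this reader walks can be reconstructed from the code; the concrete segment names come from config, so the ones below are placeholders:

    // {normalUserCollectionName}/{blockId}
    //   {snapshotsSubCollectionName}/{YYYY-MM-DD}
    //     {partsSubCollectionName}/{partId}
    // Each part document's fields are { [userId]: portfolio }, which is why
    // Object.assign(allPortfolios, partDoc.data()) flattens every part of
    // yesterday's snapshot into a single { userId: portfolio } map.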
@@ -53,27 +42,14 @@ async function getLatestNormalUserPortfolios(dependencies, config) {
  async function resetProxyLocks(dependencies, config) {
  const { db, logger } = dependencies;
  const { proxyPerformanceDocPath } = config;
-
  logger.log('INFO','[Core Utils] Resetting proxy locks...');
- try {
- if (!proxyPerformanceDocPath) {
- logger.log('ERROR', '[Core Utils] Missing proxyPerformanceDocPath. Cannot reset locks.');
- return;
- }
-
+ try {if (!proxyPerformanceDocPath) {logger.log('ERROR', '[Core Utils] Missing proxyPerformanceDocPath. Cannot reset locks.');return;}
  const perfDocRef = db.doc(proxyPerformanceDocPath);
-
- await perfDocRef.update({
- locks: FieldValue.delete()
- });
-
+ await perfDocRef.update({locks: FieldValue.delete() });
  logger.log('INFO',`[Core Utils] Proxy locks map reset in ${proxyPerformanceDocPath}.`);
  } catch (error) {
- if (error.code === 5) { // 5 = NOT_FOUND
- logger.log('WARN',`[Core Utils] Proxy performance doc or 'locks' field not found at ${proxyPerformanceDocPath}. No locks to reset.`);
- } else {
- logger.log('ERROR','[Core Utils] Error resetting proxy locks', { errorMessage: error.message, path: proxyPerformanceDocPath });
- }
+ if (error.code === 5) { logger.log('WARN',`[Core Utils] Proxy performance doc or 'locks' field not found at ${proxyPerformanceDocPath}. No locks to reset.`);
+ } else {logger.log('ERROR','[Core Utils] Error resetting proxy locks', { errorMessage: error.message, path: proxyPerformanceDocPath });}
  }
  }
 
@@ -89,29 +65,15 @@ async function resetProxyLocks(dependencies, config) {
  async function getBlockCapacities(dependencies, config, userType) {
  const { db, logger } = dependencies;
  const { speculatorBlockCountsDocPath, normalBlockCountsDocPath } = config;
-
  logger.log('INFO',`[Core Utils] Getting block capacities for ${userType}...`);
  try {
- const docPath = userType === 'speculator'
- ? speculatorBlockCountsDocPath
- : normalBlockCountsDocPath;
-
- if (!docPath) {
- logger.log('ERROR', `[Core Utils] Missing block counts document path for ${userType}.`);
- return {};
- }
-
+ const docPath = userType === 'speculator'? speculatorBlockCountsDocPath : normalBlockCountsDocPath;
+ if (!docPath) {logger.log('ERROR', `[Core Utils] Missing block counts document path for ${userType}.`);return {};}
  const countsRef = db.doc(docPath);
  const countsDoc = await countsRef.get();
- if (!countsDoc.exists) {
- logger.log('WARN',`[Core Utils] Block counts document not found for ${userType} at ${docPath}. Returning empty.`);
- return {};
- }
+ if (!countsDoc.exists) {logger.log('WARN',`[Core Utils] Block counts document not found for ${userType} at ${docPath}. Returning empty.`);return {};}
  return countsDoc.data().counts || {};
- } catch (error) {
- logger.log('ERROR',`[Core Utils] Error getting block capacities for ${userType}`, { errorMessage: error.message });
- throw error;
- }
+ } catch (error) {logger.log('ERROR',`[Core Utils] Error getting block capacities for ${userType}`, { errorMessage: error.message });throw error;}
  }
 
  /**
@@ -127,63 +89,30 @@ async function getBlockCapacities(dependencies, config, userType) {
  */
  async function getExclusionIds(dependencies, config, userType) {
  const { db, logger } = dependencies;
- const {
- specBlocksCollection,
- pendingSpecCollection,
- invalidSpecCollection,
- existingNormalUserIds // Get the pre-fetched IDs
- } = config;
-
+ const { specBlocksCollection, pendingSpecCollection, invalidSpecCollection,existingNormalUserIds } = config;
  logger.log('INFO',`[Core Utils] Getting exclusion IDs for ${userType} discovery...`);
-
  const exclusionIds = new Set(existingNormalUserIds);
  logger.log('TRACE', `[Core Utils] Loaded ${exclusionIds.size} existing normal user IDs for exclusion.`);
-
  const promises = [];
-
  try {
  // 1. Existing Speculators
  const specBlocksRef = db.collection(specBlocksCollection);
- promises.push(specBlocksRef.get().then(snapshot => {
- snapshot.forEach(doc => {
- const users = doc.data().users || {};
- Object.keys(users).forEach(key => exclusionIds.add(key.split('.')[1]));
- });
- logger.log('TRACE','[Core Utils] Fetched existing speculator IDs for exclusion.');
- }));
-
+ promises.push(specBlocksRef.get().then(snapshot => {snapshot.forEach(doc => {const users = doc.data().users || {}; Object.keys(users).forEach(key => exclusionIds.add(key.split('.')[1])); });
+ logger.log('TRACE','[Core Utils] Fetched existing speculator IDs for exclusion.');}));
  // 2. Pending Speculators
- if (userType === 'speculator') {
- const pendingRef = db.collection(pendingSpecCollection);
- promises.push(pendingRef.get().then(snapshot => {
- snapshot.forEach(doc => {
- Object.keys(doc.data().users || {}).forEach(cid => exclusionIds.add(cid));
- });
- logger.log('TRACE','[Core Utils] Fetched pending speculator IDs for exclusion.');
- }));
- }
-
+ if (userType === 'speculator') {const pendingRef = db.collection(pendingSpecCollection);
+ promises.push(pendingRef.get().then(snapshot => {snapshot.forEach(doc => {Object.keys(doc.data().users || {}).forEach(cid => exclusionIds.add(cid));});
+ logger.log('TRACE','[Core Utils] Fetched pending speculator IDs for exclusion.');})); }
  // 3. Invalid Speculators
  const invalidRef = db.collection(invalidSpecCollection);
- promises.push(invalidRef.get().then(snapshot => {
- snapshot.forEach(doc => {
- const data = doc.data();
- if (data) {
- Object.keys(data.users || {}).forEach(cid => exclusionIds.add(cid));
- }
- });
- logger.log('TRACE','[Core Utils] Fetched invalid speculator IDs for exclusion.');
- }));
-
+ promises.push(invalidRef.get().then(snapshot => { snapshot.forEach(doc => {const data = doc.data();if (data) {Object.keys(data.users || {}).forEach(cid => exclusionIds.add(cid));}});
+ logger.log('TRACE','[Core Utils] Fetched invalid speculator IDs for exclusion.');}));
  await Promise.all(promises);
  logger.log('INFO',`[Core Utils] Total unique exclusion IDs found: ${exclusionIds.size}`);
  return exclusionIds;
-
- } catch (error)
- {
+ } catch (error){
  logger.log('ERROR','[Core Utils] Error getting exclusion IDs', { errorMessage: error.message });
- throw error;
- }
+ throw error; }
  }
 
  /**
@@ -198,20 +127,12 @@ async function getPrioritizedSpeculators(dependencies, exclusionIds, speculatorI
  const { logger } = dependencies;
  logger.log('INFO','[Core Utils] Scanning normal users for prioritized speculators...');
  const candidates = new Set();
-
  try {
  for (const userId in latestNormalPortfolios) {
  if (exclusionIds.has(userId)) continue;
-
  const portfolio = latestNormalPortfolios[userId];
- const holdsSpeculatorAsset = portfolio?.AggregatedPositions?.some(p =>
- speculatorInstrumentSet.has(p.InstrumentID)
- );
-
- if (holdsSpeculatorAsset) {
- candidates.add(userId);
- }
- }
+ const holdsSpeculatorAsset = portfolio?.AggregatedPositions?.some(p => speculatorInstrumentSet.has(p.InstrumentID));
+ if (holdsSpeculatorAsset) { candidates.add(userId);}}
  logger.log('INFO',`[Core Utils] Found ${candidates.size} potential prioritized speculators.`);
  return Array.from(candidates);
  } catch (error) {
@@ -235,23 +156,15 @@ async function clearCollection(dependencies, collectionPath, maxBatchSize = 400)
  let query = collectionRef.limit(maxBatchSize);
  let snapshot;
  let deleteCount = 0;
-
  while (true) {
  snapshot = await query.get();
- if (snapshot.size === 0) {
- break;
- }
-
+ if (snapshot.size === 0) { break; }
  const batch = db.batch();
  snapshot.docs.forEach(doc => batch.delete(doc.ref));
  await batch.commit();
  deleteCount += snapshot.size;
-
- if (snapshot.size < maxBatchSize) {
- break;
- }
- query = collectionRef.limit(maxBatchSize);
- }
+ if (snapshot.size < maxBatchSize) {break; }
+ query = collectionRef.limit(maxBatchSize); }
  logger.log('SUCCESS', `[Core Utils] Scorched earth complete. Deleted ${deleteCount} documents from ${collectionPath}.`);
  } catch (error) {
  logger.log('ERROR', `[Core Utils] Error clearing collection ${collectionPath}`, { errorMessage: error.message });
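Note: re-issuing the same limit(maxBatchSize) query after each commit is what pages the deletion forward: every committed batch removes the documents it just read, so the next get() naturally returns the next slice. The default of 400 also stays safely under Firestore's classic 500-operation batch ceiling.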
@@ -272,17 +185,9 @@ async function clearCollection(dependencies, collectionPath, maxBatchSize = 400)
  */
  async function batchWriteShardedIds(dependencies, config) {
  const { db, logger } = dependencies;
- const {
- collectionPath,
- items,
- timestamp,
- maxFieldsPerDoc,
- maxWritesPerBatch
- } = config;
-
+ const {collectionPath, items, timestamp, maxFieldsPerDoc, maxWritesPerBatch } = config;
  logger.log('INFO', `[Core Utils] Batch writing ${items.length} IDs to sharded path: ${collectionPath} (max ${maxFieldsPerDoc}/doc, ${maxWritesPerBatch} docs/batch)...`);
  if (items.length === 0) return;
-
  try {
  const collectionRef = db.collection(collectionPath);
  let batch = db.batch();
@@ -290,27 +195,19 @@ async function batchWriteShardedIds(dependencies, config) {
  let currentFieldCount = 0;
  let batchWriteCount = 0;
  let docCounter = 0;
-
  for (let i = 0; i < items.length; i++) {
  const userId = items[i];
  const key = `users.${userId}`;
  currentDocFields[key] = timestamp;
  currentFieldCount++;
-
  if (currentFieldCount >= maxFieldsPerDoc || i === items.length - 1) {
  const docRef = collectionRef.doc(`pending_${docCounter}_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`);
  batch.set(docRef, currentDocFields);
  batchWriteCount++;
-
  currentDocFields = {};
  currentFieldCount = 0;
  docCounter++;
-
- if (batchWriteCount >= maxWritesPerBatch || i === items.length - 1) {
- await batch.commit();
- batch = db.batch();
- batchWriteCount = 0;
- }
+ if (batchWriteCount >= maxWritesPerBatch || i === items.length - 1) { await batch.commit(); batch = db.batch(); batchWriteCount = 0; }
  }
  }
  logger.log('SUCCESS', `[Core Utils] Sharded write complete for ${collectionPath}. Created ${docCounter} documents.`);
336
233
  logger.log('INFO','[Core Utils] Getting normal users to update...');
337
234
  const usersToUpdate = [];
338
235
  try {
339
- const timestampDocRef = db.collection(normalUserCollectionName)
340
- .doc('timestamps')
341
- .collection('users')
342
- .doc(normalUserTimestampsDocId);
236
+ const timestampDocRef = db.collection(normalUserCollectionName) .doc('timestamps') .collection('users') .doc(normalUserTimestampsDocId);
343
237
  const timestampDoc = await timestampDocRef.get();
344
-
345
- if (!timestampDoc.exists) {
346
- logger.log('WARN',`[Core Utils] Normal user timestamp document not found at ${timestampDocRef.path}.`);
347
- return [];
348
- }
349
-
238
+ if (!timestampDoc.exists) { logger.log('WARN',`[Core Utils] Normal user timestamp document not found at ${timestampDocRef.path}.`); return []; }
350
239
  const timestamps = timestampDoc.data().users || {};
351
- for (const userId in timestamps) {
352
- const lastProcessed = timestamps[userId]?.toDate ? timestamps[userId].toDate() : new Date(0);
353
- if (lastProcessed < dateThreshold) {
354
- usersToUpdate.push(userId);
355
- }
356
- }
240
+ for (const userId in timestamps) { const lastProcessed = timestamps[userId]?.toDate ? timestamps[userId].toDate() : new Date(0); if (lastProcessed < dateThreshold) { usersToUpdate.push(userId); }}
357
241
  logger.log('INFO',`[Core Utils] Found ${usersToUpdate.length} normal users to update.`);
358
242
  return usersToUpdate;
359
243
  } catch (error) {
@@ -374,44 +258,26 @@ async function getNormalUsersToUpdate(dependencies, config) {
  async function getSpeculatorsToUpdate(dependencies, config) {
  const { db, logger } = dependencies;
  const { dateThreshold, gracePeriodThreshold, speculatorBlocksCollectionName } = config;
-
  logger.log('INFO','[Core Utils] Getting speculators to update...');
  const updates = [];
  try {
  const blocksRef = db.collection(speculatorBlocksCollectionName);
  const snapshot = await blocksRef.get();
-
- if (snapshot.empty) {
- logger.log('INFO','[Core Utils] No speculator blocks found.');
- return [];
- }
-
+ if (snapshot.empty) { logger.log('INFO','[Core Utils] No speculator blocks found.'); return []; }
  snapshot.forEach(doc => {
  const blockData = doc.data();
-
- // Iterate over the document's top-level keys
  for (const key in blockData) {
- // Filter for keys that match the 'users.CID' format
  if (!key.startsWith('users.')) continue;
-
  const userId = key.split('.')[1];
- if (!userId) continue; // Safety check
-
- const userData = blockData[key]; // Get the user's map
-
+ if (!userId) continue;
+ const userData = blockData[key];
  const lastVerified = userData.lastVerified?.toDate ? userData.lastVerified.toDate() : new Date(0);
  const lastHeld = userData.lastHeldSpeculatorAsset?.toDate ? userData.lastHeldSpeculatorAsset.toDate() : new Date(0);
-
- if (lastVerified < dateThreshold && lastHeld > gracePeriodThreshold) {
- if (userData.instruments && Array.isArray(userData.instruments)) {
- userData.instruments.forEach(instrumentId => {
- updates.push({ userId, instrumentId });
- });
+ if (lastVerified < dateThreshold && lastHeld > gracePeriodThreshold) { if (userData.instruments && Array.isArray(userData.instruments)) { userData.instruments.forEach(instrumentId => { updates.push({ userId, instrumentId }); });
  }
  }
  }
  });
-
  logger.log('INFO',`[Core Utils] Found ${updates.length} speculator user/instrument pairs to update.`);
  return updates;
  } catch (error) {
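Note: the selection predicate above combines two timestamps, and a small worked example makes the intent clear (dates are illustrative):

    // dateThreshold = 2024-06-01, gracePeriodThreshold = 2024-03-01
    // lastVerified  = 2024-05-15  -> stale (verified before the threshold)
    // lastHeld      = 2024-04-10  -> still inside the grace period
    // => each instrumentId in userData.instruments yields { userId, instrumentId }
    // A user whose lastHeld predates the grace period is dropped even when
    // stale, which is how lapsed speculators age out of the refresh queue.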
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bulltrackers-module",
- "version": "1.0.133",
+ "version": "1.0.134",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [