bulltrackers-module 1.0.159 → 1.0.160
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -99,7 +99,13 @@ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config,
     const fetched = {};
     if (docRefs.length) {
         (await db.getAll(...docRefs)).forEach((doc, i) => {
-
+            // --- FIX [PROBLEM 8]: Add completion marker check ---
+            const data = doc.exists ? doc.data() : null;
+            if (data && data._completed === true) {
+                fetched[depNames[i]] = data;
+            } else {
+                fetched[depNames[i]] = null; // Treat as not existing if incomplete
+            }
         });
     }

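For context, the completion-marker gate introduced above can be read in isolation. A minimal sketch, assuming a Firestore-style snapshot API (`exists`/`data()`); `resolveFetchedResult` is a hypothetical helper, not part of the module:

    // Hypothetical helper illustrating the gate: a result only counts as existing once its
    // writer has stamped _completed = true; partially written documents are treated as missing.
    function resolveFetchedResult(snapshot) {
        const data = snapshot && snapshot.exists ? snapshot.data() : null;
        return data && data._completed === true ? data : null;
    }
    // Usage against the snapshots returned by db.getAll(...docRefs):
    // snapshots.forEach((doc, i) => { fetched[depNames[i]] = resolveFetchedResult(doc); });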
@@ -139,19 +145,18 @@ function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, existingRe
         return earliestRunDate;
     };
     const filterCalc = (calc) => {
-
+        // --- FIX [PROBLEM 8]: The check for existingResults is now correct ---
+        if (existingResults[calc.name]) {logger.log('TRACE', `[Pass ${passToRun}] Skipping ${calc.name} for ${dateStr}. Result already exists (and is complete).`); skipped.add(calc.name); return false;}
+
         const earliestRunDate = getTrueEarliestRunDate(calc);
         if (dateToProcess < earliestRunDate) {logger.log('TRACE', `[Pass ${passToRun}] Skipping ${calc.name} for ${dateStr}. Date is before true earliest run date (${earliestRunDate.toISOString().slice(0, 10)}).`); skipped.add(calc.name); return false; }
         const { canRun, missing: missingRoot } = checkRootDependencies(calc, rootDataStatus);
         if (!canRun) {logger.log('INFO', `[Pass ${passToRun}] Skipping ${calc.name} for ${dateStr}. Data missing for this date: [${missingRoot.join(', ')}]`);skipped.add(calc.name); return false;}

-        // --- FIX: This check is now robust ---
-        // 'existingResults' now contains all dependencies, so this check
-        // will correctly find 'pnl_distribution_per_stock' and *not* skip.
         if (calc.type === 'meta') {
             const missingDeps = (calc.dependencies || [])
                 .map(normalizeName)
-                .filter(d => !existingResults[d]);
+                .filter(d => !existingResults[d]); // This check is now robust
             if (missingDeps.length > 0) {
                 logger.log('WARN', `[Pass ${passToRun} Meta] Skipping ${calc.name} for ${dateStr}. Missing computed deps: [${missingDeps.join(', ')}]`);
                 skipped.add(calc.name);
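The meta-dependency check now operates purely on `existingResults`, so it can be exercised without Firestore. A sketch of the check as a standalone predicate (the helper name is illustrative, not the module's own):

    // Returns the normalized dependency names that have no completed result yet.
    function findMissingMetaDeps(calc, existingResults, normalizeName) {
        if (calc.type !== 'meta') return [];
        return (calc.dependencies || [])
            .map(normalizeName)
            .filter(dep => !existingResults[dep]);
    }
    // A dependency whose result document was written without the _completed flag resolves to
    // null in existingResults, so it is reported as missing and the meta calc is skipped with
    // the WARN log shown above.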
@@ -177,15 +182,20 @@ async function loadHistoricalData(date, calcs, config, deps, rootData) {
     const needsYesterdayInsights = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('insights'));
     const needsYesterdaySocial = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('social'));
     const needsYesterdayPortfolio = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('portfolio'));
+
+    // --- FIX: Add T-1 COMPUTED dependency loading ---
+    const needsYesterdayDependencies = calcs.some(c => c.isHistorical && c.dependencies && c.dependencies.length > 0);
+
     const prev = new Date(date);
     prev.setUTCDate(prev.getUTCDate() - 1);
     const prevStr = prev.toISOString().slice(0, 10);
+
     if(needsYesterdayInsights) {
-        tasks.push((async()=>{
+        tasks.push((async()=>{
             logger.log('INFO', `[PassRunner] Loading YESTERDAY insights data for ${prevStr}`);
             updated.yesterdayInsights=await loadDailyInsights(config,deps,prevStr); })());}
     if(needsYesterdaySocial) {
-        tasks.push((async()=>{
+        tasks.push((async()=>{
             logger.log('INFO', `[PassRunner] Loading YESTERDAY social data for ${prevStr}`);
             updated.yesterdaySocialPostInsights=await loadDailySocialPostInsights(config,deps,prevStr); })());}

@@ -195,6 +205,17 @@ async function loadHistoricalData(date, calcs, config, deps, rootData) {
             updated.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
         })());
     }
+
+    // --- FIX: Load T-1 COMPUTED dependencies ---
+    if(needsYesterdayDependencies) {
+        tasks.push((async()=>{
+            logger.log('INFO', `[PassRunner] Loading YESTERDAY computed dependencies for ${prevStr}`);
+            // This is a simplified fetch, assuming all calcs in this pass share the same T-1 deps
+            // A more robust solution would aggregate all unique T-1 deps.
+            updated.yesterdayDependencyData = await fetchExistingResults(prevStr, calcs, calcs.map(c => c.manifest), config, deps);
+        })());
+    }
+
     await Promise.all(tasks);
     return updated;
 }
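The inline comments concede that reusing fetchExistingResults(prevStr, calcs, ...) is a simplification. A sketch of the "more robust" aggregation they mention, assuming a hypothetical fetchResultsByName(dateStr, names, config, deps) helper, since the real fetch derives its document list from the calcs it is given:

    // Collect every unique T-1 computed dependency across the historical calcs in the pass.
    function collectYesterdayDependencyNames(calcs) {
        const names = new Set();
        for (const calc of calcs) {
            if (!calc.isHistorical) continue;
            for (const dep of calc.dependencies || []) names.add(dep);
        }
        return [...names];
    }
    // const depNames = collectYesterdayDependencyNames(calcs);
    // updated.yesterdayDependencyData = await fetchResultsByName(prevStr, depNames, config, deps);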
@@ -204,9 +225,12 @@ async function loadHistoricalData(date, calcs, config, deps, rootData) {
  */
 async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs) {
     const { logger, calculationUtils } = deps;
-    const { todayInsights, yesterdayInsights, todaySocialPostInsights, yesterdaySocialPostInsights } = rootData;
+    const { todayInsights, yesterdayInsights, todaySocialPostInsights, yesterdaySocialPostInsights, yesterdayDependencyData } = rootData;
     const calcsThatStreamPortfolio = Object.values(state).filter(calc => calc && calc.manifest && (calc.manifest.rootDataDependencies.includes('portfolio') || calc.manifest.category === 'speculators'));
-
+
+    // --- FIX: Add yesterday's computed data to context ---
+    const context={instrumentMappings:(await calculationUtils.loadInstrumentMappings()).instrumentToTicker, sectorMapping:(await calculationUtils.loadInstrumentMappings()).instrumentToSector, todayDateStr:dateStr, dependencies:deps, config, yesterdaysDependencyData: yesterdayDependencyData};
+
     let firstUser=true;
     for(const name in state){ const calc=state[name]; if(!calc||typeof calc.process!=='function') continue;
         const cat=calc.manifest.category;
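With yesterdaysDependencyData on the shared context, a historical calculation can read its T-1 inputs from the preloaded data instead of making its own Firestore round trip. A hypothetical consumer sketch (the calc shape, the process signature, and the dependency name are illustrative only):

    class ExampleHistoricalCalc {
        process(user, context) {
            const prior = (context.yesterdaysDependencyData || {})['example_dependency'];
            if (!prior) return; // T-1 result missing or incomplete; skip the day-over-day delta
            // ...compare today's values against `prior` to produce the delta...
        }
    }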
@@ -249,6 +273,11 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
             firstUser=false;
             if (pY) { delete yesterdayPortfolios[uid]; }
             if (hT) { delete todayHistoryData[uid]; } } }
+
+    // --- FIX [PROBLEM 7]: Clear stale data to prevent memory leak ---
+    yesterdayPortfolios = {};
+    todayHistoryData = {};
+
     logger.log('INFO', `[${passName}] Finished streaming data for ${dateStr}.`);
 }

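The reset works because yesterdayPortfolios and todayHistoryData are presumably reassignable bindings declared with let earlier in the function (outside this hunk): dropping the last references lets the per-user maps from one date be garbage-collected instead of accumulating across the run. A minimal illustration of the pattern, with generic names:

    let perUserCache = {};          // grows while streaming one date
    function finishDate() {
        perUserCache = {};          // reassign so the old map becomes unreachable and can be GC'd
    }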
@@ -279,6 +308,8 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
             } else { standardResult[key] = result[key]; }}
         if (Object.keys(standardResult).length > 0) {
             const docRef = deps.db.collection(config.resultsCollection).doc(dStr) .collection(config.resultsSubcollection).doc(calc.manifest.category) .collection(config.computationsSubcollection).doc(name);
+            // --- FIX [PROBLEM 8]: Add completion marker ---
+            standardResult._completed = true;
             standardWrites.push({ ref: docRef, data: standardResult });}
         const calcClass = calc.manifest.class;
         let staticSchema = null;
@@ -297,7 +328,10 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
             if (docPath.includes('/')) { docRef = deps.db.doc(docPath); } else {
                 const collection = (docPath.startsWith('user_profile_history')) ? config.shardedUserProfileCollection : config.shardedProfitabilityCollection;
                 docRef = deps.db.collection(collection).doc(docPath); }
-            if (docData && typeof docData === 'object' && !Array.isArray(docData)) {
+            if (docData && typeof docData === 'object' && !Array.isArray(docData)) {
+                // --- FIX [PROBLEM 8]: Add completion marker to sharded writes ---
+                docData._completed = true;
+                shardedDocWrites.push({ ref: docRef, data: docData });
             } else { logger.log('ERROR', `[${passName}] Invalid sharded document data for ${docPath}. Not an object.`, { data: docData }); }
             if (shardedDocWrites.length > 0) { await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${docPath} ${dStr}`); } }
         const logMetadata = {};
@@ -336,6 +370,8 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
         }
         if (Object.keys(standardResult).length > 0) {
             const docRef = deps.db.collection(config.resultsCollection).doc(dStr) .collection(config.resultsSubcollection).doc(mCalc.category) .collection(config.computationsSubcollection).doc(name);
+            // --- FIX [PROBLEM 8]: Add completion marker ---
+            standardResult._completed = true;
             standardWrites.push({ ref: docRef, data: standardResult });
         }
         const calcClass = mCalc.class;
@@ -353,7 +389,13 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
         for (const collectionName in shardedWrites) {
             const docs = shardedWrites[collectionName];
             const shardedDocWrites = [];
-            for (const docId in docs) {
+            for (const docId in docs) {
+                const docRef = docId.includes('/') ? deps.db.doc(docId) : deps.db.collection(collectionName).doc(docId);
+                // --- FIX [PROBLEM 8]: Add completion marker to sharded writes ---
+                const docData = docs[docId];
+                docData._completed = true;
+                shardedDocWrites.push({ ref: docRef, data: docData });
+            }
             if (shardedDocWrites.length > 0) { await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${collectionName} ${dStr}`); } }
         const logMetadata = {};
         if (failedCalcs.length > 0) { logMetadata.failedComputations = failedCalcs; }
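The _completed stamp is now applied at four separate write sites (standard and meta results, plus both sharded-write paths). A possible follow-up consolidation, sketched with a hypothetical helper that is not part of this release:

    // Stamp a result document as fully written; leaves non-object payloads untouched.
    function markCompleted(docData) {
        if (docData && typeof docData === 'object' && !Array.isArray(docData)) {
            docData._completed = true;
        }
        return docData;
    }
    // e.g. shardedDocWrites.push({ ref: docRef, data: markCompleted(docData) });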