bulltrackers-module 1.0.705 → 1.0.706
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,8 @@
 /**
  * @fileoverview Executor for "Standard" (User-Level) calculations.
  * REFACTORED: Hoisted data loading, centralized Series/Root logic.
- * UPDATED: Calls getResult() on computations to trigger summary logging
+ * UPDATED: Calls getResult() on computations to trigger summary logging.
+ * FIXED: Force-increments shard indexes after every flush to prevent batch overwrites.
  */
 const { normalizeName, getEarliestDataDates } = require('../utils/utils');
 const { streamPortfolioData, streamHistoryData, getPortfolioPartRefs, getHistoryPartRefs } = require('../utils/data_loader');
@@ -81,8 +82,6 @@ class StandardExecutor {
     const loader = new CachedDataLoader(config, deps);
 
     // --- 1. PRE-LOAD GLOBAL DATA (Hoisted) ---
-    // Load all "Singleton" datasets once (Ratings, Rankings, Series, Mappings)
-    // instead of checking/loading per-user inside the loop.
     const startSetup = performance.now();
 
     const [
@@ -269,8 +268,10 @@ class StandardExecutor {
   // =========================================================================
   static async flushBuffer(state, dateStr, passName, config, deps, shardMap, stats, mode, skipStatus, isInitial) {
     const transformedState = {};
+    const activeCalcs = [];
+
     for (const [name, inst] of Object.entries(state)) {
-      // [FIX]
+      // [FIX 1] Allow computations to finalize/log summary stats via getResult()
       let data;
       if (typeof inst.getResult === 'function') {
         data = await inst.getResult();
@@ -278,6 +279,11 @@ class StandardExecutor {
         data = inst.results || {};
       }
 
+      // Track active calculations to increment their shards later
+      if (Object.keys(data).length > 0) {
+        activeCalcs.push(name);
+      }
+
       // Pivot user-date structure if needed
       const first = Object.keys(data)[0];
       if (first && data[first] && typeof data[first] === 'object' && /^\d{4}-\d{2}-\d{2}$/.test(Object.keys(data[first])[0])) {
@@ -293,8 +299,19 @@ class StandardExecutor {
       transformedState[name] = { manifest: inst.manifest, getResult: async () => data, _executionStats: stats[name] };
       inst.results = {}; // Clear buffer
     }
+
     const res = await commitResults(transformedState, dateStr, passName, config, deps, skipStatus, { flushMode: mode, shardIndexes: shardMap, isInitialWrite: isInitial });
+
+    // Update shardMap from result
     if (res.shardIndexes) Object.assign(shardMap, res.shardIndexes);
+
+    // [FIX 2] Force increment shard indexes for active calculations.
+    // This ensures the NEXT batch writes to a NEW shard (e.g. results_1)
+    // instead of overwriting the current one (results_0).
+    activeCalcs.forEach(name => {
+      shardMap[name] = (shardMap[name] || 0) + 1;
+    });
+
     return res;
   }
 
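For orientation, the sketch below condenses the two flushBuffer() changes into a small runnable JavaScript snippet: prefer getResult() over the raw results buffer (FIX 1), and force-increment the shard index after each flush so the next batch lands in a new shard such as results_1 instead of overwriting results_0 (FIX 2). It is illustrative only: sketchFlush, the inline stand-in for commitResults(), and the "momentum" calculation name are invented for this example and are not part of the package.

// Minimal sketch of the two fixes, not actual package code.
async function sketchFlush(state, shardMap) {
  const activeCalcs = [];
  const writes = [];

  for (const [name, inst] of Object.entries(state)) {
    // [FIX 1] Let the computation finalize/log via getResult() when available.
    const data = typeof inst.getResult === 'function'
      ? await inst.getResult()
      : (inst.results || {});

    // Track calculations that actually produced data this batch.
    if (Object.keys(data).length > 0) activeCalcs.push(name);

    // Stand-in for commitResults(): the target shard depends on the current index.
    writes.push(`${name}/results_${shardMap[name] || 0}`);
    inst.results = {}; // Clear buffer, as in the original.
  }

  // [FIX 2] Bump the index so the NEXT flush writes to a NEW shard.
  activeCalcs.forEach(name => {
    shardMap[name] = (shardMap[name] || 0) + 1;
  });

  return writes;
}

// Two consecutive flushes of the same calculation now target different shards.
(async () => {
  const shardMap = {};
  const calc = {
    results: { user1: 1 },
    async getResult() { return this.results; },
  };
  console.log(await sketchFlush({ momentum: calc }, shardMap)); // [ 'momentum/results_0' ]
  calc.results = { user2: 2 };
  console.log(await sketchFlush({ momentum: calc }, shardMap)); // [ 'momentum/results_1' ]
})();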