bulltrackers-module 1.0.220 → 1.0.222

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,7 +2,7 @@
  * @fileoverview Main Orchestrator. Coordinates the topological execution of calculations.
  */
 const { normalizeName, getExpectedDateStrings } = require('./utils/utils');
-const { checkRootDependencies, checkRootDataAvailability } = require('./data/AvailabilityChecker');
+const { checkRootDataAvailability, getViableCalculations } = require('./data/AvailabilityChecker');
 const { fetchExistingResults } = require('./data/DependencyFetcher');
 const { fetchComputationStatus, updateComputationStatus } = require('./persistence/StatusRepository');
 const { runBatchPriceComputation } = require('./executors/PriceBatchExecutor');
@@ -68,7 +68,7 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
   const { logger } = dependencies;
   const dateToProcess = new Date(dateStr + 'T00:00:00Z');
 
-  // 1. Version Check
+  // 1. Version Check: Determine which calculations are *stale*
   const dailyStatus = await fetchComputationStatus(dateStr, config, dependencies);
   const calcsToAttempt = [];
 
@@ -77,22 +77,17 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
     const storedStatus = dailyStatus[cName];
     const currentHash = calc.hash;
 
-    if (calc.dependencies && calc.dependencies.length > 0) {
-      const missing = calc.dependencies.filter(depName => !dailyStatus[normalizeName(depName)]);
-      if (missing.length > 0) {
-        logger.log('TRACE', `[Skip] ${cName} missing deps: ${missing.join(', ')}`);
-        continue;
-      }
-    }
     if (!storedStatus) {
-      logger.log('INFO', `[Versioning] ${cName}: New run needed.`);
+      // New calculation
       calcsToAttempt.push(calc); continue;
     }
     if (typeof storedStatus === 'string' && currentHash && storedStatus !== currentHash) {
+      // Code changed, must re-run
       logger.log('INFO', `[Versioning] ${cName}: Code Changed.`);
       calcsToAttempt.push(calc); continue;
     }
     if (storedStatus === true && currentHash) {
+      // Migrating legacy status
       logger.log('INFO', `[Versioning] ${cName}: Upgrading legacy status.`);
       calcsToAttempt.push(calc); continue;
     }
@@ -100,7 +95,7 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
 
   if (!calcsToAttempt.length) return null;
 
-  // 2. Data Check
+  // 2. Data Availability Check
   const earliestDates = {
     portfolio: new Date('2025-09-25T00:00:00Z'),
     history: new Date('2025-11-05T00:00:00Z'),
@@ -112,8 +107,15 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
   const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
   if (!rootData) { logger.log('INFO', `[DateRunner] Root data missing for ${dateStr}. Skipping.`); return null; }
 
-  const runnableCalcs = calcsToAttempt.filter(c => checkRootDependencies(c, rootData.status).canRun);
-  if (!runnableCalcs.length) return null;
+  // 3. Viability Check (Smart Execution Map)
+  // Filter candidates: drop any calculation that is missing Root Data OR Dependencies
+  const runnableCalcs = getViableCalculations(calcsToAttempt, rootData.status, dailyStatus);
+
+  if (!runnableCalcs.length) {
+    // If we had candidates but they were all pruned, they are blocked.
+    // logger.log('INFO', `[DateRunner] ${dateStr}: Candidates pruned due to missing deps/data.`);
+    return null;
+  }
 
   const standardToRun = runnableCalcs.filter(c => c.type === 'standard');
   const metaToRun = runnableCalcs.filter(c => c.type === 'meta');
@@ -123,6 +125,7 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
 
   try {
     const calcsRunning = [...standardToRun, ...metaToRun];
+    // Fetch dependencies for the *runnable* calculations
     const existingResults = await fetchExistingResults(dateStr, calcsRunning, computationManifest, config, dependencies, false);
     const prevDate = new Date(dateToProcess); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
     const prevDateStr = prevDate.toISOString().slice(0, 10);
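
For context, the reworked pass above now runs in three stages: a version check against the stored status document, a root-data availability check, and the new viability filter. A minimal sketch of the version-check contract, with an assumed status-document shape (calculation names and hashes below are illustrative):

```js
// Hypothetical computation_status document for one date:
// a string value is the hash of the last successful run; `true` is a legacy marker.
const dailyStatus = { 'pnl-daily': 'a1b2c3', 'exposure': true };

// A calculation is re-attempted when it is new, its code hash changed,
// or it still carries the legacy `true` status.
function needsRun(calc, dailyStatus) {
  const stored = dailyStatus[calc.name];
  if (!stored) return true;                                                         // new
  if (typeof stored === 'string' && calc.hash && stored !== calc.hash) return true; // code changed
  if (stored === true && calc.hash) return true;                                    // legacy upgrade
  return false;
}

needsRun({ name: 'pnl-daily', hash: 'ffffff' }, dailyStatus); // -> true (hash mismatch)
needsRun({ name: 'exposure', hash: 'a1b2c3' }, dailyStatus);  // -> true (legacy marker)
```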
@@ -1,7 +1,7 @@
 /**
  * @fileoverview Factory for creating the Computation Context.
  */
-const mathLayer = require('../layers/index');
+const mathLayer = require('../layers/index');
 const { LEGACY_MAPPING } = require('../topology/HashManager');
 
 class ContextFactory {
@@ -2,7 +2,7 @@
  * @fileoverview Dynamic Manifest Builder - Handles Topological Sort and Auto-Discovery.
  */
 const { generateCodeHash, LEGACY_MAPPING } = require('../topology/HashManager.js');
-const { normalizeName } = require('../utils/utils');
+const { normalizeName } = require('../utils/utils');
 
 // Import Layers
 const MathematicsLayer = require('../layers/mathematics');
@@ -93,12 +93,12 @@ function buildManifest(productLinesToRun = [], calculations) {
     if (!Class || typeof Class !== 'function') return;
     const normalizedName = normalizeName(name);
 
-    if (typeof Class.getMetadata !== 'function') { log.fatal(`Calculation "${normalizedName}" missing static getMetadata().`); hasFatalError = true; return; }
+    if (typeof Class.getMetadata !== 'function') { log.fatal(`Calculation "${normalizedName}" missing static getMetadata().`); hasFatalError = true; return; }
     if (typeof Class.getDependencies !== 'function') { log.fatal(`Calculation "${normalizedName}" missing static getDependencies().`); hasFatalError = true; return; }
 
-    const metadata = Class.getMetadata();
+    const metadata = Class.getMetadata();
     const dependencies = Class.getDependencies().map(normalizeName);
-    const codeStr = Class.toString();
+    const codeStr = Class.toString();
 
     let compositeHashString = generateCodeHash(codeStr);
     const usedDeps = [];
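
The fatal checks above define the contract every auto-discovered calculation must meet. A minimal class that would pass discovery, with illustrative names (the compute surface itself is not shown in this diff):

```js
// Sketch of the static contract enforced by buildManifest (names are hypothetical):
class ExampleCalculation {
  static getMetadata() {
    return { name: 'example-calculation', type: 'standard' };
  }
  static getDependencies() {
    return ['other_calc']; // normalizeName() turns this into 'other-calc'
  }
}
```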
@@ -5,8 +5,9 @@ const {
   getPortfolioPartRefs,
   loadDailyInsights,
   loadDailySocialPostInsights,
-  getHistoryPartRefs
+  getHistoryPartRefs
 } = require('../utils/data_loader');
+const { normalizeName } = require('../utils/utils');
 
 function checkRootDependencies(calcManifest, rootDataStatus) {
   const missing = [];
@@ -21,15 +22,60 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
   return { canRun: missing.length === 0, missing };
 }
 
+/**
+ * Filters candidates to only those that are strictly "viable" to run.
+ * A calculation is Viable if:
+ *   1. All required Root Data is present.
+ *   2. All required Dependencies (from previous passes) are present in dailyStatus.
+ *
+ * @param {Array}  candidates     - Calculations attempting to run in this pass.
+ * @param {Object} rootDataStatus - { hasPortfolio: bool, hasPrices: bool... }
+ * @param {Object} dailyStatus    - Map of { "calc-name": "hash" } for completed items.
+ */
+function getViableCalculations(candidates, rootDataStatus, dailyStatus) {
+  const viable = [];
+
+  for (const calc of candidates) {
+    // 1. Check Root Data
+    const rootCheck = checkRootDependencies(calc, rootDataStatus);
+    if (!rootCheck.canRun) {
+      // Root data missing -> impossible to run.
+      continue;
+    }
+
+    // 2. Check Dependencies
+    let dependenciesMet = true;
+    if (calc.dependencies && calc.dependencies.length > 0) {
+      for (const depName of calc.dependencies) {
+        const normDep = normalizeName(depName);
+
+        // If a dependency is missing from dailyStatus, it failed in a previous pass.
+        // Therefore, the current calculation is impossible.
+        if (!dailyStatus[normDep]) {
+          dependenciesMet = false;
+          break;
+        }
+      }
+    }
+
+    if (dependenciesMet) {
+      viable.push(calc);
+    }
+  }
+
+  return viable;
+}
+
 async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
-  const { logger } = dependencies;
+  const { logger, db } = dependencies;
   const dateToProcess = new Date(dateStr + 'T00:00:00Z');
+
   let portfolioRefs = [], historyRefs = [];
   let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false, hasPrices = false;
   let insightsData = null, socialData = null;
 
   try {
     const tasks = [];
+    // Only check data sources if the date is after the earliest known data point
     if (dateToProcess >= earliestDates.portfolio) {
       tasks.push(getPortfolioPartRefs(config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio = !!r.length; }));
     }
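
A short usage sketch of the new `getViableCalculations` filter. The candidate shapes are illustrative, and it assumes `checkRootDependencies` (whose body is mostly outside this diff) maps entries like `'portfolio'` onto the `hasPortfolio` flag:

```js
const candidates = [
  { name: 'alpha', rootDataDependencies: ['portfolio'], dependencies: [] },
  { name: 'beta',  rootDataDependencies: ['portfolio'], dependencies: ['alpha'] },
  { name: 'gamma', rootDataDependencies: ['history'],   dependencies: [] },
];
const rootDataStatus = { hasPortfolio: true, hasHistory: false };
const dailyStatus = {}; // 'alpha' has not completed in any earlier pass

// 'gamma' is pruned (history data missing); 'beta' is pruned ('alpha' is
// absent from dailyStatus); only 'alpha' survives.
const runnable = getViableCalculations(candidates, rootDataStatus, dailyStatus);
// -> [ { name: 'alpha', ... } ]
```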
@@ -43,11 +89,12 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
       tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));
     }
     if (dateToProcess >= earliestDates.price) {
-      tasks.push(checkPriceDataAvailability(config, dependencies).then(r => { hasPrices = r; }));
+      tasks.push(checkPriceAvailability(config, db).then(r => { hasPrices = r; }));
     }
 
     await Promise.all(tasks);
 
+    // If ABSOLUTELY NO data exists, we can return null early
     if (!(hasPortfolio || hasInsights || hasSocial || hasHistory || hasPrices)) return null;
 
     return {
@@ -56,7 +103,7 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
       todayInsights: insightsData,
       todaySocialPostInsights: socialData,
       status: { hasPortfolio, hasInsights, hasSocial, hasHistory, hasPrices },
-      yesterdayPortfolioRefs: null // Filled later if needed
+      yesterdayPortfolioRefs: null // Filled later by StandardExecutor if needed
     };
   } catch (err) {
     logger.log('ERROR', `Error checking data: ${err.message}`);
@@ -64,7 +111,11 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
   }
 }
 
-async function checkPriceDataAvailability(config, { db }) {
+/**
+ * Checks if any price data exists in the collection.
+ * Note: Uses a lightweight limit(1) query.
+ */
+async function checkPriceAvailability(config, db) {
   try {
     const collection = config.priceCollection || 'asset_prices';
     const snapshot = await db.collection(collection).limit(1).get();
@@ -72,4 +123,4 @@ async function checkPriceDataAvailability(config, { db }) {
   } catch (e) { return false; }
 }
 
-module.exports = { checkRootDependencies, checkRootDataAvailability };
+module.exports = { checkRootDependencies, checkRootDataAvailability, getViableCalculations };
@@ -15,7 +15,7 @@ class CachedDataLoader {
     this.cache = {
       mappings: null,
       insights: new Map(),
-      social: new Map()
+      social: new Map()
     };
   }
 
@@ -1,10 +1,10 @@
 /**
  * @fileoverview Executor for "Meta" (global) calculations.
  */
-const { normalizeName } = require('../utils/utils');
+const { normalizeName } = require('../utils/utils');
 const { CachedDataLoader } = require('../data/CachedDataLoader');
-const { ContextFactory } = require('../context/ContextFactory');
-const { commitResults } = require('../persistence/ResultCommitter');
+const { ContextFactory } = require('../context/ContextFactory');
+const { commitResults } = require('../persistence/ResultCommitter');
 
 class MetaExecutor {
   static async run(date, calcs, passName, config, deps, fetchedDeps, previousFetchedDeps, rootData, skipStatusWrite = false) {
@@ -30,8 +30,8 @@ class MetaExecutor {
   static async executeOncePerDay(calcInstance, metadata, dateStr, computedDeps, prevDeps, config, deps, loader) {
     const mappings = await loader.loadMappings();
     const { logger } = deps;
-    const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await loader.loadInsights(dateStr) } : null;
-    const social = metadata.rootDataDependencies?.includes('social') ? { today: await loader.loadSocial(dateStr) } : null;
+    const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await loader.loadInsights(dateStr) } : null;
+    const social = metadata.rootDataDependencies?.includes('social') ? { today: await loader.loadSocial(dateStr) } : null;
 
     if (metadata.rootDataDependencies?.includes('price')) {
       logger.log('INFO', `[Executor] Running Batched/Sharded Execution for ${metadata.name}`);
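
As the branches above show, `metadata.rootDataDependencies` is what gates each optional load. An illustrative declaration (calculation name and the `type` field are assumptions, not shown in this diff):

```js
// A meta calculation declaring which root data it needs:
class MarketSentiment {
  static getMetadata() {
    return {
      name: 'market-sentiment',                     // hypothetical
      type: 'meta',
      rootDataDependencies: ['insights', 'social'], // enables the two loader calls above
    };
  }
  static getDependencies() { return []; }
}
```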
@@ -1,21 +1,21 @@
 /**
  * @fileoverview Specialized Executor for Price-Dependent Batch computations.
  */
-const pLimit = require('p-limit');
-const { normalizeName } = require('../utils/utils');
+const pLimit = require('p-limit');
+const { normalizeName } = require('../utils/utils');
 const { getRelevantShardRefs, loadDataByRefs } = require('../utils/data_loader');
-const { CachedDataLoader } = require('../data/CachedDataLoader');
-const mathLayer = require('../layers/index');
-const { LEGACY_MAPPING } = require('../topology/HashManager');
+const { CachedDataLoader } = require('../data/CachedDataLoader');
+const mathLayer = require('../layers/index');
+const { LEGACY_MAPPING } = require('../topology/HashManager');
 
 async function runBatchPriceComputation(config, deps, dateStrings, calcs, targetTickers = []) {
   const { logger, db, calculationUtils } = deps;
   const cachedLoader = new CachedDataLoader(config, deps);
-  const mappings = await cachedLoader.loadMappings();
+  const mappings = await cachedLoader.loadMappings();
 
   let targetInstrumentIds = [];
   if (targetTickers && targetTickers.length > 0) {
-    const tickerToInst = mappings.tickerToInstrument || {};
+    const tickerToInst = mappings.tickerToInstrument || {};
     targetInstrumentIds = targetTickers.map(t => tickerToInst[t]).filter(id => id);
     if (targetInstrumentIds.length === 0) { logger.log('WARN', '[BatchPrice] Target tickers provided but no IDs found. Aborting.'); return; }
   }
@@ -1,23 +1,23 @@
 /**
  * @fileoverview Executor for "Standard" (per-user) calculations.
  */
-const { normalizeName } = require('../utils/utils');
+const { normalizeName } = require('../utils/utils');
 const { streamPortfolioData, streamHistoryData, getPortfolioPartRefs } = require('../utils/data_loader');
-const { CachedDataLoader } = require('../data/CachedDataLoader');
-const { ContextFactory } = require('../context/ContextFactory');
-const { commitResults } = require('../persistence/ResultCommitter');
-const mathLayer = require('../layers/index');
+const { CachedDataLoader } = require('../data/CachedDataLoader');
+const { ContextFactory } = require('../context/ContextFactory');
+const { commitResults } = require('../persistence/ResultCommitter');
+const mathLayer = require('../layers/index');
 
 class StandardExecutor {
   static async run(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps, skipStatusWrite = false) {
-    const dStr = date.toISOString().slice(0, 10);
+    const dStr = date.toISOString().slice(0, 10);
     const logger = deps.logger;
 
     // 1. Prepare Yesterday's Data if needed
     const fullRoot = { ...rootData };
     if (calcs.some(c => c.isHistorical)) {
-      const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
-      const prevStr = prev.toISOString().slice(0, 10);
+      const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
+      const prevStr = prev.toISOString().slice(0, 10);
       fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
     }
 
@@ -53,19 +53,15 @@ class StandardExecutor {
     const cachedLoader = new CachedDataLoader(config, deps);
     await cachedLoader.loadMappings();
 
-    const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
+    const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
     const prevDateStr = prevDate.toISOString().slice(0, 10);
 
-    const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
+    const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
     const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
-    const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs)
-      ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs)
-      : null;
+    const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
 
     const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
-    const tH_iter = (needsTradingHistory && historyRefs)
-      ? streamHistoryData(config, deps, dateStr, historyRefs)
-      : null;
+    const tH_iter = (needsTradingHistory && historyRefs) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;
 
     let yP_chunk = {}, tH_chunk = {};
 
@@ -74,9 +70,7 @@ class StandardExecutor {
       if (tH_iter) tH_chunk = (await tH_iter.next()).value || {};
 
       // Execute chunk for all calcs
-      const promises = streamingCalcs.map(calc =>
-        StandardExecutor.executePerUser(calc, calc.manifest, dateStr, tP_chunk, yP_chunk, tH_chunk, fetchedDeps, previousFetchedDeps, config, deps, cachedLoader)
-      );
+      const promises = streamingCalcs.map(calc => StandardExecutor.executePerUser(calc, calc.manifest, dateStr, tP_chunk, yP_chunk, tH_chunk, fetchedDeps, previousFetchedDeps, config, deps, cachedLoader) );
       await Promise.all(promises);
     }
     logger.log('INFO', `[${passName}] Streaming complete.`);
@@ -87,12 +81,12 @@ class StandardExecutor {
     const targetUserType = metadata.userType;
     const mappings = await loader.loadMappings();
     const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await loader.loadInsights(dateStr) } : null;
-    const SCHEMAS = mathLayer.SCHEMAS;
+    const SCHEMAS = mathLayer.SCHEMAS;
 
     for (const [userId, todayPortfolio] of Object.entries(portfolioData)) {
       const yesterdayPortfolio = yesterdayPortfolioData ? yesterdayPortfolioData[userId] : null;
-      const todayHistory = historyData ? historyData[userId] : null;
-      const actualUserType = todayPortfolio.PublicPositions ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
+      const todayHistory = historyData ? historyData[userId] : null;
+      const actualUserType = todayPortfolio.PublicPositions ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
 
       if (targetUserType !== 'all') {
         const mappedTarget = (targetUserType === 'speculator') ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
@@ -5,8 +5,8 @@
  */
 
 const { getExpectedDateStrings } = require('../utils/utils.js');
-const { groupByPass } = require('../WorkflowOrchestrator.js');
-const { PubSubUtils } = require('../../core/utils/pubsub_utils');
+const { groupByPass } = require('../WorkflowOrchestrator.js');
+const { PubSubUtils } = require('../../core/utils/pubsub_utils');
 
 const TOPIC_NAME = 'computation-tasks';
 
@@ -15,14 +15,14 @@ const TOPIC_NAME = 'computation-tasks';
  * Instead of running them, it queues them in Pub/Sub.
  */
 async function dispatchComputationPass(config, dependencies, computationManifest) {
-  const { logger } = dependencies;
+  const { logger } = dependencies;
   const pubsubUtils = new PubSubUtils(dependencies);
-  const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
+  const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
 
   if (!passToRun) { return logger.log('ERROR', '[Dispatcher] No pass defined (COMPUTATION_PASS_TO_RUN). Aborting.'); }
 
   // 1. Validate Pass Existence
-  const passes = groupByPass(computationManifest);
+  const passes = groupByPass(computationManifest);
   const calcsInThisPass = passes[passToRun] || [];
 
   if (!calcsInThisPass.length) { return logger.log('WARN', `[Dispatcher] No calcs for Pass ${passToRun}. Exiting.`); }
@@ -1,7 +1,7 @@
 /**
  * @fileoverview Low-level Firestore interactions.
  */
-const { withRetry } = require('../utils/utils.js'); // Assuming this exists or is passed in deps
+const { withRetry } = require('../utils/utils.js');
 
 async function commitBatchInChunks(config, deps, writes, operationName) {
   const { db, logger, calculationUtils } = deps;
@@ -12,13 +12,13 @@ async function commitBatchInChunks(config, deps, writes, operationName) {
     return;
   }
 
-  const MAX_BATCH_OPS = 300;
+  const MAX_BATCH_OPS = 300;
   const MAX_BATCH_BYTES = 9 * 1024 * 1024;
 
-  let currentBatch = db.batch();
+  let currentBatch = db.batch();
   let currentOpsCount = 0;
   let currentBytesEst = 0;
-  let batchIndex = 1;
+  let batchIndex = 1;
 
   const commitAndReset = async () => {
     if (currentOpsCount > 0) {
@@ -34,7 +34,7 @@ async function commitBatchInChunks(config, deps, writes, operationName) {
         throw err;
       }
     }
-    currentBatch = db.batch();
+    currentBatch = db.batch();
     currentOpsCount = 0;
     currentBytesEst = 0;
   };
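
For reference, `commitBatchInChunks` consumes the same `{ ref, data, options }` descriptors that the `ResultCommitter` below produces. A minimal sketch of a call, assuming `deps` carries the Firestore `db` handle as elsewhere in this module (the collection and document id are illustrative):

```js
const { commitBatchInChunks } = require('./FirestoreUtils');

async function exampleCommit(config, deps) {
  const writes = [{
    ref: deps.db.collection('computation_results').doc('2025-11-05'), // illustrative path
    data: { total: 42, _completed: true, _sharded: false },
    options: { merge: true },
  }];
  // Flushed internally whenever a batch would exceed MAX_BATCH_OPS (300)
  // or the ~9MB MAX_BATCH_BYTES estimate.
  await commitBatchInChunks(config, deps, writes, 'Example Results');
}
```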
@@ -1,13 +1,13 @@
 /**
  * @fileoverview Handles saving computation results with transparent auto-sharding.
  */
-const { commitBatchInChunks } = require('./FirestoreUtils');
+const { commitBatchInChunks } = require('./FirestoreUtils');
 const { updateComputationStatus } = require('./StatusRepository');
-const { batchStoreSchemas } = require('../utils/schema_capture');
+const { batchStoreSchemas } = require('../utils/schema_capture');
 
 async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
   const successUpdates = {};
-  const schemas = [];
+  const schemas = [];
 
   for (const name in stateObj) {
     const calc = stateObj[name];
@@ -40,7 +40,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
     if (updates.length > 0) {
       await commitBatchInChunks(config, deps, updates, `${name} Results`);
       successUpdates[name] = calc.manifest.hash || true;
-      const isSharded = updates.some(u => u.data._sharded === true);
+      const isSharded = updates.some(u => u.data._sharded === true);
       deps.logger.log('INFO', `${name} for ${dStr}: \u2714 Success (Written ${isSharded ? 'Sharded' : 'Standard'})`);
     } else {
       deps.logger.log('INFO', `${name} for ${dStr}: - Empty Data`);
@@ -74,42 +74,43 @@ function calculateFirestoreBytes(value) {
 
 async function prepareAutoShardedWrites(result, docRef, logger) {
   const SAFETY_THRESHOLD_BYTES = 1000 * 1024; // 1MB Limit
-  const OVERHEAD_ALLOWANCE = 20 * 1024;
-  const CHUNK_LIMIT = SAFETY_THRESHOLD_BYTES - OVERHEAD_ALLOWANCE;
-  const totalSize = calculateFirestoreBytes(result);
-  const docPathSize = Buffer.byteLength(docRef.path, 'utf8') + 16;
-
-  if ((totalSize + docPathSize) < CHUNK_LIMIT) {
-    const data = { ...result, _completed: true, _sharded: false };
-    return [{ ref: docRef, data, options: { merge: true } }];
-  }
+  const OVERHEAD_ALLOWANCE = 20 * 1024;
+  const CHUNK_LIMIT = SAFETY_THRESHOLD_BYTES - OVERHEAD_ALLOWANCE;
+  const totalSize = calculateFirestoreBytes(result);
+  const docPathSize = Buffer.byteLength(docRef.path, 'utf8') + 16;
+  const writes = [];
+  const shardCollection = docRef.collection('_shards');
+  let currentChunk = {};
+  let currentChunkSize = 0;
+  let shardIndex = 0;
+
+  // If under limit, write directly
+  if ((totalSize + docPathSize) < CHUNK_LIMIT) { const data = { ...result, _completed: true, _sharded: false }; return [{ ref: docRef, data, options: { merge: true } }]; }
 
   logger.log('INFO', `[AutoShard] Result size ~${Math.round(totalSize/1024)}KB exceeds limit. Sharding...`);
-  const writes = [];
-  const shardCollection = docRef.collection('_shards');
-  let currentChunk = {};
-  let currentChunkSize = 0;
-  let shardIndex = 0;
 
+  // If over limit, shard the document
   for (const [key, value] of Object.entries(result)) {
     if (key.startsWith('_')) continue;
-    const keySize = Buffer.byteLength(key, 'utf8') + 1;
+    const keySize = Buffer.byteLength(key, 'utf8') + 1;
     const valueSize = calculateFirestoreBytes(value);
-    const itemSize = keySize + valueSize;
+    const itemSize = keySize + valueSize;
 
+    // If adding this item exceeds the chunk limit, commit current chunk
     if (currentChunkSize + itemSize > CHUNK_LIMIT) {
       writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: currentChunk, options: { merge: false } });
       shardIndex++;
-      currentChunk = {};
+      currentChunk = {};
       currentChunkSize = 0;
     }
     currentChunk[key] = value;
     currentChunkSize += itemSize;
   }
-  if (Object.keys(currentChunk).length > 0) {
-    writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: currentChunk, options: { merge: false } });
-  }
+
+  // Write the final chunk
+  if (Object.keys(currentChunk).length > 0) { writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: currentChunk, options: { merge: false } }); }
 
+  // Finally, write the pointer document
   const pointerData = { _completed: true, _sharded: true, _shardCount: shardIndex + 1, _lastUpdated: new Date().toISOString() };
   writes.push({ ref: docRef, data: pointerData, options: { merge: false } });
   return writes;
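
The write path above implies a matching read path: check the pointer document, and if `_sharded` is set, merge the `_shards` subcollection back together. That reader is not part of this diff; a minimal sketch assuming the field names used above:

```js
// Hypothetical reader for documents written by prepareAutoShardedWrites.
async function readPossiblyShardedResult(docRef) {
  const snap = await docRef.get();
  if (!snap.exists) return null;

  const data = snap.data();
  if (!data._sharded) return data; // small result: fields are stored inline

  // Pointer document: reassemble the payload from its shards.
  const shards = await docRef.collection('_shards').get();
  const merged = {};
  shards.forEach(s => Object.assign(merged, s.data()));
  return { ...merged, _completed: data._completed, _shardCount: data._shardCount };
}
```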
@@ -3,19 +3,25 @@
  */
 
 async function fetchComputationStatus(dateStr, config, { db }) {
-  if (!key) throw new Error('fetchStatus requires a key');
+  // FIX: Check dateStr directly, or define 'key' before checking it.
+  if (!dateStr) throw new Error('fetchStatus requires a key');
+
   const key = dateStr;
   const collection = config.computationStatusCollection || 'computation_status';
   const docRef = db.collection(collection).doc(key);
+
   const snap = await docRef.get();
   return snap.exists ? snap.data() : {};
 }
 
 async function updateComputationStatus(dateStr, updates, config, { db }) {
   if (!dateStr) throw new Error('updateStatus requires a key');
+
   if (!updates || Object.keys(updates).length === 0) return;
+
   const collection = config.computationStatusCollection || 'computation_status';
   const docRef = db.collection(collection).doc(dateStr);
+
   await docRef.set(updates, { merge: true });
   return true;
 }
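
A round-trip sketch of the repaired status API (the date, calculation name, and hash are illustrative; `deps` must carry the Firestore `db` handle, matching the destructuring above):

```js
const { fetchComputationStatus, updateComputationStatus } = require('./StatusRepository');

async function exampleStatusRoundTrip(config, deps) {
  // Record that 'pnl-daily' completed for this date with code hash 'a1b2c3'.
  await updateComputationStatus('2025-11-05', { 'pnl-daily': 'a1b2c3' }, config, deps);

  // Reads return the merged status map, or {} when the document does not exist.
  const status = await fetchComputationStatus('2025-11-05', config, deps);
  // status -> { 'pnl-daily': 'a1b2c3', ... }
}
```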
@@ -3,7 +3,7 @@
  */
 
 const { FieldValue, FieldPath } = require('@google-cloud/firestore');
-const crypto = require('crypto');
+const crypto = require('crypto');
 
 /** Stage 1: Normalize a calculation name to kebab-case */
 function normalizeName(name) { return name.replace(/_/g, '-'); }
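
Note what `normalizeName` does and does not do: it only swaps underscores for hyphens; it does not lower-case. For example:

```js
normalizeName('net_profit_v2'); // -> 'net-profit-v2'
normalizeName('AlreadyKebab');  // -> 'AlreadyKebab' (left unchanged)
```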
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "bulltrackers-module",
-  "version": "1.0.220",
+  "version": "1.0.222",
   "description": "Helper Functions for Bulltrackers.",
   "main": "index.js",
   "files": [