bulltrackers-module 1.0.220 → 1.0.221

@@ -1,7 +1,7 @@
  /**
   * @fileoverview Factory for creating the Computation Context.
   */
- const mathLayer = require('../layers/index');
+ const mathLayer = require('../layers/index');
  const { LEGACY_MAPPING } = require('../topology/HashManager');

  class ContextFactory {
@@ -2,7 +2,7 @@
   * @fileoverview Dynamic Manifest Builder - Handles Topological Sort and Auto-Discovery.
   */
  const { generateCodeHash, LEGACY_MAPPING } = require('../topology/HashManager.js');
- const { normalizeName } = require('../utils/utils');
+ const { normalizeName } = require('../utils/utils');

  // Import Layers
  const MathematicsLayer = require('../layers/mathematics');
@@ -93,12 +93,12 @@ function buildManifest(productLinesToRun = [], calculations) {
  if (!Class || typeof Class !== 'function') return;
  const normalizedName = normalizeName(name);

- if (typeof Class.getMetadata !== 'function') { log.fatal(`Calculation "${normalizedName}" missing static getMetadata().`); hasFatalError = true; return; }
+ if (typeof Class.getMetadata !== 'function') { log.fatal(`Calculation "${normalizedName}" missing static getMetadata().`); hasFatalError = true; return; }
  if (typeof Class.getDependencies !== 'function') { log.fatal(`Calculation "${normalizedName}" missing static getDependencies().`); hasFatalError = true; return; }

- const metadata = Class.getMetadata();
+ const metadata = Class.getMetadata();
  const dependencies = Class.getDependencies().map(normalizeName);
- const codeStr = Class.toString();
+ const codeStr = Class.toString();

  let compositeHashString = generateCodeHash(codeStr);
  const usedDeps = [];
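
The hunk above hashes each calculation's source text (`Class.toString()`) via `generateCodeHash` from HashManager, which is not shown in this diff. A minimal sketch of what such a code hash could look like, assuming a SHA-256 digest over the source string (the real HashManager may compose the hash differently):

    const crypto = require('crypto');

    // Assumed shape only: a stable digest of a calculation's source text.
    function generateCodeHash(codeStr) {
      return crypto.createHash('sha256').update(codeStr, 'utf8').digest('hex');
    }

    // e.g. generateCodeHash(SomeCalculation.toString()) returns a 64-character hex string.
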
@@ -15,7 +15,7 @@ class CachedDataLoader {
    this.cache = {
      mappings: null,
      insights: new Map(),
-     social: new Map()
+     social: new Map()
    };
  }

@@ -1,10 +1,10 @@
  /**
   * @fileoverview Executor for "Meta" (global) calculations.
   */
- const { normalizeName } = require('../utils/utils');
+ const { normalizeName } = require('../utils/utils');
  const { CachedDataLoader } = require('../data/CachedDataLoader');
- const { ContextFactory } = require('../context/ContextFactory');
- const { commitResults } = require('../persistence/ResultCommitter');
+ const { ContextFactory } = require('../context/ContextFactory');
+ const { commitResults } = require('../persistence/ResultCommitter');

  class MetaExecutor {
    static async run(date, calcs, passName, config, deps, fetchedDeps, previousFetchedDeps, rootData, skipStatusWrite = false) {
@@ -1,17 +1,17 @@
  /**
   * @fileoverview Specialized Executor for Price-Dependent Batch computations.
   */
- const pLimit = require('p-limit');
- const { normalizeName } = require('../utils/utils');
+ const pLimit = require('p-limit');
+ const { normalizeName } = require('../utils/utils');
  const { getRelevantShardRefs, loadDataByRefs } = require('../utils/data_loader');
- const { CachedDataLoader } = require('../data/CachedDataLoader');
- const mathLayer = require('../layers/index');
- const { LEGACY_MAPPING } = require('../topology/HashManager');
+ const { CachedDataLoader } = require('../data/CachedDataLoader');
+ const mathLayer = require('../layers/index');
+ const { LEGACY_MAPPING } = require('../topology/HashManager');

  async function runBatchPriceComputation(config, deps, dateStrings, calcs, targetTickers = []) {
    const { logger, db, calculationUtils } = deps;
    const cachedLoader = new CachedDataLoader(config, deps);
-   const mappings = await cachedLoader.loadMappings();
+   const mappings = await cachedLoader.loadMappings();

    let targetInstrumentIds = [];
    if (targetTickers && targetTickers.length > 0) {
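
This executor imports `p-limit`, which is commonly used to cap how many async loads run at once. The call site is not part of this diff; the sketch below shows the typical pattern, with the concurrency of 4 and the `loadPricesFor` helper being illustrative assumptions rather than the module's API:

    const pLimit = require('p-limit');

    // Illustrative only: limit concurrent async work, e.g. per-instrument price loads.
    async function loadAllPrices(instrumentIds, loadPricesFor) {
      const limit = pLimit(4); // at most 4 loads in flight at a time
      return Promise.all(instrumentIds.map(id => limit(() => loadPricesFor(id))));
    }
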
@@ -1,23 +1,23 @@
  /**
   * @fileoverview Executor for "Standard" (per-user) calculations.
   */
- const { normalizeName } = require('../utils/utils');
+ const { normalizeName } = require('../utils/utils');
  const { streamPortfolioData, streamHistoryData, getPortfolioPartRefs } = require('../utils/data_loader');
- const { CachedDataLoader } = require('../data/CachedDataLoader');
- const { ContextFactory } = require('../context/ContextFactory');
- const { commitResults } = require('../persistence/ResultCommitter');
- const mathLayer = require('../layers/index');
+ const { CachedDataLoader } = require('../data/CachedDataLoader');
+ const { ContextFactory } = require('../context/ContextFactory');
+ const { commitResults } = require('../persistence/ResultCommitter');
+ const mathLayer = require('../layers/index');

  class StandardExecutor {
    static async run(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps, skipStatusWrite = false) {
-     const dStr = date.toISOString().slice(0, 10);
+     const dStr = date.toISOString().slice(0, 10);
      const logger = deps.logger;

      // 1. Prepare Yesterdays Data if needed
      const fullRoot = { ...rootData };
      if (calcs.some(c => c.isHistorical)) {
-       const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
-       const prevStr = prev.toISOString().slice(0, 10);
+       const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
+       const prevStr = prev.toISOString().slice(0, 10);
        fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
      }

@@ -53,19 +53,15 @@ class StandardExecutor {
  const cachedLoader = new CachedDataLoader(config, deps);
  await cachedLoader.loadMappings();

- const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
+ const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
  const prevDateStr = prevDate.toISOString().slice(0, 10);

- const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
+ const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
  const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
- const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs)
-   ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs)
-   : null;
+ const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;

  const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
- const tH_iter = (needsTradingHistory && historyRefs)
-   ? streamHistoryData(config, deps, dateStr, historyRefs)
-   : null;
+ const tH_iter = (needsTradingHistory && historyRefs) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;

  let yP_chunk = {}, tH_chunk = {};

@@ -74,9 +70,7 @@ class StandardExecutor {
    if (tH_iter) tH_chunk = (await tH_iter.next()).value || {};

    // Execute chunk for all calcs
-   const promises = streamingCalcs.map(calc =>
-     StandardExecutor.executePerUser(calc, calc.manifest, dateStr, tP_chunk, yP_chunk, tH_chunk, fetchedDeps, previousFetchedDeps, config, deps, cachedLoader)
-   );
+   const promises = streamingCalcs.map(calc => StandardExecutor.executePerUser(calc, calc.manifest, dateStr, tP_chunk, yP_chunk, tH_chunk, fetchedDeps, previousFetchedDeps, config, deps, cachedLoader) );
    await Promise.all(promises);
  }
  logger.log('INFO', `[${passName}] Streaming complete.`);
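
The hunk above collapses the per-chunk fan-out onto one line. For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of the same idea: advance an async iterator chunk by chunk and hand each chunk to every calculation before pulling more data. The names `loadChunks` and `processChunk` are hypothetical, not part of the module:

    // Minimal sketch of the chunked streaming pattern (hypothetical names).
    async function* loadChunks(items, chunkSize) {
      for (let i = 0; i < items.length; i += chunkSize) {
        yield items.slice(i, i + chunkSize); // one chunk per iteration
      }
    }

    async function runStreaming(calcs, items) {
      const iter = loadChunks(items, 2);
      for (let next = await iter.next(); !next.done; next = await iter.next()) {
        const chunk = next.value || [];
        // Fan the chunk out to every calculation and wait before loading more data.
        await Promise.all(calcs.map(calc => calc.processChunk(chunk)));
      }
    }
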
@@ -87,12 +81,12 @@ class StandardExecutor {
  const targetUserType = metadata.userType;
  const mappings = await loader.loadMappings();
  const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await loader.loadInsights(dateStr) } : null;
- const SCHEMAS = mathLayer.SCHEMAS;
+ const SCHEMAS = mathLayer.SCHEMAS;

  for (const [userId, todayPortfolio] of Object.entries(portfolioData)) {
    const yesterdayPortfolio = yesterdayPortfolioData ? yesterdayPortfolioData[userId] : null;
-   const todayHistory = historyData ? historyData[userId] : null;
-   const actualUserType = todayPortfolio.PublicPositions ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
+   const todayHistory = historyData ? historyData[userId] : null;
+   const actualUserType = todayPortfolio.PublicPositions ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;

    if (targetUserType !== 'all') {
      const mappedTarget = (targetUserType === 'speculator') ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
@@ -5,8 +5,8 @@
   */

  const { getExpectedDateStrings } = require('../utils/utils.js');
- const { groupByPass } = require('../WorkflowOrchestrator.js');
- const { PubSubUtils } = require('../../core/utils/pubsub_utils');
+ const { groupByPass } = require('../WorkflowOrchestrator.js');
+ const { PubSubUtils } = require('../../core/utils/pubsub_utils');

  const TOPIC_NAME = 'computation-tasks';

@@ -15,14 +15,14 @@ const TOPIC_NAME = 'computation-tasks';
   * Instead of running them, it queues them in Pub/Sub.
   */
  async function dispatchComputationPass(config, dependencies, computationManifest) {
-   const { logger } = dependencies;
+   const { logger } = dependencies;
    const pubsubUtils = new PubSubUtils(dependencies);
-   const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
+   const passToRun = String(config.COMPUTATION_PASS_TO_RUN);

    if (!passToRun) { return logger.log('ERROR', '[Dispatcher] No pass defined (COMPUTATION_PASS_TO_RUN). Aborting.'); }

    // 1. Validate Pass Existence
-   const passes = groupByPass(computationManifest);
+   const passes = groupByPass(computationManifest);
    const calcsInThisPass = passes[passToRun] || [];

    if (!calcsInThisPass.length) { return logger.log('WARN', `[Dispatcher] No calcs for Pass ${passToRun}. Exiting.`); }
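
The dispatcher treats the result of `groupByPass` as a map from pass identifier to the calculations in that pass (`passes[passToRun] || []`). The function itself lives in WorkflowOrchestrator and is not shown in this diff; a hypothetical minimal version, assuming each manifest entry carries a `pass` field, might look like this:

    // Hypothetical: group manifest entries by their pass identifier.
    function groupByPass(manifest) {
      const passes = {};
      for (const calc of manifest) {
        const pass = String(calc.pass); // the 'pass' field is an assumed shape
        (passes[pass] = passes[pass] || []).push(calc);
      }
      return passes;
    }
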
@@ -1,7 +1,7 @@
  /**
   * @fileoverview Low-level Firestore interactions.
   */
- const { withRetry } = require('../utils/utils.js'); // Assuming this exists or is passed in deps
+ const { withRetry } = require('../utils/utils.js');

  async function commitBatchInChunks(config, deps, writes, operationName) {
    const { db, logger, calculationUtils } = deps;
@@ -12,13 +12,13 @@ async function commitBatchInChunks(config, deps, writes, operationName) {
    return;
  }

- const MAX_BATCH_OPS = 300;
+ const MAX_BATCH_OPS = 300;
  const MAX_BATCH_BYTES = 9 * 1024 * 1024;

- let currentBatch = db.batch();
+ let currentBatch = db.batch();
  let currentOpsCount = 0;
  let currentBytesEst = 0;
- let batchIndex = 1;
+ let batchIndex = 1;

  const commitAndReset = async () => {
    if (currentOpsCount > 0) {
@@ -34,7 +34,7 @@ async function commitBatchInChunks(config, deps, writes, operationName) {
        throw err;
      }
    }
-   currentBatch = db.batch();
+   currentBatch = db.batch();
    currentOpsCount = 0;
    currentBytesEst = 0;
  };
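
For context, the function these two hunks touch commits writes in capped Firestore batches: at most 300 operations and roughly 9 MB per batch, flushing and starting a fresh batch whenever a cap would be exceeded. A condensed sketch of that commit-and-reset loop, assuming an array of `{ ref, data, options }` writes (the byte estimate and helper names here are illustrative, not the module's API):

    // Illustrative sketch: chunked batch commits bounded by op count and a byte estimate.
    async function commitInChunks(db, writes, maxOps = 300, maxBytes = 9 * 1024 * 1024) {
      let batch = db.batch();
      let ops = 0, bytes = 0;

      const commitAndReset = async () => {
        if (ops > 0) {
          await batch.commit();   // flush the current batch
          batch = db.batch();     // start a fresh one
          ops = 0;
          bytes = 0;
        }
      };

      for (const w of writes) {
        const estimate = Buffer.byteLength(JSON.stringify(w.data), 'utf8'); // rough size
        if (ops + 1 > maxOps || bytes + estimate > maxBytes) await commitAndReset();
        batch.set(w.ref, w.data, w.options || {});
        ops++;
        bytes += estimate;
      }
      await commitAndReset(); // flush the remainder
    }
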
@@ -1,13 +1,13 @@
  /**
   * @fileoverview Handles saving computation results with transparent auto-sharding.
   */
- const { commitBatchInChunks } = require('./FirestoreUtils');
+ const { commitBatchInChunks } = require('./FirestoreUtils');
  const { updateComputationStatus } = require('./StatusRepository');
- const { batchStoreSchemas } = require('../utils/schema_capture');
+ const { batchStoreSchemas } = require('../utils/schema_capture');

  async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
    const successUpdates = {};
-   const schemas = [];
+   const schemas = [];

    for (const name in stateObj) {
      const calc = stateObj[name];
@@ -40,7 +40,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
  if (updates.length > 0) {
    await commitBatchInChunks(config, deps, updates, `${name} Results`);
    successUpdates[name] = calc.manifest.hash || true;
-   const isSharded = updates.some(u => u.data._sharded === true);
+   const isSharded = updates.some(u => u.data._sharded === true);
    deps.logger.log('INFO', `${name} for ${dStr}: \u2714 Success (Written ${isSharded ? 'Sharded' : 'Standard'})`);
  } else {
    deps.logger.log('INFO', `${name} for ${dStr}: - Empty Data`);
@@ -74,42 +74,43 @@ function calculateFirestoreBytes(value) {

  async function prepareAutoShardedWrites(result, docRef, logger) {
    const SAFETY_THRESHOLD_BYTES = 1000 * 1024; // 1MB Limit
-   const OVERHEAD_ALLOWANCE = 20 * 1024;
-   const CHUNK_LIMIT = SAFETY_THRESHOLD_BYTES - OVERHEAD_ALLOWANCE;
-   const totalSize = calculateFirestoreBytes(result);
-   const docPathSize = Buffer.byteLength(docRef.path, 'utf8') + 16;
-
-   if ((totalSize + docPathSize) < CHUNK_LIMIT) {
-     const data = { ...result, _completed: true, _sharded: false };
-     return [{ ref: docRef, data, options: { merge: true } }];
-   }
+   const OVERHEAD_ALLOWANCE = 20 * 1024;
+   const CHUNK_LIMIT = SAFETY_THRESHOLD_BYTES - OVERHEAD_ALLOWANCE;
+   const totalSize = calculateFirestoreBytes(result);
+   const docPathSize = Buffer.byteLength(docRef.path, 'utf8') + 16;
+   const writes = [];
+   const shardCollection = docRef.collection('_shards');
+   let currentChunk = {};
+   let currentChunkSize = 0;
+   let shardIndex = 0;
+
+   // If under limit, write directly
+   if ((totalSize + docPathSize) < CHUNK_LIMIT) { const data = { ...result, _completed: true, _sharded: false }; return [{ ref: docRef, data, options: { merge: true } }]; }

    logger.log('INFO', `[AutoShard] Result size ~${Math.round(totalSize/1024)}KB exceeds limit. Sharding...`);
-   const writes = [];
-   const shardCollection = docRef.collection('_shards');
-   let currentChunk = {};
-   let currentChunkSize = 0;
-   let shardIndex = 0;

+   // iF over limit, shard the document
    for (const [key, value] of Object.entries(result)) {
      if (key.startsWith('_')) continue;
-     const keySize = Buffer.byteLength(key, 'utf8') + 1;
+     const keySize = Buffer.byteLength(key, 'utf8') + 1;
      const valueSize = calculateFirestoreBytes(value);
-     const itemSize = keySize + valueSize;
+     const itemSize = keySize + valueSize;

+     // If adding this item exceeds the chunk limit, commit current chunk
      if (currentChunkSize + itemSize > CHUNK_LIMIT) {
        writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: currentChunk, options: { merge: false } });
        shardIndex++;
-       currentChunk = {};
+       currentChunk = {};
        currentChunkSize = 0;
      }
      currentChunk[key] = value;
      currentChunkSize += itemSize;
    }
-   if (Object.keys(currentChunk).length > 0) {
-     writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: currentChunk, options: { merge: false } });
-   }
+
+   // Write the final chunk
+   if (Object.keys(currentChunk).length > 0) { writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: currentChunk, options: { merge: false } }); }

+   // Finally, write the pointer document
    const pointerData = { _completed: true, _sharded: true, _shardCount: shardIndex + 1, _lastUpdated: new Date().toISOString() };
    writes.push({ ref: docRef, data: pointerData, options: { merge: false } });
    return writes;
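
The rewritten function above stores large results as chunk documents in a `_shards` subcollection plus a small pointer document carrying `_sharded` and `_shardCount`. The diff does not show the read path; a hypothetical reader that reassembles such a result might look like this:

    // Hypothetical reader for the layout produced above (not part of the package).
    async function readPossiblyShardedResult(docRef) {
      const snap = await docRef.get();
      if (!snap.exists) return null;

      const data = snap.data();
      if (!data._sharded) return data; // small results live inline on the document

      // Sharded: merge every chunk from the `_shards` subcollection back together.
      const shardSnap = await docRef.collection('_shards').get();
      const merged = {};
      for (const shard of shardSnap.docs) Object.assign(merged, shard.data());
      return { ...merged, _completed: data._completed, _shardCount: data._shardCount };
    }
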
@@ -3,19 +3,25 @@
   */

  async function fetchComputationStatus(dateStr, config, { db }) {
-   if (!key) throw new Error('fetchStatus requires a key');
+   // FIX: Check dateStr directly, or define 'key' before checking it.
+   if (!dateStr) throw new Error('fetchStatus requires a key');
+
    const key = dateStr;
    const collection = config.computationStatusCollection || 'computation_status';
    const docRef = db.collection(collection).doc(key);
+
    const snap = await docRef.get();
    return snap.exists ? snap.data() : {};
  }

  async function updateComputationStatus(dateStr, updates, config, { db }) {
    if (!dateStr) throw new Error('updateStatus requires a key');
+
    if (!updates || Object.keys(updates).length === 0) return;
+
    const collection = config.computationStatusCollection || 'computation_status';
    const docRef = db.collection(collection).doc(dateStr);
+
    await docRef.set(updates, { merge: true });
    return true;
  }
@@ -3,7 +3,7 @@
   */

  const { FieldValue, FieldPath } = require('@google-cloud/firestore');
- const crypto = require('crypto');
+ const crypto = require('crypto');

  /** Stage 1: Normalize a calculation name to kebab-case */
  function normalizeName(name) { return name.replace(/_/g, '-'); }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "bulltrackers-module",
-   "version": "1.0.220",
+   "version": "1.0.221",
    "description": "Helper Functions for Bulltrackers.",
    "main": "index.js",
    "files": [