bulltrackers-module 1.0.584 → 1.0.586

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,11 +2,18 @@
2
2
  * @fileoverview Checks availability of root data via the Root Data Index.
3
3
  * REFACTORED: Fully supports granular flags for PI, Signed-In Users, Rankings, and Verification.
4
4
  * UPDATED: Enforces 'mandatoryRoots' metadata to override permissive flags.
5
+ * NEW: Added 'getAvailabilityWindow' for efficient batch availability lookups using range queries.
5
6
  */
6
7
  const { normalizeName } = require('../utils/utils');
7
8
 
8
9
  const INDEX_COLLECTION = process.env.ROOT_DATA_AVAILABILITY_COLLECTION || 'system_root_data_index';
9
10
 
11
+ /**
12
+ * Checks if a specific calculation can run based on its dependencies and the current data status.
13
+ * @param {Object} calcManifest - The calculation manifest.
14
+ * @param {Object} rootDataStatus - The availability status object.
15
+ * @returns {Object} { canRun: boolean, missing: Array, available: Array }
16
+ */
10
17
  function checkRootDependencies(calcManifest, rootDataStatus) {
11
18
  const missing = [];
12
19
  const available = [];
@@ -228,6 +235,9 @@ function getViableCalculations(candidates, fullManifest, rootDataStatus, dailySt
228
235
  return viable;
229
236
  }
230
237
 
238
+ /**
239
+ * Checks root data availability for a single date.
240
+ */
231
241
  async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
232
242
  const { logger, db } = dependencies;
233
243
 
@@ -290,4 +300,66 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
290
300
  }
291
301
  }
292
302
 
293
- module.exports = { checkRootDependencies, checkRootDataAvailability, getViableCalculations };
303
+ /**
304
+ * [NEW] Fetches availability status for a range of dates.
305
+ * Uses a range query to only retrieve indices that actually exist, preventing wasted reads on empty days.
306
+ * @param {Object} deps - Dependencies (must include db)
307
+ * @param {string} startDateStr - ISO Date string (YYYY-MM-DD) inclusive start
308
+ * @param {string} endDateStr - ISO Date string (YYYY-MM-DD) inclusive end
309
+ * @returns {Promise<Map<string, Object>>} Map of dateStr -> status object
310
+ */
311
+ async function getAvailabilityWindow(deps, startDateStr, endDateStr) {
312
+ const { db } = deps;
313
+
314
+ // Perform Range Query on Document ID (Date String)
315
+ const snapshot = await db.collection(INDEX_COLLECTION)
316
+ .where(db.FieldPath.documentId(), '>=', startDateStr)
317
+ .where(db.FieldPath.documentId(), '<=', endDateStr)
318
+ .get();
319
+
320
+ const availabilityMap = new Map();
321
+
322
+ snapshot.forEach(doc => {
323
+ const data = doc.data();
324
+ const details = data.details || {};
325
+ const dateStr = doc.id;
326
+
327
+ // Construct status object matching checkRootDataAvailability structure
328
+ const status = {
329
+ hasPortfolio: !!data.hasPortfolio,
330
+ hasHistory: !!data.hasHistory,
331
+ hasSocial: !!data.hasSocial,
332
+ hasInsights: !!data.hasInsights,
333
+ hasPrices: !!data.hasPrices,
334
+ speculatorPortfolio: !!details.speculatorPortfolio,
335
+ normalPortfolio: !!details.normalPortfolio,
336
+ speculatorHistory: !!details.speculatorHistory,
337
+ normalHistory: !!details.normalHistory,
338
+ piRankings: !!details.piRankings,
339
+ piPortfolios: !!details.piPortfolios,
340
+ piDeepPortfolios: !!details.piDeepPortfolios,
341
+ piHistory: !!details.piHistory,
342
+ signedInUserPortfolio: !!details.signedInUserPortfolio,
343
+ signedInUserHistory: !!details.signedInUserHistory,
344
+ signedInUserVerification: !!details.signedInUserVerification,
345
+ hasPISocial: !!details.hasPISocial || !!data.hasPISocial,
346
+ hasSignedInSocial: !!details.hasSignedInSocial || !!data.hasSignedInSocial,
347
+ piRatings: !!details.piRatings,
348
+ piPageViews: !!details.piPageViews,
349
+ watchlistMembership: !!details.watchlistMembership,
350
+ piAlertHistory: !!details.piAlertHistory,
351
+ piMasterList: !!details.piMasterList
352
+ };
353
+
354
+ availabilityMap.set(dateStr, status);
355
+ });
356
+
357
+ return availabilityMap;
358
+ }
359
+
360
+ module.exports = {
361
+ checkRootDependencies,
362
+ checkRootDataAvailability,
363
+ getViableCalculations,
364
+ getAvailabilityWindow // [NEW] Exported
365
+ };
@@ -18,8 +18,21 @@ const {
18
18
  loadPIWatchlistData,
19
19
  loadPopularInvestorMasterList
20
20
  } = require('../utils/data_loader');
21
+ const { getAvailabilityWindow } = require('./AvailabilityChecker');
21
22
  const zlib = require('zlib');
22
23
 
24
+ // [NEW] Mapping of Loader Methods to Availability Flags in the Index
25
+ // Used to intelligently skip reads for data that is known to be missing.
26
+ const LOADER_DEPENDENCY_MAP = {
27
+ 'loadRankings': 'piRankings',
28
+ 'loadRatings': 'piRatings',
29
+ 'loadPageViews': 'piPageViews',
30
+ 'loadWatchlistMembership': 'watchlistMembership',
31
+ 'loadAlertHistory': 'piAlertHistory',
32
+ 'loadInsights': 'hasInsights',
33
+ 'loadSocial': 'hasSocial' // broad check covering PI, SignedIn, and Generic
34
+ };
35
+
23
36
  class CachedDataLoader {
24
37
  constructor(config, dependencies) {
25
38
  this.config = config;
@@ -149,9 +162,10 @@ class CachedDataLoader {
149
162
  return data;
150
163
  }
151
164
 
152
- // --- [NEW] Series Loading Logic ---
165
+ // --- [UPDATED] Series Loading Logic with Pre-flight Availability Check ---
153
166
  /**
154
167
  * Optimistically loads a series of root data over a lookback period.
168
+ * Uses the Root Data Index (getAvailabilityWindow) to avoid reading non-existent data.
155
169
  * @param {string} loaderMethod - The method name to call (e.g., 'loadAlertHistory')
156
170
  * @param {string} dateStr - The end date (exclusive or inclusive depending on data availability)
157
171
  * @param {number} lookbackDays - Number of days to look back
@@ -159,26 +173,83 @@ class CachedDataLoader {
159
173
  async loadSeries(loaderMethod, dateStr, lookbackDays) {
160
174
  if (!this[loaderMethod]) throw new Error(`[CachedDataLoader] Unknown method ${loaderMethod}`);
161
175
 
162
- const results = {};
176
+ // 1. Calculate Date Range
163
177
  const endDate = new Date(dateStr);
178
+ const startDate = new Date(endDate);
179
+ startDate.setUTCDate(startDate.getUTCDate() - (lookbackDays - 1)); // -1 because range is inclusive of end
180
+
181
+ const startStr = startDate.toISOString().slice(0, 10);
182
+ const endStr = endDate.toISOString().slice(0, 10);
183
+
184
+ // 2. Pre-flight: Fetch Availability Window
185
+ // This is a single range query that tells us exactly which days have data.
186
+ let availabilityMap = new Map();
187
+ try {
188
+ availabilityMap = await getAvailabilityWindow(this.deps, startStr, endStr);
189
+ } catch (e) {
190
+ console.warn(`[CachedDataLoader] Availability check failed for series. Falling back to optimistic fetch. Error: ${e.message}`);
191
+ // If availability check fails, we proceed with optimistic fetching (empty map)
192
+ }
193
+
194
+ // 3. Identify Required Flag in Availability Index
195
+ const requiredFlag = LOADER_DEPENDENCY_MAP[loaderMethod];
196
+
197
+ const results = {};
164
198
  const promises = [];
199
+ let skippedCount = 0;
165
200
 
166
- // Fetch N days back (including dateStr if relevant, usually handled by caller logic)
167
- // Here we fetch [dateStr, dateStr-1, ... dateStr-(N-1)]
201
+ // 4. Fetch N days back
168
202
  for (let i = 0; i < lookbackDays; i++) {
169
203
  const d = new Date(endDate);
170
204
  d.setUTCDate(d.getUTCDate() - i);
171
205
  const dString = d.toISOString().slice(0, 10);
172
206
 
173
- promises.push(
174
- this[loaderMethod](dString)
175
- .then(data => ({ date: dString, data }))
176
- .catch(err => {
177
- // Optimistic: Log warning but continue
178
- console.warn(`[CachedDataLoader] Failed to load series item ${loaderMethod} for ${dString}: ${err.message}`);
179
- return { date: dString, data: null };
180
- })
181
- );
207
+ // CHECK: Does index exist AND does it have our data?
208
+ // If map is empty (e.g. error), we default to trying (optimistic) unless we strictly trust the map.
209
+ // But if the query succeeded, map only contains dates that exist.
210
+ const dayStatus = availabilityMap.get(dString);
211
+
212
+ // Logic:
213
+ // 1. If dayStatus is undefined, it means the DATE itself is missing from the index (no data at all). -> SKIP
214
+ // 2. If dayStatus is defined, check the specific flag. -> IF FALSE SKIP
215
+
216
+ // Note: If availabilityMap is empty but dates SHOULD exist, we might have an issue.
217
+ // However, getAvailabilityWindow returns only existing docs. So if it's not in map, it's not in DB.
218
+
219
+ let shouldFetch = false;
220
+
221
+ if (availabilityMap.size > 0) {
222
+ // If we have index data, we trust it.
223
+ if (dayStatus) {
224
+ if (!requiredFlag || dayStatus[requiredFlag]) {
225
+ shouldFetch = true;
226
+ }
227
+ }
228
+ } else {
229
+ // If map is empty, it could mean NO data exists in that range, OR check failed.
230
+ // If check failed (caught above), we might want to try anyway?
231
+ // For now, if map is empty (validly), we assume no data.
232
+ // To be safe against empty map meaning "everything missing", we can verify if the map was populated.
233
+ // But getAvailabilityWindow returns a new Map(), so size 0 means 0 results found.
234
+ // Thus: Skip everything.
235
+ shouldFetch = false;
236
+ }
237
+
238
+ // Fallback for when we didn't run availability check (e.g. no deps provided or import fail)?
239
+ // The availabilityMap is initialized above.
240
+
241
+ if (shouldFetch) {
242
+ promises.push(
243
+ this[loaderMethod](dString)
244
+ .then(data => ({ date: dString, data }))
245
+ .catch(err => {
246
+ console.warn(`[CachedDataLoader] Failed to load series item ${loaderMethod} for ${dString}: ${err.message}`);
247
+ return { date: dString, data: null };
248
+ })
249
+ );
250
+ } else {
251
+ skippedCount++;
252
+ }
182
253
  }
183
254
 
184
255
  const loaded = await Promise.all(promises);
@@ -191,6 +262,11 @@ class CachedDataLoader {
191
262
  }
192
263
  });
193
264
 
265
+ // Debug log to confirm efficiency
266
+ if (skippedCount > 0) {
267
+ // console.debug(`[CachedDataLoader] Smart Series Load: Requested ${lookbackDays}, Found ${foundCount}, Skipped ${skippedCount} missing dates.`);
268
+ }
269
+
194
270
  return {
195
271
  dates: Object.keys(results).sort(),
196
272
  data: results,
@@ -200,4 +276,4 @@ class CachedDataLoader {
200
276
  }
201
277
  }
202
278
 
203
- module.exports = { CachedDataLoader };
279
+ module.exports = { CachedDataLoader };
@@ -1,11 +1,12 @@
1
1
  /**
2
2
  * @fileoverview Fetches results from previous computations, handling auto-sharding and decompression.
3
+ * UPDATED: Implemented 'Batched Series Fetching' to reduce Firestore read operations by ~98% for time-series lookups.
3
4
  */
4
5
  const { normalizeName } = require('../utils/utils');
5
6
  const zlib = require('zlib');
7
+ const pLimit = require('p-limit');
6
8
 
7
9
  async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db }, includeSelf = false) {
8
- // ... [Existing implementation unchanged] ...
9
10
  const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
10
11
  const calcsToFetch = new Set();
11
12
 
@@ -42,6 +43,7 @@ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config,
42
43
  if (!doc.exists) return;
43
44
  const data = doc.data();
44
45
 
46
+ // Handle Decompression
45
47
  if (data._compressed === true && data.payload) {
46
48
  try {
47
49
  const unzipped = zlib.gunzipSync(data.payload);
@@ -51,9 +53,12 @@ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config,
51
53
  fetched[name] = {};
52
54
  }
53
55
  }
56
+ // Handle Sharding
54
57
  else if (data._sharded === true) {
55
58
  hydrationPromises.push(hydrateAutoShardedResult(doc.ref, name));
56
- } else if (data._completed) {
59
+ }
60
+ // Standard
61
+ else if (data._completed) {
57
62
  fetched[name] = data;
58
63
  }
59
64
  });
@@ -72,46 +77,130 @@ async function hydrateAutoShardedResult(docRef, resultName) {
72
77
  const assembledData = { _completed: true };
73
78
  snapshot.forEach(doc => {
74
79
  const chunk = doc.data();
75
- Object.assign(assembledData, chunk);
80
+ // [FIX] Ensure we don't merge metadata fields that might corrupt the object
81
+ const { _expireAt, ...safeChunk } = chunk;
82
+ Object.assign(assembledData, safeChunk);
76
83
  });
77
84
  delete assembledData._sharded;
78
85
  delete assembledData._completed;
79
86
  return { name: resultName, data: assembledData };
80
87
  }
81
88
 
82
- // [NEW] Fetch Result Series
89
+ /**
90
+ * [OPTIMIZED] Fetch Result Series using Batch Read
91
+ * Reduces N x M reads to a single (or chunked) getAll operation.
92
+ */
83
93
  async function fetchResultSeries(dateStr, calcsToFetchNames, fullManifest, config, deps, lookbackDays) {
94
+ const { db } = deps;
84
95
  const results = {}; // Structure: { [date]: { [calcName]: data } }
85
96
  const endDate = new Date(dateStr);
86
- const promises = [];
87
-
88
- // Create a dummy "calcsInPass" object to satisfy fetchExistingResults signature
89
- // We just need objects that have .dependencies matching what we want to fetch
90
- const dummyCalc = { dependencies: calcsToFetchNames, isHistorical: false };
91
-
97
+
98
+ // 1. Build Manifest Map for quick lookups
99
+ const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
100
+
101
+ // 2. Pre-calculate all Document References needed
102
+ const batchRequest = [];
103
+
92
104
  for (let i = 0; i < lookbackDays; i++) {
93
105
  const d = new Date(endDate);
94
106
  d.setUTCDate(d.getUTCDate() - i);
95
107
  const dString = d.toISOString().slice(0, 10);
108
+
109
+ for (const name of calcsToFetchNames) {
110
+ const normName = normalizeName(name);
111
+ const m = manifestMap.get(normName);
112
+ if (!m) continue;
96
113
 
97
- promises.push(
98
- fetchExistingResults(dString, [dummyCalc], fullManifest, config, deps, false)
99
- .then(res => ({ date: dString, data: res }))
100
- .catch(e => {
101
- console.warn(`[DependencyFetcher] Failed to fetch series for ${dString}: ${e.message}`);
102
- return { date: dString, data: {} };
103
- })
104
- );
114
+ const ref = db.collection(config.resultsCollection)
115
+ .doc(dString)
116
+ .collection(config.resultsSubcollection)
117
+ .doc(m.category || 'unknown')
118
+ .collection(config.computationsSubcollection)
119
+ .doc(normName);
120
+
121
+ batchRequest.push({ date: dString, name: normName, ref });
122
+ }
105
123
  }
106
124
 
107
- const series = await Promise.all(promises);
108
- series.forEach(({ date, data }) => {
109
- if (data && Object.keys(data).length > 0) {
110
- results[date] = data;
125
+ if (batchRequest.length === 0) return {};
126
+
127
+ // 3. Batch Fetch (Chunked to respect Firestore limits, usually 100-500 is safe)
128
+ const BATCH_SIZE = 100;
129
+ const hydrationTasks = [];
130
+
131
+ // Helper to process a batch of snapshots
132
+ const processBatch = async (items) => {
133
+ const refs = items.map(i => i.ref);
134
+ let snapshots;
135
+ try {
136
+ snapshots = await db.getAll(...refs);
137
+ } catch (e) {
138
+ console.warn(`[DependencyFetcher] Batch read failed: ${e.message}. Skipping batch.`);
139
+ return;
111
140
  }
112
- });
141
+
142
+ for (let i = 0; i < snapshots.length; i++) {
143
+ const doc = snapshots[i];
144
+ const meta = items[i];
145
+
146
+ if (!doc.exists) continue;
147
+
148
+ const data = doc.data();
149
+ let finalData = null;
150
+
151
+ // A. Compressed
152
+ if (data._compressed === true && data.payload) {
153
+ try {
154
+ const unzipped = zlib.gunzipSync(data.payload);
155
+ finalData = JSON.parse(unzipped.toString());
156
+ } catch (e) {
157
+ console.error(`[Hydration] Failed to decompress ${meta.name} for ${meta.date}`, e);
158
+ }
159
+ }
160
+ // B. Sharded (Defer hydration to avoid blocking the loop)
161
+ else if (data._sharded === true) {
162
+ hydrationTasks.push({
163
+ date: meta.date,
164
+ name: meta.name,
165
+ ref: doc.ref
166
+ });
167
+ continue; // Skip immediate assignment
168
+ }
169
+ // C. Standard
170
+ else if (data._completed) {
171
+ finalData = data;
172
+ }
173
+
174
+ // Assign if we have data
175
+ if (finalData) {
176
+ if (!results[meta.date]) results[meta.date] = {};
177
+ results[meta.date][meta.name] = finalData;
178
+ }
179
+ }
180
+ };
181
+
182
+ // Execute batches
183
+ for (let i = 0; i < batchRequest.length; i += BATCH_SIZE) {
184
+ const chunk = batchRequest.slice(i, i + BATCH_SIZE);
185
+ await processBatch(chunk);
186
+ }
187
+
188
+ // 4. Handle Sharded Results (Parallel Hydration)
189
+ if (hydrationTasks.length > 0) {
190
+ // Limit concurrency for shard fetching to avoid overwhelming the client
191
+ const limit = pLimit(20);
192
+ await Promise.all(hydrationTasks.map(task => limit(async () => {
193
+ try {
194
+ const res = await hydrateAutoShardedResult(task.ref, task.name);
195
+ if (!results[task.date]) results[task.date] = {};
196
+ results[task.date][task.name] = res.data;
197
+ } catch (e) {
198
+ console.warn(`[DependencyFetcher] Failed to hydrate shards for ${task.name}/${task.date}: ${e.message}`);
199
+ }
200
+ })));
201
+ }
113
202
 
114
203
  return results;
115
204
  }
116
205
 
117
- module.exports = { fetchExistingResults, fetchResultSeries };
206
+ module.exports = { fetchExistingResults, fetchResultSeries };
@@ -122,7 +122,12 @@ class StandardExecutor {
122
122
  let hasFlushed = false;
123
123
  const cachedLoader = new CachedDataLoader(config, deps);
124
124
  const startSetup = performance.now();
125
- await cachedLoader.loadMappings();
125
+
126
+ // [OPTIMIZATION] Hoist Static Data Load out of User Loop
127
+ const mappings = await cachedLoader.loadMappings();
128
+ // Pre-load Master List to cache it once
129
+ const piMasterList = await cachedLoader.loadPIMasterList();
130
+
126
131
  const setupDuration = performance.now() - startSetup;
127
132
  Object.keys(executionStats).forEach(name => executionStats[name].timings.setup += setupDuration);
128
133
 
@@ -221,8 +226,10 @@ class StandardExecutor {
221
226
  fetchedDeps, previousFetchedDeps, config, deps, cachedLoader,
222
227
  executionStats[normalizeName(calc.manifest.name)],
223
228
  earliestDates,
224
- // [NEW] Pass loaded series data
225
- seriesData
229
+ seriesData,
230
+ // [NEW] Pass Hoisted Data
231
+ mappings,
232
+ piMasterList
226
233
  )
227
234
  ));
228
235
 
@@ -325,25 +332,21 @@ class StandardExecutor {
325
332
  if (newResult.failureReport) failureAcc.push(...newResult.failureReport);
326
333
  }
327
334
 
328
- static async executePerUser(calcInstance, metadata, dateStr, portfolioData, yesterdayPortfolioData, historyData, computedDeps, prevDeps, config, deps, loader, stats, earliestDates, seriesData = {}) {
335
+ static async executePerUser(calcInstance, metadata, dateStr, portfolioData, yesterdayPortfolioData, historyData, computedDeps, prevDeps, config, deps, loader, stats, earliestDates, seriesData = {}, mappings = null, piMasterList = null) {
329
336
  const { logger } = deps;
330
337
  const targetUserType = metadata.userType;
331
- // [FIX] Always load Global Helpers
332
- const mappings = await loader.loadMappings();
333
- // [FIX] Correct method: loadPIMasterList() (no args needed as loader has context)
334
- const piMasterList = await loader.loadPIMasterList();
338
+
339
+ // [OPTIMIZATION] Use passed mappings/list if available, else load (fallback)
340
+ const mappingsToUse = mappings || await loader.loadMappings();
341
+ const piMasterListToUse = piMasterList || await loader.loadPIMasterList();
342
+
335
343
  const SCHEMAS = mathLayer.SCHEMAS;
336
344
 
337
- // 1. Load Root Data
345
+ // 1. Load Root Data (CachedLoader handles memoization for these)
338
346
  const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await loader.loadInsights(dateStr) } : null;
339
-
340
- // [FIX] Correct method: loadVerifications() (no args)
341
347
  const verifications = metadata.rootDataDependencies?.includes('verification') ? await loader.loadVerifications() : null;
342
-
343
- // [FIX] Correct method: loadRankings(dateStr) (no config/deps args)
344
348
  const rankings = metadata.rootDataDependencies?.includes('rankings') ? await loader.loadRankings(dateStr) : null;
345
349
 
346
- // [FIX] Correct method: loadRankings(prevStr)
347
350
  let yesterdayRankings = null;
348
351
  if (metadata.rootDataDependencies?.includes('rankings') && metadata.isHistorical) {
349
352
  const prevDate = new Date(dateStr); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
@@ -351,73 +354,31 @@ class StandardExecutor {
351
354
  yesterdayRankings = await loader.loadRankings(prevStr);
352
355
  }
353
356
 
354
- // [FIX] Correct method: loadSocial(dateStr)
355
357
  const socialContainer = metadata.rootDataDependencies?.includes('social') ? await loader.loadSocial(dateStr) : null;
356
358
 
357
359
  const allowMissing = metadata.canHaveMissingRoots === true;
358
360
 
359
- // [FIX] Correct method: loadRatings(dateStr)
360
- let ratings = null;
361
- if (metadata.rootDataDependencies?.includes('ratings')) {
362
- try {
363
- ratings = await loader.loadRatings(dateStr);
364
- } catch (e) {
365
- if (!allowMissing) {
366
- throw new Error(`[StandardExecutor] Required root 'ratings' failed to load for ${metadata.name}: ${e.message}`);
367
- }
368
- ratings = null;
369
- }
370
- if (!ratings && !allowMissing) {
371
- throw new Error(`[StandardExecutor] Required root 'ratings' is missing for ${metadata.name}`);
372
- }
373
- }
374
-
375
- // [FIX] Correct method: loadPageViews(dateStr)
376
- let pageViews = null;
377
- if (metadata.rootDataDependencies?.includes('pageViews')) {
378
- try {
379
- pageViews = await loader.loadPageViews(dateStr);
380
- } catch (e) {
381
- if (!allowMissing) {
382
- throw new Error(`[StandardExecutor] Required root 'pageViews' failed to load for ${metadata.name}: ${e.message}`);
383
- }
384
- pageViews = null;
385
- }
386
- if (!pageViews && !allowMissing) {
387
- throw new Error(`[StandardExecutor] Required root 'pageViews' is missing for ${metadata.name}`);
388
- }
389
- }
390
-
391
- // [FIX] Correct method: loadWatchlistMembership(dateStr)
392
- let watchlistMembership = null;
393
- if (metadata.rootDataDependencies?.includes('watchlist')) {
394
- try {
395
- watchlistMembership = await loader.loadWatchlistMembership(dateStr);
396
- } catch (e) {
397
- if (!allowMissing) {
398
- throw new Error(`[StandardExecutor] Required root 'watchlist' failed to load for ${metadata.name}: ${e.message}`);
399
- }
400
- watchlistMembership = null;
401
- }
402
- if (!watchlistMembership && !allowMissing) {
403
- throw new Error(`[StandardExecutor] Required root 'watchlist' is missing for ${metadata.name}`);
404
- }
405
- }
406
-
407
- // [FIX] Correct method: loadAlertHistory(dateStr)
408
- let alertHistory = null;
409
- if (metadata.rootDataDependencies?.includes('alerts')) {
410
- try {
411
- alertHistory = await loader.loadAlertHistory(dateStr);
412
- } catch (e) {
413
- if (!allowMissing) {
414
- throw new Error(`[StandardExecutor] Required root 'alerts' failed to load for ${metadata.name}: ${e.message}`);
415
- }
416
- alertHistory = null;
417
- }
418
- if (!alertHistory && !allowMissing) {
419
- throw new Error(`[StandardExecutor] Required root 'alerts' is missing for ${metadata.name}`);
420
- }
361
+ // Helper to safely load roots
362
+ const safeLoad = async (method, name) => {
363
+ if (!metadata.rootDataDependencies?.includes(name)) return null;
364
+ try {
365
+ return await loader[method](dateStr);
366
+ } catch (e) {
367
+ if (!allowMissing) throw new Error(`[StandardExecutor] Required root '${name}' failed: ${e.message}`);
368
+ return null;
369
+ }
370
+ };
371
+
372
+ const ratings = await safeLoad('loadRatings', 'ratings');
373
+ const pageViews = await safeLoad('loadPageViews', 'pageViews');
374
+ const watchlistMembership = await safeLoad('loadWatchlistMembership', 'watchlist');
375
+ const alertHistory = await safeLoad('loadAlertHistory', 'alerts');
376
+
377
+ if (!allowMissing) {
378
+ if (metadata.rootDataDependencies?.includes('ratings') && !ratings) throw new Error("Missing ratings");
379
+ if (metadata.rootDataDependencies?.includes('pageViews') && !pageViews) throw new Error("Missing pageViews");
380
+ if (metadata.rootDataDependencies?.includes('watchlist') && !watchlistMembership) throw new Error("Missing watchlist");
381
+ if (metadata.rootDataDependencies?.includes('alerts') && !alertHistory) throw new Error("Missing alerts");
421
382
  }
422
383
 
423
384
  let chunkSuccess = 0;
@@ -470,7 +431,9 @@ class StandardExecutor {
470
431
 
471
432
  const context = ContextFactory.buildPerUserContext({
472
433
  todayPortfolio, yesterdayPortfolio, todayHistory, userId,
473
- userType: actualUserType, dateStr, metadata, mappings, insights,
434
+ userType: actualUserType, dateStr, metadata,
435
+ mappings: mappingsToUse,
436
+ insights,
474
437
  socialData: effectiveSocialData ? { today: effectiveSocialData } : null,
475
438
  computedDependencies: computedDeps, previousComputedDependencies: prevDeps,
476
439
  config, deps,
@@ -489,7 +452,7 @@ class StandardExecutor {
489
452
  watchlistMembership: watchlistMembership || {},
490
453
  alertHistory: alertHistory || {},
491
454
 
492
- piMasterList,
455
+ piMasterList: piMasterListToUse,
493
456
  // [NEW] Pass Series Data
494
457
  seriesData
495
458
  });
@@ -512,4 +475,4 @@ class StandardExecutor {
512
475
  }
513
476
  }
514
477
 
515
- module.exports = { StandardExecutor };
478
+ module.exports = { StandardExecutor };
@@ -412,4 +412,111 @@ class DistributionAnalytics {
412
412
  }
413
413
  }
414
414
 
415
- module.exports = { MathPrimitives, SignalPrimitives, Aggregators, TimeSeries, DistributionAnalytics, FinancialEngineering, TimeSeriesAnalysis };
415
+ /**
416
+ * file: computation-system/layers/mathematics.js
417
+ * [Previous content remains, adding LinearAlgebra class]
418
+ */
419
+
420
+ class LinearAlgebra {
421
+ /**
422
+ * Calculates the Covariance Matrix and Mean Vector for a dataset
423
+ * @param {Array<Array<number>>} data - Rows are observations, Cols are features
424
+ * @returns {Object} { matrix: Array<Array<number>>, means: Array<number> }
425
+ */
426
+ static covarianceMatrix(data) {
427
+ if (!data || data.length === 0) return { matrix: [], means: [] };
428
+ const n = data.length;
429
+ const numFeatures = data[0].length;
430
+
431
+ // 1. Calculate Means
432
+ const means = new Array(numFeatures).fill(0);
433
+ for (let i = 0; i < n; i++) {
434
+ for (let j = 0; j < numFeatures; j++) {
435
+ means[j] += data[i][j];
436
+ }
437
+ }
438
+ for (let j = 0; j < numFeatures; j++) means[j] /= n;
439
+
440
+ // 2. Calculate Covariance
441
+ // Cov(x,y) = Σ(x_i - x_mean)(y_i - y_mean) / (N-1)
442
+ const cov = Array(numFeatures).fill(0).map(() => Array(numFeatures).fill(0));
443
+ for (let i = 0; i < numFeatures; i++) {
444
+ for (let j = 0; j < numFeatures; j++) {
445
+ let sum = 0;
446
+ for (let k = 0; k < n; k++) {
447
+ sum += (data[k][i] - means[i]) * (data[k][j] - means[j]);
448
+ }
449
+ cov[i][j] = sum / (n > 1 ? n - 1 : 1);
450
+ }
451
+ }
452
+ return { matrix: cov, means };
453
+ }
454
+
455
+ /**
456
+ * Inverts a Matrix using Gaussian Elimination
457
+ * Required to transform the distance into standard deviations
458
+ * @param {Array<Array<number>>} M - Square Matrix
459
+ */
460
+ static invertMatrix(M) {
461
+ if (!M || M.length === 0) return null;
462
+ const n = M.length;
463
+
464
+ // Deep copy to create the augmented matrix [M | I]
465
+ const A = M.map(row => [...row]);
466
+ const I = Array(n).fill(0).map((_, i) => Array(n).fill(0).map((_, j) => (i === j ? 1 : 0)));
467
+
468
+ for (let i = 0; i < n; i++) {
469
+ // Find pivot
470
+ let pivot = A[i][i];
471
+ if (Math.abs(pivot) < 1e-10) return null; // Singular matrix (features are perfectly correlated)
472
+
473
+ // Normalize row i
474
+ for (let j = 0; j < n; j++) {
475
+ A[i][j] /= pivot;
476
+ I[i][j] /= pivot;
477
+ }
478
+
479
+ // Eliminate other rows
480
+ for (let k = 0; k < n; k++) {
481
+ if (k !== i) {
482
+ const factor = A[k][i];
483
+ for (let j = 0; j < n; j++) {
484
+ A[k][j] -= factor * A[i][j];
485
+ I[k][j] -= factor * I[i][j];
486
+ }
487
+ }
488
+ }
489
+ }
490
+ return I;
491
+ }
492
+
493
+ /**
494
+ * Calculates Mahalanobis Distance
495
+ * D = sqrt( (x - μ)^T * Σ^-1 * (x - μ) )
496
+ * @param {Array<number>} vector - The current day's feature vector
497
+ * @param {Array<number>} means - The baseline mean vector
498
+ * @param {Array<Array<number>>} inverseCovariance - The inverted covariance matrix
499
+ */
500
+ static mahalanobisDistance(vector, means, inverseCovariance) {
501
+ if (!inverseCovariance || vector.length !== means.length) return 0;
502
+ const n = vector.length;
503
+
504
+ // Difference Vector (x - μ)
505
+ const diff = vector.map((val, i) => val - means[i]);
506
+
507
+ let distanceSq = 0;
508
+ for (let i = 0; i < n; i++) {
509
+ let rowSum = 0;
510
+ for (let j = 0; j < n; j++) {
511
+ rowSum += diff[j] * inverseCovariance[j][i];
512
+ }
513
+ distanceSq += rowSum * diff[i];
514
+ }
515
+
516
+ return Math.sqrt(Math.max(0, distanceSq));
517
+ }
518
+ }
519
+
520
+ // ... existing exports ...
521
+ module.exports = { MathPrimitives, SignalPrimitives, Aggregators, TimeSeries, DistributionAnalytics, FinancialEngineering, TimeSeriesAnalysis, LinearAlgebra };
522
+
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bulltrackers-module",
3
- "version": "1.0.584",
3
+ "version": "1.0.586",
4
4
  "description": "Helper Functions for Bulltrackers.",
5
5
  "main": "index.js",
6
6
  "files": [