bulltrackers-module 1.0.262 → 1.0.263

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -227,9 +227,8 @@ async function deleteOldCalculationData(dateStr, oldCategory, calcName, config,
 
  const shardsCol = oldDocRef.collection('_shards');
  const shardsSnap = await withRetry(() => shardsCol.listDocuments(), 'ListOldShards');
-
- const batch = db.batch();
- let ops = 0;
+ const batch = db.batch();
+ let ops = 0;
 
  for (const shardDoc of shardsSnap) { batch.delete(shardDoc); ops++; }
  batch.delete(oldDocRef);
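
Note on the pattern above: a Firestore WriteBatch is limited to 500 writes, which is presumably why the code counts ops (the guard itself falls outside this hunk). A minimal sketch of chunked shard deletion under that assumption; the helper name is hypothetical, not the package's:

    // Hypothetical helper: delete shard refs in batches of <= 500 writes.
    async function deleteShardsInChunks(db, shardRefs, limit = 500) {
      let batch = db.batch();
      let ops = 0;
      for (const ref of shardRefs) {
        batch.delete(ref);
        ops++;
        if (ops === limit) {          // commit before hitting the batch cap
          await batch.commit();
          batch = db.batch();
          ops = 0;
        }
      }
      if (ops > 0) await batch.commit();
    }
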
@@ -97,32 +97,20 @@ class HeuristicValidator {
  // If we found numeric values, check if they are all the same
  let isFlatline = false;
  if (numericValues.length > 5) {
-  const first = numericValues[0];
+  const first = numericValues[0];
   const identicalCount = numericValues.filter(v => Math.abs(v - first) < 0.000001).length;
-  const flatlinePct = (identicalCount / numericValues.length) * 100;
+  const flatlinePct = (identicalCount / numericValues.length) * 100;
 
   // Only flag flatline if the value isn't 0 (0 is handled by maxZeroPct)
-  if (flatlinePct > thresholds.maxFlatlinePct && Math.abs(first) > 0.0001) {
-   isFlatline = true;
-  }
+  if (flatlinePct > thresholds.maxFlatlinePct && Math.abs(first) > 0.0001) { isFlatline = true; }
  }
 
  // 6. Evaluations
- if (nanPct > thresholds.maxNanPct) {
-  return { valid: false, reason: `Mathematical Error: ${nanPct.toFixed(1)}% of sampled results contain NaN or Infinity.` };
- }
-
- if (zeroPct > thresholds.maxZeroPct) {
-  return { valid: false, reason: `Data Integrity: ${zeroPct.toFixed(1)}% of sampled results are 0. (Suspected Logic Failure)` };
- }
+ if (nanPct > thresholds.maxNanPct) { return { valid: false, reason: `Mathematical Error: ${nanPct.toFixed(1)}% of sampled results contain NaN or Infinity.` }; }
+ if (zeroPct > thresholds.maxZeroPct) { return { valid: false, reason: `Data Integrity: ${zeroPct.toFixed(1)}% of sampled results are 0. (Suspected Logic Failure)` }; }
+ if (nullPct > thresholds.maxNullPct) { return { valid: false, reason: `Data Integrity: ${nullPct.toFixed(1)}% of sampled results are Empty/Null.` }; }
 
- if (nullPct > thresholds.maxNullPct) {
-  return { valid: false, reason: `Data Integrity: ${nullPct.toFixed(1)}% of sampled results are Empty/Null.` };
- }
-
- if (isFlatline) {
-  return { valid: false, reason: `Anomaly: Detected Result Flatline. >${thresholds.maxFlatlinePct}% of outputs are identical (non-zero).` };
- }
+ if (isFlatline) { return { valid: false, reason: `Anomaly: Detected Result Flatline. >${thresholds.maxFlatlinePct}% of outputs are identical (non-zero).` }; }
 
  // Special check for Distribution/Profile calculations
  if (calcName.includes('profile') || calcName.includes('distribution')) {
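
For orientation, the percentages consumed above would come from a sampling pass over the computed outputs. A minimal sketch of that derivation, assuming a flat array of sampled values; the threshold defaults shown are invented, not the package's:

    const thresholds = { maxNanPct: 5, maxZeroPct: 90, maxNullPct: 50, maxFlatlinePct: 95 }; // assumed values
    function samplePercentages(samples) {
      const n = samples.length || 1;
      return {
        nanPct:  (samples.filter(v => typeof v === 'number' && !Number.isFinite(v)).length / n) * 100,
        zeroPct: (samples.filter(v => v === 0).length / n) * 100,
        nullPct: (samples.filter(v => v === null || v === undefined).length / n) * 100,
      };
    }
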
@@ -48,7 +48,7 @@ async function recordRunAttempt(db, context, status, error = null, detailedMetri
 
  // History Doc: Stores this specific run
  // ID Format: targetDate_triggerTimestamp (Sortable by data date, then execution time)
- const runId = `${targetDate}_${triggerTimestamp}`;
+ const runId = `${targetDate}_${triggerTimestamp}`;
  const runDocRef = computationDocRef.collection('history').doc(runId);
 
  // 2. Prepare Metrics & Environment Info
@@ -56,16 +56,11 @@ async function recordRunAttempt(db, context, status, error = null, detailedMetri
 
  // Calculate size in MB
  let sizeMB = 0;
- if (detailedMetrics.storage && detailedMetrics.storage.sizeBytes) {
-  sizeMB = Number((detailedMetrics.storage.sizeBytes / (1024 * 1024)).toFixed(4));
- }
+ if (detailedMetrics.storage && detailedMetrics.storage.sizeBytes) { sizeMB = Number((detailedMetrics.storage.sizeBytes / (1024 * 1024)).toFixed(4)); }
 
  // Extract Validation Anomalies (Unusual Keys/Values)
  const anomalies = detailedMetrics.validation?.anomalies || [];
- if (error && error.message && error.message.includes('Data Integrity')) {
-  // If the error itself was a validation failure, add it to anomalies
-  anomalies.push(error.message);
- }
+ if (error && error.message && error.message.includes('Data Integrity')) { anomalies.push(error.message); }
 
  // 3. Construct the Run Log Entry
  const runEntry = {
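
The byte-to-megabyte conversion in that hunk rounds to four decimal places, e.g.:

    // 5,242,880 bytes / (1024 * 1024)                        -> sizeMB === 5
    // 1,234,567 bytes: Number((1234567 / 1048576).toFixed(4)) === 1.1774
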
@@ -5,23 +5,17 @@
 
 async function fetchComputationStatus(dateStr, config, { db }) {
  if (!dateStr) throw new Error('fetchStatus requires a key');
-
  const collection = config.computationStatusCollection || 'computation_status';
- const docRef = db.collection(collection).doc(dateStr);
-
- const snap = await docRef.get();
+ const docRef = db.collection(collection).doc(dateStr);
+ const snap = await docRef.get();
  if (!snap.exists) return {};
-
- const rawData = snap.data();
+ const rawData = snap.data();
  const normalized = {};
 
  // Normalize V1 (String) to V2 (Object)
  for (const [name, value] of Object.entries(rawData)) {
-  if (typeof value === 'string') {
-   normalized[name] = { hash: value, category: null }; // Legacy entry
-  } else {
-   normalized[name] = value; // V2 entry { hash, category }
-  }
+  if (typeof value === 'string') { normalized[name] = { hash: value, category: null }; // Legacy entry
+  } else { normalized[name] = value; }
  }
 
  return normalized;
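
The V1 to V2 normalization maps a bare hash string onto the structured shape, so callers always see one format. A before/after illustration with made-up values:

    // Hypothetical status document mixing legacy (V1) and current (V2) entries:
    const rawData = {
      DailyReturns: 'a1b2c3',                            // V1: bare hash string
      Volatility: { hash: 'd4e5f6', category: 'risk' },  // V2: structured entry
    };
    // After normalization both entries share the V2 shape:
    // { DailyReturns: { hash: 'a1b2c3', category: null },
    //   Volatility:   { hash: 'd4e5f6', category: 'risk' } }
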
@@ -34,18 +28,10 @@ async function updateComputationStatus(dateStr, updates, config, { db }) {
  const collection = config.computationStatusCollection || 'computation_status';
  const docRef = db.collection(collection).doc(dateStr);
 
- // We expect updates to be an object: { "CalcName": { hash: "...", category: "..." } }
- // But result committer might still pass strings if we don't update it.
- // We will enforce the structure here just in case.
-
  const safeUpdates = {};
  for (const [key, val] of Object.entries(updates)) {
-  if (typeof val === 'string') {
-   // Fallback if caller wasn't updated (shouldn't happen with full patch)
-   safeUpdates[key] = { hash: val, category: 'unknown', lastUpdated: new Date() };
-  } else {
-   safeUpdates[key] = { ...val, lastUpdated: new Date() };
-  }
+  if (typeof val === 'string') { safeUpdates[key] = { hash: val, category: 'unknown', lastUpdated: new Date() };
+  } else { safeUpdates[key] = { ...val, lastUpdated: new Date() }; }
  }
 
  await docRef.set(safeUpdates, { merge: true });
@@ -1,17 +1,17 @@
 /**
  * @fileoverview Build Reporter & Auto-Runner.
  * Generates a "Pre-Flight" report of what the computation system WILL do.
- * UPDATED: Optimized with Parallel Status Fetches inside the date loop.
+ * UPDATED: Fixed 'latest' document overwrite bug.
  */
 
-const { analyzeDateExecution } = require('../WorkflowOrchestrator');
-const { fetchComputationStatus } = require('../persistence/StatusRepository');
+const { analyzeDateExecution } = require('../WorkflowOrchestrator');
+const { fetchComputationStatus } = require('../persistence/StatusRepository');
 const { normalizeName, getExpectedDateStrings, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils');
-const { checkRootDataAvailability } = require('../data/AvailabilityChecker');
-const pLimit = require('p-limit');
-const path = require('path');
-const packageJson = require(path.join(__dirname, '..', '..', '..', 'package.json'));
-const packageVersion = packageJson.version;
+const { checkRootDataAvailability } = require('../data/AvailabilityChecker');
+const pLimit = require('p-limit');
+const path = require('path');
+const packageJson = require(path.join(__dirname, '..', '..', '..', 'package.json'));
+const packageVersion = packageJson.version;
 
 /**
  * AUTO-RUN ENTRY POINT
@@ -20,11 +20,13 @@ const packageVersion = packageJson.version;
 async function ensureBuildReport(config, dependencies, manifest) {
  const { db, logger } = dependencies;
  const now = new Date();
+ // Create a standardized build ID
  const buildId = `v${packageVersion}_${now.getFullYear()}-${String(now.getMonth()+1).padStart(2,'0')}-${String(now.getDate()).padStart(2,'0')}_${String(now.getHours()).padStart(2,'0')}-${String(now.getMinutes()).padStart(2,'0')}-${String(now.getSeconds()).padStart(2,'0')}`;
  const latestRef = db.collection('computation_build_records').doc('latest');
 
  try {
   const latestDoc = await latestRef.get();
+  // Check using 'packageVersion' key to match what we store
   const priorVersion = latestDoc.exists ? latestDoc.data().packageVersion : null;
 
   if (priorVersion === packageVersion) {
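
The buildId template above yields identifiers that sort by version and then build time; for an invented timestamp:

    // With packageVersion '1.0.263' and a build at 09:42:07 on 2026-01-15 (example values):
    // buildId === 'v1.0.263_2026-01-15_09-42-07'
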
@@ -33,13 +35,12 @@ async function ensureBuildReport(config, dependencies, manifest) {
   }
 
   logger.log('INFO', `[BuildReporter] 🚀 New Version Detected (${packageVersion}). Auto-running Pre-flight Report...`);
+
+  // Run generation. This function handles writing the 'latest' document with FULL data.
   await generateBuildReport(config, dependencies, manifest, 90, buildId);
 
-  await latestRef.set({
-   packageVersion,
-   buildId,
-   generatedAt: now.toISOString()
-  });
+  // [FIX] REMOVED: The redundant write that was overwriting the full report with just metadata.
+  // The generateBuildReport function now serves as the single source of truth for writing 'latest'.
 
  } catch (e) {
   logger.log('ERROR', `[BuildReporter] Auto-run check failed: ${e.message}`);
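
For context on the fix: Firestore's set() without { merge: true } replaces the entire document, so the removed write clobbered the full report that generateBuildReport had just stored under 'latest', leaving only three metadata fields. A minimal reproduction of the faulty sequence (report fields abbreviated):

    const latestRef = db.collection('computation_build_records').doc('latest');
    await latestRef.set({ packageVersion, buildId, summary: { /* ... */ }, dates: { /* ... */ } }); // full report
    await latestRef.set({ packageVersion, buildId, generatedAt: now.toISOString() }); // replaces the document
    // 'latest' now holds only packageVersion, buildId, generatedAt; the report data is gone.
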
@@ -65,7 +66,7 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
 
  const reportData = {
   buildId,
-  version: packageVersion,
+  packageVersion: packageVersion, // [FIX] Renamed 'version' to 'packageVersion' for consistency with ensureBuildReport check
   generatedAt: new Date().toISOString(),
   summary: {},
   dates: {}
@@ -117,9 +118,9 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
   willRun: [], willReRun: [], blocked: [], impossible: []
  };
 
- analysis.runnable.forEach(item => dateSummary.willRun.push({ name: item.name, reason: "New / No Previous Record" }));
- analysis.reRuns.forEach(item => dateSummary.willReRun.push({ name: item.name, reason: item.previousCategory ? "Migration" : "Hash Mismatch" }));
- analysis.impossible.forEach(item => dateSummary.impossible.push({ name: item.name, reason: item.reason }));
+ analysis.runnable.forEach (item => dateSummary.willRun.push ({ name: item.name, reason: "New / No Previous Record" }));
+ analysis.reRuns.forEach (item => dateSummary.willReRun.push ({ name: item.name, reason: item.previousCategory ? "Migration" : "Hash Mismatch" }));
+ analysis.impossible.forEach (item => dateSummary.impossible.push ({ name: item.name, reason: item.reason }));
  [...analysis.blocked, ...analysis.failedDependency].forEach(item => dateSummary.blocked.push({ name: item.name, reason: item.reason || 'Dependency' }));
 
  const hasUpdates = dateSummary.willRun.length || dateSummary.willReRun.length || dateSummary.blocked.length || dateSummary.impossible.length;
@@ -155,6 +156,7 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
  await reportRef.set(reportData);
 
  // 5. Update 'latest' pointer
+ // This now writes the FULL reportData, making 'latest' a cache of the actual report.
  await db.collection('computation_build_records').doc('latest').set({
   ...reportData,
   note: "Latest build report pointer."
@@ -187,15 +187,10 @@ async function ensurePriceShardIndex(config, deps) {
  const snap = await indexDocRef.get();
  if (snap.exists) {
   const data = snap.data();
-
-  // [FIX] Check TTL (24 hours)
   const lastUpdated = data.lastUpdated ? new Date(data.lastUpdated).getTime() : 0;
   const now = Date.now();
   const oneDayMs = 24 * 60 * 60 * 1000;
-
-  if ((now - lastUpdated) < oneDayMs) {
-   return data.index || {};
-  }
+  if ((now - lastUpdated) < oneDayMs) { return data.index || {}; }
   logger.log('INFO', '[ShardIndex] Index is stale (>24h). Rebuilding...');
  } else {
   logger.log('INFO', '[ShardIndex] Index not found. Building new Price Shard Index...');
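
That hunk is a standard TTL-cache read: serve the stored index while it is less than 24 hours old, otherwise fall through and rebuild. The same gate as a generic sketch (the helper name is ours, not the package's):

    // Hypothetical generic freshness gate; returns null when the caller should rebuild.
    function readIfFresh(data, ttlMs = 24 * 60 * 60 * 1000) {
      const last = data.lastUpdated ? new Date(data.lastUpdated).getTime() : 0;
      return (Date.now() - last) < ttlMs ? (data.index || {}) : null;
    }
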
@@ -17,14 +17,9 @@ function validateSchema(schema) {
 
  // 2. Ensure it's not too large (Firestore limit: 1MB, reserve 100KB for metadata)
  const size = Buffer.byteLength(jsonStr);
- if (size > 900 * 1024) {
-  return { valid: false, reason: `Schema exceeds 900KB limit (${(size/1024).toFixed(2)} KB)` };
- }
-
+ if (size > 900 * 1024) { return { valid: false, reason: `Schema exceeds 900KB limit (${(size/1024).toFixed(2)} KB)` }; }
  return { valid: true };
- } catch (e) {
-  return { valid: false, reason: `Serialization failed: ${e.message}` };
- }
+ } catch (e) { return { valid: false, reason: `Serialization failed: ${e.message}` }; }
 }
 
 /**
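
The 900KB guard leaves roughly 100KB of headroom under Firestore's 1MiB document limit. A quick usage sketch of validateSchema with an invented input:

    const check = validateSchema({ fields: { price: 'number', ts: 'timestamp' } });
    if (!check.valid) console.warn(`Skipping schema: ${check.reason}`);
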
@@ -65,10 +60,10 @@ async function batchStoreSchemas(dependencies, config, schemas) {
  // Critical: Always overwrite 'lastUpdated' to now
  batch.set(docRef, {
   computationName: item.name,
-  category: item.category,
-  schema: item.schema,
-  metadata: item.metadata || {},
-  lastUpdated: new Date()
+  category: item.category,
+  schema: item.schema,
+  metadata: item.metadata || {},
+  lastUpdated: new Date()
  }, { merge: true });
 
  validCount++;
@@ -159,13 +159,13 @@ async function getFirstDateFromCollection(config, deps, collectionName) {
 async function getEarliestDataDates(config, deps) {
  const { logger } = deps;
  const [ investorDate, speculatorDate, investorHistoryDate, speculatorHistoryDate, insightsDate, socialDate, priceDate ] = await Promise.all([
-  getFirstDateFromCollection(config, deps, config.normalUserPortfolioCollection),
-  getFirstDateFromCollection(config, deps, config.speculatorPortfolioCollection),
-  getFirstDateFromCollection(config, deps, config.normalUserHistoryCollection),
-  getFirstDateFromCollection(config, deps, config.speculatorHistoryCollection),
-  getFirstDateFromSimpleCollection(config, deps, config.insightsCollectionName),
-  getFirstDateFromSimpleCollection(config, deps, config.socialInsightsCollectionName),
-  getFirstDateFromPriceCollection(config, deps)
+  getFirstDateFromCollection (config, deps, config.normalUserPortfolioCollection),
+  getFirstDateFromCollection (config, deps, config.speculatorPortfolioCollection),
+  getFirstDateFromCollection (config, deps, config.normalUserHistoryCollection),
+  getFirstDateFromCollection (config, deps, config.speculatorHistoryCollection),
+  getFirstDateFromSimpleCollection (config, deps, config.insightsCollectionName),
+  getFirstDateFromSimpleCollection (config, deps, config.socialInsightsCollectionName),
+  getFirstDateFromPriceCollection (config, deps)
  ]);
 
  const getMinDate = (...dates) => {
@@ -221,5 +221,5 @@ module.exports = {
  getEarliestDataDates,
  generateCodeHash,
  withRetry,
- DEFINITIVE_EARLIEST_DATES // [NEW EXPORT]
+ DEFINITIVE_EARLIEST_DATES
 };
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
  "name": "bulltrackers-module",
- "version": "1.0.262",
+ "version": "1.0.263",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [