bulltrackers-module 1.0.252 → 1.0.254

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 /**
  * @fileoverview Checks availability of root data via the Root Data Index.
- * REFACTORED: Now relies on the centralized 'system_root_data_index' map.
+ * REFACTORED: Now supports granular 'userType' checks (Speculator vs Normal).
  */
 const { normalizeName } = require('../utils/utils');
 
@@ -10,11 +10,34 @@ const INDEX_COLLECTION = process.env.ROOT_DATA_AVAILABILITY_COLLECTION || 'syste
 function checkRootDependencies(calcManifest, rootDataStatus) {
   const missing = [];
   if (!calcManifest.rootDataDependencies) return { canRun: true, missing };
+
+  const userType = calcManifest.userType || 'all';
+
   for (const dep of calcManifest.rootDataDependencies) {
-    if (dep === 'portfolio' && !rootDataStatus.hasPortfolio) missing.push('portfolio');
+    if (dep === 'portfolio') {
+      if (userType === 'speculator') {
+        if (!rootDataStatus.speculatorPortfolio) missing.push('speculatorPortfolio');
+      } else if (userType === 'normal') {
+        if (!rootDataStatus.normalPortfolio) missing.push('normalPortfolio');
+      } else {
+        // 'all', 'aggregate', or 'n/a' -> Check if ANY portfolio data exists
+        // This satisfies the "either/or" requirement for generic calculations
+        if (!rootDataStatus.hasPortfolio) missing.push('portfolio');
+      }
+    }
+    else if (dep === 'history') {
+      if (userType === 'speculator') {
+        if (!rootDataStatus.speculatorHistory) missing.push('speculatorHistory');
+      } else if (userType === 'normal') {
+        if (!rootDataStatus.normalHistory) missing.push('normalHistory');
+      } else {
+        // 'all', 'aggregate', or 'n/a' -> Check if ANY history data exists
+        if (!rootDataStatus.hasHistory) missing.push('history');
+      }
+    }
+    // These data types are global and do not have user subtypes
     else if (dep === 'insights' && !rootDataStatus.hasInsights) missing.push('insights');
     else if (dep === 'social' && !rootDataStatus.hasSocial) missing.push('social');
-    else if (dep === 'history' && !rootDataStatus.hasHistory) missing.push('history');
    else if (dep === 'price' && !rootDataStatus.hasPrices) missing.push('price');
  }
  return { canRun: missing.length === 0, missing };
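The hunk above makes checkRootDependencies resolve a 'portfolio' or 'history' dependency against user-type-specific flags whenever the calculation manifest declares a userType. For illustration only (not part of the published package), a minimal sketch of the new behaviour; the status object and manifest values below are hypothetical, shaped after the fields referenced in the hunk:

// Hypothetical availability flags, shaped after the fields read above.
const rootDataStatus = {
  hasPortfolio: true,          // aggregate flag: some portfolio data exists...
  speculatorPortfolio: false,  // ...but none for speculator users
  normalPortfolio: true,
  hasHistory: true, speculatorHistory: true, normalHistory: true,
  hasInsights: true, hasSocial: true, hasPrices: true
};

// A generic calculation (no userType, i.e. 'all') is satisfied by the aggregate flag:
checkRootDependencies({ rootDataDependencies: ['portfolio'] }, rootDataStatus);
// -> { canRun: true, missing: [] }

// A speculator-specific calculation now requires the granular flag:
checkRootDependencies({ rootDataDependencies: ['portfolio'], userType: 'speculator' }, rootDataStatus);
// -> { canRun: false, missing: ['speculatorPortfolio'] }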
@@ -49,27 +72,33 @@ function getViableCalculations(candidates, fullManifest, rootDataStatus, dailySt
 
 /**
  * Checks data availability by reading the centralized index.
- * Only falls back to raw checks if explicitly configured or index is missing.
+ * Extracts granular details to support Speculator-specific checks.
  */
 async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
   const { logger, db } = dependencies;
 
   try {
     // 1. Try reading the Index
-    const indexDoc = await db.collection(INDEX_COLLECTION).doc(dateStr).get();
+    const indexDoc = db.collection(INDEX_COLLECTION).doc(dateStr).get();
 
     if (indexDoc.exists) {
       const data = indexDoc.data();
-      // Return status based on the map
-      // Note: We return null references. The data loaders in streamPortfolioData
-      // have logic to fetch refs if providedRefs is null (which they are here).
+      const details = data.details || {}; // Extract granular details if present
+
       return {
         status: {
+          // Global flags (Aggregate - true if EITHER exists)
          hasPortfolio: !!data.hasPortfolio,
          hasHistory: !!data.hasHistory,
          hasSocial: !!data.hasSocial,
          hasInsights: !!data.hasInsights,
-          hasPrices: !!data.hasPrices
+          hasPrices: !!data.hasPrices,
+
+          // Granular flags (Specific)
+          speculatorPortfolio: !!details.speculatorPortfolio,
+          normalPortfolio: !!details.normalPortfolio,
+          speculatorHistory: !!details.speculatorHistory,
+          normalHistory: !!details.normalHistory
        },
        portfolioRefs: null,
        historyRefs: null,
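For orientation, this is the index document shape that checkRootDataAvailability now expects at <INDEX_COLLECTION>/<dateStr>. The field names come from the hunk above; the values, and the writer that maintains the details map, are assumptions not shown in this diff:

// Hypothetical document at <INDEX_COLLECTION>/<dateStr>:
const exampleIndexDoc = {
  hasPortfolio: true,      // aggregate flags: true if EITHER user type has data
  hasHistory: true,
  hasSocial: true,
  hasInsights: false,
  hasPrices: true,
  details: {               // granular flags; optional, treated as {} when absent
    speculatorPortfolio: true,
    normalPortfolio: true,
    speculatorHistory: false,
    normalHistory: true
  }
};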
@@ -79,10 +108,12 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
       };
     } else {
       // Index missing: implies data hasn't been indexed yet or doesn't exist.
-      // For safety in this strict model, we assume MISSING.
       logger.log('WARN', `[Availability] Index not found for ${dateStr}. Assuming NO data.`);
       return {
-        status: { hasPortfolio: false, hasHistory: false, hasSocial: false, hasInsights: false, hasPrices: false }
+        status: {
+          hasPortfolio: false, hasHistory: false, hasSocial: false, hasInsights: false, hasPrices: false,
+          speculatorPortfolio: false, normalPortfolio: false, speculatorHistory: false, normalHistory: false
+        }
      };
    }
 
@@ -1,7 +1,7 @@
 /**
  * FILENAME: computation-system/helpers/computation_dispatcher.js
  * PURPOSE: "Smart Dispatcher" - Analyzes state and only dispatches valid, runnable tasks.
- * UPDATED: Implements pre-dispatch analysis to guarantee worker success.
+ * UPDATED: Implements Audit Ledger creation (PENDING state) before dispatch.
  */
 
 const { getExpectedDateStrings, normalizeName, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils.js');
@@ -9,6 +9,7 @@ const { groupByPass, analyzeDateExecution } = require('../WorkflowOrchestrat
 const { PubSubUtils } = require('../../core/utils/pubsub_utils');
 const { fetchComputationStatus, updateComputationStatus } = require('../persistence/StatusRepository');
 const { checkRootDataAvailability } = require('../data/AvailabilityChecker');
+const { commitBatchInChunks } = require('../persistence/FirestoreUtils'); // [NEW IMPORT]
 const pLimit = require('p-limit');
 
 const TOPIC_NAME = 'computation-tasks';
@@ -19,7 +20,7 @@ const STATUS_IMPOSSIBLE = 'IMPOSSIBLE';
  * Performs full pre-flight checks (Root Data, Dependencies, History) before emitting.
  */
 async function dispatchComputationPass(config, dependencies, computationManifest) {
-  const { logger } = dependencies;
+  const { logger, db } = dependencies; // Added db destructuring
   const pubsubUtils = new PubSubUtils(dependencies);
   const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
 
@@ -86,7 +87,6 @@ async function dispatchComputationPass(config, dependencies, computationManifest
   });
 
   // Mark Blocked/Failed Deps (Temporary Failure)
-  // We write these so the status reflects reality, but we DO NOT dispatch them.
   [...report.blocked, ...report.failedDependency].forEach(item => {
     statusUpdates[item.name] = { hash: false, category: 'unknown', reason: item.reason };
   });
@@ -103,6 +103,7 @@ async function dispatchComputationPass(config, dependencies, computationManifest
       date: dateStr,
       pass: passToRun,
       computation: normalizeName(item.name),
+      hash: item.hash || item.newHash, // [NEW] Ensure Hash is passed for Ledger
       timestamp: Date.now()
     });
   });
@@ -116,7 +117,30 @@ async function dispatchComputationPass(config, dependencies, computationManifest
 
   // 4. Batch Dispatch Valid Tasks
   if (tasksToDispatch.length > 0) {
-    logger.log('INFO', `[Dispatcher] Generated ${tasksToDispatch.length} VALID tasks. Dispatching...`);
+    // --- [NEW] STEP 4.1: CREATE AUDIT LEDGER ENTRIES ---
+    logger.log('INFO', `[Dispatcher] 📝 Creating Audit Ledger entries for ${tasksToDispatch.length} tasks...`);
+
+    const ledgerWrites = [];
+    for (const task of tasksToDispatch) {
+      const ledgerRef = db.collection(`computation_audit_ledger/${task.date}/passes/${task.pass}/tasks`).doc(task.computation);
+      ledgerWrites.push({
+        ref: ledgerRef,
+        data: {
+          status: 'PENDING',
+          computation: task.computation,
+          expectedHash: task.hash || 'unknown',
+          createdAt: new Date(),
+          retries: 0
+        },
+        options: { merge: true } // Merge allows updating retries/timestamps without wiping history
+      });
+    }
+
+    // Commit Ledger writes using chunked batch utility
+    await commitBatchInChunks(config, dependencies, ledgerWrites, 'AuditLedger Creation');
+    // ---------------------------------------------------
+
+    logger.log('INFO', `[Dispatcher] ✅ Generated ${tasksToDispatch.length} VALID tasks. Dispatching to Pub/Sub...`);
 
    await pubsubUtils.batchPublishTasks(dependencies, {
      topicName: TOPIC_NAME,
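To make the new ledger step concrete: each dispatched task now contributes one write spec to ledgerWrites, and commitBatchInChunks commits them in chunked batches before anything is published. A sketch of a single entry follows; the date, pass, and computation values are purely illustrative, and db stands for the Firestore handle destructured above:

// Hypothetical task: { date: '2024-01-15', pass: '1', computation: 'example_calc' }
// produces a ledger document at:
//   computation_audit_ledger/2024-01-15/passes/1/tasks/example_calc
const exampleLedgerWrite = {
  ref: db.collection('computation_audit_ledger/2024-01-15/passes/1/tasks').doc('example_calc'),
  data: {
    status: 'PENDING',
    computation: 'example_calc',
    expectedHash: 'abc123',   // task.hash || 'unknown'
    createdAt: new Date(),
    retries: 0
  },
  options: { merge: true }    // re-dispatch merges into the existing doc instead of overwriting it
};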
@@ -1,30 +1,36 @@
 /**
  * @fileoverview Handles saving computation results with observability and Smart Cleanup.
+ * UPDATED: Implements Audit Ledger completion logic ("Closing the Ledger").
  */
 const { commitBatchInChunks } = require('./FirestoreUtils');
 const { updateComputationStatus } = require('./StatusRepository');
 const { batchStoreSchemas } = require('../utils/schema_capture');
 const { generateProcessId, PROCESS_TYPES } = require('../logger/logger');
+// Note: normalizeName is typically needed for doc IDs, but keys in stateObj are usually already normalized.
+// If not, ensure it is imported. Based on StandardExecutor, keys are normalized.
 
 async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
   const successUpdates = {};
   const schemas = [];
   const cleanupTasks = []; // Tasks to delete old data
-  const { logger } = deps;
+  const { logger, db } = deps;
   const pid = generateProcessId(PROCESS_TYPES.STORAGE, passName, dStr);
+
+  // [NEW] Extract numeric pass ID from string (e.g., "Pass 1" -> "1")
+  const passNum = passName.replace(/[^0-9]/g, '');
 
   for (const name in stateObj) {
     const calc = stateObj[name];
     try {
       const result = await calc.getResult();
 
-      // [UPDATE] Validate Result: Check for Null, Empty Object, or Zero
+      // Validate Result: Check for Null, Empty Object, or Zero
       const isEmpty = !result ||
         (typeof result === 'object' && Object.keys(result).length === 0) ||
         (typeof result === 'number' && result === 0);
 
       if (isEmpty) {
-        // [UPDATE] Mark status as FALSE (Failed/Empty) so it re-runs or is flagged
+        // Mark status as FALSE (Failed/Empty) so it re-runs or is flagged
        if (calc.manifest.hash) {
          successUpdates[name] = {
            hash: false,
@@ -35,7 +41,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
         continue;
       }
 
-      const mainDocRef = deps.db.collection(config.resultsCollection)
+      const mainDocRef = db.collection(config.resultsCollection)
        .doc(dStr)
        .collection(config.resultsSubcollection)
        .doc(calc.manifest.category)
@@ -44,6 +50,22 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
 
       const updates = await prepareAutoShardedWrites(result, mainDocRef, logger);
 
+      // --- [NEW] ADD AUDIT LEDGER COMPLETION TO BATCH ---
+      if (passNum && calc.manifest) {
+        const ledgerRef = db.collection(`computation_audit_ledger/${dStr}/passes/${passNum}/tasks`).doc(name);
+        updates.push({
+          ref: ledgerRef,
+          data: {
+            status: 'COMPLETED',
+            completedAt: new Date(),
+            actualHash: calc.manifest.hash,
+            _verified: true
+          },
+          options: { merge: true }
+        });
+      }
+      // --------------------------------------------------
+
      // Capture Schema
      if (calc.manifest.class.getSchema) {
        const { class: _cls, ...safeMetadata } = calc.manifest;
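Putting the two halves of the ledger together: the dispatcher writes the PENDING entry before publishing, and commitResults merges the completion fields over it once results are stored. A sketch of the same document after both merge writes have landed; the field names come from the two files above, while the values and timestamps are illustrative:

// Hypothetical final state of
//   computation_audit_ledger/2024-01-15/passes/1/tasks/example_calc
const closedLedgerDoc = {
  // written at dispatch time (PENDING state)
  computation: 'example_calc',
  expectedHash: 'abc123',
  createdAt: '2024-01-15T06:00:00Z',
  retries: 0,
  // merged in by commitResults ("closing the ledger")
  status: 'COMPLETED',
  completedAt: '2024-01-15T06:05:00Z',
  actualHash: 'abc123',
  _verified: true
};
// A task that never reaches commitResults stays in status 'PENDING',
// which is what makes the ledger usable as an audit trail.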
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "bulltrackers-module",
-  "version": "1.0.252",
+  "version": "1.0.254",
   "description": "Helper Functions for Bulltrackers.",
   "main": "index.js",
   "files": [