bulltrackers-module 1.0.218 → 1.0.219

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,14 +1,13 @@
  /**
  * @fileoverview
- * Dynamic Manifest Builder (v6 - Merkle Tree Dependency Hashing)
+ * Dynamic Manifest Builder (v6.1 - Granular Merkle Tree Dependency Hashing)
  *
  * KEY FEATURES:
- * 1. Smart Layer Hashing: Detects used layers (Math, Extractors) to avoid stale helper code.
+ * 1. Smart Granular Hashing: Detects SPECIFIC classes used within layers to avoid unnecessary re-runs.
+ * - Changing 'MathPrimitives' won't invalidate computations that only use 'SignalPrimitives'.
  * 2. Cascading Invalidation (Merkle Hashing):
  * The final hash of a computation is derived from:
- * [Own Code] + [Layer States] + [Hashes of all Dependencies]
- * * This guarantees that if Calculation A is updated, Calculation B (which depends on A)
- * will automatically generate a new hash, forcing the system to re-run it.
+ * [Own Code] + [Specific Layer Class Hashes] + [Hashes of all Dependencies]
  */
 
  const { generateCodeHash } = require('../utils/utils');
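
The header above describes the scheme in prose; the following minimal sketch illustrates the same two-stage idea, with sha256 standing in for the module's generateCodeHash helper (whose implementation is not part of this diff):

    // Minimal sketch of the cascading (Merkle-style) hashing described above.
    const crypto = require('crypto');
    const sha256 = (s) => crypto.createHash('sha256').update(s).digest('hex');

    // Intrinsic hash: own source code plus the hashes of the specific layer
    // classes the calculation actually uses.
    const intrinsicHash = (codeStr, usedClassHashes) =>
      sha256(sha256(codeStr) + usedClassHashes.join(''));

    // Final hash: intrinsic hash plus the final hashes of all dependencies,
    // so any change in a dependency changes every downstream hash as well.
    const finalHash = (ownIntrinsicHash, dependencyHashes) =>
      sha256(ownIntrinsicHash + dependencyHashes.join(''));
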
@@ -20,57 +19,75 @@ const ProfilingLayer = require('../layers/profiling');
  const ValidatorsLayer = require('../layers/validators');
 
  /* --------------------------------------------------
- * 1. Layer Hash Generation
+ * 1. Granular Layer Hash Generation
  * -------------------------------------------------- */
 
- function generateLayerHash(layerExports, layerName) {
+ /**
+ * Generates a map of hashes for every export in a layer.
+ * Returns: { 'MathPrimitives': 'sha256...', 'SignalPrimitives': 'sha256...', ... }
+ */
+ function generateLayerHashes(layerExports, layerName) {
+ const hashes = {};
  const keys = Object.keys(layerExports).sort();
- let combinedSource = `LAYER:${layerName}`;
 
  for (const key of keys) {
  const item = layerExports[key];
- if (typeof item === 'function') { combinedSource += item.toString();
- } else if (typeof item === 'object' && item !== null) { combinedSource += JSON.stringify(item);
- } else { combinedSource += String(item); }
+ // We hash the specific export content prefixed with its identity
+ let source = `LAYER:${layerName}:EXPORT:${key}`;
+
+ if (typeof item === 'function') {
+ source += item.toString();
+ } else if (typeof item === 'object' && item !== null) {
+ source += JSON.stringify(item);
+ } else {
+ source += String(item);
+ }
+
+ hashes[key] = generateCodeHash(source);
  }
- return generateCodeHash(combinedSource);
+ return hashes;
  }
 
- // Pre-compute layer hashes at startup
+ // Pre-compute granular layer hashes at startup
+ // Structure: { mathematics: { MathPrimitives: '...', SignalPrimitives: '...' }, ... }
  const LAYER_HASHES = {
- 'mathematics': generateLayerHash(MathematicsLayer, 'mathematics'),
- 'extractors': generateLayerHash(ExtractorsLayer, 'extractors'),
- 'profiling': generateLayerHash(ProfilingLayer, 'profiling'),
- 'validators': generateLayerHash(ValidatorsLayer, 'validators')
+ 'mathematics': generateLayerHashes(MathematicsLayer, 'mathematics'),
+ 'extractors': generateLayerHashes(ExtractorsLayer, 'extractors'),
+ 'profiling': generateLayerHashes(ProfilingLayer, 'profiling'),
+ 'validators': generateLayerHashes(ValidatorsLayer, 'validators')
  };
 
- // Map code patterns to Layer dependencies
+ // Map code patterns to SPECIFIC Layer Exports
+ // Structure: { LayerName: { ExportName: [Triggers...] } }
  const LAYER_TRIGGERS = {
- 'mathematics': [
- 'math.compute', 'MathPrimitives',
- 'math.signals', 'SignalPrimitives', 'signals.',
- 'math.aggregate', 'Aggregators',
- 'math.timeseries', 'TimeSeries', 'timeSeries.',
- 'math.distribution', 'DistributionAnalytics', 'distribution.',
- 'math.financial', 'FinancialEngineering'
- ],
- 'extractors': [
- 'math.extract', 'DataExtractor',
- 'math.history', 'HistoryExtractor',
- 'math.prices', 'priceExtractor',
- 'math.insights', 'InsightsExtractor', 'insights.',
- 'math.tradeSeries', 'TradeSeriesBuilder'
- ],
- 'profiling': [
- 'math.profiling', 'SCHEMAS',
- 'math.classifier', 'UserClassifier',
- 'math.psychometrics', 'Psychometrics',
- 'math.bias', 'CognitiveBiases',
- 'math.skill', 'SkillAttribution'
- ],
- 'validators': [
- 'math.validate', 'Validators'
- ]
+ 'mathematics': {
+ 'MathPrimitives': ['math.compute', 'MathPrimitives'],
+ 'SignalPrimitives': ['math.signals', 'SignalPrimitives', 'signals.'],
+ 'Aggregators': ['math.aggregate', 'Aggregators'],
+ 'TimeSeries': ['math.timeseries', 'TimeSeries', 'timeSeries.'],
+ 'TimeSeriesAnalysis': ['TimeSeriesAnalysis'],
+ 'DistributionAnalytics': ['math.distribution', 'DistributionAnalytics', 'distribution.'],
+ 'FinancialEngineering': ['math.financial', 'FinancialEngineering']
+ },
+ 'extractors': {
+ 'DataExtractor': ['math.extract', 'DataExtractor'],
+ 'HistoryExtractor': ['math.history', 'HistoryExtractor'],
+ 'priceExtractor': ['math.prices', 'priceExtractor'],
+ 'InsightsExtractor': ['math.insights', 'InsightsExtractor', 'insights.'],
+ 'TradeSeriesBuilder': ['math.tradeSeries', 'TradeSeriesBuilder']
+ },
+ 'profiling': {
+ 'SCHEMAS': ['math.profiling', 'SCHEMAS'],
+ 'UserClassifier': ['math.classifier', 'UserClassifier'],
+ 'Psychometrics': ['math.psychometrics', 'Psychometrics'],
+ 'CognitiveBiases': ['math.bias', 'CognitiveBiases'],
+ 'SkillAttribution': ['math.skill', 'SkillAttribution'],
+ 'ExecutionAnalytics': ['ExecutionAnalytics'],
+ 'AdaptiveAnalytics': ['AdaptiveAnalytics']
+ },
+ 'validators': {
+ 'Validators': ['math.validate', 'Validators']
+ }
  };
 
  /* --------------------------------------------------
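
As a usage sketch of the two structures above (assuming the generateLayerHashes function and LAYER_TRIGGERS map from this hunk are in scope; the layer object and calculation source below are invented for illustration):

    // Hypothetical layer with two exports; the real layers live in ../layers/
    // and are not part of this diff.
    const exampleLayer = {
      MathPrimitives:   { mean: (xs) => xs.reduce((a, b) => a + b, 0) / xs.length },
      SignalPrimitives: { sign: (x) => Math.sign(x) }
    };

    const exampleHashes = generateLayerHashes(exampleLayer, 'mathematics');
    // -> { MathPrimitives: '<sha256>', SignalPrimitives: '<sha256>' }

    // A calculation that only touches signals matches only the SignalPrimitives
    // triggers, so editing MathPrimitives cannot change its hash.
    const codeStr = "const s = math.signals.sign(delta);";
    const usesSignals = LAYER_TRIGGERS.mathematics.SignalPrimitives
      .some(trigger => codeStr.includes(trigger)); // true ('signals.' matches)
    const usesMathPrimitives = LAYER_TRIGGERS.mathematics.MathPrimitives
      .some(trigger => codeStr.includes(trigger)); // false
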
@@ -121,7 +138,7 @@ function getDependencySet(endpoints, adjacencyList) {
  * -------------------------------------------------- */
 
  function buildManifest(productLinesToRun = [], calculations) {
- log.divider('Building Dynamic Manifest (Merkle Hashing)');
+ log.divider('Building Dynamic Manifest (Granular Merkle Hashing)');
  log.info(`Target Product Lines: [${productLinesToRun.join(', ')}]`);
 
  const manifestMap = new Map();
@@ -150,21 +167,33 @@ function buildManifest(productLinesToRun = [], calculations) {
 
  let finalCategory = folderName === 'core' && metadata.category ? metadata.category : folderName;
 
- // --- PHASE 1: INTRINSIC HASH (Code + Layers) ---
- // We do NOT include dependencies yet.
+ // --- PHASE 1: INTRINSIC HASH (Code + Granular Layers) ---
  let compositeHashString = generateCodeHash(codeStr);
- const usedLayers = [];
-
- // Check for specific layer usage
- for (const [layerName, triggers] of Object.entries(LAYER_TRIGGERS)) {
- if (triggers.some(trigger => codeStr.includes(trigger))) { compositeHashString += LAYER_HASHES[layerName]; usedLayers.push(layerName); }
+ const usedDeps = [];
+
+ // Check for specific layer usage (Granular Check)
+ for (const [layerName, exportsMap] of Object.entries(LAYER_TRIGGERS)) {
+ const layerHashes = LAYER_HASHES[layerName]; // { Export: Hash }
+
+ for (const [exportName, triggers] of Object.entries(exportsMap)) {
+ // If code uses this specific export...
+ if (triggers.some(trigger => codeStr.includes(trigger))) {
+ const exportHash = layerHashes[exportName];
+ if (exportHash) {
+ compositeHashString += exportHash;
+ usedDeps.push(`${layerName}.${exportName}`);
+ }
+ }
+ }
  }
 
- // Safe Mode Fallback
+ // Safe Mode Fallback: If no dependencies detected, assume usage of ALL to be safe.
  let isSafeMode = false;
- if (usedLayers.length === 0) {
+ if (usedDeps.length === 0) {
  isSafeMode = true;
- Object.values(LAYER_HASHES).forEach(h => compositeHashString += h);
+ Object.values(LAYER_HASHES).forEach(layerObj => {
+ Object.values(layerObj).forEach(h => compositeHashString += h);
+ });
  }
 
  const baseHash = generateCodeHash(compositeHashString);
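
Note that LAYER_HASHES values are now per-export maps rather than single strings, which is why the safe-mode fallback walks two levels. A toy illustration of that shape (hash values invented):

    const SHAPE = {
      mathematics: { MathPrimitives: 'aaa', SignalPrimitives: 'bbb' },
      validators:  { Validators: 'ccc' }
    };
    let composite = 'hash-of-own-code';
    Object.values(SHAPE).forEach(layerObj => {
      Object.values(layerObj).forEach(h => { composite += h; });
    });
    // composite === 'hash-of-own-codeaaabbbccc'
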
@@ -181,7 +210,7 @@ function buildManifest(productLinesToRun = [], calculations) {
  dependencies: dependencies,
  pass: 0,
  hash: baseHash,
- debugUsedLayers: isSafeMode ? ['ALL (Safe Mode)'] : usedLayers
+ debugUsedLayers: isSafeMode ? ['ALL (Safe Mode)'] : usedDeps
  };
 
  manifestMap.set(normalizedName, manifestEntry);
@@ -264,7 +293,7 @@ function buildManifest(productLinesToRun = [], calculations) {
  log.step('Computing Cascading Merkle Hashes...');
 
  for (const entry of sortedManifest) {
- // Start with the intrinsic hash (Code + Layers)
+ // Start with the intrinsic hash (Code + Granular Layers)
  let dependencySignature = entry.hash;
 
  if (entry.dependencies && entry.dependencies.length > 0) {
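
The remainder of this loop is unchanged in this release and is not shown. As a sketch of the cascade the comment refers to, under the assumption that dependencies resolve through the manifest map built earlier (an illustration of the pattern, not the package's exact code):

    for (const entry of sortedManifest) {
      let dependencySignature = entry.hash;          // intrinsic hash (code + granular layers)
      if (entry.dependencies && entry.dependencies.length > 0) {
        for (const depName of entry.dependencies) {
          const dep = manifestMap.get(depName);      // assumption: deps resolve via the manifest map
          if (dep) dependencySignature += dep.hash;  // fold in each dependency's hash
        }
      }
      entry.hash = generateCodeHash(dependencySignature); // a change here cascades to dependents
    }
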
@@ -70,16 +70,9 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
  const storedStatus = dailyStatus[cName];
  const currentHash = calc.hash;
 
- // 1. Dependency Check
  if (calc.dependencies && calc.dependencies.length > 0) { const missing = calc.dependencies.filter(depName => !dailyStatus[normalizeName(depName)]); if (missing.length > 0) { logger.log('TRACE', `[Skip] ${cName} missing deps: ${missing.join(', ')}`); continue; } }
-
- // 2. Logic A: No previous run
  if (!storedStatus) { logger.log('INFO', `[Versioning] ${cName}: New run needed (No prior status).`); calcsToAttempt.push(calc); continue; }
-
- // 3. Logic B: Hash Mismatch
  if (typeof storedStatus === 'string' && currentHash && storedStatus !== currentHash) { logger.log('INFO', `[Versioning] ${cName}: Code Changed. (Old: ${storedStatus.substring(0,6)}... New: ${currentHash.substring(0,6)}...)`); calcsToAttempt.push(calc); continue; }
-
- // 4. Logic C: Upgrade Legacy Boolean -> Hash
  if (storedStatus === true && currentHash) { logger.log('INFO', `[Versioning] ${cName}: Upgrading legacy status to Hash.`); calcsToAttempt.push(calc); continue; }
  }
 
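
Only the numbered comments were removed in this hunk; the conditions themselves are unchanged. Restated as a hypothetical helper (needsRerun is not part of the package), where storedStatus is a prior hash string, a legacy boolean true, or undefined:

    function needsRerun(storedStatus, currentHash) {
      if (!storedStatus) return 'no prior status';                   // Logic A: never ran
      if (typeof storedStatus === 'string' && currentHash && storedStatus !== currentHash) {
        return 'code changed';                                       // Logic B: hash mismatch
      }
      if (storedStatus === true && currentHash) return 'upgrade legacy boolean to hash'; // Logic C
      return null;                                                   // up to date: skip
    }
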
@@ -109,11 +102,8 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
  const prevDateStr = prevDate.toISOString().slice(0, 10);
  const previousResults = await fetchExistingResults(prevDateStr, calcsRunning, computationManifest, config, dependencies, true);
 
- if (standardToRun.length) { const updates = await runStandardComputationPass(dateToProcess, standardToRun, `Pass ${passToRun} (Std)`, config, dependencies, rootData, existingResults, previousResults, false);
- Object.assign(dateUpdates, updates); }
-
- if (metaToRun.length) { const updates = await runMetaComputationPass(dateToProcess, metaToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, previousResults, rootData, false);
- Object.assign(dateUpdates, updates); }
+ if (standardToRun.length) { const updates = await runStandardComputationPass(dateToProcess, standardToRun, `Pass ${passToRun} (Std)`, config, dependencies, rootData, existingResults, previousResults, false); Object.assign(dateUpdates, updates); }
+ if (metaToRun.length) { const updates = await runMetaComputationPass(dateToProcess, metaToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, previousResults, rootData, false); Object.assign(dateUpdates, updates); }
 
  } catch (err) {
  logger.log('ERROR', `[DateRunner] FAILED Pass ${passToRun} for ${dateStr}`, { errorMessage: err.message });
@@ -28,7 +28,7 @@ const LEGACY_MAPPING = {
 
  function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[calc.pass] = acc[calc.pass] || []).push(calc); return acc; }, {}); }
 
- function validateResultPatterns(logger, calcName, results, category) {
+ function validateResultPatterns(logger, calcName, results, category) { // TODO Unused? Could use this for validation
  if (category === 'speculator' || category === 'speculators') return;
  const tickers = Object.keys(results); const totalItems = tickers.length; if (totalItems < 5) return;
  const sampleTicker = tickers.find(t => results[t] && typeof results[t] === 'object'); if (!sampleTicker) return;
@@ -252,6 +252,14 @@ class InsightsExtractor {
  * @param {string} timeframe - 'today' or 'yesterday'
  * @returns {Array} Array of insight objects
  */
+
+ // MAJOR FIX TO GET INSIGHTS METHOD FOR THE CORRECT STRUCTURE AND SUPPORTING YESTERDAY + TODAY DATA REQUESTS
+ // THIS IS INJECTED TO BE USED LIKE :
+
+ // process(context) {
+ // const { insights: insightsHelper } = context.math;
+ // const insights = insightsHelper.getInsights(context); This is the direct call
+
  static getInsights(context, timeframe = 'today') {
  const insightsData = context.insights;
 
@@ -263,9 +271,7 @@
  if (!doc) return [];
 
  // Extract the insights array from the document
- if (doc.insights && Array.isArray(doc.insights)) {
- return doc.insights;
- }
+ if (doc.insights && Array.isArray(doc.insights)) { return doc.insights; }
 
  return [];
  }
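
The upper-case comment block added above describes the intended call site. A cleaned-up version of that usage (the shape of context and the presence of context.math.insights are taken from that comment, not verified elsewhere in this diff):

    // Hypothetical consumer of InsightsExtractor.getInsights via the injected context.
    class ExampleCalculation {
      process(context) {
        const { insights: insightsHelper } = context.math;
        const today = insightsHelper.getInsights(context);                 // defaults to 'today'
        const yesterday = insightsHelper.getInsights(context, 'yesterday');
        return { todayCount: today.length, yesterdayCount: yesterday.length };
      }
    }
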
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bulltrackers-module",
- "version": "1.0.218",
+ "version": "1.0.219",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [