bulltrackers-module 1.0.286 → 1.0.288
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/context/ManifestBuilder.js +87 -20
- package/functions/computation-system/layers/extractors.js +34 -58
- package/functions/computation-system/layers/mathematics.js +103 -85
- package/functions/computation-system/layers/profiling.js +60 -97
- package/functions/computation-system/tools/BuildReporter.js +97 -23
- package/functions/computation-system/topology/HashManager.js +70 -1
- package/package.json +1 -1
|
@@ -1,9 +1,11 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* @fileoverview Dynamic Manifest Builder - Handles Topological Sort and Auto-Discovery.
|
|
3
3
|
* UPDATED: Generates Granular Hash Composition for Audit Trails.
|
|
4
|
+
* UPGRADE: Implements Tarjan's Algorithm for Precise Cycle Detection.
|
|
5
|
+
* FIXED: Now incorporates System Infrastructure Hash into Calculation Hashes.
|
|
4
6
|
*/
|
|
5
|
-
const { generateCodeHash, LEGACY_MAPPING } = require('../topology/HashManager.js');
|
|
6
|
-
const { normalizeName }
|
|
7
|
+
const { generateCodeHash, getInfrastructureHash, LEGACY_MAPPING } = require('../topology/HashManager.js');
|
|
8
|
+
const { normalizeName } = require('../utils/utils');
|
|
7
9
|
|
|
8
10
|
const SYSTEM_EPOCH = require('../system_epoch');
|
|
9
11
|
|
|
@@ -82,9 +84,74 @@ function getDependencySet(endpoints, adjacencyList) {
|
|
|
82
84
|
return required;
|
|
83
85
|
}
|
|
84
86
|
|
|
87
|
+
/**
|
|
88
|
+
* Helper: Detects cycles using Tarjan's SCC Algorithm.
|
|
89
|
+
* Returns a string description of the first cycle found.
|
|
90
|
+
*/
|
|
91
|
+
function detectCircularDependencies(manifestMap) {
|
|
92
|
+
let index = 0;
|
|
93
|
+
const stack = [];
|
|
94
|
+
const indices = new Map();
|
|
95
|
+
const lowLinks = new Map();
|
|
96
|
+
const onStack = new Set();
|
|
97
|
+
const cycles = [];
|
|
98
|
+
|
|
99
|
+
function strongconnect(v) {
|
|
100
|
+
indices.set(v, index);
|
|
101
|
+
lowLinks.set(v, index);
|
|
102
|
+
index++;
|
|
103
|
+
stack.push(v);
|
|
104
|
+
onStack.add(v);
|
|
105
|
+
|
|
106
|
+
const entry = manifestMap.get(v);
|
|
107
|
+
if (entry && entry.dependencies) {
|
|
108
|
+
for (const w of entry.dependencies) {
|
|
109
|
+
if (!manifestMap.has(w)) continue; // Skip external/missing deps (handled elsewhere)
|
|
110
|
+
|
|
111
|
+
if (!indices.has(w)) {
|
|
112
|
+
strongconnect(w);
|
|
113
|
+
lowLinks.set(v, Math.min(lowLinks.get(v), lowLinks.get(w)));
|
|
114
|
+
} else if (onStack.has(w)) {
|
|
115
|
+
lowLinks.set(v, Math.min(lowLinks.get(v), indices.get(w)));
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
if (lowLinks.get(v) === indices.get(v)) {
|
|
121
|
+
const scc = [];
|
|
122
|
+
let w;
|
|
123
|
+
do {
|
|
124
|
+
w = stack.pop();
|
|
125
|
+
onStack.delete(w);
|
|
126
|
+
scc.push(w);
|
|
127
|
+
} while (w !== v);
|
|
128
|
+
|
|
129
|
+
if (scc.length > 1) {
|
|
130
|
+
cycles.push(scc);
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
for (const name of manifestMap.keys()) {
|
|
136
|
+
if (!indices.has(name)) {
|
|
137
|
+
strongconnect(name);
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
if (cycles.length > 0) {
|
|
142
|
+
const cycle = cycles[0];
|
|
143
|
+
return cycle.join(' -> ') + ' -> ' + cycle[0];
|
|
144
|
+
}
|
|
145
|
+
return null;
|
|
146
|
+
}
|
|
147
|
+
|
|
85
148
|
function buildManifest(productLinesToRun = [], calculations) {
|
|
86
149
|
log.divider('Building Dynamic Manifest');
|
|
87
150
|
|
|
151
|
+
// [CRITICAL FIX] Calculate Infrastructure Hash once per build
|
|
152
|
+
const INFRA_HASH = getInfrastructureHash();
|
|
153
|
+
log.info(`[ManifestBuilder] System Infrastructure Hash: ${INFRA_HASH.substring(0, 8)}`);
|
|
154
|
+
|
|
88
155
|
const requestedLog = (!productLinesToRun || productLinesToRun.length === 0)
|
|
89
156
|
? "ALL (Wildcard/Empty)"
|
|
90
157
|
: productLinesToRun.join(', ');
|
|
@@ -108,10 +175,11 @@ function buildManifest(productLinesToRun = [], calculations) {
|
|
|
108
175
|
const codeStr = Class.toString();
|
|
109
176
|
const selfCodeHash = generateCodeHash(codeStr);
|
|
110
177
|
|
|
111
|
-
|
|
178
|
+
// [CRITICAL FIX] Include INFRA_HASH in the composite signature
|
|
179
|
+
// This ensures that if the system platform changes, ALL calculations are considered "changed"
|
|
180
|
+
let compositeHashString = selfCodeHash + `|EPOCH:${SYSTEM_EPOCH}|INFRA:${INFRA_HASH}`;
|
|
112
181
|
|
|
113
182
|
const usedDeps = [];
|
|
114
|
-
// Track layer hashes for composition analysis
|
|
115
183
|
const usedLayerHashes = {};
|
|
116
184
|
|
|
117
185
|
for (const [layerName, exportsMap] of Object.entries(LAYER_TRIGGERS)) {
|
|
@@ -123,7 +191,6 @@ function buildManifest(productLinesToRun = [], calculations) {
|
|
|
123
191
|
compositeHashString += exportHash;
|
|
124
192
|
usedDeps.push(`${layerName}.${exportName}`);
|
|
125
193
|
|
|
126
|
-
// Group hashes by layer for the composition report
|
|
127
194
|
if (!usedLayerHashes[layerName]) usedLayerHashes[layerName] = '';
|
|
128
195
|
usedLayerHashes[layerName] += exportHash;
|
|
129
196
|
}
|
|
@@ -131,13 +198,11 @@ function buildManifest(productLinesToRun = [], calculations) {
|
|
|
131
198
|
}
|
|
132
199
|
}
|
|
133
200
|
|
|
134
|
-
// Simplify layer hashes to one hash per layer for the report
|
|
135
201
|
const layerComposition = {};
|
|
136
202
|
for(const [lName, lStr] of Object.entries(usedLayerHashes)) {
|
|
137
203
|
layerComposition[lName] = generateCodeHash(lStr);
|
|
138
204
|
}
|
|
139
205
|
|
|
140
|
-
// Safe Mode Fallback
|
|
141
206
|
let isSafeMode = false;
|
|
142
207
|
if (usedDeps.length === 0) {
|
|
143
208
|
isSafeMode = true;
|
|
@@ -158,16 +223,14 @@ function buildManifest(productLinesToRun = [], calculations) {
|
|
|
158
223
|
userType: metadata.userType,
|
|
159
224
|
dependencies: dependencies,
|
|
160
225
|
pass: 0,
|
|
161
|
-
hash: intrinsicHash,
|
|
162
|
-
|
|
163
|
-
// [NEW] Composition Object for Audit
|
|
226
|
+
hash: intrinsicHash,
|
|
164
227
|
composition: {
|
|
165
228
|
epoch: SYSTEM_EPOCH,
|
|
166
229
|
code: selfCodeHash,
|
|
230
|
+
infra: INFRA_HASH, // Stored in composition for audit
|
|
167
231
|
layers: layerComposition,
|
|
168
|
-
deps: {}
|
|
232
|
+
deps: {}
|
|
169
233
|
},
|
|
170
|
-
|
|
171
234
|
debugUsedLayers: isSafeMode ? ['ALL (Safe Mode)'] : usedDeps
|
|
172
235
|
};
|
|
173
236
|
|
|
@@ -179,9 +242,9 @@ function buildManifest(productLinesToRun = [], calculations) {
|
|
|
179
242
|
|
|
180
243
|
for (const folderName in calculations) {
|
|
181
244
|
if (folderName === 'legacy') continue;
|
|
182
|
-
const
|
|
183
|
-
for (const key in
|
|
184
|
-
const entry =
|
|
245
|
+
const calculationGroup = calculations[folderName];
|
|
246
|
+
for (const key in calculationGroup) {
|
|
247
|
+
const entry = calculationGroup[key];
|
|
185
248
|
if (typeof entry === 'function') { processCalc(entry, key, folderName); }
|
|
186
249
|
}
|
|
187
250
|
}
|
|
@@ -189,7 +252,6 @@ function buildManifest(productLinesToRun = [], calculations) {
|
|
|
189
252
|
if (hasFatalError) throw new Error('Manifest build failed due to missing static methods.');
|
|
190
253
|
log.success(`Loaded ${manifestMap.size} calculations.`);
|
|
191
254
|
|
|
192
|
-
// --- Topological Sort & Product Line Filtering ---
|
|
193
255
|
const allNames = new Set(manifestMap.keys());
|
|
194
256
|
for (const [name, entry] of manifestMap) {
|
|
195
257
|
for (const dep of entry.dependencies) {
|
|
@@ -257,17 +319,22 @@ function buildManifest(productLinesToRun = [], calculations) {
|
|
|
257
319
|
queue.sort();
|
|
258
320
|
}
|
|
259
321
|
|
|
260
|
-
if (sortedManifest.length !== filteredManifestMap.size)
|
|
322
|
+
if (sortedManifest.length !== filteredManifestMap.size) {
|
|
323
|
+
const cycle = detectCircularDependencies(filteredManifestMap);
|
|
324
|
+
if (cycle) {
|
|
325
|
+
throw new Error(`Circular dependency detected: ${cycle}`);
|
|
326
|
+
} else {
|
|
327
|
+
throw new Error('Circular dependency detected (Unknown topology error).');
|
|
328
|
+
}
|
|
329
|
+
}
|
|
261
330
|
|
|
262
|
-
// --- Cascading Hash (Phase 2) ---
|
|
263
331
|
for (const entry of sortedManifest) {
|
|
264
|
-
let dependencySignature = entry.hash;
|
|
332
|
+
let dependencySignature = entry.hash;
|
|
265
333
|
|
|
266
334
|
if (entry.dependencies && entry.dependencies.length > 0) {
|
|
267
335
|
const depHashes = entry.dependencies.map(depName => {
|
|
268
336
|
const depEntry = filteredManifestMap.get(depName);
|
|
269
337
|
if (depEntry) {
|
|
270
|
-
// Populate Composition
|
|
271
338
|
entry.composition.deps[depName] = depEntry.hash;
|
|
272
339
|
return depEntry.hash;
|
|
273
340
|
}
|
|
@@ -1,7 +1,6 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* @fileoverview Extractors Layer
|
|
3
3
|
* Core access methods to raw data.
|
|
4
|
-
* FIX: HistoryExtractor handles both Legacy and Modern (Granular) schemas.
|
|
5
4
|
*/
|
|
6
5
|
|
|
7
6
|
const { SCHEMAS } = require('./profiling');
|
|
@@ -23,6 +22,39 @@ class TradeSeriesBuilder {
|
|
|
23
22
|
}
|
|
24
23
|
return curve;
|
|
25
24
|
}
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* UPGRADE: Kadane's Algorithm style O(n) scan for Maximum Drawdown.
|
|
28
|
+
* Efficiently finds the largest peak-to-trough decline.
|
|
29
|
+
* @param {Array<number>} curve - The cumulative equity curve
|
|
30
|
+
* @returns {Object} { maxDrawdownPct: number, peakIndex: number, troughIndex: number }
|
|
31
|
+
*/
|
|
32
|
+
static calculateMaxDrawdown(curve) {
|
|
33
|
+
if (!curve || curve.length < 2) return { maxDrawdownPct: 0, peakIndex: 0, troughIndex: 0 };
|
|
34
|
+
|
|
35
|
+
let maxDrawdown = 0;
|
|
36
|
+
let peakValue = curve[0];
|
|
37
|
+
let peakIndex = 0;
|
|
38
|
+
let tempPeakIndex = 0;
|
|
39
|
+
let troughIndex = 0;
|
|
40
|
+
|
|
41
|
+
for (let i = 1; i < curve.length; i++) {
|
|
42
|
+
const currentVal = curve[i];
|
|
43
|
+
|
|
44
|
+
if (currentVal > peakValue) {
|
|
45
|
+
peakValue = currentVal;
|
|
46
|
+
tempPeakIndex = i;
|
|
47
|
+
} else {
|
|
48
|
+
const drawdown = (peakValue - currentVal) / peakValue;
|
|
49
|
+
if (drawdown > maxDrawdown) {
|
|
50
|
+
maxDrawdown = drawdown;
|
|
51
|
+
peakIndex = tempPeakIndex;
|
|
52
|
+
troughIndex = i;
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
return { maxDrawdownPct: maxDrawdown * 100, peakIndex, troughIndex };
|
|
57
|
+
}
|
|
26
58
|
}
|
|
27
59
|
|
|
28
60
|
class DataExtractor {
|
|
@@ -106,31 +138,18 @@ class DataExtractor {
|
|
|
106
138
|
static getOpenDateTime(position) { return (!position || !position.OpenDateTime) ? null : new Date(position.OpenDateTime); }
|
|
107
139
|
|
|
108
140
|
|
|
109
|
-
/**
|
|
110
|
-
* Returns trades that were active during the given date's 24-hour window.
|
|
111
|
-
* @param {Array} historyTrades - The PublicHistoryPositions array.
|
|
112
|
-
* @param {string} dateStr - YYYY-MM-DD string.
|
|
113
|
-
*/
|
|
114
141
|
static getActiveTradesForDate(historyTrades, dateStr) {
|
|
115
142
|
if (!historyTrades || !Array.isArray(historyTrades)) return [];
|
|
116
|
-
|
|
117
|
-
// Define the day's window
|
|
118
143
|
const startTime = new Date(dateStr + "T00:00:00.000Z").getTime();
|
|
119
144
|
const endTime = new Date(dateStr + "T23:59:59.999Z").getTime();
|
|
120
|
-
|
|
121
145
|
return historyTrades.filter(t => {
|
|
122
146
|
if (!t.OpenDateTime) return false;
|
|
123
147
|
const openTime = new Date(t.OpenDateTime).getTime();
|
|
124
|
-
|
|
125
|
-
// 1. Must be opened before the day ended
|
|
126
148
|
if (openTime > endTime) return false;
|
|
127
|
-
|
|
128
|
-
// 2. If closed, must be closed after the day started
|
|
129
149
|
if (t.CloseDateTime) {
|
|
130
150
|
const closeTime = new Date(t.CloseDateTime).getTime();
|
|
131
151
|
if (closeTime < startTime) return false;
|
|
132
152
|
}
|
|
133
|
-
|
|
134
153
|
return true;
|
|
135
154
|
});
|
|
136
155
|
}
|
|
@@ -140,7 +159,6 @@ class priceExtractor {
|
|
|
140
159
|
static getHistory(pricesContext, tickerOrId) {
|
|
141
160
|
if (!pricesContext || !pricesContext.history) return [];
|
|
142
161
|
let assetData = pricesContext.history[tickerOrId];
|
|
143
|
-
|
|
144
162
|
if (!assetData) {
|
|
145
163
|
const id = Object.keys(pricesContext.history).find(key => {
|
|
146
164
|
const data = pricesContext.history[key];
|
|
@@ -148,11 +166,9 @@ class priceExtractor {
|
|
|
148
166
|
});
|
|
149
167
|
if (id) assetData = pricesContext.history[id];
|
|
150
168
|
}
|
|
151
|
-
|
|
152
169
|
if (!assetData || !assetData.prices) return [];
|
|
153
170
|
const priceMap = assetData.prices;
|
|
154
171
|
const sortedDates = Object.keys(priceMap).sort((a, b) => a.localeCompare(b));
|
|
155
|
-
|
|
156
172
|
return sortedDates.map(date => ({
|
|
157
173
|
date: date,
|
|
158
174
|
price: priceMap[date]
|
|
@@ -177,15 +193,12 @@ class HistoryExtractor {
|
|
|
177
193
|
}
|
|
178
194
|
|
|
179
195
|
static getTradedAssets(historyDoc) {
|
|
180
|
-
// 1. Try Modern Granular Data (Derive Assets from Trades)
|
|
181
196
|
if (historyDoc?.PublicHistoryPositions?.length) {
|
|
182
197
|
const trades = historyDoc.PublicHistoryPositions;
|
|
183
198
|
const assetsMap = new Map();
|
|
184
|
-
|
|
185
199
|
for (const t of trades) {
|
|
186
200
|
const instId = t.InstrumentID;
|
|
187
201
|
if (!instId) continue;
|
|
188
|
-
|
|
189
202
|
if (!assetsMap.has(instId)) {
|
|
190
203
|
assetsMap.set(instId, {
|
|
191
204
|
instrumentId: instId,
|
|
@@ -193,12 +206,10 @@ class HistoryExtractor {
|
|
|
193
206
|
count: 0
|
|
194
207
|
});
|
|
195
208
|
}
|
|
196
|
-
|
|
197
209
|
const asset = assetsMap.get(instId);
|
|
198
210
|
const open = new Date(t.OpenDateTime);
|
|
199
211
|
const close = new Date(t.CloseDateTime);
|
|
200
212
|
const durationMins = (close - open) / 60000;
|
|
201
|
-
|
|
202
213
|
if (durationMins > 0) {
|
|
203
214
|
asset.totalDuration += durationMins;
|
|
204
215
|
asset.count++;
|
|
@@ -209,12 +220,9 @@ class HistoryExtractor {
|
|
|
209
220
|
avgHoldingTimeInMinutes: a.count > 0 ? (a.totalDuration / a.count) : 0
|
|
210
221
|
}));
|
|
211
222
|
}
|
|
212
|
-
|
|
213
|
-
// 2. Fallback to Legacy 'assets' array
|
|
214
223
|
if (historyDoc?.assets && Array.isArray(historyDoc.assets)) {
|
|
215
224
|
return historyDoc.assets;
|
|
216
225
|
}
|
|
217
|
-
|
|
218
226
|
return [];
|
|
219
227
|
}
|
|
220
228
|
|
|
@@ -227,7 +235,6 @@ class HistoryExtractor {
|
|
|
227
235
|
}
|
|
228
236
|
|
|
229
237
|
static getSummary(historyDoc) {
|
|
230
|
-
// 1. Try Modern Granular Data (Derive Summary)
|
|
231
238
|
if (historyDoc?.PublicHistoryPositions?.length) {
|
|
232
239
|
const trades = historyDoc.PublicHistoryPositions;
|
|
233
240
|
let totalTrades = trades.length;
|
|
@@ -237,7 +244,6 @@ class HistoryExtractor {
|
|
|
237
244
|
let profCount = 0;
|
|
238
245
|
let lossCount = 0;
|
|
239
246
|
let totalDur = 0;
|
|
240
|
-
|
|
241
247
|
for (const t of trades) {
|
|
242
248
|
if (t.NetProfit > 0) {
|
|
243
249
|
wins++;
|
|
@@ -251,7 +257,6 @@ class HistoryExtractor {
|
|
|
251
257
|
const close = new Date(t.CloseDateTime);
|
|
252
258
|
totalDur += (close - open) / 60000;
|
|
253
259
|
}
|
|
254
|
-
|
|
255
260
|
return {
|
|
256
261
|
totalTrades: totalTrades,
|
|
257
262
|
winRatio: totalTrades > 0 ? (wins / totalTrades) * 100 : 0,
|
|
@@ -260,49 +265,20 @@ class HistoryExtractor {
|
|
|
260
265
|
avgHoldingTimeInMinutes: totalTrades > 0 ? totalDur / totalTrades : 0
|
|
261
266
|
};
|
|
262
267
|
}
|
|
263
|
-
|
|
264
|
-
// 2. Fallback to Legacy 'all' object
|
|
265
268
|
if (historyDoc?.all) {
|
|
266
269
|
return historyDoc.all;
|
|
267
270
|
}
|
|
268
|
-
|
|
269
271
|
return null;
|
|
270
272
|
}
|
|
271
273
|
}
|
|
272
274
|
|
|
273
|
-
/**
|
|
274
|
-
* FIXED: InsightsExtractor to properly handle document structure
|
|
275
|
-
* Document structure: { fetchedAt: Timestamp, insights: [...] }
|
|
276
|
-
*/
|
|
277
|
-
|
|
278
275
|
class InsightsExtractor {
|
|
279
|
-
/**
|
|
280
|
-
* Gets insights array for today (default) or yesterday
|
|
281
|
-
* @param {Object} context - Computation context
|
|
282
|
-
* @param {string} timeframe - 'today' or 'yesterday'
|
|
283
|
-
* @returns {Array} Array of insight objects
|
|
284
|
-
*/
|
|
285
|
-
|
|
286
|
-
// MAJOR FIX TO GET INSIGHTS METHOD FOR THE CORRECT STRUCTURE AND SUPPORTING YESTERDAY + TODAY DATA REQUESTS
|
|
287
|
-
// THIS IS INJECTED TO BE USED LIKE :
|
|
288
|
-
|
|
289
|
-
// process(context) {
|
|
290
|
-
// const { insights: insightsHelper } = context.math;
|
|
291
|
-
// const insights = insightsHelper.getInsights(context); This is the direct call
|
|
292
|
-
|
|
293
276
|
static getInsights(context, timeframe = 'today') {
|
|
294
277
|
const insightsData = context.insights;
|
|
295
|
-
|
|
296
278
|
if (!insightsData) return [];
|
|
297
|
-
|
|
298
|
-
// Get the document for the requested timeframe
|
|
299
|
-
const doc = insightsData[timeframe]; // { fetchedAt: ..., insights: [...] }
|
|
300
|
-
|
|
279
|
+
const doc = insightsData[timeframe];
|
|
301
280
|
if (!doc) return [];
|
|
302
|
-
|
|
303
|
-
// Extract the insights array from the document
|
|
304
281
|
if (doc.insights && Array.isArray(doc.insights)) { return doc.insights; }
|
|
305
|
-
|
|
306
282
|
return [];
|
|
307
283
|
}
|
|
308
284
|
|
|
@@ -112,93 +112,98 @@ class MathPrimitives {
|
|
|
112
112
|
}
|
|
113
113
|
|
|
114
114
|
class FinancialEngineering {
|
|
115
|
-
/**
|
|
116
|
-
* Calculates the Sortino Ratio based on a series of trade returns.
|
|
117
|
-
* Uses Downside Deviation (risk of loss) rather than Standard Deviation (volatility).
|
|
118
|
-
* @param {number[]} returns - Array of PnL percentages from trades.
|
|
119
|
-
* @param {number} targetReturn - Minimum acceptable return (default 0).
|
|
120
|
-
*/
|
|
121
115
|
static sortinoRatio(returns, targetReturn = 0) {
|
|
122
116
|
if (!returns || returns.length < 2) return 0;
|
|
123
|
-
|
|
124
117
|
const avgReturn = MathPrimitives.average(returns);
|
|
125
|
-
|
|
126
|
-
// Calculate Downside Deviation (only negative deviations from target)
|
|
127
118
|
const downsideDiffs = returns.map(r => Math.min(0, r - targetReturn));
|
|
128
119
|
const downsideVariance = downsideDiffs.reduce((sum, d) => sum + (d * d), 0) / returns.length;
|
|
129
120
|
const downsideDev = Math.sqrt(downsideVariance);
|
|
130
|
-
|
|
131
|
-
if (downsideDev === 0) return 0; // No downside risk found
|
|
121
|
+
if (downsideDev === 0) return 0;
|
|
132
122
|
return (avgReturn - targetReturn) / downsideDev;
|
|
133
123
|
}
|
|
134
124
|
|
|
135
|
-
/**
|
|
136
|
-
* Calculates the Kelly Criterion (Optimal Leverage Fraction).
|
|
137
|
-
* f* = (bp - q) / b
|
|
138
|
-
* @param {number} winRatio - Win Rate (0-100).
|
|
139
|
-
* @param {number} avgWinPct - Average Win %.
|
|
140
|
-
* @param {number} avgLossPct - Average Loss % (must be negative or positive magnitude).
|
|
141
|
-
*/
|
|
142
125
|
static kellyCriterion(winRatio, avgWinPct, avgLossPct) {
|
|
143
126
|
const p = winRatio / 100;
|
|
144
127
|
const q = 1 - p;
|
|
145
128
|
const lossMag = Math.abs(avgLossPct);
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
const b = avgWinPct / lossMag; // Payoff odds (b to 1)
|
|
150
|
-
|
|
151
|
-
// Kelly Formula: (bp - q) / b
|
|
129
|
+
if (lossMag === 0) return 0;
|
|
130
|
+
const b = avgWinPct / lossMag;
|
|
152
131
|
const f = (b * p - q) / b;
|
|
153
|
-
|
|
154
|
-
return Math.max(0, f); // Clamp negative Kelly to 0 (Do not bet)
|
|
132
|
+
return Math.max(0, f);
|
|
155
133
|
}
|
|
156
134
|
}
|
|
157
135
|
|
|
158
136
|
class TimeSeriesAnalysis {
|
|
159
|
-
/**
|
|
160
|
-
* Calculates the Hurst Exponent via Rescaled Range (R/S) Analysis.
|
|
161
|
-
* H = 0.5: Random Walk (Gambler).
|
|
162
|
-
* H > 0.5: Persistent/Trending (Momentum).
|
|
163
|
-
* H < 0.5: Anti-Persistent/Mean Reverting (Oscillator).
|
|
164
|
-
* @param {number[]} series - Time series data (e.g. cumulative PnL or prices).
|
|
165
|
-
*/
|
|
166
137
|
static hurstExponent(series) {
|
|
167
|
-
if (!series || series.length < 10) return 0.5;
|
|
168
|
-
|
|
169
|
-
// Create logarithmic differences (returns)
|
|
138
|
+
if (!series || series.length < 10) return 0.5;
|
|
170
139
|
const logReturns = [];
|
|
171
140
|
for (let i = 1; i < series.length; i++) {
|
|
172
141
|
logReturns.push(Math.log(series[i] / series[i-1]));
|
|
173
142
|
}
|
|
174
|
-
|
|
175
|
-
// Simplified R/S calculation over full range
|
|
176
143
|
const mean = MathPrimitives.average(logReturns);
|
|
177
144
|
const stdDev = MathPrimitives.standardDeviation(logReturns);
|
|
178
|
-
|
|
179
145
|
if (stdDev === 0) return 0.5;
|
|
180
|
-
|
|
181
|
-
// Calculate Deviations from mean
|
|
182
146
|
const deviations = logReturns.map(r => r - mean);
|
|
183
|
-
|
|
184
|
-
// Calculate Cumulative Deviations (Cumulative Range)
|
|
185
147
|
let sum = 0;
|
|
186
148
|
const cumulativeDeviations = deviations.map(d => sum += d);
|
|
187
|
-
|
|
188
149
|
const maxDev = Math.max(...cumulativeDeviations);
|
|
189
150
|
const minDev = Math.min(...cumulativeDeviations);
|
|
190
151
|
const range = maxDev - minDev;
|
|
191
|
-
|
|
192
152
|
const rs = range / stdDev;
|
|
193
153
|
const n = logReturns.length;
|
|
194
|
-
|
|
195
|
-
// H = log(R/S) / log(n)
|
|
196
|
-
// Note: This is a point-estimate simplification of the full regression method
|
|
197
|
-
// but sufficient for behavioral classification.
|
|
198
154
|
const hurst = Math.log(rs) / Math.log(n);
|
|
199
|
-
|
|
200
155
|
return Math.min(1, Math.max(0, hurst));
|
|
201
156
|
}
|
|
157
|
+
|
|
158
|
+
/**
|
|
159
|
+
* UPGRADE: Fast Fourier Transform (Cooley-Tukey Algorithm) - O(n log n)
|
|
160
|
+
* Useful for detecting cycles in price data.
|
|
161
|
+
* @param {Array<number>} data - Input signal
|
|
162
|
+
* @returns {Array<Object>} Array of { real, imag } complex numbers
|
|
163
|
+
*/
|
|
164
|
+
static fft(data) {
|
|
165
|
+
const n = data.length;
|
|
166
|
+
if (n <= 1) return data.map(v => ({ real: v, imag: 0 }));
|
|
167
|
+
|
|
168
|
+
if ((n & (n - 1)) !== 0) {
|
|
169
|
+
// If n is not power of 2, zero-pad to next power of 2 for O(n log n)
|
|
170
|
+
const nextPow2 = Math.pow(2, Math.ceil(Math.log2(n)));
|
|
171
|
+
const padded = new Array(nextPow2).fill(0);
|
|
172
|
+
for(let i=0; i<n; i++) padded[i] = data[i];
|
|
173
|
+
return this.fft(padded);
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
const even = new Array(n / 2);
|
|
177
|
+
const odd = new Array(n / 2);
|
|
178
|
+
for (let i = 0; i < n / 2; i++) {
|
|
179
|
+
even[i] = data[2 * i];
|
|
180
|
+
odd[i] = data[2 * i + 1];
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
const evenResult = this.fft(even);
|
|
184
|
+
const oddResult = this.fft(odd);
|
|
185
|
+
|
|
186
|
+
const result = new Array(n);
|
|
187
|
+
for (let k = 0; k < n / 2; k++) {
|
|
188
|
+
const t = -2 * Math.PI * k / n;
|
|
189
|
+
const expReal = Math.cos(t);
|
|
190
|
+
const expImag = Math.sin(t);
|
|
191
|
+
|
|
192
|
+
// Multiply oddResult[k] by exp
|
|
193
|
+
const tReal = expReal * oddResult[k].real - expImag * oddResult[k].imag;
|
|
194
|
+
const tImag = expReal * oddResult[k].imag + expImag * oddResult[k].real;
|
|
195
|
+
|
|
196
|
+
result[k] = {
|
|
197
|
+
real: evenResult[k].real + tReal,
|
|
198
|
+
imag: evenResult[k].imag + tImag
|
|
199
|
+
};
|
|
200
|
+
result[k + n / 2] = {
|
|
201
|
+
real: evenResult[k].real - tReal,
|
|
202
|
+
imag: evenResult[k].imag - tImag
|
|
203
|
+
};
|
|
204
|
+
}
|
|
205
|
+
return result;
|
|
206
|
+
}
|
|
202
207
|
}
|
|
203
208
|
|
|
204
209
|
class SignalPrimitives {
|
|
@@ -206,7 +211,6 @@ class SignalPrimitives {
|
|
|
206
211
|
if (!dependencies || !dependencies[calcName]) return fallback;
|
|
207
212
|
const tickerData = dependencies[calcName][ticker];
|
|
208
213
|
if (!tickerData) return fallback;
|
|
209
|
-
|
|
210
214
|
const val = tickerData[fieldName];
|
|
211
215
|
return (typeof val === 'number') ? val : fallback;
|
|
212
216
|
}
|
|
@@ -241,10 +245,7 @@ class SignalPrimitives {
|
|
|
241
245
|
if (!previousComputed || !previousComputed[calcName]) return null;
|
|
242
246
|
const tickerData = previousComputed[calcName][ticker];
|
|
243
247
|
if (!tickerData) return null;
|
|
244
|
-
|
|
245
|
-
if (fieldName) {
|
|
246
|
-
return tickerData[fieldName];
|
|
247
|
-
}
|
|
248
|
+
if (fieldName) return tickerData[fieldName];
|
|
248
249
|
return tickerData;
|
|
249
250
|
}
|
|
250
251
|
}
|
|
@@ -255,18 +256,14 @@ class Aggregators {
|
|
|
255
256
|
for (const [userId, portfolio] of Object.entries(usersData)) {
|
|
256
257
|
const userType = portfolio.PublicPositions ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
|
|
257
258
|
const positions = DataExtractor.getPositions(portfolio, userType);
|
|
258
|
-
|
|
259
259
|
for (const pos of positions) {
|
|
260
260
|
const id = DataExtractor.getInstrumentId(pos);
|
|
261
261
|
const pnl = DataExtractor.getNetProfit(pos);
|
|
262
262
|
if (!id || pnl === 0) continue;
|
|
263
|
-
|
|
264
263
|
const ticker = tickerMap[id];
|
|
265
264
|
if (!ticker) continue;
|
|
266
|
-
|
|
267
265
|
if (!buckets.has(ticker)) buckets.set(ticker, { winners: [], losers: [] });
|
|
268
266
|
const b = buckets.get(ticker);
|
|
269
|
-
|
|
270
267
|
if (pnl > 0) b.winners.push(userId);
|
|
271
268
|
else b.losers.push(userId);
|
|
272
269
|
}
|
|
@@ -276,20 +273,16 @@ class Aggregators {
|
|
|
276
273
|
|
|
277
274
|
static getWeightedSentiment(positions) {
|
|
278
275
|
if (!positions || positions.length === 0) return 0;
|
|
279
|
-
|
|
280
276
|
let totalWeightedPnL = 0;
|
|
281
277
|
let totalWeight = 0;
|
|
282
|
-
|
|
283
278
|
for (const pos of positions) {
|
|
284
279
|
const pnl = DataExtractor.getNetProfit(pos);
|
|
285
280
|
const weight = DataExtractor.getPositionWeight(pos);
|
|
286
|
-
|
|
287
281
|
if (weight > 0) {
|
|
288
282
|
totalWeightedPnL += (pnl * weight);
|
|
289
283
|
totalWeight += weight;
|
|
290
284
|
}
|
|
291
285
|
}
|
|
292
|
-
|
|
293
286
|
if (totalWeight === 0) return 0;
|
|
294
287
|
return totalWeightedPnL / totalWeight;
|
|
295
288
|
}
|
|
@@ -299,61 +292,90 @@ class TimeSeries {
|
|
|
299
292
|
static updateEMAState(value, state, alpha = 0.1) {
|
|
300
293
|
const mean = state ? (state.mean || 0) : 0;
|
|
301
294
|
const variance = state ? (state.variance || 1) : 1;
|
|
302
|
-
|
|
303
295
|
if (value === undefined || value === null || isNaN(value)) {
|
|
304
296
|
return { mean, variance };
|
|
305
297
|
}
|
|
306
|
-
|
|
307
298
|
const diff = value - mean;
|
|
308
299
|
const newMean = mean + (alpha * diff);
|
|
309
300
|
const newVariance = (1 - alpha) * (variance + (alpha * diff * diff));
|
|
310
|
-
|
|
311
301
|
return { mean: newMean, variance: newVariance };
|
|
312
302
|
}
|
|
313
303
|
|
|
314
304
|
static pearsonCorrelation(x, y) {
|
|
315
305
|
if (!x || !y || x.length !== y.length || x.length === 0) return 0;
|
|
316
|
-
|
|
317
306
|
const n = x.length;
|
|
318
307
|
let sumX = 0, sumY = 0, sumXY = 0, sumX2 = 0, sumY2 = 0;
|
|
319
|
-
|
|
320
308
|
for (let i = 0; i < n; i++) {
|
|
321
|
-
sumX += x[i];
|
|
322
|
-
sumY += y[i];
|
|
309
|
+
sumX += x[i]; sumY += y[i];
|
|
323
310
|
sumXY += x[i] * y[i];
|
|
324
|
-
sumX2 += x[i] * x[i];
|
|
325
|
-
sumY2 += y[i] * y[i];
|
|
311
|
+
sumX2 += x[i] * x[i]; sumY2 += y[i] * y[i];
|
|
326
312
|
}
|
|
327
|
-
|
|
328
313
|
const numerator = (n * sumXY) - (sumX * sumY);
|
|
329
314
|
const denominator = Math.sqrt(((n * sumX2) - (sumX * sumX)) * ((n * sumY2) - (sumY * sumY)));
|
|
330
|
-
|
|
331
315
|
return (denominator === 0) ? 0 : numerator / denominator;
|
|
332
316
|
}
|
|
317
|
+
|
|
318
|
+
/**
|
|
319
|
+
* UPGRADE: Sliding Window Min/Max using Monotonic Queue - O(n)
|
|
320
|
+
* Calculates rolling min/max for a stream of numbers efficiently.
|
|
321
|
+
* @param {Array<number>} data - Input series
|
|
322
|
+
* @param {number} windowSize - The rolling window size
|
|
323
|
+
* @returns {Object} { min: Array, max: Array }
|
|
324
|
+
*/
|
|
325
|
+
static slidingWindowExtrema(data, windowSize) {
|
|
326
|
+
if (!data || data.length === 0) return { min: [], max: [] };
|
|
327
|
+
const resultMin = [];
|
|
328
|
+
const resultMax = [];
|
|
329
|
+
const dequeMin = []; // Indexes for Min (increasing values)
|
|
330
|
+
const dequeMax = []; // Indexes for Max (decreasing values)
|
|
331
|
+
|
|
332
|
+
for (let i = 0; i < data.length; i++) {
|
|
333
|
+
// 1. Remove out of range
|
|
334
|
+
if (dequeMin.length > 0 && dequeMin[0] <= i - windowSize) dequeMin.shift();
|
|
335
|
+
if (dequeMax.length > 0 && dequeMax[0] <= i - windowSize) dequeMax.shift();
|
|
336
|
+
|
|
337
|
+
// 2. Maintain Monotonic properties
|
|
338
|
+
// Min: Remove elements from tail that are >= current
|
|
339
|
+
while (dequeMin.length > 0 && data[dequeMin[dequeMin.length - 1]] >= data[i]) {
|
|
340
|
+
dequeMin.pop();
|
|
341
|
+
}
|
|
342
|
+
// Max: Remove elements from tail that are <= current
|
|
343
|
+
while (dequeMax.length > 0 && data[dequeMax[dequeMax.length - 1]] <= data[i]) {
|
|
344
|
+
dequeMax.pop();
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
// 3. Add current
|
|
348
|
+
dequeMin.push(i);
|
|
349
|
+
dequeMax.push(i);
|
|
350
|
+
|
|
351
|
+
// 4. Record result (once window is full)
|
|
352
|
+
if (i >= windowSize - 1) {
|
|
353
|
+
resultMin.push(data[dequeMin[0]]);
|
|
354
|
+
resultMax.push(data[dequeMax[0]]);
|
|
355
|
+
}
|
|
356
|
+
}
|
|
357
|
+
return { min: resultMin, max: resultMax };
|
|
358
|
+
}
|
|
333
359
|
}
|
|
334
360
|
|
|
335
361
|
class DistributionAnalytics {
|
|
336
362
|
static computeKDE(data, bandwidth, steps = 60) {
|
|
337
363
|
if (!data || data.length === 0) return [];
|
|
338
|
-
|
|
339
364
|
let min = Infinity, max = -Infinity;
|
|
340
365
|
for (const p of data) {
|
|
341
366
|
if (p.value < min) min = p.value;
|
|
342
367
|
if (p.value > max) max = p.value;
|
|
343
368
|
}
|
|
344
|
-
|
|
345
369
|
min -= bandwidth * 3;
|
|
346
370
|
max += bandwidth * 3;
|
|
347
371
|
const stepSize = (max - min) / steps;
|
|
348
372
|
const curve = [];
|
|
349
|
-
|
|
350
373
|
for (let i = 0; i <= steps; i++) {
|
|
351
374
|
const x = min + (i * stepSize);
|
|
352
375
|
let density = 0;
|
|
353
376
|
for (const p of data) {
|
|
354
377
|
const diff = (x - p.value);
|
|
355
378
|
if (Math.abs(diff) > bandwidth * 3) continue;
|
|
356
|
-
|
|
357
379
|
const u = diff / bandwidth;
|
|
358
380
|
const k = 0.39894228 * Math.exp(-0.5 * u * u);
|
|
359
381
|
density += (p.weight * k) / bandwidth;
|
|
@@ -364,7 +386,7 @@ class DistributionAnalytics {
|
|
|
364
386
|
}
|
|
365
387
|
|
|
366
388
|
static integrateProfile(curve, startPrice, endPrice) {
|
|
367
|
-
if (!curve || !Array.isArray(curve)) return 0;
|
|
389
|
+
if (!curve || !Array.isArray(curve)) return 0;
|
|
368
390
|
let sum = 0;
|
|
369
391
|
for (let i = 0; i < curve.length - 1; i++) {
|
|
370
392
|
const p1 = curve[i];
|
|
@@ -379,16 +401,12 @@ class DistributionAnalytics {
|
|
|
379
401
|
static linearRegression(xValues, yValues) {
|
|
380
402
|
const n = xValues.length;
|
|
381
403
|
if (n !== yValues.length || n < 2) return { slope: 0, r2: 0 };
|
|
382
|
-
|
|
383
404
|
let sumX = 0, sumY = 0, sumXY = 0, sumXX = 0, sumYY = 0;
|
|
384
405
|
for (let i = 0; i < n; i++) {
|
|
385
|
-
sumX += xValues[i];
|
|
386
|
-
sumY += yValues[i];
|
|
406
|
+
sumX += xValues[i]; sumY += yValues[i];
|
|
387
407
|
sumXY += xValues[i] * yValues[i];
|
|
388
|
-
sumXX += xValues[i] * xValues[i];
|
|
389
|
-
sumYY += yValues[i] * yValues[i];
|
|
408
|
+
sumXX += xValues[i] * xValues[i]; sumYY += yValues[i] * yValues[i];
|
|
390
409
|
}
|
|
391
|
-
|
|
392
410
|
const slope = (n * sumXY - sumX * sumY) / (n * sumXX - sumX * sumX);
|
|
393
411
|
return { slope, n };
|
|
394
412
|
}
|
|
@@ -21,15 +21,7 @@ const SCHEMAS = {
|
|
|
21
21
|
}
|
|
22
22
|
};
|
|
23
23
|
|
|
24
|
-
// ========================================================================
|
|
25
|
-
// 1. SMART MONEY SCORING ENGINE (NEW)
|
|
26
|
-
// ========================================================================
|
|
27
|
-
|
|
28
24
|
class SmartMoneyScorer {
|
|
29
|
-
|
|
30
|
-
/**
|
|
31
|
-
* Internal Helper: Calculate Pearson Correlation
|
|
32
|
-
*/
|
|
33
25
|
static _correlation(x, y) {
|
|
34
26
|
if (!x || !y || x.length !== y.length || x.length < 2) return 0;
|
|
35
27
|
const n = x.length;
|
|
@@ -44,14 +36,6 @@ class SmartMoneyScorer {
|
|
|
44
36
|
return (denominator === 0) ? 0 : numerator / denominator;
|
|
45
37
|
}
|
|
46
38
|
|
|
47
|
-
/**
|
|
48
|
-
* Mode 1: Portfolio-Based Scoring
|
|
49
|
-
* Heuristics:
|
|
50
|
-
* 1. Diversification (Sector/Asset count)
|
|
51
|
-
* 2. Allocation Efficiency (Correlation of Size vs Profit)
|
|
52
|
-
* 3. Shorting Competence
|
|
53
|
-
* 4. Concentration Risk (HHI)
|
|
54
|
-
*/
|
|
55
39
|
static scorePortfolio(portfolio, userType, prices, mappings, math) {
|
|
56
40
|
const positions = math.extract.getPositions(portfolio, userType);
|
|
57
41
|
if (!positions || positions.length === 0) return { score: 0, label: SCHEMAS.LABELS.NEUTRAL };
|
|
@@ -66,10 +50,9 @@ class SmartMoneyScorer {
|
|
|
66
50
|
const sectors = new Set();
|
|
67
51
|
const tickers = new Set();
|
|
68
52
|
|
|
69
|
-
// 1. Data Aggregation
|
|
70
53
|
for (const pos of positions) {
|
|
71
54
|
const invested = math.extract.getPositionWeight(pos, userType);
|
|
72
|
-
const pnl = math.extract.getNetProfit(pos);
|
|
55
|
+
const pnl = math.extract.getNetProfit(pos);
|
|
73
56
|
const instId = math.extract.getInstrumentId(pos);
|
|
74
57
|
const isShort = math.extract.getDirection(pos) === 'Sell';
|
|
75
58
|
|
|
@@ -94,56 +77,38 @@ class SmartMoneyScorer {
|
|
|
94
77
|
|
|
95
78
|
if (totalInvested === 0) return { score: 0, label: SCHEMAS.LABELS.NEUTRAL };
|
|
96
79
|
|
|
97
|
-
// 2. Metrics Calculation
|
|
98
80
|
const avgPnL = weightedPnL / totalInvested;
|
|
99
|
-
|
|
100
|
-
// A. Allocation Efficiency (Do they bet big on winners?)
|
|
101
|
-
// Correlation between Invested Amount and PnL %
|
|
102
|
-
const allocEfficiency = this._correlation(weights, pnls); // -1 to 1
|
|
81
|
+
const allocEfficiency = this._correlation(weights, pnls);
|
|
103
82
|
|
|
104
|
-
// B. Diversification & Concentration (HHI)
|
|
105
|
-
// Sum of squared market shares. 1.0 = Monopoly. 0.0 = Infinite.
|
|
106
83
|
let hhi = 0;
|
|
107
84
|
for (const w of weights) {
|
|
108
85
|
const share = w / totalInvested;
|
|
109
86
|
hhi += (share * share);
|
|
110
87
|
}
|
|
111
88
|
|
|
112
|
-
// C. Shorting Competence
|
|
113
89
|
const shortRatio = shortInvested / totalInvested;
|
|
114
90
|
const avgShortPnL = shortInvested > 0 ? shortPnL / shortInvested : 0;
|
|
115
91
|
|
|
116
|
-
// 3. Scoring Logic
|
|
117
92
|
let score = 50;
|
|
118
93
|
|
|
119
|
-
// Efficiency Bonus: If > 0.5, they size winners up. (+20)
|
|
120
|
-
// If < -0.3, they are bagholding losers with large size (-15)
|
|
121
94
|
if (allocEfficiency > 0.5) score += 20;
|
|
122
95
|
else if (allocEfficiency < -0.3) score -= 15;
|
|
123
96
|
|
|
124
|
-
// Profitability (The ultimate metric)
|
|
125
97
|
if (avgPnL > 5) score += 10;
|
|
126
98
|
if (avgPnL > 20) score += 10;
|
|
127
99
|
if (avgPnL < -10) score -= 10;
|
|
128
100
|
if (avgPnL < -25) score -= 15;
|
|
129
101
|
|
|
130
|
-
// Concentration Logic
|
|
131
|
-
// High Concentration (HHI > 0.3) is "Smart" ONLY if profitable (Sniper)
|
|
132
|
-
// High Concentration and unprofitable is "Dumb" (Bagholder/Gambler)
|
|
133
102
|
if (hhi > 0.3) {
|
|
134
|
-
if (avgPnL > 5) score += 10;
|
|
135
|
-
else if (avgPnL < -5) score -= 10;
|
|
103
|
+
if (avgPnL > 5) score += 10;
|
|
104
|
+
else if (avgPnL < -5) score -= 10;
|
|
136
105
|
}
|
|
137
106
|
|
|
138
|
-
// Diversification Logic
|
|
139
|
-
// High Sector count (>3) reduces risk penalty
|
|
140
107
|
if (sectors.size >= 4) score += 5;
|
|
141
108
|
|
|
142
|
-
// Shorting Logic
|
|
143
|
-
// Penalize speculation unless they are actually good at it
|
|
144
109
|
if (shortRatio > 0.1) {
|
|
145
|
-
if (avgShortPnL > 0) score += 10;
|
|
146
|
-
else score -= 10;
|
|
110
|
+
if (avgShortPnL > 0) score += 10;
|
|
111
|
+
else score -= 10;
|
|
147
112
|
}
|
|
148
113
|
|
|
149
114
|
return {
|
|
@@ -152,33 +117,18 @@ class SmartMoneyScorer {
|
|
|
152
117
|
};
|
|
153
118
|
}
|
|
154
119
|
|
|
155
|
-
/**
|
|
156
|
-
* Mode 2: History-Based Scoring
|
|
157
|
-
* Heuristics:
|
|
158
|
-
* 1. Win/Loss Ratio & Profit Factor
|
|
159
|
-
* 2. Asset Consistency (Revenge trading vs Specialist)
|
|
160
|
-
* 3. Entry Efficiency (Buying Lows)
|
|
161
|
-
* 4. Exit Efficiency (Selling Highs - Opportunity Cost)
|
|
162
|
-
* 5. Churn (Overtrading)
|
|
163
|
-
* 6. DCA/Entry Patterns
|
|
164
|
-
*/
|
|
165
120
|
static scoreHistory(historyDoc, prices, mappings, math) {
|
|
166
|
-
// Handle V2 Schema (PublicHistoryPositions)
|
|
167
121
|
const trades = historyDoc?.PublicHistoryPositions || [];
|
|
168
|
-
// Handle V1 Schema fallback if needed (though prompt implies V2)
|
|
169
122
|
|
|
170
123
|
if (trades.length < 5) return { score: 0, label: SCHEMAS.LABELS.NEUTRAL };
|
|
171
|
-
|
|
172
|
-
// Filter valid trades
|
|
173
124
|
const validTrades = trades.filter(t => t.OpenDateTime && t.CloseDateTime && t.InstrumentID);
|
|
174
125
|
if (validTrades.length < 5) return { score: 0, label: SCHEMAS.LABELS.NEUTRAL };
|
|
175
126
|
|
|
176
127
|
let wins = 0, losses = 0;
|
|
177
128
|
let totalWinPct = 0, totalLossPct = 0;
|
|
178
129
|
let entryScores = [];
|
|
179
|
-
const assetsTraded = new Map();
|
|
130
|
+
const assetsTraded = new Map();
|
|
180
131
|
|
|
181
|
-
// Time sorting for Churn analysis
|
|
182
132
|
validTrades.sort((a, b) => new Date(a.OpenDateTime) - new Date(b.OpenDateTime));
|
|
183
133
|
const firstDate = new Date(validTrades[0].OpenDateTime);
|
|
184
134
|
const lastDate = new Date(validTrades[validTrades.length-1].OpenDateTime);
|
|
@@ -187,21 +137,17 @@ class SmartMoneyScorer {
|
|
|
187
137
|
for (const t of validTrades) {
|
|
188
138
|
const ticker = mappings.instrumentToTicker[t.InstrumentID];
|
|
189
139
|
|
|
190
|
-
// Asset Consistency
|
|
191
140
|
if (!assetsTraded.has(t.InstrumentID)) assetsTraded.set(t.InstrumentID, { count: 0, pnl: 0 });
|
|
192
141
|
const assetStat = assetsTraded.get(t.InstrumentID);
|
|
193
142
|
assetStat.count++;
|
|
194
143
|
assetStat.pnl += t.NetProfit;
|
|
195
144
|
|
|
196
|
-
// A. Win/Loss Stats
|
|
197
145
|
if (t.NetProfit > 0) { wins++; totalWinPct += t.NetProfit; }
|
|
198
146
|
else { losses++; totalLossPct += Math.abs(t.NetProfit); }
|
|
199
147
|
|
|
200
|
-
// B. Entry Timing (Requires Price History)
|
|
201
148
|
if (ticker && prices) {
|
|
202
149
|
const priceHist = math.priceExtractor.getHistory(prices, ticker);
|
|
203
150
|
if (priceHist && priceHist.length > 0) {
|
|
204
|
-
// 1.0 = Perfect Low, 0.0 = Bought High
|
|
205
151
|
const eff = ExecutionAnalytics.calculateEfficiency(t.OpenRate, priceHist, t.OpenDateTime, t.IsBuy ? 'Buy' : 'Sell');
|
|
206
152
|
entryScores.push(eff);
|
|
207
153
|
}
|
|
@@ -212,45 +158,33 @@ class SmartMoneyScorer {
|
|
|
212
158
|
const avgLoss = losses > 0 ? totalLossPct / losses : 1;
|
|
213
159
|
const profitFactor = (wins * avgWin) / Math.max(1, (losses * avgLoss));
|
|
214
160
|
|
|
215
|
-
// C. Entry Skill
|
|
216
161
|
const avgEntrySkill = entryScores.length > 0 ? math.compute.average(entryScores) : 0.5;
|
|
217
162
|
|
|
218
|
-
// D. Consistency / Specialization
|
|
219
|
-
// Do they trade 100 tickers once (Gambler) or 5 tickers 20 times (Specialist)?
|
|
220
163
|
const totalTrades = validTrades.length;
|
|
221
164
|
const uniqueAssets = assetsTraded.size;
|
|
222
|
-
const specializationRatio = 1 - (uniqueAssets / totalTrades);
|
|
165
|
+
const specializationRatio = 1 - (uniqueAssets / totalTrades);
|
|
223
166
|
|
|
224
|
-
// E. Overtrading (Churn)
|
|
225
167
|
const tradesPerDay = totalTrades / daysActive;
|
|
226
168
|
|
|
227
|
-
// F. Revenge Trading Check
|
|
228
|
-
// High count on a specific asset with negative total PnL
|
|
229
169
|
let revengeScore = 0;
|
|
230
170
|
for (const [id, stat] of assetsTraded.entries()) {
|
|
231
171
|
if (stat.pnl < -20 && stat.count > 5) revengeScore += 1;
|
|
232
172
|
}
|
|
233
173
|
|
|
234
|
-
// Scoring Logic
|
|
235
174
|
let score = 50;
|
|
236
175
|
|
|
237
|
-
// Profit Factor (Primary Driver)
|
|
238
176
|
if (profitFactor > 1.2) score += 10;
|
|
239
177
|
if (profitFactor > 2.0) score += 15;
|
|
240
178
|
if (profitFactor < 0.8) score -= 15;
|
|
241
179
|
|
|
242
|
-
|
|
243
|
-
if (avgEntrySkill
|
|
244
|
-
if (avgEntrySkill < 0.3) score -= 10; // FOMO
|
|
180
|
+
if (avgEntrySkill > 0.7) score += 10;
|
|
181
|
+
if (avgEntrySkill < 0.3) score -= 10;
|
|
245
182
|
|
|
246
|
-
|
|
247
|
-
if (specializationRatio
|
|
248
|
-
if (specializationRatio < 0.1 && totalTrades > 20) score -= 5; // Scattergun penalty
|
|
183
|
+
if (specializationRatio > 0.6) score += 5;
|
|
184
|
+
if (specializationRatio < 0.1 && totalTrades > 20) score -= 5;
|
|
249
185
|
|
|
250
|
-
|
|
251
|
-
if (tradesPerDay > 10 && profitFactor < 1.0) score -= 10; // Brokerage Cash Cow
|
|
186
|
+
if (tradesPerDay > 10 && profitFactor < 1.0) score -= 10;
|
|
252
187
|
|
|
253
|
-
// Revenge Penalty
|
|
254
188
|
if (revengeScore > 0) score -= (revengeScore * 5);
|
|
255
189
|
|
|
256
190
|
return {
|
|
@@ -259,14 +193,9 @@ class SmartMoneyScorer {
|
|
|
259
193
|
};
|
|
260
194
|
}
|
|
261
195
|
|
|
262
|
-
/**
|
|
263
|
-
* Mode 3: Hybrid Scoring
|
|
264
|
-
* Merges Portfolio (Unrealized/Current) and History (Realized/Past).
|
|
265
|
-
*/
|
|
266
196
|
static scoreHybrid(context) {
|
|
267
197
|
const { user, prices, mappings, math } = context;
|
|
268
198
|
|
|
269
|
-
// Get Sub-Scores
|
|
270
199
|
const pScore = this.scorePortfolio(user.portfolio.today, user.type, prices, mappings, math);
|
|
271
200
|
const hScore = this.scoreHistory(user.history.today, prices, mappings, math);
|
|
272
201
|
|
|
@@ -277,7 +206,6 @@ class SmartMoneyScorer {
|
|
|
277
206
|
const hasPortfolio = pScore && pScore.score > 0;
|
|
278
207
|
|
|
279
208
|
if (hasHistory && hasPortfolio) {
|
|
280
|
-
// Weighted: 60% Track Record (History), 40% Current Positioning (Portfolio)
|
|
281
209
|
finalScore = (hScore.score * 0.6) + (pScore.score * 0.4);
|
|
282
210
|
method = 'Hybrid';
|
|
283
211
|
} else if (hasHistory) {
|
|
@@ -288,7 +216,6 @@ class SmartMoneyScorer {
|
|
|
288
216
|
method = 'PortfolioOnly';
|
|
289
217
|
}
|
|
290
218
|
|
|
291
|
-
// Classification Label
|
|
292
219
|
let label = SCHEMAS.LABELS.NEUTRAL;
|
|
293
220
|
if (finalScore >= 80) label = SCHEMAS.LABELS.ELITE;
|
|
294
221
|
else if (finalScore >= 65) label = SCHEMAS.LABELS.SMART;
|
|
@@ -307,10 +234,6 @@ class SmartMoneyScorer {
|
|
|
307
234
|
}
|
|
308
235
|
}
|
|
309
236
|
|
|
310
|
-
// ========================================================================
|
|
311
|
-
// 2. SUPPORTING ANALYTICS ENGINES
|
|
312
|
-
// ========================================================================
|
|
313
|
-
|
|
314
237
|
class CognitiveBiases {
|
|
315
238
|
static calculateAnchoringScore(openPositions, thresholdPct = 2.0, minDaysHeld = 14) {
|
|
316
239
|
if (!openPositions || openPositions.length === 0) return 0;
|
|
@@ -376,6 +299,49 @@ class ExecutionAnalytics {
|
|
|
376
299
|
}
|
|
377
300
|
}
|
|
378
301
|
|
|
302
|
+
/**
 * UPGRADE: Risk Geometry Class
 * Contains Convex Hull (Monotone Chain) for Efficient Frontier analysis.
 */
class RiskGeometry {
    /**
     * 2D cross product of vectors OA and OB.
     * Positive => counter-clockwise turn, negative => clockwise, 0 => collinear.
     */
    static crossProduct(o, a, b) {
        return (a.x - o.x) * (b.y - o.y) - (a.y - o.y) * (b.x - o.x);
    }

    /**
     * Computes the Convex Hull of a set of 2D points using Monotone Chain algorithm.
     * FIXED: no longer sorts the caller's array in place — works on a shallow copy,
     * so the input ordering is preserved for the caller.
     * @param {Array<{x: number, y: number}>} points
     * @returns {Array<{x: number, y: number}>} Points on the hull (counter-clockwise)
     */
    static computeConvexHull(points) {
        if (points.length <= 1) return points;

        // Sort a copy by x coordinate (and y if x is same); input stays untouched.
        const sorted = [...points].sort((a, b) => (a.x === b.x ? a.y - b.y : a.x - b.x));

        const lower = [];
        for (const p of sorted) {
            while (lower.length >= 2 && this.crossProduct(lower[lower.length - 2], lower[lower.length - 1], p) <= 0) {
                lower.pop();
            }
            lower.push(p);
        }

        const upper = [];
        for (let i = sorted.length - 1; i >= 0; i--) {
            const p = sorted[i];
            while (upper.length >= 2 && this.crossProduct(upper[upper.length - 2], upper[upper.length - 1], p) <= 0) {
                upper.pop();
            }
            upper.push(p);
        }

        // Drop the duplicated endpoints where the two chains meet.
        upper.pop();
        lower.pop();
        return lower.concat(upper);
    }
}
|
|
344
|
+
|
|
379
345
|
class Psychometrics {
|
|
380
346
|
static computeDispositionSkew(historyTrades, currentPositions) {
|
|
381
347
|
const getMedian = (arr) => { if (!arr.length) return 0; const sorted = [...arr].sort((a, b) => a - b); const mid = Math.floor(sorted.length / 2); return sorted.length % 2 !== 0 ? sorted[mid] : (sorted[mid - 1] + sorted[mid]) / 2; };
|
|
@@ -413,20 +379,16 @@ class AdaptiveAnalytics {
|
|
|
413
379
|
}
|
|
414
380
|
}
|
|
415
381
|
|
|
416
|
-
// Legacy Wrapper for backward compatibility with older calculations
|
|
417
382
|
class UserClassifier {
|
|
418
383
|
static classify(context) {
|
|
419
|
-
// Delegate to the new robust Hybrid Scorer
|
|
420
384
|
const result = SmartMoneyScorer.scoreHybrid(context);
|
|
421
|
-
|
|
422
|
-
// Map new result structure to legacy structure expected by V1 calcs
|
|
423
385
|
return {
|
|
424
386
|
intelligence: {
|
|
425
387
|
label: result.label,
|
|
426
388
|
score: result.totalScore,
|
|
427
389
|
isSmart: result.totalScore >= 65
|
|
428
390
|
},
|
|
429
|
-
style: { primary: SCHEMAS.STYLES.INVESTOR },
|
|
391
|
+
style: { primary: SCHEMAS.STYLES.INVESTOR },
|
|
430
392
|
metrics: {
|
|
431
393
|
profitFactor: result.components.history?.metrics?.profitFactor || 0,
|
|
432
394
|
allocEfficiency: result.components.portfolio?.metrics?.allocEfficiency || 0
|
|
@@ -438,10 +400,11 @@ class UserClassifier {
|
|
|
438
400
|
// Public API of the profiling layer: the scoring engines, supporting
// analytics classes, and the legacy UserClassifier wrapper.
module.exports = {
    SCHEMAS,
    UserClassifier,
    SmartMoneyScorer,
    ExecutionAnalytics,
    Psychometrics,
    AdaptiveAnalytics,
    CognitiveBiases,
    SkillAttribution,
    RiskGeometry
};
|
|
@@ -1,33 +1,107 @@
|
|
|
1
|
+
const { analyzeDateExecution } = require('../WorkflowOrchestrator');
|
|
2
|
+
const { fetchComputationStatus, updateComputationStatus } = require('../persistence/StatusRepository');
|
|
3
|
+
const { normalizeName, getExpectedDateStrings, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils');
|
|
4
|
+
const { checkRootDataAvailability } = require('../data/AvailabilityChecker');
|
|
5
|
+
const SimRunner = require('../simulation/SimRunner');
|
|
6
|
+
const pLimit = require('p-limit');
|
|
7
|
+
const path = require('path');
|
|
8
|
+
const crypto = require('crypto');
|
|
9
|
+
const fs = require('fs'); // [NEW] Required for file reading
|
|
10
|
+
const packageJson = require(path.join(__dirname, '..', '..', '..', 'package.json'));
|
|
11
|
+
const packageVersion = packageJson.version;
|
|
12
|
+
const { generateCodeHash } = require('../utils/utils'); // Reuse your standard logic
|
|
13
|
+
|
|
14
|
+
// Persistent Registry for SimHashes
|
|
15
|
+
const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';
|
|
16
|
+
|
|
17
|
+
// --- [NEW] RECURSIVE SYSTEM HASHING ---
|
|
18
|
+
|
|
19
|
+
// 1. define the Root of the system (one level up from 'tools')
|
|
20
|
+
const SYSTEM_ROOT = path.resolve(__dirname, '..');
|
|
21
|
+
|
|
22
|
+
// 2. Define what to ignore to prevent noise or infinite loops
|
|
23
|
+
const IGNORED_DIRS = new Set(['node_modules', '.git', '.idea', 'coverage', 'logs', 'tests']);
|
|
24
|
+
const IGNORED_FILES = new Set(['package-lock.json', '.DS_Store', '.env']);
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* Recursively walks a directory and returns a list of file paths.
|
|
28
|
+
*/
|
|
29
|
+
/**
 * Recursively walks a directory tree, accumulating paths of hashable files.
 * Names in IGNORED_FILES are skipped; directories in IGNORED_DIRS are not
 * descended into. Only .js/.json/.yaml files are collected.
 *
 * @param {string} dir - Directory to walk.
 * @param {string[]} [fileList] - Accumulator threaded through the recursion.
 * @returns {string[]} Collected file paths.
 */
function walkSync(dir, fileList = []) {
    for (const entryName of fs.readdirSync(dir)) {
        // Blocklisted names are skipped outright (checked before stat).
        if (IGNORED_FILES.has(entryName)) continue;

        const entryPath = path.join(dir, entryName);

        if (fs.statSync(entryPath).isDirectory()) {
            // Recurse into subdirectories unless they are blocklisted.
            if (!IGNORED_DIRS.has(entryName)) {
                walkSync(entryPath, fileList);
            }
            continue;
        }

        // Only hash code files (add .yaml if you want workflows included)
        const isHashable = ['.js', '.json', '.yaml'].some((ext) => entryName.endsWith(ext));
        if (isHashable) {
            fileList.push(entryPath);
        }
    }
    return fileList;
}
|
|
51
|
+
|
|
1
52
|
/**
|
|
2
|
-
*
|
|
3
|
-
* Generates a "Pre-Flight" report of what the computation system WILL do.
|
|
4
|
-
* UPGRADED: Implements Behavioral Hashing (SimHash) to detect Cosmetic vs Logic changes.
|
|
5
|
-
* OPTIMIZED: Caches SimHashes and actively updates status for Stable items to prevent re-runs.
|
|
6
|
-
* OPTIMIZED (V2): Implements System Fingerprinting to skip 90-day scan if manifest is identical.
|
|
53
|
+
* Generates a single hash representing the entire infrastructure code state.
|
|
7
54
|
*/
|
|
55
|
+
/**
 * Generates a single hash representing the entire infrastructure code state.
 * Walks SYSTEM_ROOT, normalizes each file (JS through the shared
 * generateCodeHash routine, JSON/YAML by stripping whitespace) and folds
 * `relativePath:content|` pairs into one SHA-256 digest.
 * Returns the sentinel 'infra_hash_error' on any failure so the report can
 * still run instead of crashing.
 */
function getInfrastructureHash() {
    try {
        // Deterministic ordering is essential: identical tree => identical digest.
        const filePaths = walkSync(SYSTEM_ROOT).sort();

        const masterHash = filePaths.reduce((hash, filePath) => {
            const raw = fs.readFileSync(filePath, 'utf8');
            const relativePath = path.relative(SYSTEM_ROOT, filePath);

            // JS files reuse the system-standard code hashing (strips comments
            // and whitespace consistently with ManifestBuilder); other formats
            // only have their whitespace removed so indent changes don't matter.
            const cleanContent = filePath.endsWith('.js')
                ? generateCodeHash(raw)
                : raw.replace(/\s+/g, '');

            return hash.update(`${relativePath}:${cleanContent}|`);
        }, crypto.createHash('sha256'));

        return masterHash.digest('hex');
    } catch (e) {
        console.warn(`[BuildReporter] ⚠️ Failed to generate infra hash: ${e.message}`);
        return 'infra_hash_error'; // Fail safe so we run the report rather than crash
    }
}
|
|
22
90
|
|
|
23
91
|
/**
|
|
24
|
-
*
|
|
25
|
-
* If ANY calculation logic or dependency changes, this hash changes.
|
|
92
|
+
* UPDATED: System Fingerprint = Manifest Hash + Infrastructure Hash
|
|
26
93
|
*/
|
|
27
94
|
function getSystemFingerprint(manifest) {
|
|
28
|
-
//
|
|
29
|
-
const
|
|
30
|
-
|
|
95
|
+
// 1. Business Logic Hash (The Calculations)
|
|
96
|
+
const sortedManifestHashes = manifest.map(c => c.hash).sort().join('|');
|
|
97
|
+
|
|
98
|
+
// 2. Infrastructure Hash (The System Code)
|
|
99
|
+
const infraHash = getInfrastructureHash();
|
|
100
|
+
|
|
101
|
+
// 3. Combine
|
|
102
|
+
return crypto.createHash('sha256')
|
|
103
|
+
.update(sortedManifestHashes + infraHash)
|
|
104
|
+
.digest('hex');
|
|
31
105
|
}
|
|
32
106
|
|
|
33
107
|
/**
|
|
@@ -1,7 +1,10 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* @fileoverview Manages code versioning, hashing, and legacy mappings.
|
|
3
|
+
* UPDATED: Includes Centralized Infrastructure Hashing to track system-level changes.
|
|
3
4
|
*/
|
|
4
5
|
const crypto = require('crypto');
|
|
6
|
+
const fs = require('fs');
|
|
7
|
+
const path = require('path');
|
|
5
8
|
|
|
6
9
|
// Legacy Keys Mapping (Ensures backward compatibility)
|
|
7
10
|
const LEGACY_MAPPING = {
|
|
@@ -24,6 +27,10 @@ const LEGACY_MAPPING = {
|
|
|
24
27
|
AdaptiveAnalytics: 'adaptive'
|
|
25
28
|
};
|
|
26
29
|
|
|
30
|
+
/**
|
|
31
|
+
* Generates a SHA-256 hash of a code string.
|
|
32
|
+
* Strips comments and whitespace for loose equality.
|
|
33
|
+
*/
|
|
27
34
|
function generateCodeHash(codeString) {
|
|
28
35
|
if (!codeString) return 'unknown';
|
|
29
36
|
let clean = codeString.replace(/\/\/.*$/gm, '');
|
|
@@ -32,4 +39,66 @@ function generateCodeHash(codeString) {
|
|
|
32
39
|
return crypto.createHash('sha256').update(clean).digest('hex');
|
|
33
40
|
}
|
|
34
41
|
|
|
35
|
-
|
|
42
|
+
// --- INFRASTRUCTURE HASHING (The "System Fingerprint") ---
|
|
43
|
+
|
|
44
|
+
const SYSTEM_ROOT = path.resolve(__dirname, '..');
|
|
45
|
+
const IGNORED_DIRS = new Set(['node_modules', '.git', '.idea', 'coverage', 'logs', 'tests', 'docs']);
|
|
46
|
+
const IGNORED_FILES = new Set(['package-lock.json', '.DS_Store', '.env', 'README.md']);
|
|
47
|
+
|
|
48
|
+
/**
 * Recursively collects hashable code files (.js/.json/.yaml) under `dir`.
 *
 * FIXED: uses Dirent entries (withFileTypes) instead of one statSync() call
 * per entry. Besides saving a syscall per file, this stops the walk from
 * following directory symlinks — the previous stat-based check followed them,
 * so a symlink cycle recursed forever (the ignore sets exist precisely
 * "to prevent noise or infinite loops").
 *
 * @param {string} dir - Directory to walk.
 * @param {string[]} [fileList] - Accumulator used by the recursion.
 * @returns {string[]} Paths of all matching files.
 */
function walkSync(dir, fileList = []) {
    const entries = fs.readdirSync(dir, { withFileTypes: true });
    for (const entry of entries) {
        if (IGNORED_FILES.has(entry.name)) continue;

        const filePath = path.join(dir, entry.name);
        if (entry.isDirectory()) {
            if (!IGNORED_DIRS.has(entry.name)) {
                walkSync(filePath, fileList);
            }
        } else if (entry.name.endsWith('.js') || entry.name.endsWith('.json') || entry.name.endsWith('.yaml')) {
            // Hash JS, JSON, and YAML (Workflows)
            fileList.push(filePath);
        }
    }
    return fileList;
}
|
|
67
|
+
|
|
68
|
+
/**
|
|
69
|
+
* Generates a hash representing the state of the entire computation-system codebase.
|
|
70
|
+
* This ensures that changes to infrastructure (like ContextFactory or Executors)
|
|
71
|
+
* trigger a re-run of calculations even if the calculation logic itself didn't change.
|
|
72
|
+
*/
|
|
73
|
+
/**
 * Generates a hash representing the state of the entire computation-system codebase.
 * This ensures that changes to infrastructure (like ContextFactory or Executors)
 * trigger a re-run of calculations even if the calculation logic itself didn't change.
 * Returns the sentinel 'infra_error' if the walk or any read fails.
 */
function getInfrastructureHash() {
    try {
        // Sorted path order keeps the digest deterministic across runs.
        const sortedPaths = walkSync(SYSTEM_ROOT).sort();

        const aggregate = crypto.createHash('sha256');
        for (const filePath of sortedPaths) {
            const raw = fs.readFileSync(filePath, 'utf8');
            const relativePath = path.relative(SYSTEM_ROOT, filePath);

            // JS files reuse the standard code-hash normalization for
            // consistency; JSON/YAML are reduced to non-whitespace characters.
            const normalized = filePath.endsWith('.js')
                ? generateCodeHash(raw)
                : raw.replace(/\s+/g, '');

            aggregate.update(`${relativePath}:${normalized}|`);
        }

        return aggregate.digest('hex');
    } catch (e) {
        console.warn(`[HashManager] ⚠️ Failed to generate infra hash: ${e.message}`);
        return 'infra_error';
    }
}
|
|
103
|
+
|
|
104
|
+
// Public API: the legacy key mapping plus the two hashing primitives.
module.exports = { LEGACY_MAPPING, generateCodeHash, getInfrastructureHash };
|