bulltrackers-module 1.0.768 → 1.0.769

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/functions/computation-system-v2/UserPortfolioMetrics.js +50 -0
  2. package/functions/computation-system-v2/computations/BehavioralAnomaly.js +557 -337
  3. package/functions/computation-system-v2/computations/GlobalAumPerAsset30D.js +103 -0
  4. package/functions/computation-system-v2/computations/PIDailyAssetAUM.js +134 -0
  5. package/functions/computation-system-v2/computations/PiFeatureVectors.js +227 -0
  6. package/functions/computation-system-v2/computations/PiRecommender.js +359 -0
  7. package/functions/computation-system-v2/computations/SignedInUserList.js +51 -0
  8. package/functions/computation-system-v2/computations/SignedInUserMirrorHistory.js +138 -0
  9. package/functions/computation-system-v2/computations/SignedInUserPIProfileMetrics.js +106 -0
  10. package/functions/computation-system-v2/computations/SignedInUserProfileMetrics.js +324 -0
  11. package/functions/computation-system-v2/config/bulltrackers.config.js +30 -128
  12. package/functions/computation-system-v2/core-api.js +17 -9
  13. package/functions/computation-system-v2/data_schema_reference.MD +108 -0
  14. package/functions/computation-system-v2/devtools/builder/builder.js +362 -0
  15. package/functions/computation-system-v2/devtools/builder/examples/user-metrics.yaml +26 -0
  16. package/functions/computation-system-v2/devtools/index.js +36 -0
  17. package/functions/computation-system-v2/devtools/shared/MockDataFactory.js +235 -0
  18. package/functions/computation-system-v2/devtools/shared/SchemaTemplates.js +475 -0
  19. package/functions/computation-system-v2/devtools/shared/SystemIntrospector.js +517 -0
  20. package/functions/computation-system-v2/devtools/shared/index.js +16 -0
  21. package/functions/computation-system-v2/devtools/simulation/DAGAnalyzer.js +243 -0
  22. package/functions/computation-system-v2/devtools/simulation/MockDataFetcher.js +306 -0
  23. package/functions/computation-system-v2/devtools/simulation/MockStorageManager.js +336 -0
  24. package/functions/computation-system-v2/devtools/simulation/SimulationEngine.js +525 -0
  25. package/functions/computation-system-v2/devtools/simulation/SimulationServer.js +581 -0
  26. package/functions/computation-system-v2/devtools/simulation/index.js +17 -0
  27. package/functions/computation-system-v2/devtools/simulation/simulate.js +324 -0
  28. package/functions/computation-system-v2/devtools/vscode-computation/package.json +90 -0
  29. package/functions/computation-system-v2/devtools/vscode-computation/snippets/computation.json +128 -0
  30. package/functions/computation-system-v2/devtools/vscode-computation/src/extension.ts +401 -0
  31. package/functions/computation-system-v2/devtools/vscode-computation/src/providers/codeActions.ts +152 -0
  32. package/functions/computation-system-v2/devtools/vscode-computation/src/providers/completions.ts +207 -0
  33. package/functions/computation-system-v2/devtools/vscode-computation/src/providers/diagnostics.ts +205 -0
  34. package/functions/computation-system-v2/devtools/vscode-computation/src/providers/hover.ts +205 -0
  35. package/functions/computation-system-v2/devtools/vscode-computation/tsconfig.json +22 -0
  36. package/functions/computation-system-v2/docs/HowToCreateComputations.MD +602 -0
  37. package/functions/computation-system-v2/framework/data/DataFetcher.js +250 -184
  38. package/functions/computation-system-v2/framework/data/MaterializedViewManager.js +84 -0
  39. package/functions/computation-system-v2/framework/data/QueryBuilder.js +38 -38
  40. package/functions/computation-system-v2/framework/execution/Orchestrator.js +215 -129
  41. package/functions/computation-system-v2/framework/scheduling/ScheduleValidator.js +17 -19
  42. package/functions/computation-system-v2/framework/storage/StateRepository.js +32 -2
  43. package/functions/computation-system-v2/framework/storage/StorageManager.js +105 -67
  44. package/functions/computation-system-v2/framework/testing/ComputationTester.js +12 -6
  45. package/functions/computation-system-v2/handlers/dispatcher.js +57 -29
  46. package/functions/computation-system-v2/legacy/PiAssetRecommender.js.old +115 -0
  47. package/functions/computation-system-v2/legacy/PiSimilarityMatrix.js +104 -0
  48. package/functions/computation-system-v2/legacy/PiSimilarityVector.js +71 -0
  49. package/functions/computation-system-v2/scripts/debug_aggregation.js +25 -0
  50. package/functions/computation-system-v2/scripts/test-invalidation-scenarios.js +234 -0
  51. package/package.json +1 -1
@@ -1,15 +1,455 @@
1
1
  /**
2
- * @fileoverview Behavioral Anomaly Detection (V2)
3
- * Detects significant deviations in investor behavior compared to their 60-day baseline.
4
- * * MATH: Uses Mahalanobis Distance to detect outliers in multi-dimensional space.
5
- * FEATURES:
6
- * 1. Concentration (HHI): Are they suddenly putting all eggs in one basket?
7
- * 2. Martingale (Loss Chasing): Are they increasing leverage after losses?
8
- * 3. Capacity Strain: Is their AUM growing faster than their copy count implies (risk of slippage)?
9
- * 4. Risk Score: Sudden spikes in risk.
2
+ * @fileoverview Behavioral Anomaly Detection V4.1 - Predictive Risk Intelligence
3
+ * * ARCHITECTURE:
4
+ * - Hybrid Math: Uses 6-Dim Core for Stability + 18-Dim Full Spectrum for Detail.
5
+ * - Cost Control: Strict 30-day lookback enforced (compatible with DAG constraints).
6
+ * - Logic Fix: "Primary Driver" uses Statistical Z-Scores, not raw values.
7
+ * - Robustness: Dynamic Z-Scoring removes dependency on hardcoded divisors.
10
8
  */
9
+
11
10
  const { Computation } = require('../framework');
12
11
 
12
// =============================================================================
// ADVANCED MATH KERNEL
// =============================================================================
const AdvancedMath = {
  /**
   * Shannon entropy (base 2) of a set of non-negative weights.
   * Returns 0 for an empty or all-zero distribution.
   * @param {number[]} distribution - Raw (unnormalized) weights.
   * @returns {number} Entropy in bits.
   */
  entropy: (distribution) => {
    const mass = distribution.reduce((acc, x) => acc + x, 0);
    if (mass === 0) return 0;
    return distribution
      .filter((x) => x > 0)
      .reduce((acc, x) => {
        const p = x / mass;
        return acc - p * Math.log2(p);
      }, 0);
  },

  /**
   * Herfindahl–Hirschman Index on the classic 0–10,000 scale.
   * Shares are normalized internally, so raw exposure values are fine.
   * @param {number[]} shares
   * @returns {number} HHI (10,000 = fully concentrated, 0 for empty input).
   */
  hhi: (shares) => {
    const total = shares.reduce((acc, s) => acc + s, 0);
    if (total === 0) return 0;
    const sumOfSquares = shares.reduce((acc, s) => {
      const weight = s / total;
      return acc + weight * weight;
    }, 0);
    return sumOfSquares * 10000;
  },

  /**
   * First difference per unit time. Returns 0 when timeDelta is 0
   * (guards a divide-by-zero rather than returning Infinity).
   */
  velocity: (current, previous, timeDelta = 1) =>
    timeDelta === 0 ? 0 : (current - previous) / timeDelta,

  /**
   * Second difference per unit time: the velocity of the velocity
   * over three consecutive samples.
   */
  acceleration: (current, previous, previousPrevious, timeDelta = 1) => {
    const vNew = AdvancedMath.velocity(current, previous, timeDelta);
    const vOld = AdvancedMath.velocity(previous, previousPrevious, timeDelta);
    return AdvancedMath.velocity(vNew, vOld, timeDelta);
  },

  /**
   * Fraction of historical samples strictly below `value`.
   * Returns 0.5 (uninformative midpoint) for an empty history.
   */
  percentileRank: (value, array) => {
    if (array.length === 0) return 0.5;
    const below = array.filter((x) => x < value).length;
    return below / array.length;
  },

  /**
   * Classifies the recent behavioral regime by the average per-dimension
   * volatility of the CORE features (indices 0-5).
   * @param {number[][]} recentVectors - At least 3 feature vectors.
   * @returns {'unknown'|'calm'|'active'|'stressed'}
   */
  detectRegime: (recentVectors) => {
    if (recentVectors.length < 3) return 'unknown';
    const CORE_DIMS = [0, 1, 2, 3, 4, 5];
    const sigmas = CORE_DIMS.map((dim) => {
      const series = recentVectors.map((vec) => vec[dim]);
      const mu = series.reduce((acc, x) => acc + x, 0) / series.length;
      // Population variance (divide by N), matching the rest of the kernel.
      const varSum = series.reduce((acc, x) => acc + (x - mu) * (x - mu), 0);
      return Math.sqrt(varSum / series.length);
    });
    const avgSigma = sigmas.reduce((acc, x) => acc + x, 0) / sigmas.length;

    if (avgSigma < 0.3) return 'calm';
    if (avgSigma < 0.8) return 'active';
    return 'stressed';
  }
};
79
+
80
// =============================================================================
// FEATURE ENGINEERING
// =============================================================================
const FeatureExtractor = {
  /**
   * Builds the 18-dimensional behavioral feature vector for a single day.
   * Indices 0-5 are the CORE (stability) features; 6-17 are the extended
   * full-spectrum features consumed by AnomalyEngine.
   *
   * NOTE(review): the `rankings` parameter is accepted but the extractor reads
   * `day.rankings` instead — confirm whether callers depend on that.
   *
   * @param {{portfolio: object, rankings?: object, date: string}} day - Today's aligned data.
   * @param {object|null} prevDay - Previous aligned day (for velocity terms).
   * @param {object|null} prevPrevDay - Two days back (for acceleration terms).
   * @param {object[]} historyBlob - Trade-history positions (see _extractLeverageProfile).
   * @param {object} rankings - Unused; kept for call-site compatibility.
   * @param {object} rules - Project rules object (portfolio position accessor).
   * @param {{sectorMap: Map, tickerMap: Map}} maps - Instrument→ticker→sector lookups.
   * @returns {{vector: number[], metadata: object}}
   */
  extract: (day, prevDay, prevPrevDay, historyBlob, rankings, rules, maps) => {
    const { portfolio } = day;
    const positions = rules.portfolio.extractPositions(portfolio) || [];
    const rankingsData = day.rankings || {};
    const { sectorMap, tickerMap } = maps;

    // Instrument id -> ticker -> sector, falling back to 'Unknown' at each hop.
    const resolveSector = (instrumentId) => {
      const symbol = tickerMap.get(String(instrumentId));
      if (!symbol) return 'Unknown';
      return sectorMap.get(symbol) || 'Unknown';
    };

    // 1. Sector concentration: HHI over invested % grouped by sector.
    const sectorExposure = {};
    let totalInvestedPct = 0;
    for (const pos of positions) {
      const sector = resolveSector(pos.InstrumentID);
      const invested = pos.Invested || 0;
      sectorExposure[sector] = (sectorExposure[sector] || 0) + invested;
      totalInvestedPct += invested;
    }
    const sectorHHI = AdvancedMath.hhi(Object.values(sectorExposure));

    // 2. Martingale/distress: large losing positions that remain heavily funded.
    //    Thresholds (-4 pnl, >4 invested) are in the same units the snapshot uses.
    let martingaleScore = 0;
    let martingaleCount = 0;
    for (const pos of positions) {
      const pnl = pos.NetProfit || 0;
      const invested = pos.Invested || 0;
      if (pnl < -4 && invested > 4) {
        martingaleScore += Math.abs(pnl) * invested;
        martingaleCount += 1;
      }
    }

    // 3. Leverage profile derived from the trade-history blob.
    const { avgLeverage, highLevCount } =
      FeatureExtractor._extractLeverageProfile(day.date, historyBlob);

    // 4-6. Risk score (defaults to 1), position count, total exposure.
    const riskScore = rankingsData.RiskScore || 1;
    const complexity = positions.length;
    const exposure = totalInvestedPct;

    // --- EXTENDED FEATURES (full spectrum) ---

    // 7. Shannon entropy of the position sizing.
    const positionSizes = positions.map((pos) => pos.Invested || 0);
    const portfolioEntropy = AdvancedMath.entropy(positionSizes);

    // 8. Drawdown severity.
    const drawdownSeverity = Math.abs(rankingsData.PeakToValley || 0);

    // 9. Distance of the win rate from the 50% line.
    const winRateDeviation = Math.abs((rankingsData.WinRatio || 50) - 50);

    // 10. Share of capital held by the three largest positions.
    const descendingSizes = [...positionSizes].sort((a, b) => b - a);
    const top3Share = descendingSizes.slice(0, 3).reduce((acc, v) => acc + v, 0);
    const positionSkewness = totalInvestedPct > 0 ? top3Share / totalInvestedPct : 0;

    // 11. Fraction of positions currently underwater.
    const losingCount = positions.filter((pos) => (pos.NetProfit || 0) < 0).length;
    const stressRatio = positions.length > 0 ? losingCount / positions.length : 0;

    // 12. Gap between realized and unrealized credit.
    const realizedCredit = portfolio?.CreditByRealizedEquity || 0;
    const unrealizedCredit = portfolio?.CreditByUnrealizedEquity || 0;
    const creditImbalance = Math.abs(realizedCredit - unrealizedCredit);

    // 13. High-leverage trade count relative to open positions.
    const highLevFrequency = highLevCount / Math.max(1, positions.length);

    // 14. Copier growth vs. baseline (0 when no baseline).
    const copiers = rankingsData.Copiers || 0;
    const baselineCopiers = rankingsData.BaseLineCopiers || copiers;
    const copierMomentum = baselineCopiers > 0 ? (copiers - baselineCopiers) / baselineCopiers : 0;

    // 15. AUM tier.
    const aumTier = rankingsData.AUMTier || 0;

    // 16. Day-over-day change in total exposure.
    let exposureVelocity = 0;
    if (prevDay && prevDay.portfolio) {
      const prevPositions = rules.portfolio.extractPositions(prevDay.portfolio) || [];
      const prevExposure = prevPositions.reduce((acc, pos) => acc + (pos.Invested || 0), 0);
      exposureVelocity = exposure - prevExposure;
    }

    // 17. Second derivative of the risk score over three consecutive days.
    let riskAcceleration = 0;
    if (prevDay && prevDay.rankings && prevPrevDay && prevPrevDay.rankings) {
      const r0 = prevPrevDay.rankings.RiskScore || 0;
      const r1 = prevDay.rankings.RiskScore || 0;
      riskAcceleration = AdvancedMath.acceleration(riskScore, r1, r0, 1);
    }

    // 18. Combined magnitude of behavioral change.
    const behavioralMomentum = Math.abs(exposureVelocity) + Math.abs(riskAcceleration);

    return {
      vector: [
        // CORE (0-5)
        sectorHHI,
        Math.log(martingaleScore + 1), // log-damped so one big loser can't dominate
        avgLeverage,
        riskScore,
        complexity,
        exposure,
        // EXTENDED (6-17)
        portfolioEntropy,
        drawdownSeverity,
        winRateDeviation,
        positionSkewness,
        stressRatio,
        creditImbalance,
        highLevFrequency,
        copierMomentum,
        aumTier,
        exposureVelocity,
        riskAcceleration,
        behavioralMomentum
      ],
      metadata: {
        martingaleCount,
        losingCount,
        totalPositions: positions.length,
        exposure,
        regime: 'unknown' // stamped later once the regime is detected
      }
    };
  },

  /**
   * Average leverage and high-leverage (>2x) trade count over the 90 days
   * ending at `dateStr`, computed from the trade-history blob.
   * NOTE(review): the 90-day window is wider than the config's 30-day
   * lookback claim — confirm this is intentional.
   * @returns {{avgLeverage: number, highLevCount: number}} avgLeverage defaults to 1 with no trades.
   */
  _extractLeverageProfile: (dateStr, historyBlob) => {
    const windowEnd = new Date(dateStr);
    const windowStart = new Date(windowEnd);
    windowStart.setDate(windowStart.getDate() - 90);

    let leverageSum = 0;
    let tradeCount = 0;
    let highLevCount = 0;

    for (const trade of historyBlob) {
      const closedAt = new Date(trade.CloseDateTime);
      if (closedAt <= windowEnd && closedAt >= windowStart) {
        const leverage = trade.Leverage || 1;
        leverageSum += leverage;
        tradeCount += 1;
        if (leverage > 2) highLevCount += 1;
      }
    }

    return {
      avgLeverage: tradeCount > 0 ? leverageSum / tradeCount : 1,
      highLevCount
    };
  }
};
248
+
249
// =============================================================================
// ANOMALY DETECTION ENGINE
// =============================================================================
const AnomalyEngine = {
  /**
   * Scores today's feature vector against the historical baseline.
   *
   * Pipeline:
   *   1. Z-score every dimension against the trailing history.
   *   2. Mahalanobis distance on the stable CORE dimensions (0-5).
   *   3. Percentile deviation across the full 18-dim spectrum.
   *   4. Regime-conditional percentile score.
   *   5. Velocity term from the change-rate dimensions (15-17).
   *
   * @param {{vector: number[]}} todayFeatures
   * @param {Array<{vector: number[], metadata: {regime: string}}>} historicalFeatures
   *        Must be non-empty (callers gate on >= 15 samples).
   * @param {string} regime - Current regime label for the conditional score.
   * @returns {{overall: number, components: object, zScores: number[]}}
   */
  score: (todayFeatures, historicalFeatures, regime) => {
    // 1. DYNAMIC STANDARDIZATION (Z-Scoring).
    // zVector is exported below: it is the "true statistical deviation"
    // the SemanticInterpreter uses to pick the primary driver.
    const { zVector, histZVectors } = AnomalyEngine._standardize(todayFeatures.vector, historicalFeatures);

    // 2. HYBRID MAHALANOBIS (core features only).
    // Slicing indices 0-5 keeps the covariance estimate stable on a
    // short 30-day lookback.
    const coreZVector = zVector.slice(0, 6);
    const coreHistZVectors = histZVectors.map(v => v.slice(0, 6));
    const mahalanobisScore = AnomalyEngine._mahalanobisDistance(coreZVector, coreHistZVectors);

    // 3. FULL SPECTRUM PERCENTILE (all dimensions).
    const percentileScore = AnomalyEngine._percentileScore(todayFeatures.vector, historicalFeatures);

    // 4. REGIME CONDITIONAL.
    const regimeScore = AnomalyEngine._regimeScore(todayFeatures, historicalFeatures, regime);

    // 5. VELOCITY: mean |z| of exposureVelocity / riskAcceleration /
    // behavioralMomentum. `?? 0` guards against shorter vectors (would
    // otherwise propagate NaN through the ensemble).
    const velocityScore =
      (Math.abs(zVector[15] ?? 0) + Math.abs(zVector[16] ?? 0) + Math.abs(zVector[17] ?? 0)) / 3;

    const ensembleScore =
      0.40 * mahalanobisScore +
      0.30 * percentileScore +
      0.15 * regimeScore +
      0.15 * velocityScore;

    return {
      overall: ensembleScore,
      components: {
        mahalanobis: mahalanobisScore,
        percentile: percentileScore,
        regime: regimeScore,
        velocity: velocityScore
      },
      // EXPORT Z-SCORES FOR INTERPRETER
      zScores: zVector
    };
  },

  /**
   * Z-scores today's vector and every historical vector against the
   * per-dimension mean/stddev of the history (population stddev, divide by N).
   * Zero stddev is clamped to 1 so constant features produce z = 0.
   * Assumes `history` is non-empty.
   * @returns {{zVector: number[], histZVectors: number[][]}}
   */
  _standardize: (todayVec, history) => {
    const dim = todayVec.length;
    const n = history.length;
    const means = new Array(dim).fill(0);
    const stdDevs = new Array(dim).fill(0);

    const vectors = history.map(h => h.vector);
    for (const v of vectors) for (let i = 0; i < dim; i++) means[i] += v[i];
    for (let i = 0; i < dim; i++) means[i] /= n;

    for (const v of vectors) for (let i = 0; i < dim; i++) stdDevs[i] += Math.pow(v[i] - means[i], 2);
    for (let i = 0; i < dim; i++) {
      stdDevs[i] = Math.sqrt(stdDevs[i] / n);
      if (stdDevs[i] === 0) stdDevs[i] = 1; // constant feature -> z = 0, not NaN
    }

    const zVector = todayVec.map((v, i) => (v - means[i]) / stdDevs[i]);
    const histZVectors = vectors.map(v => v.map((val, i) => (val - means[i]) / stdDevs[i]));

    return { zVector, histZVectors };
  },

  /**
   * Mahalanobis distance of an (already z-scored) vector from the origin of
   * its z-scored history. Returns 0 when the covariance is not invertible.
   * The means are zero by construction of the z-scoring.
   */
  _mahalanobisDistance: (todayZVec, histZVectors) => {
    const means = new Array(todayZVec.length).fill(0);
    const cov = AnomalyEngine._covariance(histZVectors, means);
    const invCov = AnomalyEngine._invert(cov);

    if (!invCov) return 0;

    let sum = 0;
    for (let i = 0; i < todayZVec.length; i++) {
      for (let j = 0; j < todayZVec.length; j++) {
        sum += todayZVec[i] * invCov[i][j] * todayZVec[j];
      }
    }
    // Numerical noise can make the quadratic form slightly negative.
    return Math.sqrt(Math.max(0, sum));
  },

  /**
   * Mean per-feature deviation from the historical median, expressed on a
   * 0..1 scale (0 = median-like, 1 = extreme on every feature).
   */
  _percentileScore: (todayVec, historicalFeatures) => {
    let totalDeviation = 0;
    const featureCount = todayVec.length;
    for (let i = 0; i < featureCount; i++) {
      const historicalValues = historicalFeatures.map(f => f.vector[i]);
      const percentile = AdvancedMath.percentileRank(todayVec[i], historicalValues);
      totalDeviation += Math.abs(percentile - 0.5) * 2;
    }
    return totalDeviation / featureCount;
  },

  /**
   * Percentile score restricted to history from the same regime.
   * Falls back to the full history when fewer than 5 same-regime samples exist.
   */
  _regimeScore: (todayFeatures, historicalFeatures, regime) => {
    const sameRegimeFeatures = historicalFeatures.filter(f => f.metadata.regime === regime);
    const comparisonSet = sameRegimeFeatures.length < 5 ? historicalFeatures : sameRegimeFeatures;
    return AnomalyEngine._percentileScore(todayFeatures.vector, comparisonSet);
  },

  /**
   * Sample covariance (divide by n-1) with ridge regularization (+1e-3 on the
   * diagonal) so the matrix stays invertible on short, correlated histories.
   * Assumes `vectors` has at least 2 rows.
   */
  _covariance: (vectors, means) => {
    const dim = vectors[0].length;
    const n = vectors.length;
    const matrix = Array(dim).fill(0).map(() => Array(dim).fill(0));
    for (const v of vectors) {
      for (let i = 0; i < dim; i++) {
        for (let j = 0; j < dim; j++) {
          matrix[i][j] += (v[i] - means[i]) * (v[j] - means[j]);
        }
      }
    }
    // Ridge Regularization
    return matrix.map((row, i) => row.map((val, j) => (val / (n - 1)) + (i === j ? 1e-3 : 0)));
  },

  /**
   * Matrix inverse via Gauss-Jordan elimination with partial pivoting.
   * Returns null when the matrix is singular (callers treat that as score 0).
   *
   * BUGFIX: the previous version clamped a near-zero pivot to 1e-9 in a local
   * variable without updating the matrix row, so the normalization divided by
   * 1e-9 while A[i][i] kept its original value — elimination then produced
   * garbage instead of failing. Partial pivoting + an explicit singularity
   * check restores correct behavior.
   */
  _invert: (M) => {
    try {
      const n = M.length;
      const A = M.map(row => [...row]);
      const I = M.map((_, i) => M.map((__, j) => (i === j ? 1 : 0)));

      for (let col = 0; col < n; col++) {
        // Partial pivoting: pick the row with the largest |entry| in this column.
        let pivotRow = col;
        for (let r = col + 1; r < n; r++) {
          if (Math.abs(A[r][col]) > Math.abs(A[pivotRow][col])) pivotRow = r;
        }
        if (Math.abs(A[pivotRow][col]) < 1e-12) return null; // singular
        if (pivotRow !== col) {
          [A[col], A[pivotRow]] = [A[pivotRow], A[col]];
          [I[col], I[pivotRow]] = [I[pivotRow], I[col]];
        }

        const pivot = A[col][col];
        for (let j = 0; j < n; j++) { A[col][j] /= pivot; I[col][j] /= pivot; }

        for (let r = 0; r < n; r++) {
          if (r === col) continue;
          const factor = A[r][col];
          if (factor === 0) continue;
          for (let j = 0; j < n; j++) { A[r][j] -= factor * A[col][j]; I[r][j] -= factor * I[col][j]; }
        }
      }
      return I;
    } catch (e) { return null; }
  }
};
382
+
383
// =============================================================================
// PREDICTIVE ENGINE & INTERPRETER
// =============================================================================
const PredictiveEngine = {
  /**
   * Forecasts 7-day escalation risk from the anomaly score via a logistic
   * transform centred at score 3, boosted by momentum / risk-acceleration.
   * @param {{vector: number[]}} todayFeatures - vector[16] = riskAcceleration, vector[17] = behavioralMomentum.
   * @param {Array} historicalFeatures - Only the length is used (confidence gate).
   * @param {number} anomalyScore - Overall ensemble score.
   * @returns {{probability7d: number, confidence: string, trajectory: string}}
   */
  forecastRisk: (todayFeatures, historicalFeatures, anomalyScore) => {
    const momentum = todayFeatures.vector[17];
    const riskAccel = todayFeatures.vector[16];

    // Logistic curve: score 3 -> 0.5 probability.
    let probability = 1 / (1 + Math.exp(-(anomalyScore - 3)));

    // Multiplicative boosts, capped at 0.99.
    if (momentum > 1.0) probability = Math.min(0.99, probability * 1.2);
    if (riskAccel > 1.0) probability = Math.min(0.99, probability * 1.15);

    return {
      probability7d: probability,
      confidence: historicalFeatures.length >= 25 ? 'high' : 'medium',
      trajectory: momentum > 0 ? 'escalating' : 'stable'
    };
  }
};
403
+
404
const SemanticInterpreter = {
  /**
   * Translates the numeric anomaly output into a human-readable summary.
   * The "primary driver" is the feature with the largest |z-score| — a
   * statistical ranking, not a raw-value one.
   * @param {{overall: number, zScores?: number[]}} scores - Output of AnomalyEngine.score.
   * @param {{vector: number[]}} features - Today's raw feature vector.
   * @param {{probability7d: number}} prediction - Output of PredictiveEngine.forecastRisk.
   * @param {string} regime - Detected regime label.
   * @param {string[]} featureNames - Display names aligned with the vector indices.
   * @returns {{description: string, severity: string, primaryDriver: string,
   *            driverValue: string, driverSigma: string, regime: string}}
   */
  interpret: (scores, features, prediction, regime, featureNames) => {
    // Z-scores are handed over by AnomalyEngine; default keeps us crash-free.
    const zScores = scores.zScores || [];

    // Locate the statistically most significant feature (largest |z|).
    let maxIdx = 0;
    let maxZ = 0;
    for (let i = 0; i < zScores.length; i++) {
      const magnitude = Math.abs(zScores[i]);
      if (magnitude > maxZ) {
        maxZ = magnitude;
        maxIdx = i;
      }
    }

    const primaryDriver = featureNames[maxIdx];
    const driverZ = zScores[maxIdx];
    const rawValue = features.vector[maxIdx];

    let severity = 'low';
    let description;
    if (scores.overall > 5.0) {
      severity = 'critical';
      description = `🚨 CRITICAL: Severe anomaly in ${regime} regime. `;
    } else if (scores.overall > 4.0) {
      severity = 'high';
      description = `⚠️ HIGH RISK: Significant deviation. `;
    } else if (scores.overall > 3.0) {
      severity = 'medium';
      description = `⚡ MODERATE: Unusual pattern. `;
    } else {
      description = `ℹ️ NOTICE: Minor shift. `;
    }

    const direction = driverZ > 0 ? "increased" : "decreased";
    description += `Driven by ${primaryDriver}. `;
    description += `Value ${direction} to ${rawValue.toFixed(2)} (${driverZ > 0 ? '+' : ''}${maxZ.toFixed(1)}σ). `;

    if (prediction.probability7d > 0.7) {
      description += `High escalation risk (${(prediction.probability7d * 100).toFixed(0)}%).`;
    }

    return {
      description,
      severity,
      primaryDriver,
      driverValue: rawValue.toFixed(2),
      driverSigma: (driverZ > 0 ? '+' : '-') + maxZ.toFixed(1) + 'σ',
      regime
    };
  }
};
+ };
449
+
450
+ // =============================================================================
451
+ // MAIN COMPUTATION
452
+ // =============================================================================
13
453
  class BehavioralAnomaly extends Computation {
14
454
 
15
455
  static getConfig() {
@@ -20,373 +460,153 @@ class BehavioralAnomaly extends Computation {
20
460
  isHistorical: true,
21
461
 
22
462
  requires: {
23
- // We need 60 days of history to build a statistical baseline
463
+ // COST CONTROL: 30-day limit
24
464
  'portfolio_snapshots': {
25
- lookback: 60,
465
+ lookback: 30,
26
466
  mandatory: true,
27
467
  fields: ['user_id', 'portfolio_data', 'date']
28
468
  },
29
469
  'pi_rankings': {
30
- lookback: 60,
31
- mandatory: true,
470
+ lookback: 30,
471
+ mandatory: true,
32
472
  fields: ['pi_id', 'rankings_data', 'date']
33
473
  },
34
474
  'trade_history_snapshots': {
35
- lookback: 60,
36
- mandatory: false, // Not all users have trade history every day
475
+ lookback: 0,
476
+ mandatory: false,
37
477
  fields: ['user_id', 'history_data', 'date']
38
- }
478
+ },
479
+ 'ticker_mappings': { mandatory: false, fields: ['instrument_id', 'ticker'] },
480
+ 'sector_mappings': { mandatory: false, fields: ['symbol', 'sector'] },
481
+ 'pi_master_list': { mandatory: false, fields: ['cid', 'username'] }
39
482
  },
40
483
 
41
484
  storage: {
42
485
  bigquery: true,
43
- firestore: {
44
- enabled: true,
45
- path: 'alerts/{date}/BehavioralAnomaly/{entityId}',
46
- merge: true
486
+ firestore: {
487
+ enabled: true,
488
+ path: 'alerts/{date}/BehavioralAnomaly/{entityId}',
489
+ merge: true
47
490
  }
48
491
  },
49
-
492
+
50
493
  userType: 'POPULAR_INVESTOR',
51
494
  alert: {
52
- id: 'behavioralAnomaly',
53
- frontendName: 'Behavioral Anomaly',
54
- description: 'Alert when a Popular Investor deviates significantly from their baseline behavior',
55
- messageTemplate: 'Behavioral Alert for {username}: {primaryDriver} Deviation ({driverSignificance}) detected.',
56
- severity: 'high',
57
- configKey: 'behavioralAnomaly',
58
- isDynamic: true,
59
- thresholds: [
60
- {
61
- key: 'anomalyScoreThreshold',
62
- type: 'number',
63
- label: 'Sensitivity Threshold',
64
- default: 3.5,
65
- min: 2.0, max: 10.0, step: 0.1
66
- }
67
- ],
68
- resultFields: {
69
- driver: 'primaryDriver',
70
- score: 'driverSignificance'
71
- }
495
+ id: 'behavioral_anomaly_v4',
496
+ frontendName: 'Behavioral Risk Intelligence',
497
+ severity: 'high',
498
+ isDynamic: true
72
499
  }
73
500
  };
74
501
  }
75
502
 
76
503
  async process(context) {
77
- const { data, entityId, date, rules } = context;
504
+ const { data, entityId, date, rules, references } = context;
78
505
 
79
- // =====================================================================
80
- // 1. DATA PREPARATION
81
- // =====================================================================
82
-
83
- // Helper: Safe Date String
84
- const toDateStr = (d) => {
85
- if (!d) return "";
86
- if (d.value) return d.value;
87
- return d instanceof Date ? d.toISOString().slice(0, 10) : String(d);
88
- };
506
+ // 1. Map Building
507
+ const tickerMap = new Map();
508
+ if (references.ticker_mappings) {
509
+ Object.values(references.ticker_mappings).forEach(r => tickerMap.set(String(r.instrument_id), r.ticker));
510
+ }
511
+ const sectorMap = new Map();
512
+ if (references.sector_mappings) {
513
+ Object.values(references.sector_mappings).forEach(r => sectorMap.set(r.symbol, r.sector));
514
+ }
515
+ const maps = { tickerMap, sectorMap };
89
516
 
90
- // Helper: Access Data Map or Array safely (V2 Pattern)
91
- const getEntityRows = (dataset) => {
517
+ // 2. Data Alignment
518
+ const toDateStr = (d) => d && d.value ? d.value : (d instanceof Date ? d.toISOString().slice(0, 10) : String(d));
519
+ const getRows = (dataset) => {
92
520
  if (!dataset) return [];
93
- if (dataset[entityId]) {
94
- return Array.isArray(dataset[entityId]) ? dataset[entityId] : [dataset[entityId]];
95
- }
96
- if (Array.isArray(dataset)) {
97
- return dataset.filter(r => String(r.pi_id || r.user_id || r.cid) === String(entityId));
98
- }
521
+ if (dataset[entityId]) return Array.isArray(dataset[entityId]) ? dataset[entityId] : [dataset[entityId]];
522
+ if (Array.isArray(dataset)) return dataset.filter(r => String(r.user_id || r.pi_id || r.cid) === String(entityId));
99
523
  return [];
100
524
  };
101
525
 
102
- // Fetch Data
103
- const portfolios = getEntityRows(data['portfolio_snapshots']);
104
- const rankings = getEntityRows(data['pi_rankings']);
105
- const history = getEntityRows(data['trade_history_snapshots']);
106
-
107
- // Index by Date for fast alignment
108
- // { "2026-01-28": row }
109
- const portMap = new Map(portfolios.map(r => [toDateStr(r.date), r]));
110
- const rankMap = new Map(rankings.map(r => [toDateStr(r.date), r]));
111
- const histMap = new Map(history.map(r => [toDateStr(r.date), r]));
112
-
113
- // Identify "Today" (Execution Date)
114
- const todayPort = portMap.get(date);
115
- const todayRank = rankMap.get(date);
116
- const todayHist = histMap.get(date);
117
-
118
- if (!todayPort || !todayRank) {
119
- // Cannot run anomaly detection without today's data
120
- return;
121
- }
122
-
123
- // =====================================================================
124
- // 2. FEATURE ENGINEERING
125
- // =====================================================================
126
-
127
- /**
128
- * 1. Concentration (HHI)
129
- * Sum of squared weights. High HHI = Low Diversification.
130
- */
131
- const calculateHHI = (portRow) => {
132
- const pData = rules.portfolio.extractPortfolioData(portRow);
133
- const positions = rules.portfolio.extractPositions(pData);
134
-
135
- if (!positions || positions.length === 0) return 0;
136
-
137
- let sumSquares = 0;
138
- let totalInvested = 0;
139
-
140
- positions.forEach(p => {
141
- // Invested is typically a %. If it's absolute $, we normalize later.
142
- // Assuming standard eToro data where Invested is a % (0-100 or 0-1)
143
- const val = rules.portfolio.getInvested(p) || 0;
144
- totalInvested += val;
145
- sumSquares += (val * val);
146
- });
147
-
148
- // Normalize if total > 0 (Standard HHI ranges 0 to 10,000)
149
- // If weights sum to 100, HHI is sum(w^2).
150
- return sumSquares;
151
- };
152
-
153
- /**
154
- * 2. Martingale Score
155
- * Checks if leverage increases after a loss (Loss Chasing).
156
- */
157
- const calculateMartingale = (histRow) => {
158
- const trades = rules.trades.extractTrades(histRow);
159
- if (!trades || trades.length < 2) return 0;
160
-
161
- // Sort by close date (Oldest -> Newest)
162
- trades.sort((a, b) => {
163
- const dA = rules.trades.getCloseDate(a);
164
- const dB = rules.trades.getCloseDate(b);
165
- return (dA || 0) - (dB || 0);
166
- });
167
-
168
- // Look at recent behavior (last 30 trades in this snapshot)
169
- const recent = trades.slice(-30);
170
- let lossEvents = 0;
171
- let martingaleResponses = 0;
172
-
173
- for (let i = 0; i < recent.length - 1; i++) {
174
- const current = recent[i];
175
- const next = recent[i+1];
176
-
177
- const profit = rules.trades.getNetProfit(current);
178
- if (profit < 0) {
179
- lossEvents++;
180
- const curLev = rules.trades.getLeverage(current) || 1;
181
- const nextLev = rules.trades.getLeverage(next) || 1;
182
-
183
- if (nextLev > curLev) {
184
- martingaleResponses++;
185
- }
186
- }
187
- }
188
-
189
- return lossEvents > 0 ? (martingaleResponses / lossEvents) : 0;
190
- };
191
-
192
- /**
193
- * 3. Capacity Strain
194
- * Copiers / AUM. If copiers grow but AUM doesn't, efficiency drops.
195
- */
196
- const calculateStrain = (rankRow) => {
197
- const rData = rules.rankings.extractRankingsData(rankRow);
198
- if (!rData) return 0;
199
-
200
- const copiers = rules.rankings.getCopiers(rData) || 0;
201
- // AUM Tier is usually an int (1-6). We need approximate value or raw AUM if available.
202
- // Using raw AUMValue if available in JSON, else estimate from tier.
203
- const aum = rData.AUMValue || (rules.rankings.getAUMTier(rData) * 10000) || 1;
204
-
205
- return (copiers / (aum / 1000)); // Normalized ratio
206
- };
207
-
208
- /**
209
- * 4. Risk Score
210
- */
211
- const getRisk = (rankRow) => {
212
- const rData = rules.rankings.extractRankingsData(rankRow);
213
- return rules.rankings.getRiskScore(rData) || 1;
214
- };
215
-
216
- // --- Build Vectors ---
217
- const getDailyVector = (pRow, rRow, hRow) => {
218
- if (!pRow || !rRow) return null;
219
- return [
220
- calculateHHI(pRow),
221
- hRow ? calculateMartingale(hRow) : 0,
222
- calculateStrain(rRow),
223
- getRisk(rRow)
224
- ];
225
- };
226
-
227
- // =====================================================================
228
- // 3. BASELINE CONSTRUCTION
229
- // =====================================================================
230
-
231
- const trainingVectors = [];
232
-
233
- // Loop through last 60 days (excluding today)
234
- const sortedDates = Array.from(portMap.keys()).sort();
526
+ const portfolios = getRows(data.portfolio_snapshots);
527
+ const rankings = getRows(data.pi_rankings);
528
+ const historyRows = getRows(data.trade_history_snapshots);
529
+ const currentHistoryBlob = historyRows.length > 0 ? (historyRows[0].history_data?.PublicHistoryPositions || []) : [];
530
+ const identityRows = getRows(data.pi_master_list);
531
+ const username = identityRows.length > 0 ? identityRows[0].username : "Unknown";
532
+
533
+ const dailyData = new Map();
534
+ portfolios.forEach(p => {
535
+ const d = toDateStr(p.date);
536
+ if (!dailyData.has(d)) dailyData.set(d, {});
537
+ dailyData.get(d).portfolio = p.portfolio_data;
538
+ dailyData.get(d).date = d;
539
+ });
540
+ rankings.forEach(r => {
541
+ const d = toDateStr(r.date);
542
+ if (dailyData.has(d)) dailyData.get(d).rankings = r.rankings_data;
543
+ });
544
+
545
+ if (!dailyData.has(date)) return;
546
+
547
+ // 3. Historical Extraction (30 days)
548
+ const historicalFeatures = [];
549
+ const lookbackDate = new Date(date);
550
+ lookbackDate.setDate(lookbackDate.getDate() - 30);
551
+
552
+ let prevDay = null;
553
+ let prevPrevDay = null;
235
554
 
236
- for (const d of sortedDates) {
237
- if (d === date) continue; // Don't include today in the baseline training
238
-
239
- const pRow = portMap.get(d);
240
- const rRow = rankMap.get(d); // Matches date?
241
- const hRow = histMap.get(d); // History available for this date?
242
-
243
- // We need intersection of Portfolio and Ranking to form a valid vector point
244
- if (pRow && rRow) {
245
- const vec = getDailyVector(pRow, rRow, hRow);
246
- if (vec) trainingVectors.push(vec);
555
+ for (let d = new Date(lookbackDate); d < new Date(date); d.setDate(d.getDate() + 1)) {
556
+ const dStr = d.toISOString().slice(0, 10);
557
+ const dayData = dailyData.get(dStr);
558
+
559
+ if (dayData && dayData.portfolio) {
560
+ const features = FeatureExtractor.extract(dayData, prevDay, prevPrevDay, currentHistoryBlob, dayData.rankings, rules, maps);
561
+ historicalFeatures.push(features);
562
+ prevPrevDay = prevDay;
563
+ prevDay = dayData;
247
564
  }
248
565
  }
249
566
 
250
- // Need minimum data to be statistically significant
251
- if (trainingVectors.length < 15) {
252
- this.setResult(entityId, {
253
- triggered: false,
254
- status: 'INSUFFICIENT_BASELINE',
255
- dataPoints: trainingVectors.length
256
- });
257
- return;
567
+ if (historicalFeatures.length < 15) {
568
+ this.setResult(entityId, { triggered: false, status: 'INSUFFICIENT_HISTORY' });
569
+ return;
258
570
  }
259
571
 
260
- // =====================================================================
261
- // 4. LINEAR ALGEBRA ENGINE
262
- // =====================================================================
263
-
264
- const MathLib = {
265
- // Mean of each column
266
- mean: (vectors) => {
267
- const dim = vectors[0].length;
268
- const means = new Array(dim).fill(0);
269
- for (const v of vectors) {
270
- for (let i = 0; i < dim; i++) means[i] += v[i];
271
- }
272
- return means.map(x => x / vectors.length);
273
- },
572
+ // 4. Today's Features
573
+ const todayData = dailyData.get(date);
574
+ const todayFeatures = FeatureExtractor.extract(todayData, prevDay, prevPrevDay, currentHistoryBlob, todayData.rankings, rules, maps);
274
575
 
275
- // Covariance Matrix
276
- covariance: (vectors, means) => {
277
- const dim = vectors[0].length;
278
- const n = vectors.length;
279
- const matrix = Array(dim).fill(0).map(() => Array(dim).fill(0));
280
-
281
- for (const v of vectors) {
282
- for (let i = 0; i < dim; i++) {
283
- for (let j = 0; j < dim; j++) {
284
- matrix[i][j] += (v[i] - means[i]) * (v[j] - means[j]);
285
- }
286
- }
287
- }
288
- return matrix.map(row => row.map(val => val / (n - 1))); // Sample Covariance
289
- },
290
-
291
- // Matrix Inversion (Gauss-Jordan)
292
- invert: (M) => {
293
- // Simplified for 4x4. Returns null if singular.
294
- try {
295
- const n = M.length;
296
- const A = M.map(row => [...row]); // Clone
297
- const I = M.map((row, i) => M.map((_, j) => (i === j ? 1 : 0))); // Identity
298
-
299
- for (let i = 0; i < n; i++) {
300
- let pivot = A[i][i];
301
- if (Math.abs(pivot) < 1e-8) return null; // Singular
302
-
303
- for (let j = 0; j < n; j++) {
304
- A[i][j] /= pivot;
305
- I[i][j] /= pivot;
306
- }
307
-
308
- for (let k = 0; k < n; k++) {
309
- if (k !== i) {
310
- const factor = A[k][i];
311
- for (let j = 0; j < n; j++) {
312
- A[k][j] -= factor * A[i][j];
313
- I[k][j] -= factor * I[i][j];
314
- }
315
- }
316
- }
317
- }
318
- return I;
319
- } catch (e) { return null; }
320
- },
321
-
322
- // Mahalanobis Distance
323
- distance: (v, means, invCov) => {
324
- const diff = v.map((val, i) => val - means[i]);
325
- const dim = diff.length;
326
- let sum = 0;
327
-
328
- for (let i = 0; i < dim; i++) {
329
- let temp = 0;
330
- for (let j = 0; j < dim; j++) {
331
- temp += diff[j] * invCov[j][i];
332
- }
333
- sum += temp * diff[i];
334
- }
335
- return Math.sqrt(Math.max(0, sum));
336
- }
337
- };
338
-
339
- // =====================================================================
340
- // 5. ANOMALY DETECTION
341
- // =====================================================================
342
-
343
- const means = MathLib.mean(trainingVectors);
344
- const covMatrix = MathLib.covariance(trainingVectors, means);
345
- const invCov = MathLib.invert(covMatrix);
346
-
347
- if (!invCov) {
348
- this.setResult(entityId, { triggered: false, status: 'SINGULAR_MATRIX_ERROR' });
349
- return;
350
- }
351
-
352
- const todayVector = getDailyVector(todayPort, todayRank, todayHist);
353
- const distance = MathLib.distance(todayVector, means, invCov);
354
-
355
- // Threshold from Alert Config (or default 3.5 sigma)
356
- const threshold = 3.5;
357
- const isAnomaly = distance > threshold;
358
-
359
- const featureNames = ['Concentration (HHI)', 'Martingale Behavior', 'Capacity Strain', 'Risk Score'];
576
+ // 5. Detection & Scoring
577
+ const recentVectors = historicalFeatures.slice(-5).map(f => f.vector);
578
+ const regime = AdvancedMath.detectRegime(recentVectors);
579
+ historicalFeatures.forEach(f => f.metadata.regime = regime);
360
580
 
361
- let primaryDriver = 'Unknown';
362
- let maxZ = 0;
581
+ const scores = AnomalyEngine.score(todayFeatures, historicalFeatures, regime);
582
+ const prediction = PredictiveEngine.forecastRisk(todayFeatures, historicalFeatures, scores.overall);
363
583
 
364
- // Determine *Why* it's an anomaly (Z-Score contribution)
365
- if (isAnomaly) {
366
- todayVector.forEach((val, i) => {
367
- const stdDev = Math.sqrt(covMatrix[i][i]);
368
- const z = stdDev > 0 ? Math.abs((val - means[i]) / stdDev) : 0;
369
- if (z > maxZ) {
370
- maxZ = z;
371
- primaryDriver = featureNames[i];
372
- }
373
- });
374
- }
584
+ const featureNames = [
585
+ 'Sector HHI', 'Martingale', 'Avg Leverage', 'Risk Score', 'Complexity', 'Exposure',
586
+ 'Entropy', 'Drawdown', 'Win Rate Dev', 'Skewness', 'Stress Ratio', 'Credit Imbal',
587
+ 'High Lev Freq', 'Copier Mom', 'AUM Tier', 'Exp Velocity', 'Risk Accel', 'Behav Mom'
588
+ ];
375
589
 
376
- const result = {
377
- triggered: isAnomaly,
378
- anomalyScore: Number(distance.toFixed(2)),
379
- primaryDriver: isAnomaly ? primaryDriver : null,
380
- driverSignificance: isAnomaly ? `${maxZ.toFixed(1)}σ` : null,
381
- baselineDays: trainingVectors.length,
382
- username: rules.rankings.extractRankingsData(todayRank)?.UserName || "Unknown",
383
- _debug: {
384
- currentVector: todayVector,
385
- baselineMeans: means
386
- }
387
- };
388
-
389
- this.setResult(entityId, result);
590
+ const interpretation = SemanticInterpreter.interpret(scores, todayFeatures, prediction, regime, featureNames);
591
+
592
+ const THRESHOLD = 3.5;
593
+ const triggered = scores.overall > THRESHOLD || prediction.probability7d > 0.75;
594
+
595
+ this.setResult(entityId, {
596
+ triggered,
597
+ score: Number(scores.overall.toFixed(2)),
598
+ severity: interpretation.severity,
599
+ regime,
600
+ description: interpretation.description,
601
+ username,
602
+ driver: interpretation.primaryDriver,
603
+ driverValue: interpretation.driverValue,
604
+ prediction: {
605
+ probability: (prediction.probability7d * 100).toFixed(1) + '%',
606
+ trajectory: prediction.trajectory
607
+ },
608
+ features: todayFeatures.vector.map(v => Number(v.toFixed(3)))
609
+ });
390
610
  }
391
611
  }
392
612