aiden-shared-calculations-unified 1.0.86 → 1.0.88
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/calculations/capitulation/asset-volatility-estimator.js +96 -0
- package/calculations/capitulation/retail-capitulation-risk-forecast.js +173 -0
- package/calculations/core/asset-cost-basis-profile.js +127 -0
- package/calculations/core/asset-pnl-status.js +36 -106
- package/calculations/core/asset-position-size.js +40 -91
- package/calculations/core/average-daily-pnl-all-users.js +18 -57
- package/calculations/core/average-daily-pnl-per-sector.js +41 -88
- package/calculations/core/average-daily-pnl-per-stock.js +38 -91
- package/calculations/core/average-daily-position-pnl.js +19 -49
- package/calculations/core/holding-duration-per-asset.js +25 -127
- package/calculations/core/instrument-price-change-1d.js +30 -49
- package/calculations/core/instrument-price-momentum-20d.js +50 -60
- package/calculations/core/long-position-per-stock.js +39 -68
- package/calculations/core/overall-holding-duration.js +16 -87
- package/calculations/core/overall-profitability-ratio.js +11 -40
- package/calculations/core/platform-buy-sell-sentiment.js +41 -124
- package/calculations/core/platform-daily-bought-vs-sold-count.js +41 -99
- package/calculations/core/platform-daily-ownership-delta.js +68 -126
- package/calculations/core/platform-ownership-per-sector.js +45 -96
- package/calculations/core/platform-total-positions-held.js +20 -80
- package/calculations/core/pnl-distribution-per-stock.js +29 -135
- package/calculations/core/price-metrics.js +95 -206
- package/calculations/core/profitability-ratio-per-sector.js +34 -79
- package/calculations/core/profitability-ratio-per-stock.js +32 -88
- package/calculations/core/profitability-skew-per-stock.js +41 -94
- package/calculations/core/profitable-and-unprofitable-status.js +44 -76
- package/calculations/core/sentiment-per-stock.js +24 -77
- package/calculations/core/short-position-per-stock.js +35 -43
- package/calculations/core/social-activity-aggregation.js +26 -49
- package/calculations/core/social-asset-posts-trend.js +38 -94
- package/calculations/core/social-event-correlation.js +26 -93
- package/calculations/core/social-sentiment-aggregation.js +20 -44
- package/calculations/core/social-top-mentioned-words.js +35 -87
- package/calculations/core/social-topic-interest-evolution.js +22 -111
- package/calculations/core/social-topic-sentiment-matrix.js +38 -104
- package/calculations/core/social-word-mentions-trend.js +27 -104
- package/calculations/core/speculator-asset-sentiment.js +31 -72
- package/calculations/core/speculator-danger-zone.js +48 -84
- package/calculations/core/speculator-distance-to-stop-loss-per-leverage.js +20 -52
- package/calculations/core/speculator-distance-to-tp-per-leverage.js +23 -53
- package/calculations/core/speculator-entry-distance-to-sl-per-leverage.js +20 -50
- package/calculations/core/speculator-entry-distance-to-tp-per-leverage.js +23 -50
- package/calculations/core/speculator-leverage-per-asset.js +25 -64
- package/calculations/core/speculator-leverage-per-sector.js +27 -63
- package/calculations/core/speculator-risk-reward-ratio-per-asset.js +24 -53
- package/calculations/core/speculator-stop-loss-distance-by-sector-short-long-breakdown.js +55 -68
- package/calculations/core/speculator-stop-loss-distance-by-ticker-short-long-breakdown.js +54 -71
- package/calculations/core/speculator-stop-loss-per-asset.js +19 -44
- package/calculations/core/speculator-take-profit-per-asset.js +20 -57
- package/calculations/core/speculator-tsl-per-asset.js +17 -56
- package/calculations/core/test..js +0 -0
- package/calculations/core/total-long-figures.js +16 -31
- package/calculations/core/total-long-per-sector.js +39 -61
- package/calculations/core/total-short-figures.js +13 -32
- package/calculations/core/total-short-per-sector.js +39 -61
- package/calculations/core/users-processed.js +11 -46
- package/calculations/gauss/cohort-capital-flow.js +54 -173
- package/calculations/gauss/cohort-definer.js +77 -163
- package/calculations/gauss/daily-dna-filter.js +29 -83
- package/calculations/gauss/gauss-divergence-signal.js +22 -109
- package/calculations/gem/cohort-momentum-state.js +27 -72
- package/calculations/gem/cohort-skill-definition.js +36 -52
- package/calculations/gem/platform-conviction-divergence.js +18 -60
- package/calculations/gem/quant-skill-alpha-signal.js +25 -98
- package/calculations/gem/skilled-cohort-flow.js +67 -175
- package/calculations/gem/skilled-unskilled-divergence.js +18 -73
- package/calculations/gem/unskilled-cohort-flow.js +64 -172
- package/calculations/ghost-book/cost-basis-density.js +79 -0
- package/calculations/ghost-book/liquidity-vacuum.js +52 -0
- package/calculations/ghost-book/retail-gamma-exposure.js +86 -0
- package/calculations/helix/helix-contrarian-signal.js +20 -114
- package/calculations/helix/herd-consensus-score.js +42 -124
- package/calculations/helix/winner-loser-flow.js +36 -118
- package/calculations/predicative-alpha/cognitive-dissonance.js +113 -0
- package/calculations/predicative-alpha/diamond-hand-fracture.js +90 -0
- package/calculations/predicative-alpha/mimetic-latency.js +124 -0
- package/calculations/pyro/risk-appetite-index.js +33 -74
- package/calculations/pyro/squeeze-potential.js +30 -87
- package/calculations/pyro/volatility-signal.js +33 -78
- package/package.json +1 -1
package/calculations/predicative-alpha/cognitive-dissonance.js (new file)
@@ -0,0 +1,113 @@
+/**
+ * @fileoverview Cognitive Dissonance Arbitrage (CDA) v2.2
+ * REFACTORED: Adheres to System Architecture and Schema v1.
+ */
+class CognitiveDissonance {
+  constructor() {
+    this.cdaResults = {};
+    this.alpha = 2 / (20 + 1); // EMA Alpha
+  }
+
+  static getMetadata() {
+    return {
+      type: 'meta', // Runs after standard calculations
+      rootDataDependencies: [],
+      isHistorical: true, // Requires t-1 state
+      userType: 'aggregate',
+      category: 'predictive_alpha'
+    };
+  }
+
+  static getDependencies() {
+    return ['social-topic-sentiment-matrix', 'skilled-cohort-flow', 'instrument-price-change-1d'];
+  }
+
+  static getSchema() {
+    // Schema remains strictly compliant with user definition
+    const metricSchema = {
+      "type": "object",
+      "properties": {
+        "cda_score": { "type": "number" },
+        "regime": { "type": "string" },
+        "z_sentiment": { "type": "number" },
+        "z_flow": { "type": "number" },
+        "price_confirmation": { "type": "boolean" },
+        "_state": { "type": "object" } // Opaque state object
+      }
+    };
+    return { "type": "object", "patternProperties": { "^.*$": metricSchema } };
+  }
+
+  process(context) {
+    const { computed, previousComputed, math } = context;
+    const { signals: SignalPrimitives, TimeSeries } = math;
+
+    const tickers = SignalPrimitives.getUnionKeys(computed, CognitiveDissonance.getDependencies());
+
+    for (const ticker of tickers) {
+      // 1. Get Metrics (Safe Access)
+      // MAP: 'social-topic-sentiment-matrix' uses 'net_sentiment' NOT 'sentiment_score'
+      const rawSentiment = SignalPrimitives.getMetric(computed, 'social-topic-sentiment-matrix', ticker, 'net_sentiment', 0);
+      const rawFlow = SignalPrimitives.getMetric(computed, 'skilled-cohort-flow', ticker, 'net_flow_pct', 0);
+      const priceChange = SignalPrimitives.getMetric(computed, 'instrument-price-change-1d', ticker, 'change_1d_pct', 0); // Map to correct field
+
+      // 2. Get Previous State
+      const prevResult = SignalPrimitives.getPreviousState(previousComputed, 'cognitive-dissonance', ticker);
+      const prevState = prevResult ? prevResult._state : { sent_mean: 0, sent_var: 1, flow_mean: 0, flow_var: 1 };
+
+      // 3. Update Statistics (Math Layer)
+      const sentStats = TimeSeries.updateEMAState(rawSentiment, { mean: prevState.sent_mean, variance: prevState.sent_var }, this.alpha);
+      const flowStats = TimeSeries.updateEMAState(rawFlow, { mean: prevState.flow_mean, variance: prevState.flow_var }, this.alpha);
+
+      const sentStdDev = Math.sqrt(sentStats.variance);
+      const flowStdDev = Math.sqrt(flowStats.variance);
+
+      // 4. Compute Z-Scores
+      const zSentiment = (sentStdDev > 0.001) ? (rawSentiment - sentStats.mean) / sentStdDev : 0;
+      const zFlow = (flowStdDev > 0.001) ? (rawFlow - flowStats.mean) / flowStdDev : 0;
+
+      // 5. Logic (Voice vs Hands)
+      const interaction = zSentiment * zFlow;
+      let cdaScore = 0;
+      let regime = "NEUTRAL";
+      let priceConfirmation = false;
+
+      const disagree = (Math.sign(zSentiment) !== Math.sign(zFlow));
+      const loudVoice = Math.abs(zSentiment) > 1.65;
+      const activeHands = Math.abs(zFlow) > 0.5;
+
+      if (disagree && loudVoice && activeHands) {
+        cdaScore = -interaction; // Positive score = Bearish Dissonance (Euphoria + Selling)
+
+        const priceAgreesWithSentiment = (Math.sign(priceChange) === Math.sign(zSentiment));
+        const priceIsFlat = Math.abs(priceChange) < 0.5;
+
+        if (priceAgreesWithSentiment && !priceIsFlat) {
+          priceConfirmation = true;
+          regime = "CONVERGENT";
+          cdaScore = 0;
+        } else {
+          regime = (zSentiment > 0) ? "BEARISH_DISSONANCE" : "BULLISH_DISSONANCE";
+        }
+      } else if (!disagree && loudVoice) {
+        regime = "CONVERGENT";
+      }
+
+      this.cdaResults[ticker] = {
+        cda_score: Number(cdaScore.toFixed(4)),
+        regime: regime,
+        z_sentiment: Number(zSentiment.toFixed(4)),
+        z_flow: Number(zFlow.toFixed(4)),
+        price_confirmation: priceConfirmation,
+        _state: {
+          sent_mean: sentStats.mean, sent_var: sentStats.variance,
+          flow_mean: flowStats.mean, flow_var: flowStats.variance
+        }
+      };
+    }
+  }
+
+  getResult() { return this.cdaResults; }
+  reset() { this.cdaResults = {}; }
+}
+module.exports = CognitiveDissonance;
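Steps 3 and 4 of the CDA logic hinge on `TimeSeries.updateEMAState`, which is not included in this diff. The sketch below is a minimal stand-in, assuming that helper implements the standard exponentially weighted mean/variance recursion; the series values are made up and only illustrate how the z-score compared against the 1.65 "loud voice" threshold falls out of that state.

```js
// Hypothetical stand-in for TimeSeries.updateEMAState (the real helper ships
// elsewhere in the package). Assumes the usual EW mean/variance recursion.
function updateEMAState(value, state, alpha) {
  const diff = value - state.mean;                            // deviation from the running mean
  const incr = alpha * diff;                                  // smoothed correction
  return {
    mean: state.mean + incr,                                  // updated EMA mean
    variance: (1 - alpha) * (state.variance + diff * incr)    // updated EMA variance
  };
}

// Mirror of steps 3-4 in CognitiveDissonance.process(), on a made-up sentiment series.
const alpha = 2 / (20 + 1);
let state = { mean: 0, variance: 1 };
for (const x of [0.05, 0.1, -0.02, 0.08, 0.85]) {
  state = updateEMAState(x, state, alpha);
}
const latest = 0.85;
const stdDev = Math.sqrt(state.variance);
const zSentiment = stdDev > 0.001 ? (latest - state.mean) / stdDev : 0;
console.log(zSentiment.toFixed(4)); // compare against the 1.65 "loud voice" threshold used above
```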
package/calculations/predicative-alpha/diamond-hand-fracture.js (new file)
@@ -0,0 +1,90 @@
+/**
+ * @fileoverview Diamond Hand Fracture Coefficient (DHFC) v2.2
+ * REFACTORED: Adheres to System Architecture.
+ */
+class DiamondHandFracture {
+  constructor() { this.fractureResults = {}; }
+
+  static getMetadata() {
+    return {
+      type: 'meta',
+      rootDataDependencies: [],
+      isHistorical: true,
+      userType: 'aggregate',
+      category: 'predictive_alpha'
+    };
+  }
+
+  static getDependencies() {
+    return ['holding-duration-per-asset', 'average-daily-pnl-per-stock'];
+  }
+
+  static getSchema() {
+    const metricSchema = {
+      "type": "object",
+      "properties": {
+        "fracture_velocity": { "type": "number" },
+        "capitulation_score": { "type": "number" },
+        "regime": { "type": "string" },
+        "expected_duration_hours": { "type": "number" },
+        "actual_duration_hours": { "type": "number" }
+      }
+    };
+    return { "type": "object", "patternProperties": { "^.*$": metricSchema } };
+  }
+
+  process(context) {
+    const { computed, previousComputed, math } = context;
+    const { signals: SignalPrimitives } = math; // fixed?
+
+    const tickers = SignalPrimitives.getUnionKeys(computed, DiamondHandFracture.getDependencies());
+
+    for (const ticker of tickers) {
+      // 1. Current State
+      const H_t = SignalPrimitives.getMetric(computed, 'holding-duration-per-asset', ticker, 'avg_duration_hours', 0);
+      const OI_t = SignalPrimitives.getMetric(computed, 'holding-duration-per-asset', ticker, 'count', 0);
+      const avgPnl = SignalPrimitives.getMetric(computed, 'average-daily-pnl-per-stock', ticker, 'avg_daily_pnl_pct', 0);
+
+      // 2. Previous State (T-1)
+      // Note: We fetch 'holding-duration-per-asset' from *previous* computed results
+      const prevData = SignalPrimitives.getPreviousState(previousComputed, 'holding-duration-per-asset', ticker);
+      const H_prev = prevData ? (prevData.avg_duration_hours || 0) : 0;
+      const OI_prev = prevData ? (prevData.count || 0) : 0;
+
+      // Init Check
+      if (H_prev === 0 || OI_prev === 0) {
+        this.fractureResults[ticker] = { fracture_velocity: 0, capitulation_score: 0, regime: "STABLE", expected_duration_hours: H_t, actual_duration_hours: H_t };
+        continue;
+      }
+
+      // 3. Logic: Inventory Dilution
+      let dilutionFactor = (OI_t > 0) ? (OI_prev / OI_t) : 1.0;
+      if (dilutionFactor > 2.0) dilutionFactor = 1.0; // Clamp extreme dilution
+
+      const H_expected = H_prev * dilutionFactor;
+      const phi = H_expected - H_t;
+      const phi_normalized = (H_prev > 0) ? (phi / H_prev) * 100 : 0;
+
+      // 4. Capitulation Score
+      const painFactor = (avgPnl < 0) ? Math.abs(avgPnl) : 0;
+      const capitulationScore = phi_normalized * painFactor;
+
+      let regime = "STABLE";
+      if (phi_normalized > 5.0) regime = "FRACTURE";
+      if (capitulationScore > 20.0) regime = "CAPITULATION";
+      if (phi_normalized < -5.0) regime = "ACCUMULATION";
+
+      this.fractureResults[ticker] = {
+        fracture_velocity: Number(phi_normalized.toFixed(4)),
+        capitulation_score: Number(capitulationScore.toFixed(4)),
+        regime: regime,
+        expected_duration_hours: Number(H_expected.toFixed(2)),
+        actual_duration_hours: Number(H_t.toFixed(2))
+      };
+    }
+  }
+
+  getResult() { return this.fractureResults; }
+  reset() { this.fractureResults = {}; }
+}
+module.exports = DiamondHandFracture;
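The fracture logic above compares the holding duration you would expect from inventory dilution alone against the duration actually observed, then scales the gap by unrealized pain. A worked example with invented numbers (not taken from the package) shows how the regime labels fall out:

```js
// Yesterday: average holding duration 100h across 1,000 open positions.
// Today: 1,250 positions (new buyers dilute the average), measured avg duration 70h,
// and the ticker's average daily PnL is -3.2%.
const H_prev = 100, OI_prev = 1000;
const H_t = 70, OI_t = 1250, avgPnl = -3.2;

let dilutionFactor = OI_t > 0 ? OI_prev / OI_t : 1.0;       // 0.8
if (dilutionFactor > 2.0) dilutionFactor = 1.0;             // clamp, as in process()

const H_expected = H_prev * dilutionFactor;                 // 80h explained by dilution alone
const phi_normalized = ((H_expected - H_t) / H_prev) * 100; // 10: excess shrinkage in percent
const painFactor = avgPnl < 0 ? Math.abs(avgPnl) : 0;       // 3.2
const capitulationScore = phi_normalized * painFactor;      // 32

let regime = "STABLE";
if (phi_normalized > 5.0) regime = "FRACTURE";
if (capitulationScore > 20.0) regime = "CAPITULATION";
if (phi_normalized < -5.0) regime = "ACCUMULATION";

console.log({ phi_normalized, capitulationScore, regime });
// { phi_normalized: 10, capitulationScore: 32, regime: 'CAPITULATION' }
```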
package/calculations/predicative-alpha/mimetic-latency.js (new file)
@@ -0,0 +1,124 @@
+/**
+ * @fileoverview Mimetic Latency Oscillator (MLO) v2.2
+ * REFACTORED: Offloaded math to primitives, corrected dependencies. TODO - HOW IS THIS GOING TO WORK GIVEN IT REQUIRES ITS OWN YESTERDAY DATA, BUT IT WILL BACKFILL IMMEDIATELY ON ITS FIRST PASS, AND THUS NOT FIND ANY YESTERDAY DATA? CONFUSED.
+ */
+class MimeticLatencyOscillator {
+  constructor() {
+    this.mloResults = {};
+    this.windowSize = 30;
+    this.maxLag = 10;
+  }
+
+  static getMetadata() {
+    return {
+      type: 'meta',
+      rootDataDependencies: [],
+      isHistorical: true,
+      userType: 'aggregate',
+      category: 'predictive_alpha'
+    };
+  }
+
+  static getDependencies() {
+    return ['skilled-cohort-flow', 'herd-consensus-score'];
+  }
+
+  static getSchema() {
+    const metricSchema = {
+      "type": "object",
+      "properties": {
+        "lag_days": { "type": "integer" },
+        "correlation_strength": { "type": "number" },
+        "regime": { "type": "string" },
+        "_state": { "type": "object" }
+      }
+    };
+    return { "type": "object", "patternProperties": { "^.*$": metricSchema } };
+  }
+
+  process(context) {
+    const { computed, previousComputed, math } = context;
+    const { signals: SignalPrimitives, TimeSeries } = math;
+
+    const tickers = SignalPrimitives.getUnionKeys(computed, MimeticLatencyOscillator.getDependencies());
+
+    for (const ticker of tickers) {
+      // 1. Inputs
+      // 'skilled-cohort-flow' -> net_flow_pct
+      const rawFlow = SignalPrimitives.getMetric(computed, 'skilled-cohort-flow', ticker, 'net_flow_pct', 0);
+      // 'herd-consensus-score' -> herd_conviction_score
+      const rawHerd = SignalPrimitives.getMetric(computed, 'herd-consensus-score', ticker, 'herd_conviction_score', 0);
+
+      // 2. Restore State
+      const prevResult = SignalPrimitives.getPreviousState(previousComputed, 'mimetic-latency', ticker);
+      const prevState = prevResult ? prevResult._state : { flow_buffer: [], herd_buffer: [], last_flow: 0, last_herd: 0 };
+
+      const prevFlow = (prevState.last_flow !== undefined) ? prevState.last_flow : 0;
+      const prevHerd = (prevState.last_herd !== undefined) ? prevState.last_herd : 0;
+
+      // 3. Calculate Detrended Delta
+      const flowDelta = rawFlow - prevFlow;
+      const herdDelta = rawHerd - prevHerd;
+
+      // 4. Update Buffers
+      let flowBuffer = [...(prevState.flow_buffer || [])];
+      let herdBuffer = [...(prevState.herd_buffer || [])];
+
+      flowBuffer.push(flowDelta);
+      herdBuffer.push(herdDelta);
+
+      if (flowBuffer.length > this.windowSize) flowBuffer.shift();
+      if (herdBuffer.length > this.windowSize) herdBuffer.shift();
+
+      // 5. Lagged Cross-Correlation
+      let maxCorr = -1.0;
+      let bestLag = 0;
+
+      if (flowBuffer.length >= 15) {
+        for (let k = 0; k <= this.maxLag; k++) {
+          // Check if Flow[t-k] predicts Herd[t]
+          // Slice Flow: 0 to End-k
+          // Slice Herd: k to End
+          const len = flowBuffer.length;
+          if (len - k < 5) continue; // Min sample check
+
+          const slicedFlow = flowBuffer.slice(0, len - k);
+          const slicedHerd = herdBuffer.slice(k, len);
+
+          const r = TimeSeries.pearsonCorrelation(slicedFlow, slicedHerd);
+
+          if (r > maxCorr) {
+            maxCorr = r;
+            bestLag = k;
+          }
+        }
+      }
+
+      // 6. Regime
+      let regime = "NO_SIGNAL";
+      if (maxCorr > 0.3) {
+        if (bestLag >= 3) regime = "EARLY_ALPHA";
+        else if (bestLag >= 1) regime = "MARKUP";
+        else regime = "FOMO_TRAP";
+      } else {
+        regime = "DECOUPLING";
+      }
+
+      this.mloResults[ticker] = {
+        lag_days: bestLag,
+        correlation_strength: Number(maxCorr.toFixed(4)),
+        regime: regime,
+        _state: {
+          flow_buffer: flowBuffer,
+          herd_buffer: herdBuffer,
+          last_flow: rawFlow,
+          last_herd: rawHerd
+        }
+      };
+    }
+  }
+
+  getResult() { return this.mloResults; }
+  reset() { this.mloResults = {}; }
+}
+module.exports = MimeticLatencyOscillator;
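The lag scan above depends on `TimeSeries.pearsonCorrelation`, which is also not part of this diff. Below is a minimal sketch that assumes it is a plain sample Pearson correlation, driven by the same slice-and-scan loop on a fabricated pair of delta series in which the herd series trails the flow series by one step; once the buffer reaches the 15-observation minimum used in `process()`, the scan recovers that one-day lag.

```js
// Hypothetical stand-in for TimeSeries.pearsonCorrelation: plain sample Pearson r
// over two equal-length arrays (returns 0 when a variance is zero or n < 2).
function pearsonCorrelation(xs, ys) {
  const n = Math.min(xs.length, ys.length);
  if (n < 2) return 0;
  const mean = (a) => a.reduce((s, v) => s + v, 0) / a.length;
  const mx = mean(xs.slice(0, n));
  const my = mean(ys.slice(0, n));
  let num = 0, dx = 0, dy = 0;
  for (let i = 0; i < n; i++) {
    const a = xs[i] - mx, b = ys[i] - my;
    num += a * b; dx += a * a; dy += b * b;
  }
  return dx > 0 && dy > 0 ? num / Math.sqrt(dx * dy) : 0;
}

// Fabricated delta buffers: the herd series is the flow series shifted by one day.
const flowDeltas = [1, 3, 2, 5, 4, 6, 5, 7, 6, 8, 7, 9, 8, 10, 9];
const herdDeltas = [0, 1, 3, 2, 5, 4, 6, 5, 7, 6, 8, 7, 9, 8, 10];

// Same scan shape as step 5 of MimeticLatencyOscillator.process().
const maxLag = 10;
let maxCorr = -1.0, bestLag = 0;
if (flowDeltas.length >= 15) {
  for (let k = 0; k <= maxLag; k++) {
    const len = flowDeltas.length;
    if (len - k < 5) continue; // minimum overlap check
    const r = pearsonCorrelation(flowDeltas.slice(0, len - k), herdDeltas.slice(k, len));
    if (r > maxCorr) { maxCorr = r; bestLag = k; }
  }
}
console.log({ bestLag, maxCorr: Number(maxCorr.toFixed(4)) }); // { bestLag: 1, maxCorr: 1 }
```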
package/calculations/pyro/risk-appetite-index.js
@@ -1,10 +1,7 @@
 /**
  * @fileoverview PYRO Product Line (Pass 2)
- *
- * - Updated 'process' signature to the 7-arg standard to get context.
- * - Logic is already schema-compliant (uses pos.Leverage).
+ * REFACTORED: Uses context.math.extract.
  */
-
 class RiskAppetiteIndex {
   constructor() {
     this.tickerLeverage = new Map();
@@ -18,14 +15,12 @@ class RiskAppetiteIndex {
       type: 'standard',
       rootDataDependencies: ['portfolio'],
       isHistorical: false,
-      userType: 'speculator',
+      userType: 'speculator',
       category: 'pyro'
     };
   }
 
-  static getDependencies() {
-    return [];
-  }
+  static getDependencies() { return []; }
 
   static getSchema() {
     const schema = {
@@ -36,108 +31,72 @@ class RiskAppetiteIndex {
       },
       "required": ["avg_leverage", "user_count"]
     };
-
     return {
       "type": "object",
-      "description": "Calculates the average leverage used by speculators, aggregated by ticker and sector.",
       "properties": {
-        "by_ticker": { "type": "object", "patternProperties": { "^.*$": schema }
-        "by_sector": { "type": "object", "patternProperties": { "^.*$": schema }
-      }
-      "required": ["by_ticker", "by_sector"]
+        "by_ticker": { "type": "object", "patternProperties": { "^.*$": schema } },
+        "by_sector": { "type": "object", "patternProperties": { "^.*$": schema } }
+      }
     };
   }
 
   _init(map, key) {
-    if (!map.has(key)) {
-      map.set(key, { leverage_sum: 0, user_count: 0 });
-    }
+    if (!map.has(key)) map.set(key, { leverage_sum: 0, user_count: 0 });
   }
 
-
-
-
-    // --- END FIX ---
-
-    if (!todayPortfolio?.PublicPositions) {
-      return; // Not a speculator or no positions
-    }
+  process(context) {
+    const { user, mappings, math } = context;
+    const { extract } = math;
 
     if (!this.tickerMap) {
-      this.tickerMap =
-      this.sectorMap =
-    }
-
-    if (!this.tickerMap || !this.sectorMap) {
-      return;
+      this.tickerMap = mappings.instrumentToTicker;
+      this.sectorMap = mappings.sectorMapping;
     }
 
-    const positions =
-
+    const positions = extract.getPositions(user.portfolio.today, user.type);
     const seenTickers = new Set();
    const seenSectors = new Set();
 
     for (const pos of positions) {
-
+      const instId = extract.getInstrumentId(pos);
+      if (!instId) continue;
 
-      const leverage = pos
-      if (leverage <= 1) continue;
+      const leverage = extract.getLeverage(pos);
+      if (leverage <= 1) continue;
 
-      const ticker = this.tickerMap[
-      const sector = this.sectorMap[
+      const ticker = this.tickerMap[instId];
+      const sector = this.sectorMap[instId];
 
       if (ticker) {
         this._init(this.tickerLeverage, ticker);
         const asset = this.tickerLeverage.get(ticker);
         asset.leverage_sum += leverage;
-
-        if (!seenTickers.has(ticker)) {
-          asset.user_count++;
-          seenTickers.add(ticker);
-        }
+        if (!seenTickers.has(ticker)) { asset.user_count++; seenTickers.add(ticker); }
       }
 
       if (sector) {
         this._init(this.sectorLeverage, sector);
         const sec = this.sectorLeverage.get(sector);
         sec.leverage_sum += leverage;
-
-        if (!seenSectors.has(sector)) {
-          sec.user_count++;
-          seenSectors.add(sector);
-        }
+        if (!seenSectors.has(sector)) { sec.user_count++; seenSectors.add(sector); }
       }
     }
   }
 
   async getResult() {
-
-
-
-
-
-
-
-
-
-    for (const [ticker, data] of this.tickerLeverage.entries()) {
-      if (data.user_count > 0) {
-        result.by_ticker[ticker] = {
-          avg_leverage: data.leverage_sum / data.user_count,
-          user_count: data.user_count
-        };
-      }
-    }
-
-    for (const [sector, data] of this.sectorLeverage.entries()) {
-      if (data.user_count > 0) {
-        result.by_sector[sector] = {
-          avg_leverage: data.leverage_sum / data.user_count,
-          user_count: data.user_count
-        };
+    const result = { by_ticker: {}, by_sector: {} };
+    const build = (map, out) => {
+      for (const [key, data] of map.entries()) {
+        if (data.user_count > 0) {
+          out[key] = {
+            avg_leverage: data.leverage_sum / data.user_count,
+            user_count: data.user_count
+          };
+        }
       }
-    }
-
+    };
+    build(this.tickerLeverage, result.by_ticker);
+    build(this.sectorLeverage, result.by_sector);
     return result;
   }
 
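To see what the refactored `process()`/`getResult()` pair computes, here is a self-contained sketch of the same aggregation with the `extract` and `mappings` helpers replaced by plain objects; the positions are invented for illustration. Note that `avg_leverage` divides the summed leverage of all positions by the number of distinct users, so a single user holding AAPL at 5x and 10x contributes 15x to that ticker's average.

```js
// Per-ticker accumulator and init helper, mirroring _init() above.
const tickerLeverage = new Map();
const init = (map, key) => { if (!map.has(key)) map.set(key, { leverage_sum: 0, user_count: 0 }); };

// One user's leveraged positions after mapping instrument IDs to tickers (hypothetical data).
const positions = [
  { ticker: 'AAPL', leverage: 5 },
  { ticker: 'AAPL', leverage: 10 },
  { ticker: 'TSLA', leverage: 2 },
  { ticker: 'NVDA', leverage: 1 }, // leverage <= 1 is skipped, as in process()
];

const seenTickers = new Set();
for (const pos of positions) {
  if (pos.leverage <= 1) continue;
  init(tickerLeverage, pos.ticker);
  const asset = tickerLeverage.get(pos.ticker);
  asset.leverage_sum += pos.leverage;
  // user_count increments once per user per ticker, not once per position.
  if (!seenTickers.has(pos.ticker)) { asset.user_count++; seenTickers.add(pos.ticker); }
}

// getResult() then reports leverage_sum / user_count per key.
const byTicker = {};
for (const [key, data] of tickerLeverage.entries()) {
  if (data.user_count > 0) {
    byTicker[key] = { avg_leverage: data.leverage_sum / data.user_count, user_count: data.user_count };
  }
}
console.log(byTicker);
// { AAPL: { avg_leverage: 15, user_count: 1 }, TSLA: { avg_leverage: 2, user_count: 1 } }
```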
package/calculations/pyro/squeeze-potential.js
@@ -1,126 +1,69 @@
 /**
  * @fileoverview PYRO Product Line (Pass 3)
- *
- * - **Corrected Dependency:** Changed from 'short-position-per-stock'
- * to 'total-short-figures' to get the required 'total_invested_usd'.
- * - Updated 'process' to the 5-arg 'meta' signature.
- * - Updated logic to access data from fixed dependencies.
- * * --- ** THIS IS THE FIX FOR THE EMPTY RESULT ** ---
- * - The 'process' function uses 'short-position-per-stock', but the
- * 'getDependencies' function was requesting 'total-short-figures'.
- * - Corrected 'getDependencies' to request what 'process' actually uses.
+ * REFACTORED: Uses context.math.signals and process(context).
  */
 class SqueezePotential {
-
-  constructor() {
-    this.result = {};
-  }
+  constructor() { this.result = {}; }
 
   static getMetadata() {
     return {
       type: 'meta',
       rootDataDependencies: [],
       isHistorical: false,
-      userType: 'n
+      userType: 'n/a',
       category: 'pyro'
     };
   }
 
-  // --- THIS IS THE FIX FOR THE EMPTY RESULT ---
   static getDependencies() {
     return [
-
-      'short-
-      'speculator-stop-loss-distance-by-ticker-short-long-breakdown' // Pass 1
+      'short-position-per-stock', // Per-ticker counts
+      'speculator-stop-loss-distance-by-ticker-short-long-breakdown' // Per-ticker SL
     ];
   }
-  // --- END FIX ---
 
   static getSchema() {
     const tickerSchema = {
       "type": "object",
       "properties": {
-        "
+        "total_short_exposure_weight": { "type": "number" },
         "avg_sl_distance_pct": { "type": "number" },
-        "user_count": { "type": "number" }
+        "user_count": { "type": "number" }
       },
-      "required": ["
-    };
-
-    return {
-      "type": "object",
-      "description": "Aggregates total $USD short volume and avg. stop-loss distance per asset.",
-      "patternProperties": { "^.*$": tickerSchema },
-      "additionalProperties": tickerSchema
+      "required": ["avg_sl_distance_pct", "user_count"]
     };
+    return { "type": "object", "patternProperties": { "^.*$": tickerSchema } };
  }
 
-
-
-
-    // 'total-short-figures' is NOT keyed by ticker, it's a single object.
-    // This calculation is fundamentally flawed.
-    // It needs 'total-short-per-sector' or a new calc.
-
-    // --- RE-FIXING based on available dependencies ---
-    // 'total-short-figures' is { total_invested_usd, total_positions_count }
-    // 'speculator-stop-loss-distance...' is { [ticker]: { short_avg_distance_pct, ... } }
-    //
-    // The *intent* is to get short USD per ticker. This is not possible
-    // with the listed dependencies.
-    //
-    // We will use 'total-short-per-sector' as a proxy.
-    // **User must change dependency to 'total-short-per-sector'.**
-    // I will assume this change is made.
+  process(context) {
+    const { computed, math } = context;
+    const { signals } = math;
 
-
-
-    // This matches my previous fix and is the only way to get a
-    // per-ticker result.
+    const shortData = computed['short-position-per-stock'];
+    const slData = computed['speculator-stop-loss-distance-by-ticker-short-long-breakdown'];
 
-
-    const slData = fetchedDependencies['speculator-stop-loss-distance-by-ticker-short-long-breakdown'];
-
-    if (!shortData || !slData) {
-      this.result = {};
-      return;
-    }
+    if (!shortData || !slData) return;
 
     const result = {};
-
-
-    for (const
-      const shortSlAvg =
-
-
-
-
-      result[ticker]
+    const tickers = signals.getUnionKeys(computed, ['short-position-per-stock', 'speculator-stop-loss-distance-by-ticker-short-long-breakdown']);
+
+    for (const ticker of tickers) {
+      const shortSlAvg = signals.getMetric(computed, 'speculator-stop-loss-distance-by-ticker-short-long-breakdown', ticker, 'avg_short_sl_distance_pct');
+      const userCount = signals.getMetric(computed, 'short-position-per-stock', ticker, 'short_count');
+      const weight = signals.getMetric(computed, 'short-position-per-stock', ticker, 'total_short_exposure_weight');
+
+      if (userCount > 0) {
+        result[ticker] = {
+          total_short_exposure_weight: weight,
+          avg_sl_distance_pct: shortSlAvg,
+          user_count: userCount
+        };
      }
     }
-
-    // 2. Process Short Volume data (already keyed by Ticker)
-    for (const [ticker, count] of Object.entries(shortData)) {
-      if (!result[ticker]) {
-        result[ticker] = { total_short_usd: 0, avg_sl_distance_pct: 0, user_count: 0 };
-      }
-
-      // This is the only "fix" possible without changing dependencies
-      // to a per-ticker USD calculation (which doesn't exist).
-      result[ticker].total_short_usd = 0; // Cannot be calculated from deps
-      result[ticker].user_count = count; // This is position count
-    }
-
     this.result = result;
   }
-  // --- END FIX (Part 2) ---
-
-  async getResult(fetchedDependencies) {
-    return this.result;
-  }
 
-
-
-}
+  async getResult() { return this.result; }
+  reset() { this.result = {}; }
 }
 module.exports = SqueezePotential;
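The refactored squeeze calculation is essentially a per-ticker join over two upstream results. The `math.signals` helpers are defined elsewhere in the package and are not part of this diff; the versions below are hypothetical stand-ins matching how they are called above, and the `computed` payload is fabricated to show the shape of the output.

```js
// Hypothetical stand-ins for signals.getUnionKeys / signals.getMetric.
const getUnionKeys = (computed, deps) =>
  [...new Set(deps.flatMap((dep) => Object.keys(computed[dep] || {})))];
const getMetric = (computed, dep, ticker, field, fallback = 0) =>
  (computed[dep] && computed[dep][ticker] && computed[dep][ticker][field] !== undefined)
    ? computed[dep][ticker][field]
    : fallback;

// Fabricated upstream results, keyed by ticker as the diff assumes.
const computed = {
  'short-position-per-stock': {
    TSLA: { short_count: 42, total_short_exposure_weight: 0.18 }
  },
  'speculator-stop-loss-distance-by-ticker-short-long-breakdown': {
    TSLA: { avg_short_sl_distance_pct: 6.5 }
  }
};

// Same join as SqueezePotential.process().
const result = {};
for (const ticker of getUnionKeys(computed, Object.keys(computed))) {
  const userCount = getMetric(computed, 'short-position-per-stock', ticker, 'short_count');
  if (userCount > 0) {
    result[ticker] = {
      total_short_exposure_weight: getMetric(computed, 'short-position-per-stock', ticker, 'total_short_exposure_weight'),
      avg_sl_distance_pct: getMetric(computed, 'speculator-stop-loss-distance-by-ticker-short-long-breakdown', ticker, 'avg_short_sl_distance_pct'),
      user_count: userCount
    };
  }
}
console.log(result);
// { TSLA: { total_short_exposure_weight: 0.18, avg_sl_distance_pct: 6.5, user_count: 42 } }
```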