aiden-shared-calculations-unified 1.0.47 → 1.0.49
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/calculations/behavioural/historical/historical_performance_aggregator.js +70 -0
- package/calculations/meta/negative_expectancy_cohort_flow.js +224 -0
- package/calculations/meta/positive_expectancy_cohort_flow.js +227 -0
- package/calculations/meta/shark_attack_signal.js +60 -0
- package/calculations/meta/smart_dumb_divergence_index_v2.js +127 -0
- package/calculations/meta/user_expectancy_score.js +54 -0
- package/package.json +1 -1
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Pass 1: Aggregates each user's historical performance
|
|
3
|
+
* from the root history data feed.
|
|
4
|
+
*/
|
|
5
|
+
class HistoricalPerformanceAggregator {
  /**
   * Pass 1: Aggregates each user's historical performance from the root
   * history data feed into an in-memory map keyed by user ID.
   *
   * @param {number} [minTrades=10] - Minimum number of historical trades a
   *   user must have for their stats to be kept. Generalized from the
   *   previously hard-coded constant; default preserves old behavior.
   */
  constructor(minTrades = 10) {
    // Map of { [userId]: summary stats }, accumulated across process() calls.
    this.allUserStats = {};
    this.minTrades = minTrades;
  }

  /**
   * Aggregates one user's historical stats, if eligible.
   *
   * @param {object} todayPortfolio - Not used, but part of signature.
   * @param {object} yesterdayPortfolio - Not used.
   * @param {string} userId - The user's ID.
   * @param {object} context - Shared context data.
   * @param {null} todayInsights - Not used.
   * @param {null} yesterdayInsights - Not used.
   * @param {null} todaySocialPostInsights - Not used.
   * @param {null} yesterdaySocialPostInsights - Not used.
   * @param {object} todayHistoryData - The full map of { [userId]: history }
   * @param {null} yesterdayHistoryData - Not used.
   */
  async process(
    todayPortfolio, yesterdayPortfolio, userId, context,
    todayInsights, yesterdayInsights,
    todaySocialPostInsights, yesterdaySocialPostInsights,
    todayHistoryData, yesterdayHistoryData
  ) {
    // This calc only runs if the history data was successfully loaded.
    if (!todayHistoryData || !userId) {
      return;
    }

    const userHistory = todayHistoryData[userId];

    // 'all' is the aggregate object where instrumentId === -1.
    if (!userHistory || !userHistory.all) {
      return;
    }

    const stats = userHistory.all;

    // Ignore users who are only mirroring.
    if (stats.isMirror === true) {
      return;
    }

    // We only care about users with a meaningful trade history.
    if (!stats.totalTrades || stats.totalTrades < this.minTrades) {
      return;
    }

    this.allUserStats[userId] = {
      winRatio: stats.winRatio,
      avgProfitPct: stats.avgProfitPct,
      avgLossPct: stats.avgLossPct,
      totalTrades: stats.totalTrades,
      avgHoldingTime: stats.avgHoldingTimeInMinutes
    };
  }

  /**
   * @returns {Promise<object>} The accumulated { [userId]: stats } map.
   */
  async getResult() {
    return this.allUserStats;
  }

  /** Clears all accumulated state so the instance can be reused. */
  reset() {
    this.allUserStats = {};
  }
}
|
|
69
|
+
|
|
70
|
+
module.exports = HistoricalPerformanceAggregator;
|
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Calculates "Net Crowd Flow" and "Sector Rotation"
|
|
3
|
+
* *only* for the "Dumb Cohort" (Bottom 20% of Investor Scores).
|
|
4
|
+
*
|
|
5
|
+
* --- META REFACTOR (v2) ---
|
|
6
|
+
* This calculation is `type: "meta"` and expects its dependencies
|
|
7
|
+
* (the user-investment-profile results) to be fetched by the pass runner.
|
|
8
|
+
* It then streams root portfolio data.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
const { Firestore } = require('@google-cloud/firestore');
|
|
12
|
+
const firestore = new Firestore();
|
|
13
|
+
const { loadAllPriceData, getDailyPriceChange } = require('../../utils/price_data_provider');
|
|
14
|
+
const { loadInstrumentMappings, getInstrumentSectorMap } = require('../../utils/sector_mapping_provider');
|
|
15
|
+
// NOTE: Corrected relative path for data_loader
|
|
16
|
+
const { loadDataByRefs, getPortfolioPartRefs, loadFullDayMap } = require('../../../bulltrackers-module/functions/computation-system/utils/data_loader');
|
|
17
|
+
|
|
18
|
+
const COHORT_PERCENTILE = 0.2; // Bottom 20%
|
|
19
|
+
const PROFILE_CALC_ID = 'user-investment-profile'; // The calc to read IS scores from
|
|
20
|
+
|
|
21
|
+
class DumbCohortFlow {

  /**
   * Statically declare dependencies for the pass runner.
   * FIX: the ID must match the key read in _loadCohort
   * ('user-expectancy-score', hyphenated). The previous
   * 'user_expectancy_score' (underscores) never matched the key this
   * class actually consumes, so the cohort could never be built.
   */
  static getDependencies() {
    return ['user-expectancy-score'];
  }

  constructor() {
    // Meta-calc, no constructor state needed
  }

  /**
   * Builds the "dumb" cohort from the fetched expectancy scores:
   * users in the bottom 20% of scores whose score is also negative.
   *
   * @param {object} logger - Logger exposing log(level, message).
   * @param {object} fetchedDependencies - In-memory results from prior passes.
   * @returns {Set<string>|null} Cohort user IDs, or null when the dependency
   *   is missing or empty.
   */
  _loadCohort(logger, fetchedDependencies) {
    logger.log('INFO', '[NegativeExpectancyCohortFlow] Loading Expectancy Scores from fetched dependency...');

    const profileData = fetchedDependencies['user-expectancy-score'];

    if (!profileData || Object.keys(profileData).length === 0) {
      logger.log('WARN', `[NegativeExpectancyCohortFlow] Cannot find dependency 'user-expectancy-score'. Cohort will not be built.`);
      return null;
    }

    const allScores = Object.entries(profileData).map(([userId, data]) => ({
      userId,
      score: data.expectancy_score
    }));

    allScores.sort((a, b) => a.score - b.score);

    // Find the 20th percentile (Bottom 20%)
    const thresholdIndex = Math.floor(allScores.length * 0.20);
    const thresholdScore = allScores[thresholdIndex]?.score || 0;

    // Filter for users with a *negative expectancy score* AND are in the bottom 20%
    const cohortIds = new Set(
      allScores.filter(s => s.score <= thresholdScore && s.score < 0)
        .map(s => s.userId)
    );

    logger.log('INFO', `[NegativeExpectancyCohortFlow] Cohort built. ${cohortIds.size} users at or below ${thresholdScore.toFixed(2)} (20th percentile) and < 0.`);
    return cohortIds;
  }

  // --- Asset Flow Helpers (unchanged) ---

  /** Ensures an accumulator entry exists for the given instrument. */
  _initAsset(asset_values, instrumentId) {
    if (!asset_values[instrumentId]) {
      asset_values[instrumentId] = { day1_value_sum: 0, day2_value_sum: 0 };
    }
  }

  /**
   * Sums position Value per InstrumentID.
   * @param {Array<object>} positions - AggregatedPositions array.
   * @returns {object} Map of { [instrumentId]: summed value }.
   */
  _sumAssetValue(positions) {
    const valueMap = {};
    if (!positions || !Array.isArray(positions)) return valueMap;
    for (const pos of positions) {
      if (pos && pos.InstrumentID && pos.Value) {
        valueMap[pos.InstrumentID] = (valueMap[pos.InstrumentID] || 0) + pos.Value;
      }
    }
    return valueMap;
  }

  /** Adds each position's invested amount into `target`, keyed by sector. */
  _accumulateSectorInvestment(portfolio, target, sectorMap) {
    if (portfolio && portfolio.AggregatedPositions) {
      for (const pos of portfolio.AggregatedPositions) {
        const sector = sectorMap[pos.InstrumentID] || 'N/A';
        target[sector] = (target[sector] || 0) + (pos.Invested || pos.Amount || 0);
      }
    }
  }

  /**
   * REFACTORED PROCESS METHOD
   * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
   * @param {object} dependencies The shared dependencies (db, logger, rootData, etc.).
   * @param {object} config The computation system configuration.
   * @param {object} fetchedDependencies In-memory results from previous passes.
   * @returns {Promise<object|null>} The analysis result or null.
   */
  async process(dateStr, dependencies, config, fetchedDependencies) {
    const { logger, db, rootData, calculationUtils } = dependencies;
    const { portfolioRefs } = rootData;
    logger.log('INFO', '[DumbCohortFlow] Starting meta-process...');

    // 1. Load Cohort from in-memory dependency
    const dumbCohortIds = this._loadCohort(logger, fetchedDependencies);
    if (!dumbCohortIds) {
      return null; // Dependency failed
    }

    // 2. Load external dependencies (prices, sectors)
    const [priceMap, mappings, sectorMap] = await Promise.all([
      loadAllPriceData(),
      loadInstrumentMappings(),
      getInstrumentSectorMap()
    ]);
    if (!priceMap || !mappings || !sectorMap || Object.keys(priceMap).length === 0) {
      logger.log('ERROR', '[DumbCohortFlow] Failed to load critical price/mapping/sector data. Aborting.');
      return null;
    }

    // 3. Load "yesterday's" portfolio data for comparison
    const yesterdayDate = new Date(dateStr + 'T00:00:00Z');
    yesterdayDate.setUTCDate(yesterdayDate.getUTCDate() - 1);
    const yesterdayStr = yesterdayDate.toISOString().slice(0, 10);
    const yesterdayRefs = await getPortfolioPartRefs(config, dependencies, yesterdayStr);
    const yesterdayPortfolios = await loadFullDayMap(config, dependencies, yesterdayRefs);
    logger.log('INFO', `[DumbCohortFlow] Loaded ${yesterdayRefs.length} part refs for yesterday.`);

    // 4. Stream "today's" portfolio data and process
    const asset_values = {};
    const todaySectorInvestment = {};
    const yesterdaySectorInvestment = {};
    let user_count = 0;

    const batchSize = config.partRefBatchSize || 10;
    for (let i = 0; i < portfolioRefs.length; i += batchSize) {
      const batchRefs = portfolioRefs.slice(i, i + batchSize);
      const todayPortfoliosChunk = await loadDataByRefs(config, dependencies, batchRefs);

      for (const uid in todayPortfoliosChunk) {

        if (!dumbCohortIds.has(uid)) continue; // --- Filter user ---

        const pToday = todayPortfoliosChunk[uid];
        const pYesterday = yesterdayPortfolios[uid];

        if (!pToday || !pYesterday || !pToday.AggregatedPositions || !pYesterday.AggregatedPositions) {
          continue;
        }

        // 4a. RUN ASSET FLOW LOGIC
        const yesterdayValues = this._sumAssetValue(pYesterday.AggregatedPositions);
        const todayValues = this._sumAssetValue(pToday.AggregatedPositions);
        const allInstrumentIds = new Set([...Object.keys(yesterdayValues), ...Object.keys(todayValues)]);

        for (const instrumentId of allInstrumentIds) {
          this._initAsset(asset_values, instrumentId);
          asset_values[instrumentId].day1_value_sum += (yesterdayValues[instrumentId] || 0);
          asset_values[instrumentId].day2_value_sum += (todayValues[instrumentId] || 0);
        }

        // 4b. RUN SECTOR ROTATION LOGIC
        this._accumulateSectorInvestment(pToday, todaySectorInvestment, sectorMap);
        this._accumulateSectorInvestment(pYesterday, yesterdaySectorInvestment, sectorMap);
        user_count++;
      }
    }

    logger.log('INFO', `[DumbCohortFlow] Processed ${user_count} users in cohort.`);

    // --- 5. GETRESULT LOGIC ---
    if (user_count === 0) {
      // FIX: use the logger.log(level, msg) API used everywhere else in this
      // file; logger.warn is not part of the logger contract used here.
      logger.log('WARN', '[DumbCohortFlow] No users processed for dumb cohort. Returning null.');
      return null;
    }

    // 5a. Calculate Asset Flow
    const finalAssetFlow = {};
    for (const instrumentId in asset_values) {
      const ticker = mappings.instrumentToTicker[instrumentId] || `id_${instrumentId}`;
      const avg_day1_value = asset_values[instrumentId].day1_value_sum / user_count;
      const avg_day2_value = asset_values[instrumentId].day2_value_sum / user_count;
      const priceChangePct = getDailyPriceChange(instrumentId, yesterdayStr, dateStr, priceMap);

      if (priceChangePct === null) continue;

      const expected_day2_value = avg_day1_value * (1 + priceChangePct);
      const net_crowd_flow_pct = avg_day2_value - expected_day2_value;

      finalAssetFlow[ticker] = {
        net_crowd_flow_pct: net_crowd_flow_pct,
        avg_value_day1_pct: avg_day1_value,
        avg_value_day2_pct: avg_day2_value
      };
    }

    // 5b. Calculate Sector Rotation
    const finalSectorRotation = {};
    const allSectors = new Set([...Object.keys(todaySectorInvestment), ...Object.keys(yesterdaySectorInvestment)]);
    for (const sector of allSectors) {
      const todayAmount = todaySectorInvestment[sector] || 0;
      const yesterdayAmount = yesterdaySectorInvestment[sector] || 0;
      finalSectorRotation[sector] = todayAmount - yesterdayAmount;
    }

    if (Object.keys(finalAssetFlow).length === 0) {
      // FIX: same logger API consistency as above.
      logger.log('WARN', '[DumbCohortFlow] No asset flow calculated (likely all price data missing). Returning null.');
      return null;
    }

    return {
      asset_flow: finalAssetFlow,
      sector_rotation: finalSectorRotation,
      user_sample_size: user_count
    };
  }

  async getResult() { return null; }
  reset() { }
}
|
|
223
|
+
|
|
224
|
+
module.exports = DumbCohortFlow;
|
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Calculates "Net Crowd Flow" and "Sector Rotation"
|
|
3
|
+
* *only* for the "Smart Cohort" (Top 20% of Investor Scores).
|
|
4
|
+
*
|
|
5
|
+
* --- META REFACTOR (v2) ---
|
|
6
|
+
* This calculation is `type: "meta"` and expects its dependencies
|
|
7
|
+
* (the user-investment-profile results) to be fetched by the pass runner.
|
|
8
|
+
* It then streams root portfolio data.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
const { Firestore } = require('@google-cloud/firestore');
|
|
12
|
+
const firestore = new Firestore();
|
|
13
|
+
const { loadAllPriceData, getDailyPriceChange } = require('../../utils/price_data_provider');
|
|
14
|
+
const { loadInstrumentMappings, getInstrumentSectorMap } = require('../../utils/sector_mapping_provider');
|
|
15
|
+
// NOTE: Corrected relative path for data_loader
|
|
16
|
+
const { loadDataByRefs, getPortfolioPartRefs, loadFullDayMap } = require('../../../bulltrackers-module/functions/computation-system/utils/data_loader');
|
|
17
|
+
|
|
18
|
+
const COHORT_PERCENTILE = 0.8; // Top 20%
|
|
19
|
+
const PROFILE_CALC_ID = 'user-investment-profile'; // The calc to read IS scores from
|
|
20
|
+
|
|
21
|
+
class SmartCohortFlow {

  /**
   * Statically declare dependencies for the pass runner.
   * FIX: the ID must match the key read in _loadCohort
   * ('user-expectancy-score', hyphenated). The previous
   * 'user_expectancy_score' (underscores) never matched the key this
   * class actually consumes, so the cohort could never be built.
   */
  static getDependencies() {
    return ['user-expectancy-score'];
  }

  constructor() {
    // Meta-calc, no constructor state needed
  }

  /**
   * Builds the "smart" cohort from the fetched expectancy scores:
   * users in the top 20% of scores whose score is also positive.
   *
   * @param {object} logger - Logger exposing log(level, message).
   * @param {object} fetchedDependencies - In-memory results from prior passes.
   * @returns {Set<string>|null} Cohort user IDs, or null when the dependency
   *   is missing or empty.
   */
  _loadCohort(logger, fetchedDependencies) {
    logger.log('INFO', '[PositiveExpectancyCohortFlow] Loading Expectancy Scores from fetched dependency...');

    // 1. Get the new dependency
    const profileData = fetchedDependencies['user-expectancy-score'];

    if (!profileData || Object.keys(profileData).length === 0) {
      logger.log('WARN', `[PositiveExpectancyCohortFlow] Cannot find dependency 'user-expectancy-score'. Cohort will not be built.`);
      return null;
    }

    // 2. The data is already the map we need: { [userId]: { expectancy_score: X } }
    const allScores = Object.entries(profileData).map(([userId, data]) => ({
      userId,
      score: data.expectancy_score
    }));

    // 3. Sort by score, lowest to highest
    allScores.sort((a, b) => a.score - b.score);

    // 4. Find the 80th percentile (Top 20%)
    const thresholdIndex = Math.floor(allScores.length * 0.80);
    const thresholdScore = allScores[thresholdIndex]?.score || 999;

    // 5. Filter for users with a *positive expectancy score* AND are in the top 20%
    const cohortIds = new Set(
      allScores.filter(s => s.score >= thresholdScore && s.score > 0)
        .map(s => s.userId)
    );

    logger.log('INFO', `[PositiveExpectancyCohortFlow] Cohort built. ${cohortIds.size} users at or above ${thresholdScore.toFixed(2)} (80th percentile) and > 0.`);
    return cohortIds;
  }

  // --- Asset Flow Helpers (unchanged) ---

  /** Ensures an accumulator entry exists for the given instrument. */
  _initAsset(asset_values, instrumentId) {
    if (!asset_values[instrumentId]) {
      asset_values[instrumentId] = { day1_value_sum: 0, day2_value_sum: 0 };
    }
  }

  /**
   * Sums position Value per InstrumentID.
   * @param {Array<object>} positions - AggregatedPositions array.
   * @returns {object} Map of { [instrumentId]: summed value }.
   */
  _sumAssetValue(positions) {
    const valueMap = {};
    if (!positions || !Array.isArray(positions)) return valueMap;
    for (const pos of positions) {
      if (pos && pos.InstrumentID && pos.Value) {
        valueMap[pos.InstrumentID] = (valueMap[pos.InstrumentID] || 0) + pos.Value;
      }
    }
    return valueMap;
  }

  /** Adds each position's invested amount into `target`, keyed by sector. */
  _accumulateSectorInvestment(portfolio, target, sectorMap) {
    if (portfolio && portfolio.AggregatedPositions) {
      for (const pos of portfolio.AggregatedPositions) {
        const sector = sectorMap[pos.InstrumentID] || 'N/A';
        target[sector] = (target[sector] || 0) + (pos.Invested || pos.Amount || 0);
      }
    }
  }

  /**
   * REFACTORED PROCESS METHOD
   * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
   * @param {object} dependencies The shared dependencies (db, logger, rootData, etc.).
   * @param {object} config The computation system configuration.
   * @param {object} fetchedDependencies In-memory results from previous passes.
   * @returns {Promise<object|null>} The analysis result or null.
   */
  async process(dateStr, dependencies, config, fetchedDependencies) {
    const { logger, db, rootData, calculationUtils } = dependencies;
    const { portfolioRefs } = rootData;
    logger.log('INFO', '[SmartCohortFlow] Starting meta-process...');

    // 1. Load Cohort from in-memory dependency
    const smartCohortIds = this._loadCohort(logger, fetchedDependencies);
    if (!smartCohortIds) {
      return null; // Dependency failed
    }

    // 2. Load external dependencies (prices, sectors)
    const [priceMap, mappings, sectorMap] = await Promise.all([
      loadAllPriceData(),
      loadInstrumentMappings(),
      getInstrumentSectorMap()
    ]);
    if (!priceMap || !mappings || !sectorMap || Object.keys(priceMap).length === 0) {
      logger.log('ERROR', '[SmartCohortFlow] Failed to load critical price/mapping/sector data. Aborting.');
      return null;
    }

    // 3. Load "yesterday's" portfolio data for comparison
    const yesterdayDate = new Date(dateStr + 'T00:00:00Z');
    yesterdayDate.setUTCDate(yesterdayDate.getUTCDate() - 1);
    const yesterdayStr = yesterdayDate.toISOString().slice(0, 10);
    const yesterdayRefs = await getPortfolioPartRefs(config, dependencies, yesterdayStr);
    const yesterdayPortfolios = await loadFullDayMap(config, dependencies, yesterdayRefs);
    logger.log('INFO', `[SmartCohortFlow] Loaded ${yesterdayRefs.length} part refs for yesterday.`);

    // 4. Stream "today's" portfolio data and process
    const asset_values = {};
    const todaySectorInvestment = {};
    const yesterdaySectorInvestment = {};
    let user_count = 0;

    const batchSize = config.partRefBatchSize || 10;
    for (let i = 0; i < portfolioRefs.length; i += batchSize) {
      const batchRefs = portfolioRefs.slice(i, i + batchSize);
      const todayPortfoliosChunk = await loadDataByRefs(config, dependencies, batchRefs);

      for (const uid in todayPortfoliosChunk) {

        if (!smartCohortIds.has(uid)) continue; // --- Filter user ---

        const pToday = todayPortfoliosChunk[uid];
        const pYesterday = yesterdayPortfolios[uid];

        if (!pToday || !pYesterday || !pToday.AggregatedPositions || !pYesterday.AggregatedPositions) {
          continue;
        }

        // 4a. RUN ASSET FLOW LOGIC
        const yesterdayValues = this._sumAssetValue(pYesterday.AggregatedPositions);
        const todayValues = this._sumAssetValue(pToday.AggregatedPositions);
        const allInstrumentIds = new Set([...Object.keys(yesterdayValues), ...Object.keys(todayValues)]);

        for (const instrumentId of allInstrumentIds) {
          this._initAsset(asset_values, instrumentId);
          asset_values[instrumentId].day1_value_sum += (yesterdayValues[instrumentId] || 0);
          asset_values[instrumentId].day2_value_sum += (todayValues[instrumentId] || 0);
        }

        // 4b. RUN SECTOR ROTATION LOGIC
        this._accumulateSectorInvestment(pToday, todaySectorInvestment, sectorMap);
        this._accumulateSectorInvestment(pYesterday, yesterdaySectorInvestment, sectorMap);
        user_count++;
      }
    }

    logger.log('INFO', `[SmartCohortFlow] Processed ${user_count} users in cohort.`);

    // --- 5. GETRESULT LOGIC ---
    if (user_count === 0) {
      // FIX: use the logger.log(level, msg) API used everywhere else in this
      // file; logger.warn is not part of the logger contract used here.
      logger.log('WARN', '[SmartCohortFlow] No users processed for smart cohort. Returning null.');
      return null;
    }

    // 5a. Calculate Asset Flow
    const finalAssetFlow = {};
    for (const instrumentId in asset_values) {
      const ticker = mappings.instrumentToTicker[instrumentId] || `id_${instrumentId}`;
      const avg_day1_value = asset_values[instrumentId].day1_value_sum / user_count;
      const avg_day2_value = asset_values[instrumentId].day2_value_sum / user_count;
      const priceChangePct = getDailyPriceChange(instrumentId, yesterdayStr, dateStr, priceMap);

      if (priceChangePct === null) continue;

      const expected_day2_value = avg_day1_value * (1 + priceChangePct);
      const net_crowd_flow_pct = avg_day2_value - expected_day2_value;

      finalAssetFlow[ticker] = {
        net_crowd_flow_pct: net_crowd_flow_pct,
        avg_value_day1_pct: avg_day1_value,
        avg_value_day2_pct: avg_day2_value
      };
    }

    // 5b. Calculate Sector Rotation
    const finalSectorRotation = {};
    const allSectors = new Set([...Object.keys(todaySectorInvestment), ...Object.keys(yesterdaySectorInvestment)]);
    for (const sector of allSectors) {
      const todayAmount = todaySectorInvestment[sector] || 0;
      const yesterdayAmount = yesterdaySectorInvestment[sector] || 0;
      finalSectorRotation[sector] = todayAmount - yesterdayAmount;
    }

    if (Object.keys(finalAssetFlow).length === 0) {
      // FIX: same logger API consistency as above.
      logger.log('WARN', '[SmartCohortFlow] No asset flow calculated (likely all price data missing). Returning null.');
      return null;
    }

    return {
      asset_flow: finalAssetFlow,
      sector_rotation: finalSectorRotation,
      user_sample_size: user_count
    };
  }

  async getResult() { return null; }
  reset() { }
}
|
|
226
|
+
|
|
227
|
+
module.exports = SmartCohortFlow;
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Pass 4: Identifies "Shark Attack" signals where
|
|
3
|
+
* the positive expectancy cohort is buying what the in-loss cohort
|
|
4
|
+
* is panic-selling.
|
|
5
|
+
*/
|
|
6
|
+
class SharkAttackSignal {

  static getDependencies() {
    // Depends on Pass 3 "smart" flow and Pass 1 "in-loss" flow
    return ['positive-expectancy-cohort-flow', 'in-loss-asset-crowd-flow'];
  }

  constructor() {
    this.buyThreshold = 0.005; // Min "smart" buy-in
    this.sellThreshold = -0.005; // Min "in-loss" capitulation
  }

  /**
   * Pass 4: scans both cohort flows and flags every ticker where the
   * positive-expectancy cohort is buying what the in-loss cohort is
   * panic-selling.
   * @returns {Promise<object|null>} Map of { [ticker]: signal } or null when
   *   a dependency is missing.
   */
  async process(dateStr, dependencies, config, fetchedDependencies) {
    const { logger } = dependencies;

    const smartData = fetchedDependencies['positive-expectancy-cohort-flow'];
    const lossData = fetchedDependencies['in-loss-asset-crowd-flow'];

    if (!smartData || !smartData.asset_flow || !lossData) {
      logger.log('WARN', `[SharkAttack] Missing computed dependency data for ${dateStr}. Skipping.`);
      return null;
    }

    const smartFlow = smartData.asset_flow;
    const lossFlow = lossData; // This is the root object
    const signals = {};

    // Union of all tickers seen by either cohort.
    const tickerUniverse = new Set(Object.keys(smartFlow));
    for (const t of Object.keys(lossFlow)) tickerUniverse.add(t);

    for (const ticker of tickerUniverse) {
      const smartNet = smartFlow[ticker]?.net_crowd_flow_pct || 0;
      const lossNet = lossFlow[ticker]?.net_crowd_flow_pct || 0;

      // Divergence: smart cohort buying while the in-loss cohort sells.
      const smartIsBuying = smartNet >= this.buyThreshold;
      const lossIsCapitulating = lossNet <= this.sellThreshold;
      if (!smartIsBuying || !lossIsCapitulating) continue;

      signals[ticker] = {
        status: "SHARK_ATTACK",
        detail: "Positive expectancy cohort is buying from the capitulating in-loss cohort.",
        smart_flow: smartNet,
        loss_flow: lossNet,
        // Simple conviction score: buy pressure plus sell-pressure magnitude.
        conviction: smartNet + Math.abs(lossNet)
      };
    }

    return signals;
  }

  async getResult() { return null; }
  reset() {}
}
|
|
59
|
+
|
|
60
|
+
module.exports = SharkAttackSignal;
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Meta-calculation (Pass 4) that correlates the asset/sector flow
|
|
3
|
+
* of the "Smart Cohort" vs. the "Dumb Cohort" to find divergence signals.
|
|
4
|
+
*
|
|
5
|
+
* --- META REFACTOR (v2) ---
|
|
6
|
+
* This calculation is now stateless. It declares its dependencies and
|
|
7
|
+
* expects them to be passed to its `process` method.
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
class SmartDumbDivergenceIndex {

  /** Statically declare dependencies so the pass runner can pre-fetch them. */
  static getDependencies() {
    return ['positive-expectancy-cohort-flow', 'negative-expectancy-cohort-flow'];
  }

  constructor() {
    // Minimum net flow (as a percentage) to be considered a signal
    this.FLOW_THRESHOLD = 0.005; // Formerly 0.5
  }

  /**
   * Classifies one ticker's smart/dumb flow pair into a divergence status.
   * @param {number} sFlow - Smart-cohort net crowd flow.
   * @param {number} dFlow - Dumb-cohort net crowd flow.
   * @returns {{status: string, detail: string}}
   */
  _classifyAsset(sFlow, dFlow) {
    const threshold = this.FLOW_THRESHOLD;
    const smartBuys = sFlow >= threshold;
    const smartSells = sFlow <= -threshold;
    const dumbBuys = dFlow >= threshold;
    const dumbSells = dFlow <= -threshold;

    if (smartBuys && dumbSells) {
      return {
        status: 'Capitulation',
        detail: 'Smart cohort is buying the dip from the panic-selling dumb cohort.'
      };
    }
    if (smartSells && dumbBuys) {
      return {
        status: 'Euphoria',
        detail: 'Smart cohort is selling into the FOMO-buying dumb cohort.'
      };
    }
    if (smartBuys && dumbBuys) {
      return { status: 'Aligned_Buy', detail: 'Cohorts are aligned or flow is insignificant.' };
    }
    if (smartSells && dumbSells) {
      return { status: 'Aligned_Sell', detail: 'Cohorts are aligned or flow is insignificant.' };
    }
    return { status: 'No_Divergence', detail: 'Cohorts are aligned or flow is insignificant.' };
  }

  /**
   * Correlates the asset/sector flows of the smart vs. dumb cohorts and
   * reports every ticker/sector where their behavior diverges or aligns
   * beyond the flow threshold.
   *
   * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
   * @param {object} dependencies The shared dependencies (db, logger).
   * @param {object} config The computation system configuration.
   * @param {object} fetchedDependencies In-memory results from previous passes.
   * @returns {Promise<object|null>} { assets, sectors } result or null.
   */
  async process(dateStr, dependencies, config, fetchedDependencies) {
    const { logger } = dependencies;

    const smartData = fetchedDependencies['positive-expectancy-cohort-flow'];
    const dumbData = fetchedDependencies['negative-expectancy-cohort-flow'];

    if (!smartData || !dumbData) {
      logger.log('WARN', `[SmartDumbDivergence] Missing cohort flow data dependency for ${dateStr}. Skipping.`);
      return null;
    }

    const smartAssetFlow = smartData.asset_flow;
    const dumbAssetFlow = dumbData.asset_flow;
    const smartSectorFlow = smartData.sector_rotation;
    const dumbSectorFlow = dumbData.sector_rotation;

    if (!smartAssetFlow || !dumbAssetFlow || !smartSectorFlow || !dumbSectorFlow) {
      logger.log('WARN', `[SmartDumbDivergence] Dependency data for ${dateStr} is incomplete (missing asset_flow or sector_rotation). Skipping.`);
      return null;
    }

    const results = { assets: {}, sectors: {} };

    // Correlate per-asset flows across both cohorts.
    const tickerUniverse = new Set([...Object.keys(smartAssetFlow), ...Object.keys(dumbAssetFlow)]);
    for (const ticker of tickerUniverse) {
      const sFlow = smartAssetFlow[ticker]?.net_crowd_flow_pct || 0;
      const dFlow = dumbAssetFlow[ticker]?.net_crowd_flow_pct || 0;

      const { status, detail } = this._classifyAsset(sFlow, dFlow);
      if (status === 'No_Divergence') continue;

      results.assets[ticker] = {
        status: status,
        detail: detail,
        smart_cohort_flow_pct: sFlow,
        dumb_cohort_flow_pct: dFlow
      };
    }

    // Correlate per-sector USD rotation across both cohorts.
    const sectorUniverse = new Set([...Object.keys(smartSectorFlow), ...Object.keys(dumbSectorFlow)]);
    for (const sector of sectorUniverse) {
      const sFlow = smartSectorFlow[sector] || 0;
      const dFlow = dumbSectorFlow[sector] || 0;

      let status = 'No_Divergence';
      if (sFlow > 0 && dFlow < 0) {
        status = 'Capitulation';
      } else if (sFlow < 0 && dFlow > 0) {
        status = 'Euphoria';
      }
      if (status === 'No_Divergence') continue;

      results.sectors[sector] = {
        status: status,
        smart_cohort_flow_usd: sFlow,
        dumb_cohort_flow_usd: dFlow
      };
    }

    return results;
  }

  async getResult() { return null; }
  reset() {}
}
|
|
126
|
+
|
|
127
|
+
module.exports = SmartDumbDivergenceIndex;
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Pass 2: Calculates the "Expectancy Score" for each user
|
|
3
|
+
* based on their historical trading performance.
|
|
4
|
+
* Expectancy = (WinRatio * AvgProfitPct) + (LossRatio * AvgLossPct)
|
|
5
|
+
*/
|
|
6
|
+
class UserExpectancyScore {

  static getDependencies() {
    // Depends on our new Pass 1 calculation
    return ['historical-performance-aggregator'];
  }

  constructor() {}

  /**
   * Pass 2: derives an "Expectancy Score" per user from the historical
   * stats produced by Pass 1:
   *   Expectancy = (WinRatio * AvgProfitPct) + (LossRatio * AvgLossPct)
   *
   * @param {string} dateStr - Date being processed (used only in logs).
   * @param {object} dependencies - Shared deps; only `logger` is used here.
   * @param {object} config - System configuration (unused).
   * @param {object} fetchedDependencies - In-memory results of prior passes.
   * @returns {Promise<object|null>} { [userId]: score record } or null when
   *   the aggregator dependency is missing.
   */
  async process(dateStr, dependencies, config, fetchedDependencies) {
    const { logger } = dependencies;
    const performanceData = fetchedDependencies['historical-performance-aggregator'];

    if (!performanceData) {
      logger.log('WARN', `[UserExpectancyScore] Missing dependency 'historical-performance-aggregator' for ${dateStr}. Skipping.`);
      return null;
    }

    const expectancyScores = {};

    for (const [userId, stats] of Object.entries(performanceData)) {
      // Skip users whose aggregated stats are incomplete.
      const hasAllStats =
        typeof stats.winRatio === 'number' &&
        typeof stats.avgProfitPct === 'number' &&
        typeof stats.avgLossPct === 'number';
      if (!hasAllStats) continue;

      // winRatio is stored as a percentage (0-100); convert to a fraction.
      const winFraction = stats.winRatio / 100.0;
      const lossFraction = 1.0 - winFraction;

      // Expectancy formula
      expectancyScores[userId] = {
        expectancy_score: (winFraction * stats.avgProfitPct) + (lossFraction * stats.avgLossPct),
        winRatio: stats.winRatio, // Pass through for debugging/filtering
        totalTrades: stats.totalTrades
      };
    }

    return expectancyScores;
  }

  async getResult() { return null; }
  reset() {}
}
|
|
53
|
+
|
|
54
|
+
module.exports = UserExpectancyScore;
|