aiden-shared-calculations-unified 1.0.17 → 1.0.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/calculations/backtests/strategy-performance.js +246 -0
- package/calculations/behavioural/historical/dumb-cohort-flow.js +196 -0
- package/calculations/behavioural/historical/smart-cohort-flow.js +196 -0
- package/calculations/behavioural/historical/user-investment-profile.js +393 -0
- package/calculations/meta/smart-dumb-divergence-index.js +128 -0
- package/calculations/pnl/historical/user_profitability_tracker.js +63 -22
- package/package.json +1 -1
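The new calculation modules share a common lifecycle: per-user calculations expose process(todayPortfolio, yesterdayPortfolio, userId, context) together with getResult() and reset(), while date-level "meta-style" calculations expose process(dateStr, dependencies, config). The sketch below shows how a hypothetical runner might drive one of each; the runner itself, the users array, and the require paths are assumptions inferred from the file layout, not part of the published code.

// Illustrative sketch only -- the runner, the users array and the require paths are hypothetical.
const DumbCohortFlow = require('aiden-shared-calculations-unified/calculations/behavioural/historical/dumb-cohort-flow');
const SmartDumbDivergenceIndex = require('aiden-shared-calculations-unified/calculations/meta/smart-dumb-divergence-index');

async function runForDate(db, logger, config, users, dateStr, prevDateStr) {
  // Per-user calc: fed one user at a time, then aggregated once via getResult()
  const cohortFlow = new DumbCohortFlow();
  const context = {
    config,
    dependencies: { db, logger },
    todayDateStr: dateStr,
    yesterdayDateStr: prevDateStr
  };
  for (const user of users) {
    await cohortFlow.process(user.todayPortfolio, user.yesterdayPortfolio, user.id, context);
  }
  const cohortResult = await cohortFlow.getResult();
  cohortFlow.reset();

  // Meta-style calc: runs once per date against previously stored results
  const divergence = new SmartDumbDivergenceIndex();
  const divergenceResult = await divergence.process(dateStr, { db, logger }, config);

  return { cohortResult, divergenceResult };
}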
@@ -0,0 +1,246 @@
+/**
+ * @fileoverview Backtest (Pass 4) calculation.
+ * Runs a full historical simulation of a trading strategy
+ * based on meta-signals.
+ */
+
+// Note: Ensure this path is correct relative to your 'calculations' dir
+const { loadAllPriceData } = require('../../utils/price_data_provider');
+
+class StrategyPerformance {
+  constructor() {
+    this.INITIAL_CASH = 100000;
+    this.TRADE_SIZE_USD = 5000;
+
+    // --- Strategy Configuration ---
+    // Defines which signals from which computations trigger a BUY or SELL
+    this.strategySignals = {
+      'smart-dumb-divergence-index': {
+        'Capitulation': 'BUY',
+        'Euphoria': 'SELL'
+      },
+      'profit_cohort_divergence': {
+        'Capitulation': 'BUY',
+        'Profit Taking': 'SELL'
+      }
+    };
+    // --- End Configuration ---
+
+    this.priceMap = null;
+  }
+
+  /**
+   * Helper to find the first date a computation was stored.
+   * This determines the start date of the backtest.
+   * @param {object} db - Firestore instance
+   * @param {string} collection - resultsCollection
+   * @param {string} computation - computation name (e.g., 'smart-dumb-divergence-index')
+   * @param {string} category - computation category (e.g., 'meta')
+   * @returns {Promise<string|null>} YYYY-MM-DD string or null
+   */
+  async _findSignalInceptionDate(db, collection, computation, category) {
+    // Query for the oldest doc. This is a bit slow but runs once.
+    const snapshot = await db.collection(collection)
+      .where(`${category}.${computation}`, '==', true)
+      .orderBy(db.FieldPath.documentId(), 'asc')
+      .limit(1)
+      .get();
+
+    if (snapshot.empty) return null;
+    return snapshot.docs[0].id;
+  }
+
+  /**
+   * Fetches all required signals for the entire backtest period in one go.
+   */
+  async _fetchAllSignals(db, collection, resultsSub, compsSub, dates) {
+    const refs = [];
+    const signalMap = new Map();
+
+    for (const date of dates) {
+      for (const computation in this.strategySignals) {
+        const key = `${date}_${computation}`;
+
+        // Dynamically find category (this is a bit brittle, but works for your structure)
+        let category = 'meta'; // default
+        if (computation.includes('cohort')) category = 'behavioural';
+
+        const docRef = db.collection(collection).doc(date)
+          .collection(resultsSub).doc(category)
+          .collection(compsSub).doc(computation);
+        refs.push({ key, ref: docRef });
+      }
+    }
+
+    // This will be a large db.getAll call, but it's very efficient.
+    const snapshots = await db.getAll(...refs.map(r => r.ref));
+    snapshots.forEach((snap, idx) => {
+      if (snap.exists) signalMap.set(refs[idx].key, snap.data());
+    });
+    return signalMap;
+  }
+
+  /**
+   * Helper to find an instrument ID from a ticker.
+   * This is a simplified lookup from the priceMap.
+   */
+  _findInstrumentId(ticker) {
+    // This is inefficient, but will work. A reverse map would be faster.
+    for (const instrumentId in this.priceMap) {
+      const priceData = this.priceMap[instrumentId];
+      if (priceData && priceData.ticker && priceData.ticker === ticker) {
+        return instrumentId;
+      }
+    }
+    // Fallback for tickers that might have suffixes (e.g., from asset-crowd-flow)
+    for (const instrumentId in this.priceMap) {
+      const priceData = this.priceMap[instrumentId];
+      if (priceData && priceData.ticker && ticker.startsWith(priceData.ticker)) {
+        return instrumentId;
+      }
+    }
+    return null;
+  }
+
+
+  /**
+   * Main "meta-style" process function.
+   * @param {string} dateStr - Today's date.
+   * @param {object} dependencies - db, logger.
+   * @param {object} config - Computation config.
+   */
+  async process(dateStr, dependencies, config) {
+    const { db, logger } = dependencies;
+    const { resultsCollection, resultsSubcollection, computationsSubcollection } = config;
+
+    // 1. Load Price Data
+    if (!this.priceMap) {
+      logger.log('INFO', '[Backtest] Loading all price data for simulation...');
+      this.priceMap = await loadAllPriceData();
+    }
+
+    // 2. Find Backtest Start Date
+    // We find the oldest signal. In a real system, you'd find the *newest* of the "oldest" dates.
+    const inceptionDateStr = await this._findSignalInceptionDate(
+      db,
+      resultsCollection,
+      'smart-dumb-divergence-index', // Our core signal
+      'meta' // The category of this signal
+    );
+
+    if (!inceptionDateStr) {
+      logger.log('WARN', '[Backtest] No signal history found for smart-dumb-divergence-index. Skipping.');
+      return null;
+    }
+    logger.log('INFO', `[Backtest] Found signal inception date: ${inceptionDateStr}`);
+
+    // 3. Build Date Range
+    const allDates = [];
+    const current = new Date(inceptionDateStr + 'T00:00:00Z');
+    const end = new Date(dateStr + 'T00:00:00Z');
+    while (current <= end) {
+      allDates.push(current.toISOString().slice(0, 10));
+      current.setUTCDate(current.getUTCDate() + 1);
+    }
+
+    if (allDates.length < 2) {
+      logger.log('WARN', '[Backtest] Not enough history to run simulation.');
+      return null;
+    }
+
+    // 4. Fetch ALL signals for ALL dates in one go
+    logger.log('INFO', `[Backtest] Fetching ${allDates.length} days of signal data...`);
+    const signalDataMap = await this._fetchAllSignals(
+      db, resultsCollection, resultsSubcollection, computationsSubcollection, allDates
+    );
+
+    // 5. --- Run the Simulation Loop ---
+    const portfolio = { cash: this.INITIAL_CASH, positions: {} }; // { ticker: { shares, instrumentId, marketValue } }
+    const history = []; // To store daily portfolio value
+
+    for (const date of allDates) {
+      // A. Mark-to-Market existing positions
+      let portfolioValue = portfolio.cash;
+      for (const ticker in portfolio.positions) {
+        const pos = portfolio.positions[ticker];
+        const price = this.priceMap[pos.instrumentId]?.[date];
+
+        if (price) {
+          pos.marketValue = price * pos.shares;
+          portfolioValue += pos.marketValue;
+        } else {
+          portfolioValue += pos.marketValue; // Use last known value if price missing
+        }
+      }
+      history.push({ date, portfolioValue });
+
+      // B. Generate trades for *this* date
+      const tradesToMake = {}; // { 'AAPL': 'BUY', 'TSLA': 'SELL' }
+      for (const computation in this.strategySignals) {
+        const signalData = signalDataMap.get(`${date}_${computation}`);
+        if (!signalData) continue;
+
+        const signalRules = this.strategySignals[computation];
+        // The signalData is the *entire doc* (e.g., { "AAPL": { status: "Capitulation", ... } })
+        for (const ticker in signalData) {
+          const signal = signalData[ticker]?.status; // e.g., "Capitulation"
+          if (signalRules[signal]) {
+            tradesToMake[ticker] = signalRules[signal]; // 'BUY' or 'SELL'
+          }
+        }
+      }
+
+      // C. Execute Trades
+      for (const ticker in tradesToMake) {
+        const action = tradesToMake[ticker];
+
+        const instrumentId = this._findInstrumentId(ticker);
+        if (!instrumentId) {
+          // logger.log('WARN', `[Backtest] No instrumentId for ${ticker}`);
+          continue;
+        }
+
+        const price = this.priceMap[instrumentId]?.[date];
+        if (!price || price <= 0) {
+          // logger.log('WARN', `[Backtest] No price for ${ticker} on ${date}`);
+          continue;
+        }
+
+        if (action === 'BUY' && portfolio.cash >= this.TRADE_SIZE_USD) {
+          if (!portfolio.positions[ticker]) { // Only buy if not already holding
+            const shares = this.TRADE_SIZE_USD / price;
+            portfolio.cash -= this.TRADE_SIZE_USD;
+            portfolio.positions[ticker] = {
+              shares: shares,
+              instrumentId: instrumentId,
+              marketValue: this.TRADE_SIZE_USD
+            };
+          }
+        } else if (action === 'SELL' && portfolio.positions[ticker]) {
+          // Simple: sell all
+          portfolio.cash += portfolio.positions[ticker].marketValue;
+          delete portfolio.positions[ticker];
+        }
+      }
+    } // --- End Simulation Loop ---
+
+    const finalValue = history[history.length - 1]?.portfolioValue || this.INITIAL_CASH;
+    const totalReturnPct = ((finalValue - this.INITIAL_CASH) / this.INITIAL_CASH) * 100;
+
+    logger.log('INFO', `[Backtest] Simulation complete. Final Value: ${finalValue}, Return: ${totalReturnPct.toFixed(2)}%`);
+
+    return {
+      strategyName: 'SmartDumbDivergence_v1',
+      inceptionDate: inceptionDateStr,
+      endDate: dateStr,
+      finalPortfolioValue: finalValue,
+      totalReturnPercent: totalReturnPct,
+      dailyHistory: history // This can be plotted on the frontend
+    };
+  }
+
+  async getResult() { return null; }
+  reset() {}
+}
+
+module.exports = StrategyPerformance;
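For reference, position sizing in the simulation loop above is fixed-dollar: every BUY spends TRADE_SIZE_USD at that day's price (if cash allows and the ticker is not already held) and every SELL liquidates the position at its last marked value. A standalone illustration of that arithmetic, with invented prices:

// Standalone illustration of the backtest's sizing and return arithmetic (prices invented).
const INITIAL_CASH = 100000;
const TRADE_SIZE_USD = 5000;

let cash = INITIAL_CASH;
const positions = {};

// BUY signal for 'AAPL' at a price of 200 -> 25 shares
positions['AAPL'] = { shares: TRADE_SIZE_USD / 200, marketValue: TRADE_SIZE_USD };
cash -= TRADE_SIZE_USD;

// Next day the price is 220: mark-to-market, then a SELL liquidates at market value
positions['AAPL'].marketValue = 220 * positions['AAPL'].shares; // 5500
cash += positions['AAPL'].marketValue;
delete positions['AAPL'];

const totalReturnPct = ((cash - INITIAL_CASH) / INITIAL_CASH) * 100;
console.log(totalReturnPct.toFixed(2)); // "0.50" -- a 0.5% gain on the initial cash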
@@ -0,0 +1,196 @@
+/**
+ * @fileoverview Calculates "Net Crowd Flow" and "Sector Rotation"
+ * *only* for the "Dumb Cohort" (Bottom 20% of Investor Scores).
+ *
+ * This calc depends on 'user-investment-profile.js' being run first for the same day.
+ */
+
+const { Firestore } = require('@google-cloud/firestore');
+const firestore = new Firestore();
+const { loadAllPriceData, getDailyPriceChange } = require('../../../utils/price_data_provider');
+const { loadInstrumentMappings, getInstrumentSectorMap } = require('../../../utils/sector_mapping_provider');
+
+const COHORT_PERCENTILE = 0.2; // Bottom 20%
+const PROFILE_CALC_ID = 'user-investment-profile'; // The calc to read IS scores from
+
+class DumbCohortFlow {
+  constructor() {
+    // Asset Flow
+    this.asset_values = {}; // { instrumentId: { day1_value_sum: 0, day2_value_sum: 0 } }
+    // Sector Rotation
+    this.todaySectorInvestment = {};
+    this.yesterdaySectorInvestment = {};
+
+    this.dumbCohortIds = null; // Set of user IDs
+    this.user_count = 0; // Number of *cohort* users
+    this.priceMap = null;
+    this.mappings = null;
+    this.sectorMap = null;
+    this.dates = {};
+  }
+
+  /**
+   * Loads the Investor Scores, calculates the cohort threshold, and builds the Set of user IDs.
+   */
+  async _loadCohort(context, dependencies) {
+    const { db, logger } = dependencies;
+    logger.log('INFO', '[DumbCohortFlow] Loading Investor Scores to build cohort...');
+
+    try {
+      const scoreMapRef = db.collection(context.config.resultsCollection).doc(context.todayDateStr)
+        .collection(context.config.resultsSubcollection).doc('behavioural')
+        .collection(context.config.computationsSubcollection).doc(PROFILE_CALC_ID);
+
+      const doc = await scoreMapRef.get();
+      if (!doc.exists || !doc.data().daily_investor_scores) {
+        logger.log('WARN', '[DumbCohortFlow] Cannot find dependency: daily_investor_scores. Cohort will be empty.');
+        this.dumbCohortIds = new Set();
+        return;
+      }
+
+      const scores = doc.data().daily_investor_scores;
+      const allScores = Object.entries(scores).map(([userId, score]) => ({ userId, score }));
+      allScores.sort((a, b) => a.score - b.score);
+
+      const thresholdIndex = Math.floor(allScores.length * COHORT_PERCENTILE);
+      const thresholdScore = allScores[thresholdIndex]?.score || 0; // Get 20th percentile score
+
+      this.dumbCohortIds = new Set(
+        allScores.filter(s => s.score <= thresholdScore).map(s => s.userId) // Get users *at or below*
+      );
+
+      logger.log('INFO', `[DumbCohortFlow] Cohort built. ${this.dumbCohortIds.size} users at or below ${thresholdScore.toFixed(2)} (20th percentile).`);
+
+    } catch (e) {
+      logger.log('ERROR', '[DumbCohortFlow] Failed to load cohort.', { error: e.message });
+      this.dumbCohortIds = new Set();
+    }
+  }
+
+  // --- Asset Flow Helpers ---
+  _initAsset(instrumentId) {
+    if (!this.asset_values[instrumentId]) {
+      this.asset_values[instrumentId] = { day1_value_sum: 0, day2_value_sum: 0 };
+    }
+  }
+  _sumAssetValue(positions) {
+    const valueMap = {};
+    if (!positions || !Array.isArray(positions)) return valueMap;
+    for (const pos of positions) {
+      if (pos && pos.InstrumentID && pos.Value) {
+        valueMap[pos.InstrumentID] = (valueMap[pos.InstrumentID] || 0) + pos.Value;
+      }
+    }
+    return valueMap;
+  }
+  // --- Sector Rotation Helper ---
+  _accumulateSectorInvestment(portfolio, target) {
+    if (portfolio && portfolio.AggregatedPositions) {
+      for (const pos of portfolio.AggregatedPositions) {
+        const sector = this.sectorMap[pos.InstrumentID] || 'N/A';
+        target[sector] = (target[sector] || 0) + (pos.Invested || pos.Amount || 0);
+      }
+    }
+  }
+
+  /**
+   * PROCESS: Runs daily for each user.
+   */
+  async process(todayPortfolio, yesterdayPortfolio, userId, context) {
+    // 1. Load cohort on first run
+    if (!this.dumbCohortIds) {
+      await this._loadCohort(context, context.dependencies);
+      this.dates.today = context.todayDateStr;
+      this.dates.yesterday = context.yesterdayDateStr;
+    }
+
+    // 2. Filter user
+    if (!this.dumbCohortIds.has(userId) || !todayPortfolio || !yesterdayPortfolio || !todayPortfolio.AggregatedPositions || !yesterdayPortfolio.AggregatedPositions) {
+      return;
+    }
+
+    // 3. User is in the cohort, load maps if needed
+    if (!this.sectorMap) {
+      this.sectorMap = await getInstrumentSectorMap();
+    }
+
+    // --- 4. RUN ASSET FLOW LOGIC ---
+    const yesterdayValues = this._sumAssetValue(yesterdayPortfolio.AggregatedPositions);
+    const todayValues = this._sumAssetValue(todayPortfolio.AggregatedPositions);
+    const allInstrumentIds = new Set([...Object.keys(yesterdayValues), ...Object.keys(todayValues)]);
+
+    for (const instrumentId of allInstrumentIds) {
+      this._initAsset(instrumentId);
+      this.asset_values[instrumentId].day1_value_sum += (yesterdayValues[instrumentId] || 0);
+      this.asset_values[instrumentId].day2_value_sum += (todayValues[instrumentId] || 0);
+    }
+
+    // --- 5. RUN SECTOR ROTATION LOGIC ---
+    this._accumulateSectorInvestment(todayPortfolio, this.todaySectorInvestment);
+    this._accumulateSectorInvestment(yesterdayPortfolio, this.yesterdaySectorInvestment);
+
+    this.user_count++;
+  }
+
+  /**
+   * GETRESULT: Aggregates and returns the flow data for the cohort.
+   */
+  async getResult() {
+    if (this.user_count === 0 || !this.dates.today) {
+      return { asset_flow: {}, sector_rotation: {}, user_sample_size: 0 };
+    }
+
+    // 1. Load dependencies
+    if (!this.priceMap || !this.mappings) {
+      const [priceData, mappingData] = await Promise.all([
+        loadAllPriceData(),
+        loadInstrumentMappings()
+      ]);
+      this.priceMap = priceData;
+      this.mappings = mappingData;
+    }
+
+    // --- 2. Calculate Asset Flow ---
+    const finalAssetFlow = {};
+    const todayStr = this.dates.today;
+    const yesterdayStr = this.dates.yesterday;
+
+    for (const instrumentId in this.asset_values) {
+      const ticker = this.mappings.instrumentToTicker[instrumentId] || `id_${instrumentId}`;
+      const avg_day1_value = this.asset_values[instrumentId].day1_value_sum / this.user_count;
+      const avg_day2_value = this.asset_values[instrumentId].day2_value_sum / this.user_count;
+      const priceChangePct = getDailyPriceChange(instrumentId, yesterdayStr, todayStr, this.priceMap);
+
+      if (priceChangePct === null) continue;
+
+      const expected_day2_value = avg_day1_value * (1 + priceChangePct);
+      const net_crowd_flow_pct = avg_day2_value - expected_day2_value;
+
+      finalAssetFlow[ticker] = {
+        net_crowd_flow_pct: net_crowd_flow_pct,
+        avg_value_day1_pct: avg_day1_value,
+        avg_value_day2_pct: avg_day2_value
+      };
+    }
+
+    // --- 3. Calculate Sector Rotation ---
+    const finalSectorRotation = {};
+    const allSectors = new Set([...Object.keys(this.todaySectorInvestment), ...Object.keys(this.yesterdaySectorInvestment)]);
+    for (const sector of allSectors) {
+      const todayAmount = this.todaySectorInvestment[sector] || 0;
+      const yesterdayAmount = this.yesterdaySectorInvestment[sector] || 0;
+      finalSectorRotation[sector] = todayAmount - yesterdayAmount;
+    }
+
+    // 4. Return combined result
+    return {
+      asset_flow: finalAssetFlow,
+      sector_rotation: finalSectorRotation,
+      user_sample_size: this.user_count
+    };
+  }
+
+  reset() { /* ... reset all constructor properties ... */ }
+}
+
+module.exports = DumbCohortFlow;
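The cohort cut in _loadCohort above is index-based: scores are sorted ascending, the score at floor(N * COHORT_PERCENTILE) becomes the threshold, and every user scoring at or below it joins the cohort, so membership is always at least a fifth of users and grows slightly with ties. A worked example with invented scores:

// Worked example of the bottom-20% selection (scores invented).
const COHORT_PERCENTILE = 0.2;
const scores = { u1: 7.2, u2: 3.1, u3: 5.5, u4: 2.4, u5: 6.8, u6: 2.4, u7: 4.9, u8: 8.0, u9: 5.1, u10: 3.9 };

const allScores = Object.entries(scores).map(([userId, score]) => ({ userId, score }));
allScores.sort((a, b) => a.score - b.score); // 2.4, 2.4, 3.1, 3.9, 4.9, ...

const thresholdIndex = Math.floor(allScores.length * COHORT_PERCENTILE); // 2
const thresholdScore = allScores[thresholdIndex].score;                  // 3.1
const cohort = allScores.filter(s => s.score <= thresholdScore).map(s => s.userId);
console.log(cohort); // [ 'u4', 'u6', 'u2' ] -- the two lowest scores plus the threshold user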
@@ -0,0 +1,196 @@
+/**
+ * @fileoverview Calculates "Net Crowd Flow" and "Sector Rotation"
+ * *only* for the "Smart Cohort" (Top 20% of Investor Scores).
+ *
+ * This calc depends on 'user-investment-profile.js' being run first for the same day.
+ */
+
+const { Firestore } = require('@google-cloud/firestore');
+const firestore = new Firestore();
+const { loadAllPriceData, getDailyPriceChange } = require('../../../utils/price_data_provider');
+const { loadInstrumentMappings, getInstrumentSectorMap } = require('../../../utils/sector_mapping_provider');
+
+const COHORT_PERCENTILE = 0.8; // Top 20%
+const PROFILE_CALC_ID = 'user-investment-profile'; // The calc to read IS scores from
+
+class SmartCohortFlow {
+  constructor() {
+    // Asset Flow
+    this.asset_values = {}; // { instrumentId: { day1_value_sum: 0, day2_value_sum: 0 } }
+    // Sector Rotation
+    this.todaySectorInvestment = {};
+    this.yesterdaySectorInvestment = {};
+
+    this.smartCohortIds = null; // Set of user IDs
+    this.user_count = 0; // Number of *cohort* users
+    this.priceMap = null;
+    this.mappings = null;
+    this.sectorMap = null;
+    this.dates = {};
+  }
+
+  /**
+   * Loads the Investor Scores, calculates the cohort threshold, and builds the Set of user IDs.
+   */
+  async _loadCohort(context, dependencies) {
+    const { db, logger } = dependencies;
+    logger.log('INFO', '[SmartCohortFlow] Loading Investor Scores to build cohort...');
+
+    try {
+      const scoreMapRef = db.collection(context.config.resultsCollection).doc(context.todayDateStr)
+        .collection(context.config.resultsSubcollection).doc('behavioural')
+        .collection(context.config.computationsSubcollection).doc(PROFILE_CALC_ID);
+
+      const doc = await scoreMapRef.get();
+      if (!doc.exists || !doc.data().daily_investor_scores) {
+        logger.log('WARN', '[SmartCohortFlow] Cannot find dependency: daily_investor_scores. Cohort will be empty.');
+        this.smartCohortIds = new Set();
+        return;
+      }
+
+      const scores = doc.data().daily_investor_scores;
+      const allScores = Object.entries(scores).map(([userId, score]) => ({ userId, score }));
+      allScores.sort((a, b) => a.score - b.score);
+
+      const thresholdIndex = Math.floor(allScores.length * COHORT_PERCENTILE);
+      const thresholdScore = allScores[thresholdIndex]?.score || 999;
+
+      this.smartCohortIds = new Set(
+        allScores.filter(s => s.score >= thresholdScore).map(s => s.userId)
+      );
+
+      logger.log('INFO', `[SmartCohortFlow] Cohort built. ${this.smartCohortIds.size} users at or above ${thresholdScore.toFixed(2)} (80th percentile).`);
+
+    } catch (e) {
+      logger.log('ERROR', '[SmartCohortFlow] Failed to load cohort.', { error: e.message });
+      this.smartCohortIds = new Set();
+    }
+  }
+
+  // --- Asset Flow Helpers ---
+  _initAsset(instrumentId) {
+    if (!this.asset_values[instrumentId]) {
+      this.asset_values[instrumentId] = { day1_value_sum: 0, day2_value_sum: 0 };
+    }
+  }
+  _sumAssetValue(positions) {
+    const valueMap = {};
+    if (!positions || !Array.isArray(positions)) return valueMap;
+    for (const pos of positions) {
+      if (pos && pos.InstrumentID && pos.Value) {
+        valueMap[pos.InstrumentID] = (valueMap[pos.InstrumentID] || 0) + pos.Value;
+      }
+    }
+    return valueMap;
+  }
+  // --- Sector Rotation Helper ---
+  _accumulateSectorInvestment(portfolio, target) {
+    if (portfolio && portfolio.AggregatedPositions) {
+      for (const pos of portfolio.AggregatedPositions) {
+        const sector = this.sectorMap[pos.InstrumentID] || 'N/A';
+        target[sector] = (target[sector] || 0) + (pos.Invested || pos.Amount || 0);
+      }
+    }
+  }
+
+  /**
+   * PROCESS: Runs daily for each user.
+   */
+  async process(todayPortfolio, yesterdayPortfolio, userId, context) {
+    // 1. Load cohort on first run
+    if (!this.smartCohortIds) {
+      await this._loadCohort(context, context.dependencies);
+      this.dates.today = context.todayDateStr;
+      this.dates.yesterday = context.yesterdayDateStr;
+    }
+
+    // 2. Filter user
+    if (!this.smartCohortIds.has(userId) || !todayPortfolio || !yesterdayPortfolio || !todayPortfolio.AggregatedPositions || !yesterdayPortfolio.AggregatedPositions) {
+      return;
+    }
+
+    // 3. User is in the cohort, load maps if needed
+    if (!this.sectorMap) {
+      this.sectorMap = await getInstrumentSectorMap();
+    }
+
+    // --- 4. RUN ASSET FLOW LOGIC ---
+    const yesterdayValues = this._sumAssetValue(yesterdayPortfolio.AggregatedPositions);
+    const todayValues = this._sumAssetValue(todayPortfolio.AggregatedPositions);
+    const allInstrumentIds = new Set([...Object.keys(yesterdayValues), ...Object.keys(todayValues)]);
+
+    for (const instrumentId of allInstrumentIds) {
+      this._initAsset(instrumentId);
+      this.asset_values[instrumentId].day1_value_sum += (yesterdayValues[instrumentId] || 0);
+      this.asset_values[instrumentId].day2_value_sum += (todayValues[instrumentId] || 0);
+    }
+
+    // --- 5. RUN SECTOR ROTATION LOGIC ---
+    this._accumulateSectorInvestment(todayPortfolio, this.todaySectorInvestment);
+    this._accumulateSectorInvestment(yesterdayPortfolio, this.yesterdaySectorInvestment);
+
+    this.user_count++;
+  }
+
+  /**
+   * GETRESULT: Aggregates and returns the flow data for the cohort.
+   */
+  async getResult() {
+    if (this.user_count === 0 || !this.dates.today) {
+      return { asset_flow: {}, sector_rotation: {}, user_sample_size: 0 };
+    }
+
+    // 1. Load dependencies
+    if (!this.priceMap || !this.mappings) {
+      const [priceData, mappingData] = await Promise.all([
+        loadAllPriceData(),
+        loadInstrumentMappings()
+      ]);
+      this.priceMap = priceData;
+      this.mappings = mappingData;
+    }
+
+    // --- 2. Calculate Asset Flow ---
+    const finalAssetFlow = {};
+    const todayStr = this.dates.today;
+    const yesterdayStr = this.dates.yesterday;
+
+    for (const instrumentId in this.asset_values) {
+      const ticker = this.mappings.instrumentToTicker[instrumentId] || `id_${instrumentId}`;
+      const avg_day1_value = this.asset_values[instrumentId].day1_value_sum / this.user_count;
+      const avg_day2_value = this.asset_values[instrumentId].day2_value_sum / this.user_count;
+      const priceChangePct = getDailyPriceChange(instrumentId, yesterdayStr, todayStr, this.priceMap);
+
+      if (priceChangePct === null) continue;
+
+      const expected_day2_value = avg_day1_value * (1 + priceChangePct);
+      const net_crowd_flow_pct = avg_day2_value - expected_day2_value;
+
+      finalAssetFlow[ticker] = {
+        net_crowd_flow_pct: net_crowd_flow_pct,
+        avg_value_day1_pct: avg_day1_value,
+        avg_value_day2_pct: avg_day2_value
+      };
+    }
+
+    // --- 3. Calculate Sector Rotation ---
+    const finalSectorRotation = {};
+    const allSectors = new Set([...Object.keys(this.todaySectorInvestment), ...Object.keys(this.yesterdaySectorInvestment)]);
+    for (const sector of allSectors) {
+      const todayAmount = this.todaySectorInvestment[sector] || 0;
+      const yesterdayAmount = this.yesterdaySectorInvestment[sector] || 0;
+      finalSectorRotation[sector] = todayAmount - yesterdayAmount; // Note: This is total $, not avg.
+    }
+
+    // 4. Return combined result
+    return {
+      asset_flow: finalAssetFlow,
+      sector_rotation: finalSectorRotation,
+      user_sample_size: this.user_count
+    };
+  }
+
+  reset() { /* ... reset all constructor properties ... */ }
+}
+
+module.exports = SmartCohortFlow;
@@ -0,0 +1,393 @@
+/**
+ * @fileoverview Calculates a rolling 90-day "Investor Score" (IS) for each normal user.
+ * Heuristic engine (not an academic finance model). Outputs:
+ * - sharded_user_profile: { <shardKey>: { profiles: { userId: [history...] }, lastUpdated } }
+ * - daily_investor_scores: { userId: finalIS }
+ *
+ * Notes:
+ * - NetProfit / ProfitAndLoss fields are assumed to be percent returns in decimal (e.g. 0.03 = +3%).
+ * - The "Sharpe" used here is a cross-sectional dispersion proxy computed over position returns,
+ *   weighted by invested amounts. It's renamed/treated as a dispersionRiskProxy in comments.
+ */
+
+const { Firestore } = require('@google-cloud/firestore');
+const firestore = new Firestore();
+const { loadAllPriceData } = require('../../../utils/price_data_provider');
+const { getInstrumentSectorMap, loadInstrumentMappings } = require('../../../utils/sector_mapping_provider');
+
+// Config
+const NUM_SHARDS = 50; // Must match the number of shards to read/write
+const ROLLING_DAYS = 90;
+const SHARD_COLLECTION_NAME = 'user_profile_history'; // The collection to store sharded history
+const PNL_TRACKER_CALC_ID = 'user-profitability-tracker'; // The calc to read PNL from
+
+// Helper: stable shard index for numeric or string IDs
+function getShardIndex(id) {
+  const n = parseInt(id, 10);
+  if (!Number.isNaN(n)) return Math.abs(n) % NUM_SHARDS;
+  // simple deterministic string hash fallback for non-numeric IDs (UUIDs)
+  let h = 0;
+  for (let i = 0; i < id.length; i++) {
+    h = ((h << 5) - h) + id.charCodeAt(i);
+    h |= 0; // keep 32-bit
+  }
+  return Math.abs(h) % NUM_SHARDS;
+}
+
+class UserInvestmentProfile {
+  constructor() {
+    // will hold today's per-user raw heuristic scores
+    this.dailyUserScores = {}; // { userId: { score_rd, score_disc, score_time } }
+
+    // cached dependencies
+    this.priceMap = null;
+    this.sectorMap = null;
+    this.pnlScores = null; // { userId: dailyPnlDecimal }
+    this.dates = {};
+    this.dependenciesLoaded = false;
+  }
+
+  /**
+   * Loads external dependencies once per run.
+   */
+  async _loadDependencies(context, dependencies) {
+    if (this.dependenciesLoaded) return;
+
+    const { db, logger } = dependencies;
+    const { todayDateStr } = context;
+
+    if (logger) logger.log('INFO', '[UserInvestmentProfile] Loading dependencies...');
+
+    // load price data and sector mapping in parallel
+    const [priceData, sectorData] = await Promise.all([
+      loadAllPriceData(),
+      getInstrumentSectorMap()
+    ]);
+    this.priceMap = priceData || {};
+    this.sectorMap = sectorData || {};
+
+    // load PNL map (daily percent returns per user) from PNL calc
+    this.pnlScores = {};
+    try {
+      const pnlCalcRef = db.collection(context.config.resultsCollection).doc(todayDateStr)
+        .collection(context.config.resultsSubcollection).doc('pnl')
+        .collection(context.config.computationsSubcollection).doc(PNL_TRACKER_CALC_ID);
+
+      const pnlSnap = await pnlCalcRef.get();
+      if (pnlSnap.exists) {
+        this.pnlScores = pnlSnap.data().daily_pnl_map || {};
+        if (logger) logger.log('INFO', `[UserInvestmentProfile] Loaded ${Object.keys(this.pnlScores).length} PNL scores.`);
+      } else {
+        if (logger) logger.log('WARN', `[UserInvestmentProfile] Could not find PNL scores dependency for ${todayDateStr}. PNL score will be 0.`);
+      }
+    } catch (e) {
+      if (logger) logger.log('ERROR', `[UserInvestmentProfile] Failed to load PNL scores.`, { error: e.message });
+    }
+
+    this.dependenciesLoaded = true;
+    if (logger) logger.log('INFO', '[UserInvestmentProfile] All dependencies loaded.');
+  }
+
+  /**
+   * HEURISTIC 1: Risk & Diversification Score (0-10).
+   *
+   * Implementation notes:
+   * - NetProfit is assumed to be a percent return in decimal per position (e.g. 0.03 = +3%).
+   * - We compute a weighted mean/std of returns across positions (weights = invested amounts).
+   *   This gives a cross-sectional dispersion proxy (not a time-series Sharpe).
+   */
+  _calculateRiskAndDivScore(todayPortfolio) {
+    if (!todayPortfolio.AggregatedPositions || todayPortfolio.AggregatedPositions.length === 0) {
+      return 5; // neutral
+    }
+
+    const positions = todayPortfolio.AggregatedPositions;
+    let totalInvested = 0;
+    let weightedRetSum = 0;
+    let weightedRetSqSum = 0;
+    let maxPosition = 0;
+    const sectors = new Set();
+
+    for (const pos of positions) {
+      const invested = pos.InvestedAmount || pos.Amount || 0;
+      const netProfit = ('NetProfit' in pos) ? pos.NetProfit : (pos.ProfitAndLoss || 0); // decimal % return
+      const ret = invested > 0 ? (netProfit) : netProfit; // if invested==0 we still include ret but weight 0
+
+      weightedRetSum += ret * invested;
+      weightedRetSqSum += (ret * ret) * invested;
+      totalInvested += invested;
+      if (invested > maxPosition) maxPosition = invested;
+
+      sectors.add(this.sectorMap[pos.InstrumentID] || 'N/A');
+    }
+
+    // Weighted mean & variance of returns
+    const meanReturn = totalInvested > 0 ? (weightedRetSum / totalInvested) : 0;
+    const meanReturnSq = totalInvested > 0 ? (weightedRetSqSum / totalInvested) : (meanReturn * meanReturn);
+    const variance = Math.max(0, meanReturnSq - (meanReturn * meanReturn));
+    const stdReturn = Math.sqrt(variance);
+
+    // dispersion proxy: mean / std (if std is zero we treat as neutral 0)
+    let dispersionRiskProxy = stdReturn > 0 ? meanReturn / stdReturn : 0;
+
+    // cap and map dispersion proxy to [0..10].
+    // dispersionRiskProxy can be outside [-2..4], clamp to reasonable bounds first.
+    const capped = Math.max(-2, Math.min(4, dispersionRiskProxy));
+    const scoreSharpe = ((capped + 2) / 6) * 10; // maps [-2..4] -> [0..10]
+
+    // Sector diversification (monotonic - diminishing returns)
+    const sectorCount = sectors.size;
+    let scoreDiversification = 0;
+    if (sectorCount === 1) scoreDiversification = 0;
+    else if (sectorCount <= 4) scoreDiversification = 5;
+    else if (sectorCount <= 7) scoreDiversification = 8;
+    else scoreDiversification = 10;
+
+    // Position sizing / concentration penalty
+    const concentrationRatio = totalInvested > 0 ? (maxPosition / totalInvested) : 0;
+    let scoreSizing = 0;
+    if (concentrationRatio > 0.8) scoreSizing = 0;
+    else if (concentrationRatio > 0.5) scoreSizing = 2;
+    else if (concentrationRatio > 0.3) scoreSizing = 5;
+    else if (concentrationRatio > 0.15) scoreSizing = 8;
+    else scoreSizing = 10;
+
+    const final = (scoreSharpe * 0.4) + (scoreDiversification * 0.3) + (scoreSizing * 0.3);
+    return Math.max(0, Math.min(10, final));
+  }
+
+  /**
+   * HEURISTIC 2: Discipline Score (0-10).
+   *
+   * Uses yesterday's positions to evaluate closes, averaging down, holding losers/winners.
+   * Defensive: uses safe field fallbacks and guards against division by zero.
+   */
+  _calculateDisciplineScore(yesterdayPortfolio = {}, todayPortfolio = {}) {
+    const yPositions = yesterdayPortfolio.AggregatedPositions || [];
+    const tPositions = new Map((todayPortfolio.AggregatedPositions || []).map(p => [p.PositionID, p]));
+
+    if (yPositions.length === 0) {
+      return 5; // neutral if nothing to judge
+    }
+
+    let eventPoints = 0;
+    let eventCount = 0;
+
+    for (const yPos of yPositions) {
+      const profitAndLoss = ('ProfitAndLoss' in yPos) ? yPos.ProfitAndLoss : (yPos.NetProfit || 0);
+      const invested = yPos.InvestedAmount || yPos.Amount || 0;
+      const pnlPercent = profitAndLoss; // This is already the decimal % return
+
+      const tPos = tPositions.get(yPos.PositionID);
+
+      if (!tPos) {
+        // Closed position
+        eventCount++;
+        if (pnlPercent < -0.05) eventPoints += 10; // cut loser (good)
+        else if (pnlPercent > 0.20) eventPoints += 8; // took big profit (good)
+        else if (pnlPercent > 0 && pnlPercent < 0.05) eventPoints += 2; // paper hands (bad)
+        else eventPoints += 5; // neutral close
+      } else {
+        // Held or modified
+        if (pnlPercent < -0.10) {
+          eventCount++;
+          const tInvested = tPos.InvestedAmount || tPos.Amount || 0;
+          if (tInvested > invested) eventPoints += 0; // averaged down (very poor)
+          else eventPoints += 3; // held loser (poor)
+        } else if (pnlPercent > 0.15) {
+          eventCount++;
+          eventPoints += 10; // held/added to winner (good)
+        }
+      }
+    }
+
+    const avg = (eventCount > 0) ? (eventPoints / eventCount) : 5;
+    return Math.max(0, Math.min(10, avg));
+  }
+
+  /**
+   * HEURISTIC 3: Market Timing Score (0-10).
+   *
+   * For new positions opened today (not present yesterday), measure proximity of openRate to
+   * the last 30-day low/high. Uses date-sorted price history and clamps.
+   */
+  _calculateMarketTimingScore(yesterdayPortfolio = {}, todayPortfolio = {}) {
+    const yIds = new Set((yesterdayPortfolio.AggregatedPositions || []).map(p => p.PositionID));
+    const newPositions = (todayPortfolio.AggregatedPositions || []).filter(p => !yIds.has(p.PositionID));
+
+    if (newPositions.length === 0) return 5;
+
+    let timingPoints = 0;
+    let timingCount = 0;
+
+    for (const tPos of newPositions) {
+      const prices = this.priceMap[tPos.InstrumentID];
+      if (!prices) continue;
+
+      // Accept prices as either array or {date:price} map; build sorted array of prices
+      let historyPrices = [];
+      if (Array.isArray(prices)) {
+        // assume array of numbers or objects with .price/.close
+        historyPrices = prices
+          .map(p => (typeof p === 'number' ? p : (p.price || p.close || null)))
+          .filter(v => v != null);
+      } else {
+        // object keyed by date -> price
+        const entries = Object.keys(prices)
+          .map(d => ({ d, p: prices[d] }))
+          .filter(e => e.p != null)
+          .sort((a, b) => new Date(a.d) - new Date(b.d));
+        historyPrices = entries.map(e => e.p);
+      }
+
+      const last30 = historyPrices.slice(-30);
+      if (last30.length < 2) continue;
+
+      const minPrice = Math.min(...last30);
+      const maxPrice = Math.max(...last30);
+      const openRate = tPos.OpenRate;
+      const range = maxPrice - minPrice;
+      if (!isFinite(range) || range === 0) continue;
+
+      let proximity = (openRate - minPrice) / range; // 0 = at low, 1 = at high
+      proximity = Math.max(0, Math.min(1, proximity)); // clamp to [0,1]
+
+      timingCount++;
+      if (proximity < 0.2) timingPoints += 10;
+      else if (proximity < 0.4) timingPoints += 8;
+      else if (proximity > 0.9) timingPoints += 1;
+      else if (proximity > 0.7) timingPoints += 3;
+      else timingPoints += 5;
+    }
+
+    const avg = (timingCount > 0) ? (timingPoints / timingCount) : 5;
+    return Math.max(0, Math.min(10, avg));
+  }
+
+  /**
+   * PROCESS: called per-user per-day to compute and store today's heuristics.
+   */
+  async process(todayPortfolio, yesterdayPortfolio, userId, context, todayInsights, yesterdayInsights, todaySocial, yesterdaySocial) {
+    // run only for normal users with portfolios
+    if (!todayPortfolio || !todayPortfolio.AggregatedPositions) return;
+
+    if (!this.dependenciesLoaded) {
+      await this._loadDependencies(context, context.dependencies);
+      this.dates.today = context.todayDateStr;
+    }
+
+    const yPort = yesterdayPortfolio || {};
+
+    const score_rd = this._calculateRiskAndDivScore(todayPortfolio);
+    const score_disc = this._calculateDisciplineScore(yPort, todayPortfolio);
+    const score_time = this._calculateMarketTimingScore(yPort, todayPortfolio);
+
+    this.dailyUserScores[userId] = {
+      score_rd,
+      score_disc,
+      score_time
+    };
+  }
+
+  /**
+   * GETRESULT: Aggregate into rolling 90-day history, compute avg components and final IS.
+   *
+   * Returns a structure prepared for writing where each shardKey maps to:
+   *   { profiles: { userId: historyArray, ... }, lastUpdated: todayStr }
+   *
+   * This must match how existing shards are read (snap.data().profiles).
+   */
+  async getResult() {
+    if (Object.keys(this.dailyUserScores).length === 0) return {};
+
+    const todayStr = this.dates.today || (new Date()).toISOString().slice(0, 10);
+
+    // prepare sharded output objects with profiles container (Option A)
+    const shardedResults = {};
+    for (let i = 0; i < NUM_SHARDS; i++) {
+      const shardKey = `${SHARD_COLLECTION_NAME}_shard_${i}`;
+      shardedResults[shardKey] = { profiles: {}, lastUpdated: todayStr };
+    }
+
+    const dailyInvestorScoreMap = {};
+
+    // fetch existing shards in parallel
+    const shardPromises = [];
+    for (let i = 0; i < NUM_SHARDS; i++) {
+      const docRef = firestore.collection(SHARD_COLLECTION_NAME).doc(`${SHARD_COLLECTION_NAME}_shard_${i}`);
+      shardPromises.push(docRef.get());
+    }
+    const shardSnapshots = await Promise.all(shardPromises);
+
+    // Build existingShards map of profiles for quick access
+    const existingShards = shardSnapshots.map((snap, idx) => {
+      if (!snap.exists) return {}; // no profiles
+      const data = snap.data() || {};
+      return data.profiles || {};
+    });
+
+    // process users
+    for (const userId of Object.keys(this.dailyUserScores)) {
+      const shardIndex = getShardIndex(userId);
+      const scores = this.dailyUserScores[userId];
+
+      // fetch existing history for this user (if present)
+      const existingProfiles = existingShards[shardIndex] || {};
+      // clone to avoid mutating snapshot data directly
+      const history = (existingProfiles[userId] || []).slice();
+
+      history.push({
+        date: todayStr,
+        ...scores,
+        pnl: (this.pnlScores && (userId in this.pnlScores)) ? this.pnlScores[userId] : 0
+      });
+
+      const newHistory = history.slice(-ROLLING_DAYS);
+
+      // compute rolling averages
+      let avg_rd = 0, avg_disc = 0, avg_time = 0, avg_pnl = 0;
+      for (const entry of newHistory) {
+        avg_rd += (entry.score_rd || 0);
+        avg_disc += (entry.score_disc || 0);
+        avg_time += (entry.score_time || 0);
+        avg_pnl += (entry.pnl || 0);
+      }
+      const N = newHistory.length || 1;
+      avg_rd /= N;
+      avg_disc /= N;
+      avg_time /= N;
+      avg_pnl /= N;
+
+      // Normalize PNL: avg_pnl is decimal percent (0.005 -> 0.5%). Map to 0-10 scale:
+      // multiply by 1000 (0.005 -> 5). Clamp to [-10, 10] to avoid outliers.
+      const normalizedPnl = Math.max(-10, Math.min(10, avg_pnl * 1000));
+
+      // Final IS (weights): discipline 40%, risk/div 30%, timing 20%, pnl 10%
+      const finalISRaw = (avg_disc * 0.4) + (avg_rd * 0.3) + (avg_time * 0.2) + (normalizedPnl * 0.1);
+      const finalIS = Math.max(0, Math.min(10, finalISRaw));
+
+      // store in prepared shard result under 'profiles'
+      const shardKey = `${SHARD_COLLECTION_NAME}_shard_${shardIndex}`;
+      shardedResults[shardKey].profiles[userId] = newHistory;
+
+      // also set the daily investor score
+      dailyInvestorScoreMap[userId] = finalIS;
+    }
+
+    return {
+      sharded_user_profile: shardedResults,
+      daily_investor_scores: dailyInvestorScoreMap
+    };
+  }
+
+  reset() {
+    this.dailyUserScores = {};
+    this.dependenciesLoaded = false;
+    this.priceMap = null;
+    this.sectorMap = null;
+    this.pnlScores = null;
+    this.dates = {};
+  }
+}
+
+module.exports = UserInvestmentProfile;
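The final Investor Score computed in getResult above is a clamped weighted blend of the rolling 90-day component averages, with the rolling daily PNL rescaled onto the same 0-10 band before weighting. A worked example with invented averages:

// Worked example of the final Investor Score blend (inputs invented).
const avg_disc = 6.5;  // rolling avg discipline score (0-10)
const avg_rd = 4.0;    // rolling avg risk & diversification score (0-10)
const avg_time = 5.0;  // rolling avg market-timing score (0-10)
const avg_pnl = 0.004; // rolling avg daily PNL as a decimal (+0.4%)

const normalizedPnl = Math.max(-10, Math.min(10, avg_pnl * 1000)); // 4
const finalISRaw = (avg_disc * 0.4) + (avg_rd * 0.3) + (avg_time * 0.2) + (normalizedPnl * 0.1);
const finalIS = Math.max(0, Math.min(10, finalISRaw));
console.log(finalIS); // ~5.2  (2.6 + 1.2 + 1.0 + 0.4)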
@@ -0,0 +1,128 @@
+/**
+ * @fileoverview Meta-calculation (Pass 3) that correlates the asset/sector flow
+ * of the "Smart Cohort" vs. the "Dumb Cohort" to find divergence signals.
+ *
+ * This identifies:
+ * 1. "Capitulation": Smart cohort is buying what the dumb cohort is panic-selling.
+ * 2. "Euphoria": Smart cohort is selling what the dumb cohort is FOMO-buying.
+ */
+
+class SmartDumbDivergenceIndex {
+  constructor() {
+    // Minimum net flow (as a percentage) to be considered a signal
+    this.FLOW_THRESHOLD = 0.5;
+  }
+
+  /**
+   * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
+   * @param {object} dependencies The shared dependencies (db, logger).
+   * @param {object} config The computation system configuration.
+   * @returns {Promise<object|null>} The analysis result or null.
+   */
+  async process(dateStr, dependencies, config) {
+    const { db, logger } = dependencies;
+    const collection = config.resultsCollection;
+    const resultsSub = config.resultsSubcollection || 'results';
+    const compsSub = config.computationsSubcollection || 'computations';
+
+    // 1. Define dependencies
+    const refsToGet = [
+      {
+        key: 'smart_flow',
+        ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('behavioural').collection(compsSub).doc('smart-cohort-flow')
+      },
+      {
+        key: 'dumb_flow',
+        ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('behavioural').collection(compsSub).doc('dumb-cohort-flow')
+      }
+    ];
+
+    // 2. Fetch
+    const snapshots = await db.getAll(...refsToGet.map(r => r.ref));
+    const smartData = snapshots[0].exists ? snapshots[0].data() : null;
+    const dumbData = snapshots[1].exists ? snapshots[1].data() : null;
+
+    // 3. Handle "day-delay"
+    if (!smartData || !dumbData) {
+      logger.log('WARN', `[SmartDumbDivergence] Missing cohort flow data for ${dateStr}. Allowing backfill.`);
+      return null; // Let backfill handle it
+    }
+
+    const results = {
+      assets: {},
+      sectors: {}
+    };
+
+    const smartAssetFlow = smartData.asset_flow || {};
+    const dumbAssetFlow = dumbData.asset_flow || {};
+    const smartSectorFlow = smartData.sector_rotation || {};
+    const dumbSectorFlow = dumbData.sector_rotation || {};
+
+    // 4. Correlate Assets
+    const allTickers = new Set([...Object.keys(smartAssetFlow), ...Object.keys(dumbAssetFlow)]);
+    for (const ticker of allTickers) {
+      const sFlow = smartAssetFlow[ticker]?.net_crowd_flow_pct || 0;
+      const dFlow = dumbAssetFlow[ticker]?.net_crowd_flow_pct || 0;
+
+      const smartBuys = sFlow >= this.FLOW_THRESHOLD;
+      const smartSells = sFlow <= -this.FLOW_THRESHOLD;
+      const dumbBuys = dFlow >= this.FLOW_THRESHOLD;
+      const dumbSells = dFlow <= -this.FLOW_THRESHOLD;
+
+      let status = 'No_Divergence';
+      let detail = 'Cohorts are aligned or flow is insignificant.';
+
+      if (smartBuys && dumbSells) {
+        status = 'Capitulation';
+        detail = 'Smart cohort is buying the dip from the panic-selling dumb cohort.';
+      } else if (smartSells && dumbBuys) {
+        status = 'Euphoria';
+        detail = 'Smart cohort is selling into the FOMO-buying dumb cohort.';
+      } else if (smartBuys && dumbBuys) {
+        status = 'Aligned_Buy';
+      } else if (smartSells && dumbSells) {
+        status = 'Aligned_Sell';
+      }
+
+      if (status !== 'No_Divergence') {
+        results.assets[ticker] = {
+          status: status,
+          detail: detail,
+          smart_cohort_flow_pct: sFlow,
+          dumb_cohort_flow_pct: dFlow
+        };
+      }
+    }
+
+    // 5. Correlate Sectors (Note: flow is total $, not %)
+    // We can just check for opposing signs
+    const allSectors = new Set([...Object.keys(smartSectorFlow), ...Object.keys(dumbSectorFlow)]);
+    for (const sector of allSectors) {
+      const sFlow = smartSectorFlow[sector] || 0;
+      const dFlow = dumbSectorFlow[sector] || 0;
+
+      let status = 'No_Divergence';
+
+      if (sFlow > 0 && dFlow < 0) {
+        status = 'Capitulation';
+      } else if (sFlow < 0 && dFlow > 0) {
+        status = 'Euphoria';
+      }
+
+      if (status !== 'No_Divergence') {
+        results.sectors[sector] = {
+          status: status,
+          smart_cohort_flow_usd: sFlow,
+          dumb_cohort_flow_usd: dFlow
+        };
+      }
+    }
+
+    return results;
+  }
+
+  async getResult() { return null; }
+  reset() {}
+}
+
+module.exports = SmartDumbDivergenceIndex;
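With FLOW_THRESHOLD = 0.5, an asset is only labelled once both cohort flows clear the threshold; the sign combination then picks the status. The helper below is a condensed restatement of the if-chain above (it is not part of the package), applied to invented flows:

// Condensed restatement of the asset classification above (flows invented).
const classify = (sFlow, dFlow, t = 0.5) =>
  (sFlow >= t && dFlow <= -t) ? 'Capitulation' :
  (sFlow <= -t && dFlow >= t) ? 'Euphoria' :
  (sFlow >= t && dFlow >= t) ? 'Aligned_Buy' :
  (sFlow <= -t && dFlow <= -t) ? 'Aligned_Sell' : 'No_Divergence';

console.log(classify(1.2, -0.8)); // 'Capitulation'
console.log(classify(-0.7, 2.1)); // 'Euphoria'
console.log(classify(0.3, -2.0)); // 'No_Divergence' -- the smart flow never clears the 0.5 threshold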
@@ -1,47 +1,68 @@
 /**
  * @fileoverview Tracks user profitability over a 7-day rolling window.
- * This version shards the output
+ * This version shards the output AND calculates the user's *weighted average daily PNL (as a decimal %)*.
  */
 
 const { Firestore } = require('@google-cloud/firestore');
 const firestore = new Firestore();
 
-// Define a constant for the number of shards
 const NUM_SHARDS = 50;
 
 class UserProfitabilityTracker {
   constructor() {
-
+    // This will store { userId: { weightedPnlSum: 0, totalInvested: 0 } }
+    this.dailyData = {};
   }
 
-
-
+  /**
+   * Calculates the weighted PNL for the day.
+   * NetProfit is a decimal % return (e.g., 0.03)
+   * Invested is a decimal % weight (e.g., 0.05)
+   */
+  calculateWeightedDailyPnl(portfolio) {
+    if (!portfolio || !portfolio.AggregatedPositions || portfolio.AggregatedPositions.length === 0) {
+      return { weightedPnl: 0, totalInvested: 0 };
+    }
+
+    let weightedPnlSum = 0;
+    let totalInvested = 0;
 
-    const
-
-
+    for (const pos of portfolio.AggregatedPositions) {
+      // Use NetProfit (the % return)
+      const netProfit = ('NetProfit' in pos) ? pos.NetProfit : (pos.ProfitAndLoss || 0);
+      // Use InvestedAmount (the % portfolio weight)
+      const invested = pos.InvestedAmount || pos.Amount || 0;
+
+      if (invested > 0) {
+        weightedPnlSum += netProfit * invested;
+        totalInvested += invested;
+      }
     }
+
+    return { weightedPnlSum, totalInvested };
   }
 
-
-    if (
-
-
-
+  process(todayPortfolio, yesterdayPortfolio, userId) {
+    if (!todayPortfolio) return;
+
+    const { weightedPnlSum, totalInvested } = this.calculateWeightedDailyPnl(todayPortfolio);
+
+    if (totalInvested > 0) {
+      this.dailyData[userId] = { weightedPnlSum, totalInvested };
    }
-    return null;
   }
 
   async getResult() {
     const today = new Date().toISOString().slice(0, 10);
-    const results = {};
+    const results = {}; // For sharded history
+    const dailyPnlMap = {}; // For the new profile calc
 
     // Prepare sharded data structure
     for (let i = 0; i < NUM_SHARDS; i++) {
       results[`user_profitability_shard_${i}`] = {};
     }
 
-    // Fetch
+    // ... (Fetch existing shards logic, same as your file) ...
     const shardPromises = [];
     for (let i = 0; i < NUM_SHARDS; i++) {
       const docRef = firestore.collection('historical_insights').doc(`user_profitability_shard_${i}`);
@@ -50,18 +71,38 @@ class UserProfitabilityTracker {
     const shardSnapshots = await Promise.all(shardPromises);
     const existingData = shardSnapshots.map(snap => (snap.exists ? snap.data().profits : {}));
 
-
-    for (const userId in this.
+
+    for (const userId in this.dailyData) {
+      const { weightedPnlSum, totalInvested } = this.dailyData[userId];
+
+      // Calculate the final weighted average % return for the day
+      // We cap totalInvested at 1.0 (100%) in case of data issues
+      const totalWeight = Math.min(1.0, totalInvested);
+      const dailyAvgPnl = (totalWeight > 0) ? (weightedPnlSum / totalWeight) : 0;
+
+      // Store this for the profile calc dependency
+      dailyPnlMap[userId] = dailyAvgPnl;
+
+      // --- Now, update the sharded history ---
       const shardIndex = parseInt(userId, 10) % NUM_SHARDS;
       const userHistory = existingData[shardIndex][userId] || [];
 
-
+      // Store the decimal % pnl in the history
+      userHistory.push({ date: today, pnl: dailyAvgPnl });
 
-
-      results[
+      const shardKey = `user_profitability_shard_${shardIndex}`;
+      if (!results[shardKey]) results[shardKey] = {};
+      results[shardKey][userId] = userHistory.slice(-7);
     }
 
-    return {
+    return {
+      sharded_user_profitability: results,
+      daily_pnl_map: dailyPnlMap // <-- This now correctly outputs the weighted avg % PNL
+    };
+  }
+
+  reset() {
+    this.dailyData = {};
   }
 }
 
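The tracker's new daily figure is a weighted average of per-position returns, using invested amount as the weight and capping the total weight at 1.0. A worked example with invented positions:

// Worked example of the weighted daily PNL (positions invented).
const positions = [
  { NetProfit: 0.03, InvestedAmount: 0.50 },  // +3% on 50% of the portfolio
  { NetProfit: -0.01, InvestedAmount: 0.30 }  // -1% on 30%
];

let weightedPnlSum = 0;
let totalInvested = 0;
for (const pos of positions) {
  weightedPnlSum += pos.NetProfit * pos.InvestedAmount; // 0.015 - 0.003 = 0.012
  totalInvested += pos.InvestedAmount;                  // 0.8
}
const totalWeight = Math.min(1.0, totalInvested);
const dailyAvgPnl = weightedPnlSum / totalWeight; // 0.012 / 0.8 = 0.015 -> +1.5% for the day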