aiden-shared-calculations-unified 1.0.16 → 1.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,393 @@
1
+ /**
2
+ * @fileoverview Calculates a rolling 90-day "Investor Score" (IS) for each normal user.
3
+ * Heuristic engine (not an academic finance model). Outputs:
4
+ * - sharded_user_profile: { <shardKey>: { profiles: { userId: [history...] }, lastUpdated } }
5
+ * - daily_investor_scores: { userId: finalIS }
6
+ *
7
+ * Notes:
8
+ * - NetProfit / ProfitAndLoss fields are assumed to be percent returns in decimal (e.g. 0.03 = +3%).
9
+ * - The "Sharpe" used here is a cross-sectional dispersion proxy computed over position returns,
10
+ * weighted by invested amounts. In the code it is treated as a dispersionRiskProxy rather than a time-series Sharpe.
11
+ */
12
+
13
+ const { Firestore } = require('@google-cloud/firestore');
14
+ const firestore = new Firestore();
15
+ const { loadAllPriceData } = require('../../../utils/price_data_provider');
16
+ const { getInstrumentSectorMap, loadInstrumentMappings } = require('../../../utils/sector_mapping_provider');
17
+
18
+ // Config
19
+ const NUM_SHARDS = 50; // Must match the number of shards to read/write
20
+ const ROLLING_DAYS = 90;
21
+ const SHARD_COLLECTION_NAME = 'user_profile_history'; // The collection to store sharded history
22
+ const PNL_TRACKER_CALC_ID = 'user-profitability-tracker'; // The calc to read PNL from
23
+
24
+ // Helper: stable shard index for numeric or string IDs
25
+ function getShardIndex(id) {
26
+ const n = parseInt(id, 10);
27
+ if (!Number.isNaN(n)) return Math.abs(n) % NUM_SHARDS;
28
+ // simple deterministic string hash fallback for non-numeric IDs (UUIDs)
29
+ let h = 0;
30
+ for (let i = 0; i < id.length; i++) {
31
+ h = ((h << 5) - h) + id.charCodeAt(i);
32
+ h |= 0; // keep 32-bit
33
+ }
34
+ return Math.abs(h) % NUM_SHARDS;
35
+ }
36
+
37
+ class UserInvestmentProfile {
38
+ constructor() {
39
+ // will hold today's per-user raw heuristic scores
40
+ this.dailyUserScores = {}; // { userId: { score_rd, score_disc, score_time } }
41
+
42
+ // cached dependencies
43
+ this.priceMap = null;
44
+ this.sectorMap = null;
45
+ this.pnlScores = null; // { userId: dailyPnlDecimal }
46
+ this.dates = {};
47
+ this.dependenciesLoaded = false;
48
+ }
49
+
50
+ /**
51
+ * Loads external dependencies once per run.
52
+ */
53
+ async _loadDependencies(context, dependencies) {
54
+ if (this.dependenciesLoaded) return;
55
+
56
+ const { db, logger } = dependencies;
57
+ const { todayDateStr } = context;
58
+
59
+ if (logger) logger.log('INFO', '[UserInvestmentProfile] Loading dependencies...');
60
+
61
+ // load price data and sector mapping in parallel
62
+ const [priceData, sectorData] = await Promise.all([
63
+ loadAllPriceData(),
64
+ getInstrumentSectorMap()
65
+ ]);
66
+ this.priceMap = priceData || {};
67
+ this.sectorMap = sectorData || {};
68
+
69
+ // load PNL map (daily percent returns per user) from PNL calc
70
+ this.pnlScores = {};
71
+ try {
72
+ const pnlCalcRef = db.collection(context.config.resultsCollection).doc(todayDateStr)
73
+ .collection(context.config.resultsSubcollection).doc('pnl')
74
+ .collection(context.config.computationsSubcollection).doc(PNL_TRACKER_CALC_ID);
75
+
76
+ const pnlSnap = await pnlCalcRef.get();
77
+ if (pnlSnap.exists) {
78
+ this.pnlScores = pnlSnap.data().daily_pnl_map || {};
79
+ if (logger) logger.log('INFO', `[UserInvestmentProfile] Loaded ${Object.keys(this.pnlScores).length} PNL scores.`);
80
+ } else {
81
+ if (logger) logger.log('WARN', `[UserInvestmentProfile] Could not find PNL scores dependency for ${todayDateStr}. PNL score will be 0.`);
82
+ }
83
+ } catch (e) {
84
+ if (logger) logger.log('ERROR', `[UserInvestmentProfile] Failed to load PNL scores.`, { error: e.message });
85
+ }
86
+
87
+ this.dependenciesLoaded = true;
88
+ if (logger) logger.log('INFO', '[UserInvestmentProfile] All dependencies loaded.');
89
+ }
90
+
91
+ /**
92
+ * HEURISTIC 1: Risk & Diversification Score (0-10).
93
+ *
94
+ * Implementation notes:
95
+ * - NetProfit is assumed to be a percent return in decimal per position (e.g. 0.03 = +3%).
96
+ * - We compute a weighted mean/std of returns across positions (weights = invested amounts).
97
+ * This gives a cross-sectional dispersion proxy (not a time-series Sharpe).
98
+ */
99
+ _calculateRiskAndDivScore(todayPortfolio) {
100
+ if (!todayPortfolio.AggregatedPositions || todayPortfolio.AggregatedPositions.length === 0) {
101
+ return 5; // neutral
102
+ }
103
+
104
+ const positions = todayPortfolio.AggregatedPositions;
105
+ let totalInvested = 0;
106
+ let weightedRetSum = 0;
107
+ let weightedRetSqSum = 0;
108
+ let maxPosition = 0;
109
+ const sectors = new Set();
110
+
111
+ for (const pos of positions) {
112
+ const invested = pos.InvestedAmount || pos.Amount || 0;
113
+ const netProfit = ('NetProfit' in pos) ? pos.NetProfit : (pos.ProfitAndLoss || 0); // decimal % return
114
+ const ret = netProfit; // decimal return; positions with invested === 0 carry zero weight in the sums below
115
+
116
+ weightedRetSum += ret * invested;
117
+ weightedRetSqSum += (ret * ret) * invested;
118
+ totalInvested += invested;
119
+ if (invested > maxPosition) maxPosition = invested;
120
+
121
+ sectors.add(this.sectorMap[pos.InstrumentID] || 'N/A');
122
+ }
123
+
124
+ // Weighted mean & variance of returns
125
+ const meanReturn = totalInvested > 0 ? (weightedRetSum / totalInvested) : 0;
126
+ const meanReturnSq = totalInvested > 0 ? (weightedRetSqSum / totalInvested) : (meanReturn * meanReturn);
127
+ const variance = Math.max(0, meanReturnSq - (meanReturn * meanReturn));
128
+ const stdReturn = Math.sqrt(variance);
129
+
130
+ // dispersion proxy: mean / std (if std is zero we treat as neutral 0)
131
+ let dispersionRiskProxy = stdReturn > 0 ? meanReturn / stdReturn : 0;
132
+
133
+ // cap and map dispersion proxy to [0..10].
134
+ // dispersionRiskProxy can be outside [-2..4], clamp to reasonable bounds first.
135
+ const capped = Math.max(-2, Math.min(4, dispersionRiskProxy));
136
+ const scoreSharpe = ((capped + 2) / 6) * 10; // maps [-2..4] -> [0..10]
137
+
138
+ // Sector diversification: monotonic in sector count, with diminishing returns per extra sector
139
+ const sectorCount = sectors.size;
140
+ let scoreDiversification = 0;
141
+ if (sectorCount === 1) scoreDiversification = 0;
142
+ else if (sectorCount <= 4) scoreDiversification = 5;
143
+ else if (sectorCount <= 7) scoreDiversification = 8;
144
+ else scoreDiversification = 10;
145
+
146
+ // Position sizing / concentration penalty
147
+ const concentrationRatio = totalInvested > 0 ? (maxPosition / totalInvested) : 0;
148
+ let scoreSizing = 0;
149
+ if (concentrationRatio > 0.8) scoreSizing = 0;
150
+ else if (concentrationRatio > 0.5) scoreSizing = 2;
151
+ else if (concentrationRatio > 0.3) scoreSizing = 5;
152
+ else if (concentrationRatio > 0.15) scoreSizing = 8;
153
+ else scoreSizing = 10;
154
+
155
+ const final = (scoreSharpe * 0.4) + (scoreDiversification * 0.3) + (scoreSizing * 0.3);
156
+ return Math.max(0, Math.min(10, final));
157
+ }
158
+
159
+ /**
160
+ * HEURISTIC 2: Discipline Score (0-10).
161
+ *
162
+ * Uses yesterday's positions to evaluate closes, averaging down, holding losers/winners.
163
+ * Defensive: uses safe field fallbacks and guards against division by zero.
164
+ */
165
+ _calculateDisciplineScore(yesterdayPortfolio = {}, todayPortfolio = {}) {
166
+ const yPositions = yesterdayPortfolio.AggregatedPositions || [];
167
+ const tPositions = new Map((todayPortfolio.AggregatedPositions || []).map(p => [p.PositionID, p]));
168
+
169
+ if (yPositions.length === 0) {
170
+ return 5; // neutral if nothing to judge
171
+ }
172
+
173
+ let eventPoints = 0;
174
+ let eventCount = 0;
175
+
176
+ for (const yPos of yPositions) {
177
+ const profitAndLoss = ('ProfitAndLoss' in yPos) ? yPos.ProfitAndLoss : (yPos.NetProfit || 0);
178
+ const invested = yPos.InvestedAmount || yPos.Amount || 0;
179
+ const pnlPercent = profitAndLoss; // This is already the decimal % return
180
+
181
+ const tPos = tPositions.get(yPos.PositionID);
182
+
183
+ if (!tPos) {
184
+ // Closed position
185
+ eventCount++;
186
+ if (pnlPercent < -0.05) eventPoints += 10; // cut loser (good)
187
+ else if (pnlPercent > 0.20) eventPoints += 8; // took big profit (good)
188
+ else if (pnlPercent > 0 && pnlPercent < 0.05) eventPoints += 2; // closed a small winner early ("paper hands", penalized)
189
+ else eventPoints += 5; // neutral close
190
+ } else {
191
+ // Held or modified
192
+ if (pnlPercent < -0.10) {
193
+ eventCount++;
194
+ const tInvested = tPos.InvestedAmount || tPos.Amount || 0;
195
+ if (tInvested > invested) eventPoints += 0; // averaged down (very poor)
196
+ else eventPoints += 3; // held loser (poor)
197
+ } else if (pnlPercent > 0.15) {
198
+ eventCount++;
199
+ eventPoints += 10; // held/added to winner (good)
200
+ }
201
+ }
202
+ }
203
+
204
+ const avg = (eventCount > 0) ? (eventPoints / eventCount) : 5;
205
+ return Math.max(0, Math.min(10, avg));
206
+ }
207
+
208
+ /**
209
+ * HEURISTIC 3: Market Timing Score (0-10).
210
+ *
211
+ * For new positions opened today (not present yesterday), measure proximity of openRate to
212
+ * the last 30-day low/high. Uses date-sorted price history and clamps.
213
+ */
214
+ _calculateMarketTimingScore(yesterdayPortfolio = {}, todayPortfolio = {}) {
215
+ const yIds = new Set((yesterdayPortfolio.AggregatedPositions || []).map(p => p.PositionID));
216
+ const newPositions = (todayPortfolio.AggregatedPositions || []).filter(p => !yIds.has(p.PositionID));
217
+
218
+ if (newPositions.length === 0) return 5;
219
+
220
+ let timingPoints = 0;
221
+ let timingCount = 0;
222
+
223
+ for (const tPos of newPositions) {
224
+ const prices = this.priceMap[tPos.InstrumentID];
225
+ if (!prices) continue;
226
+
227
+ // Accept prices as either array or {date:price} map; build sorted array of prices
228
+ let historyPrices = [];
229
+ if (Array.isArray(prices)) {
230
+ // assume array of numbers or objects with .price/.close
231
+ historyPrices = prices
232
+ .map(p => (typeof p === 'number' ? p : (p.price || p.close || null)))
233
+ .filter(v => v != null);
234
+ } else {
235
+ // object keyed by date -> price
236
+ const entries = Object.keys(prices)
237
+ .map(d => ({ d, p: prices[d] }))
238
+ .filter(e => e.p != null)
239
+ .sort((a, b) => new Date(a.d) - new Date(b.d));
240
+ historyPrices = entries.map(e => e.p);
241
+ }
242
+
243
+ const last30 = historyPrices.slice(-30);
244
+ if (last30.length < 2) continue;
245
+
246
+ const minPrice = Math.min(...last30);
247
+ const maxPrice = Math.max(...last30);
248
+ const openRate = tPos.OpenRate;
249
+ const range = maxPrice - minPrice;
250
+ if (typeof openRate !== 'number' || !isFinite(range) || range === 0) continue; // skip if open rate is missing or the 30-day range is flat
251
+
252
+ let proximity = (openRate - minPrice) / range; // 0 = at low, 1 = at high
253
+ proximity = Math.max(0, Math.min(1, proximity)); // clamp to [0,1]
254
+
255
+ timingCount++;
256
+ if (proximity < 0.2) timingPoints += 10;
257
+ else if (proximity < 0.4) timingPoints += 8;
258
+ else if (proximity > 0.9) timingPoints += 1;
259
+ else if (proximity > 0.7) timingPoints += 3;
260
+ else timingPoints += 5;
261
+ }
262
+
263
+ const avg = (timingCount > 0) ? (timingPoints / timingCount) : 5;
264
+ return Math.max(0, Math.min(10, avg));
265
+ }
266
+
267
+ /**
268
+ * PROCESS: called per-user per-day to compute and store today's heuristics.
269
+ */
270
+ async process(todayPortfolio, yesterdayPortfolio, userId, context, todayInsights, yesterdayInsights, todaySocial, yesterdaySocial) {
271
+ // run only for normal users with portfolios
272
+ if (!todayPortfolio || !todayPortfolio.AggregatedPositions) return;
273
+
274
+ if (!this.dependenciesLoaded) {
275
+ await this._loadDependencies(context, context.dependencies);
276
+ this.dates.today = context.todayDateStr;
277
+ }
278
+
279
+ const yPort = yesterdayPortfolio || {};
280
+
281
+ const score_rd = this._calculateRiskAndDivScore(todayPortfolio);
282
+ const score_disc = this._calculateDisciplineScore(yPort, todayPortfolio);
283
+ const score_time = this._calculateMarketTimingScore(yPort, todayPortfolio);
284
+
285
+ this.dailyUserScores[userId] = {
286
+ score_rd,
287
+ score_disc,
288
+ score_time
289
+ };
290
+ }
291
+
292
+ /**
293
+ * GETRESULT: Aggregate into rolling 90-day history, compute avg components and final IS.
294
+ *
295
+ * Returns a structure prepared for writing where each shardKey maps to:
296
+ * { profiles: { userId: historyArray, ... }, lastUpdated: todayStr }
297
+ *
298
+ * This must match how existing shards are read (snap.data().profiles).
299
+ */
300
+ async getResult() {
301
+ if (Object.keys(this.dailyUserScores).length === 0) return {};
302
+
303
+ const todayStr = this.dates.today || (new Date()).toISOString().slice(0, 10);
304
+
305
+ // prepare sharded output objects with profiles container (Option A)
306
+ const shardedResults = {};
307
+ for (let i = 0; i < NUM_SHARDS; i++) {
308
+ const shardKey = `${SHARD_COLLECTION_NAME}_shard_${i}`;
309
+ shardedResults[shardKey] = { profiles: {}, lastUpdated: todayStr };
310
+ }
311
+
312
+ const dailyInvestorScoreMap = {};
313
+
314
+ // fetch existing shards in parallel
315
+ const shardPromises = [];
316
+ for (let i = 0; i < NUM_SHARDS; i++) {
317
+ const docRef = firestore.collection(SHARD_COLLECTION_NAME).doc(`${SHARD_COLLECTION_NAME}_shard_${i}`);
318
+ shardPromises.push(docRef.get());
319
+ }
320
+ const shardSnapshots = await Promise.all(shardPromises);
321
+
322
+ // Build existingShards map of profiles for quick access
323
+ const existingShards = shardSnapshots.map((snap) => {
324
+ if (!snap.exists) return {}; // no profiles
325
+ const data = snap.data() || {};
326
+ return data.profiles || {};
327
+ });
328
+
329
+ // process users
330
+ for (const userId of Object.keys(this.dailyUserScores)) {
331
+ const shardIndex = getShardIndex(userId);
332
+ const scores = this.dailyUserScores[userId];
333
+
334
+ // fetch existing history for this user (if present)
335
+ const existingProfiles = existingShards[shardIndex] || {};
336
+ // clone to avoid mutating snapshot data directly
337
+ const history = (existingProfiles[userId] || []).slice();
338
+
339
+ history.push({
340
+ date: todayStr,
341
+ ...scores,
342
+ pnl: (this.pnlScores && (userId in this.pnlScores)) ? this.pnlScores[userId] : 0
343
+ });
344
+
345
+ const newHistory = history.slice(-ROLLING_DAYS);
346
+
347
+ // compute rolling averages
348
+ let avg_rd = 0, avg_disc = 0, avg_time = 0, avg_pnl = 0;
349
+ for (const entry of newHistory) {
350
+ avg_rd += (entry.score_rd || 0);
351
+ avg_disc += (entry.score_disc || 0);
352
+ avg_time += (entry.score_time || 0);
353
+ avg_pnl += (entry.pnl || 0);
354
+ }
355
+ const N = newHistory.length || 1;
356
+ avg_rd /= N;
357
+ avg_disc /= N;
358
+ avg_time /= N;
359
+ avg_pnl /= N;
360
+
361
+ // Normalize PNL: avg_pnl is a decimal return (0.005 -> 0.5%). Map it onto a roughly +/-10 scale:
362
+ // multiply by 1000 (0.005 -> 5), then clamp to [-10, 10] to limit outliers.
363
+ const normalizedPnl = Math.max(-10, Math.min(10, avg_pnl * 1000));
364
+
365
+ // Final IS (weights): discipline 40%, risk/div 30%, timing 20%, pnl 10%
366
+ const finalISRaw = (avg_disc * 0.4) + (avg_rd * 0.3) + (avg_time * 0.2) + (normalizedPnl * 0.1);
367
+ const finalIS = Math.max(0, Math.min(10, finalISRaw));
368
+
369
+ // store in prepared shard result under 'profiles'
370
+ const shardKey = `${SHARD_COLLECTION_NAME}_shard_${shardIndex}`;
371
+ shardedResults[shardKey].profiles[userId] = newHistory;
372
+
373
+ // also set the daily investor score
374
+ dailyInvestorScoreMap[userId] = finalIS;
375
+ }
376
+
377
+ return {
378
+ sharded_user_profile: shardedResults,
379
+ daily_investor_scores: dailyInvestorScoreMap
380
+ };
381
+ }
382
+
383
+ reset() {
384
+ this.dailyUserScores = {};
385
+ this.dependenciesLoaded = false;
386
+ this.priceMap = null;
387
+ this.sectorMap = null;
388
+ this.pnlScores = null;
389
+ this.dates = {};
390
+ }
391
+ }
392
+
393
+ module.exports = UserInvestmentProfile;
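To make the Risk & Diversification heuristic concrete, here is a standalone sketch (not part of the package) that reproduces the weighted dispersion math from _calculateRiskAndDivScore with made-up positions; the sectorMap, InstrumentID values, and invested amounts below are hypothetical.

const sectorMap = { 1001: 'Tech', 1002: 'Energy', 1003: 'Tech' }; // hypothetical mapping

function riskAndDivSketch(positions) {
  let totalInvested = 0, weightedRetSum = 0, weightedRetSqSum = 0, maxPosition = 0;
  const sectors = new Set();
  for (const pos of positions) {
    const invested = pos.InvestedAmount || 0;
    const ret = pos.NetProfit || 0;                       // decimal return, e.g. 0.04 = +4%
    weightedRetSum += ret * invested;
    weightedRetSqSum += ret * ret * invested;
    totalInvested += invested;
    if (invested > maxPosition) maxPosition = invested;
    sectors.add(sectorMap[pos.InstrumentID] || 'N/A');
  }
  const mean = totalInvested > 0 ? weightedRetSum / totalInvested : 0;
  const variance = Math.max(0, (totalInvested > 0 ? weightedRetSqSum / totalInvested : 0) - mean * mean);
  const std = Math.sqrt(variance);
  const dispersionRiskProxy = std > 0 ? mean / std : 0;   // cross-sectional, not time-series
  const capped = Math.max(-2, Math.min(4, dispersionRiskProxy));
  const scoreSharpe = ((capped + 2) / 6) * 10;            // linear map [-2..4] -> [0..10]
  const concentration = totalInvested > 0 ? maxPosition / totalInvested : 0;
  return { mean, std, dispersionRiskProxy, scoreSharpe, sectorCount: sectors.size, concentration };
}

console.log(riskAndDivSketch([
  { InstrumentID: 1001, InvestedAmount: 500, NetProfit: 0.04 },
  { InstrumentID: 1002, InvestedAmount: 300, NetProfit: -0.02 },
  { InstrumentID: 1003, InvestedAmount: 200, NetProfit: 0.01 },
]));

The mapping of the capped proxy is linear: a capped value of -2 scores 0, 1 scores 5, and 4 scores 10; the diversification and concentration components are then blended in at 30% weight each alongside the 40% dispersion weight.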
@@ -0,0 +1,138 @@
1
+ /**
2
+ * @fileoverview Meta-calculation (Pass 3) that analyzes "what" the crowd does
3
+ * following a net deposit signal. It determines if the new capital is used to
4
+ * buy *new* assets or *add* to existing ones.
5
+ */
6
+
7
+ const { FieldValue } = require('@google-cloud/firestore');
8
+
9
+ class CapitalDeploymentStrategy {
10
+ constructor() {
11
+ this.lookbackDays = 7;
12
+ this.correlationWindow = 3; // How many days after a signal to link behavior
13
+ this.depositSignalThreshold = -1.0; // Deposit signal fires when cash_flow_effect_proxy (from crowd-cash-flow-proxy) drops below this
14
+ }
15
+
16
+ _getDateStr(baseDate, daysAgo) {
17
+ const date = new Date(baseDate + 'T00:00:00Z');
18
+ date.setUTCDate(date.getUTCDate() - daysAgo);
19
+ return date.toISOString().slice(0, 10);
20
+ }
21
+
22
+ /**
23
+ * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
24
+ * @param {object} dependencies The shared dependencies (db, logger).
25
+ * @param {object} config The computation system configuration.
26
+ * @returns {Promise<object|null>} The analysis result or null.
27
+ */
28
+ async process(dateStr, dependencies, config) {
29
+ const { db, logger } = dependencies;
30
+ const collection = config.resultsCollection;
31
+ const resultsSub = config.resultsSubcollection || 'results';
32
+ const compsSub = config.computationsSubcollection || 'computations';
33
+
34
+ // 1. Find the most recent deposit signal
35
+ let depositSignal = null;
36
+ let depositSignalDay = null;
37
+ let refsToGet = [];
38
+
39
+ for (let i = 1; i <= this.lookbackDays; i++) {
40
+ const checkDate = this._getDateStr(dateStr, i);
41
+ refsToGet.push({
42
+ date: checkDate,
43
+ key: `signal_${checkDate}`,
44
+ ref: db.collection(collection).doc(checkDate).collection(resultsSub).doc('capital_flow').collection(compsSub).doc('crowd-cash-flow-proxy')
45
+ });
46
+ }
47
+
48
+ const signalSnapshots = await db.getAll(...refsToGet.map(r => r.ref));
49
+ const dataMap = new Map();
50
+ signalSnapshots.forEach((snap, idx) => {
51
+ if (snap.exists) dataMap.set(refsToGet[idx].key, snap.data());
52
+ });
53
+
54
+ for (let i = 1; i <= this.lookbackDays; i++) {
55
+ const checkDate = this._getDateStr(dateStr, i);
56
+ const flowData = dataMap.get(`signal_${checkDate}`);
57
+ if (flowData && flowData.cash_flow_effect_proxy < this.depositSignalThreshold) {
58
+ depositSignal = flowData;
59
+ depositSignalDay = checkDate;
60
+ break; // Found the most recent signal
61
+ }
62
+ }
63
+
64
+ if (!depositSignal) {
65
+ return {
66
+ status: 'no_deposit_signal_found',
67
+ lookback_days: this.lookbackDays
68
+ };
69
+ }
70
+
71
+ // 2. Check if today is within the correlation window
72
+ const daysSinceSignal = (new Date(dateStr) - new Date(depositSignalDay)) / (1000 * 60 * 60 * 24);
73
+
74
+ if (daysSinceSignal <= 0 || daysSinceSignal > this.correlationWindow) {
75
+ return {
76
+ status: 'outside_correlation_window',
77
+ signal_day: depositSignalDay,
78
+ days_since_signal: daysSinceSignal
79
+ };
80
+ }
81
+
82
+ // 3. Fetch deployment data for *today*
83
+ // We are correlating the *past signal* with *today's action*
84
+ refsToGet = [
85
+ {
86
+ key: 'new_alloc',
87
+ ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('capital_flow').collection(compsSub).doc('new-allocation-percentage')
88
+ },
89
+ {
90
+ key: 're_alloc',
91
+ ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('capital_flow').collection(compsSub).doc('reallocation-increase-percentage')
92
+ }
93
+ ];
94
+
95
+ const deploymentSnapshots = await db.getAll(...refsToGet.map(r => r.ref));
96
+ const newAllocData = deploymentSnapshots[0].exists ? deploymentSnapshots[0].data() : null;
97
+ const reAllocData = deploymentSnapshots[1].exists ? deploymentSnapshots[1].data() : null;
98
+
99
+ // 4. Handle "day-delay" for *this* data
100
+ if (!newAllocData || !reAllocData) {
101
+ logger.log('WARN', `[CapitalDeploymentStrategy] Missing deployment data for ${dateStr}. Allowing backfill.`);
102
+ // This is a "same-day" meta-calc, so we return null to let backfill run
103
+ return null;
104
+ }
105
+
106
+ // 5. Calculate deployment bias
107
+ const newAlloc = newAllocData.average_new_allocation_percentage || 0;
108
+ const reAlloc = reAllocData.average_reallocation_increase_percentage || 0;
109
+
110
+ const totalDeployment = newAlloc + reAlloc;
111
+ let newAssetBias = 0;
112
+ let existingAssetBias = 0;
113
+
114
+ if (totalDeployment > 0) {
115
+ newAssetBias = (newAlloc / totalDeployment) * 100;
116
+ existingAssetBias = (reAlloc / totalDeployment) * 100;
117
+ }
118
+
119
+ return {
120
+ status: 'analysis_complete',
121
+ analysis_date: dateStr,
122
+ signal_date: depositSignalDay,
123
+ days_since_signal: daysSinceSignal,
124
+ signal_deposit_proxy_pct: Math.abs(depositSignal.cash_flow_effect_proxy),
125
+ deployment_new_alloc_pct: newAlloc,
126
+ deployment_existing_alloc_pct: reAlloc,
127
+ total_deployment_pct: totalDeployment,
128
+ new_asset_bias: newAssetBias,
129
+ existing_asset_bias: existingAssetBias
130
+ };
131
+ }
132
+
133
+ // Must exist for the meta-computation runner
134
+ async getResult() { return null; }
135
+ reset() {}
136
+ }
137
+
138
+ module.exports = CapitalDeploymentStrategy;
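The deployment-bias calculation in step 5 is a share-of-total split between new allocations and top-ups of existing positions. Below is a minimal standalone sketch (illustrative only, not part of the package); the input numbers are hypothetical and stand in for average_new_allocation_percentage and average_reallocation_increase_percentage.

function deploymentBias(newAlloc, reAlloc) {
  const total = newAlloc + reAlloc;
  if (total <= 0) return { newAssetBias: 0, existingAssetBias: 0, total };
  return {
    newAssetBias: (newAlloc / total) * 100,      // share of deployment going into new assets
    existingAssetBias: (reAlloc / total) * 100,  // share added to existing positions
    total,
  };
}

// e.g. 3% average new allocation vs. 1% average top-up of existing positions
console.log(deploymentBias(3, 1)); // { newAssetBias: 75, existingAssetBias: 25, total: 4 }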
@@ -0,0 +1,92 @@
1
+ /**
2
+ * @fileoverview Meta-calculation (Pass 3) to calculate a proxy for the
3
+ * crowd's "Sharpe Ratio" (risk-adjusted return) on a per-asset basis.
4
+ * It uses the aggregates from 'pnl-distribution-per-stock' to calculate
5
+ * the standard deviation of P/L, which serves as the "risk".
6
+ */
7
+
8
+ class CrowdSharpeRatioProxy {
9
+ constructor() {}
10
+
11
+ /**
12
+ * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
13
+ * @param {object} dependencies The shared dependencies (db, logger).
14
+ * @param {object} config The computation system configuration.
15
+ * @returns {Promise<object|null>} The analysis result or null.
16
+ */
17
+ async process(dateStr, dependencies, config) {
18
+ const { db, logger } = dependencies;
19
+ const collection = config.resultsCollection;
20
+
21
+ // 1. Define dependency
22
+ const dependency = { category: 'pnl', computation: 'pnl-distribution-per-stock' };
23
+
24
+ // 2. Build ref and fetch
25
+ const docRef = db.collection(collection).doc(dateStr)
26
+ .collection('results').doc(dependency.category)
27
+ .collection('computations').doc(dependency.computation);
28
+
29
+ const snapshot = await docRef.get();
30
+
31
+ // 3. Handle the "day-delay"
32
+ if (!snapshot.exists) {
33
+ logger.log('WARN', `[CrowdSharpeRatioProxy] Missing dependency 'pnl-distribution-per-stock' for ${dateStr}. Allowing backfill.`);
34
+ return null; // Let backfill handle it
35
+ }
36
+
37
+ const data = snapshot.data();
38
+ const pnlDistribution = data.pnl_distribution_by_asset;
39
+
40
+ if (!pnlDistribution) {
41
+ logger.log('WARN', `[CrowdSharpeRatioProxy] Dependency data for ${dateStr} is missing pnl_distribution_by_asset. Skipping.`);
42
+ return null;
43
+ }
44
+
45
+ const results = {};
46
+
47
+ // 4. Calculate Sharpe Proxy for each asset
48
+ for (const ticker in pnlDistribution) {
49
+ const stats = pnlDistribution[ticker];
50
+ const N = stats.position_count;
51
+
52
+ // Need at least 2 data points to calculate variance
53
+ if (N < 2) continue;
54
+
55
+ const mean = stats.pnl_sum / N; // E(x)
56
+ const mean_sq = stats.pnl_sum_sq / N; // E(x^2)
57
+
58
+ const variance = mean_sq - (mean * mean);
59
+
60
+ // If variance is non-positive (zero spread, or negative from floating-point error), treat risk as zero
61
+ if (variance <= 0) {
62
+ results[ticker] = {
63
+ average_pnl: mean,
64
+ std_dev_pnl: 0,
65
+ sharpe_ratio_proxy: 0,
66
+ position_count: N
67
+ };
68
+ continue;
69
+ }
70
+
71
+ const std_dev = Math.sqrt(variance); // "Risk"
72
+
73
+ // Calculate Sharpe Ratio (Return / Risk)
74
+ // (Assuming 0 risk-free rate)
75
+ const sharpe_proxy = mean / std_dev;
76
+
77
+ results[ticker] = {
78
+ average_pnl: mean,
79
+ std_dev_pnl: std_dev,
80
+ sharpe_ratio_proxy: sharpe_proxy,
81
+ position_count: N
82
+ };
83
+ }
84
+
85
+ return results;
86
+ }
87
+
88
+ async getResult() { return null; }
89
+ reset() {}
90
+ }
91
+
92
+ module.exports = CrowdSharpeRatioProxy;
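The per-asset proxy above relies on the identity Var(x) = E[x^2] - E[x]^2, evaluated from the running aggregates pnl_sum and pnl_sum_sq. A standalone sketch of the same computation (not part of the package, with made-up per-position P/L values):

function sharpeProxyFromSums(pnlValues) {
  const N = pnlValues.length;
  if (N < 2) return null;                                   // same guard as the module
  const pnl_sum = pnlValues.reduce((s, x) => s + x, 0);
  const pnl_sum_sq = pnlValues.reduce((s, x) => s + x * x, 0);
  const mean = pnl_sum / N;                                  // E[x]
  const variance = pnl_sum_sq / N - mean * mean;             // E[x^2] - E[x]^2
  if (variance <= 0) return { mean, std_dev: 0, sharpe_proxy: 0 };
  const std_dev = Math.sqrt(variance);
  return { mean, std_dev, sharpe_proxy: mean / std_dev };    // 0 risk-free rate assumed
}

console.log(sharpeProxyFromSums([0.02, -0.01, 0.03, 0.00])); // mean 0.01, std_dev ~0.0158

As in the module, fewer than two positions or a non-positive variance yields no meaningful ratio, and the risk-free rate is assumed to be zero.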