bulltrackers-module 1.0.263 → 1.0.264
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/executors/StandardExecutor.js +42 -7
- package/functions/computation-system/layers/profiling.js +309 -149
- package/functions/computation-system/persistence/FirestoreUtils.js +2 -10
- package/functions/computation-system/persistence/ResultCommitter.js +137 -117
- package/functions/computation-system/tools/BuildReporter.js +7 -18
- package/functions/root-data-indexer/index.js +34 -63
- package/package.json +1 -1
package/functions/computation-system/executors/StandardExecutor.js

```diff
@@ -1,6 +1,7 @@
 /**
  * @fileoverview Executor for "Standard" (per-user) calculations.
- * UPDATED: Handles lazy loading of data references
+ * UPDATED: Handles lazy loading of data references.
+ * UPDATED: Supports Multi-Date Fan-Out Aggregation (Time Machine Mode).
  */
 const { normalizeName } = require('../utils/utils');
 const { streamPortfolioData, streamHistoryData, getPortfolioPartRefs } = require('../utils/data_loader');
@@ -39,8 +40,46 @@ class StandardExecutor {
     // 3. Stream & Process
     await StandardExecutor.streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps);
 
-    // 4. Commit
-
+    // 4. Pre-Commit Transformation for Fan-Out
+    // If a calc produced multi-date output per user, we must transpose it:
+    // FROM: UserA -> { "2024-01-01": data, "2024-01-02": data }
+    // TO:   "2024-01-01" -> { UserA: data }, "2024-01-02" -> { UserA: data }
+
+    const transformedState = {};
+    for (const [name, inst] of Object.entries(state)) {
+      const result = await inst.getResult(); // { userId: { date: data } } or { userId: data }
+      const firstUser = Object.keys(result)[0];
+
+      // Check if the inner value is a Date Map
+      // Only checks the first user as heuristic; implies uniform return type
+      if (firstUser && result[firstUser] && typeof result[firstUser] === 'object') {
+        const innerKeys = Object.keys(result[firstUser]);
+        // Check if keys look like YYYY-MM-DD
+        const isDateMap = innerKeys.length > 0 && innerKeys.every(k => /^\d{4}-\d{2}-\d{2}$/.test(k));
+
+        if (isDateMap) {
+          const transposed = {};
+          for (const [userId, dateMap] of Object.entries(result)) {
+            for (const [dateKey, dailyData] of Object.entries(dateMap)) {
+              if (!transposed[dateKey]) transposed[dateKey] = {};
+              transposed[dateKey][userId] = dailyData;
+            }
+          }
+
+          // Mock a "getResult" for the committer that returns the Transposed Map
+          transformedState[name] = {
+            manifest: inst.manifest,
+            getResult: async () => transposed
+          };
+          continue;
+        }
+      }
+      // Normal behavior
+      transformedState[name] = inst;
+    }
+
+    // 5. Commit
+    return await commitResults(transformedState, dStr, passName, config, deps, skipStatusWrite);
   }
 
   static async streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps) {
```
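The transpose is the heart of the fan-out change: per-user results keyed by date become per-date results keyed by user, so each historical day can be committed to its own document. A minimal standalone sketch of the same reshaping, with hypothetical sample data (the input shape is taken from the comments in the hunk above):

```js
// Transpose { userId: { date: data } } into { date: { userId: data } },
// mirroring the pre-commit transformation in StandardExecutor.run.
const DATE_KEY = /^\d{4}-\d{2}-\d{2}$/;

function transposeUserDates(result) {
  const transposed = {};
  for (const [userId, dateMap] of Object.entries(result)) {
    for (const [dateKey, dailyData] of Object.entries(dateMap)) {
      if (!DATE_KEY.test(dateKey)) continue; // defensively skip non-date keys
      if (!transposed[dateKey]) transposed[dateKey] = {};
      transposed[dateKey][userId] = dailyData;
    }
  }
  return transposed;
}

const sample = {
  UserA: { '2024-01-01': { pnl: 1 }, '2024-01-02': { pnl: 2 } },
  UserB: { '2024-01-01': { pnl: -3 } }
};
console.log(transposeUserDates(sample));
// { '2024-01-01': { UserA: { pnl: 1 }, UserB: { pnl: -3 } },
//   '2024-01-02': { UserA: { pnl: 2 } } }
```

Note that the executor only applies the transpose when every inner key of the first user matches `YYYY-MM-DD`; a mixed result passes through untouched. The file's remaining hunk drops stale `[FIX]` comments from the streaming loop: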
```diff
@@ -58,20 +97,16 @@ class StandardExecutor {
     const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
     const prevDateStr = prevDate.toISOString().slice(0, 10);
 
-    // [FIX] pass null if portfolioRefs is null; streamPortfolioData handles the fetch
     const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
 
     const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
-    // yesterdayPortfolioRefs are manually fetched in run(), so they are usually populated
     const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
 
     const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
-    // [FIX] Removed '&& historyRefs' check. We pass null to streamHistoryData if refs are missing, allowing it to fetch them.
     const tH_iter = (needsTradingHistory) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;
 
     let yP_chunk = {}, tH_chunk = {};
 
-    // [FIX] Ensure manual iterators are closed if loop fails
     try {
       for await (const tP_chunk of tP_iter) {
         if (yP_iter) yP_chunk = (await yP_iter.next()).value || {};
```
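The streaming loop drives today's portfolio stream with `for await` while advancing the yesterday iterator manually via `.next()`, which is why a `try` wraps it (so a failure can still close the manual iterator). A small sketch of that lockstep pattern with stub async generators (the chunk shapes are placeholders):

```js
// Lockstep iteration over two async iterators: one driven by for-await,
// the other advanced manually, as in streamAndProcess.
async function* streamChunks(label, n) {
  for (let i = 0; i < n; i++) yield { [`${label}${i}`]: true };
}

async function main() {
  const today = streamChunks('today_', 3);
  const yesterday = streamChunks('yday_', 2); // may be shorter than today's stream

  try {
    for await (const tChunk of today) {
      // Once `yesterday` is exhausted, .next() resolves { done: true, value: undefined },
      // so the `|| {}` fallback keeps the loop body uniform.
      const yChunk = (await yesterday.next()).value || {};
      console.log(Object.keys(tChunk)[0], Object.keys(yChunk)[0] || '(none)');
    }
  } finally {
    // Close the manually driven iterator even if the loop throws.
    if (yesterday.return) await yesterday.return();
  }
}

main();
```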
package/functions/computation-system/layers/profiling.js

```diff
@@ -1,6 +1,7 @@
 /**
- * @fileoverview Profiling Layer - Intelligence Engine (
+ * @fileoverview Profiling Layer - Intelligence Engine (V6)
  * Encapsulates advanced behavioral profiling, psychological scoring, and classification schemas.
+ * UPDATED: Added SmartMoneyScorer for advanced multi-factor user classification.
  */
 
 const SCHEMAS = {
@@ -21,118 +22,332 @@ const SCHEMAS = {
 };
 
 // ========================================================================
-// 1.
+// 1. SMART MONEY SCORING ENGINE (NEW)
 // ========================================================================
 
-class
+class SmartMoneyScorer {
+
   /**
-   *
-   * Checks if the user holds "dead money" positions that are hovering near breakeven
-   * for extended periods, refusing to close them.
-   * @param {Array} openPositions - Current holdings. Needs OpenDateTime (Speculators).
-   * @param {number} thresholdPct - +/- % range around 0 PnL (e.g. 2%).
-   * @param {number} minDaysHeld - Minimum days held to qualify as "Anchored".
+   * Internal Helper: Calculate Pearson Correlation
    */
-  static
-  if (!
+  static _correlation(x, y) {
+    if (!x || !y || x.length !== y.length || x.length < 2) return 0;
+    const n = x.length;
+    let sumX = 0, sumY = 0, sumXY = 0, sumX2 = 0, sumY2 = 0;
+    for (let i = 0; i < n; i++) {
+      sumX += x[i]; sumY += y[i];
+      sumXY += x[i] * y[i];
+      sumX2 += x[i] * x[i]; sumY2 += y[i] * y[i];
+    }
+    const numerator = (n * sumXY) - (sumX * sumY);
+    const denominator = Math.sqrt(((n * sumX2) - (sumX * sumX)) * ((n * sumY2) - (sumY * sumY)));
+    return (denominator === 0) ? 0 : numerator / denominator;
+  }
+
+  /**
+   * Mode 1: Portfolio-Based Scoring
+   * Heuristics:
+   * 1. Diversification (Sector/Asset count)
+   * 2. Allocation Efficiency (Correlation of Size vs Profit)
+   * 3. Shorting Competence
+   * 4. Concentration Risk (HHI)
+   */
+  static scorePortfolio(portfolio, userType, prices, mappings, math) {
+    const positions = math.extract.getPositions(portfolio, userType);
+    if (!positions || positions.length === 0) return { score: 0, label: SCHEMAS.LABELS.NEUTRAL };
+
+    let totalInvested = 0;
+    let weightedPnL = 0;
+    let shortInvested = 0;
+    let shortPnL = 0;
 
-
-
-    const
-    const
+    const weights = [];
+    const pnls = [];
+    const sectors = new Set();
+    const tickers = new Set();
 
-
-
-
-
-
+    // 1. Data Aggregation
+    for (const pos of positions) {
+      const invested = math.extract.getPositionWeight(pos, userType);
+      const pnl = math.extract.getNetProfit(pos); // %
+      const instId = math.extract.getInstrumentId(pos);
+      const isShort = math.extract.getDirection(pos) === 'Sell';
+
+      const sector = mappings.instrumentToSector[instId];
+      const ticker = mappings.instrumentToTicker[instId];
+
+      if (invested > 0) {
+        totalInvested += invested;
+        weightedPnL += (pnl * invested);
+        weights.push(invested);
+        pnls.push(pnl);
 
-
-    if (
-
+        if (sector) sectors.add(sector);
+        if (ticker) tickers.add(ticker);
+
+        if (isShort) {
+          shortInvested += invested;
+          shortPnL += (pnl * invested);
         }
       }
     }
+
+    if (totalInvested === 0) return { score: 0, label: SCHEMAS.LABELS.NEUTRAL };
+
+    // 2. Metrics Calculation
+    const avgPnL = weightedPnL / totalInvested;
 
-
+    // A. Allocation Efficiency (Do they bet big on winners?)
+    // Correlation between Invested Amount and PnL %
+    const allocEfficiency = this._correlation(weights, pnls); // -1 to 1
+
+    // B. Diversification & Concentration (HHI)
+    // Sum of squared market shares. 1.0 = Monopoly. 0.0 = Infinite.
+    let hhi = 0;
+    for (const w of weights) {
+      const share = w / totalInvested;
+      hhi += (share * share);
+    }
+
+    // C. Shorting Competence
+    const shortRatio = shortInvested / totalInvested;
+    const avgShortPnL = shortInvested > 0 ? shortPnL / shortInvested : 0;
+
+    // 3. Scoring Logic
+    let score = 50;
+
+    // Efficiency Bonus: If > 0.5, they size winners up. (+20)
+    // If < -0.3, they are bagholding losers with large size (-15)
+    if (allocEfficiency > 0.5) score += 20;
+    else if (allocEfficiency < -0.3) score -= 15;
+
+    // Profitability (The ultimate metric)
+    if (avgPnL > 5) score += 10;
+    if (avgPnL > 20) score += 10;
+    if (avgPnL < -10) score -= 10;
+    if (avgPnL < -25) score -= 15;
+
+    // Concentration Logic
+    // High Concentration (HHI > 0.3) is "Smart" ONLY if profitable (Sniper)
+    // High Concentration and unprofitable is "Dumb" (Bagholder/Gambler)
+    if (hhi > 0.3) {
+      if (avgPnL > 5) score += 10; // Sniper
+      else if (avgPnL < -5) score -= 10; // Reckless
+    }
+
+    // Diversification Logic
+    // High Sector count (>3) reduces risk penalty
+    if (sectors.size >= 4) score += 5;
+
+    // Shorting Logic
+    // Penalize speculation unless they are actually good at it
+    if (shortRatio > 0.1) {
+      if (avgShortPnL > 0) score += 10; // Smart Short
+      else score -= 10; // Failed Speculation
+    }
+
+    return {
+      score: Math.max(0, Math.min(100, score)),
+      metrics: { allocEfficiency, hhi, avgPnL, shortRatio, sectorCount: sectors.size }
+    };
   }
 
   /**
-   *
-   *
-   *
+   * Mode 2: History-Based Scoring
+   * Heuristics:
+   * 1. Win/Loss Ratio & Profit Factor
+   * 2. Asset Consistency (Revenge trading vs Specialist)
+   * 3. Entry Efficiency (Buying Lows)
+   * 4. Exit Efficiency (Selling Highs - Opportunity Cost)
+   * 5. Churn (Overtrading)
+   * 6. DCA/Entry Patterns
    */
-  static
-
-
+  static scoreHistory(historyDoc, prices, mappings, math) {
+    // Handle V2 Schema (PublicHistoryPositions)
+    const trades = historyDoc?.PublicHistoryPositions || [];
+    // Handle V1 Schema fallback if needed (though prompt implies V2)
+
+    if (trades.length < 5) return { score: 0, label: SCHEMAS.LABELS.NEUTRAL };
 
-
-
-
-
+    // Filter valid trades
+    const validTrades = trades.filter(t => t.OpenDateTime && t.CloseDateTime && t.InstrumentID);
+    if (validTrades.length < 5) return { score: 0, label: SCHEMAS.LABELS.NEUTRAL };
+
+    let wins = 0, losses = 0;
+    let totalWinPct = 0, totalLossPct = 0;
+    let entryScores = [];
+    const assetsTraded = new Map(); // ID -> { count, pnl }
+
+    // Time sorting for Churn analysis
+    validTrades.sort((a, b) => new Date(a.OpenDateTime) - new Date(b.OpenDateTime));
+    const firstDate = new Date(validTrades[0].OpenDateTime);
+    const lastDate = new Date(validTrades[validTrades.length-1].OpenDateTime);
+    const daysActive = Math.max(1, (lastDate - firstDate) / 86400000);
+
+    for (const t of validTrades) {
+      const ticker = mappings.instrumentToTicker[t.InstrumentID];
 
-
-
-
-
-
-
+      // Asset Consistency
+      if (!assetsTraded.has(t.InstrumentID)) assetsTraded.set(t.InstrumentID, { count: 0, pnl: 0 });
+      const assetStat = assetsTraded.get(t.InstrumentID);
+      assetStat.count++;
+      assetStat.pnl += t.NetProfit;
+
+      // A. Win/Loss Stats
+      if (t.NetProfit > 0) { wins++; totalWinPct += t.NetProfit; }
+      else { losses++; totalLossPct += Math.abs(t.NetProfit); }
+
+      // B. Entry Timing (Requires Price History)
+      if (ticker && prices) {
+        const priceHist = math.priceExtractor.getHistory(prices, ticker);
+        if (priceHist && priceHist.length > 0) {
+          // 1.0 = Perfect Low, 0.0 = Bought High
+          const eff = ExecutionAnalytics.calculateEfficiency(t.OpenRate, priceHist, t.OpenDateTime, t.IsBuy ? 'Buy' : 'Sell');
+          entryScores.push(eff);
+        }
       }
     }
 
-    const
-    const
+    const avgWin = wins > 0 ? totalWinPct / wins : 0;
+    const avgLoss = losses > 0 ? totalLossPct / losses : 1;
+    const profitFactor = (wins * avgWin) / Math.max(1, (losses * avgLoss));
+
+    // C. Entry Skill
+    const avgEntrySkill = entryScores.length > 0 ? math.compute.average(entryScores) : 0.5;
 
-
-
+    // D. Consistency / Specialization
+    // Do they trade 100 tickers once (Gambler) or 5 tickers 20 times (Specialist)?
+    const totalTrades = validTrades.length;
+    const uniqueAssets = assetsTraded.size;
+    const specializationRatio = 1 - (uniqueAssets / totalTrades); // Higher = More specialized
+
+    // E. Overtrading (Churn)
+    const tradesPerDay = totalTrades / daysActive;
+
+    // F. Revenge Trading Check
+    // High count on a specific asset with negative total PnL
+    let revengeScore = 0;
+    for (const [id, stat] of assetsTraded.entries()) {
+      if (stat.pnl < -20 && stat.count > 5) revengeScore += 1;
+    }
+
+    // Scoring Logic
+    let score = 50;
+
+    // Profit Factor (Primary Driver)
+    if (profitFactor > 1.2) score += 10;
+    if (profitFactor > 2.0) score += 15;
+    if (profitFactor < 0.8) score -= 15;
+
+    // Entry Efficiency
+    if (avgEntrySkill > 0.7) score += 10; // Sniper
+    if (avgEntrySkill < 0.3) score -= 10; // FOMO
+
+    // Specialization
+    if (specializationRatio > 0.6) score += 5; // Specialist bonus
+    if (specializationRatio < 0.1 && totalTrades > 20) score -= 5; // Scattergun penalty
+
+    // Churn Penalty
+    if (tradesPerDay > 10 && profitFactor < 1.0) score -= 10; // Brokerage Cash Cow
+
+    // Revenge Penalty
+    if (revengeScore > 0) score -= (revengeScore * 5);
+
+    return {
+      score: Math.max(0, Math.min(100, score)),
+      metrics: { profitFactor, avgEntrySkill, specializationRatio, tradesPerDay, revengeScore }
+    };
   }
 
   /**
-   *
-   *
-   * Losses hurt approx 2.25x more than gains feel good.
-   * @param {number} pnl - Net Profit %.
+   * Mode 3: Hybrid Scoring
+   * Merges Portfolio (Unrealized/Current) and History (Realized/Past).
   */
-  static
-
-
-
-
+  static scoreHybrid(context) {
+    const { user, prices, mappings, math } = context;
+
+    // Get Sub-Scores
+    const pScore = this.scorePortfolio(user.portfolio.today, user.type, prices, mappings, math);
+    const hScore = this.scoreHistory(user.history.today, prices, mappings, math);
+
+    let finalScore = 50;
+    let method = 'Neutral';
+
+    const hasHistory = hScore && hScore.score > 0;
+    const hasPortfolio = pScore && pScore.score > 0;
+
+    if (hasHistory && hasPortfolio) {
+      // Weighted: 60% Track Record (History), 40% Current Positioning (Portfolio)
+      finalScore = (hScore.score * 0.6) + (pScore.score * 0.4);
+      method = 'Hybrid';
+    } else if (hasHistory) {
+      finalScore = hScore.score;
+      method = 'HistoryOnly';
+    } else if (hasPortfolio) {
+      finalScore = pScore.score;
+      method = 'PortfolioOnly';
+    }
+
+    // Classification Label
+    let label = SCHEMAS.LABELS.NEUTRAL;
+    if (finalScore >= 80) label = SCHEMAS.LABELS.ELITE;
+    else if (finalScore >= 65) label = SCHEMAS.LABELS.SMART;
+    else if (finalScore <= 35) label = SCHEMAS.LABELS.GAMBLER;
+    else if (finalScore <= 50) label = SCHEMAS.LABELS.DUMB;
+
+    return {
+      totalScore: Math.round(finalScore),
+      label: label,
+      method: method,
+      components: {
+        portfolio: pScore,
+        history: hScore
+      }
+    };
+  }
+}
+
+// ========================================================================
+// 2. SUPPORTING ANALYTICS ENGINES
+// ========================================================================
+
+class CognitiveBiases {
+  static calculateAnchoringScore(openPositions, thresholdPct = 2.0, minDaysHeld = 14) {
+    if (!openPositions || openPositions.length === 0) return 0;
+    let anchoredCount = 0, validPositions = 0;
+    const now = Date.now(), msPerDay = 86400000;
+    for (const pos of openPositions) {
+      if (pos.OpenDateTime) {
+        validPositions++;
+        const ageDays = (now - new Date(pos.OpenDateTime).getTime()) / msPerDay;
+        if (ageDays > minDaysHeld && Math.abs(pos.NetProfit) < thresholdPct) { anchoredCount++; }
+      }
+    }
+    return validPositions > 0 ? (anchoredCount / validPositions) : 0;
+  }
+
+  static calculateDispositionEffect(historyTrades) {
+    let winDur = 0, winCount = 0, lossDur = 0, lossCount = 0;
+    for (const t of historyTrades) {
+      if (!t.OpenDateTime || !t.CloseDateTime) continue;
+      const dur = (new Date(t.CloseDateTime) - new Date(t.OpenDateTime)) / 3600000;
+      if (t.NetProfit > 0) { winDur += dur; winCount++; } else if (t.NetProfit < 0) { lossDur += dur; lossCount++; }
    }
+    const avgWinHold = winCount > 0 ? winDur / winCount : 0;
+    const avgLossHold = lossCount > 0 ? lossDur / lossCount : 0;
+    if (avgWinHold === 0) return 2.0;
+    return avgLossHold / avgWinHold;
  }
 }
 
 class SkillAttribution {
-  /**
-   * Calculates Selection Skill (Alpha) by comparing User PnL vs Asset Benchmark.
-   * Note: Since we don't have individual asset performance histories easily available
-   * in the user context, we use the 'Insights' global growth as a daily benchmark proxy.
-   * @param {Array} userPositions - Current open positions.
-   * @param {Object} dailyInsights - Map of InstrumentID -> Insight Data (which contains 'growth').
-   */
   static calculateSelectionAlpha(userPositions, dailyInsights) {
-    let totalAlpha = 0;
-    let count = 0;
-
+    let totalAlpha = 0, count = 0;
     for (const pos of userPositions) {
       const instrumentId = pos.InstrumentID;
-
-      // If passed as array, we find the item.
-      let insight = null;
-      if (Array.isArray(dailyInsights)) {
-        insight = dailyInsights.find(i => i.instrumentId === instrumentId);
-      }
-
+      let insight = Array.isArray(dailyInsights) ? dailyInsights.find(i => i.instrumentId === instrumentId) : null;
       if (insight && typeof insight.growth === 'number') {
-
-        // We use NetProfit as a proxy for "Performance" state.
-        // A Better proxy: Is their NetProfit > The Asset's Weekly Growth?
-        // This is a rough heuristic given schema limitations.
-
-        // If the user is long and PnL > 0, and Growth is negative, that's high alpha (Bucking the trend).
-        // Simplified: Just returning the difference.
-        const diff = pos.NetProfit - insight.growth;
-        totalAlpha += diff;
+        totalAlpha += (pos.NetProfit - insight.growth);
         count++;
       }
     }
```
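Two statistics carry most of the portfolio score above: the Pearson correlation between position size and PnL (allocation efficiency) and the Herfindahl-Hirschman index over position weights (concentration). A self-contained re-implementation with toy numbers — the package's own extractors are not used here:

```js
// Pearson correlation and HHI, as used by SmartMoneyScorer.scorePortfolio.
function correlation(x, y) {
  if (!x || !y || x.length !== y.length || x.length < 2) return 0;
  const n = x.length;
  let sumX = 0, sumY = 0, sumXY = 0, sumX2 = 0, sumY2 = 0;
  for (let i = 0; i < n; i++) {
    sumX += x[i]; sumY += y[i];
    sumXY += x[i] * y[i];
    sumX2 += x[i] * x[i]; sumY2 += y[i] * y[i];
  }
  const num = (n * sumXY) - (sumX * sumY);
  const den = Math.sqrt(((n * sumX2) - (sumX * sumX)) * ((n * sumY2) - (sumY * sumY)));
  return den === 0 ? 0 : num / den;
}

function hhi(weights) {
  const total = weights.reduce((a, b) => a + b, 0);
  return weights.reduce((acc, w) => acc + (w / total) ** 2, 0);
}

// Toy portfolio: the big positions are the profitable ones.
const weights = [500, 300, 100, 100];
const pnls = [12, 8, -2, -5];
console.log(correlation(weights, pnls).toFixed(2)); // ~0.96 -> efficiency bonus (+20)
console.log(hhi(weights).toFixed(2));               // 0.36 -> concentrated; bonus only if profitable
```

The final hunk in profiling.js rewires the legacy classifier onto this new engine: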
```diff
@@ -198,90 +413,35 @@ class AdaptiveAnalytics {
   }
 }
 
+// Legacy Wrapper for backward compatibility with older calculations
 class UserClassifier {
   static classify(context) {
-
-    const
-    const validHistory = history.filter(t => t.OpenDateTime);
-    validHistory.sort((a, b) => new Date(a.OpenDateTime) - new Date(b.OpenDateTime));
-    const portfolio = math.extract.getPositions(user.portfolio.today, user.type);
-    const summary = math.history.getSummary(user.history.today);
-    if (!summary) return { intelligence: { label: SCHEMAS.LABELS.NEUTRAL, score: 0 }, style: { primary: SCHEMAS.STYLES.INVESTOR } };
-
-    let entryScores = [];
-    const recentTrades = validHistory.slice(-20);
-    for (const t of recentTrades) {
-      const ticker = context.mappings.instrumentToTicker[t.InstrumentID];
-      const priceData = math.priceExtractor.getHistory(prices, ticker);
-      if (priceData && priceData.length > 0) { entryScores.push(ExecutionAnalytics.calculateEfficiency(t.OpenRate, priceData, t.OpenDateTime, 'Buy')); }
-    }
-    const avgEntryEff = math.compute.average(entryScores) || 0.5;
-    const dispositionSkew = Psychometrics.computeDispositionSkew(validHistory, portfolio);
-    const revengeScore = Psychometrics.detectRevengeTrading(validHistory);
-    const adaptationScore = AdaptiveAnalytics.analyzeDrawdownAdaptation(validHistory);
-
-    // New Cognitive Bias Checks
-    const anchoring = CognitiveBiases.calculateAnchoringScore(portfolio);
-    const dispositionTime = CognitiveBiases.calculateDispositionEffect(validHistory);
-
-    const riskAdjustedReturn = summary.avgLossPct === 0 ? 10 : (summary.avgProfitPct / Math.abs(summary.avgLossPct));
-    let smartScore = 50;
-    if (riskAdjustedReturn > 1.5) smartScore += 10;
-    if (riskAdjustedReturn > 3.0) smartScore += 10;
-    if (summary.winRatio > 60) smartScore += 10;
-    if (avgEntryEff > 0.7) smartScore += 10;
-    if (avgEntryEff < 0.3) smartScore -= 5;
-    if (dispositionSkew > 15) smartScore -= 20; else if (dispositionSkew < 5) smartScore += 10;
-    if (revengeScore > 0.3) smartScore -= 25;
-    if (adaptationScore > 0.5) smartScore += 5; if (adaptationScore < -0.5) smartScore -= 10;
+    // Delegate to the new robust Hybrid Scorer
+    const result = SmartMoneyScorer.scoreHybrid(context);
 
-    //
-    if (anchoring > 0.3) smartScore -= 10;
-    if (dispositionTime > 1.5) smartScore -= 10;
-
-    let label = SCHEMAS.LABELS.NEUTRAL;
-    if (smartScore >= 80) label = SCHEMAS.LABELS.ELITE;
-    else if (smartScore >= 65) label = SCHEMAS.LABELS.SMART;
-    else if (smartScore <= 30) label = SCHEMAS.LABELS.GAMBLER;
-    else if (smartScore <= 45) label = SCHEMAS.LABELS.DUMB;
-
-    const styleProfile = this.classifyStyle(validHistory, portfolio);
+    // Map new result structure to legacy structure expected by V1 calcs
     return {
-      intelligence: {
-
+      intelligence: {
+        label: result.label,
+        score: result.totalScore,
+        isSmart: result.totalScore >= 65
+      },
+      style: { primary: SCHEMAS.STYLES.INVESTOR }, // Placeholder
       metrics: {
-
-
-        revengeTendency: revengeScore,
-        riskRewardRatio: riskAdjustedReturn,
-        drawdownAdaptation: adaptationScore,
-        biasAnchoring: anchoring,
-        biasDispositionTime: dispositionTime
+        profitFactor: result.components.history?.metrics?.profitFactor || 0,
+        allocEfficiency: result.components.portfolio?.metrics?.allocEfficiency || 0
       }
     };
   }
-
-  static classifyStyle(history, portfolio) {
-    let totalMinutes = 0; let validTrades = 0;
-    history.forEach(t => { if (t.OpenDateTime && t.CloseDateTime) { const open = new Date(t.OpenDateTime); const close = new Date(t.CloseDateTime); totalMinutes += (close - open) / 60000; validTrades++; } });
-    const avgHoldTime = validTrades > 0 ? totalMinutes / validTrades : 0;
-    let baseStyle = SCHEMAS.STYLES.INVESTOR;
-    if (validTrades > 0) { if (avgHoldTime < 60) baseStyle = SCHEMAS.STYLES.SCALPER; else if (avgHoldTime < 60 * 24) baseStyle = SCHEMAS.STYLES.DAY_TRADER; else if (avgHoldTime < 60 * 24 * 7) baseStyle = SCHEMAS.STYLES.SWING_TRADER; }
-    const subStyles = new Set();
-    const assets = [...history, ...portfolio]; let leverageCount = 0;
-    assets.forEach(p => { if ((p.Leverage || 1) > 1) leverageCount++; });
-    const tradeCount = assets.length || 1;
-    if ((leverageCount / tradeCount) > 0.3) subStyles.add("Speculative"); if ((leverageCount / tradeCount) > 0.8) subStyles.add("High-Leverage");
-    return { primary: baseStyle, tags: Array.from(subStyles), avgHoldTimeMinutes: avgHoldTime };
-  }
 }
 
 module.exports = {
   SCHEMAS,
   UserClassifier,
+  SmartMoneyScorer, // <-- Exporting the new engine
   ExecutionAnalytics,
   Psychometrics,
   AdaptiveAnalytics,
-  CognitiveBiases,
-  SkillAttribution
+  CognitiveBiases,
+  SkillAttribution
 };
```
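The legacy wrapper keeps the old `intelligence`/`style` response shape while delegating the scoring itself. The blend-and-label step it now relies on can be isolated as below; the weights and thresholds come straight from `scoreHybrid`, with plain strings standing in for the `SCHEMAS.LABELS` constants:

```js
// Hybrid blend (60% history, 40% portfolio) and classification thresholds.
function blendAndLabel(historyScore, portfolioScore) {
  let finalScore = 50, method = 'Neutral';
  if (historyScore > 0 && portfolioScore > 0) {
    finalScore = (historyScore * 0.6) + (portfolioScore * 0.4); // track record outweighs positioning
    method = 'Hybrid';
  } else if (historyScore > 0) { finalScore = historyScore; method = 'HistoryOnly'; }
  else if (portfolioScore > 0) { finalScore = portfolioScore; method = 'PortfolioOnly'; }

  let label = 'NEUTRAL';
  if (finalScore >= 80) label = 'ELITE';
  else if (finalScore >= 65) label = 'SMART';
  else if (finalScore <= 35) label = 'GAMBLER';
  else if (finalScore <= 50) label = 'DUMB';

  return { totalScore: Math.round(finalScore), label, method };
}

console.log(blendAndLabel(70, 55));
// { totalScore: 64, label: 'NEUTRAL', method: 'Hybrid' } -- just under the SMART cut-off
```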
package/functions/computation-system/persistence/FirestoreUtils.js

```diff
@@ -42,18 +42,10 @@ async function commitBatchInChunks(config, deps, writes, operationName) {
   for (const write of writes) {
     let docSize = 100;
     try { if (write.data) docSize = JSON.stringify(write.data).length; } catch (e) { }
-
-    if (docSize >
-      logger.log('WARN', `[${operationName}] Large document detected (~${(docSize / 1024).toFixed(2)} KB).`);
-    }
-
-    if ((currentOpsCount + 1 > MAX_BATCH_OPS) || (currentBytesEst + docSize > MAX_BATCH_BYTES)) {
-      await commitAndReset();
-    }
-
+    if (docSize > 900 * 1024) { logger.log('WARN', `[${operationName}] Large document detected (~${(docSize / 1024).toFixed(2)} KB).`); }
+    if ((currentOpsCount + 1 > MAX_BATCH_OPS) || (currentBytesEst + docSize > MAX_BATCH_BYTES)) { await commitAndReset(); }
     const options = write.options || { merge: true };
     currentBatch.set(write.ref, write.data, options);
-
     currentOpsCount++;
     currentBytesEst += docSize;
   }
```
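For context, the loop above enforces two budgets per batch: an operation count and an estimated byte size. A generic sketch of that accumulate-and-flush pattern (the 500-op ceiling is Firestore's documented per-batch limit; the byte budget and the `commitBatch` callback are illustrative stand-ins):

```js
// Accumulate writes into chunks, flushing before either budget is exceeded.
const MAX_BATCH_OPS = 500;                 // Firestore's per-batch operation limit
const MAX_BATCH_BYTES = 9 * 1024 * 1024;   // assumed payload budget

async function commitInChunks(writes, commitBatch /* async (batch) => void */) {
  let batch = [], ops = 0, bytes = 0;
  const flush = async () => {
    if (ops === 0) return;
    await commitBatch(batch);
    batch = []; ops = 0; bytes = 0;
  };
  for (const write of writes) {
    let docSize = 100; // fallback estimate, as in the diff
    try { if (write.data) docSize = JSON.stringify(write.data).length; } catch (e) { }
    if (ops + 1 > MAX_BATCH_OPS || bytes + docSize > MAX_BATCH_BYTES) await flush();
    batch.push(write); ops++; bytes += docSize;
  }
  await flush(); // commit the remainder
}

// Usage with a stub committer: 1200 writes -> batches of 500, 500, 200.
commitInChunks(
  Array.from({ length: 1200 }, (_, i) => ({ ref: `doc${i}`, data: { i } })),
  async (batch) => console.log(`committing ${batch.length} ops`)
);
```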
package/functions/computation-system/persistence/ResultCommitter.js

```diff
@@ -2,14 +2,15 @@
  * @fileoverview Handles saving computation results with observability and Smart Cleanup.
  * UPDATED: Returns detailed failure reports AND metrics for the Audit Logger.
  * UPDATED: Stops retrying on non-transient errors.
+ * UPDATED: Supports Multi-Date Fan-Out (Time Machine Mode) with CONCURRENCY THROTTLING.
  */
 const { commitBatchInChunks } = require('./FirestoreUtils');
 const { updateComputationStatus } = require('./StatusRepository');
 const { batchStoreSchemas } = require('../utils/schema_capture');
 const { generateProcessId, PROCESS_TYPES } = require('../logger/logger');
-
 const { HeuristicValidator } = require('./ResultsValidator');
 const validationOverrides = require('../config/validation_overrides');
+const pLimit = require('p-limit'); // <--- CRITICAL IMPORT
 
 const NON_RETRYABLE_ERRORS = [
   'INVALID_ARGUMENT', // Schema/Type mismatch
@@ -26,7 +27,8 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
   const { logger, db } = deps;
   const pid = generateProcessId(PROCESS_TYPES.STORAGE, passName, dStr);
 
-
+  // SAFETY LIMIT: Only allow 10 concurrent daily writes to prevent network saturation during Fan-Out
+  const fanOutLimit = pLimit(10);
 
   for (const name in stateObj) {
     const calc = stateObj[name];
@@ -44,144 +46,88 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
     const healthCheck = HeuristicValidator.analyze(calc.manifest.name, result, overrides);
 
     if (!healthCheck.valid) {
-      // If validation failed, we consider it an anomaly but we BLOCK the write (throw error)
       runMetrics.validation.isValid = false;
       runMetrics.validation.anomalies.push(healthCheck.reason);
       throw { message: healthCheck.reason, stage: 'QUALITY_CIRCUIT_BREAKER' };
     }
 
-    // Check for minor anomalies (validation warnings that didn't fail) - optional implementation
-    // For now, we assume if valid=true, anomalies are empty unless we add warning logic later.
-
     const isEmpty = !result || (typeof result === 'object' && Object.keys(result).length === 0) || (typeof result === 'number' && result === 0);
     if (isEmpty) {
-      // Log empty success
       if (calc.manifest.hash) {
-        successUpdates[name] = {
-          hash: false,
-          category: calc.manifest.category,
-          metrics: runMetrics // Return empty metrics
-        };
+        successUpdates[name] = { hash: false, category: calc.manifest.category, metrics: runMetrics };
       }
       continue;
     }
 
-    // Calculate Key Count rough estimate
     if (typeof result === 'object') runMetrics.storage.keys = Object.keys(result).length;
 
-
-
-
-
-
-
-
-    // --- REACTIVE SELF-HEALING LOOP ---
-    let committed = false;
-    // Strategy: 1=Normal, 2=Safe (Halved), 3=Aggressive (Quartered + Key Limit)
-    const strategies = [
-      { bytes: 900 * 1024, keys: null }, // Attempt 1: Standard
-      { bytes: 450 * 1024, keys: 10000 }, // Attempt 2: High Index usage
-      { bytes: 200 * 1024, keys: 2000 } // Attempt 3: Extreme fragmentation
-    ];
-
-    let lastError = null;
-
-    for (let attempt = 0; attempt < strategies.length; attempt++) {
-      if (committed) break;
+    // --- MULTI-DATE FAN-OUT DETECTION ---
+    // If the result keys are ALL date strings (YYYY-MM-DD), we split the writes.
+    const resultKeys = Object.keys(result || {});
+    const isMultiDate = resultKeys.length > 0 && resultKeys.every(k => /^\d{4}-\d{2}-\d{2}$/.test(k));
+
+    if (isMultiDate) {
+      logger.log('INFO', `[ResultCommitter] 🕰️ Multi-Date Output detected for ${name} (${resultKeys.length} days). Throttled Fan-Out...`);
 
-
+      // Group updates by DATE. result is { "2024-01-01": { user1: ... }, "2024-01-02": { user1: ... } }
+      // We execute a fan-out commit for each date using p-limit.
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          _verified: true,
-          _shardingStrategy: attempt + 1
-        },
-        options: { merge: true }
-      });
-    }
-
-    // 3. Attempt Commit
-    await commitBatchInChunks(config, deps, updates, `${name} Results (Att ${attempt+1})`);
-
-    // Log Success
-    if (logger && logger.logStorage) {
-      logger.logStorage(pid, name, dStr, mainDocRef.path, totalSize, isSharded);
-    }
-
-    committed = true; // Exit loop
-
-    } catch (commitErr) {
-      lastError = commitErr;
-      const msg = commitErr.message || '';
-
-      const isNonRetryable = NON_RETRYABLE_ERRORS.includes(commitErr.code);
-      if (isNonRetryable) {
-        logger.log('ERROR', `[SelfHealing] ${name} encountered FATAL error (Attempt ${attempt + 1}): ${msg}. Aborting.`);
-        throw commitErr;
-      }
-
-      const isSizeError = msg.includes('Transaction too big') || msg.includes('payload is too large');
-      const isIndexError = msg.includes('too many index entries') || msg.includes('INVALID_ARGUMENT');
-
-      if (isSizeError || isIndexError) {
-        logger.log('WARN', `[SelfHealing] ${name} failed write attempt ${attempt + 1}. Retrying with tighter constraints...`, { error: msg });
-        continue; // Try next strategy
-      } else {
-        logger.log('WARN', `[SelfHealing] ${name} unknown error (Attempt ${attempt + 1}). Retrying...`, { error: msg });
-      }
+      const datePromises = resultKeys.map((historicalDate) => fanOutLimit(async () => {
+        const dailyData = result[historicalDate];
+        if (!dailyData || Object.keys(dailyData).length === 0) return;
+
+        const historicalDocRef = db.collection(config.resultsCollection)
+          .doc(historicalDate) // Use the HISTORICAL date, not dStr
+          .collection(config.resultsSubcollection)
+          .doc(calc.manifest.category)
+          .collection(config.computationsSubcollection)
+          .doc(name);
+
+        // Re-use the existing sharding logic for this specific date payload
+        await writeSingleResult(dailyData, historicalDocRef, name, historicalDate, logger, config, deps);
+      }));
+
+      await Promise.all(datePromises);
+
+      // Mark success for the Target Date (dStr) so the workflow continues
+      if (calc.manifest.hash) {
+        successUpdates[name] = {
+          hash: calc.manifest.hash,
+          category: calc.manifest.category,
+          metrics: runMetrics // Pass metrics up
+        };
      }
-    }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    } else {
+      // --- STANDARD MODE (Single Date) ---
+      const mainDocRef = db.collection(config.resultsCollection)
+        .doc(dStr)
+        .collection(config.resultsSubcollection)
+        .doc(calc.manifest.category)
+        .collection(config.computationsSubcollection)
+        .doc(name);
+
+      // Use the encapsulated write function
+      const writeStats = await writeSingleResult(result, mainDocRef, name, dStr, logger, config, deps);
+
+      runMetrics.storage.sizeBytes = writeStats.totalSize;
+      runMetrics.storage.isSharded = writeStats.isSharded;
+      runMetrics.storage.shardCount = writeStats.shardCount;
+
+      // Mark Success & Pass Metrics
+      if (calc.manifest.hash) {
+        successUpdates[name] = {
+          hash: calc.manifest.hash,
+          category: calc.manifest.category,
+          metrics: runMetrics
+        };
+      }
    }
 
     // Capture Schema
     if (calc.manifest.class.getSchema) {
       const { class: _cls, ...safeMetadata } = calc.manifest;
-      schemas.push({
-        name,
-        category: calc.manifest.category,
-        schema: calc.manifest.class.getSchema(),
-        metadata: safeMetadata
-      });
+      schemas.push({ name, category: calc.manifest.category, schema: calc.manifest.class.getSchema(), metadata: safeMetadata });
     }
 
     // Cleanup Migration
```
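The fan-out leans on `p-limit` so that at most ten per-day writes are in flight at once. A minimal sketch of that throttling pattern with a stubbed writer (payloads and the `writeOneDay` callback are illustrative):

```js
const pLimit = require('p-limit');

// Cap concurrency at 10, matching commitResults' fanOutLimit.
const fanOutLimit = pLimit(10);

async function fanOut(resultByDate, writeOneDay /* async (date, data) => void */) {
  await Promise.all(
    Object.keys(resultByDate).map((date) => fanOutLimit(async () => {
      const dailyData = resultByDate[date];
      if (!dailyData || Object.keys(dailyData).length === 0) return; // skip empty days
      await writeOneDay(date, dailyData);
    }))
  );
}

// Even with dozens of dates queued, at most 10 writers run concurrently.
fanOut(
  { '2024-01-01': { UserA: 1 }, '2024-01-02': { UserA: 2 }, '2024-01-03': {} },
  async (date, data) => console.log('write', date, data)
);
```

The final ResultCommitter hunk extracts the sharded write-and-retry logic into the `writeSingleResult` helper that both modes above now call: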
```diff
@@ -210,6 +156,80 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
   return { successUpdates, failureReport };
 }
 
+/**
+ * Encapsulated write logic for reuse in Fan-Out.
+ * Handles sharding strategy and retries.
+ */
+async function writeSingleResult(result, docRef, name, dateContext, logger, config, deps) {
+  // Strategy: 1=Normal, 2=Safe (Halved), 3=Aggressive (Quartered + Key Limit)
+  const strategies = [
+    { bytes: 900 * 1024, keys: null },  // Attempt 1: Standard
+    { bytes: 450 * 1024, keys: 10000 }, // Attempt 2: High Index usage
+    { bytes: 200 * 1024, keys: 2000 }   // Attempt 3: Extreme fragmentation
+  ];
+
+  let committed = false;
+  let lastError = null;
+  let finalStats = { totalSize: 0, isSharded: false, shardCount: 1 };
+
+  for (let attempt = 0; attempt < strategies.length; attempt++) {
+    if (committed) break;
+
+    const constraints = strategies[attempt];
+
+    try {
+      // 1. Prepare Shards with current constraints
+      const updates = await prepareAutoShardedWrites(result, docRef, logger, constraints.bytes, constraints.keys);
+
+      // Stats
+      const pointer = updates.find(u => u.data._completed === true);
+      finalStats.isSharded = pointer && pointer.data._sharded === true;
+      finalStats.shardCount = finalStats.isSharded ? (pointer.data._shardCount || 1) : 1;
+      finalStats.totalSize = updates.reduce((acc, u) => acc + (u.data ? JSON.stringify(u.data).length : 0), 0);
+
+      // 2. Attempt Commit
+      await commitBatchInChunks(config, deps, updates, `${name}::${dateContext} (Att ${attempt+1})`);
+
+      // Log Success
+      if (logger && logger.logStorage) {
+        logger.logStorage(null, name, dateContext, docRef.path, finalStats.totalSize, finalStats.isSharded);
+      }
+
+      committed = true; // Exit loop
+
+    } catch (commitErr) {
+      lastError = commitErr;
+      const msg = commitErr.message || '';
+
+      const isNonRetryable = NON_RETRYABLE_ERRORS.includes(commitErr.code);
+      if (isNonRetryable) {
+        logger.log('ERROR', `[SelfHealing] ${name} encountered FATAL error (Attempt ${attempt + 1}): ${msg}. Aborting.`);
+        throw commitErr;
+      }
+
+      const isSizeError = msg.includes('Transaction too big') || msg.includes('payload is too large');
+      const isIndexError = msg.includes('too many index entries') || msg.includes('INVALID_ARGUMENT');
+
+      if (isSizeError || isIndexError) {
+        logger.log('WARN', `[SelfHealing] ${name} on ${dateContext} failed write attempt ${attempt + 1}. Retrying with tighter constraints...`, { error: msg });
+        continue; // Try next strategy
+      } else {
+        logger.log('WARN', `[SelfHealing] ${name} on ${dateContext} unknown error (Attempt ${attempt + 1}). Retrying...`, { error: msg });
+      }
+    }
+  }
+
+  if (!committed) {
+    throw {
+      message: `Exhausted sharding strategies for ${name} on ${dateContext}. Last error: ${lastError?.message}`,
+      stack: lastError?.stack,
+      stage: 'SHARDING_LIMIT_EXCEEDED'
+    };
+  }
+
+  return finalStats;
+}
+
 /**
  * Deletes result documents from a previous category location.
  */
```
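One detail worth flagging in `writeSingleResult`: document sizes are estimated with `JSON.stringify(...).length`, which counts UTF-16 code units rather than encoded bytes. A short sketch of the difference (the `Buffer` variant is a possible refinement, not what the package does):

```js
// String length vs. encoded byte length for size estimation.
const payload = { note: 'naïve 🕰️ sizing' };
const json = JSON.stringify(payload);

console.log(json.length);                     // UTF-16 code units
console.log(Buffer.byteLength(json, 'utf8')); // actual UTF-8 bytes in Node

// .length undercounts for non-ASCII data, but the 900 * 1024 threshold
// leaves headroom below Firestore's ~1 MiB document limit, so the
// estimate still works as a conservative trigger.
```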
package/functions/computation-system/tools/BuildReporter.js

```diff
@@ -38,9 +38,6 @@ async function ensureBuildReport(config, dependencies, manifest) {
 
   // Run generation. This function handles writing the 'latest' document with FULL data.
   await generateBuildReport(config, dependencies, manifest, 90, buildId);
-
-  // [FIX] REMOVED: The redundant write that was overwriting the full report with just metadata.
-  // The generateBuildReport function now serves as the single source of truth for writing 'latest'.
 
 } catch (e) {
   logger.log('ERROR', `[BuildReporter] Auto-run check failed: ${e.message}`);
@@ -57,23 +54,23 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
   logger.log('INFO', `[BuildReporter] Generating Build Report: ${buildId} (Scope: ${daysBack} days)...`);
 
   // 1. Determine Date Range
-  const today
+  const today = new Date();
   const startDate = new Date();
   startDate.setDate(today.getDate() - daysBack);
 
   const datesToCheck = getExpectedDateStrings(startDate, today);
-  const manifestMap
+  const manifestMap = new Map(manifest.map(c => [normalizeName(c.name), c]));
 
   const reportData = {
     buildId,
-    packageVersion: packageVersion,
+    packageVersion: packageVersion,
     generatedAt: new Date().toISOString(),
     summary: {},
     dates: {}
   };
 
   let totalReRuns = 0;
-  let totalNew
+  let totalNew = 0;
 
   // 2. PARALLEL PROCESSING
   const limit = pLimit(20);
@@ -106,17 +103,13 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
   // If we fetched prevStatus, it's at index 2
   const prevDailyStatus = (prevDateStr && results[2]) ? results[2] : (prevDateStr ? {} : null);
 
-  const rootDataStatus = availability ? availability.status : {
-    hasPortfolio: false, hasHistory: false, hasSocial: false, hasInsights: false, hasPrices: false
-  };
+  const rootDataStatus = availability ? availability.status : { hasPortfolio: false, hasHistory: false, hasSocial: false, hasInsights: false, hasPrices: false };
 
   // D. Run Logic Analysis
   const analysis = analyzeDateExecution(dateStr, manifest, rootDataStatus, dailyStatus, manifestMap, prevDailyStatus);
 
   // E. Format Findings
-  const dateSummary = {
-    willRun: [], willReRun: [], blocked: [], impossible: []
-  };
+  const dateSummary = { willRun: [], willReRun: [], blocked: [], impossible: [] };
 
   analysis.runnable.forEach (item => dateSummary.willRun.push ({ name: item.name, reason: "New / No Previous Record" }));
   analysis.reRuns.forEach (item => dateSummary.willReRun.push ({ name: item.name, reason: item.previousCategory ? "Migration" : "Hash Mismatch" }));
@@ -156,11 +149,7 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
   await reportRef.set(reportData);
 
   // 5. Update 'latest' pointer
-
-  await db.collection('computation_build_records').doc('latest').set({
-    ...reportData,
-    note: "Latest build report pointer."
-  });
+  await db.collection('computation_build_records').doc('latest').set({ ...reportData, note: "Latest build report pointer." });
 
   logger.log('SUCCESS', `[BuildReporter] Report ${buildId} saved. Re-runs: ${totalReRuns}, New: ${totalNew}.`);
 
```
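BuildReporter derives its scan window from `getExpectedDateStrings(startDate, today)`, a helper that is not shown in this diff. A plausible sketch of what such a generator does, assumed from how its output is consumed (the indexer below uses the identical loop inline):

```js
// Hypothetical stand-in for getExpectedDateStrings: every YYYY-MM-DD
// from start to end, inclusive.
function getExpectedDateStrings(start, end) {
  const out = [];
  for (let d = new Date(start); d <= end; d.setDate(d.getDate() + 1)) {
    out.push(d.toISOString().slice(0, 10));
  }
  return out;
}

const today = new Date('2024-03-05T00:00:00Z');
const startDate = new Date(today);
startDate.setDate(today.getDate() - 3); // daysBack = 3

console.log(getExpectedDateStrings(startDate, today));
// With TZ=UTC: [ '2024-03-02', '2024-03-03', '2024-03-04', '2024-03-05' ]
// (mixing local setDate with UTC toISOString makes the boundaries timezone-sensitive)
```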
package/functions/root-data-indexer/index.js

```diff
@@ -5,14 +5,12 @@
  */
 
 const { FieldValue } = require('@google-cloud/firestore');
-const pLimit
+const pLimit = require('p-limit');
 
 // Hardcoded verification blocks as per logic requirements
 const CANARY_BLOCK_ID = '19M';
-const CANARY_PART_ID
-
-// [FIX] Hardcoded to 'shard_0' based on your confirmed data path (/asset_prices/shard_0)
-const PRICE_SHARD_ID = 'shard_0';
+const CANARY_PART_ID = 'part_0';
+const PRICE_SHARD_ID = 'shard_0';
 
 /**
  * Main pipe: pipe.maintenance.runRootDataIndexer
@@ -25,36 +23,21 @@ exports.runRootDataIndexer = async (config, dependencies) => {
     collections
   } = config;
 
-  // [FIX] Hardcode the collection name to ignore any incorrect config values
   const PRICE_COLLECTION_NAME = 'asset_prices';
-
   logger.log('INFO', '[RootDataIndexer] Starting Root Data Availability Scan...');
-
-  // 1. Pre-fetch Price Data Availability (Optimization)
   const priceAvailabilitySet = new Set();
-
-  // --- DEBUGGING START ---
   logger.log('INFO', `[RootDataIndexer] DEBUG: Attempting to fetch price shard. Collection: "${PRICE_COLLECTION_NAME}", Doc ID: "${PRICE_SHARD_ID}"`);
-  // --- DEBUGGING END ---
-
   try {
-    // [FIX] Use the hardcoded collection name
     const priceShardRef = db.collection(PRICE_COLLECTION_NAME).doc(PRICE_SHARD_ID);
     const priceSnap = await priceShardRef.get();
-
     if (priceSnap.exists) {
-      const data
+      const data = priceSnap.data();
       const instruments = Object.values(data);
-
-      // --- DEBUGGING START ---
       logger.log('INFO', `[RootDataIndexer] DEBUG: Shard document found. Contains ${instruments.length} instrument entries.`);
-
       if (instruments.length > 0) {
-        // Log the structure of the first instrument found to verify schema match
         const sampleKey = Object.keys(data)[0];
         const sampleVal = data[sampleKey];
         logger.log('INFO', `[RootDataIndexer] DEBUG: Sample Instrument Data (ID: ${sampleKey}):`, sampleVal);
-
         if (!sampleVal.prices) {
           logger.log('WARN', `[RootDataIndexer] DEBUG: ⚠️ Sample instrument is MISSING the 'prices' field! Available fields: ${Object.keys(sampleVal).join(', ')}`);
         } else {
@@ -64,23 +47,10 @@ exports.runRootDataIndexer = async (config, dependencies) => {
       } else {
         logger.log('WARN', '[RootDataIndexer] DEBUG: Shard document exists but appears empty (0 instruments found).');
       }
-
-
-      // Iterate over all instruments in this shard to find any available dates
-      Object.values(data).forEach(instrument => {
-        if (instrument.prices) {
-          Object.keys(instrument.prices).forEach(dateKey => {
-            // Validate format YYYY-MM-DD
-            if (/^\d{4}-\d{2}-\d{2}$/.test(dateKey)) {
-              priceAvailabilitySet.add(dateKey);
-            }
-          });
-        }
-      });
+
+      Object.values(data).forEach(instrument => { if (instrument.prices) { Object.keys(instrument.prices).forEach(dateKey => { if (/^\d{4}-\d{2}-\d{2}$/.test(dateKey)) { priceAvailabilitySet.add(dateKey); } }); } });
     } else {
-      // --- DEBUGGING START ---
       logger.log('ERROR', `[RootDataIndexer] DEBUG: 🛑 FATAL: Document "${PRICE_SHARD_ID}" does NOT exist in collection "${PRICE_COLLECTION_NAME}". Price availability will be false for all dates.`);
-      // --- DEBUGGING END ---
     }
     logger.log('INFO', `[RootDataIndexer] Loaded price availability map. Found prices for ${priceAvailabilitySet.size} unique dates.`);
   } catch (e) {
@@ -89,18 +59,16 @@ exports.runRootDataIndexer = async (config, dependencies) => {
 
   // 2. Determine Date Range (Earliest -> Tomorrow)
   const start = new Date(earliestDate || '2023-01-01');
-  const end
+  const end = new Date();
   end.setDate(end.getDate() + 1); // Look ahead 1 day
 
   const datesToScan = [];
-  for (let d = new Date(start); d <= end; d.setDate(d.getDate() + 1)) {
-    datesToScan.push(d.toISOString().slice(0, 10));
-  }
+  for (let d = new Date(start); d <= end; d.setDate(d.getDate() + 1)) { datesToScan.push(d.toISOString().slice(0, 10)); }
 
   logger.log('INFO', `[RootDataIndexer] Scanning ${datesToScan.length} dates from ${datesToScan[0]} to ${datesToScan[datesToScan.length-1]}`);
 
   // 3. Scan in Parallel
-  const limit
+  const limit = pLimit(20); // Concurrent date checks
   let updatesCount = 0;
 
   const promises = datesToScan.map(dateStr => limit(async () => {
@@ -111,17 +79,17 @@ exports.runRootDataIndexer = async (config, dependencies) => {
 
       // Defaults
       hasPortfolio: false,
-      hasHistory:
-      hasSocial:
-      hasInsights:
-      hasPrices:
+      hasHistory: false,
+      hasSocial: false,
+      hasInsights: false,
+      hasPrices: false,
 
       // Detailed breakdown
       details: {
-        normalPortfolio:
+        normalPortfolio: false,
         speculatorPortfolio: false,
-        normalHistory:
-        speculatorHistory:
+        normalHistory: false,
+        speculatorHistory: false
       }
     };
 
@@ -165,32 +133,35 @@ exports.runRootDataIndexer = async (config, dependencies) => {
 
     // --- EXECUTE CHECKS ---
     const [
-      normPortSnap,
-
-
+      normPortSnap,
+      specPortSnap,
+      normHistSnap,
+      specHistSnap,
+      insightsSnap,
+      socialQuerySnap
     ] = await Promise.all([
-      normPortRef.get(),
-
+      normPortRef.get(),
+      specPortRef.get(),
+      normHistRef.get(),
+      specHistRef.get(),
       insightsRef.get(),
       socialPostsRef.limit(1).get()
     ]);
 
     // Evaluate Findings
-    availability.details.normalPortfolio
+    availability.details.normalPortfolio = normPortSnap.exists;
     availability.details.speculatorPortfolio = specPortSnap.exists;
-    availability.hasPortfolio
+    availability.hasPortfolio = normPortSnap.exists || specPortSnap.exists;
 
-    availability.details.normalHistory
-    availability.details.speculatorHistory
-    availability.hasHistory
+    availability.details.normalHistory = normHistSnap.exists;
+    availability.details.speculatorHistory = specHistSnap.exists;
+    availability.hasHistory = normHistSnap.exists || specHistSnap.exists;
 
-    availability.hasInsights
-    availability.hasSocial
+    availability.hasInsights = insightsSnap.exists;
+    availability.hasSocial = !socialQuerySnap.empty;
 
-
-    availability.hasPrices = priceAvailabilitySet.has(dateStr);
+    availability.hasPrices = priceAvailabilitySet.has(dateStr);
 
-    // Write to Index
     await db.collection(availabilityCollection).doc(dateStr).set(availability);
    updatesCount++;
 
```
|
|
@@ -111,17 +79,17 @@ exports.runRootDataIndexer = async (config, dependencies) => {
|
|
|
111
79
|
|
|
112
80
|
// Defaults
|
|
113
81
|
hasPortfolio: false,
|
|
114
|
-
hasHistory:
|
|
115
|
-
hasSocial:
|
|
116
|
-
hasInsights:
|
|
117
|
-
hasPrices:
|
|
82
|
+
hasHistory: false,
|
|
83
|
+
hasSocial: false,
|
|
84
|
+
hasInsights: false,
|
|
85
|
+
hasPrices: false,
|
|
118
86
|
|
|
119
87
|
// Detailed breakdown
|
|
120
88
|
details: {
|
|
121
|
-
normalPortfolio:
|
|
89
|
+
normalPortfolio: false,
|
|
122
90
|
speculatorPortfolio: false,
|
|
123
|
-
normalHistory:
|
|
124
|
-
speculatorHistory:
|
|
91
|
+
normalHistory: false,
|
|
92
|
+
speculatorHistory: false
|
|
125
93
|
}
|
|
126
94
|
};
|
|
127
95
|
|
|
@@ -165,32 +133,35 @@ exports.runRootDataIndexer = async (config, dependencies) => {
|
|
|
165
133
|
|
|
166
134
|
// --- EXECUTE CHECKS ---
|
|
167
135
|
const [
|
|
168
|
-
normPortSnap,
|
|
169
|
-
|
|
170
|
-
|
|
136
|
+
normPortSnap,
|
|
137
|
+
specPortSnap,
|
|
138
|
+
normHistSnap,
|
|
139
|
+
specHistSnap,
|
|
140
|
+
insightsSnap,
|
|
141
|
+
socialQuerySnap
|
|
171
142
|
] = await Promise.all([
|
|
172
|
-
normPortRef.get(),
|
|
173
|
-
|
|
143
|
+
normPortRef.get(),
|
|
144
|
+
specPortRef.get(),
|
|
145
|
+
normHistRef.get(),
|
|
146
|
+
specHistRef.get(),
|
|
174
147
|
insightsRef.get(),
|
|
175
148
|
socialPostsRef.limit(1).get()
|
|
176
149
|
]);
|
|
177
150
|
|
|
178
151
|
// Evaluate Findings
|
|
179
|
-
availability.details.normalPortfolio
|
|
152
|
+
availability.details.normalPortfolio = normPortSnap.exists;
|
|
180
153
|
availability.details.speculatorPortfolio = specPortSnap.exists;
|
|
181
|
-
availability.hasPortfolio
|
|
154
|
+
availability.hasPortfolio = normPortSnap.exists || specPortSnap.exists;
|
|
182
155
|
|
|
183
|
-
availability.details.normalHistory
|
|
184
|
-
availability.details.speculatorHistory
|
|
185
|
-
availability.hasHistory
|
|
156
|
+
availability.details.normalHistory = normHistSnap.exists;
|
|
157
|
+
availability.details.speculatorHistory = specHistSnap.exists;
|
|
158
|
+
availability.hasHistory = normHistSnap.exists || specHistSnap.exists;
|
|
186
159
|
|
|
187
|
-
availability.hasInsights
|
|
188
|
-
availability.hasSocial
|
|
160
|
+
availability.hasInsights = insightsSnap.exists;
|
|
161
|
+
availability.hasSocial = !socialQuerySnap.empty;
|
|
189
162
|
|
|
190
|
-
|
|
191
|
-
availability.hasPrices = priceAvailabilitySet.has(dateStr);
|
|
163
|
+
availability.hasPrices = priceAvailabilitySet.has(dateStr);
|
|
192
164
|
|
|
193
|
-
// Write to Index
|
|
194
165
|
await db.collection(availabilityCollection).doc(dateStr).set(availability);
|
|
195
166
|
updatesCount++;
|
|
196
167
|
|