bulltrackers-module 1.0.207 → 1.0.209
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,10 +1,10 @@
 /**
  * FIXED: computation_controller.js
- * V4.
+ * V4.2: Added InsightsExtractor and UserClassifier to Context
  */
 
-const { DataExtractor, HistoryExtractor, MathPrimitives, Aggregators, Validators, SCHEMAS, SignalPrimitives, DistributionAnalytics, TimeSeries, priceExtractor } = require('../layers/math_primitives');
-const { loadDailyInsights, loadDailySocialPostInsights, getRelevantShardRefs, getPriceShardRefs } = require('../utils/data_loader');
+const { DataExtractor, HistoryExtractor, MathPrimitives, Aggregators, Validators, SCHEMAS, SignalPrimitives, DistributionAnalytics, TimeSeries, priceExtractor, InsightsExtractor, UserClassifier } = require('../layers/math_primitives');
+const { loadDailyInsights, loadDailySocialPostInsights, getRelevantShardRefs, getPriceShardRefs } = require('../utils/data_loader');
 
 class DataLoader {
   constructor(config, dependencies) {
@@ -64,7 +64,7 @@ class DataLoader {
   }
 }
 
-class ContextBuilder {
+class ContextBuilder {
   static buildPerUserContext(options) {
     const { todayPortfolio, yesterdayPortfolio, todayHistory, yesterdayHistory, userId, userType, dateStr, metadata, mappings, insights, socialData, computedDependencies, previousComputedDependencies, config, deps } = options;
     return {
@@ -73,14 +73,27 @@ class ContextBuilder { //TODO, THE MATH EXTRACT HERE COULD SURELY BE DYNAMIC LIS
       insights: { today: insights?.today, yesterday: insights?.yesterday },
       social: { today: socialData?.today, yesterday: socialData?.yesterday },
       mappings: mappings || {},
-      math: {
+      math: {
+        extract: DataExtractor,
+        history: HistoryExtractor,
+        compute: MathPrimitives,
+        aggregate: Aggregators,
+        validate: Validators,
+        signals: SignalPrimitives,
+        schemas: SCHEMAS,
+        distribution : DistributionAnalytics,
+        TimeSeries: TimeSeries,
+        priceExtractor : priceExtractor,
+        insights: InsightsExtractor, // Mapped for new Meta calcs
+        classifier: UserClassifier // Mapped for Smart/Dumb logic
+      },
       computed: computedDependencies || {},
       previousComputed: previousComputedDependencies || {},
       meta: metadata, config, deps
     };
   }
 
-  static buildMetaContext(options) {
+  static buildMetaContext(options) {
     const { dateStr, metadata, mappings, insights, socialData, prices, computedDependencies, previousComputedDependencies, config, deps } = options;
     return {
       date: { today: dateStr },
@@ -88,7 +101,20 @@ class ContextBuilder { //TODO, THE MATH EXTRACT HERE COULD SURELY BE DYNAMIC LIS
       social: { today: socialData?.today, yesterday: socialData?.yesterday },
       prices: prices || {},
       mappings: mappings || {},
-      math: {
+      math: {
+        extract: DataExtractor,
+        history: HistoryExtractor,
+        compute: MathPrimitives,
+        aggregate: Aggregators,
+        validate: Validators,
+        signals: SignalPrimitives,
+        schemas: SCHEMAS,
+        distribution: DistributionAnalytics,
+        TimeSeries: TimeSeries,
+        priceExtractor : priceExtractor,
+        insights: InsightsExtractor, // Mapped for new Meta calcs
+        classifier: UserClassifier // Mapped for Smart/Dumb logic
+      },
       computed: computedDependencies || {},
       previousComputed: previousComputedDependencies || {},
       meta: metadata, config, deps
@@ -159,4 +185,4 @@ class ComputationController {
   }
 }
 
-module.exports = { ComputationController };
+module.exports = { ComputationController };
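Note: with the change above, every per-user and meta computation context carries the math toolkit under context.math, now including the two new helpers. A minimal consumption-side sketch follows; the calculation shape and the exact context paths are assumptions for illustration, not taken from this diff:

    // Illustrative only: how a computation module could reach the newly injected helpers.
    function exampleCalc(context) {
      const { insights, classifier } = context.math; // InsightsExtractor and UserClassifier, wired by ContextBuilder above
      const insight = insights.getInsightForInstrument(context.insights.today, 10000); // assumes today's insights arrive as an array
      return {
        netOwnershipChange: insights.getNetOwnershipChange(insight),
        style: classifier.classifyStyle(60 * 36) // 36h average hold -> 'Swing Trader'
      };
    }

The next group of hunks (beginning @@ -2,15 +2,121 @@) comes from the math layer module that the controller requires as '../layers/math_primitives'.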
@@ -2,15 +2,121 @@
  * @fileoverview Math Layer - Single Source of Truth (V3 Final)
  * Encapsulates Schema Knowledge, Mathematical Primitives, and Signal Extractors.
  * * STRICT COMPLIANCE:
- * - Adheres to 'schema.md' definitions
+ * - Adheres to 'schema.md' definitions.
  * - standardizes access to P&L, Weights, and Rates.
- * - Provides safe fallbacks for all fields.
  */
 
 const SCHEMAS = {
-  USER_TYPES: { NORMAL: 'normal', SPECULATOR: 'speculator' }
+  USER_TYPES: { NORMAL: 'normal', SPECULATOR: 'speculator' },
+  STYLES: { DAY_TRADER: 'Day Trader', SWING_TRADER: 'Swing Trader', INVESTOR: 'Investor' },
+  LABELS: { SMART: 'Smart Money', DUMB: 'Dumb Money', NEUTRAL: 'Neutral' }
 };
 
+class InsightsExtractor {
+  /**
+   * Extracts the raw array of insight objects from the context.
+   * Checks for standard context injection paths.
+   */
+  static getInsights(context) {
+    // Support multiple potential injection paths depending on controller version
+    return context.insights || context.daily_instrument_insights || [];
+  }
+
+  /**
+   * returns the specific insight object for a given instrument ID.
+   */
+  static getInsightForInstrument(insights, instrumentId) {
+    if (!insights || !Array.isArray(insights)) return null;
+    return insights.find(i => i.instrumentId === instrumentId) || null;
+  }
+
+  // --- Standard Metrics ---
+
+  static getTotalOwners(insight) {
+    return insight ? (insight.total || 0) : 0;
+  }
+
+  static getLongPercent(insight) {
+    return insight ? (insight.buy || 0) : 0;
+  }
+
+  static getShortPercent(insight) {
+    return insight ? (insight.sell || 0) : 0;
+  }
+
+  static getGrowthPercent(insight) {
+    return insight ? (insight.growth || 0) : 0;
+  }
+
+  // --- Derived Counts (Estimated) ---
+
+  static getLongCount(insight) {
+    const total = this.getTotalOwners(insight);
+    const buyPct = this.getLongPercent(insight);
+    return Math.floor(total * (buyPct / 100));
+  }
+
+  static getShortCount(insight) {
+    const total = this.getTotalOwners(insight);
+    const sellPct = this.getShortPercent(insight);
+    return Math.floor(total * (sellPct / 100));
+  }
+
+  /**
+   * Calculates the net change in users from yesterday based on growth %.
+   * Formula: NetChange = Total - (Total / (1 + Growth/100))
+   */
+  static getNetOwnershipChange(insight) {
+    const total = this.getTotalOwners(insight);
+    const growth = this.getGrowthPercent(insight);
+    if (total === 0) return 0;
+
+    // Reverse engineer yesterday's count
+    // Today = Yesterday * (1 + growth)
+    // Yesterday = Today / (1 + growth)
+    const prevTotal = total / (1 + (growth / 100)); // TODO: Check precision issues
+    return Math.round(total - prevTotal);
+  }
+}
+
+class UserClassifier {
+  /**
+   * Classifies a user as Smart/Dumb based on Win Rate and Profitability.
+   * @param {Object} historySummary - Result from HistoryExtractor.getSummary()
+   */
+  static classifySmartDumb(historySummary) {
+    if (!historySummary || historySummary.totalTrades < 5) return SCHEMAS.LABELS.NEUTRAL; // Insufficient data
+
+    const { winRatio, avgProfitPct, avgLossPct } = historySummary;
+
+    // 1. The "Consistent Winner" (Smart)
+    if (winRatio > 60 && avgProfitPct > Math.abs(avgLossPct)) return SCHEMAS.LABELS.SMART;
+
+    // 2. The "Sniper" (Smart - Low Win Rate but huge winners)
+    if (winRatio > 30 && avgProfitPct > (Math.abs(avgLossPct) * 2.5)) return SCHEMAS.LABELS.SMART;
+
+    // 3. The "Bagholder" (Dumb - High win rate but one loss wipes them out)
+    if (winRatio > 80 && (Math.abs(avgLossPct) > avgProfitPct * 4)) return SCHEMAS.LABELS.DUMB;
+
+    // 4. The "Gambler" (Dumb - Losing money consistently)
+    if (winRatio < 40 && avgProfitPct < Math.abs(avgLossPct)) return SCHEMAS.LABELS.DUMB;
+
+    return SCHEMAS.LABELS.NEUTRAL;
+  }
+
+  /**
+   * Classifies trading style based on average holding time.
+   * @param {number} avgHoldingMinutes
+   */
+  static classifyStyle(avgHoldingMinutes) {
+    if (avgHoldingMinutes <= 0) return SCHEMAS.STYLES.INVESTOR; // Default
+
+    if (avgHoldingMinutes < 60 * 24) return SCHEMAS.STYLES.DAY_TRADER; // < 1 Day
+    if (avgHoldingMinutes < 60 * 24 * 30) return SCHEMAS.STYLES.SWING_TRADER; // < 1 Month
+    return SCHEMAS.STYLES.INVESTOR; // > 1 Month
+  }
+}
+
 class DataExtractor { // For generic access of data types
   // ========================================================================
   // 1. COLLECTION ACCESSORS
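Note: the hunk above adds the two classes the controller now injects. Worked numbers for the derived-count and net-change formulas, plus the classifier thresholds (the sample data is fabricated for illustration):

    const { InsightsExtractor, UserClassifier } = require('../layers/math_primitives');

    // Fabricated insight: 1,200 owners, 68% long, +5% owner growth
    const insight = { instrumentId: 10000, total: 1200, buy: 68, sell: 32, growth: 5 };

    InsightsExtractor.getLongCount(insight);          // floor(1200 * 0.68) = 816
    InsightsExtractor.getNetOwnershipChange(insight); // 1200 - 1200/1.05, rounded to 57

    // winRatio 65% with avg win 4.2% vs avg loss -2.1% trips rule 1 ("Consistent Winner")
    UserClassifier.classifySmartDumb({ totalTrades: 40, winRatio: 65, avgProfitPct: 4.2, avgLossPct: -2.1 }); // 'Smart Money'
    UserClassifier.classifyStyle(3 * 24 * 60); // 3-day average hold -> 'Swing Trader'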
@@ -124,15 +230,12 @@ class DataExtractor { // For generic access of data types
   static getPortfolioDailyPnl(portfolio, userType) {
     if (!portfolio) return 0;
 
-    // 1. Speculator (Explicit 'NetProfit' field on root)
-    if (userType === SCHEMAS.USER_TYPES.SPECULATOR) {
-
-      return portfolio.NetProfit || 0; // Data for speculators only works when we are looking at the ticker-specific data, or the trade history data which is agnostic on user type
+    // 1. Speculator (Explicit 'NetProfit' field on root)
+    if (userType === SCHEMAS.USER_TYPES.SPECULATOR) {
+      return portfolio.NetProfit || 0;
     }
 
-    // 2. Normal (Aggregated Calculation)
-    // Logic: Sum(Value - Invested) across the 'InstrumentType' breakdown
-    // This gives the net performance change for the day relative to the portfolio.
+    // 2. Normal (Aggregated Calculation)
     if (portfolio.AggregatedPositionsByInstrumentTypeID) {
       return portfolio.AggregatedPositionsByInstrumentTypeID.reduce((sum, agg) => {
         return sum + ((agg.Value || 0) - (agg.Invested || 0));
@@ -167,8 +270,6 @@ class DataExtractor { // For generic access of data types
 
   static getStopLossRate(position) {
     const rate = position ? (position.StopLossRate || 0) : 0;
-    // Fix for eToro bug: SL disabled positions can return 0.01 or similar small values.
-    // If the rate is positive but extremely small (<= 0.01), treat as 0 (disabled).
     if (rate > 0 && rate <= 0.01) return 0; // Normalizes bug value to 0
     if (rate < 0) return 0;
     return rate;
@@ -176,8 +277,6 @@ class DataExtractor { // For generic access of data types
 
   static getTakeProfitRate(position) {
     const rate = position ? (position.TakeProfitRate || 0) : 0;
-    // Fix for eToro bug: TP disabled positions can return INF or small values.
-    // If the rate is positive but extremely small (<= 0.01), treat as 0 (disabled).
     if (rate > 0 && rate <= 0.01) return 0; // Normalizes bug value to 0
     return rate;
   }
@@ -186,38 +285,17 @@ class DataExtractor { // For generic access of data types
     return position ? (position.HasTrailingStopLoss === true) : false; // Default false IF NOT FOUND
   }
 
-  /**
-   * Extract Open Date Time.
-   * Used for bagholder calculations.
-   */
   static getOpenDateTime(position) {
     if (!position || !position.OpenDateTime) return null;
     return new Date(position.OpenDateTime);
   }
 }
 
-/**
- * --- NEW CLASS: priceExtractor ---
- * Handles schema-aware extraction of asset price history.
- * Mitigates typo risk by centralizing access to context.prices.
- */
 class priceExtractor {
-  /**
-   * Retrieves the sorted price history for a specific instrument.
-   * @param {object} pricesContext - The global context.prices object.
-   * @param {string|number} tickerOrId - The Ticker or InstrumentID to fetch.
-   * @returns {Array<{date: string, price: number}>} Sorted array of price objects.
-   */
   static getHistory(pricesContext, tickerOrId) {
     if (!pricesContext || !pricesContext.history) return [];
-
-    // The history map is keyed by InstrumentID (e.g., "10000")
-    // We iterate to find the entry matching the request (by ID or Ticker)
-    // Optimization: If input is ID, direct lookup. If ticker, search.
-
     let assetData = pricesContext.history[tickerOrId];
 
-    // Fallback: If direct lookup failed, maybe it's a ticker symbol?
     if (!assetData) {
       const id = Object.keys(pricesContext.history).find(key => {
         const data = pricesContext.history[key];
@@ -228,7 +306,6 @@ class priceExtractor {
 
     if (!assetData || !assetData.prices) return [];
 
-    // Convert Map<Date, Price> to Array<{date, price}> and sort
     const priceMap = assetData.prices;
     const sortedDates = Object.keys(priceMap).sort((a, b) => a.localeCompare(b));
 
@@ -238,20 +315,12 @@ class priceExtractor {
     })).filter(item => item.price > 0);
   }
 
-  /**
-   * Returns all available assets with their histories.
-   * Useful for computations iterating over the entire market.
-   * @param {object} pricesContext - The global context.prices object.
-   * @returns {Map<string, Array<{date: string, price: number}>>} Map<Ticker, HistoryArray>
-   */
   static getAllHistories(pricesContext) {
     if (!pricesContext || !pricesContext.history) return new Map();
 
    const results = new Map();
    for (const [id, data] of Object.entries(pricesContext.history)) {
      const ticker = data.ticker || id;
-      // Reuse the single-asset logic for consistency, though slightly less efficient
-      // than inlining the sort. For safety/DRY, we reuse.
      const history = this.getHistory(pricesContext, id);
      if (history.length > 0) {
        results.set(ticker, history);
@@ -262,26 +331,14 @@ class priceExtractor {
 }
 
 class HistoryExtractor {
-  // --- Schema Accessor (REFACTORED for Granular API) ---
-  /**
-   * Extracts the daily history snapshot from the User object.
-   * Returns the raw granular positions list.
-   */
   static getDailyHistory(user) {
-    // The new API returns { PublicHistoryPositions: [...] }
     return user?.history?.today || null;
   }
 
-  // --- Data Extractors (ADAPTER PATTERN) ---
-  /**
-   * Adapts granular trade history into the legacy 'TradedAssets' format.
-   * Groups trades by InstrumentID and calculates average holding time.
-   */
   static getTradedAssets(historyDoc) {
     const trades = historyDoc?.PublicHistoryPositions || [];
     if (!trades.length) return [];
 
-    // Group by InstrumentID
     const assetsMap = new Map();
 
     for (const t of trades) {
@@ -297,11 +354,9 @@ class HistoryExtractor {
       }
 
       const asset = assetsMap.get(instId);
-
-      // Calculate Duration in Minutes
       const open = new Date(t.OpenDateTime);
       const close = new Date(t.CloseDateTime);
-      const durationMins = (close - open) / 60000;
+      const durationMins = (close - open) / 60000;
 
       if (durationMins > 0) {
         asset.totalDuration += durationMins;
@@ -309,8 +364,6 @@ class HistoryExtractor {
       }
     }
 
-    // Convert Map to Array format expected by existing calculations
-    // (Returns objects with .instrumentId and .avgHoldingTimeInMinutes)
     return Array.from(assetsMap.values()).map(a => ({
       instrumentId: a.instrumentId,
       avgHoldingTimeInMinutes: a.count > 0 ? (a.totalDuration / a.count) : 0
@@ -325,10 +378,6 @@ class HistoryExtractor {
     return asset ? (asset.avgHoldingTimeInMinutes || 0) : 0;
   }
 
-  /**
-   * Adapts granular trade history into the legacy 'Summary' format.
-   * Calculates WinRatio, AvgProfit, etc. on the fly from the raw list.
-   */
   static getSummary(historyDoc) {
     const trades = historyDoc?.PublicHistoryPositions || [];
     if (!trades.length) return null;
@@ -342,7 +391,6 @@ class HistoryExtractor {
     let totalDur = 0;
 
     for (const t of trades) {
-      // P&L Stats (NetProfit is %)
       if (t.NetProfit > 0) {
         wins++;
         totalProf += t.NetProfit;
@@ -352,10 +400,9 @@ class HistoryExtractor {
         lossCount++;
       }
 
-      // Duration Stats
       const open = new Date(t.OpenDateTime);
       const close = new Date(t.CloseDateTime);
-      totalDur += (close - open) / 60000;
+      totalDur += (close - open) / 60000;
     }
 
     return {
@@ -369,9 +416,6 @@ class HistoryExtractor {
 }
 
 class SignalPrimitives {
-  /**
-   * Safely extracts a specific numeric field for a specific ticker from a dependency.
-   */
   static getMetric(dependencies, calcName, ticker, fieldName, fallback = 0) {
     if (!dependencies || !dependencies[calcName]) return fallback;
     const tickerData = dependencies[calcName][ticker];
@@ -381,13 +425,9 @@ class SignalPrimitives {
     return (typeof val === 'number') ? val : fallback;
   }
 
-  /**
-   * Creates a unified Set of all keys (tickers) present across multiple dependency results.
-   */
   static getUnionKeys(dependencies, calcNames) {
     const keys = new Set();
     if (!dependencies) return [];
-
     for (const name of calcNames) {
       const resultObj = dependencies[name];
       if (resultObj && typeof resultObj === 'object') {
@@ -397,40 +437,29 @@ class SignalPrimitives {
     return Array.from(keys);
   }
 
-  /**
-   * Hyperbolic Tangent Normalization.
-   * Maps inputs to a strict -Scale to +Scale range.
-   */
-  static normalizeTanh(value, scale = 10, sensitivity = 10.0) { // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/tanh
+  static normalizeTanh(value, scale = 10, sensitivity = 10.0) {
     if (value === 0) return 0;
     return Math.tanh(value / sensitivity) * scale;
   }
 
-  /**
-   * Standard Z-Score normalization.
-   */
-  static normalizeZScore(value, mean, stdDev) { // https://gist.github.com/Restuta/5cbf1d186f17febe5e899febb63e1b86
+  static normalizeZScore(value, mean, stdDev) {
     if (!stdDev || stdDev === 0) return 0;
     return (value - mean) / stdDev;
   }
 
-  /**
-   * Simple Divergence (A - B).
-   */
   static divergence(valueA, valueB) {
     return (valueA || 0) - (valueB || 0);
   }
 
-  static getPreviousState(previousComputed, calcName, ticker, fieldName = null) {
-    if (!previousComputed || !previousComputed[calcName]) return null;
-    // Which handles the self-reference.
+  static getPreviousState(previousComputed, calcName, ticker, fieldName = null) {
+    if (!previousComputed || !previousComputed[calcName]) return null;
     const tickerData = previousComputed[calcName][ticker];
     if (!tickerData) return null;
 
     if (fieldName) {
       return tickerData[fieldName];
     }
-    return tickerData;
+    return tickerData;
   }
 }
 
@@ -460,37 +489,17 @@ class MathPrimitives {
     return value > threshold ? 'winner' : 'loser';
   }
 
-  /**
-   * Calculates the probability of an asset hitting a specific price barrier (Stop Loss/Take Profit)
-   * within a given timeframe using the First Passage Time of Geometric Brownian Motion.
-   * * Formula: P(T < t) = Φ((b - vt) / σ√t) + exp(2vb/σ²) * Φ((b + vt) / σ√t)
-   * Where:
-   * b = ln(Barrier/Price)
-   * v = drift - 0.5 * volatility^2
-   * @param {number} currentPrice - The current price of the asset
-   * @param {number} barrierPrice - The target price (SL or TP)
-   * @param {number} volatility - Annualized volatility (e.g., 0.40 for 40%)
-   * @param {number} days - Number of days to forecast (e.g., 3)
-   * @param {number} drift - (Optional) Annualized drift. Default 0 (Risk Neutral).
-   * @returns {number} Probability (0.0 to 1.0)
-   */
-  static calculateHitProbability(currentPrice, barrierPrice, volatility, days, drift = 0) { // https://www.ma.ic.ac.uk/~bin06/M3A22/m3f22chVII.pdf
+  static calculateHitProbability(currentPrice, barrierPrice, volatility, days, drift = 0) {
     if (currentPrice <= 0 || barrierPrice <= 0 || volatility <= 0 || days <= 0) return 0;
 
-    const t = days / 365.0;
+    const t = days / 365.0;
     const sigma = volatility;
     const mu = drift;
-
-    // The barrier in log-space
     const b = Math.log(barrierPrice / currentPrice);
-
-    // Adjusted drift (nu)
     const nu = mu - 0.5 * Math.pow(sigma, 2);
-
     const sqrtT = Math.sqrt(t);
     const sigmaSqrtT = sigma * sqrtT;
 
-    // Helper for Standard Normal CDF (Φ)
     const normCDF = (x) => {
       const t = 1 / (1 + 0.2316419 * Math.abs(x));
       const d = 0.3989423 * Math.exp(-x * x / 2);
@@ -498,41 +507,20 @@ class MathPrimitives {
       return x > 0 ? 1 - prob : prob;
     };
 
-    // Standard First Passage Time Formula parts
     const term1 = (b - nu * t) / sigmaSqrtT;
     const term2 = (2 * nu * b) / (sigma * sigma);
     const term3 = (b + nu * t) / sigmaSqrtT;
 
-    // If barrier is below price (Stop Loss for Long), b is negative.
-    // If barrier is above price (Take Profit for Long), we flip the logic essentially,
-    // but the formula works for the distance.
-    // However, for strict GBM hitting time, we usually treat 'b' as the distance.
-    // For this implementation, we check direction relative to barrier.
-
-    // If we are already at or past the barrier, probability is 100%
     if ((currentPrice > barrierPrice && barrierPrice > currentPrice) ||
         (currentPrice < barrierPrice && barrierPrice < currentPrice)) {
       return 1.0;
     }
 
-    // Calculate Probability
-    // Note: If nu is 0, the second term simplifies significantly, but we keep full form.
     const probability = normCDF(( -Math.abs(b) - nu * t ) / sigmaSqrtT) + Math.exp((2 * nu * Math.abs(b)) / (sigma * sigma)) * normCDF(( -Math.abs(b) + nu * t ) / sigmaSqrtT);
 
     return Math.min(Math.max(probability, 0), 1);
   }
 
-  /**
-   * --- NEW PRIMITIVE ---
-   * Simulates future price paths using Geometric Brownian Motion (Monte Carlo).
-   * Used for testing portfolio resilience against potential market moves.
-   * @param {number} currentPrice - S0
-   * @param {number} volatility - Annualized volatility (sigma)
-   * @param {number} days - Time horizon in days (t)
-   * @param {number} simulations - Number of paths to generate (e.g., 1000)
-   * @param {number} drift - Annualized drift (mu), default 0
-   * @returns {Float32Array} Array of simulated end prices
-   */
   static simulateGBM(currentPrice, volatility, days, simulations = 1000, drift = 0) {
     if (currentPrice <= 0 || volatility <= 0 || days <= 0) return new Float32Array(0);
 
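Note: a quick sanity check of the first-passage formula above with round numbers (illustrative, not from the package). With spot 100, barrier 90, 40% annualized volatility and a 3-day horizon, b = ln(0.9) is about -0.105 while sigma*sqrt(t) is about 0.036, so the barrier sits roughly 2.9 standard deviations away and the returned probability is small:

    const { MathPrimitives } = require('../layers/math_primitives');

    // Stop-loss 10% below spot, 3 days out, risk-neutral drift
    const p = MathPrimitives.calculateHitProbability(100, 90, 0.40, 3, 0);
    console.log(p); // roughly 0.003-0.004 with the normCDF approximation used above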
@@ -542,29 +530,17 @@ class MathPrimitives {
     const driftTerm = (mu - 0.5 * sigma * sigma) * t;
     const volTerm = sigma * Math.sqrt(t);
 
-    // Use Float32Array for memory efficiency with large simulation counts
     const results = new Float32Array(simulations);
 
     for (let i = 0; i < simulations; i++) {
-      // Box-Muller transform for efficient standard normal distribution generation
       const u1 = Math.random();
       const u2 = Math.random();
       const z = Math.sqrt(-2.0 * Math.log(u1)) * Math.cos(2.0 * Math.PI * u2);
-
-      // GBM Formula: St = S0 * exp((mu - 0.5*sigma^2)t + sigma*Wt)
       results[i] = currentPrice * Math.exp(driftTerm + volTerm * z);
     }
     return results;
   }
 
-  /**
-   * --- NEW PRIMITIVE ---
-   * Simulates "Population Breakdown" (Capitulation) Risk.
-   * Correlates simulated price drops with user pain thresholds.
-   * @param {Float32Array} pricePaths - Array of simulated prices (from simulateGBM).
-   * @param {Array<{entryPrice: number, thresholdPct: number}>} userProfiles - Array of user states.
-   * @returns {number} Expected % of population that capitulates (0.0 - 1.0).
-   */
   static simulatePopulationBreakdown(pricePaths, userProfiles) {
     if (!pricePaths.length || !userProfiles.length) return 0;
 
@@ -572,43 +548,30 @@ class MathPrimitives {
     const totalSims = pricePaths.length;
     const totalUsers = userProfiles.length;
 
-    // For each simulated future price scenario...
     for (let i = 0; i < totalSims; i++) {
       const simPrice = pricePaths[i];
       let capitulatedUsersInScenario = 0;
 
-      // ...check every user to see if they survive
       for (let j = 0; j < totalUsers; j++) {
         const user = userProfiles[j];
-        // Calculate hypothetical P&L for this user in this scenario
-        // P&L% = (CurrentValue - EntryValue) / EntryValue
         const hypotheticalPnL = ((simPrice - user.entryPrice) / user.entryPrice) * 100;
 
-        // If hypothetical P&L is worse (lower) than their historical pain threshold, they capitulate.
-        // Note: thresholdPct is typically negative (e.g., -15.0)
         if (hypotheticalPnL < user.thresholdPct) {
           capitulatedUsersInScenario++;
         }
       }
 
-      // Add the % of users who broke in this scenario to the accumulator
       totalBreakdownEvents += (capitulatedUsersInScenario / totalUsers);
     }
 
-    // Return the average capitulation rate across all simulations
     return totalBreakdownEvents / totalSims;
   }
 }
 
 class Aggregators {
-  /**
-   * Helper to bucket users by P&L Status.
-   * Used by legacy systems or specific aggregators.
-   */
-  static bucketUsersByPnlPerAsset(usersData, tickerMap) { // https://www.geeksforgeeks.org/javascript/bucket-sort-visualization-using-javascript/
+  static bucketUsersByPnlPerAsset(usersData, tickerMap) {
     const buckets = new Map();
     for (const [userId, portfolio] of Object.entries(usersData)) {
-      // Auto-detect type if not provided (Legacy compatibility) TODO : We do not need legacy compatability, legacy computations do not run.
       const userType = portfolio.PublicPositions ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
       const positions = DataExtractor.getPositions(portfolio, userType);
 
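Note: the two Monte Carlo primitives above are meant to compose: simulateGBM produces the terminal-price scenarios that simulatePopulationBreakdown scores against per-user pain thresholds. A minimal sketch with fabricated inputs (illustrative only):

    const { MathPrimitives } = require('../layers/math_primitives');

    // 1,000 simulated prices 5 days ahead for an asset at 100 with 50% annualized volatility
    const paths = MathPrimitives.simulateGBM(100, 0.50, 5, 1000);

    // Two holders: one up ~25% with a -15% pain threshold, one at break-even with a -5% threshold
    const users = [
      { entryPrice: 80, thresholdPct: -15 },
      { entryPrice: 100, thresholdPct: -5 }
    ];

    // Expected fraction of this population that capitulates across scenarios (0.0 - 1.0)
    const breakdown = MathPrimitives.simulatePopulationBreakdown(paths, users);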
@@ -630,13 +593,6 @@ class Aggregators {
     return Object.fromEntries(buckets);
   }
 
-  /**
-   * --- NEW PRIMITIVE ---
-   * Calculates Weighted Sentiment (Avg P&L) for a set of positions.
-   * Solves "Dirty Data" / Variable N issue by weighting by investment size.
-   * @param {Array} positions - Array of raw position objects.
-   * @returns {number} Weighted Average NetProfit %.
-   */
   static getWeightedSentiment(positions) {
     if (!positions || positions.length === 0) return 0;
 
@@ -644,9 +600,8 @@ class Aggregators {
     let totalWeight = 0;
 
     for (const pos of positions) {
-      // Use DataExtractor to be safe and schema-agnostic
       const pnl = DataExtractor.getNetProfit(pos);
-      const weight = DataExtractor.getPositionWeight(pos);
+      const weight = DataExtractor.getPositionWeight(pos);
 
       if (weight > 0) {
         totalWeightedPnL += (pnl * weight);
@@ -659,12 +614,10 @@ class Aggregators {
   }
 }
 
-// Validation layer -- Used to validate the data incoming
 class Validators {
   static validatePortfolio(portfolio, userType) {
     if (!portfolio) return { valid: false, errors: ['Portfolio is null'] };
 
-    // Handle both types of schema
     if (userType === SCHEMAS.USER_TYPES.SPECULATOR) {
       if (!portfolio.PublicPositions) return { valid: false, errors: ['Missing PublicPositions'] };
     } else {
@@ -676,38 +629,25 @@ class Validators {
 }
 
 class TimeSeries {
-  /**
-   * Updates a Rolling Mean and Variance using Welford's Online Algorithm (EMA variant). // https://jonisalonen.com/2013/deriving-welfords-method-for-computing-variance/
-   * @param {number} value - New data point.
-   * @param {Object} state - { mean: number, variance: number }
-   * @param {number} alpha - Decay factor (e.g., 0.1 for ~20 days).
-   */
   static updateEMAState(value, state, alpha = 0.1) {
     const mean = state ? (state.mean || 0) : 0;
-    const variance = state ? (state.variance || 1) : 1;
+    const variance = state ? (state.variance || 1) : 1;
 
     if (value === undefined || value === null || isNaN(value)) {
       return { mean, variance };
     }
 
-    // EMA Update for Mean
     const diff = value - mean;
     const newMean = mean + (alpha * diff);
-
-    // EMA Update for Variance
     const newVariance = (1 - alpha) * (variance + (alpha * diff * diff));
 
     return { mean: newMean, variance: newVariance };
   }
 
-  /**
-   * Calculates Pearson Correlation between two arrays. https://gist.github.com/matt-west/6500993
-   */
   static pearsonCorrelation(x, y) {
     if (!x || !y || x.length !== y.length || x.length === 0) return 0;
 
     const n = x.length;
-    // Simple sums
     let sumX = 0, sumY = 0, sumXY = 0, sumX2 = 0, sumY2 = 0;
 
     for (let i = 0; i < n; i++) {
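Note: updateEMAState is stateless; the caller keeps the { mean, variance } pair between runs (for example via previousComputed and getPreviousState) and pushes each new observation through it. Illustrative usage:

    const { TimeSeries } = require('../layers/math_primitives');

    let state = null; // no prior state on the first day
    for (const value of [2.0, 2.4, 1.8, 3.1]) {
      state = TimeSeries.updateEMAState(value, state, 0.1); // alpha 0.1 is roughly a 20-day memory, per the removed docstring
    }
    // state.mean and Math.sqrt(state.variance) can then feed SignalPrimitives.normalizeZScore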
@@ -727,14 +667,6 @@ class TimeSeries {
 
 
 class DistributionAnalytics {
-  /**
-   * Gaussian Kernel Density Estimation (KDE)
-   * Converts discrete price points into a continuous probability density curve.
-   * Optimized for memory: accepts pre-binned data.
-   * @param {Array<{value: number, weight: number}>} data - Points or Bins.
-   * @param {number} bandwidth - Smoothing factor (h).
-   * @param {number} steps - Resolution of the output curve.
-   */
   static computeKDE(data, bandwidth, steps = 60) {
     if (!data || data.length === 0) return [];
 
@@ -744,7 +676,6 @@ class DistributionAnalytics {
       if (p.value > max) max = p.value;
     }
 
-    // Pad range to capture tails
     min -= bandwidth * 3;
     max += bandwidth * 3;
     const stepSize = (max - min) / steps;
@@ -753,14 +684,12 @@ class DistributionAnalytics {
     for (let i = 0; i <= steps; i++) {
       const x = min + (i * stepSize);
       let density = 0;
-      // Vectorized-like summation https://cvw.cac.cornell.edu/vector/intro/how-vector-works#:~:text=Vectorization%20is%20a%20process%20by,performance%20increases%20obtained%20by%20vectorization.
       for (const p of data) {
         const diff = (x - p.value);
-        // Optimization: Skip calculation for points too far away (> 3 std devs)
         if (Math.abs(diff) > bandwidth * 3) continue;
 
         const u = diff / bandwidth;
-        const k = 0.39894228 * Math.exp(-0.5 * u * u);
+        const k = 0.39894228 * Math.exp(-0.5 * u * u);
         density += (p.weight * k) / bandwidth;
       }
       if (density > 0) curve.push({ price: x, density });
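Note: computeKDE returns an array of { price, density } points, which is exactly the shape integrateProfile (next hunk) consumes; the guard added there protects callers that pass a missing curve. Illustrative usage with fabricated bins:

    const { DistributionAnalytics } = require('../layers/math_primitives');

    // Weighted entry prices, pre-binned as { value, weight }
    const bins = [
      { value: 98, weight: 10 },
      { value: 100, weight: 25 },
      { value: 103, weight: 5 }
    ];

    const curve = DistributionAnalytics.computeKDE(bins, 1.5);           // bandwidth h = 1.5
    const mass = DistributionAnalytics.integrateProfile(curve, 99, 101); // weighted density mass between 99 and 101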
@@ -769,19 +698,19 @@ class DistributionAnalytics {
   }
 
   static integrateProfile(curve, startPrice, endPrice) {
+    if (!curve || !Array.isArray(curve)) return 0; // Fix for potential crash
     let sum = 0;
     for (let i = 0; i < curve.length - 1; i++) {
       const p1 = curve[i];
       const p2 = curve[i+1];
       if (p1.price >= startPrice && p2.price <= endPrice) {
-        // Trapezoidal Rule https://www.khanacademy.org/math/ap-calculus-ab/ab-integration-new/ab-6-2/a/understanding-the-trapezoid-rule
         sum += (p2.price - p1.price) * ((p1.density + p2.density) / 2);
       }
     }
     return sum;
   }
 
-  static linearRegression(xValues, yValues) {
+  static linearRegression(xValues, yValues) {
     const n = xValues.length;
     if (n !== yValues.length || n < 2) return { slope: 0, r2: 0 };
 
@@ -799,23 +728,17 @@ class DistributionAnalytics {
   }
 }
 
-
-// TODO, THIS EXPORT CAN SURELY BE DYNAMICALLY PRODUCED? TAKE WHATEVER CLASSES ARE DEFINED? THE IDEA IS THAT WE CAN SIMPLY PRODUCE NEW CLASSES AND NOT NEED TO MAKE ANY FURTHER CHANGES SO :
-// 1. DYNAMICALLY FINDS THE CLASS, EXPORTS HERE
-// 2. DYNAMICALLY IMPORTS INTO COMPUTATION CONTROLLER
-// 3. DYNAMICALLY POPULATES THE MATH VALUE OF THE COMPUTATION CONTROLLER FOR STANDARD AND META COMPUTATION CONTEXTBUILDERS
-// 4. GO AHEAD AND WRITE YOUR COMPUTATION FILES USING THE NEW MATH PRIMITIVES DEFINED AS NEEDED.
-// THIS THEN MEANS THE MATH PRIMTIVIES CAN BE UPDATED EASILY WITHOUT FURTHER CHANGES TO OTHER SYSTEMS AND AVOID COMPLICATED DEBUGGING OR FORGETFUL INTEGRATIONS.
-
 module.exports = {
   Aggregators,
   DataExtractor,
   DistributionAnalytics,
   HistoryExtractor,
+  InsightsExtractor, // Exported
   MathPrimitives,
   SCHEMAS,
   SignalPrimitives,
   TimeSeries,
+  UserClassifier, // Exported
   Validators,
   priceExtractor
 };
@@ -12,18 +12,30 @@
  */
 async function checkDiscoveryNeed(userType, config, dependencies) {
   const { logger, firestoreUtils } = dependencies;
+  const { targetUsersPerBlock, normalBlockCountsDocPath, allHighValueBlocks } = config;
+
+  // --- REFACTOR: ORGANIC SPECULATOR DISCOVERY ---
+  // We no longer "hunt" for speculators using block targets.
+  // Speculators are now discovered organically via the Normal User Update Loop (Feedback Loop).
+  if (userType === 'speculator') {
+    logger.log('INFO', '[Orchestrator Helpers] Speculator discovery is Organic/Feedback-Driven. Skipping active discovery checks.');
+    return { needsDiscovery: false, blocksToFill: [] };
+  }
+
   logger.log('INFO', `[Orchestrator Helpers] Checking discovery need for ${userType}...`);
-
-
-  const blockCounts = await firestoreUtils.getBlockCapacities(dependencies, {
-
-
-
-
-
-
-
-
+
+  // Logic for Normal Users remains (fill blocks to target)
+  const blockCounts = await firestoreUtils.getBlockCapacities(dependencies, { normalBlockCountsDocPath }, userType);
+
+  const underPopulatedBlocks = allHighValueBlocks.filter(block => (blockCounts[block.startId] || 0) < targetUsersPerBlock);
+  const needsDiscovery = underPopulatedBlocks.length > 0;
+
+  if (!needsDiscovery) {
+    logger.log('SUCCESS', `✅ All blocks are at target capacity for ${userType} users.`);
+  } else {
+    logger.log('INFO', `Found ${underPopulatedBlocks.length} underpopulated blocks for ${userType}.`);
+  }
+
   return { needsDiscovery, blocksToFill: underPopulatedBlocks };
 }
 
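Note: after this refactor, a speculator-type call returns before any Firestore reads. Illustrative call shape only; the config and dependency objects are assumed to match the destructuring above:

    const result = await checkDiscoveryNeed('speculator', config, { logger, firestoreUtils });
    // -> { needsDiscovery: false, blocksToFill: [] }, with only the INFO log line emitted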
@@ -38,29 +50,70 @@ async function checkDiscoveryNeed(userType, config, dependencies) {
 async function getDiscoveryCandidates(userType, blocksToFill, config, dependencies) {
   const { logger, firestoreUtils } = dependencies;
   logger.log('INFO', `[Orchestrator Helpers] Getting discovery candidates for ${userType}...`);
-
-  const
+
+  const {
+    maxTasksPerRun,
+    discoveryBatchSize,
+    maxRandomCidsToDiscover,
+    specBlocksCollection,
+    pendingSpecCollection,
+    normalUserCollection,
+    invalidSpecCollection,
+    snapshotsSubCollectionName,
+    partsSubCollectionName
+  } = config;
+
   const dispatchedCids = new Set();
-
+
+  // Fetch exclusion list to avoid re-discovering existing users
+  // Note: For Normal users, we check against existing Normal users.
+  const latestNormalPortfolios = await firestoreUtils.getLatestNormalUserPortfolios(dependencies, {
+    normalUserCollectionName: normalUserCollection,
+    snapshotsSubCollectionName,
+    partsSubCollectionName
+  });
+
   const existingNormalUserIds = new Set(Object.keys(latestNormalPortfolios));
-  const exclusionIds = await firestoreUtils.getExclusionIds(dependencies, {
-
-
-
-
+  const exclusionIds = await firestoreUtils.getExclusionIds(dependencies, {
+    specBlocksCollection,
+    pendingSpecCollection,
+    normalUserCollection,
+    invalidSpecCollection,
+    existingNormalUserIds
+  }, userType);
+
+  // --- RANDOM DISCOVERY LOOP (For Normal Users) ---
   let dispatchedRandomCidCount = 0;
-
-  while ((dispatchedRandomCidCount +
+
+  while ((dispatchedRandomCidCount + dispatchedCids.size) < maxRandomCidsToDiscover &&
+         dispatchedCids.size < maxTasksPerRun &&
+         blocksToFill.length > 0) {
+
     const blockIndex = dispatchedCids.size % blocksToFill.length;
     const block = blocksToFill[blockIndex];
     if (!block) break;
+
     for (let j = 0; j < discoveryBatchSize; j++) {
-
-
-
-
-
-
+      if ((dispatchedRandomCidCount + dispatchedCids.size) >= maxRandomCidsToDiscover ||
+          dispatchedCids.size >= maxTasksPerRun) break;
+
+      let randomId;
+      let retryCount = 0;
+      const MAX_RETRIES = 50;
+
+      do {
+        if (++retryCount > MAX_RETRIES) break;
+        // Generate random CID within the block range
+        randomId = String(Math.floor(Math.random() * 1000000) + block.startId);
+      } while (exclusionIds.has(randomId) || dispatchedCids.has(randomId));
+
+      if (retryCount <= MAX_RETRIES) {
+        dispatchedCids.add(randomId);
+        dispatchedRandomCidCount++;
+      }
+    }
+  }
+
   logger.log('INFO', `Generated ${dispatchedRandomCidCount} random CIDs for ${userType} discovery.`);
   logger.log('INFO', `Total candidates for dispatch: ${dispatchedCids.size}`);
   return dispatchedCids;
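Note: the discovery loop above draws a random CID as block.startId plus a value in [0, 999999], and the dispatcher in the next hunk recovers the owning block with Math.floor(cid / 1000000) * 1000000; the two only round-trip cleanly when startId is itself a multiple of 1,000,000. Quick check (illustrative values):

    const block = { startId: 14000000 };
    const cid = Math.floor(Math.random() * 1000000) + block.startId;     // 14000000 .. 14999999
    console.log(Math.floor(cid / 1000000) * 1000000 === block.startId);  // true for million-aligned blocks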
@@ -75,34 +128,54 @@ async function getDiscoveryCandidates(userType, blocksToFill, config, dependenci
  * @returns {Promise<void>}
  */
 async function dispatchDiscovery(userType, candidates, config, dependencies) {
-  const { logger,
-  const {topicName,dispatchBatchSize
-
+  const { logger, pubsub } = dependencies;
+  const { topicName, dispatchBatchSize } = config;
+
+  if (candidates.size === 0) {
+    logger.log('INFO', `[Orchestrator Helpers] No ${userType} candidates to dispatch.`);
+    return;
+  }
+
   logger.log('INFO', `[Orchestrator Helpers] Dispatching ${candidates.size} discovery tasks for ${userType}...`);
-
+
   const cidsArray = Array.from(candidates);
-  if (isSpeculator)
-  {await firestoreUtils.clearCollection(dependencies, pendingSpecCollection);
-  await firestoreUtils.batchWriteShardedIds(dependencies, {collectionPath: pendingSpecCollection,items: cidsArray, timestamp: new Date(),maxFieldsPerDoc: pendingMaxFieldsPerDoc,maxWritesPerBatch: pendingMaxWritesPerBatch});}
   const tasks = [];
-
-
-
-
+
+  for (let i = 0; i < cidsArray.length; i += dispatchBatchSize) {
+    const batchCids = cidsArray.slice(i, i + dispatchBatchSize).map(cid => parseInt(cid));
+    if (batchCids.length > 0) {
+      const blockId = Math.floor(batchCids[0] / 1000000) * 1000000;
+      tasks.push({
+        type: 'discover',
+        cids: batchCids,
+        blockId,
+        userType
+      });
+    }
+  }
+
   const topic = pubsub.topic(topicName);
   let totalCidsPublished = 0;
   let messagesPublished = 0;
+
   for (let i = 0; i < tasks.length; i += dispatchBatchSize) {
     const batchOfTasks = tasks.slice(i, i + dispatchBatchSize);
     const messagePayload = { tasks: batchOfTasks };
-
-
-
-
-
-
-
-
+
+    try {
+      await topic.publishMessage({ json: messagePayload });
+
+      const cidsInThisMessage = batchOfTasks.reduce((acc, task) => acc + task.cids.length, 0);
+      totalCidsPublished += cidsInThisMessage;
+      messagesPublished++;
+
+      logger.log('INFO', `[Orchestrator Helpers] Dispatched batch ${messagesPublished} with ${batchOfTasks.length} discover tasks (${cidsInThisMessage} CIDs) as 1 Pub/Sub message.`);
+    } catch (publishError) {
+      logger.log('ERROR', `[Orchestrator Helpers] Failed to publish discover batch ${messagesPublished + 1}.`, { error: publishError.message });
+    }
+  }
+
+  logger.log('SUCCESS', `[Orchestrator Helpers] Dispatched ${totalCidsPublished} CIDs in ${tasks.length} tasks for ${userType} discovery.`);
 }
 
 module.exports = { checkDiscoveryNeed, getDiscoveryCandidates, dispatchDiscovery };