bulltrackers-module 1.0.205 → 1.0.206

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -25,7 +25,7 @@ const PARALLEL_BATCH_SIZE = 7;
  */
  async function runComputationPass(config, dependencies, computationManifest) {
  const { logger } = dependencies;
- const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
+ const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
  if (!passToRun) return logger.log('ERROR', '[PassRunner] No pass defined. Aborting.');

  logger.log('INFO', `🚀 Starting PASS ${passToRun} (Legacy Mode)...`);
@@ -33,21 +33,21 @@ async function runComputationPass(config, dependencies, computationManifest) {
  // Hardcoded earliest dates
  const earliestDates = {
  portfolio: new Date('2025-09-25T00:00:00Z'),
- history: new Date('2025-11-05T00:00:00Z'),
- social: new Date('2025-10-30T00:00:00Z'),
- insights: new Date('2025-08-26T00:00:00Z'),
- price: new Date('2025-08-01T00:00:00Z')
+ history: new Date('2025-11-05T00:00:00Z'),
+ social: new Date('2025-10-30T00:00:00Z'),
+ insights: new Date('2025-08-26T00:00:00Z'),
+ price: new Date('2025-08-01T00:00:00Z')
  };
  earliestDates.absoluteEarliest = Object.values(earliestDates).reduce((a, b) => a < b ? a : b);

- const passes = groupByPass(computationManifest);
+ const passes = groupByPass(computationManifest);
  const calcsInThisPass = passes[passToRun] || [];

  if (!calcsInThisPass.length)
  return logger.log('WARN', `[PassRunner] No calcs for Pass ${passToRun}. Exiting.`);

  const passEarliestDate = earliestDates.absoluteEarliest;
- const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
+ const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
  const allExpectedDates = getExpectedDateStrings(passEarliestDate, endDateUTC);

  // Legacy Batch Optimization for Price (Only used in legacy loop)
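Reviewer note: the hardcoded dates above changed only in whitespace. As a quick, self-contained check of the absoluteEarliest reduce (not part of the package), using the same dates it resolves to the price date:

    const earliestDates = {
      portfolio: new Date('2025-09-25T00:00:00Z'),
      history:   new Date('2025-11-05T00:00:00Z'),
      social:    new Date('2025-10-30T00:00:00Z'),
      insights:  new Date('2025-08-26T00:00:00Z'),
      price:     new Date('2025-08-01T00:00:00Z')
    };
    // The reducer keeps the smaller Date at each step.
    const absoluteEarliest = Object.values(earliestDates).reduce((a, b) => a < b ? a : b);
    console.log(absoluteEarliest.toISOString()); // 2025-08-01T00:00:00.000Z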
@@ -98,10 +98,10 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
  // 2. Check Root Data Availability
  const earliestDates = {
  portfolio: new Date('2025-09-25T00:00:00Z'),
- history: new Date('2025-11-05T00:00:00Z'),
- social: new Date('2025-10-30T00:00:00Z'),
- insights: new Date('2025-08-26T00:00:00Z'),
- price: new Date('2025-08-01T00:00:00Z')
+ history: new Date('2025-11-05T00:00:00Z'),
+ social: new Date('2025-10-30T00:00:00Z'),
+ insights: new Date('2025-08-26T00:00:00Z'),
+ price: new Date('2025-08-01T00:00:00Z')
  };

  const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
@@ -129,8 +129,8 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d

  // Fetch dependencies (results from this day or yesterday)
  const existingResults = await fetchExistingResults(dateStr, calcsRunning, computationManifest, config, dependencies, false);
- const prevDate = new Date(dateToProcess); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
- const prevDateStr = prevDate.toISOString().slice(0, 10);
+ const prevDate = new Date(dateToProcess); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
+ const prevDateStr = prevDate.toISOString().slice(0, 10);
  const previousResults = await fetchExistingResults(prevDateStr, calcsRunning, computationManifest, config, dependencies, true);

  if (standardToRun.length) {
@@ -5,8 +5,8 @@
  * allowing others in the same pass/date to succeed and be recorded.
  */

- const { ComputationController } = require('../controllers/computation_controller');
- const { batchStoreSchemas } = require('../utils/schema_capture');
+ const { ComputationController } = require('../controllers/computation_controller');
+ const { batchStoreSchemas } = require('../utils/schema_capture');
  const { normalizeName, commitBatchInChunks } = require('../utils/utils');
  const {
  getPortfolioPartRefs, loadDailyInsights, loadDailySocialPostInsights,
@@ -33,7 +33,7 @@ function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[ca
  function validateResultPatterns(logger, calcName, results, category) {
  if (category === 'speculator' || category === 'speculators') return;

- const tickers = Object.keys(results);
+ const tickers = Object.keys(results);
  const totalItems = tickers.length;

  if (totalItems < 5) return;
@@ -46,8 +46,8 @@ function validateResultPatterns(logger, calcName, results, category) {
  keys.forEach(key => {
  if (key.startsWith('_')) return;

- let nullCount = 0;
- let nanCount = 0;
+ let nullCount = 0;
+ let nanCount = 0;
  let undefinedCount = 0;

  for (const t of tickers) {
@@ -72,11 +72,11 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
  const missing = [];
  if (!calcManifest.rootDataDependencies) return { canRun: true, missing };
  for (const dep of calcManifest.rootDataDependencies) {
- if (dep === 'portfolio' && !rootDataStatus.hasPortfolio) missing.push('portfolio');
- else if (dep === 'insights' && !rootDataStatus.hasInsights) missing.push('insights');
- else if (dep === 'social' && !rootDataStatus.hasSocial) missing.push('social');
- else if (dep === 'history' && !rootDataStatus.hasHistory) missing.push('history');
- else if (dep === 'price' && !rootDataStatus.hasPrices) missing.push('price');
+ if (dep === 'portfolio' && !rootDataStatus.hasPortfolio) missing.push('portfolio');
+ else if (dep === 'insights' && !rootDataStatus.hasInsights) missing.push('insights');
+ else if (dep === 'social' && !rootDataStatus.hasSocial) missing.push('social');
+ else if (dep === 'history' && !rootDataStatus.hasHistory) missing.push('history');
+ else if (dep === 'price' && !rootDataStatus.hasPrices) missing.push('price');
  }
  return { canRun: missing.length === 0, missing };
  }
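Reviewer note: the edit above is whitespace-only; the contract of checkRootDependencies is unchanged. An illustrative call with hypothetical manifest and status objects:

    // A calc that needs portfolio and price data, on a day where prices are missing:
    checkRootDependencies(
      { rootDataDependencies: ['portfolio', 'price'] },
      { hasPortfolio: true, hasPrices: false }
    );
    // => { canRun: false, missing: ['price'] }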
@@ -90,10 +90,10 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest

  try {
  const tasks = [];
- if (dateToProcess >= earliestDates.portfolio) tasks.push(getPortfolioPartRefs(config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio = !!r.length; }));
- if (dateToProcess >= earliestDates.insights) tasks.push(loadDailyInsights(config, dependencies, dateStr).then(r => { insightsData = r; hasInsights = !!r; }));
- if (dateToProcess >= earliestDates.social) tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
- if (dateToProcess >= earliestDates.history) tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));
+ if (dateToProcess >= earliestDates.portfolio) tasks.push(getPortfolioPartRefs (config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio = !!r.length; }));
+ if (dateToProcess >= earliestDates.insights) tasks.push(loadDailyInsights (config, dependencies, dateStr).then(r => { insightsData = r; hasInsights = !!r; }));
+ if (dateToProcess >= earliestDates.social) tasks.push(loadDailySocialPostInsights (config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
+ if (dateToProcess >= earliestDates.history) tasks.push(getHistoryPartRefs (config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));

  if (dateToProcess >= earliestDates.price) {
  tasks.push(checkPriceDataAvailability(config, dependencies).then(r => { hasPrices = r; }));
@@ -142,22 +142,22 @@ async function fetchComputationStatus(dateStr, config, { db }) {

  async function fetchGlobalComputationStatus(config, { db }) {
  const collection = config.computationStatusCollection || 'computation_status';
- const docRef = db.collection(collection).doc('global_status');
- const snap = await docRef.get();
+ const docRef = db.collection(collection).doc('global_status');
+ const snap = await docRef.get();
  return snap.exists ? snap.data() : {};
  }

  async function updateComputationStatus(dateStr, updates, config, { db }) {
  if (!updates || Object.keys(updates).length === 0) return;
  const collection = config.computationStatusCollection || 'computation_status';
- const docRef = db.collection(collection).doc(dateStr);
+ const docRef = db.collection(collection).doc(dateStr);
  await docRef.set(updates, { merge: true });
  }

  async function updateGlobalComputationStatus(updatesByDate, config, { db }) {
  if (!updatesByDate || Object.keys(updatesByDate).length === 0) return;
  const collection = config.computationStatusCollection || 'computation_status';
- const docRef = db.collection(collection).doc('global_status');
+ const docRef = db.collection(collection).doc('global_status');
  const flattenUpdates = {};
  for (const [date, statuses] of Object.entries(updatesByDate)) {
  for (const [calc, status] of Object.entries(statuses)) {
@@ -180,7 +180,7 @@ async function updateGlobalComputationStatus(updatesByDate, config, { db }) {
  }

  async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db }, includeSelf = false) {
- const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
+ const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
  const calcsToFetch = new Set();
  for (const calc of calcsInPass) {
  if (calc.dependencies) { calc.dependencies.forEach(d => calcsToFetch.add(normalizeName(d))); }
@@ -189,7 +189,7 @@ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config,
  if (!calcsToFetch.size) return {};
  const fetched = {};
  const docRefs = [];
- const names = [];
+ const names = [];
  for (const name of calcsToFetch) {
  const m = manifestMap.get(name);
  if (m) {
@@ -208,8 +208,8 @@ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config,

  async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps) {
  const { logger } = deps;
- const controller = new ComputationController(config, deps);
- const calcs = Object.values(state).filter(c => c && c.manifest);
+ const controller = new ComputationController(config, deps);
+ const calcs = Object.values(state).filter(c => c && c.manifest);
  const streamingCalcs = calcs.filter(c =>
  c.manifest.rootDataDependencies.includes('portfolio') ||
  c.manifest.rootDataDependencies.includes('history')
@@ -220,14 +220,14 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
  logger.log('INFO', `[${passName}] Streaming for ${streamingCalcs.length} computations...`);

  await controller.loader.loadMappings();
- const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
+ const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
  const prevDateStr = prevDate.toISOString().slice(0, 10);

- const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
+ const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
  const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
- const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
- const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
- const tH_iter = (needsTradingHistory && historyRefs) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;
+ const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
+ const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
+ const tH_iter = (needsTradingHistory && historyRefs) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;

  let yP_chunk = {};
  let tH_chunk = {};
@@ -316,9 +316,9 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
  continue;
  }

- const standardRes = {};
+ const standardRes = {};
  const shardedWrites = [];
- const calcWrites = []; // Accumulate all writes for THIS specific calculation
+ const calcWrites = []; // Accumulate all writes for THIS specific calculation

  // 1. Separate Standard and Sharded Data
  for (const key in result) {
@@ -95,8 +95,8 @@ class DataExtractor { // For generic access of data types
  * Extract Current Equity Value %.
  * Schema: 'Value' is the current value as a % of total portfolio equity.
  */
- static getPositionValuePct(position) { // TODO - VERIFY THIS WORKS FOR SPECULATORS,
- return position ? (position.Value || 0) : 0; // IS VALUE ACTUALLY THE VALUE OF POSITION AS A % OF TOTAL PORTFOLIO EQUITY? IS IT THE SAME FOR NORMAL USERS?
+ static getPositionValuePct(position) {
+ return position ? (position.Value || 0) : 0;
  }

  /**
@@ -262,39 +262,108 @@ class priceExtractor {
  }

  class HistoryExtractor {
- // --- Schema Accessor (NEW) ---
+ // --- Schema Accessor (REFACTORED for Granular API) ---
  /**
  * Extracts the daily history snapshot from the User object.
- * This decouples the computation from knowing 'user.history.today'.
+ * Returns the raw granular positions list.
  */
  static getDailyHistory(user) {
+ // The new API returns { PublicHistoryPositions: [...] }
  return user?.history?.today || null;
  }

- // --- Data Extractors ---
+ // --- Data Extractors (ADAPTER PATTERN) ---
+ /**
+ * Adapts granular trade history into the legacy 'TradedAssets' format.
+ * Groups trades by InstrumentID and calculates average holding time.
+ */
  static getTradedAssets(historyDoc) {
- if (!historyDoc || !Array.isArray(historyDoc.assets)) return [];
- return historyDoc.assets;
+ const trades = historyDoc?.PublicHistoryPositions || [];
+ if (!trades.length) return [];
+
+ // Group by InstrumentID
+ const assetsMap = new Map();
+
+ for (const t of trades) {
+ const instId = t.InstrumentID;
+ if (!instId) continue;
+
+ if (!assetsMap.has(instId)) {
+ assetsMap.set(instId, {
+ instrumentId: instId,
+ totalDuration: 0,
+ count: 0
+ });
+ }
+
+ const asset = assetsMap.get(instId);
+
+ // Calculate Duration in Minutes
+ const open = new Date(t.OpenDateTime);
+ const close = new Date(t.CloseDateTime);
+ const durationMins = (close - open) / 60000; // ms -> min
+
+ if (durationMins > 0) {
+ asset.totalDuration += durationMins;
+ asset.count++;
+ }
+ }
+
+ // Convert Map to Array format expected by existing calculations
+ // (Returns objects with .instrumentId and .avgHoldingTimeInMinutes)
+ return Array.from(assetsMap.values()).map(a => ({
+ instrumentId: a.instrumentId,
+ avgHoldingTimeInMinutes: a.count > 0 ? (a.totalDuration / a.count) : 0
+ }));
  }

  static getInstrumentId(asset) {
  return asset ? asset.instrumentId : null;
  }

- static getAvgHoldingTimeMinutes(asset) { // Note, in minutes, we could convert values here into hours or days but we leave as-is for now.
+ static getAvgHoldingTimeMinutes(asset) {
  return asset ? (asset.avgHoldingTimeInMinutes || 0) : 0;
  }

- static getSummary(historyDoc) { // This returns the top-level summary of trade history
- const all = historyDoc?.all;
- if (!all) return null;
+ /**
+ * Adapts granular trade history into the legacy 'Summary' format.
+ * Calculates WinRatio, AvgProfit, etc. on the fly from the raw list.
+ */
+ static getSummary(historyDoc) {
+ const trades = historyDoc?.PublicHistoryPositions || [];
+ if (!trades.length) return null;
+
+ let totalTrades = trades.length;
+ let wins = 0;
+ let totalProf = 0;
+ let totalLoss = 0;
+ let profCount = 0;
+ let lossCount = 0;
+ let totalDur = 0;
+
+ for (const t of trades) {
+ // P&L Stats (NetProfit is %)
+ if (t.NetProfit > 0) {
+ wins++;
+ totalProf += t.NetProfit;
+ profCount++;
+ } else if (t.NetProfit < 0) {
+ totalLoss += t.NetProfit;
+ lossCount++;
+ }
+
+ // Duration Stats
+ const open = new Date(t.OpenDateTime);
+ const close = new Date(t.CloseDateTime);
+ totalDur += (close - open) / 60000; // ms -> min
+ }

- return { // The all object contains instrumentid of -1 value, we do not include this, it's a junk backend-eToro placeholder.
- totalTrades: all.totalTrades || 0,
- winRatio: all.winRatio || 0,
- avgProfitPct: all.avgProfitPct || 0,
- avgLossPct: all.avgLossPct || 0,
- avgHoldingTimeInMinutes: all.avgHoldingTimeInMinutes || 0
+ return {
+ totalTrades: totalTrades,
+ winRatio: totalTrades > 0 ? (wins / totalTrades) * 100 : 0,
+ avgProfitPct: profCount > 0 ? totalProf / profCount : 0,
+ avgLossPct: lossCount > 0 ? totalLoss / lossCount : 0,
+ avgHoldingTimeInMinutes: totalTrades > 0 ? totalDur / totalTrades : 0
  };
  }
  }
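Reviewer note: a minimal sketch (not part of the package) of how the new adapters behave, assuming a granular history document shaped like the PublicHistoryPositions entries above; the trades below are hypothetical.

    const historyDoc = {
      PublicHistoryPositions: [
        { InstrumentID: 1001, NetProfit: 2.5,  OpenDateTime: '2025-11-01T10:00:00Z', CloseDateTime: '2025-11-01T16:00:00Z' },
        { InstrumentID: 1001, NetProfit: -1.0, OpenDateTime: '2025-11-02T10:00:00Z', CloseDateTime: '2025-11-02T12:00:00Z' }
      ]
    };
    HistoryExtractor.getTradedAssets(historyDoc);
    // => [ { instrumentId: 1001, avgHoldingTimeInMinutes: 240 } ]   // (360 + 120) / 2
    HistoryExtractor.getSummary(historyDoc);
    // => { totalTrades: 2, winRatio: 50, avgProfitPct: 2.5, avgLossPct: -1, avgHoldingTimeInMinutes: 240 }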
@@ -354,7 +423,7 @@ class SignalPrimitives {

  static getPreviousState(previousComputed, calcName, ticker, fieldName = null) { // This is used for either fetching computations listed in getdependencies() OR self-history
  if (!previousComputed || !previousComputed[calcName]) return null; // Using this for self-history DOES NOT cause a circular dependency because we assign a special rule in orchestration_helpers
- // Which handles the self-reference, see 2. SMART SELF-FETCH in orchestration_helpers
+ // Which handles the self-reference.
  const tickerData = previousComputed[calcName][ticker];
  if (!tickerData) return null;

@@ -374,7 +443,7 @@ class MathPrimitives {
  static median(values) {
  if (!values || !values.length) return 0;
  const sorted = [...values].sort((a, b) => a - b);
- const mid = Math.floor(sorted.length / 2);
+ const mid = Math.floor(sorted.length / 2);
  return sorted.length % 2 === 0
  ? (sorted[mid - 1] + sorted[mid]) / 2
  : sorted[mid];
@@ -382,7 +451,7 @@ class MathPrimitives {

  static standardDeviation(values) {
  if (!values || !values.length) return 0;
- const avg = this.average(values);
+ const avg = this.average(values);
  const squareDiffs = values.map(val => Math.pow((val || 0) - avg, 2));
  return Math.sqrt(this.average(squareDiffs));
  }
@@ -398,7 +467,7 @@ class MathPrimitives {
  * Where:
  * b = ln(Barrier/Price)
  * v = drift - 0.5 * volatility^2
- * * @param {number} currentPrice - The current price of the asset
+ * @param {number} currentPrice - The current price of the asset
  * @param {number} barrierPrice - The target price (SL or TP)
  * @param {number} volatility - Annualized volatility (e.g., 0.40 for 40%)
  * @param {number} days - Number of days to forecast (e.g., 3)
@@ -408,9 +477,9 @@ class MathPrimitives {
  static calculateHitProbability(currentPrice, barrierPrice, volatility, days, drift = 0) { // https://www.ma.ic.ac.uk/~bin06/M3A22/m3f22chVII.pdf
  if (currentPrice <= 0 || barrierPrice <= 0 || volatility <= 0 || days <= 0) return 0;

- const t = days / 365.0; // Convert days to years
+ const t = days / 365.0; // Convert days to years
  const sigma = volatility;
- const mu = drift;
+ const mu = drift;

  // The barrier in log-space
  const b = Math.log(barrierPrice / currentPrice);
@@ -418,7 +487,7 @@ class MathPrimitives {
  // Adjusted drift (nu)
  const nu = mu - 0.5 * Math.pow(sigma, 2);

- const sqrtT = Math.sqrt(t);
+ const sqrtT = Math.sqrt(t);
  const sigmaSqrtT = sigma * sqrtT;

  // Helper for Standard Normal CDF (Φ)
@@ -448,8 +517,7 @@ class MathPrimitives {

  // Calculate Probability
  // Note: If nu is 0, the second term simplifies significantly, but we keep full form.
- const probability = normCDF(( -Math.abs(b) - nu * t ) / sigmaSqrtT) +
- Math.exp((2 * nu * Math.abs(b)) / (sigma * sigma)) * normCDF(( -Math.abs(b) + nu * t ) / sigmaSqrtT);
+ const probability = normCDF(( -Math.abs(b) - nu * t ) / sigmaSqrtT) + Math.exp((2 * nu * Math.abs(b)) / (sigma * sigma)) * normCDF(( -Math.abs(b) + nu * t ) / sigmaSqrtT);

  return Math.min(Math.max(probability, 0), 1);
  }
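Reviewer note: a hedged usage sketch of the barrier-hit helper above, with purely illustrative inputs (price 100, take-profit barrier 110, 40% annualized volatility, 3-day horizon, zero drift):

    const p = MathPrimitives.calculateHitProbability(100, 110, 0.40, 3, 0);
    // Internally: t = 3/365 ≈ 0.0082, b = ln(1.1) ≈ 0.095, nu = -0.08, sigma*sqrt(t) ≈ 0.036,
    // so both normCDF arguments sit near -2.6 and the result is small, on the order of 1%.
    console.log(p);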
@@ -468,11 +536,11 @@ class MathPrimitives {
  static simulateGBM(currentPrice, volatility, days, simulations = 1000, drift = 0) {
  if (currentPrice <= 0 || volatility <= 0 || days <= 0) return new Float32Array(0);

- const t = days / 365.0;
- const sigma = volatility;
- const mu = drift;
+ const t = days / 365.0;
+ const sigma = volatility;
+ const mu = drift;
  const driftTerm = (mu - 0.5 * sigma * sigma) * t;
- const volTerm = sigma * Math.sqrt(t);
+ const volTerm = sigma * Math.sqrt(t);

  // Use Float32Array for memory efficiency with large simulation counts
  const results = new Float32Array(simulations);
@@ -481,7 +549,7 @@ class MathPrimitives {
  // Box-Muller transform for efficient standard normal distribution generation
  const u1 = Math.random();
  const u2 = Math.random();
- const z = Math.sqrt(-2.0 * Math.log(u1)) * Math.cos(2.0 * Math.PI * u2);
+ const z = Math.sqrt(-2.0 * Math.log(u1)) * Math.cos(2.0 * Math.PI * u2);

  // GBM Formula: St = S0 * exp((mu - 0.5*sigma^2)t + sigma*Wt)
  results[i] = currentPrice * Math.exp(driftTerm + volTerm * z);
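Reviewer note: a usage sketch for the simulator above (illustrative numbers; per the loop, simulateGBM returns one terminal price per simulation):

    // 10,000 terminal prices for a 100-unit asset, 40% annualized vol, 5-day horizon.
    const terminals = MathPrimitives.simulateGBM(100, 0.40, 5, 10000);

    // Estimate the chance the price finishes at or above a hypothetical 105 level.
    let above = 0;
    for (const s of terminals) { if (s >= 105) above++; }
    console.log(`P(S_T >= 105) ≈ ${(above / terminals.length).toFixed(3)}`);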
@@ -643,14 +711,14 @@ class TimeSeries {
  let sumX = 0, sumY = 0, sumXY = 0, sumX2 = 0, sumY2 = 0;

  for (let i = 0; i < n; i++) {
- sumX += x[i];
- sumY += y[i];
+ sumX += x[i];
+ sumY += y[i];
  sumXY += x[i] * y[i];
  sumX2 += x[i] * x[i];
  sumY2 += y[i] * y[i];
  }

- const numerator = (n * sumXY) - (sumX * sumY);
+ const numerator = (n * sumXY) - (sumX * sumY);
  const denominator = Math.sqrt(((n * sumX2) - (sumX * sumX)) * ((n * sumY2) - (sumY * sumY)));

  return (denominator === 0) ? 0 : numerator / denominator;
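Reviewer note: the accumulators above implement the textbook Pearson correlation r = (n*Σxy - Σx*Σy) / sqrt((n*Σx² - (Σx)²) * (n*Σy² - (Σy)²)); a tiny worked check with illustrative series:

    // x = [1, 2, 3], y = [2, 4, 6] (perfectly correlated, so r should be 1)
    // n = 3, sumX = 6, sumY = 12, sumXY = 28, sumX2 = 14, sumY2 = 56
    // numerator   = 3*28 - 6*12 = 12
    // denominator = sqrt((3*14 - 36) * (3*56 - 144)) = sqrt(6 * 24) = 12
    // r = 12 / 12 = 1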
@@ -719,8 +787,8 @@ class DistributionAnalytics {

  let sumX = 0, sumY = 0, sumXY = 0, sumXX = 0, sumYY = 0;
  for (let i = 0; i < n; i++) {
- sumX += xValues[i];
- sumY += yValues[i];
+ sumX += xValues[i];
+ sumY += yValues[i];
  sumXY += xValues[i] * yValues[i];
  sumXX += xValues[i] * xValues[i];
  sumYY += yValues[i] * yValues[i];
@@ -1,12 +1,12 @@
  /*
  * FILENAME: CloudFunctions/NpmWrappers/bulltrackers-module/functions/task-engine/helpers/update_helpers.js
+ * (OPTIMIZED V3: Removed obsolete username lookup logic)
  * (OPTIMIZED V2: Added "Circuit Breaker" for Proxy failures)
- * (OPTIMIZED V2: Downgraded verbose per-user logs to TRACE to save costs)
  * (REFACTORED: Concurrency set to 1, added fallback and verbose logging)
  */

  const { FieldValue } = require('@google-cloud/firestore');
- const pLimit = require('p-limit');
+ const crypto = require('crypto');

  // --- CIRCUIT BREAKER STATE ---
  // Persists across function invocations in the same instance.
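Reviewer note: for context, the circuit breaker this state feeds works roughly like the simplified sketch below. The real helpers are shouldTryProxy/recordProxyOutcome with the threshold MAX_PROXY_FAILURES; that constant's value is not visible in this diff, so the 5 used here is an assumption.

    // Simplified illustration only, not the package's exact code.
    let consecutiveFailures = 0;
    const MAX_FAILURES = 5; // assumed threshold

    function shouldTryProxySketch() {
      // Stop routing through the proxy once it has failed too many times in a row.
      return consecutiveFailures < MAX_FAILURES;
    }

    function recordOutcomeSketch(success) {
      // Any success closes the circuit; each failure moves it toward open.
      consecutiveFailures = success ? 0 : consecutiveFailures + 1;
    }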
@@ -28,7 +28,6 @@ function recordProxyOutcome(success) {
  if (success) {
  if (_consecutiveProxyFailures > 0) {
  // Optional: Only log recovery to reduce noise
- // console.log('[ProxyCircuit] Proxy recovered.');
  }
  _consecutiveProxyFailures = 0;
  } else {
@@ -36,86 +35,11 @@ function recordProxyOutcome(success) {
  }
  }

- /**
- * (REFACTORED: Concurrency set to 1, added fallback and verbose logging)
- */
- async function lookupUsernames(cids, { logger, headerManager, proxyManager }, config) {
- if (!cids?.length) return [];
- logger.log('INFO', `[lookupUsernames] Looking up usernames for ${cids.length} CIDs.`);
-
- // --- Set concurrency to 1 because appscript gets really fucked up with undocumented rate limits if we try spam it concurrently, a shame but that's life. DO NOT CHANGE THIS
- const limit = pLimit(1);
- const { USERNAME_LOOKUP_BATCH_SIZE, ETORO_API_RANKINGS_URL } = config;
- const batches = [];
- for (let i = 0; i < cids.length; i += USERNAME_LOOKUP_BATCH_SIZE) { batches.push(cids.slice(i, i + USERNAME_LOOKUP_BATCH_SIZE).map(Number)); }
-
- const batchPromises = batches.map((batch, index) => limit(async () => {
- const batchId = `batch-${index + 1}`;
- logger.log('TRACE', `[lookupUsernames/${batchId}] Processing batch of ${batch.length} CIDs...`); // DOWNGRADED TO TRACE
-
- const header = await headerManager.selectHeader();
- if (!header) { logger.log('ERROR', `[lookupUsernames/${batchId}] Could not select a header.`); return null; }
-
- let wasSuccess = false;
- let proxyUsed = false;
- let response;
- const url = `${ETORO_API_RANKINGS_URL}?Period=LastTwoYears`;
- const options = { method: 'POST', headers: { ...header.header, 'Content-Type': 'application/json' }, body: JSON.stringify(batch) };
-
- // --- 1. Try Proxy (Circuit Breaker Protected) ---
- if (shouldTryProxy()) {
- try {
- logger.log('TRACE', `[lookupUsernames/${batchId}] Attempting fetch via AppScript proxy...`);
- response = await proxyManager.fetch(url, options);
- if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
-
- wasSuccess = true;
- proxyUsed = true;
- recordProxyOutcome(true); // Reset failure count
- logger.log('TRACE', `[lookupUsernames/${batchId}] AppScript proxy fetch successful.`); // DOWNGRADED TO TRACE
-
- } catch (proxyError) {
- recordProxyOutcome(false); // Increment failure count
- logger.log('WARN', `[lookupUsernames/${batchId}] AppScript proxy fetch FAILED. Error: ${proxyError.message}. Failures: ${_consecutiveProxyFailures}/${MAX_PROXY_FAILURES}.`, { error: proxyError.message, source: 'AppScript' });
- // Fall through to direct...
- }
- } else {
- logger.log('TRACE', `[lookupUsernames/${batchId}] Circuit Breaker Open. Skipping Proxy.`);
- }
-
- // --- 2. Direct Fallback ---
- if (!wasSuccess) {
- try {
- response = await fetch(url, options);
- if (!response.ok) { const errorText = await response.text(); throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); }
- logger.log('TRACE', `[lookupUsernames/${batchId}] Direct node-fetch fallback successful.`); // DOWNGRADED TO TRACE
- wasSuccess = true; // It worked eventually
- } catch (fallbackError) {
- logger.log('ERROR', `[lookupUsernames/${batchId}] Direct node-fetch fallback FAILED. Giving up on this batch.`, { error: fallbackError.message, source: 'eToro/Network' });
- return null; // Give up on this batch
- }
- }
-
- if (proxyUsed) { headerManager.updatePerformance(header.id, wasSuccess); }
-
- try {
- const data = await response.json(); return data;
- } catch (parseError) {
- logger.log('ERROR', `[lookupUsernames/${batchId}] Failed to parse JSON response.`, { error: parseError.message }); return null;
- }
- }));
-
- const results = await Promise.allSettled(batchPromises);
- const allUsers = results.filter(r => r.status === 'fulfilled' && r.value && Array.isArray(r.value)).flatMap(r => r.value);
- logger.log('INFO', `[lookupUsernames] Found ${allUsers.length} public users out of ${cids.length}.`);
- return allUsers;
- }
-
-
  /**
  * (REFACTORED: Fully sequential, verbose logging, node-fetch fallback)
  */
- async function handleUpdate(task, taskId, { logger, headerManager, proxyManager, db, batchManager }, config, username) {
+ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager, db, batchManager }, config) {
+ // Note: 'username' param removed from signature as it is no longer needed.
  const { userId, instruments, instrumentId, userType } = task;
  const instrumentsToProcess = userType === 'speculator' ? (instruments || [instrumentId]) : [undefined];
  const today = new Date().toISOString().slice(0, 10);
@@ -137,7 +61,15 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  if (!historyHeader) {
  logger.log('WARN', `[handleUpdate/${userId}] Could not select history header. Skipping history.`);
  } else {
- const historyUrl = `${config.ETORO_API_USERSTATS_URL}${username}/trades/oneYearAgo?CopyAsAsset=true`;
+
+ // --- REFACTOR: New Granular API Logic ---
+ // No username required. Uses CID (userId) directly.
+ const d = new Date();
+ d.setFullYear(d.getFullYear() - 1);
+ const oneYearAgoStr = d.toISOString();
+ const uuid = crypto.randomUUID ? crypto.randomUUID() : '0205aca7-bd37-4884-8455-f28ce1add2de'; // Fallback for older nodes
+
+ const historyUrl = `https://www.etoro.com/sapi/trade-data-real/history/public/credit/flat?StartTime=${oneYearAgoStr}&PageNumber=1&ItemsPerPage=30000&PublicHistoryPortfolioFilter=&CID=${userId}&client_request_id=${uuid}`;
  const options = { headers: historyHeader.header };
  let response;

@@ -278,4 +210,4 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  logger.log('TRACE', `[handleUpdate/${userId}] Update task finished successfully.`); // DOWNGRADED TO TRACE
  }

- module.exports = { handleUpdate, lookupUsernames };
+ module.exports = { handleUpdate };
@@ -11,7 +11,7 @@

  const { handleDiscover } = require('../helpers/discover_helpers');
  const { handleVerify } = require('../helpers/verify_helpers');
- const { handleUpdate, lookupUsernames } = require('../helpers/update_helpers');
+ const { handleUpdate } = require('../helpers/update_helpers'); // Removed lookupUsernames import
  const pLimit = require('p-limit');

  /**
@@ -27,27 +27,25 @@ function parseTaskPayload(message, logger) {
  }

  /**
- * Sorts tasks into update (with username), lookup (missing username), and other (discover/verify).
+ * Sorts tasks into update and other (discover/verify).
+ * REFACTORED: Simplified. No username lookup logic needed.
  */
  async function prepareTaskBatches(tasks, batchManager, logger) {
- const tasksToRun = [], cidsToLookup = new Map(), otherTasks = [];
- await batchManager.loadUsernameMap();
+ const tasksToRun = [], otherTasks = [];
+
  for (const task of tasks) {
- if (task.type === 'update') { const username = batchManager.getUsername(task.userId); username ? tasksToRun.push({ task, username }) : cidsToLookup.set(String(task.userId), task); } else otherTasks.push(task); }
- logger.log('INFO', `[TaskEngine] Sorting complete. Known: ${tasksToRun.length}, Lookup: ${cidsToLookup.size}, Other: ${otherTasks.length}`);
- return { tasksToRun, cidsToLookup, otherTasks };
- }
-
- /**
- * Runs username lookups for missing CIDs and adds to tasksToRun.
- */
- async function runUsernameLookups(tasksToRun, cidsToLookup, dependencies, config, batchManager, logger) {
- if (!cidsToLookup.size) return;
- logger.log('INFO', `[TaskEngine] Looking up ${cidsToLookup.size} usernames...`);
- // Pass config to lookupUsernames
- const foundUsers = await lookupUsernames([...cidsToLookup.keys()], dependencies, config); // <--- PASS FULL CONFIG
- for (const u of foundUsers) { const cid = String(u.CID), username = u.Value.UserName; batchManager.addUsernameMapUpdate(cid, username); const task = cidsToLookup.get(cid); if (task) { tasksToRun.push({ task, username }); cidsToLookup.delete(cid); } }
- if (cidsToLookup.size) logger.log('WARN', `[TaskEngine] Could not find ${cidsToLookup.size} usernames (likely private).`, { skippedCids: [...cidsToLookup.keys()] });
+ if (task.type === 'update') {
+ // New API uses CID (userId), so we push directly to run.
+ tasksToRun.push(task);
+ } else {
+ otherTasks.push(task);
+ }
+ }
+
+ // We explicitly return empty structures for compatibility if handler_creator expects them,
+ // though ideally handler_creator should also be simplified.
+ // For now, we return compatible object structure.
+ return { tasksToRun, cidsToLookup: new Map(), otherTasks };
  }

  /**
@@ -88,11 +86,16 @@ async function executeTasks(tasksToRun, otherTasks, dependencies, config, taskId
  }

  // 2. Queue 'update' tasks
- for (const { task, username } of tasksToRun) {
+ for (const task of tasksToRun) {
+ // We unpack 'task' directly now, no wrapping object {task, username}
+ // However, we must ensure backward compatibility if the array was {task, username} before.
+ // In prepareTaskBatches above, we pushed raw 'task'.
+ // So we use 'task' directly.
+
  const subTaskId = `${task.type}-${task.userType || 'unknown'}-${task.userId}`;
  allTaskPromises.push(
  limit(() =>
- handleUpdate(task, subTaskId, dependencies, config, username)
+ handleUpdate(task, subTaskId, dependencies, config)
  .then(() => taskCounters.update++)
  .catch(err => {
  logger.log('ERROR', `[TaskEngine/${taskId}] Error in handleUpdate for ${task.userId}`, { errorMessage: err.message });
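Reviewer note: after this change, executeTasks receives plain task objects rather than { task, username } wrappers. Illustrative shape (hypothetical payload):

    // prepareTaskBatches([{ type: 'update', userId: 42 }, { type: 'discover' }], batchManager, logger)
    // => {
    //      tasksToRun:   [ { type: 'update', userId: 42 } ],
    //      cidsToLookup: new Map(),   // always empty now, kept for compatibility
    //      otherTasks:   [ { type: 'discover' } ]
    //    }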
@@ -112,4 +115,5 @@ async function executeTasks(tasksToRun, otherTasks, dependencies, config, taskId
  );
  }

- module.exports = { parseTaskPayload, prepareTaskBatches, runUsernameLookups, executeTasks };
+ // Note: runUsernameLookups removed from exports
+ module.exports = { parseTaskPayload, prepareTaskBatches, executeTasks };
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bulltrackers-module",
- "version": "1.0.205",
+ "version": "1.0.206",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [
@@ -1,19 +0,0 @@
-
- // Mock types
- namespace Firestore {
- export class DocumentReference { }
- }
-
- const pLimit = (concurrency: number) => {
- return (fn: () => Promise<any>) => fn();
- };
-
- const OUTER_CONCURRENCY_LIMIT = 2;
- const outerLimit = pLimit(OUTER_CONCURRENCY_LIMIT);
-
- const shardChunks: Firestore.DocumentReference[][] = [];
-
- // The problematic code
- const chunkPromises = shardChunks.map((shardChunkRefs, index) => outerLimit(async () => {
- console.log(index);
- }));