bulltrackers-module 1.0.170 → 1.0.171
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/helpers/computation_pass_runner.js +10 -2
- package/functions/computation-system/helpers/orchestration_helpers.js +112 -44
- package/functions/task-engine/helpers/discover_helpers.js +33 -51
- package/functions/task-engine/helpers/update_helpers.js +57 -159
- package/functions/task-engine/helpers/verify_helpers.js +27 -44
- package/package.json +1 -5
package/functions/computation-system/helpers/computation_pass_runner.js
CHANGED
@@ -1,6 +1,8 @@
 /**
  * FIXED: computation_pass_runner.js
  * Now calculates earliest date PER CALCULATION, not per pass
+ * --- MODIFIED: Passes 'existingResults' to 'runStandardComputationPass'
+ * to replicate the test harness's 7-argument signature. ---
  */
 
 const { groupByPass, checkRootDataAvailability, fetchExistingResults, runStandardComputationPass, runMetaComputationPass } = require('./orchestration_helpers.js');
@@ -52,7 +54,13 @@ async function runComputationPass(config, dependencies, computationManifest) {
       const metaToRun = metaCalcs.filter(c => checkDeps(c, rootData, existingResults, dateToProcess));
       if (!standardToRun.length && !metaToRun.length) return logger.log('INFO', `[PassRunner] All calcs complete for ${dateStr}. Skipping.`);
       logger.log('INFO', `[PassRunner] Running ${dateStr}: ${standardToRun.length} standard, ${metaToRun.length} meta`);
-
+
+      // --- THIS IS THE CHANGE ---
+      // 'existingResults' (which is the test harness's 'precomputedDependencies')
+      // is now passed to 'runStandardComputationPass'.
+      if (standardToRun.length) await runStandardComputationPass(dateToProcess, standardToRun, `Pass ${passToRun} (Standard)`, config, dependencies, rootData, existingResults);
+      // --- END CHANGE ---
+
       if (metaToRun.length) await runMetaComputationPass(dateToProcess, metaToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, rootData);
     } catch (err) { logger.log('ERROR', `[PassRunner] FAILED Pass ${passToRun} for ${dateStr}`, { errorMessage: err.message, stack: err.stack }); }
   };
@@ -63,4 +71,4 @@ async function runComputationPass(config, dependencies, computationManifest) {
   logger.log('INFO', `[PassRunner] Pass ${passToRun} orchestration finished.`);
 }
 
-module.exports = { runComputationPass };
+module.exports = { runComputationPass };
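
For orientation, the 7-argument signature referenced above is the contract each calculation module is expected to expose. A minimal sketch of a conforming module follows; the module name and body are hypothetical, but the argument order matches the `calc.process(...)` calls added in this diff and the `getResult(fetchedDeps)` call added to `runStandardComputationPass`:

// hypothetical-calc.js -- illustrative only, not part of the package
module.exports = {
  manifest: { category: 'behavioural', isHistorical: true, rootDataDependencies: ['portfolio', 'history'] },
  _totals: {},
  // Args mirror the 7-argument call sites in this diff.
  process(todayPayload, yesterdayPayload, uid, userContext, todayInsights, yesterdayInsights, fetchedDeps) {
    if (!todayPayload) return; // non-streaming calcs may receive null payloads
    this._totals[uid] = (todayPayload.history || []).length;
  },
  // getResult now receives the fetched dependencies, as in the test harness.
  getResult(fetchedDeps) { return this._totals; },
};
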
package/functions/computation-system/helpers/orchestration_helpers.js
CHANGED
@@ -248,7 +248,7 @@ function initializeCalculators(calcs, logger) {
 
 /**
  * Stage 7: Load T-1 (yesterday) data needed by historical calculations.
- * --- THIS FUNCTION
+ * --- THIS IS THE FULLY CORRECTED FUNCTION (WITH FRIEND'S BUG FIX) ---
  */
 async function loadHistoricalData(date, calcs, config, deps, rootData) {
   const { logger } = deps;
@@ -259,7 +259,7 @@ async function loadHistoricalData(date, calcs, config, deps, rootData) {
   const needsYesterdayInsights = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('insights'));
   const needsYesterdaySocial = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('social'));
   const needsYesterdayPortfolio = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('portfolio'));
-  const needsYesterdayHistory = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('history')); // <--
+  const needsYesterdayHistory = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('history')); // <-- The check
   const needsYesterdayDependencies = calcs.some(c => c.isHistorical && c.dependencies && c.dependencies.length > 0);
 
   const prev = new Date(date);
@@ -281,13 +281,14 @@ async function loadHistoricalData(date, calcs, config, deps, rootData) {
       updated.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
     })());
   }
-
+
+  // --- THIS IS THE MISSING LOGIC BLOCK (FIXED) ---
   if (needsYesterdayHistory) {
     tasks.push((async () => { logger.log('INFO', `[PassRunner] Getting YESTERDAY history refs for ${prevStr}`);
       updated.yesterdayHistoryRefs = await getHistoryPartRefs(config, deps, prevStr);
     })());
   }
-  // --- END
+  // --- END MISSING LOGIC BLOCK ---
 
   if (needsYesterdayDependencies) {
     tasks.push((async () => { logger.log('INFO', `[PassRunner] Loading YESTERDAY computed dependencies for ${prevStr}`);
@@ -303,9 +304,10 @@ async function loadHistoricalData(date, calcs, config, deps, rootData) {
 
 /**
  * Stage 8: Stream and process data for standard calculations.
- * --- THIS FUNCTION
+ * --- THIS IS THE FULLY CORRECTED FUNCTION (WITH ALL FIXES) ---
+ * --- REPLICATES THE 7-ARGUMENT "HACK" SIGNATURE FROM TEST HARNESS ---
  */
-async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs) {
+async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps) {
   const { logger, calculationUtils } = deps;
   const { todayInsights, yesterdayInsights, todaySocialPostInsights, yesterdaySocialPostInsights, yesterdayDependencyData } = rootData;
 
@@ -321,6 +323,7 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
   };
 
   // --- Run non-streaming (meta) calcs once ---
+  // This logic is 1:1 with the test harness
   let firstUser = true;
   for (const name in state) {
     const calc = state[name];
@@ -330,24 +333,46 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
     if (cat === 'socialPosts' || cat === 'insights') {
       if (firstUser) {
         logger.log('INFO', `[${passName}] Running non-streaming calc: ${name} for ${dateStr}`);
-
+
+        // --- CALLING 7-ARGUMENT "HACK" SIGNATURE (for non-streaming) ---
+        // We emulate the test harness's `process` call for social/insights
+        const userContext = { ...context, userType: 'n/a' };
+        const todayPayload = null; // No user data
+        const yesterdayPayload = null; // No user data
+
         try {
-          await Promise.resolve(calc.process(
+          await Promise.resolve(calc.process(
+            todayPayload,      // Arg 1: The data object
+            yesterdayPayload,  // Arg 2: Yesterday's data
+            null,              // Arg 3: User ID
+            userContext,       // Arg 4: Context
+            todayInsights,     // Arg 5: Today Insights
+            yesterdayInsights, // Arg 6: Yesterday Insights
+            fetchedDeps        // Arg 7: Fetched Dependencies
+          ));
         } catch (e) { logger.log('WARN', `Process error on ${name} (non-stream) for ${dateStr}`, { err: e.message }); }
       }
     }
   }
 
-
-
+  // --- FIX 1: THE FAULTY GUARD CLAUSE (From friend) ---
+  // This now correctly checks for calcs that need 'portfolio' OR 'history',
+  // matching the test harness behavior of running history-only passes.
+  const calcsThatStream = Object.values(state).filter(calc =>
+    calc && calc.manifest && (
+      calc.manifest.rootDataDependencies.includes('portfolio') ||
+      calc.manifest.rootDataDependencies.includes('history')
+    )
   );
 
-  if (
-    logger.log('INFO', `[${passName}] No portfolio
+  if (calcsThatStream.length === 0) {
+    logger.log('INFO', `[${passName}] No portfolio or history streaming calcs to run for ${dateStr}. Skipping stream.`);
     return;
   }
 
-
+  // --- FIX 2: THE TYPO (From friend) ---
+  // This log message now correctly references 'calcsThatStream'.
+  logger.log('INFO', `[${passName}] Streaming portfolio & historical data for ${calcsThatStream.length} calcs for ${dateStr}...`);
 
   const prevDate = new Date(dateStr + 'T00:00:00Z');
   prevDate.setUTCDate(prevDate.getUTCDate() - 1);
@@ -356,46 +381,46 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
   // Check which iterators we need
   const needsYesterdayPortfolio = Object.values(state).some(c => c && c.manifest.isHistorical && c.manifest.rootDataDependencies.includes('portfolio'));
   const needsTodayHistory = Object.values(state).some(c => c && c.manifest.rootDataDependencies.includes('history'));
-  const needsYesterdayHistory = Object.values(state).some(c => c && c.manifest.isHistorical && c.manifest.rootDataDependencies.includes('history'));
+  const needsYesterdayHistory = Object.values(state).some(c => c && c.manifest.isHistorical && c.manifest.rootDataDependencies.includes('history'));
 
   // --- Create all necessary iterators ---
+  // (This code is now correct because rootData.yesterdayHistoryRefs will be populated by Fix 1)
   const tP_iterator = streamPortfolioData(config, deps, dateStr, portfolioRefs);
   const yP_iterator = needsYesterdayPortfolio ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
   const hT_iterator = needsTodayHistory ? streamHistoryData(config, deps, dateStr, historyRefs) : null;
-  const hY_iterator = needsYesterdayHistory ? streamHistoryData(config, deps, prevDateStr, rootData.yesterdayHistoryRefs) : null;
+  const hY_iterator = needsYesterdayHistory ? streamHistoryData(config, deps, prevDateStr, rootData.yesterdayHistoryRefs) : null;
 
   let yesterdayPortfolios = {};
   let todayHistoryData = {};
-  let yesterdayHistoryData = {};
+  let yesterdayHistoryData = {};
 
   // Pre-load the first chunk of historical data
   if (yP_iterator) { Object.assign(yesterdayPortfolios, (await yP_iterator.next()).value || {}); }
   if (hT_iterator) { Object.assign(todayHistoryData, (await hT_iterator.next()).value || {}); }
-  if (hY_iterator) { Object.assign(yesterdayHistoryData, (await hY_iterator.next()).value || {}); }
+  if (hY_iterator) { Object.assign(yesterdayHistoryData, (await hY_iterator.next()).value || {}); }
 
   for await (const chunk of tP_iterator) {
     // Load the *next* chunk of historical data to stay in sync
     if (yP_iterator) { Object.assign(yesterdayPortfolios, (await yP_iterator.next()).value || {}); }
     if (hT_iterator) { Object.assign(todayHistoryData, (await hT_iterator.next()).value || {}); }
-    if (hY_iterator) { Object.assign(yesterdayHistoryData, (await hY_iterator.next()).value || {}); }
+    if (hY_iterator) { Object.assign(yesterdayHistoryData, (await hY_iterator.next()).value || {}); }
 
     for (const uid in chunk) {
-      const p = chunk[uid];
+      const p = chunk[uid];
       if (!p) continue;
 
       const userType = p.PublicPositions ? 'speculator' : 'normal';
-
+      const userContext = { ...context, userType };
 
       // Get corresponding T-1 data
       const pY = yesterdayPortfolios[uid] || null;
       const hT = todayHistoryData[uid] || null;
-      const hY = yesterdayHistoryData[uid] || null; // <--
+      const hY = yesterdayHistoryData[uid] || null; // <-- This will now have data
 
       for (const name in state) {
        const calc = state[name];
        if (!calc || typeof calc.process !== 'function') continue;
 
-       // --- Refactored Filter Block ---
       const manifest = calc.manifest;
       const cat = manifest.category;
       const isSocialOrInsights = cat === 'socialPosts' || cat === 'insights';
@@ -404,30 +429,54 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
       const isSpeculatorCalc = cat === 'speculators';
       const isUserProcessed = name === 'users-processed';
 
-      // Skip if user type doesn't match calc type
       if (userType === 'normal' && isSpeculatorCalc) continue;
       if (userType === 'speculator' && !isSpeculatorCalc && !isUserProcessed) continue;
 
-      // Skip historical calcs if T-1 portfolio is missing (with exceptions)
       if (manifest.isHistorical && !pY) {
-        // These calcs only need T-1 *history*, not portfolio
        if (cat !== 'behavioural' && name !== 'historical-performance-aggregator') {
          continue;
        }
       }
-      // --- End Filter Block ---
 
-
-
-
-
+      // --- FIX 3: REPLICATE 7-ARGUMENT "HACK" SIGNATURE ---
+      // This logic block replicates the test harness's 'todayPayload'
+      // and 'yesterdayPayload' construction.
+      const rootDataDeps = manifest.rootDataDependencies || ['portfolio'];
+      const needsHistoricalData = manifest.isHistorical || false;
+
+      let todayPayload = null;
+      let yesterdayPayload = null;
+
+      if (rootDataDeps.includes('portfolio')) {
+        todayPayload = p || {}; // Start with portfolio
+        yesterdayPayload = needsHistoricalData ? (pY || {}) : null;
+        // Nest history if also requested
+        if (rootDataDeps.includes('history')) {
+          todayPayload.history = hT;
+          if (yesterdayPayload) yesterdayPayload.history = hY;
+        }
+      } else if (rootDataDeps.includes('history')) {
+        // If *only* history is requested, it becomes Arg 1
+        todayPayload = hT;
+        yesterdayPayload = needsHistoricalData ? hY : null;
       } else {
-        //
-
+        // Fallback for calcs like price-metrics
+        todayPayload = p || {};
+        yesterdayPayload = needsHistoricalData ? (pY || {}) : null;
       }
-
+      // --- END PAYLOAD CONSTRUCTION ---
+
       try {
-
+        // Call with the 7-argument signature
+        await Promise.resolve(calc.process(
+          todayPayload,      // Arg 1: The data object (built above)
+          yesterdayPayload,  // Arg 2: Yesterday's data
+          uid,               // Arg 3: User ID
+          userContext,       // Arg 4: Context
+          todayInsights,     // Arg 5: Today Insights
+          yesterdayInsights, // Arg 6: Yesterday Insights
+          fetchedDeps        // Arg 7: Fetched Dependencies
+        ));
       } catch (e) {
        logger.log('WARN', `Process error on ${name} for ${uid} on ${dateStr}`, { err: e.message });
       }
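
The chunk-synchronisation pattern above (pre-load one chunk of each secondary stream, then advance all of them once per primary chunk) can be hard to see through the diff noise. A self-contained sketch of the same pattern, assuming each stream is an async generator yielding `{ uid: data }` chunks as `streamPortfolioData`/`streamHistoryData` appear to do:

// Illustrative only; chunkedStream stands in for the package's streaming helpers.
async function* chunkedStream(records, chunkSize = 2) {
  for (let i = 0; i < records.length; i += chunkSize) {
    yield Object.fromEntries(records.slice(i, i + chunkSize));
  }
}

async function demo() {
  const today = chunkedStream([['u1', { v: 1 }], ['u2', { v: 2 }], ['u3', { v: 3 }]]);
  const yesterday = chunkedStream([['u1', { v: 0 }], ['u2', { v: 1 }]]);
  const yMap = {};
  // Pre-load one chunk, then advance the secondary stream once per primary chunk,
  // so yMap usually already holds the T-1 record by the time a uid is processed.
  Object.assign(yMap, (await yesterday.next()).value || {});
  for await (const chunk of today) {
    Object.assign(yMap, (await yesterday.next()).value || {});
    for (const uid in chunk) console.log(uid, chunk[uid], yMap[uid] || null);
  }
}
demo();
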
@@ -453,14 +502,9 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
 
 /**
  * Stage 9: Run standard computations
- *
- * @param {Array} calcs - The calculations to run.
- * @param {string} passName - The name of the pass (for logging).
- * @param {object} config - Computation system config.
- * @param {object} deps - Shared dependencies.
- * @param {object} rootData - The loaded root data for today.
+ * --- MODIFIED: Now accepts 'fetchedDeps' and passes it to 'streamAndProcess' ---
  */
-async function runStandardComputationPass(date, calcs, passName, config, deps, rootData) {
+async function runStandardComputationPass(date, calcs, passName, config, deps, rootData, fetchedDeps) {
   const dStr = date.toISOString().slice(0, 10), logger = deps.logger;
   if (calcs.length === 0) {
     logger.log('INFO', `[${passName}] No standard calcs to run for ${dStr} after filtering.`);
@@ -476,7 +520,9 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
   const state = initializeCalculators(calcs, logger);
 
   // Stream T and T-1 data and process
-
+  // --- THIS IS THE CHANGE ---
+  await streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps);
+  // --- END CHANGE ---
 
   // --- Verbose Logging Setup ---
   const successCalcs = [];
@@ -492,7 +538,10 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
     if (!calc || typeof calc.getResult !== 'function') continue;
 
     try {
-
+      // --- THIS IS THE CHANGE ---
+      // Pass 'fetchedDeps' to getResult, just like the test harness
+      const result = await Promise.resolve(calc.getResult(fetchedDeps));
+      // --- END CHANGE ---
 
       if (result && Object.keys(result).length > 0) {
         const standardResult = {};
@@ -633,8 +682,27 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
 
   // Meta-calc `process` is different: it receives the date, full dependencies, config, and fetched dependencies.
   // It *also* gets the T-1 root data via the `dependencies.rootData` object.
-  const result = await Promise.resolve(inst.process(dStr, { ...deps, rootData: fullRoot }, config, fetchedDeps));
 
+  // --- REPLICATE 5-ARGUMENT "HACK" SIGNATURE (from test harness) ---
+  // This is the "hack" fix for meta calcs from worker.js.
+  const metaPayload = {
+    social: rootData.todaySocialPostInsights,
+    insights: rootData.todayInsights,
+    priceData: null, // You don't load this yet, but test harness has it
+    yesterdayInsights: fullRoot.yesterdayInsights,
+    yesterdayPriceData: null, // You don't load this yet
+    date: dStr
+  };
+
+  const result = await Promise.resolve(inst.process(
+    metaPayload,  // Arg 1: The data object (for your hacks)
+    fullRoot,     // Arg 2: (rootData)
+    deps,         // Arg 3: (dependencies)
+    config,       // Arg 4: (config)
+    fetchedDeps   // Arg 5: (fetchedDependencies)
+  ));
+  // --- END SIGNATURE REPLICATION ---
+
   if (result && Object.keys(result).length > 0) {
     const standardResult = {};
 
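
A meta calculation conforming to the new 5-argument call might look like the following. The module shape is hypothetical; only the argument order is taken from the `inst.process(...)` call added above:

// hypothetical-meta-calc.js -- illustrative only, not part of the package
module.exports = class ExampleMetaCalc {
  // Argument order mirrors the 5-argument call site added in this diff.
  async process(metaPayload, rootData, dependencies, config, fetchedDependencies) {
    const { insights, date } = metaPayload;
    if (!insights) return {}; // nothing to aggregate for this date
    return { [date]: { insightCount: Object.keys(insights).length } };
  }
};
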
package/functions/task-engine/helpers/discover_helpers.js
CHANGED
@@ -24,8 +24,7 @@ async function handleDiscover(task, taskId, dependencies, config) {
   const url = `${config.ETORO_API_RANKINGS_URL}?Period=LastTwoYears`;
 
   const selectedHeader = await headerManager.selectHeader();
-  if (!selectedHeader) {
-    logger.log('ERROR', `[DISCOVER/${taskId}] Could not select a header. Aborting task.`);
+  if (!selectedHeader) { logger.log('ERROR', `[DISCOVER/${taskId}] Could not select a header. Aborting task.`);
     throw new Error("Could not select a header.");
   }
 
@@ -35,45 +34,34 @@ async function handleDiscover(task, taskId, dependencies, config) {
 
   try { // Outer try for the whole operation
     let response;
-    const options = {
-      method: 'POST',
-      headers: { ...selectedHeader.header, 'Content-Type': 'application/json' },
-      body: JSON.stringify(cids),
-    };
+    const options = { method: 'POST', headers: { ...selectedHeader.header, 'Content-Type': 'application/json' }, body: JSON.stringify(cids), };
 
     logger.log('INFO', `${logPrefix} Starting discovery for ${cids.length} CIDs. Block: ${blockId}, Type: ${userType}`);
 
     try {
       // --- REFACTOR 3: ADD FALLBACK ---
-      logger.log('TRACE', `${logPrefix} Attempting discovery fetch via AppScript proxy...`);
+      logger.log('TRACE', `${logPrefix} Attempting discovery fetch via AppScript proxy...`);
       response = await proxyManager.fetch(url, options);
-      if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
+      if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`); // Failure
 
-    } catch (proxyError) {
-      logger.log('WARN', `${logPrefix} AppScript proxy fetch FAILED. Error: ${proxyError.message}. Attempting direct node-fetch fallback.`, {
-        error: proxyError.message,
-        source: 'AppScript'
-      });
+    } catch (proxyError) { // Fuck we failed with appscript IP Pools, log error type
+      logger.log('WARN', `${logPrefix} AppScript proxy fetch FAILED. Error: ${proxyError.message}. Attempting direct node-fetch fallback.`, { error: proxyError.message, source: 'AppScript' });
       proxyUsed = false;
 
       try {
-        response = await fetch(url, options); // Direct node-fetch
-        if (!response.ok) {
-
-          throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`);
+        response = await fetch(url, options); // Direct node-fetch fallback, use GCP IP Pools
+        if (!response.ok) { const errorText = await response.text();
+          throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); // Fuck we failed here too.
        }
 
-      } catch (fallbackError) {
-        logger.log('ERROR', `${logPrefix} Direct node-fetch fallback FAILED.`, {
-          error: fallbackError.message,
-          source: 'eToro/Network'
-        });
+      } catch (fallbackError) { // Figure out the error type
+        logger.log('ERROR', `${logPrefix} Direct node-fetch fallback FAILED.`, { error: fallbackError.message, source: 'eToro/Network' });
        throw fallbackError; // Throw to be caught by outer try
      }
      // --- END REFACTOR 3 ---
    }
 
-    // --- If we are here, `response` is valid ---
+    // --- If we are here, `response` is valid and we are very smart ---
 
    // Step 1. Discover Speculators
    if (userType === 'speculator') {
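
The proxy-then-direct pattern above repeats in the discover, update, and verify handlers. As a compact illustration of the shared control flow (not code from the package; `proxyManager.fetch` and the logger shape are taken from the diff, the helper name is hypothetical):

// Hypothetical helper sketching the shared fallback flow.
async function fetchWithFallback(url, options, { proxyManager, logger }, logPrefix) {
  try {
    // First attempt: AppScript proxy.
    const res = await proxyManager.fetch(url, options);
    if (!res.ok) throw new Error(`AppScript proxy failed with status ${res.status}`);
    return { response: res, proxyUsed: true };
  } catch (proxyError) {
    logger.log('WARN', `${logPrefix} Proxy fetch FAILED: ${proxyError.message}. Trying direct fetch.`);
    // Second attempt: direct fetch from the worker itself (GCP egress IPs).
    const res = await fetch(url, options);
    if (!res.ok) {
      const errorText = await res.text();
      throw new Error(`Direct fetch failed with status ${res.status}. Response: ${errorText.substring(0, 200)}`);
    }
    return { response: res, proxyUsed: false };
  }
}
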
@@ -87,17 +75,12 @@ async function handleDiscover(task, taskId, dependencies, config) {
      // --- REFACTOR 4: LOG RAW RESPONSE ON PARSE FAILURE ---
      publicUsers = JSON.parse(body);
    } catch (parseError) {
-      logger.log('ERROR', `${logPrefix} FAILED TO PARSE JSON RESPONSE. RAW BODY:`, {
-        parseErrorMessage: parseError.message,
-        rawResponseText: body
-      });
+      logger.log('ERROR', `${logPrefix} FAILED TO PARSE JSON RESPONSE. RAW BODY:`, { parseErrorMessage: parseError.message, rawResponseText: body });
      throw new Error(`Failed to parse JSON response from discovery API. Body: ${body.substring(0, 200)}`);
    }
    // --- END REFACTOR 4 ---
 
-    if (!Array.isArray(publicUsers)) {
-      logger.log('WARN', `${logPrefix} API returned non-array response. Type: ${typeof publicUsers}`);
-      wasSuccess = true; // API call worked, data was just empty/weird
+    if (!Array.isArray(publicUsers)) { logger.log('WARN', `${logPrefix} API returned non-array response. Type: ${typeof publicUsers}`); wasSuccess = true; // API call worked, data was just empty/weird
      return;
    }
 
@@ -110,9 +93,9 @@ async function handleDiscover(task, taskId, dependencies, config) {
    // Step 2. Filter active users
    const preliminaryActiveUsers = publicUsers.filter(user =>
      new Date(user.Value.LastActivity) > oneMonthAgo &&
-      user.Value.DailyGain !== 0 &&
-      user.Value.Exposure !== 0 &&
-      user.Value.RiskScore !== 0
+      user.Value.DailyGain !== 0 && // Daily % gain, if 0 then they can't hold positions --- or its a weekend and portfolio holds solely stocks
+      user.Value.Exposure !== 0 && // Not sure what this means exactly but exposure should refer to position % held/invested in
+      user.Value.RiskScore !== 0 // If portfolio has risk score 0, they cannot have positions
    );
    logger.log('INFO', `${logPrefix} Found ${preliminaryActiveUsers.length} preliminary active users.`);
 
@@ -136,26 +119,27 @@ async function handleDiscover(task, taskId, dependencies, config) {
      const nonSpeculatorCids = [];
      for (const user of preliminaryActiveUsers) {
        const v = user.Value;
-        const totalLeverage = (v.MediumLeveragePct || 0) + (v.HighLeveragePct || 0);
-
+        const totalLeverage = (v.MediumLeveragePct || 0) + (v.HighLeveragePct || 0); // Do they own at least some leveraged positions?
+        // We don't know what they are, but we can see their total average leverage; if not 0 on medium/high leverage, then they MUST have at least 1 leveraged position
+        // Note : Medium leverage and high leverage are somewhat arbitrary. Low leverage is presumed to mean "1", and thus medium leverage is the middle of the available leverage ranges; for stocks this is usually 2, for other assets this can be 5/10 or 20, it varies. High leverage is presumed to be the max leverage for that asset
+        const isLikelySpeculator = ((v.Trades || 0) > 500 || (v.TotalTradedInstruments || 0) > 50 || totalLeverage > 50 || (v.WeeklyDD || 0) < -25); // Apply filtering on their total leverage, drawdown and trade numbers to figure out if they are likely a speculative portfolio.
 
-        if (isLikelySpeculator) {
-          finalActiveUsers.push(user);
-        } else {
-          nonSpeculatorCids.push(user.CID);
-        }
+        if (isLikelySpeculator) { finalActiveUsers.push(user); } else { nonSpeculatorCids.push(user.CID); }
      }
-      invalidCidsToLog.push(...nonSpeculatorCids);
+      invalidCidsToLog.push(...nonSpeculatorCids); //
+      // This logs how many users did NOT pass SPECIFICALLY the islikelyspeculator
      logger.log('INFO', `${logPrefix} Speculator pre-filter complete. ${finalActiveUsers.length} users passed. ${nonSpeculatorCids.length} users failed heuristic.`);
 
-      if (invalidCidsToLog.length > 0) {
+      if (invalidCidsToLog.length > 0) { // Log the number of users we tried for speculators that failed the filtering checks.
+        // NOTE : This logs the users who failed ANY of the 3 filters (private users, inactive public users, or users who didn't meet the specific conditions for islikelyspeculator)
        await pubsub.topic(config.PUBSUB_TOPIC_INVALID_SPECULATOR_LOG).publishMessage({ json: { invalidCids: invalidCidsToLog } });
        logger.log('INFO', `${logPrefix} Reported ${invalidCidsToLog.length} invalid (private, inactive, or failed heuristic) speculator IDs.`);
      }
-
-
-
-
+      // Step 5. For non-speculator tasks, we keep all users who passed the
+      // "active user" filters. Unlike speculators, normal users do NOT require
+      // the speculative-heuristic (leverage / trades / drawdown) filter —
+      // any public & active user is valid.
+    } else { finalActiveUsers = preliminaryActiveUsers; }
 
    // Step 6. Publish 'verify' task
    if (finalActiveUsers.length > 0) {
@@ -168,12 +152,10 @@ async function handleDiscover(task, taskId, dependencies, config) {
      };
      await pubsub.topic(config.PUBSUB_TOPIC_USER_FETCH).publishMessage({ json: { tasks: [verificationTask] } });
      logger.log('SUCCESS', `${logPrefix} Chaining to 'verify' task for ${finalActiveUsers.length} active users.`);
-    } else {
-      logger.log('INFO', `${logPrefix} No active users found to verify.`);
-    }
+    } else { logger.log('INFO', `${logPrefix} No active users found to verify.`); }
 
  } catch (err) {
-    logger.log('ERROR', `${logPrefix} FATAL error processing discovery task.`, { errorMessage: err.message, errorStack: err.stack });
+    logger.log('ERROR', `${logPrefix} FATAL error processing discovery task.`, { errorMessage: err.message, errorStack: err.stack }); // We fucked up
    wasSuccess = false; // Ensure it's marked as failure
  } finally {
    if (selectedHeader && proxyUsed) {
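
For readers of the discovery logic, the two filters above can be summarised as standalone predicates. This is a restatement of the conditions already in the diff, with field names from the eToro rankings payload shown there:

// The two discovery filters, extracted for clarity (illustrative only).
const isActiveUser = (user, oneMonthAgo) =>
  new Date(user.Value.LastActivity) > oneMonthAgo &&
  user.Value.DailyGain !== 0 &&
  user.Value.Exposure !== 0 &&
  user.Value.RiskScore !== 0;

const isLikelySpeculator = (v) =>
  (v.Trades || 0) > 500 ||
  (v.TotalTradedInstruments || 0) > 50 ||
  ((v.MediumLeveragePct || 0) + (v.HighLeveragePct || 0)) > 50 ||
  (v.WeeklyDD || 0) < -25;
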
package/functions/task-engine/helpers/update_helpers.js
CHANGED
@@ -15,87 +15,40 @@ async function lookupUsernames(cids, { logger, headerManager, proxyManager }, co
  if (!cids?.length) return [];
  logger.log('INFO', `[lookupUsernames] Looking up usernames for ${cids.length} CIDs.`);
 
-  // ---
+  // --- Set concurrency to 1 because appscript gets really fucked up with undocumented rate limits if we try spam it concurrently, a shame but that's life. DO NOT CHANGE THIS
  const limit = pLimit(1);
-  // --- END REFACTOR 1 ---
-
  const { USERNAME_LOOKUP_BATCH_SIZE, ETORO_API_RANKINGS_URL } = config;
-
  const batches = [];
-  for (let i = 0; i < cids.length; i += USERNAME_LOOKUP_BATCH_SIZE) {
-
-  }
-
-  const batchPromises = batches.map((batch, index) => limit(async () => {
-    const batchId = `batch-${index + 1}`;
+  for (let i = 0; i < cids.length; i += USERNAME_LOOKUP_BATCH_SIZE) { batches.push(cids.slice(i, i + USERNAME_LOOKUP_BATCH_SIZE).map(Number)); }
+  const batchPromises = batches.map((batch, index) => limit(async () => { const batchId = `batch-${index + 1}`;
    logger.log('INFO', `[lookupUsernames/${batchId}] Processing batch of ${batch.length} CIDs...`);
-
    const header = await headerManager.selectHeader();
-    if (!header) {
-      logger.log('ERROR', `[lookupUsernames/${batchId}] Could not select a header.`);
-      return null;
-    }
-
+    if (!header) { logger.log('ERROR', `[lookupUsernames/${batchId}] Could not select a header.`); return null; }
    let wasSuccess = false;
    let proxyUsed = true;
    let response;
    const url = `${ETORO_API_RANKINGS_URL}?Period=LastTwoYears`;
    const options = { method: 'POST', headers: { ...header.header, 'Content-Type': 'application/json' }, body: JSON.stringify(batch) };
-
    try {
-      // --- REFACTOR 3: ADD FALLBACK ---
      logger.log('TRACE', `[lookupUsernames/${batchId}] Attempting fetch via AppScript proxy...`);
      response = await proxyManager.fetch(url, options);
      if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
-
-      wasSuccess = true;
+      wasSuccess = true; // Yay we win
      logger.log('INFO', `[lookupUsernames/${batchId}] AppScript proxy fetch successful.`);
-
-    } catch (proxyError) {
-      logger.log('WARN', `[lookupUsernames/${batchId}] AppScript proxy fetch FAILED. Error: ${proxyError.message}. Attempting direct node-fetch fallback.`, {
-        error: proxyError.message,
-        source: 'AppScript'
-      });
-
+    } catch (proxyError) { logger.log('WARN', `[lookupUsernames/${batchId}] AppScript proxy fetch FAILED. Error: ${proxyError.message}. Attempting direct node-fetch fallback.`, { error: proxyError.message, source: 'AppScript' }); // SHIT we failed...
      proxyUsed = false; // Don't penalize header for proxy failure
-
-
-
-
-        const errorText = await response.text();
-        throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`);
-      }
-      logger.log('INFO', `[lookupUsernames/${batchId}] Direct node-fetch fallback successful.`);
-
-    } catch (fallbackError) {
-      logger.log('ERROR', `[lookupUsernames/${batchId}] Direct node-fetch fallback FAILED. Giving up on this batch.`, {
-        error: fallbackError.message,
-        source: 'eToro/Network'
-      });
+      try { response = await fetch(url, options); // Ok let's try again with node, using GCP IP pools
+        if (!response.ok) { const errorText = await response.text(); throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); }
+        logger.log('INFO', `[lookupUsernames/${batchId}] Direct node-fetch fallback successful.`); // Yay we win
+      } catch (fallbackError) { logger.log('ERROR', `[lookupUsernames/${batchId}] Direct node-fetch fallback FAILED. Giving up on this batch.`, { error: fallbackError.message, source: 'eToro/Network' }); // SHIT, we failed here too
      return null; // Give up on this batch
    }
-
-
-
-      headerManager.updatePerformance(header.id, wasSuccess);
-      }
-    }
-
-    try {
-      const data = await response.json();
-      return data;
-    } catch (parseError) {
-      logger.log('ERROR', `[lookupUsernames/${batchId}] Failed to parse JSON response.`, { error: parseError.message });
-      return null;
-    }
-  }));
-
-  const results = await Promise.allSettled(batchPromises);
-
-  const allUsers = results
-    .filter(r => r.status === 'fulfilled' && r.value && Array.isArray(r.value))
-    .flatMap(r => r.value);
+    } finally { if (proxyUsed) { headerManager.updatePerformance(header.id, wasSuccess); } } // If we used Appscript IP Pool and not GCP IP Pool, record performance
+    try { const data = await response.json(); return data;
+    } catch (parseError) { logger.log('ERROR', `[lookupUsernames/${batchId}] Failed to parse JSON response.`, { error: parseError.message }); return null; } }));
 
+  const results = await Promise.allSettled(batchPromises);
+  const allUsers = results .filter(r => r.status === 'fulfilled' && r.value && Array.isArray(r.value)) .flatMap(r => r.value);
  logger.log('INFO', `[lookupUsernames] Found ${allUsers.length} public users out of ${cids.length}.`);
  return allUsers;
 }
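
The batching-plus-`pLimit(1)` construction above serialises the lookups while keeping the `map`/`Promise.allSettled` shape. A minimal standalone sketch of the same pattern, assuming a CommonJS-compatible `p-limit` version as the package itself uses (`processBatch` is a hypothetical stand-in for the fetch logic):

const pLimit = require('p-limit');

// Serialise async work over fixed-size batches; shape mirrors lookupUsernames.
async function runBatched(items, batchSize, processBatch) {
  const limit = pLimit(1); // concurrency 1: batches run strictly one after another
  const batches = [];
  for (let i = 0; i < items.length; i += batchSize) batches.push(items.slice(i, i + batchSize));
  const results = await Promise.allSettled(batches.map(b => limit(() => processBatch(b))));
  return results.filter(r => r.status === 'fulfilled' && Array.isArray(r.value)).flatMap(r => r.value);
}
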
@@ -110,8 +63,6 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  const today = new Date().toISOString().slice(0, 10);
  const portfolioBlockId = `${Math.floor(parseInt(userId) / 1000000)}M`;
  let isPrivate = false;
-
-  // --- REFACTOR 2: ADD VERBOSE LOGGING (with User ID) ---
  logger.log('INFO', `[handleUpdate/${userId}] Starting update task. Type: ${userType}. Instruments: ${instrumentsToProcess.join(', ')}`);
 
  // --- 1. Process History Fetch (Sequentially) ---
@@ -123,81 +74,52 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  if (!batchManager.checkAndSetHistoryFetched(userId)) {
    logger.log('INFO', `[handleUpdate/${userId}] Attempting history fetch.`);
    historyHeader = await headerManager.selectHeader();
-    if (!historyHeader) {
-      logger.log('WARN', `[handleUpdate/${userId}] Could not select history header. Skipping history.`);
+    if (!historyHeader) { logger.log('WARN', `[handleUpdate/${userId}] Could not select history header. Skipping history.`);
    } else {
      const historyUrl = `${config.ETORO_API_USERSTATS_URL}${username}/trades/oneYearAgo?CopyAsAsset=true`;
      const options = { headers: historyHeader.header };
      let response;
-
-      try {
-        // --- REFACTOR 3: ADD FALLBACK ---
-        logger.log('TRACE', `[handleUpdate/${userId}] Attempting history fetch via AppScript proxy...`);
+      try { logger.log('TRACE', `[handleUpdate/${userId}] Attempting history fetch via AppScript proxy...`);
        response = await proxyManager.fetch(historyUrl, options);
-        if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
-        wasHistorySuccess = true;
+        if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`); // SHIT we failed here
+        wasHistorySuccess = true; // Appscript worked, we are very smart
 
      } catch (proxyError) {
-        logger.log('WARN', `[handleUpdate/${userId}] History fetch via AppScript proxy FAILED. Error: ${proxyError.message}. Attempting direct node-fetch fallback.`, {
-          error: proxyError.message,
-          source: 'AppScript'
-        });
+        logger.log('WARN', `[handleUpdate/${userId}] History fetch via AppScript proxy FAILED. Error: ${proxyError.message}. Attempting direct node-fetch fallback.`, { error: proxyError.message, source: 'AppScript' }); // SHIT we failed here
        proxyUsedForHistory = false;
-
-
-
-
-          const errorText = await response.text();
-          throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`);
-        }
-        wasHistorySuccess = true; // Fallback succeeded
+        try { response = await fetch(historyUrl, options);
+          if (!response.ok) { const errorText = await response.text();
+            throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); } // SHIT we failed here too
+          wasHistorySuccess = true; // Fallback succeeded, we are so smart
 
-        } catch (fallbackError) {
-
-          error: fallbackError.message,
-          source: 'eToro/Network'
-          });
-          wasHistorySuccess = false;
+        } catch (fallbackError) { logger.log('ERROR', `[handleUpdate/${userId}] History fetch direct fallback FAILED.`, { error: fallbackError.message, source: 'eToro/Network' }); // We are dumb, everything failed
+          wasHistorySuccess = false; // Nope we are dumb....
        }
-        // --- END REFACTOR 3 ---
      }
 
-      if (wasHistorySuccess) {
-        logger.log('INFO', `[handleUpdate/${userId}] History fetch successful.`);
+      if (wasHistorySuccess) { logger.log('INFO', `[handleUpdate/${userId}] History fetch successful.`); // Some method worked, we are very smart
        const data = await response.json();
-        await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType);
-      }
+        await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType); }
    }
-  } else {
-
-
-  } catch (err) {
-    logger.log('ERROR', `[handleUpdate/${userId}] Unhandled error during history processing.`, { error: err.message });
-    wasHistorySuccess = false;
-  } finally {
-    if (historyHeader && proxyUsedForHistory) {
-      headerManager.updatePerformance(historyHeader.id, wasHistorySuccess);
-    }
-  }
+  } else { logger.log('TRACE', `[handleUpdate/${userId}] History fetch skipped (already fetched by this instance).`); }
+  } catch (err) { logger.log('ERROR', `[handleUpdate/${userId}] Unhandled error during history processing.`, { error: err.message }); wasHistorySuccess = false; // We fucked up.
+  } finally { if (historyHeader && proxyUsedForHistory) { headerManager.updatePerformance(historyHeader.id, wasHistorySuccess); } } // If we used appscript proxy, record performance, otherwise fuck off.
 
  // --- 2. Process Portfolio Fetches (Sequentially) ---
  logger.log('INFO', `[handleUpdate/${userId}] Starting ${instrumentsToProcess.length} sequential portfolio fetches.`);
 
-  for (const instId of instrumentsToProcess) {
-    if (isPrivate) {
-
-
+  for (const instId of instrumentsToProcess) {
+    if (isPrivate) {
+      logger.log('INFO', `[handleUpdate/${userId}] Skipping remaining instruments because user was marked as private.`);
+      break;
    }
 
    const portfolioHeader = await headerManager.selectHeader();
-    if (!portfolioHeader) {
-      logger.log('ERROR', `[handleUpdate/${userId}] Could not select portfolio header for instId ${instId}. Skipping this instrument.`);
+    if (!portfolioHeader) { logger.log('ERROR', `[handleUpdate/${userId}] Could not select portfolio header for instId ${instId}. Skipping this instrument.`);
      continue;
    }
 
-    const portfolioUrl = userType === 'speculator'
-      ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instId}`
-      : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
+    const portfolioUrl = userType === 'speculator' ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instId}` : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
 
    const options = { headers: portfolioHeader.header };
    let response;
@@ -208,73 +130,53 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
      // --- REFACTOR 3: ADD FALLBACK ---
      logger.log('TRACE', `[handleUpdate/${userId}] Attempting portfolio fetch for instId ${instId} via AppScript proxy...`);
      response = await proxyManager.fetch(portfolioUrl, options);
-      if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
-      wasPortfolioSuccess = true;
+      if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`); // SHIT we failed here
+      wasPortfolioSuccess = true; // Oh we are smart, worked first time.
 
-    } catch (proxyError) {
-      logger.log('WARN', `[handleUpdate/${userId}] Portfolio fetch for instId ${instId} via AppScript proxy FAILED. Error: ${proxyError.message}. Attempting direct node-fetch fallback.`, {
-
-        source: 'AppScript'
-      });
-      proxyUsedForPortfolio = false;
+    } catch (proxyError) { // try fallback with local node fetch using GCP IP Pools
+      logger.log('WARN', `[handleUpdate/${userId}] Portfolio fetch for instId ${instId} via AppScript proxy FAILED. Error: ${proxyError.message}. Attempting direct node-fetch fallback.`, { error: proxyError.message, source: 'AppScript' });
+      proxyUsedForPortfolio = false; // We are not using Appscript proxy here as fallback is GCP based, so false
 
      try {
        response = await fetch(portfolioUrl, options); // Direct node-fetch
        if (!response.ok) {
          const errorText = await response.text();
-          throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`);
+          throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); // SHIT we failed here
        }
-        wasPortfolioSuccess = true; // Fallback succeeded
+        wasPortfolioSuccess = true; // Fallback succeeded we are so smart
 
      } catch (fallbackError) {
-        logger.log('ERROR', `[handleUpdate/${userId}] Portfolio fetch for instId ${instId} direct fallback FAILED.`, {
-          error: fallbackError.message,
-          source: 'eToro/Network'
-        });
+        logger.log('ERROR', `[handleUpdate/${userId}] Portfolio fetch for instId ${instId} direct fallback FAILED.`, { error: fallbackError.message, source: 'eToro/Network' });
        wasPortfolioSuccess = false;
      }
-      // --- END REFACTOR 3 ---
    }
 
    // --- 4. Process Portfolio Result (with verbose, raw logging) ---
    if (wasPortfolioSuccess) {
      const body = await response.text();
-      if (body.includes("user is PRIVATE")) {
-        isPrivate = true;
-        logger.log('WARN', `[handleUpdate/${userId}] User is PRIVATE. Marking for removal.`);
+      if (body.includes("user is PRIVATE")) { isPrivate = true; logger.log('WARN', `[handleUpdate/${userId}] User is PRIVATE. Marking for removal.`);
        break; // Stop processing more portfolios for this private user
      }
 
      try {
        const portfolioJson = JSON.parse(body);
        await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, portfolioJson, userType, instId);
-        logger.log('INFO', `[handleUpdate/${userId}] Successfully processed portfolio for instId ${instId}.`);
-
-
-
+        if (userType === 'speculator') { logger.log('INFO', `[handleUpdate/${userId}] Successfully processed portfolio for instId ${instId}.`); // Only speculators have an instid, so this is conditional
+        } else { logger.log('INFO', `[handleUpdate/${userId}] Successfully processed full portfolio (normal user).`); } // Normal users
+
+      } catch (parseError) { // Idk why this would happen, but if it does....log.
        wasPortfolioSuccess = false; // Mark as failure
-        logger.log('ERROR', `[handleUpdate/${userId}] FAILED TO PARSE JSON RESPONSE. RAW BODY:`, {
-          url: portfolioUrl,
-          parseErrorMessage: parseError.message,
-          rawResponseText: body // <--- THIS LOGS THE FULL HTML/ERROR RESPONSE
-        });
-        // --- END REFACTOR 4 ---
+        logger.log('ERROR', `[handleUpdate/${userId}] FAILED TO PARSE JSON RESPONSE. RAW BODY:`, { url: portfolioUrl, parseErrorMessage: parseError.message, rawResponseText: body }); // Return full response
      }
-    } else {
-      logger.log('WARN', `[handleUpdate/${userId}] Portfolio fetch failed for instId ${instId}. No response to process.`);
-    }
+    } else { logger.log('WARN', `[handleUpdate/${userId}] Portfolio fetch failed for instId ${instId}. No response to process.`); }
 
-    if (proxyUsedForPortfolio) {
-
-    }
-  } // --- End of sequential portfolio loop ---
+    if (proxyUsedForPortfolio) { headerManager.updatePerformance(portfolioHeader.id, wasPortfolioSuccess); }
+  }
 
  // --- 5. Handle Private Users & Timestamps ---
  if (isPrivate) {
    logger.log('WARN', `[handleUpdate/${userId}] Removing private user from updates.`);
-    for (const instrumentId of instrumentsToProcess) {
-      await batchManager.deleteFromTimestampBatch(userId, userType, instId);
-    }
+    for (const instrumentId of instrumentsToProcess) { await batchManager.deleteFromTimestampBatch(userId, userType, instId); }
    const blockCountsRef = db.doc(config.FIRESTORE_DOC_SPECULATOR_BLOCK_COUNTS);
    for (const instrumentId of instrumentsToProcess) {
      const incrementField = `counts.${instrumentId}_${Math.floor(userId/1e6)*1e6}`;
@@ -284,12 +186,8 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
    }
 
    // If not private, update all timestamps
-    for (const instrumentId of instrumentsToProcess) {
-
-    }
-    if (userType === 'speculator') {
-      await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6));
-    }
+    for (const instrumentId of instrumentsToProcess) { await batchManager.updateUserTimestamp(userId, userType, instId); }
+    if (userType === 'speculator') { await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6)); }
 
  logger.log('INFO', `[handleUpdate/${userId}] Update task finished successfully.`);
  // 'finally' block for header flushing is handled by the main handler_creator.js
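
The `incrementField` path above implies an atomic per-block counter in Firestore. The diff does not show the actual write, so the following is only a plausible sketch; `FieldValue.increment` is the standard @google-cloud/firestore API (already a dependency of this package), and the delta value is an assumption:

const { FieldValue } = require('@google-cloud/firestore');

// Sketch of the atomic counter update implied by 'incrementField' above.
// The delta (e.g. -1 when a private user is removed) is an assumption.
async function adjustBlockCount(blockCountsRef, instrumentId, userId, delta) {
  const incrementField = `counts.${instrumentId}_${Math.floor(userId / 1e6) * 1e6}`;
  // In update(), dotted keys are field paths, so this touches only one nested counter.
  await blockCountsRef.update({ [incrementField]: FieldValue.increment(delta) });
}
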
package/functions/task-engine/helpers/verify_helpers.js
CHANGED
@@ -14,13 +14,10 @@ async function fetchAndVerifyUser(user, { logger, headerManager, proxyManager },
  const logPrefix = `[VERIFY/${taskId}/${cid}]`; // --- REFACTOR 2: VERBOSE LOGGING ---
 
  const selectedHeader = await headerManager.selectHeader();
-  if (!selectedHeader) {
-    logger.log('WARN', `${logPrefix} Could not select a header. Skipping user.`);
-    return null;
-  }
+  if (!selectedHeader) { logger.log('WARN', `${logPrefix} Could not select a header. Skipping user.`); return null; }
 
  let wasSuccess = false;
-  let proxyUsed
+  let proxyUsed = true;
 
  try { // Outer try for the whole operation
    let response;
@@ -30,30 +27,24 @@ async function fetchAndVerifyUser(user, { logger, headerManager, proxyManager },
    try {
      // --- REFACTOR 3: ADD FALLBACK ---
      logger.log('TRACE', `${logPrefix} Attempting portfolio fetch via AppScript proxy...`);
-      response = await proxyManager.fetch(url, options);
-      if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
+      response = await proxyManager.fetch(url, options); // Try....
+      if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`); // Fail
 
-    } catch (proxyError) {
-      logger.log('WARN', `${logPrefix} AppScript proxy fetch FAILED. Error: ${proxyError.message}. Attempting direct node-fetch fallback.`, {
-        error: proxyError.message,
-        source: 'AppScript'
-      });
+    } catch (proxyError) { // We fucked up, appscript proxy failed, log error type
+      logger.log('WARN', `${logPrefix} AppScript proxy fetch FAILED. Error: ${proxyError.message}. Attempting direct node-fetch fallback.`, { error: proxyError.message, source: 'AppScript' });
      proxyUsed = false;
 
      try {
-        response = await fetch(url, options); // Direct node-fetch
-        if (!response.ok) {
+        response = await fetch(url, options); // Direct node-fetch as fallback
+        if (!response.ok) { // we fucked up here too, log error text
          const errorText = await response.text();
          throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`);
        }
        // Fallback succeeded, but we don't set wasSuccess yet,
        // as we still need to parse the body.
 
-      } catch (fallbackError) {
-        logger.log('ERROR', `${logPrefix} Direct node-fetch fallback FAILED.`, {
-          error: fallbackError.message,
-          source: 'eToro/Network'
-        });
+      } catch (fallbackError) { // log error type for fallback
+        logger.log('ERROR', `${logPrefix} Direct node-fetch fallback FAILED.`, { error: fallbackError.message, source: 'eToro/Network' });
        throw fallbackError; // Throw to be caught by outer try
      }
      // --- END REFACTOR 3 ---
@@ -62,46 +53,38 @@ async function fetchAndVerifyUser(user, { logger, headerManager, proxyManager },
    // --- If we are here, `response` is valid ---
    let portfolioData;
    const body = await response.text();
-
-    try {
-
-
-    } catch (parseError) {
-      logger.log('ERROR', `${logPrefix} FAILED TO PARSE JSON RESPONSE. RAW BODY:`, {
-        parseErrorMessage: parseError.message,
-        rawResponseText: body
-      });
+    // --- REFACTOR 4: LOG RAW RESPONSE ON PARSE FAILURE ---
+    try { portfolioData = JSON.parse(body);
+    } catch (parseError) { // If we can't parse JSON, try raw log, shouldn't happen but this world is weird....
+      logger.log('ERROR', `${logPrefix} FAILED TO PARSE JSON RESPONSE. RAW BODY:`, { parseErrorMessage: parseError.message, rawResponseText: body });
      throw new Error(`Failed to parse JSON for user ${cid}.`);
    }
    // --- END REFACTOR 4 ---
 
    // --- Original logic ---
    if (userType === 'speculator') {
-      const instruments = portfolioData.AggregatedPositions.map(p => p.InstrumentID).filter(id => SPECULATOR_INSTRUMENTS_ARRAY.includes(id));
+      const instruments = portfolioData.AggregatedPositions.map(p => p.InstrumentID).filter(id => SPECULATOR_INSTRUMENTS_ARRAY.includes(id));
      if (!instruments.length) {
        logger.log('TRACE', `${logPrefix} Verified user, but not a speculator (no matching assets).`);
-        wasSuccess = true; // API call *worked*
+        wasSuccess = true; // API call *worked* we are so smart
        return null;
      }
-      logger.log('INFO', `${logPrefix} Verified as SPECULATOR.`);
-      wasSuccess = true;
-      return { type: 'speculator', userId: cid, isBronze: user.isBronze, username: user.username, updateData: { instruments, lastVerified: new Date(), lastHeldSpeculatorAsset: new Date() } };
+      logger.log('INFO', `${logPrefix} Verified as SPECULATOR.`);
+      wasSuccess = true; // Mark that we are very high IQ
+      return { type: 'speculator', userId: cid, isBronze: user.isBronze, username: user.username, updateData: { instruments, lastVerified: new Date(), lastHeldSpeculatorAsset: new Date() } }; // Return values
    }
-
-    logger.log('INFO', `${logPrefix} Verified as NORMAL user.`);
-    wasSuccess = true;
-    return { type: 'normal', userId: cid, isBronze: user.isBronze, username: user.username, updateData: { lastVerified: new Date() } };
+    // Other user types are just normal users....
+    logger.log('INFO', `${logPrefix} Verified as NORMAL user.`);
+    wasSuccess = true; // We are smart, this worked
+    return { type: 'normal', userId: cid, isBronze: user.isBronze, username: user.username, updateData: { lastVerified: new Date() } }; // Return values
 
  } catch (err) {
    // This catches proxy, fallback, or parse errors
-    logger.log('WARN', `${logPrefix} Error processing user.`, { errorMessage: err.message });
+    logger.log('WARN', `${logPrefix} Error processing user.`, { errorMessage: err.message }); // Some shit broke
    wasSuccess = false; // Ensure it's marked as failure
    return null;
  } finally {
-    if (selectedHeader && proxyUsed) {
-      // Only update performance if the proxy was used
-      headerManager.updatePerformance(selectedHeader.id, wasSuccess);
-    }
+    if (selectedHeader && proxyUsed) { headerManager.updatePerformance(selectedHeader.id, wasSuccess); } // Only update performance if the proxy was used
  }
 }
 
@@ -118,7 +101,7 @@ async function handleVerify(task, taskId, { db, logger, ...dependencies }, confi
  logger.log('INFO', `[VERIFY/${taskId}] Starting sequential verification for ${users.length} users...`);
  const results = [];
  for (const user of users) {
-    // Await each user one by one
+    // Await each user one by one otherwise appscript starts to cry and internally throttle...then rate limit...then fuck everything up and break and then i want to die.
    const result = await fetchAndVerifyUser(user, { db, logger, ...dependencies }, { ...config, userType }, taskId);
    results.push(result); // Push the actual result (or null)
  }
@@ -136,7 +119,7 @@ async function handleVerify(task, taskId, { db, logger, ...dependencies }, confi
    else normalUpdates[`users.${d.userId}`] = d.updateData;
  }
  });
-
+  // Process responses in object keys
  if (Object.keys(speculatorUpdates).length || Object.keys(normalUpdates).length) {
    const blockRef = db.collection(userType === 'speculator' ? config.FIRESTORE_COLLECTION_SPECULATOR_BLOCKS : config.FIRESTORE_COLLECTION_NORMAL_PORTFOLIOS).doc(String(blockId));
    batch.set(blockRef, userType === 'speculator' ? speculatorUpdates : normalUpdates, { merge: true });
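
The result-partitioning step in the last hunk is worth seeing on its own. This is a standalone restatement of the logic already visible above (names from the diff; the batch/commit plumbing is omitted):

// Partition verify results into the two update maps used by handleVerify.
function partitionVerifyResults(results) {
  const speculatorUpdates = {};
  const normalUpdates = {};
  for (const d of results) {
    if (!d) continue; // failed or skipped users come back as null
    if (d.type === 'speculator') speculatorUpdates[`users.${d.userId}`] = d.updateData;
    else normalUpdates[`users.${d.userId}`] = d.updateData;
  }
  return { speculatorUpdates, normalUpdates };
}
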
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "bulltrackers-module",
-  "version": "1.0.170",
+  "version": "1.0.171",
   "description": "Helper Functions for Bulltrackers.",
   "main": "index.js",
   "files": [
@@ -27,10 +27,6 @@
     "calculations",
     "finance"
   ],
-  "scripts": {
-    "postpublish": "node ./auto-deploy.js",
-    "release": "node ./release.js"
-  },
   "dependencies": {
     "@google-cloud/firestore": "^7.11.3",
     "@google-cloud/pubsub": "latest",