bulltrackers-module 1.0.213 → 1.0.215
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/controllers/computation_controller.js +25 -82
- package/functions/computation-system/helpers/computation_dispatcher.js +11 -24
- package/functions/computation-system/helpers/computation_manifest_builder.js +52 -89
- package/functions/computation-system/helpers/computation_pass_runner.js +23 -63
- package/functions/computation-system/helpers/computation_worker.js +11 -53
- package/functions/computation-system/helpers/orchestration_helpers.js +89 -208
- package/functions/task-engine/helpers/update_helpers.js +34 -70
- package/package.json +1 -1
- package/functions/computation-system/layers/math_primitives.js +0 -744
@@ -5,43 +5,26 @@
  */

 const { FieldValue } = require('@google-cloud/firestore');
-const crypto
+const crypto = require('crypto');

 // --- CIRCUIT BREAKER STATE ---
 let _consecutiveProxyFailures = 0;
-const MAX_PROXY_FAILURES
+const MAX_PROXY_FAILURES = 3;

-function shouldTryProxy() {
-  return _consecutiveProxyFailures < MAX_PROXY_FAILURES;
-}
+function shouldTryProxy() { return _consecutiveProxyFailures < MAX_PROXY_FAILURES; }

 function recordProxyOutcome(success) {
-  if (success) {
-
-  } else {
-    _consecutiveProxyFailures++;
-  }
+  if (success) { _consecutiveProxyFailures = 0;
+  } else { _consecutiveProxyFailures++; }
 }

 function detectSpeculatorTargets(historyData, portfolioData) {
   if (!historyData?.PublicHistoryPositions || !portfolioData?.AggregatedPositions) return [];
-
   const leveragedAssets = new Set();
-  for (const pos of historyData.PublicHistoryPositions) {
-    if (pos.Leverage > 1 && pos.InstrumentID) {
-      leveragedAssets.add(pos.InstrumentID);
-    }
-  }
-
+  for (const pos of historyData.PublicHistoryPositions) { if (pos.Leverage > 1 && pos.InstrumentID) { leveragedAssets.add(pos.InstrumentID); } }
   if (leveragedAssets.size === 0) return [];
-
   const targets = [];
-  for (const pos of portfolioData.AggregatedPositions) {
-    if (leveragedAssets.has(pos.InstrumentID)) {
-      targets.push(pos.InstrumentID);
-    }
-  }
-
+  for (const pos of portfolioData.AggregatedPositions) { if (leveragedAssets.has(pos.InstrumentID)) { targets.push(pos.InstrumentID); } }
   return targets;
 }

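shouldTryProxy and recordProxyOutcome form a small circuit breaker around the AppScript proxy: once MAX_PROXY_FAILURES (3) consecutive failures have been recorded, shouldTryProxy() returns false and callers stop attempting the proxy, while a single success resets the counter. A minimal sketch of how a caller combines the two (fetchDirect is a hypothetical fallback passed in for illustration, not part of this package):

    // Sketch only. fetchDirect is a placeholder for whatever non-proxy path the caller uses.
    async function fetchWithBreaker(url, options, proxyManager, fetchDirect) {
      if (shouldTryProxy()) {
        try {
          const response = await proxyManager.fetch(url, options);
          if (!response.ok) throw new Error(`proxy status ${response.status}`);
          recordProxyOutcome(true);   // success: reset the consecutive-failure counter
          return response;
        } catch (err) {
          recordProxyOutcome(false);  // failure: count toward the limit of 3
        }
      }
      return fetchDirect(url, options); // breaker open or proxy failed: fall back
    }
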
@@ -49,37 +32,35 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
   const { userId, instruments, instrumentId, userType } = task;

   const instrumentsToProcess = userType === 'speculator' ? (instruments || [instrumentId]) : [undefined];
-  const today
-  const portfolioBlockId
-  let isPrivate
-
-  let
-  let
-  let hasPortfolioErrors = false;
+  const today = new Date().toISOString().slice(0, 10);
+  const portfolioBlockId = `${Math.floor(parseInt(userId) / 1000000)}M`;
+  let isPrivate = false;
+  let capturedHistory = null;
+  let capturedPortfolio = null;
+  let hasPortfolioErrors = false;

   const scopeLog = userType === 'speculator' ? `Instruments: [${instrumentsToProcess.join(', ')}]` : 'Scope: Full Portfolio';
   logger.log('TRACE', `[handleUpdate/${userId}] Starting update task. Type: ${userType}. ${scopeLog}`);

   // --- 1. Process History Fetch (Sequentially) ---
-  let historyHeader
-  let wasHistorySuccess
+  let historyHeader = null;
+  let wasHistorySuccess = false;
   let proxyUsedForHistory = false;

   try {
     if (!batchManager.checkAndSetHistoryFetched(userId)) {
       logger.log('TRACE', `[handleUpdate/${userId}] Attempting history fetch.`);
       historyHeader = await headerManager.selectHeader();
-      if (!historyHeader) {
-        logger.log('WARN', `[handleUpdate/${userId}] Could not select history header. Skipping history.`);
+      if (!historyHeader) { logger.log('WARN', `[handleUpdate/${userId}] Could not select history header. Skipping history.`);
       } else {

         const d = new Date();
         d.setFullYear(d.getFullYear() - 1);
         const oneYearAgoStr = d.toISOString();
-        const uuid
+        const uuid = crypto.randomUUID ? crypto.randomUUID() : '0205aca7-bd37-4884-8455-f28ce1add2de';

         const historyUrl = `https://www.etoro.com/sapi/trade-data-real/history/public/credit/flat?StartTime=${oneYearAgoStr}&PageNumber=1&ItemsPerPage=30000&PublicHistoryPortfolioFilter=&CID=${userId}&client_request_id=${uuid}`;
-        const options
+        const options = { headers: historyHeader.header };
         let response;

         // --- PROXY ATTEMPT ---
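For orientation, the newly initialised values above resolve to a YYYY-MM-DD date key and a per-million user-ID bucket. Worked through with an arbitrary example userId (the numbers below are illustrative only, not taken from real data):

    // Illustrative values only.
    const userId = 4837201;
    const today = new Date('2024-05-01T12:00:00Z').toISOString().slice(0, 10); // '2024-05-01'
    const portfolioBlockId = `${Math.floor(parseInt(userId) / 1000000)}M`;      // '4M'
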
@@ -88,7 +69,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
           logger.log('TRACE', `[handleUpdate/${userId}] Attempting history fetch via AppScript proxy...`);
           response = await proxyManager.fetch(historyUrl, options);
           if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
-          wasHistorySuccess
+          wasHistorySuccess = true;
           proxyUsedForHistory = true;
           recordProxyOutcome(true);

@@ -113,9 +94,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,

     if (wasHistorySuccess) {
       const data = await response.json();
-
-      // --- FILTER LOGIC FOR GRANULAR API ---
-      // 0 = Manual, 1 = Stop Loss, 5 = Take Profit.
+
       const VALID_REASONS = [0, 1, 5];
       if (data.PublicHistoryPositions && Array.isArray(data.PublicHistoryPositions)) {
         const originalCount = data.PublicHistoryPositions.length;
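The removed comments documented the VALID_REASONS codes (0 = Manual, 1 = Stop Loss, 5 = Take Profit); the array itself is unchanged. The filtering step that consumes it on the fetched history data sits outside this hunk; it presumably looks roughly like the sketch below, where the CloseReason property name is an assumption rather than something this diff shows:

    // Sketch only: the real property name for the close reason is not visible in this diff.
    const VALID_REASONS = [0, 1, 5]; // 0 = Manual, 1 = Stop Loss, 5 = Take Profit
    data.PublicHistoryPositions = data.PublicHistoryPositions.filter(
      (pos) => VALID_REASONS.includes(pos.CloseReason)
    );
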
@@ -128,14 +107,9 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
         await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType);
       }
     }
-    } else {
-
-    }
-  } catch (err) {
-    logger.log('ERROR', `[handleUpdate/${userId}] Unhandled error during history processing.`, { error: err.message }); wasHistorySuccess = false;
-  } finally {
-    if (historyHeader && proxyUsedForHistory) { headerManager.updatePerformance(historyHeader.id, wasHistorySuccess); }
-  }
+    } else { logger.log('TRACE', `[handleUpdate/${userId}] History fetch skipped (already fetched).`); }
+  } catch (err) { logger.log('ERROR', `[handleUpdate/${userId}] Unhandled error during history processing.`, { error: err.message }); wasHistorySuccess = false;
+  } finally { if (historyHeader && proxyUsedForHistory) { headerManager.updatePerformance(historyHeader.id, wasHistorySuccess); } }

   // --- 2. Process Portfolio Fetches (Sequentially) ---
   logger.log('TRACE', `[handleUpdate/${userId}] Starting ${instrumentsToProcess.length} sequential portfolio fetches.`);
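Note the guard in the collapsed finally block: header performance is only reported when the proxy path was actually used, so a header is neither penalised nor credited for requests that never went through it. Reduced to its essentials, the bookkeeping pattern looks like this sketch (doFetch and url are placeholders standing in for the proxy call shown above):

    // Pattern sketch; doFetch is a placeholder for the proxy fetch above.
    async function fetchWithHeaderTracking(url, headerManager, doFetch) {
      const header = await headerManager.selectHeader();
      let wasSuccess = false;
      let proxyUsed = false;
      try {
        proxyUsed = true;
        const res = await doFetch(url, { headers: header.header });
        wasSuccess = res.ok;
        return res;
      } finally {
        // Only score the header when the proxy path was actually exercised.
        if (header && proxyUsed) { headerManager.updatePerformance(header.id, wasSuccess); }
      }
    }
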
@@ -154,7 +128,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
     const portfolioUrl = userType === 'speculator' ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instId}` : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
     const options = { headers: portfolioHeader.header };
     let response;
-    let wasPortfolioSuccess
+    let wasPortfolioSuccess = false;
     let proxyUsedForPortfolio = false;

     if (shouldTryProxy()) {
@@ -162,7 +136,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
       logger.log('TRACE', `[handleUpdate/${userId}] Attempting fetch for ${scopeName} via AppScript proxy...`);
       response = await proxyManager.fetch(portfolioUrl, options);
       if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
-      wasPortfolioSuccess
+      wasPortfolioSuccess = true;
       proxyUsedForPortfolio = true;
       recordProxyOutcome(true);

@@ -190,13 +164,13 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,

       try {
         const portfolioJson = JSON.parse(body);
-        capturedPortfolio
+        capturedPortfolio = portfolioJson;
         await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, portfolioJson, userType, instId);
         logger.log('TRACE', `[handleUpdate/${userId}] Portfolio for ${scopeName} processed successfully.`);

       } catch (parseError) {
         wasPortfolioSuccess = false;
-        hasPortfolioErrors
+        hasPortfolioErrors = true;
         logger.log('ERROR', `[handleUpdate/${userId}] FAILED TO PARSE JSON RESPONSE for ${scopeName}.`, { url: portfolioUrl, parseErrorMessage: parseError.message });
       }
     } else {
@@ -210,8 +184,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
   if (userType !== 'speculator' && capturedHistory && capturedPortfolio && pubsub && config.PUBSUB_TOPIC_TASK_ENGINE) {
     try {
       const speculatorAssets = detectSpeculatorTargets(capturedHistory, capturedPortfolio);
-      if (speculatorAssets.length > 0) {
-        logger.log('INFO', `[handleUpdate/${userId}] DETECTED SPECULATOR BEHAVIOR. Queuing ${speculatorAssets.length} targeted updates.`);
+      if (speculatorAssets.length > 0) { logger.log('INFO', `[handleUpdate/${userId}] DETECTED SPECULATOR BEHAVIOR. Queuing ${speculatorAssets.length} targeted updates.`);

         const newTasks = speculatorAssets.map(assetId => ({
           type: 'update',
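detectSpeculatorTargets (first hunk) drives this block: an instrument counts as a speculator target when it appears with Leverage > 1 in the captured history and is still present in the aggregated portfolio, and each such instrument is then queued as a targeted 'update' task. A worked example with made-up inputs:

    // Made-up data illustrating detectSpeculatorTargets from the first hunk.
    const history = {
      PublicHistoryPositions: [
        { InstrumentID: 1001, Leverage: 5 }, // leveraged -> candidate
        { InstrumentID: 2002, Leverage: 1 }, // not leveraged -> ignored
      ],
    };
    const portfolio = {
      AggregatedPositions: [
        { InstrumentID: 1001 }, // still held -> returned
        { InstrumentID: 3003 }, // no leveraged history -> ignored
      ],
    };
    detectSpeculatorTargets(history, portfolio); // => [1001]
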
@@ -223,16 +196,12 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
         const dataBuffer = Buffer.from(JSON.stringify({ tasks: newTasks }));
         await pubsub.topic(config.PUBSUB_TOPIC_TASK_ENGINE).publishMessage({ data: dataBuffer });
       }
-    } catch (detectionError) {
-      logger.log('ERROR', `[handleUpdate/${userId}] Error during Speculator Detection.`, { error: detectionError.message });
-    }
+    } catch (detectionError) { logger.log('ERROR', `[handleUpdate/${userId}] Error during Speculator Detection.`, { error: detectionError.message }); }
   }

   if (isPrivate) {
     logger.log('WARN', `[handleUpdate/${userId}] Removing private user from updates.`);
-    for (const instrumentId of instrumentsToProcess) {
-      await batchManager.deleteFromTimestampBatch(userId, userType, instrumentId);
-    }
+    for (const instrumentId of instrumentsToProcess) { await batchManager.deleteFromTimestampBatch(userId, userType, instrumentId); }
     const blockCountsRef = db.doc(config.FIRESTORE_DOC_SPECULATOR_BLOCK_COUNTS);
     for (const instrumentId of instrumentsToProcess) {
       const incrementField = `counts.${instrumentId}_${Math.floor(userId/1e6)*1e6}`;
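incrementField addresses a nested counter keyed by instrument and by million-sized user-ID block (userId 4837201 maps to block 4000000, giving e.g. counts.123_4000000 for instrument 123). The Firestore write itself is outside this hunk; with the FieldValue import at the top of the file and the variables above, it plausibly looks like the following sketch, which is an assumption rather than code shown in this diff:

    // Assumed shape of the counter update; the actual call is not visible in this hunk.
    const incrementField = `counts.${instrumentId}_${Math.floor(userId / 1e6) * 1e6}`;
    await blockCountsRef.update({ [incrementField]: FieldValue.increment(1) });
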
@@ -241,17 +210,12 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
     return;
   }

-  for (const instrumentId of instrumentsToProcess) {
-    await batchManager.updateUserTimestamp(userId, userType, instrumentId);
-  }
+  for (const instrumentId of instrumentsToProcess) { await batchManager.updateUserTimestamp(userId, userType, instrumentId); }

   if (userType === 'speculator') { await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6)); }
-
-  if (hasPortfolioErrors) {
-
-  } else {
-    logger.log('TRACE', `[handleUpdate/${userId}] Update task finished successfully.`);
-  }
+
+  if (hasPortfolioErrors) { logger.log('WARN', `[handleUpdate/${userId}] Update task finished with ERRORS. See logs above.`);
+  } else { logger.log('TRACE', `[handleUpdate/${userId}] Update task finished successfully.`); }
 }

 module.exports = { handleUpdate };