bulltrackers-module 1.0.153 → 1.0.155
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -3,9 +3,13 @@
  * It selects an available (unlocked) proxy for each request and locks it upon failure.
  * * This module is designed to be reusable and receives all dependencies
  * (firestore, logger) and configuration via its constructor.
+ * --- MODIFIED: Now includes exponential backoff and retries specifically for rate-limit errors. ---
  */
 const { FieldValue } = require('@google-cloud/firestore');
 
+// --- NEW: Added sleep utility ---
+const sleep = (ms) => new Promise(resolve => setTimeout(resolve, ms));
+
 class IntelligentProxyManager {
   /**
    * @param {object} firestore - An initialized Firestore instance.
@@ -29,6 +33,11 @@ class IntelligentProxyManager {
     this.proxyLockingEnabled = config.proxyLockingEnabled !== false;
     this.proxies = {};
     this.configLastLoaded = 0;
+
+    // --- NEW: Retry configuration ---
+    this.MAX_RETRIES = 3;
+    this.INITIAL_BACKOFF_MS = 1000;
+
     if (this.proxyUrls.length === 0) { this.logger.log('WARN', '[ProxyManager] No proxy URLs provided in config.');
     } else { const lockingStatus = this.proxyLockingEnabled ? "Locking Mechanism Enabled" : "Locking Mechanism DISABLED"; this.logger.log('INFO', `[ProxyManager] Initialized with ${this.proxyUrls.length} proxies and ${lockingStatus}.`); }
   }
@@ -81,43 +90,105 @@ class IntelligentProxyManager {
   }
 
   /**
-   * Makes a fetch request
+   * --- MODIFIED: Makes a fetch request with exponential backoff for rate limits ---
    * @param {string} targetUrl - The URL to fetch.
    * @param {object} options - Fetch options (e.g., headers).
    * @returns {Promise<object>} A mock Response object.
    */
   async fetch(targetUrl, options = {}) {
     let proxy = null;
-    try {
-
-
-
-
+    try {
+      proxy = await this._selectProxy();
+    } catch (error) {
+      // This happens if *all* proxies are locked.
+      return { ok: false, status: 503, error: { message: error.message }, headers: new Headers() };
+    }
+
+    let backoff = this.INITIAL_BACKOFF_MS;
+    let lastResponse = null;
+
+    for (let attempt = 1; attempt <= this.MAX_RETRIES; attempt++) {
+      const response = await this._fetchViaAppsScript(proxy.url, targetUrl, options);
+      lastResponse = response; // Always store the last response
+
+      // 1. Success
+      if (response.ok) {
+        return response;
+      }
+
+      // 2. Rate Limit Error (Retryable)
+      if (response.isRateLimitError) {
+        this.logger.log('WARN', `[ProxyManager] Rate limit hit on proxy ${proxy.owner} (Attempt ${attempt}/${this.MAX_RETRIES}). Backing off for ${backoff}ms...`, { url: targetUrl });
+        await sleep(backoff);
+        backoff *= 2; // Exponential backoff
+        // Continue to the next attempt
+        continue;
+      }
+
+      // 3. Other Fetch Error (Non-Retryable, Lock Proxy)
+      if (response.isUrlFetchError) {
+        this.logger.log('ERROR', `[ProxyManager] Proxy ${proxy.owner} failed (non-rate-limit). Locking proxy.`, { url: targetUrl, status: response.status });
+        await this.lockProxy(proxy.owner);
+        return response; // Fail fast and return
+      }
+
+      // 4. Standard Error (e.g., 404, 500 from *target* URL, not proxy)
+      // This was a "successful" proxy fetch of a failing URL. Not retryable.
+      return response;
+    }
+
+    // If loop finishes, all retries failed (likely all were rate-limit errors)
+    this.logger.log('ERROR', `[ProxyManager] Request failed after ${this.MAX_RETRIES} rate-limit retries.`, { url: targetUrl });
+    return lastResponse;
   }
 
+
   /**
    * Internal function to call the Google AppScript proxy.
+   * --- MODIFIED: Now adds `isRateLimitError` flag to response ---
    * @private
    */
   async _fetchViaAppsScript(proxyUrl, targetUrl, options) {
     const payload = { url: targetUrl, ...options };
     try {
       const response = await fetch(proxyUrl, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(payload) });
+
+      // This is an error with the *proxy function itself* (e.g., 500, 429)
       if (!response.ok) {
         const errorText = await response.text();
         this.logger.log('WARN', `[ProxyManager] Proxy infrastructure itself failed.`, { status: response.status, proxy: proxyUrl, error: errorText });
-
+        const isRateLimit = response.status === 429;
+        return { ok: false, status: response.status, isUrlFetchError: true, isRateLimitError: isRateLimit, error: { message: `Proxy infrastructure failed with status ${response.status}` }, headers: response.headers, text: () => Promise.resolve(errorText) };
+      }
+
       const proxyResponse = await response.json();
+
+      // This is an error *returned by the proxy* (e.g., UrlFetchApp failed)
       if (proxyResponse.error) {
         const errorMsg = proxyResponse.error.message || '';
+        // --- NEW: Check for AppScript's rate limit error text ---
         if (errorMsg.toLowerCase().includes('service invoked too many times')) {
           this.logger.log('WARN', `[ProxyManager] Proxy quota error: ${proxyUrl}`, { error: proxyResponse.error });
-          return { ok: false, status: 500, error: proxyResponse.error, isUrlFetchError: true, headers: new Headers() };
-
-
+          return { ok: false, status: 500, error: proxyResponse.error, isUrlFetchError: true, isRateLimitError: true, headers: new Headers() }; // <-- Set flag
+        }
+        // Other UrlFetchApp error
+        return { ok: false, status: 500, error: proxyResponse.error, isUrlFetchError: true, isRateLimitError: false, headers: new Headers(), text: () => Promise.resolve(errorMsg) };
+      }
+
+      // Success. The proxy fetched the target URL.
+      return {
+        ok: proxyResponse.statusCode >= 200 && proxyResponse.statusCode < 300,
+        status: proxyResponse.statusCode,
+        headers: new Headers(proxyResponse.headers || {}),
+        json: () => Promise.resolve(JSON.parse(proxyResponse.body)),
+        text: () => Promise.resolve(proxyResponse.body),
+        isUrlFetchError: false,
+        isRateLimitError: false
+      };
     } catch (networkError) {
       this.logger.log('ERROR', `[ProxyManager] Network error calling proxy: ${proxyUrl}`, { errorMessage: networkError.message });
-      return { ok: false, status: 0, isUrlFetchError: true, error: { message: `Network error: ${networkError.message}` }, headers: new Headers() };
+      return { ok: false, status: 0, isUrlFetchError: true, isRateLimitError: false, error: { message: `Network error: ${networkError.message}` }, headers: new Headers() };
+    }
   }
 }
 
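For orientation, a minimal sketch of how a caller might consume the retrying fetch() above and the mock-Response shape it resolves to. The caller function and URL handling are hypothetical, not part of the package; the roughly 1s / 2s / 4s waits follow from MAX_RETRIES = 3 and INITIAL_BACKOFF_MS = 1000 doubling after each rate-limited attempt.

// Hypothetical caller; assumes a proxyManager constructed as in the diff above.
async function fetchJsonViaProxy(proxyManager, targetUrl) {
  // fetch() backs off internally: ~1000ms, 2000ms, 4000ms after rate-limited attempts.
  const res = await proxyManager.fetch(targetUrl, { headers: { Accept: 'application/json' } });
  if (!res.ok) {
    // isRateLimitError stays true when the final attempt was still rate-limited;
    // isUrlFetchError marks proxy-infrastructure failures (that proxy gets locked).
    console.warn('Proxy fetch failed', res.status, { rateLimited: res.isRateLimitError });
    return null;
  }
  return res.json(); // the mock Response exposes json()/text() like the Fetch API
}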
@@ -1,6 +1,9 @@
 /*
  * FILENAME: CloudFunctions/NpmWrappers/bulltrackers-module/functions/task-engine/helpers/update_helpers.js
  * (MODIFIED: To conditionally fetch history API once per user per batch)
+ * (MODIFIED: `lookupUsernames` runs batches in parallel)
+ * (MODIFIED: `handleUpdate` fetches history and all portfolios in parallel)
+ * (MODIFIED: `handleUpdate` now uses batchManager for history cache)
  */
 
 /**
@@ -12,70 +15,180 @@
  * --- MODIFIED: Conditionally fetches history only once per user per batch. ---
  */
 const { FieldValue } = require('@google-cloud/firestore');
+const pLimit = require('p-limit');
 
-
+/**
+ * (MODIFIED: Runs lookup batches in parallel)
+ */
+async function lookupUsernames(cids, { logger, headerManager, proxyManager }, config) {
   if (!cids?.length) return [];
   logger.log('INFO', `[lookupUsernames] Looking up usernames for ${cids.length} CIDs.`);
-
+
+  // Use a new config value, falling back to 5
+  const limit = pLimit(config.USERNAME_LOOKUP_CONCURRENCY || 5);
+  const { USERNAME_LOOKUP_BATCH_SIZE, ETORO_API_RANKINGS_URL } = config;
+
+  const batches = [];
   for (let i = 0; i < cids.length; i += USERNAME_LOOKUP_BATCH_SIZE) {
-
+    batches.push(cids.slice(i, i + USERNAME_LOOKUP_BATCH_SIZE).map(Number));
+  }
+
+  const batchPromises = batches.map(batch => limit(async () => {
     const header = await headerManager.selectHeader();
-    if (!header) {
+    if (!header) {
+      logger.log('ERROR', '[lookupUsernames] Could not select a header.');
+      return null; // Return null to skip this batch
+    }
+
     let success = false;
     try {
       const res = await proxyManager.fetch(`${ETORO_API_RANKINGS_URL}?Period=LastTwoYears`, { method: 'POST', headers: { ...header.header, 'Content-Type': 'application/json' }, body: JSON.stringify(batch) });
       if (!res.ok) throw new Error(`API status ${res.status}`);
       const data = await res.json();
-
-
+      success = true;
+      logger.log('DEBUG', 'Looked up usernames', { batch: batch.slice(0, 5) }); // Log only a few
+      return data; // Return data on success
     } catch (err) {
       logger.log('WARN', `[lookupUsernames] Failed batch`, { error: err.message });
-
-
+      return null; // Return null on failure
+    } finally {
+      headerManager.updatePerformance(header.id, success);
+    }
+  }));
+
+  const results = await Promise.allSettled(batchPromises);
+
+  const allUsers = results
+    .filter(r => r.status === 'fulfilled' && r.value && Array.isArray(r.value))
+    .flatMap(r => r.value); // Flatten all successful batch results
+
   logger.log('INFO', `[lookupUsernames] Found ${allUsers.length} public users out of ${cids.length}.`);
   return allUsers;
 }
 
-
+
+/**
+ * (MODIFIED: Fetches history and all portfolios in parallel)
+ * (MODIFIED: Uses batchManager for history cache)
+ */
+async function handleUpdate(task, taskId, { logger, headerManager, proxyManager, db, batchManager }, config, username) { // <--- REMOVED historyFetchedForUser
   const { userId, instruments, instrumentId, userType } = task;
   const instrumentsToProcess = userType === 'speculator' ? (instruments || [instrumentId]) : [undefined];
   const today = new Date().toISOString().slice(0, 10);
   const portfolioBlockId = `${Math.floor(parseInt(userId) / 1000000)}M`;
-
+
   let historyHeader = null;
-
-  let
-  let
+  let wasHistorySuccess = false;
+  let historyFetchPromise = null;
+  let isPrivate = false;
+
   try {
-
-
+    // --- 1. Prepare History Fetch (if needed) ---
+    // (MODIFIED: Use batchManager's cross-invocation cache)
+    if (!batchManager.checkAndSetHistoryFetched(userId)) {
+      // This user has NOT been fetched in the last 10 mins (by this instance)
       historyHeader = await headerManager.selectHeader();
-      if (historyHeader) {
-
-
-
-
-
-
+      if (historyHeader) {
+        // No need to add to a local set, batchManager did it.
+        const historyUrl = `${config.ETORO_API_USERSTATS_URL}${username}/trades/oneYearAgo?CopyAsAsset=true`;
+        historyFetchPromise = proxyManager.fetch(historyUrl, { headers: historyHeader.header });
+      } else {
+        logger.log('WARN', `[handleUpdate] Could not select history header for ${userId}. History will be skipped for this task.`);
+      }
+    } else {
+      logger.log('TRACE', `[handleUpdate] History fetch for ${userId} skipped (already fetched by this instance).`);
+    }
+
+    // --- 2. Prepare All Portfolio Fetches ---
+    const portfolioRequests = [];
+    for (const instId of instrumentsToProcess) {
+      const portfolioHeader = await headerManager.selectHeader();
+      if (!portfolioHeader) throw new Error(`Could not select portfolio header for ${userId}`);
+
+      const portfolioUrl = userType === 'speculator'
+        ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instId}`
+        : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
+
+      portfolioRequests.push({
+        instrumentId: instId,
+        url: portfolioUrl,
+        header: portfolioHeader,
+        promise: proxyManager.fetch(portfolioUrl, { headers: portfolioHeader.header })
+      });
+    }
+
+    // --- 3. Execute All API Calls in Parallel ---
+    const allPromises = [
+      ...(historyFetchPromise ? [historyFetchPromise] : []),
+      ...portfolioRequests.map(r => r.promise)
+    ];
+    const allResults = await Promise.allSettled(allPromises);
+
+    // --- 4. Process History Result ---
+    let resultIndex = 0;
+    if (historyFetchPromise) {
+      const historyRes = allResults[resultIndex++];
+      if (historyRes.status === 'fulfilled' && historyRes.value.ok) {
+        const data = await historyRes.value.json();
+        wasHistorySuccess = true;
+        await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType);
+      } else {
+        logger.log('WARN', `[handleUpdate] History fetch failed for ${userId}`, { error: historyRes.reason || `status ${historyRes.value?.status}` });
+      }
+    }
+
+    // --- 5. Process Portfolio Results ---
+    for (let i = 0; i < portfolioRequests.length; i++) {
+      const requestInfo = portfolioRequests[i];
+      const portfolioRes = allResults[resultIndex++];
       let wasPortfolioSuccess = false;
-
-      if (portfolioRes.ok) {
-        const body = await portfolioRes.text();
-        if (body.includes("user is PRIVATE")) {
-
-
-
-
-
+
+      if (portfolioRes.status === 'fulfilled' && portfolioRes.value.ok) {
+        const body = await portfolioRes.value.text();
+        if (body.includes("user is PRIVATE")) {
+          isPrivate = true;
+          logger.log('WARN', `User ${userId} is private. Removing from updates.`);
+          break; // Stop processing more portfolios for this private user
+        } else {
+          wasPortfolioSuccess = true;
+          await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, JSON.parse(body), userType, requestInfo.instrumentId);
+        }
+        logger.log('DEBUG', 'Processing portfolio for user', { userId, portfolioUrl: requestInfo.url });
+      } else {
+        logger.log('WARN', `Failed to fetch portfolio`, { userId, url: requestInfo.url, error: portfolioRes.reason || `status ${portfolioRes.value?.status}` });
+      }
+      // Update performance for this specific header
+      headerManager.updatePerformance(requestInfo.header.id, wasPortfolioSuccess);
+    }
+
+    // --- 6. Handle Private Users & Timestamps ---
     if (isPrivate) {
       logger.log('WARN', `User ${userId} is private. Removing from updates.`);
-      for (const instrumentId of instrumentsToProcess) {
+      for (const instrumentId of instrumentsToProcess) {
+        await batchManager.deleteFromTimestampBatch(userId, userType, instrumentId);
+      }
       const blockCountsRef = db.doc(config.FIRESTORE_DOC_SPECULATOR_BLOCK_COUNTS);
-      for (const instrumentId of instrumentsToProcess) {
-
-
-
-
+      for (const instrumentId of instrumentsToProcess) {
+        const incrementField = `counts.${instrumentId}_${Math.floor(userId/1e6)*1e6}`;
+        // This is not batched, but it's a rare event.
+        await blockCountsRef.set({ [incrementField]: FieldValue.increment(-1) }, { merge: true });
+      }
+      return; // Don't update timestamps
+    }
+
+    // If not private, update all timestamps
+    for (const instrumentId of instrumentsToProcess) {
+      await batchManager.updateUserTimestamp(userId, userType, instrumentId);
+    }
+    if (userType === 'speculator') {
+      await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6));
+    }
+
+  } finally {
+    if (historyHeader) { // historyHeader is only set if a fetch was attempted
+      headerManager.updatePerformance(historyHeader.id, wasHistorySuccess);
+    }
+  }
 }
 
 module.exports = { handleUpdate, lookupUsernames };
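handleUpdate above reads the Promise.allSettled results positionally, with the optional history promise occupying slot 0 only when it was queued. A standalone sketch of that indexing pattern, with made-up data in place of the real API calls:

// Illustrative only: an optional "first" promise shifts where the remaining results start.
async function demoSettledIndexing(includeHistory) {
  const historyPromise = includeHistory ? Promise.resolve({ trades: [] }) : null;
  const portfolioPromises = [Promise.resolve({ positions: 1 }), Promise.reject(new Error('boom'))];

  const all = [...(historyPromise ? [historyPromise] : []), ...portfolioPromises];
  const results = await Promise.allSettled(all); // never throws; each entry is { status, value | reason }

  let idx = 0;
  if (historyPromise) {
    const historyRes = results[idx++]; // consume slot 0 only when history was actually queued
    console.log('history:', historyRes.status);
  }
  for (const res of results.slice(idx)) {
    console.log('portfolio:', res.status, res.status === 'fulfilled' ? res.value : res.reason.message);
  }
}

demoSettledIndexing(true);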
@@ -3,6 +3,7 @@
  * REFACTORED: Renamed 'firestore' to 'db' for consistency.
  * OPTIMIZED: Added logic to handle speculator timestamp fixes within the batch.
  * --- MODIFIED: Added username map caching and trading history batching. ---
+ * --- MODIFIED: Added cross-invocation cache for history fetches. ---
  */
 const { FieldValue } = require('@google-cloud/firestore');
 
@@ -16,9 +17,18 @@ class FirestoreBatchManager {
     this.timestampBatch = {};
     this.tradingHistoryBatch = {};
     this.speculatorTimestampFixBatch = {};
+
+    // Username map cache
     this.usernameMap = new Map();
     this.usernameMapUpdates = {};
     this.usernameMapLastLoaded = 0;
+
+    // History fetch cache (NEW)
+    this.historyFetchedUserIds = new Set();
+    this.historyCacheTimestamp = Date.now();
+    // Set a 10-minute TTL on this cache (600,000 ms)
+    this.HISTORY_CACHE_TTL_MS = config.HISTORY_CACHE_TTL_MS || 600000;
+
     this.processedSpeculatorCids = new Set();
     this.usernameMapCollectionName = config.FIRESTORE_COLLECTION_USERNAME_MAP;
     this.normalHistoryCollectionName = config.FIRESTORE_COLLECTION_NORMAL_HISTORY;
@@ -27,9 +37,47 @@ class FirestoreBatchManager {
     logger.log('INFO', 'FirestoreBatchManager initialized.');
   }
 
+  /*
+   * NEW: Checks if a user's history has been fetched in the last 10 minutes.
+   * If not, it logs them as fetched and returns false (to trigger a fetch).
+   * @param {string} userId
+   * @returns {boolean} True if already fetched, false if not.
+   */
+  checkAndSetHistoryFetched(userId) {
+    // Check if the cache is stale
+    if (Date.now() - this.historyCacheTimestamp > this.HISTORY_CACHE_TTL_MS) {
+      this.logger.log('INFO', '[BATCH] History fetch cache (10m TTL) expired. Clearing set.');
+      this.historyFetchedUserIds.clear();
+      this.historyCacheTimestamp = Date.now();
+    }
+
+    if (this.historyFetchedUserIds.has(userId)) {
+      return true; // Yes, already fetched
+    }
+
+    // Not fetched yet. Mark as fetched and return false.
+    this.historyFetchedUserIds.add(userId);
+    return false;
+  }
+
   _getUsernameShardId(cid) { return `cid_map_shard_${Math.floor(parseInt(cid) / 10000) % 10}`; }
 
-  _scheduleFlush() { if (!this.batchTimeout) this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS); }
+  // _scheduleFlush() { if (!this.batchTimeout) this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS); } Old version
+
+  _scheduleFlush() {
+    const totalOps = this._estimateBatchSize();
+    if (totalOps >= 400) { this.flushBatches(); return; }
+    if (!this.batchTimeout) { this.batchTimeout = setTimeout(() => this.flushBatches(), this.config.TASK_ENGINE_FLUSH_INTERVAL_MS); }
+  }
+
+  _estimateBatchSize() {
+    let ops = 0;
+    ops += Object.keys(this.portfolioBatch).length;
+    ops += Object.keys(this.tradingHistoryBatch).length;
+    ops += Object.keys(this.timestampBatch).length;
+    ops += Object.keys(this.speculatorTimestampFixBatch).length;
+    return ops;
+  }
 
   async loadUsernameMap() {
     if (Date.now() - this.usernameMapLastLoaded < 3600000) return;
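Two notes on the hunk above. The eager flush at 400 estimated operations sits below Firestore's 500-writes-per-batch limit, leaving headroom before a forced commit. And checkAndSetHistoryFetched is a plain TTL'd Set; a minimal standalone sketch of the same pattern follows (class and names here are illustrative, not part of the module):

// Minimal TTL'd "seen" cache mirroring checkAndSetHistoryFetched (illustrative names).
class SeenCache {
  constructor(ttlMs = 600000) { // 10-minute window by default
    this.ttlMs = ttlMs;
    this.seen = new Set();
    this.stamp = Date.now();
  }
  // Returns true if id was already recorded inside the TTL window;
  // otherwise records it and returns false so the caller does the expensive work once.
  checkAndSet(id) {
    if (Date.now() - this.stamp > this.ttlMs) {
      this.seen.clear();   // whole-cache expiry, not per-entry
      this.stamp = Date.now();
    }
    if (this.seen.has(id)) return true;
    this.seen.add(id);
    return false;
  }
}

const cache = new SeenCache();
if (!cache.checkAndSet('user-123')) {
  // first sighting in this window -> perform the fetch
}

Because the timestamp covers the whole cache (in both the sketch and the module), an entry added just before expiry can be re-fetched shortly after; that trade-off is acceptable when the goal is only to avoid repeating the history call within a batch window.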
@@ -1,6 +1,9 @@
 /*
  * FILENAME: CloudFunctions/NpmWrappers/bulltrackers-module/functions/task-engine/utils/task_engine_utils.js
  * (MODIFIED: To pass down a Set to track history fetches)
+ * (MODIFIED: To run all update tasks in parallel with a concurrency limit)
+ * (MODIFIED: To use a SINGLE parallel work pool for ALL tasks)
+ * (MODIFIED: To remove local history cache set)
  */
 
 /**
@@ -12,6 +15,7 @@
 const { handleDiscover } = require('../helpers/discover_helpers');
 const { handleVerify } = require('../helpers/verify_helpers');
 const { handleUpdate, lookupUsernames } = require('../helpers/update_helpers');
+const pLimit = require('p-limit'); // <--- IMPORT p-limit
 
 /**
  * Parses Pub/Sub message into task array.
@@ -43,28 +47,72 @@ async function prepareTaskBatches(tasks, batchManager, logger) {
 async function runUsernameLookups(tasksToRun, cidsToLookup, dependencies, config, batchManager, logger) {
   if (!cidsToLookup.size) return;
   logger.log('INFO', `[TaskEngine] Looking up ${cidsToLookup.size} usernames...`);
-
+  // Pass config to lookupUsernames
+  const foundUsers = await lookupUsernames([...cidsToLookup.keys()], dependencies, config); // <--- PASS FULL CONFIG
   for (const u of foundUsers) { const cid = String(u.CID), username = u.Value.UserName; batchManager.addUsernameMapUpdate(cid, username); const task = cidsToLookup.get(cid); if (task) { tasksToRun.push({ task, username }); cidsToLookup.delete(cid); } }
   if (cidsToLookup.size) logger.log('WARN', `[TaskEngine] Could not find ${cidsToLookup.size} usernames (likely private).`, { skippedCids: [...cidsToLookup.keys()] });
 }
 
 /**
  * Executes all tasks.
+ * (MODIFIED: Runs ALL tasks in a single parallel pool)
  */
 async function executeTasks(tasksToRun, otherTasks, dependencies, config, taskId) {
-  const { logger } = dependencies;
-
+  const { logger, batchManager } = dependencies; // <--- Get batchManager
+
+  // REMOVED: const historyFetchedForUser = new Set();
+
+  // Create one unified parallel pool
+  const limit = pLimit(config.TASK_ENGINE_CONCURRENCY || 10);
+  const allTaskPromises = [];
+  let taskCounters = { update: 0, discover: 0, verify: 0, unknown: 0, failed: 0 };
+
+  // 1. Queue 'other' tasks (discover, verify)
   for (const task of otherTasks) {
     const subTaskId = `${task.type}-${task.userType || 'unknown'}-${task.userId || task.cids?.[0] || 'sub'}`;
     const handler = { discover: handleDiscover, verify: handleVerify }[task.type];
-
-
-
+
+    if (handler) {
+      allTaskPromises.push(
+        limit(() =>
+          handler(task, subTaskId, dependencies, config)
+            .then(() => taskCounters[task.type]++)
+            .catch(err => {
+              logger.log('ERROR', `[TaskEngine/${taskId}] Error in ${task.type} for ${subTaskId}`, { errorMessage: err.message });
+              taskCounters.failed++;
+            })
+        )
+      );
+    } else {
+      logger.log('ERROR', `[TaskEngine/${taskId}] Unknown task type: ${task.type}`);
+      taskCounters.unknown++;
+    }
+  }
+
+  // 2. Queue 'update' tasks
   for (const { task, username } of tasksToRun) {
     const subTaskId = `${task.type}-${task.userType || 'unknown'}-${task.userId}`;
-
-
-
+    allTaskPromises.push(
+      limit(() =>
+        // Pass batchManager instead of the local set
+        handleUpdate(task, subTaskId, dependencies, config, username) // <--- REMOVED historyFetchedForUser
+          .then(() => taskCounters.update++)
+          .catch(err => {
+            logger.log('ERROR', `[TaskEngine/${taskId}] Error in handleUpdate for ${task.userId}`, { errorMessage: err.message });
+            taskCounters.failed++;
+          })
+      )
+    );
+  }
+
+  // 3. Wait for ALL tasks to complete
+  await Promise.all(allTaskPromises);
+
+  // 4. Log final summary
+  logger.log(
+    taskCounters.failed > 0 ? 'WARN' : 'SUCCESS',
+    `[TaskEngine/${taskId}] Processed all tasks. Updates: ${taskCounters.update}, Discovers: ${taskCounters.discover}, Verifies: ${taskCounters.verify}, Unknown: ${taskCounters.unknown}, Failed: ${taskCounters.failed}.`
+  );
 }
 
 module.exports = { parseTaskPayload, prepareTaskBatches, runUsernameLookups, executeTasks };
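As a reference for the single-pool pattern used in executeTasks, a self-contained sketch with made-up tasks. It assumes a CommonJS-compatible p-limit release (3.x), matching the require() style used above; newer p-limit majors are ESM-only.

// One shared concurrency pool for heterogeneous async tasks (illustrative names only).
const pLimit = require('p-limit'); // assumes p-limit 3.x, which still supports require()

async function runAll(tasks, concurrency = 10) {
  const limit = pLimit(concurrency);
  const counters = { done: 0, failed: 0 };

  const promises = tasks.map(task =>
    limit(() =>
      task.run()                        // each task exposes an async run() in this sketch
        .then(() => counters.done++)
        .catch(() => counters.failed++) // failures are counted, never rethrown
    )
  );

  await Promise.all(promises);          // safe: every rejection was handled above
  return counters;
}

// Usage: runAll([{ run: async () => {} }, { run: async () => { throw new Error('x'); } }])
//   .then(c => console.log(c));        // -> { done: 1, failed: 1 }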