bulltrackers-module 1.0.543 → 1.0.545
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/executors/StandardExecutor.js +21 -22
- package/functions/task-engine/helpers/data_storage_helpers.js +16 -16
- package/functions/task-engine/helpers/popular_investor_helpers.js +50 -16
- package/functions/task-engine/helpers/social_helpers.js +52 -6
- package/package.json +1 -1
package/functions/computation-system/executors/StandardExecutor.js

@@ -18,7 +18,7 @@ class StandardExecutor {
     const dStr = date.toISOString().slice(0, 10);
     const logger = deps.logger;
 
-    //
+    // Determine required user types for this batch of calculations
     const requiredUserTypes = new Set();
     calcs.forEach(c => {
       const type = (c.userType || 'ALL').toUpperCase();
@@ -27,7 +27,6 @@ class StandardExecutor {
     const userTypeArray = requiredUserTypes.has('ALL') ? null : Array.from(requiredUserTypes);
 
     // [OPTIMIZATION] Check for Target CID in manifests (On-Demand Optimization)
-    // If present, we will filter all data streams to strictly this user
     const targetCid = calcs.find(c => c.targetCid)?.targetCid || calcs.find(c => c.manifest?.targetCid)?.manifest?.targetCid;
     if (targetCid) {
       logger.log('INFO', `[StandardExecutor] Running in Targeted Mode for CID: ${targetCid}`);
@@ -62,7 +61,6 @@ class StandardExecutor {
       } catch (e) { logger.log('WARN', `Failed to init ${c.name}`); }
     }
 
-    // Pass targetCid to streamAndProcess
     return await StandardExecutor.streamAndProcess(
       dStr, state, passName, config, deps, fullRoot,
       rootData.portfolioRefs, rootData.historyRefs,
@@ -71,7 +69,6 @@ class StandardExecutor {
     );
   }
 
-  // [UPDATED] Added targetCid param
   static async streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps, skipStatusWrite, requiredUserTypes = null, targetCid = null) {
     const { logger } = deps;
     const calcs = Object.values(state).filter(c => c && c.manifest);
@@ -82,11 +79,9 @@ class StandardExecutor {
     // --- 1. Resolve and Filter Portfolio Refs (Today) ---
     let effectivePortfolioRefs = portfolioRefs;
     if (!effectivePortfolioRefs) {
-      // If refs weren't provided by AvailabilityChecker, fetch them now
       effectivePortfolioRefs = await getPortfolioPartRefs(config, deps, dateStr, requiredUserTypes);
     }
     if (targetCid && effectivePortfolioRefs) {
-      // Filter: Keep only refs that match the CID (or Legacy refs without CID)
       effectivePortfolioRefs = effectivePortfolioRefs.filter(r => !r.cid || String(r.cid) === String(targetCid));
     }
 
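Note on the Targeted Mode filter above: a ref is kept when its cid matches the requested CID, or when it carries no cid at all (legacy parts). A minimal standalone sketch of that behaviour; the ref shape and helper name are illustrative, not part of the package:

```js
// Illustrative sketch only: mirrors the ref filter used in Targeted Mode above.
// The `refs` shape (objects with an optional `cid`) is an assumption.
function filterRefsForTargetCid(refs, targetCid) {
  if (!targetCid || !Array.isArray(refs)) return refs;
  // Keep refs matching the CID, plus legacy refs that have no cid field.
  return refs.filter(r => !r.cid || String(r.cid) === String(targetCid));
}

// Example: only cid 123 and the legacy ref survive.
console.log(filterRefsForTargetCid(
  [{ cid: 123 }, { cid: 456 }, { path: 'legacy-part-0' }],
  '123'
));
```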
@@ -129,7 +124,6 @@ class StandardExecutor {
     const setupDuration = performance.now() - startSetup;
     Object.keys(executionStats).forEach(name => executionStats[name].timings.setup += setupDuration);
 
-    // Yesterday's Refs are already filtered in run()
     const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
     const prevDateStr = prevDate.toISOString().slice(0, 10);
 
@@ -138,7 +132,6 @@ class StandardExecutor {
       earliestDates = await getEarliestDataDates(config, deps);
     }
 
-    // [FIX] Use effective/filtered refs
     const tP_iter = streamPortfolioData(config, deps, dateStr, effectivePortfolioRefs, requiredUserTypes);
 
     const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
@@ -200,7 +193,6 @@ class StandardExecutor {
     return { successUpdates: aggregatedSuccess, failureReport: aggregatedFailures };
   }
 
-  // ... rest of the file (flushBuffer, mergeReports, executePerUser) ...
   static async flushBuffer(state, dateStr, passName, config, deps, shardIndexMap, executionStats, mode, skipStatusWrite, isInitialWrite = false) {
     const { logger } = deps;
     const transformedState = {};
@@ -272,35 +264,39 @@ class StandardExecutor {
   static async executePerUser(calcInstance, metadata, dateStr, portfolioData, yesterdayPortfolioData, historyData, computedDeps, prevDeps, config, deps, loader, stats, earliestDates) {
     const { logger } = deps;
     const targetUserType = metadata.userType;
-    // [
+    // [FIX] Always load Global Helpers
     const mappings = await loader.loadMappings();
-    // [FIX] Correct method
+    // [FIX] Correct method: loadPIMasterList() (no args needed as loader has context)
     const piMasterList = await loader.loadPIMasterList();
     const SCHEMAS = mathLayer.SCHEMAS;
 
     // 1. Load Root Data
     const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await loader.loadInsights(dateStr) } : null;
-    const verifications = metadata.rootDataDependencies?.includes('verification') ? await loader.loadVerificationProfiles(config, deps) : null;
-    const rankings = metadata.rootDataDependencies?.includes('rankings') ? await loader.loadPopularInvestorRankings(config, deps, dateStr) : null;
 
-    // [FIX]
+    // [FIX] Correct method: loadVerifications() (no args)
+    const verifications = metadata.rootDataDependencies?.includes('verification') ? await loader.loadVerifications() : null;
+
+    // [FIX] Correct method: loadRankings(dateStr) (no config/deps args)
+    const rankings = metadata.rootDataDependencies?.includes('rankings') ? await loader.loadRankings(dateStr) : null;
+
+    // [FIX] Correct method: loadRankings(prevStr)
     let yesterdayRankings = null;
     if (metadata.rootDataDependencies?.includes('rankings') && metadata.isHistorical) {
       const prevDate = new Date(dateStr); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
       const prevStr = prevDate.toISOString().slice(0, 10);
-      yesterdayRankings = await loader.
+      yesterdayRankings = await loader.loadRankings(prevStr);
     }
 
-
+    // [FIX] Correct method: loadSocial(dateStr)
+    const socialContainer = metadata.rootDataDependencies?.includes('social') ? await loader.loadSocial(dateStr) : null;
 
-    // [NEW] Load New Root Data Types for Profile Metrics
-    // [FIX] Enforce canHaveMissingRoots
     const allowMissing = metadata.canHaveMissingRoots === true;
 
+    // [FIX] Correct method: loadRatings(dateStr)
     let ratings = null;
     if (metadata.rootDataDependencies?.includes('ratings')) {
       try {
-        ratings = await loader.
+        ratings = await loader.loadRatings(dateStr);
       } catch (e) {
         if (!allowMissing) {
           throw new Error(`[StandardExecutor] Required root 'ratings' failed to load for ${metadata.name}: ${e.message}`);
@@ -312,10 +308,11 @@ class StandardExecutor {
       }
     }
 
+    // [FIX] Correct method: loadPageViews(dateStr)
     let pageViews = null;
     if (metadata.rootDataDependencies?.includes('pageViews')) {
       try {
-        pageViews = await loader.
+        pageViews = await loader.loadPageViews(dateStr);
       } catch (e) {
         if (!allowMissing) {
           throw new Error(`[StandardExecutor] Required root 'pageViews' failed to load for ${metadata.name}: ${e.message}`);
@@ -327,10 +324,11 @@ class StandardExecutor {
       }
     }
 
+    // [FIX] Correct method: loadWatchlistMembership(dateStr)
     let watchlistMembership = null;
    if (metadata.rootDataDependencies?.includes('watchlist')) {
      try {
-        watchlistMembership = await loader.loadWatchlistMembership(
+        watchlistMembership = await loader.loadWatchlistMembership(dateStr);
      } catch (e) {
        if (!allowMissing) {
          throw new Error(`[StandardExecutor] Required root 'watchlist' failed to load for ${metadata.name}: ${e.message}`);
@@ -342,10 +340,11 @@ class StandardExecutor {
       }
     }
 
+    // [FIX] Correct method: loadAlertHistory(dateStr)
     let alertHistory = null;
     if (metadata.rootDataDependencies?.includes('alerts')) {
       try {
-        alertHistory = await loader.
+        alertHistory = await loader.loadAlertHistory(dateStr);
       } catch (e) {
         if (!allowMissing) {
           throw new Error(`[StandardExecutor] Required root 'alerts' failed to load for ${metadata.name}: ${e.message}`);
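The executePerUser fixes above converge on a date-keyed loader surface (loadPIMasterList(), loadVerifications(), loadRankings(dateStr), loadSocial(dateStr), loadRatings(dateStr), loadPageViews(dateStr), loadWatchlistMembership(dateStr), loadAlertHistory(dateStr)) and tolerate a missing optional root only when canHaveMissingRoots is set. A rough sketch of that load pattern; the stub loader below is an assumption, only the method names and the allowMissing rule come from the diff:

```js
// Sketch of the optional-root load pattern used in executePerUser above.
// `loader` here is a stub; the real loader lives elsewhere in the package.
const loader = {
  async loadRatings(dateStr) { return { dateStr, items: [] }; },
  async loadPageViews(dateStr) { throw new Error('not available yet'); },
};

async function loadOptionalRoot(loader, method, dateStr, allowMissing, name) {
  try {
    return await loader[method](dateStr);
  } catch (e) {
    if (!allowMissing) {
      throw new Error(`Required root '${name}' failed to load: ${e.message}`);
    }
    return null; // tolerated when canHaveMissingRoots === true
  }
}

(async () => {
  const allowMissing = true; // stands in for metadata.canHaveMissingRoots === true
  console.log(await loadOptionalRoot(loader, 'loadRatings', '2024-01-01', allowMissing, 'ratings'));
  console.log(await loadOptionalRoot(loader, 'loadPageViews', '2024-01-01', allowMissing, 'pageViews'));
})();
```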
package/functions/task-engine/helpers/data_storage_helpers.js

@@ -132,13 +132,9 @@ async function storeSignedInUserSocialPosts({ db, logger, collectionRegistry, ci
   }, { merge: false });
 
   // 2. Store latest posts to user-centric collection (for fallback)
-
-
-
-  }
-  const userPostsRef = db.collection(
-    getCollectionPath('signedInUsers', 'socialPosts', { cid })
-  );
+  // Path structure: SignedInUsers/{cid}/posts/{postId}
+  // Construct path directly - we know the structure
+  const collectionName = 'SignedInUsers';
 
   // Store each post individually in user-centric collection
   const batch = db.batch();
@@ -149,7 +145,11 @@ async function storeSignedInUserSocialPosts({ db, logger, collectionRegistry, ci
     const postId = post.id || post.postId;
     if (!postId) continue;
 
-
+    // Construct path step-by-step: SignedInUsers/{cid}/posts/{postId}
+    const postRef = db.collection(collectionName)
+      .doc(String(cid))
+      .collection('posts')
+      .doc(postId);
     batch.set(postRef, {
       ...post,
       fetchedAt: FieldValue.serverTimestamp(),
@@ -300,13 +300,9 @@ async function storePopularInvestorSocialPosts({ db, logger, collectionRegistry,
   }, { merge: false });
 
   // 2. Store latest posts to user-centric collection (for fallback)
-
-
-
-  }
-  const userPostsRef = db.collection(
-    getCollectionPath('popularInvestors', 'socialPosts', { piCid: String(cid) })
-  );
+  // Path structure: PopularInvestors/{piCid}/posts/{postId}
+  // Construct path directly - we know the structure
+  const collectionName = 'PopularInvestors';
 
   // Store each post individually in user-centric collection
   const batch = db.batch();
@@ -317,7 +313,11 @@ async function storePopularInvestorSocialPosts({ db, logger, collectionRegistry,
     const postId = post.id || post.postId;
     if (!postId) continue;
 
-
+    // Construct path step-by-step: PopularInvestors/{cid}/posts/{postId}
+    const postRef = db.collection(collectionName)
+      .doc(String(cid))
+      .collection('posts')
+      .doc(postId);
     batch.set(postRef, {
       ...post,
       fetchedAt: FieldValue.serverTimestamp(),
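Both storage helpers now build the user-centric document path directly as {Collection}/{cid}/posts/{postId} and write the posts in a single Firestore batch, instead of going through getCollectionPath. A condensed sketch of that write pattern with the Admin SDK; the initialisation and example data are illustrative:

```js
// Sketch of the batched, user-centric post write used above.
// Assumes firebase-admin is initialised elsewhere; the example data is made up.
const admin = require('firebase-admin');
const { FieldValue } = admin.firestore;

async function storePostsForUser(db, collectionName, cid, posts) {
  const batch = db.batch();
  for (const post of posts) {
    const postId = post.id || post.postId;
    if (!postId) continue;
    // Path: {collectionName}/{cid}/posts/{postId}
    const postRef = db.collection(collectionName)
      .doc(String(cid))
      .collection('posts')
      .doc(String(postId));
    batch.set(postRef, { ...post, fetchedAt: FieldValue.serverTimestamp() });
  }
  await batch.commit();
}

// Example call for a Popular Investor:
// await storePostsForUser(admin.firestore(), 'PopularInvestors', 12345, [{ id: 'p1', text: 'hello' }]);
```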
package/functions/task-engine/helpers/popular_investor_helpers.js

@@ -19,34 +19,68 @@ const {
 // 1. SHARED INTERNAL HELPERS (The "Engine")
 // ==========================================
 
-async function fetchWithRetry(url, options, proxyManager, logger, label) {
-  //
+async function fetchWithRetry(url, options, proxyManager, logger, label, headerManager = null) {
+  // SIMPLIFIED: Always try proxy first, then fallback to direct
+  // Use header manager if provided
+  let finalOptions = { ...options };
+  let headerId = null;
+
+  if (headerManager) {
+    try {
+      const headerResult = await headerManager.selectHeader();
+      if (headerResult && headerResult.header) {
+        finalOptions.headers = {
+          ...(finalOptions.headers || {}),
+          ...headerResult.header
+        };
+        headerId = headerResult.id;
+      }
+    } catch (e) {
+      logger.log('WARN', `[${label}] Failed to get header from headerManager: ${e.message}`);
+    }
+  }
+
+  // Try proxy first (if circuit breaker allows)
   if (shouldTryProxy()) {
     try {
-      const res = await proxyManager.fetch(url,
+      const res = await proxyManager.fetch(url, finalOptions);
       if (res.ok) {
         recordProxyOutcome(true);
+        if (headerManager && headerId) {
+          headerManager.updatePerformance(headerId, true);
+        }
         return res;
       } else {
-        //
-        const
-        logger.log('WARN', `[${label}] Proxy returned status ${res.status} for ${url}. Response: ${errorText.substring(0, 200)}`);
+        // Proxy returned non-OK status - record failure but don't throw yet
+        const status = res.status;
         recordProxyOutcome(false);
+        logger.log('WARN', `[${label}] Proxy returned status ${status} for ${url}. Failures: ${getFailureCount()}/${getMaxFailures()}. Will try direct fetch.`);
+        // Continue to fallback - don't throw here
      }
    } catch (e) {
      recordProxyOutcome(false);
-      logger.log('WARN', `[${label}] Proxy
+      logger.log('WARN', `[${label}] Proxy exception for ${url}: ${e.message}. Failures: ${getFailureCount()}/${getMaxFailures()}. Will try direct fetch.`);
+      // Continue to fallback - don't throw here
    }
+  } else {
+    logger.log('INFO', `[${label}] Circuit breaker open (${getFailureCount()}/${getMaxFailures()} failures), skipping proxy for ${url}`);
  }
 
-  // Fallback
+  // Fallback to direct fetch only if proxy failed or circuit breaker is open
+  logger.log('INFO', `[${label}] Using direct fetch for ${url}`);
   const directFetch = typeof fetch !== 'undefined' ? fetch : require('node-fetch');
-  const res = await directFetch(url,
+  const res = await directFetch(url, finalOptions);
   if (!res.ok) {
     const errorText = await res.text().catch(() => 'Unable to read response');
     logger.log('ERROR', `[${label}] Direct fetch failed for ${url}: Status ${res.status}. Response: ${errorText.substring(0, 200)}`);
     throw new Error(`Fetch failed: ${res.status} ${res.statusText} - ${errorText.substring(0, 100)}`);
   }
+
+  // Update header manager on success
+  if (headerManager && headerId) {
+    headerManager.updatePerformance(headerId, true);
+  }
+
   return res;
 }
 
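fetchWithRetry now leans on a shared circuit breaker (shouldTryProxy, recordProxyOutcome, getFailureCount, getMaxFailures; social_helpers.js below requires the same four from ../utils/proxy_circuit_breaker). The diff does not show that module, so the following is only a guess at a minimal failure-counting implementation that satisfies those calls; the threshold and reset behaviour are assumptions:

```js
// Hypothetical sketch of a proxy circuit breaker exposing the four functions
// the diff calls. The real module in the package may differ substantially.
const MAX_FAILURES = 5;   // assumed threshold
let consecutiveFailures = 0;

function shouldTryProxy() {
  // Open the circuit (skip the proxy) once too many consecutive failures pile up.
  return consecutiveFailures < MAX_FAILURES;
}

function recordProxyOutcome(success) {
  consecutiveFailures = success ? 0 : consecutiveFailures + 1;
}

function getFailureCount() { return consecutiveFailures; }
function getMaxFailures() { return MAX_FAILURES; }

module.exports = { shouldTryProxy, recordProxyOutcome, getFailureCount, getMaxFailures };
```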
@@ -62,13 +96,13 @@ async function updateLastUpdated(db, collectionRegistry, cid, userType, dataType
 }
 
 async function processPortfolio(context, config, taskData, isPI) {
-  const { db, logger, collectionRegistry, proxyManager } = context;
+  const { db, logger, collectionRegistry, proxyManager, headerManager } = context;
   const { cid, username, uuid, today, requestOptions } = taskData;
   const url = `${config.ETORO_API_PORTFOLIO_URL}?cid=${cid}&client_request_id=${uuid}`;
 
   logger.log('INFO', `[Portfolio] Fetching ${isPI ? 'PI' : 'Signed-In User'} portfolio from: ${url}`);
 
-  const res = await fetchWithRetry(url, requestOptions, proxyManager, logger, 'Portfolio');
+  const res = await fetchWithRetry(url, requestOptions, proxyManager, logger, 'Portfolio', headerManager);
   const data = await res.json();
   data.fetchedAt = new Date();
   data.username = username;
@@ -81,7 +115,7 @@ async function processPortfolio(context, config, taskData, isPI) {
   for (const pos of topPositions) {
     try {
       const posUrl = `${config.ETORO_API_POSITIONS_URL}?cid=${cid}&InstrumentID=${pos.InstrumentID}&client_request_id=${uuid}`;
-      const deepRes = await fetchWithRetry(posUrl, requestOptions, proxyManager, logger, 'DeepPos');
+      const deepRes = await fetchWithRetry(posUrl, requestOptions, proxyManager, logger, 'DeepPos', headerManager);
       deepPositions.push({ instrumentId: pos.InstrumentID, ...(await deepRes.json()) });
     } catch (e) {} // Skip failed deep positions
   }
@@ -98,12 +132,12 @@ async function processPortfolio(context, config, taskData, isPI) {
 }
 
 async function processHistory(context, config, taskData, isPI) {
-  const { db, logger, collectionRegistry, proxyManager } = context;
+  const { db, logger, collectionRegistry, proxyManager, headerManager } = context;
   const { cid, uuid, today, requestOptions } = taskData;
   const oneYearAgo = new Date(); oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
   const url = `${config.ETORO_API_HISTORY_URL}?StartTime=${oneYearAgo.toISOString()}&PageNumber=1&ItemsPerPage=30000&PublicHistoryPortfolioFilter=&CID=${cid}&client_request_id=${uuid}`;
 
-  const res = await fetchWithRetry(url, requestOptions, proxyManager, logger, 'History');
+  const res = await fetchWithRetry(url, requestOptions, proxyManager, logger, 'History', headerManager);
   const data = await res.json();
 
   // Filter valid close reasons
@@ -119,7 +153,7 @@ async function processHistory(context, config, taskData, isPI) {
 }
 
 async function processSocial(context, config, taskData, isPI) {
-  const { db, logger, collectionRegistry, proxyManager } = context;
+  const { db, logger, collectionRegistry, proxyManager, headerManager } = context;
   const { cid, username, uuid, today, requestOptions } = taskData;
   const { getGcidForUser } = require('../../social-task-handler/helpers/handler_helpers');
 
@@ -127,7 +161,7 @@ async function processSocial(context, config, taskData, isPI) {
   const gcid = await getGcidForUser(context, config.social || {}, cid, username);
   const url = `${config.social?.userFeedApiUrl || 'https://www.etoro.com/api/edm-streams/v1/feed/user/top/'}${gcid}?take=10&client_request_id=${uuid}`;
 
-  const res = await fetchWithRetry(url, requestOptions, proxyManager, logger, 'Social');
+  const res = await fetchWithRetry(url, requestOptions, proxyManager, logger, 'Social', headerManager);
   const data = await res.json();
   const posts = (data.discussions || []).slice(0, 30).map(d => ({
     id: d.post.id,
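The new headerManager parameter threaded through these helpers is only exercised via selectHeader() (returning { id, header }) and updatePerformance(id, success). A toy manager with that surface, purely to show how something could plug into fetchWithRetry; the rotation and scoring logic is invented for illustration:

```js
// Toy header manager matching the interface fetchWithRetry expects:
// selectHeader() -> { id, header }, updatePerformance(id, success).
class SimpleHeaderManager {
  constructor(headerSets) {
    this.headerSets = headerSets.map((header, i) => ({ id: `h${i}`, header, score: 0 }));
  }

  async selectHeader() {
    // Prefer the header set with the best recent score.
    const best = [...this.headerSets].sort((a, b) => b.score - a.score)[0];
    return { id: best.id, header: best.header };
  }

  updatePerformance(id, success) {
    const entry = this.headerSets.find(h => h.id === id);
    if (entry) entry.score += success ? 1 : -1;
  }
}

// Usage: context objects elsewhere in the package would carry this as headerManager.
const headerManager = new SimpleHeaderManager([
  { 'User-Agent': 'Mozilla/5.0', Accept: 'application/json' },
]);
```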
package/functions/task-engine/helpers/social_helpers.js

@@ -90,13 +90,59 @@ async function handleSocialFetch(taskData, config, dependencies, batchCounterRef
 
   logger.log('INFO', `[SocialFetch/${taskId}] Requesting URL: ${url}`);
 
+  // Use circuit breaker logic: always try proxy first, fallback to direct only after proxy fails
+  const { shouldTryProxy, recordProxyOutcome, getFailureCount, getMaxFailures } = require('../utils/proxy_circuit_breaker');
+
   let response;
-
-
-
-
-
-
+  let proxyAttempted = false;
+  let proxyFailed = false;
+
+  // Always try proxy first (unless circuit breaker is open and we know it will fail)
+  if (shouldTryProxy()) {
+    proxyAttempted = true;
+    try {
+      response = await proxyManager.fetch(url, { headers: requestHeaders });
+      if (response.ok) {
+        recordProxyOutcome(true);
+        headerManager.updatePerformance(selectedHeader.id, true);
+        logger.log('TRACE', `[SocialFetch/${taskId}] Proxy fetch succeeded`);
+      } else {
+        recordProxyOutcome(false);
+        proxyFailed = true;
+        headerManager.updatePerformance(selectedHeader.id, false);
+        logger.log('WARN', `[SocialFetch/${taskId}] Proxy returned status ${response.status} for ${url}. Failures: ${getFailureCount()}/${getMaxFailures()}`);
+      }
+    } catch (err) {
+      recordProxyOutcome(false);
+      proxyFailed = true;
+      headerManager.updatePerformance(selectedHeader.id, false);
+      logger.log('WARN', `[SocialFetch/${taskId}] Proxy failed for ${url}: ${err.message}. Failures: ${getFailureCount()}/${getMaxFailures()}`);
+    }
+  } else {
+    // Circuit breaker is open, but try proxy once more to check if it recovered
+    logger.log('INFO', `[SocialFetch/${taskId}] Circuit breaker open (${getFailureCount()}/${getMaxFailures()} failures), attempting proxy once more`);
+    proxyAttempted = true;
+    try {
+      response = await proxyManager.fetch(url, { headers: requestHeaders });
+      if (response.ok) {
+        recordProxyOutcome(true); // Reset circuit breaker on success
+        headerManager.updatePerformance(selectedHeader.id, true);
+        logger.log('INFO', `[SocialFetch/${taskId}] Proxy succeeded despite circuit breaker. Resetting circuit breaker.`);
+      } else {
+        proxyFailed = true;
+        headerManager.updatePerformance(selectedHeader.id, false);
+        logger.log('WARN', `[SocialFetch/${taskId}] Proxy failed (circuit breaker open): Status ${response.status}`);
+      }
+    } catch (err) {
+      proxyFailed = true;
+      headerManager.updatePerformance(selectedHeader.id, false);
+      logger.log('WARN', `[SocialFetch/${taskId}] Proxy failed (circuit breaker open): ${err.message}`);
+    }
+  }
+
+  // Fallback to direct fetch only after proxy fails
+  if (proxyFailed || !proxyAttempted) {
+    logger.log('INFO', `[SocialFetch/${taskId}] Falling back to direct fetch for ${url}${proxyFailed ? ' (proxy failed)' : ''}`);
   try {
     const directFetch = typeof fetch !== 'undefined' ? fetch : require('node-fetch');
     response = await directFetch(url, { headers: requestHeaders });
|