bulltrackers-module 1.0.152 → 1.0.153
This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- package/functions/appscript-api/index.js +8 -38
- package/functions/computation-system/helpers/computation_pass_runner.js +38 -183
- package/functions/computation-system/helpers/orchestration_helpers.js +105 -326
- package/functions/computation-system/utils/data_loader.js +38 -133
- package/functions/computation-system/utils/schema_capture.js +7 -41
- package/functions/computation-system/utils/utils.js +37 -124
- package/functions/core/utils/firestore_utils.js +8 -46
- package/functions/core/utils/intelligent_header_manager.js +26 -128
- package/functions/core/utils/intelligent_proxy_manager.js +33 -171
- package/functions/core/utils/pubsub_utils.js +7 -24
- package/functions/dispatcher/helpers/dispatch_helpers.js +9 -30
- package/functions/dispatcher/index.js +7 -30
- package/functions/etoro-price-fetcher/helpers/handler_helpers.js +12 -80
- package/functions/fetch-insights/helpers/handler_helpers.js +18 -70
- package/functions/generic-api/helpers/api_helpers.js +28 -167
- package/functions/generic-api/index.js +49 -188
- package/functions/invalid-speculator-handler/helpers/handler_helpers.js +10 -47
- package/functions/orchestrator/helpers/discovery_helpers.js +1 -5
- package/functions/orchestrator/index.js +1 -6
- package/functions/price-backfill/helpers/handler_helpers.js +13 -69
- package/functions/social-orchestrator/helpers/orchestrator_helpers.js +5 -37
- package/functions/social-task-handler/helpers/handler_helpers.js +29 -186
- package/functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers.js +19 -78
- package/functions/task-engine/handler_creator.js +2 -8
- package/functions/task-engine/helpers/update_helpers.js +17 -83
- package/functions/task-engine/helpers/verify_helpers.js +11 -56
- package/functions/task-engine/utils/firestore_batch_manager.js +16 -67
- package/functions/task-engine/utils/task_engine_utils.js +6 -35
- package/index.js +45 -43
- package/package.json +1 -1
package/functions/social-task-handler/helpers/handler_helpers.js

@@ -15,75 +15,21 @@ const { FieldValue, FieldPath } = require('@google-cloud/firestore');
  */
 async function getSentimentFromGemini(dependencies, snippet) {
   const { logger, geminiModel } = dependencies;
-
-  if (!geminiModel) {
-    logger.log('WARN', '[getSentimentFromGemini] Gemini model not found in dependencies.');
-    return { overallSentiment: "Neutral", topics: [] }; // Return default object
-  }
-
-  // --- NEW PROMPT ---
-  const prompt = `You are a financial analyst. Analyze the following social media post.
-Your task is to provide:
-1. The overall sentiment (Bullish, Bearish, or Neutral) toward the main asset. Return only one word.
-2. A list of key topics or events mentioned (e.g., "FOMC", "CPI", "Earnings", "Inflation", "War", "Acquisition"). If no specific event is mentioned, return an empty array.
-
-Return ONLY a valid JSON object in this exact format:
-{
-  "overallSentiment": "...",
-  "topics": ["...", "..."]
-}
-
-Post: "${snippet}"`
-  // --- END NEW PROMPT ---
-
-  try {
-    const request = {
-      contents: [
-        {
-          role: "user",
-          parts: [ { text: prompt } ]
-        }
-      ],
-      generationConfig: {
-        temperature: 0.1,
-        topP: 0.1,
-        maxOutputTokens: 256 // Increased tokens for JSON
-      }
-    };
-
+  if (!geminiModel) { logger.log('WARN', '[getSentimentFromGemini] Gemini model not found in dependencies.'); return { overallSentiment: "Neutral", topics: [] }; }
+  const prompt = `You are a financial analyst. Analyze the following social media post. Your task is to provide: 1. The overall sentiment (Bullish, Bearish, or Neutral) toward the main asset. Return only one word. 2. A list of key topics or events mentioned (e.g., "FOMC", "CPI", "Earnings", "Inflation", "War", "Acquisition"). If no specific event is mentioned, return an empty array. Return ONLY a valid JSON object in this exact format: { "overallSentiment": "...", "topics": ["...", "..."] } Post: "${snippet}"`
+  try { const request = { contents: [ { role: "user", parts: [ { text: prompt } ] } ], generationConfig: { temperature: 0.1, topP: 0.1, maxOutputTokens: 256 } };
     const result = await geminiModel.generateContent(request);
     const response = result.response;
-
     const text = response?.candidates?.[0]?.content?.parts?.[0]?.text?.trim() || '';
-
-    // --- NEW: Parse JSON response ---
     try {
-      // Find the JSON block
       const jsonMatch = text.match(/\{[\s\S]*\}/);
       if (jsonMatch && jsonMatch[0]) {
         const parsedJson = JSON.parse(jsonMatch[0]);
-
-        if (parsedJson && parsedJson.overallSentiment && Array.isArray(parsedJson.topics)) {
-          logger.log('INFO', `[getSentimentFromGemini] Classified sentiment: ${parsedJson.overallSentiment}, Topics: ${parsedJson.topics.join(', ')}`);
-          return parsedJson; // Return the full object
-        }
-      }
+        if (parsedJson && parsedJson.overallSentiment && Array.isArray(parsedJson.topics)) { logger.log('INFO', `[getSentimentFromGemini] Classified sentiment: ${parsedJson.overallSentiment}, Topics: ${parsedJson.topics.join(', ')}`); return parsedJson; } }
       logger.log('WARN', `[getSentimentFromGemini] Unexpected JSON structure from AI: "${text}". Defaulting.`);
       return { overallSentiment: "Neutral", topics: [] };
-    } catch (parseError) {
-
-      return { overallSentiment: "Neutral", topics: [] };
-    }
-    // --- END NEW JSON PARSE ---
-
-  } catch (error) {
-    logger.log('ERROR', '[getSentimentFromGemini] Error calling Gemini API.', {
-      errorMessage: error.message,
-      errorStack: error.stack
-    });
-    // Default to 'Neutral' on API error to avoid halting the pipeline
-    return { overallSentiment: "Neutral", topics: [] }; // Return default object
-  }
+    } catch (parseError) { logger.log('WARN', `[getSentimentFromGemini] Failed to parse JSON response from AI: "${text}". Defaulting.`, { error: parseError.message }); return { overallSentiment: "Neutral", topics: [] }; }
+  } catch (error) { logger.log('ERROR', '[getSentimentFromGemini] Error calling Gemini API.', { errorMessage: error.message, errorStack: error.stack }); return { overallSentiment: "Neutral", topics: [] }; }
 }
 
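The refactor above keeps the fallback-to-Neutral behavior but now logs a WARN when JSON parsing fails. The extraction itself hinges on the greedy regex \{[\s\S]*\}, which grabs everything from the first { to the last } so that replies wrapped in markdown fences or surrounding prose still parse. A minimal standalone sketch of that technique (function and variable names here are illustrative, not from the package):

    // Sketch: pull a JSON object out of a free-form LLM reply and validate its shape.
    function extractSentiment(rawReply) {
      const fallback = { overallSentiment: 'Neutral', topics: [] };
      const jsonMatch = rawReply.match(/\{[\s\S]*\}/); // greedy: first '{' to last '}'
      if (!jsonMatch) return fallback;
      try {
        const parsed = JSON.parse(jsonMatch[0]);
        const valid = parsed && typeof parsed.overallSentiment === 'string' && Array.isArray(parsed.topics);
        return valid ? parsed : fallback;
      } catch {
        return fallback; // malformed JSON: default rather than halting the pipeline
      }
    }

    // extractSentiment('```json\n{"overallSentiment":"Bullish","topics":["CPI"]}\n```')
    //   -> { overallSentiment: 'Bullish', topics: ['CPI'] }
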
@@ -97,166 +43,73 @@ Post: "${snippet}"`
  */
 exports.handleSocialTask = async (message, context, config, dependencies) => {
   const { db, logger, headerManager, proxyManager } = dependencies;
-
   let task;
-  try {
-    task = JSON.parse(Buffer.from(message.data, 'base64').toString('utf-8'));
-  } catch (e) {
-    logger.log('ERROR', '[SocialTask] Failed to parse Pub/Sub message data.', { error: e.message, data: message.data });
-    return; // Acknowledge the message
-  }
-
+  try { task = JSON.parse(Buffer.from(message.data, 'base64').toString('utf-8')); } catch (e) { logger.log('ERROR', '[SocialTask] Failed to parse Pub/Sub message data.', { error: e.message, data: message.data }); return; }
   const { tickerId, since } = task;
   const sinceDate = new Date(since);
   const taskId = `social-${tickerId}-${context.eventId || Date.now()}`;
   logger.log('INFO', `[SocialTask/${taskId}] Processing ticker ${tickerId} for posts since ${since}.`);
-
-  // --- Config validation ---
-  if (!config.socialApiBaseUrl || !config.socialInsightsCollectionName || !config.processedPostsCollectionName) {
-    logger.log('ERROR', `[SocialTask/${taskId}] Missing required configuration.`);
-    throw new Error('Missing required configuration for Social Task.');
-  }
-
+  if (!config.socialApiBaseUrl || !config.socialInsightsCollectionName || !config.processedPostsCollectionName) { logger.log('ERROR', `[SocialTask/${taskId}] Missing required configuration.`); throw new Error('Missing required configuration for Social Task.'); }
   const processedPostsRef = db.collection(config.processedPostsCollectionName);
   const today = new Date().toISOString().slice(0, 10);
   const insightsCollectionRef = db.collection(config.socialInsightsCollectionName).doc(today).collection('posts');
-
   let offset = 0;
-  const take = 10;
+  const take = 10;
   let keepFetching = true;
   let postsProcessed = 0;
   let postsSaved = 0;
-  const processedInThisRun = new Set();
-
+  const processedInThisRun = new Set();
   try {
     while (keepFetching) {
       const url = `${config.socialApiBaseUrl}${tickerId}?take=${take}&offset=${offset}&reactionsPageSize=20`;
       logger.log('TRACE', `[SocialTask/${taskId}] Fetching: ${url}`);
-
       const selectedHeader = await headerManager.selectHeader();
       let wasSuccess = false;
       let response;
-
-      try {
-        response = await proxyManager.fetch(url, { headers: selectedHeader.header });
-        if (!response.ok) {
-          throw new Error(`API error ${response.status}`);
-        }
-        wasSuccess = true;
-      } catch (fetchError) {
-        logger.log('WARN', `[SocialTask/${taskId}] Fetch failed for offset ${offset}.`, { err: fetchError.message });
-        keepFetching = false; // Stop on fetch error
-        if (selectedHeader) headerManager.updatePerformance(selectedHeader.id, false);
-        continue;
-      } finally {
-        if (selectedHeader) headerManager.updatePerformance(selectedHeader.id, wasSuccess);
-      }
-
+      try { response = await proxyManager.fetch(url, { headers: selectedHeader.header }); if (!response.ok) { throw new Error(`API error ${response.status}`); } wasSuccess = true;
+      } catch (fetchError) { logger.log('WARN', `[SocialTask/${taskId}] Fetch failed for offset ${offset}.`, { err: fetchError.message }); keepFetching = false; if (selectedHeader) headerManager.updatePerformance(selectedHeader.id, false); continue;
+      } finally { if (selectedHeader) headerManager.updatePerformance(selectedHeader.id, wasSuccess); }
       const page = await response.json();
       const discussions = page?.discussions;
-
-      if (!Array.isArray(discussions) || discussions.length === 0) {
-        logger.log('INFO', `[SocialTask/${taskId}] No more posts found at offset ${offset}. Stopping.`);
-        keepFetching = false;
-        continue;
-      }
-
+      if (!Array.isArray(discussions) || discussions.length === 0) { logger.log('INFO', `[SocialTask/${taskId}] No more posts found at offset ${offset}. Stopping.`); keepFetching = false; continue; }
       const postIds = discussions.map(d => d.post.id).filter(Boolean);
-      if (postIds.length === 0) {
-        offset += take;
-        continue;
-      }
-
-      // --- Deduplication Check ---
+      if (postIds.length === 0) { offset += take; continue; }
       const existingDocs = await processedPostsRef.where(FieldPath.documentId(), 'in', postIds).get();
       const existingIds = new Set(existingDocs.docs.map(d => d.id));
-
      const batch = db.batch();
       let newPostsInBatch = 0;
-
       for (const discussion of discussions) {
         const post = discussion?.post;
         if (!post || !post.id || !post.message?.text) continue;
-
-        // Stop pagination if we've reached posts older than our window
         const postCreatedDate = new Date(post.created);
-        if (postCreatedDate < sinceDate) {
-
-          continue;
-        }
-
-        // Skip if already processed
-        if (existingIds.has(post.id) || processedInThisRun.has(post.id)) {
-          continue;
-        }
-
-        // Filter language (accept 'en' and 'en-gb')
+        if (postCreatedDate < sinceDate) { keepFetching = false; continue; }
+        if (existingIds.has(post.id) || processedInThisRun.has(post.id)) { continue; }
         const lang = post.message.languageCode || 'unknown';
-        if (lang !== 'en' && lang !== 'en-gb') {
-          continue; // Skip non-English posts
-        }
-
-        // --- Process the new post ---
+        if (lang !== 'en' && lang !== 'en-gb') { continue; }
         postsProcessed++;
         processedInThisRun.add(post.id);
         const text = post.message.text;
+        const likeCount = discussion.emotionsData?.like?.paging?.totalCount || 0;
+        const commentCount = discussion.summary?.totalCommentsAndReplies || 0;
+        let pollData = null;
 
-        // --- START: Enhanced Data Extraction ---
-        const likeCount = discussion.emotionsData?.like?.paging?.totalCount || 0;
-        const commentCount = discussion.summary?.totalCommentsAndReplies || 0;
-        let pollData = null;
-
-        // Check for poll data and extract it
         if (post.type === 'Poll' && post.metadata?.poll?.options) {
-          pollData = {
-            id: post.metadata.poll.id,
-            options: post.metadata.poll.options.map(opt => ({
-              id: opt.id,
-              text: opt.text,
-              votesCount: opt.votesCount || 0
-            }))
-          };
-        }
-        // --- END: Enhanced Data Extraction ---
+          pollData = { id: post.metadata.poll.id, options: post.metadata.poll.options.map(opt => ({ id: opt.id, text: opt.text, votesCount: opt.votesCount || 0 })) }; }
 
 
-
-        const MAX_CHARS = 500; // ~125 tokens, very cheap
+        const MAX_CHARS = 500;
         let snippet = text;
-        if (text.length > (MAX_CHARS * 2)) {
-
-          snippet = text.substring(0, MAX_CHARS) + " ... " + text.substring(text.length - MAX_CHARS);
-        } else if (text.length > MAX_CHARS) {
-          // Just truncate
-          snippet = text.substring(0, MAX_CHARS);
-        }
+        if (text.length > (MAX_CHARS * 2)) { snippet = text.substring(0, MAX_CHARS) + " ... " + text.substring(text.length - MAX_CHARS);
+        } else if (text.length > MAX_CHARS) { snippet = text.substring(0, MAX_CHARS); }
 
-        // 2. AI Sentiment Analysis
-        // Pass full dependencies object
-        // --- MODIFIED: Store the entire result object ---
         const sentimentResult = await getSentimentFromGemini(dependencies, snippet);
 
-
-        const postData = {
-          sentiment: sentimentResult, // <-- Store the full {overallSentiment, topics} object
-          textSnippet: snippet,
-          fullText: text, // Store the full original text
-          language: lang,
-          tickers: post.tags.map(t => t.market?.symbolName).filter(Boolean),
-          postOwnerId: post.owner?.id,
-          likeCount: likeCount, // Store like count
-          commentCount: commentCount, // Store comment count
-          pollData: pollData, // Store poll data (will be null if not a poll)
-          createdAt: post.created,
-          fetchedAt: FieldValue.serverTimestamp()
-        };
-        // --- END MODIFICATION ---
+
+        const postData = { sentiment: sentimentResult, textSnippet: snippet, fullText: text, language: lang, tickers: post.tags.map(t => t.market?.symbolName).filter(Boolean), postOwnerId: post.owner?.id, likeCount: likeCount, commentCount: commentCount, pollData: pollData, createdAt: post.created, fetchedAt: FieldValue.serverTimestamp() };
 
-        // 4. Add to batch for `daily_social_insights`
         const insightDocRef = insightsCollectionRef.doc(post.id);
         batch.set(insightDocRef, postData);
 
-        // 5. Add to batch for `processed_social_posts` (dedupe collection)
         const dedupeDocRef = processedPostsRef.doc(post.id);
         batch.set(dedupeDocRef, { processedAt: FieldValue.serverTimestamp() });
 
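The dedupe read in this hunk checks a whole page of post IDs in one query via where(FieldPath.documentId(), 'in', postIds). Firestore caps the number of values in an 'in' filter (historically 10, 30 in newer SDK/server versions), which is presumably why the page size is pinned at take = 10. A hedged sketch of the same check with explicit chunking, under that assumption:

    const { FieldPath } = require('@google-cloud/firestore');

    // Sketch: return the subset of `ids` that already exist in `collRef`,
    // chunked so each 'in' filter stays under the Firestore limit.
    async function findExistingIds(collRef, ids, chunkSize = 10) {
      const existing = new Set();
      for (let i = 0; i < ids.length; i += chunkSize) {
        const chunk = ids.slice(i, i + chunkSize);
        const snap = await collRef.where(FieldPath.documentId(), 'in', chunk).get();
        snap.docs.forEach(d => existing.add(d.id));
      }
      return existing;
    }
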
@@ -269,21 +122,11 @@ exports.handleSocialTask = async (message, context, config, dependencies) => {
        logger.log('INFO', `[SocialTask/${taskId}] Saved ${newPostsInBatch} new posts from offset ${offset}.`);
       }
 
-      // Continue to next page
       offset += take;
     }
 
     logger.log('SUCCESS', `[SocialTask/${taskId}] Run complete. Processed ${postsProcessed} new posts, saved ${postsSaved}.`);
 
-  } catch (error) {
-
-    throw error;
-  } finally {
-    // Always flush header performance
-    try {
-      await headerManager.flushPerformanceUpdates();
-    } catch (flushError) {
-      logger.log('ERROR', `[SocialTask/${taskId}] Failed to flush header performance.`, { errorMessage: flushError.message });
-    }
-  }
+  } catch (error) { logger.log('ERROR', `[SocialTask/${taskId}] Fatal error during task execution.`, { errorMessage: error.message, errorStack: error.stack }); throw error;
+  } finally { try { await headerManager.flushPerformanceUpdates(); } catch (flushError) { logger.log('ERROR', `[SocialTask/${taskId}] Failed to flush header performance.`, { errorMessage: flushError.message }); } }
 };
 
package/functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers.js

@@ -14,26 +14,11 @@ const { FieldValue } = require('@google-cloud/firestore');
 exports.runCleanup = async (config, dependencies) => {
   const { logger } = dependencies;
   logger.log('INFO', '[CleanupHelpers] Running cleanup orchestrator...');
-
-  try {
-    // Start cleanup for pending users
-    const { batch: batchAfterPending, count: pendingRemoved } = await cleanupPendingSpeculators(config, dependencies);
-
-    // Continue with the same batch for stale speculators
+  try { const { batch: batchAfterPending, count: pendingRemoved } = await cleanupPendingSpeculators(config, dependencies);
   const { batch: finalBatch, count: staleRemoved } = await cleanupStaleSpeculators(config, dependencies, batchAfterPending);
-
-    if (pendingRemoved > 0 || staleRemoved > 0) {
-      await finalBatch.commit();
-      logger.log('SUCCESS', `[CleanupHelpers] Cleanup commit successful. Removed ${pendingRemoved} pending, ${staleRemoved} stale speculators.`);
-      return { pendingRemoved, staleRemoved };
-    } else {
-      logger.log('SUCCESS', '[CleanupHelpers] No stale users found in pending or blocks.');
-      return { pendingRemoved: 0, staleRemoved: 0 };
-    }
-  } catch (error) {
-    logger.log('ERROR', '[CleanupHelpers] FATAL error during cleanup orchestration', { errorMessage: error.message, errorStack: error.stack });
-    throw error;
-  }
+  if (pendingRemoved > 0 || staleRemoved > 0) { await finalBatch.commit(); logger.log('SUCCESS', `[CleanupHelpers] Cleanup commit successful. Removed ${pendingRemoved} pending, ${staleRemoved} stale speculators.`); return { pendingRemoved, staleRemoved };
+  } else { logger.log('SUCCESS', '[CleanupHelpers] No stale users found in pending or blocks.'); return { pendingRemoved: 0, staleRemoved: 0 }; }
+  } catch (error) { logger.log('ERROR', '[CleanupHelpers] FATAL error during cleanup orchestration', { errorMessage: error.message, errorStack: error.stack }); throw error; }
 };
 
 /**
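Structurally, the refactored runCleanup threads a single Firestore WriteBatch through both helpers and commits once, so pending and stale removals land atomically (a WriteBatch holds at most 500 writes, so very large cleanups would need splitting). A minimal sketch of that hand-off, with hypothetical step functions rather than the package's own:

    // Sketch: each step stages writes on the shared batch and reports a count;
    // nothing touches Firestore until the single commit at the end.
    async function runBatchedCleanup(db, steps) {
      let batch = db.batch();
      let total = 0;
      for (const step of steps) {
        const { batch: next, count } = await step(batch); // steps pass the batch along
        batch = next;
        total += count;
      }
      if (total > 0) await batch.commit(); // one atomic commit (max 500 writes)
      return total;
    }
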
@@ -43,18 +28,15 @@ async function cleanupPendingSpeculators(config, dependencies) {
   const { db, logger } = dependencies;
   logger.log('INFO', '[CleanupHelpers] Starting pending speculator cleanup...');
 
-  const batch = db.batch();
+  const batch = db.batch();
   let stalePendingUsersRemoved = 0;
-  const pendingCollectionRef = db.collection(config.pendingSpeculatorsCollectionName);
+  const pendingCollectionRef = db.collection(config.pendingSpeculatorsCollectionName);
   const staleThreshold = new Date();
   staleThreshold.setHours(staleThreshold.getHours() - config.pendingGracePeriodHours);
 
   try {
     const pendingSnapshot = await pendingCollectionRef.get();
-    if (pendingSnapshot.empty) {
-      logger.log('INFO', '[CleanupHelpers] Pending speculators collection is empty.');
-      return { batch, count: 0 };
-    }
+    if (pendingSnapshot.empty) { logger.log('INFO', '[CleanupHelpers] Pending speculators collection is empty.'); return { batch, count: 0 }; }
 
     for (const doc of pendingSnapshot.docs) {
       const pendingData = doc.data().users || {};
@@ -63,23 +45,11 @@ async function cleanupPendingSpeculators(config, dependencies) {
 
       for (const userId in pendingData) {
         const timestamp = pendingData[userId]?.toDate ? pendingData[userId].toDate() : null;
-        if (timestamp && timestamp < staleThreshold) {
-          updates[`users.${userId}`] = FieldValue.delete();
-          stalePendingUsersRemoved++;
-          updatesInDoc++;
-        }
-      }
+        if (timestamp && timestamp < staleThreshold) { updates[`users.${userId}`] = FieldValue.delete(); stalePendingUsersRemoved++; updatesInDoc++; } }
 
-      if (updatesInDoc > 0) {
-        logger.log('TRACE', `[CleanupHelpers] Marking ${updatesInDoc} users for removal from pending doc ${doc.id}`);
-        batch.update(doc.ref, updates);
-      }
-    }
+      if (updatesInDoc > 0) { logger.log('TRACE', `[CleanupHelpers] Marking ${updatesInDoc} users for removal from pending doc ${doc.id}`); batch.update(doc.ref, updates); } }
     logger.log('INFO', `[CleanupHelpers] Marked ${stalePendingUsersRemoved} total stale pending users for removal.`);
-  } catch (error) {
-    logger.log('ERROR', '[CleanupHelpers] Error cleaning pending speculators', { errorMessage: error.message });
-    throw error;
-  }
+  } catch (error) { logger.log('ERROR', '[CleanupHelpers] Error cleaning pending speculators', { errorMessage: error.message }); throw error; }
   return { batch, count: stalePendingUsersRemoved };
 }
 
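The removal in this hunk never rewrites the whole users map: it builds dot-path keys like users.<id> and assigns FieldValue.delete(), so update() drops just those entries. A standalone sketch of the idiom (names illustrative):

    const { FieldValue } = require('@google-cloud/firestore');

    // Sketch: delete selected keys from a document's `users` map field.
    async function removeUsersFromDoc(docRef, userIds) {
      if (userIds.length === 0) return;
      const updates = {};
      for (const id of userIds) updates[`users.${id}`] = FieldValue.delete();
      await docRef.update(updates); // dot paths target nested map entries
    }
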
@@ -90,17 +60,14 @@ async function cleanupStaleSpeculators(config, dependencies, batch) {
   const { db, logger } = dependencies;
   logger.log('INFO', '[CleanupHelpers] Starting stale speculator cleanup from blocks...');
   let totalUsersRemoved = 0;
-  const blocksCollectionRef = db.collection(config.speculatorBlocksCollectionName);
+  const blocksCollectionRef = db.collection(config.speculatorBlocksCollectionName);
   const gracePeriodDate = new Date();
   gracePeriodDate.setDate(gracePeriodDate.getDate() - config.activityGracePeriodDays);
   const blockCountsUpdate = {};
 
   try {
     const blocksSnapshot = await blocksCollectionRef.get();
-    if (blocksSnapshot.empty) {
-      logger.log('INFO', '[CleanupHelpers] Speculator blocks collection is empty.');
-      return { batch, count: 0 };
-    }
+    if (blocksSnapshot.empty) { logger.log('INFO', '[CleanupHelpers] Speculator blocks collection is empty.'); return { batch, count: 0 }; }
 
     for (const doc of blocksSnapshot.docs) {
       const blockId = doc.id;
@@ -116,45 +83,19 @@ async function cleanupStaleSpeculators(config, dependencies, batch) {
       const userData = users[userKey];
       const lastHeldTimestamp = userData.lastHeldSpeculatorAsset?.toDate ? userData.lastHeldSpeculatorAsset.toDate() : null;
 
-      if (lastHeldTimestamp && lastHeldTimestamp < gracePeriodDate) {
-        updates[userKey] = FieldValue.delete();
-        usersRemovedFromBlock++;
+      if (lastHeldTimestamp && lastHeldTimestamp < gracePeriodDate) { updates[userKey] = FieldValue.delete(); usersRemovedFromBlock++;
 
       if (userData.instruments && Array.isArray(userData.instruments)) {
-        userData.instruments.forEach(instrumentId => {
-          const instrumentBlockKey = `${instrumentId}_${blockId}`;
-          if (!blockCountsUpdate[instrumentBlockKey]) {
-            blockCountsUpdate[instrumentBlockKey] = 0;
-          }
-          blockCountsUpdate[instrumentBlockKey]--;
-        });
-      }
-    }
-  }
-
-  if (usersRemovedFromBlock > 0) {
-    logger.log('TRACE', `[CleanupHelpers] Marking ${usersRemovedFromBlock} users for removal from block ${blockId}.`);
-    batch.update(doc.ref, updates);
-    totalUsersRemoved += usersRemovedFromBlock;
-  }
-  }
+        userData.instruments.forEach(instrumentId => { const instrumentBlockKey = `${instrumentId}_${blockId}`; if (!blockCountsUpdate[instrumentBlockKey]) { blockCountsUpdate[instrumentBlockKey] = 0; } blockCountsUpdate[instrumentBlockKey]--; }); } } }
+      if (usersRemovedFromBlock > 0) { logger.log('TRACE', `[CleanupHelpers] Marking ${usersRemovedFromBlock} users for removal from block ${blockId}.`); batch.update(doc.ref, updates); totalUsersRemoved += usersRemovedFromBlock; } }
 
     if (totalUsersRemoved > 0 && Object.keys(blockCountsUpdate).length > 0) {
-      const countsRef = db.doc(config.speculatorBlockCountsDocPath);
+      const countsRef = db.doc(config.speculatorBlockCountsDocPath);
       const finalCountUpdates = {};
-      for (const key in blockCountsUpdate) {
-        finalCountUpdates[`counts.${key}`] = FieldValue.increment(blockCountsUpdate[key]);
-      }
+      for (const key in blockCountsUpdate) { finalCountUpdates[`counts.${key}`] = FieldValue.increment(blockCountsUpdate[key]); }
       logger.log('TRACE', '[CleanupHelpers] Staging block count decrements.', { updates: finalCountUpdates });
-      batch.set(countsRef, finalCountUpdates, { merge: true });
-    }
-
+      batch.set(countsRef, finalCountUpdates, { merge: true }); }
     logger.log('INFO', `[CleanupHelpers] Marked ${totalUsersRemoved} total stale speculators for removal from blocks.`);
-
-  } catch (error) {
-    logger.log('ERROR', '[CleanupHelpers] Error cleaning stale speculators', { errorMessage: error.message });
-    throw error;
-  }
-
+  } catch (error) { logger.log('ERROR', '[CleanupHelpers] Error cleaning stale speculators', { errorMessage: error.message }); throw error; }
   return { batch, count: totalUsersRemoved };
 }
 
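The counts bookkeeping above first accumulates per-instrument_block deltas in a plain object, then converts them into FieldValue.increment() writes merged into one counts document, so concurrent writers do not clobber each other's totals. A sketch of that accumulate-then-increment pattern (names illustrative):

    const { FieldValue } = require('@google-cloud/firestore');

    // Sketch: turn an in-memory delta map into increment() writes on one doc.
    function stageCountDeltas(batch, countsRef, deltas) {
      const updates = {};
      for (const key in deltas) {
        if (deltas[key] !== 0) updates[`counts.${key}`] = FieldValue.increment(deltas[key]);
      }
      // merge:true creates missing count fields instead of failing the write
      if (Object.keys(updates).length > 0) batch.set(countsRef, updates, { merge: true });
    }
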
package/functions/task-engine/handler_creator.js

@@ -5,12 +5,7 @@
  */
 
 // Import the new utility functions that contain the logic
-const {
-  parseTaskPayload,
-  prepareTaskBatches,
-  runUsernameLookups,
-  executeTasks
-} = require('./utils/task_engine_utils'); // <-- Import from our new utils file
+const { parseTaskPayload, prepareTaskBatches, runUsernameLookups, executeTasks } = require('./utils/task_engine_utils');
 
 /**
  * Main pipe: pipe.taskEngine.handleRequest
@@ -31,8 +26,7 @@ async function handleRequest(message, context, config, dependencies) {
     await runUsernameLookups(tasksToRun, cidsToLookup, dependencies, config, batchManager, logger);
     await executeTasks(tasksToRun, otherTasks, dependencies, config, taskId);
   } catch (error) {logger.log('ERROR', `[TaskEngine/${taskId}] Failed during batch processing.`, { errorMessage: error.message, errorStack: error.stack });
-  } finally {
-    try {logger.log('INFO', `[TaskEngine/${taskId}] Flushing all accumulated batches...`);
+  } finally { try {logger.log('INFO', `[TaskEngine/${taskId}] Flushing all accumulated batches...`);
     await batchManager.flushBatches();
     logger.log('INFO', `[TaskEngine/${taskId}] Final batch and header flush complete.`);} catch (flushError) {logger.log('ERROR', `[TaskEngine/${taskId}] Error during final flush attempt.`, { error: flushError.message });}}
 }
package/functions/task-engine/helpers/update_helpers.js

@@ -23,13 +23,11 @@ async function lookupUsernames(cids, { logger, headerManager, proxyManager }, {
     if (!header) { logger.log('ERROR', '[lookupUsernames] Could not select a header.'); continue; }
     let success = false;
     try {
-      const res = await proxyManager.fetch(`${ETORO_API_RANKINGS_URL}?Period=LastTwoYears`, {
-        method: 'POST', headers: { ...header.header, 'Content-Type': 'application/json' }, body: JSON.stringify(batch)
-      });
+      const res = await proxyManager.fetch(`${ETORO_API_RANKINGS_URL}?Period=LastTwoYears`, { method: 'POST', headers: { ...header.header, 'Content-Type': 'application/json' }, body: JSON.stringify(batch) });
       if (!res.ok) throw new Error(`API status ${res.status}`);
       const data = await res.json();
       if (Array.isArray(data)) allUsers.push(...data);
-      success = true;
+      success = true; logger.log('DEBUG', 'Looked up usernames', { batch })
     } catch (err) {
       logger.log('WARN', `[lookupUsernames] Failed batch`, { error: err.message });
     } finally { headerManager.updatePerformance(header.id, success); }
@@ -38,110 +36,46 @@ async function lookupUsernames(cids, { logger, headerManager, proxyManager }, {
   return allUsers;
 }
 
-// --- START MODIFICATION: Added historyFetchedForUser argument ---
 async function handleUpdate(task, taskId, { logger, headerManager, proxyManager, db, batchManager }, config, username, historyFetchedForUser) {
-  const { userId, instruments, instrumentId, userType } = task;
-
-  // ⚠️ Support both old (instrumentId) and new (instruments array) format
-  const instrumentsToProcess = userType === 'speculator'
-    ? (instruments || [instrumentId]) // New format or fallback to old
-    : [undefined]; // Normal users don't have instruments
-
+  const { userId, instruments, instrumentId, userType } = task;
+  const instrumentsToProcess = userType === 'speculator' ? (instruments || [instrumentId]) : [undefined];
   const today = new Date().toISOString().slice(0, 10);
   const portfolioBlockId = `${Math.floor(parseInt(userId) / 1000000)}M`;
-
   let portfolioHeader = await headerManager.selectHeader();
   let historyHeader = null;
   if (!portfolioHeader) throw new Error("Could not select portfolio header.");
-
   let wasHistorySuccess = false, isPrivate = false;
-
-  // --- FIX: 'fetchHistory' MUST be declared here, outside the 'try' block ---
   let fetchHistory = false;
-
   try {
-    // Fetch history ONCE per user
     const promisesToRun = [];
-    // --- 'fetchHistory' is no longer declared here ---
-
     if (!historyFetchedForUser.has(userId)) {
       historyHeader = await headerManager.selectHeader();
-      if (historyHeader) {
-        fetchHistory = true; // This now sets the outer variable
-        historyFetchedForUser.add(userId);
-        const historyUrl = `${config.ETORO_API_USERSTATS_URL}${username}/trades/oneYearAgo?CopyAsAsset=true`;
-        promisesToRun.push(proxyManager.fetch(historyUrl, { headers: historyHeader.header }));
-      }
-    }
-
-    // Process history result (if fetched)
+      if (historyHeader) { fetchHistory = true; historyFetchedForUser.add(userId); const historyUrl = `${config.ETORO_API_USERSTATS_URL}${username}/trades/oneYearAgo?CopyAsAsset=true`; promisesToRun.push(proxyManager.fetch(historyUrl, { headers: historyHeader.header })); } }
     if (fetchHistory) {
       const results = await Promise.allSettled(promisesToRun);
       const historyRes = results[0];
-      if (historyRes.status === 'fulfilled' && historyRes.value.ok) {
-        const data = await historyRes.value.json();
-        wasHistorySuccess = true;
-        await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType);
-      }
-    }
-
-    // Now fetch portfolio for EACH instrument (speculators) or once (normal)
+      if (historyRes.status === 'fulfilled' && historyRes.value.ok) { const data = await historyRes.value.json(); wasHistorySuccess = true; await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType); } }
     for (const instrumentId of instrumentsToProcess) {
-      const portfolioUrl = userType === 'speculator'
-        ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instrumentId}`
-        : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
-
+      const portfolioUrl = userType === 'speculator' ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instrumentId}` : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
       let wasPortfolioSuccess = false;
-
       const portfolioRes = await proxyManager.fetch(portfolioUrl, { headers: portfolioHeader.header });
-
       if (portfolioRes.ok) {
         const body = await portfolioRes.text();
-        if (body.includes("user is PRIVATE")) {
-          isPrivate = true;
-
-        } else {
-          wasPortfolioSuccess = true;
-          await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, JSON.parse(body), userType, instrumentId);
-        }
-      }
-
+        if (body.includes("user is PRIVATE")) { isPrivate = true; logger.log('WARN', `User ${userId} is private. Removing from updates.`) ; break;
+        } else { wasPortfolioSuccess = true; await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, JSON.parse(body), userType, instrumentId); } }
+      logger.log('DEBUG', 'Processing portfolio for user', { userId, portfolioUrl })
       headerManager.updatePerformance(portfolioHeader.id, wasPortfolioSuccess);
-
-      // Re-select header for next instrument
       if (instrumentsToProcess.length > 1 && instrumentId !== instrumentsToProcess[instrumentsToProcess.length - 1]) {
-        portfolioHeader = await headerManager.selectHeader();
-      }
-    }
-
-    // Handle private user
+        portfolioHeader = await headerManager.selectHeader(); } }
     if (isPrivate) {
       logger.log('WARN', `User ${userId} is private. Removing from updates.`);
-
-      for (const instrumentId of instrumentsToProcess) {
-        await batchManager.deleteFromTimestampBatch(userId, userType, instrumentId);
-      }
+      for (const instrumentId of instrumentsToProcess) { await batchManager.deleteFromTimestampBatch(userId, userType, instrumentId); }
       const blockCountsRef = db.doc(config.FIRESTORE_DOC_SPECULATOR_BLOCK_COUNTS);
-      for (const instrumentId of instrumentsToProcess) {
-        const incrementField = `counts.${instrumentId}_${Math.floor(userId/1e6)*1e6}`;
-        await blockCountsRef.set({ [incrementField]: FieldValue.increment(-1) }, { merge: true });
-      }
-
-      // Update timestamps
-      for (const instrumentId of instrumentsToProcess) {
-        await batchManager.updateUserTimestamp(userId, userType, instrumentId);
-      }
-      if (userType === 'speculator') {
-        await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6));
-      }
-
-  } finally {
-    if (historyHeader && fetchHistory) {
-      headerManager.updatePerformance(historyHeader.id, wasHistorySuccess);
-    }
-  }
+      for (const instrumentId of instrumentsToProcess) { const incrementField = `counts.${instrumentId}_${Math.floor(userId/1e6)*1e6}`; await blockCountsRef.set({ [incrementField]: FieldValue.increment(-1) }, { merge: true }); }
+      return; }
+    for (const instrumentId of instrumentsToProcess) { await batchManager.updateUserTimestamp(userId, userType, instrumentId); }
+    if (userType === 'speculator') { await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6)); }
+  } finally { if (historyHeader && fetchHistory) { headerManager.updatePerformance(historyHeader.id, wasHistorySuccess); }}
 }
 
 module.exports = { handleUpdate, lookupUsernames };
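A pattern repeated across handleSocialTask, lookupUsernames, and handleUpdate in these hunks: select a header, fetch through the proxy manager, and report the outcome in a finally block so the header's score is updated on every exit path. The sketch below restates that contract in isolation; selectHeader, updatePerformance, and proxyManager.fetch are assumed interfaces matching their usage in the hunks above, not a published API:

    // Sketch: fetch with a rotating header and always score the attempt.
    async function fetchWithHeaderScoring(url, { headerManager, proxyManager }) {
      const header = await headerManager.selectHeader();
      if (!header) throw new Error('No header available.');
      let success = false;
      try {
        const res = await proxyManager.fetch(url, { headers: header.header });
        if (!res.ok) throw new Error(`API status ${res.status}`);
        success = true;
        return res;
      } finally {
        headerManager.updatePerformance(header.id, success); // runs on success and failure
      }
    }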