bulltrackers-module 1.0.152 → 1.0.154

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/functions/appscript-api/index.js +8 -38
  2. package/functions/computation-system/helpers/computation_pass_runner.js +38 -183
  3. package/functions/computation-system/helpers/orchestration_helpers.js +105 -326
  4. package/functions/computation-system/utils/data_loader.js +38 -133
  5. package/functions/computation-system/utils/schema_capture.js +7 -41
  6. package/functions/computation-system/utils/utils.js +37 -124
  7. package/functions/core/utils/firestore_utils.js +8 -46
  8. package/functions/core/utils/intelligent_header_manager.js +26 -128
  9. package/functions/core/utils/intelligent_proxy_manager.js +33 -171
  10. package/functions/core/utils/pubsub_utils.js +7 -24
  11. package/functions/dispatcher/helpers/dispatch_helpers.js +9 -30
  12. package/functions/dispatcher/index.js +7 -30
  13. package/functions/etoro-price-fetcher/helpers/handler_helpers.js +12 -80
  14. package/functions/fetch-insights/helpers/handler_helpers.js +18 -70
  15. package/functions/generic-api/helpers/api_helpers.js +28 -167
  16. package/functions/generic-api/index.js +49 -188
  17. package/functions/invalid-speculator-handler/helpers/handler_helpers.js +10 -47
  18. package/functions/orchestrator/helpers/discovery_helpers.js +1 -5
  19. package/functions/orchestrator/index.js +1 -6
  20. package/functions/price-backfill/helpers/handler_helpers.js +13 -69
  21. package/functions/social-orchestrator/helpers/orchestrator_helpers.js +5 -37
  22. package/functions/social-task-handler/helpers/handler_helpers.js +29 -186
  23. package/functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers.js +19 -78
  24. package/functions/task-engine/handler_creator.js +2 -8
  25. package/functions/task-engine/helpers/update_helpers.js +74 -100
  26. package/functions/task-engine/helpers/verify_helpers.js +11 -56
  27. package/functions/task-engine/utils/firestore_batch_manager.js +29 -65
  28. package/functions/task-engine/utils/task_engine_utils.js +14 -37
  29. package/index.js +45 -43
  30. package/package.json +1 -1
@@ -10,13 +10,7 @@
10
10
  const express = require('express');
11
11
  const cors = require('cors');
12
12
  const { FieldPath } = require('@google-cloud/firestore');
13
- const {
14
- buildCalculationMap,
15
- createApiHandler,
16
- getComputationStructure,
17
- createManifestHandler, // <-- IMPORT NEW HANDLER
18
- getDynamicSchema // <-- This helper's behavior has changed
19
- } = require('./helpers/api_helpers.js');
13
+ const { buildCalculationMap, createApiHandler, getComputationStructure,createManifestHandler, getDynamicSchema } = require('./helpers/api_helpers.js');
20
14
 
21
15
  /**
22
16
  * --- NEW: In-Memory Cache Handler ---
@@ -27,60 +21,29 @@ const {
27
21
  * @returns {function} The new (req, res) handler with caching logic.
28
22
  */
29
23
  const createCacheHandler = (handler, { logger }) => {
30
- // 1. Define the cache *outside* the handler so it persists
24
+ // 1. Cache
31
25
  const CACHE = {};
32
- const CACHE_TTL_MS = 10 * 60 * 1000; // 10 minute cache
33
-
26
+ const CACHE_TTL_MS = 10 * 60 * 1000;
34
27
  // 2. Return the new handler
35
- return async (req, res) => {
36
- const cacheKey = req.url; // Use the full URL (e.g., "/?computation=foo") as the key
37
- const now = Date.now();
38
-
39
- // 3. --- Cache HIT ---
40
- if (CACHE[cacheKey] && (now - CACHE[cacheKey].timestamp) < CACHE_TTL_MS) {
41
- logger.log('INFO', `[API] Cache HIT for ${cacheKey}`);
42
- // Send cached status and data
43
- return res.status(CACHE[cacheKey].status).send(CACHE[cacheKey].data);
44
- }
45
-
46
- // 4. --- Cache MISS ---
47
- logger.log('INFO', `[API] Cache MISS for ${cacheKey}`);
48
-
49
- // We need to capture the response from the original handler.
50
- // To do this, we "monkey-patch" res.send and res.status.
51
- const originalSend = res.send;
52
- const originalStatus = res.status;
53
-
54
- let capturedData = null;
55
- let capturedStatus = 200; // Default
56
-
57
- // Create a new status function
58
- res.status = (statusCode) => {
59
- capturedStatus = statusCode;
60
- // Call the original status function
61
- return originalStatus.call(res, statusCode);
62
- };
63
-
64
- // Create a new send function
65
- res.send = (data) => {
66
- capturedData = data;
67
- // Call the original send function
68
- return originalSend.call(res, data);
69
- };
70
-
71
- // 5. Call the original handler (which will now use our patched functions)
72
- await handler(req, res);
73
-
74
- // 6. If the response was successful, cache it
75
- if (capturedStatus === 200 && capturedData) {
76
- logger.log('INFO', `[API] Caching new entry for ${cacheKey}`);
77
- CACHE[cacheKey] = {
78
- data: capturedData,
79
- status: capturedStatus,
80
- timestamp: now
81
- };
82
- }
83
- };
28
+ return async (req, res) => { const cacheKey = req.url; const now = Date.now();
29
+ // 3. --- Cache HIT ---
30
+ if (CACHE[cacheKey] && (now - CACHE[cacheKey].timestamp) < CACHE_TTL_MS) {
31
+ logger.log('INFO', `[API] Cache HIT for ${cacheKey}`);
32
+ return res.status(CACHE[cacheKey].status).send(CACHE[cacheKey].data); }
33
+ // 4. --- Cache MISS ---
34
+ logger.log('INFO', `[API] Cache MISS for ${cacheKey}`);
35
+ const originalSend = res.send;
36
+ const originalStatus = res.status;
37
+ let capturedData = null;
38
+ let capturedStatus = 200;
39
+ res.status = (statusCode) => { capturedStatus = statusCode; return originalStatus.call(res, statusCode); };
40
+ res.send = (data) => { capturedData = data; return originalSend.call(res, data); };
41
+ // 5. Call the original handler (which will now use our patched functions)
42
+ await handler(req, res);
43
+ // 6. If the response was successful, cache it
44
+ if (capturedStatus === 200 && capturedData) {
45
+ logger.log('INFO', `[API] Caching new entry for ${cacheKey}`);
46
+ CACHE[cacheKey] = { data: capturedData, status: capturedStatus, timestamp: now }; } };
84
47
  };
85
48
 
86
49
 
@@ -94,8 +57,8 @@ const createCacheHandler = (handler, { logger }) => {
94
57
  */
95
58
  function createApiApp(config, dependencies, unifiedCalculations) {
96
59
  const app = express();
97
- const { logger, db } = dependencies;
98
-
60
+ const { logger, db } = dependencies;
61
+
99
62
  // --- Pre-compute Calculation Map (now includes classes) ---
100
63
  const calcMap = buildCalculationMap(unifiedCalculations);
101
64
 
@@ -104,126 +67,58 @@ function createApiApp(config, dependencies, unifiedCalculations) {
104
67
  app.use(express.json());
105
68
 
106
69
  // --- Main API Endpoint ---
107
- // MODIFIED: We now wrap the original handler with the cache handler.
108
70
  const originalApiHandler = createApiHandler(config, dependencies, calcMap);
109
71
  const cachedApiHandler = createCacheHandler(originalApiHandler, dependencies);
110
- app.get('/', cachedApiHandler); // <-- Use the new caching handler
72
+ app.get('/', cachedApiHandler);
111
73
 
112
74
  // --- Health Check Endpoint ---
113
- app.get('/health', (req, res) => {
114
- res.status(200).send('OK');
115
- });
75
+ app.get('/health', (req, res) => { res.status(200).send('OK'); });
116
76
 
117
77
  // --- Debug Endpoint to list all computation keys ---
118
78
  app.get('/list-computations', (req, res) => {
119
- try {
120
- const computationKeys = Object.keys(calcMap);
121
- res.status(200).send({
122
- status: 'success',
123
- count: computationKeys.length,
124
- computations: computationKeys.sort(),
125
- });
126
- } catch (error) {
127
- logger.log('ERROR', 'API /list-computations failed.', { errorMessage: error.message });
128
- res.status(500).send({ status: 'error', message: 'An internal error occurred.' });
129
- }
130
- });
79
+ try { const computationKeys = Object.keys(calcMap);
80
+ res.status(200).send({ status: 'success', count: computationKeys.length, computations: computationKeys.sort(), });
81
+ } catch (error) { logger.log('ERROR', 'API /list-computations failed.', { errorMessage: error.message });
82
+ res.status(500).send({ status: 'error', message: 'An internal error occurred.' }); } });
131
83
 
132
84
  // --- Debug Endpoint to get *stored* structure from Firestore ---
133
85
  app.get('/structure/:computationName', async (req, res) => {
134
86
  const { computationName } = req.params;
135
87
  const result = await getComputationStructure(computationName, calcMap, config, dependencies);
136
- if (result.status === 'error') {
137
- const statusCode = result.message.includes('not found') ? 404 : 500;
138
- return res.status(statusCode).send(result);
139
- }
140
- res.status(200).send(result);
141
- });
142
-
143
- /**
144
- * ---!!!--- REPLACED SECTION ---!!!---
145
- * This route now uses the createManifestHandler to serve the
146
- * pre-generated schemas from Firestore.
147
- */
88
+ if (result.status === 'error') { const statusCode = result.message.includes('not found') ? 404 : 500;
89
+ return res.status(statusCode).send(result); }
90
+ res.status(200).send(result); });
148
91
  app.get('/manifest', createManifestHandler(config, dependencies, calcMap));
149
-
150
-
151
- /**
152
- * ---!!!--- REPLACED SECTION ---!!!---
153
- * This endpoint now reads the *static* getSchema() method from a class
154
- * and stores that in Firestore. It no longer does runtime inference.
155
- */
156
92
  app.post('/manifest/generate/:computationName', async (req, res) => {
157
93
  const { computationName } = req.params;
158
94
  logger.log('INFO', `Manual static schema generation requested for: ${computationName}`);
159
-
95
+
160
96
  try {
161
97
  // 1. Find the calculation class from the calcMap
162
98
  const calcInfo = calcMap[computationName];
163
-
164
- if (!calcInfo || !calcInfo.class) {
165
- return res.status(404).send({
166
- status: 'error',
167
- message: `Computation '${computationName}' not found or has no class in calculation map.`
168
- });
169
- }
170
-
99
+ if (!calcInfo || !calcInfo.class) { return res.status(404).send({ status: 'error', message: `Computation '${computationName}' not found or has no class in calculation map.` }); }
171
100
  const targetCalcClass = calcInfo.class;
172
101
  const targetCategory = calcInfo.category;
173
102
 
174
103
  // 2. Use the getDynamicSchema helper (which now just reads the static method)
175
104
  const schemaStructure = await getDynamicSchema(targetCalcClass, computationName);
105
+ if (schemaStructure.ERROR) { return res.status(400).send({ status: 'error', message: `Failed to get static schema: ${schemaStructure.ERROR}` }); }
176
106
 
177
- if (schemaStructure.ERROR) {
178
- return res.status(400).send({
179
- status: 'error',
180
- message: `Failed to get static schema: ${schemaStructure.ERROR}`
181
- });
182
- }
183
-
184
107
  // 3. Import the new batchStoreSchemas utility
185
- const { batchStoreSchemas } = require('../computation-system/utils/schema_capture');
108
+ const { batchStoreSchemas } = require('../computation-system/utils/schema_capture.js');
186
109
 
187
110
  // 4. Get metadata (as much as we can from the class)
188
- const metadata = {
189
- isHistorical: !!(targetCalcClass.toString().includes('yesterdayPortfolio')),
190
- dependencies: (typeof targetCalcClass.getDependencies === 'function') ? targetCalcClass.getDependencies() : [],
191
- rootDataDependencies: [], // Cannot be known here
192
- pass: 'unknown', // Cannot be known here
193
- type: (targetCategory === 'meta' || targetCategory === 'socialPosts') ? targetCategory : 'standard',
194
- note: "Manually generated via API"
195
- };
111
+ const metadata = { isHistorical: !!(targetCalcClass.toString().includes('yesterdayPortfolio')), dependencies: (typeof targetCalcClass.getDependencies === 'function') ? targetCalcClass.getDependencies() : [], rootDataDependencies: [], pass: 'unknown', type: (targetCategory === 'meta' || targetCategory === 'socialPosts') ? targetCategory : 'standard', note: "Manually generated via API" };
196
112
 
197
113
  // 5. Store the schema in Firestore
198
- await batchStoreSchemas(
199
- dependencies,
200
- config,
201
- [{
202
- name: computationName,
203
- category: targetCategory,
204
- schema: schemaStructure, // Pass the static schema
205
- metadata: metadata
206
- }]
207
- );
208
-
114
+ await batchStoreSchemas( dependencies, config, [{ name: computationName, category: targetCategory, schema: schemaStructure, metadata: metadata }] );
115
+
209
116
  // 6. Respond with the schema
210
- res.status(200).send({
211
- status: 'success',
212
- message: `Static schema read and stored for ${computationName}`,
213
- computation: computationName,
214
- category: targetCategory,
215
- schema: schemaStructure
216
- });
217
-
117
+ res.status(200).send({ status: 'success', message: `Static schema read and stored for ${computationName}`, computation: computationName, category: targetCategory, schema: schemaStructure });
118
+
218
119
  } catch (error) {
219
- logger.log('ERROR', `Failed to generate schema for ${computationName}`, {
220
- errorMessage: error.message,
221
- stack: error.stack
222
- });
223
- res.status(5.00).send({
224
- status: 'error',
225
- message: `Failed to generate/store schema: ${error.message}`
226
- });
120
+ logger.log('ERROR', `Failed to generate schema for ${computationName}`, { errorMessage: error.message, stack: error.stack });
121
+ res.status(500).send({ status: 'error', message: `Failed to generate/store schema: ${error.message}` });
227
122
  }
228
123
  });
229
124
 
@@ -233,49 +128,15 @@ function createApiApp(config, dependencies, unifiedCalculations) {
233
128
  */
234
129
  app.get('/manifest/:computationName', async (req, res) => {
235
130
  const { computationName } = req.params;
236
-
237
131
  try {
238
132
  const schemaCollection = config.schemaCollection || 'computation_schemas';
239
133
  const schemaDoc = await db.collection(schemaCollection).doc(computationName).get();
240
-
241
- if (!schemaDoc.exists) {
242
- return res.status(4404).send({
243
- status: 'error',
244
- message: `Schema not found for computation: ${computationName}`,
245
- hint: 'Try running the computation system or use POST /manifest/generate/:computationName'
246
- });
247
- }
248
-
249
- const data = schemaDoc.data();
250
-
251
- // --- Use the same response structure as /manifest ---
252
- res.status(200).send({
253
- status: 'success',
254
- computation: computationName,
255
- category: data.category,
256
- structure: data.schema,
257
- metadata: data.metadata || {},
258
- lastUpdated: data.lastUpdated
259
- });
260
-
261
- } catch (error) {
262
- logger.log('ERROR', `Failed to fetch schema for ${computationName}`, {
263
- errorMessage: error.message
264
- });
265
- res.status(500).send({
266
- status: 'error',
267
- message: 'An internal error occurred.'
268
- });
269
- }
270
- });
271
-
272
- // --- THIS MUST BE THE LAST LINE OF THE FUNCTION ---
134
+ if (!schemaDoc.exists) { return res.status(404).send({ status: 'error', message: `Schema not found for computation: ${computationName}`, hint: 'Try running the computation system or use POST /manifest/generate/:computationName' }); }
135
+ const data = schemaDoc.data();
136
+ res.status(200).send({ status: 'success', computation: computationName, category: data.category, structure: data.schema, metadata: data.metadata || {}, lastUpdated: data.lastUpdated });
137
+ } catch (error) { logger.log('ERROR', `Failed to fetch schema for ${computationName}`, { errorMessage: error.message }); res.status(500).send({ status: 'error', message: 'An internal error occurred.' }); }});
273
138
  return app;
274
139
  }
275
140
 
276
141
 
277
- module.exports = {
278
- createApiApp,
279
- // Exporting helpers so they can be part of the pipe.api.helpers object
280
- helpers: require('./helpers/api_helpers'),
281
- };
142
+ module.exports = { createApiApp, helpers: require('./helpers/api_helpers.js') };
@@ -15,61 +15,24 @@ exports.handleInvalidSpeculator = async (message, context, config, dependencies)
15
15
  const { db, logger } = dependencies;
16
16
  try {
17
17
  const { invalidCids } = JSON.parse(Buffer.from(message.data, 'base64').toString());
18
-
19
- if (!invalidCids || invalidCids.length === 0) {
20
- logger.log('WARN', 'Received message with no invalid CIDs. Nothing to do.');
21
- return;
22
- }
23
-
24
- const collectionRef = db.collection(config.invalidSpeculatorsCollectionName); // Use db
18
+ if (!invalidCids || invalidCids.length === 0) { logger.log('WARN', 'Received message with no invalid CIDs. Nothing to do.'); return; }
19
+ const collectionRef = db.collection(config.invalidSpeculatorsCollectionName);
25
20
  const maxPerDoc = config.maxInvalidUsersPerDoc;
26
-
27
- const querySnapshot = await collectionRef
28
- .where('userCount', '<', maxPerDoc)
29
- .limit(10)
30
- .get();
31
-
21
+ const querySnapshot = await collectionRef .where('userCount', '<', maxPerDoc) .limit(10) .get();
32
22
  let targetDocRef;
33
-
34
- if (!querySnapshot.empty) {
35
- const randomIndex = Math.floor(Math.random() * querySnapshot.docs.length);
36
- targetDocRef = querySnapshot.docs[randomIndex].ref;
37
- } else {
38
- targetDocRef = collectionRef.doc();
39
- }
40
-
41
- // Use db from dependencies
42
- await db.runTransaction(async (transaction) => {
43
- const doc = await transaction.get(targetDocRef);
44
-
23
+ if (!querySnapshot.empty) { const randomIndex = Math.floor(Math.random() * querySnapshot.docs.length); targetDocRef = querySnapshot.docs[randomIndex].ref;
24
+ } else { targetDocRef = collectionRef.doc(); }
25
+ await db.runTransaction(async (transaction) => { const doc = await transaction.get(targetDocRef);
45
26
  const updates = {};
46
27
  let newUsersCount = 0;
47
-
48
- if (!doc.exists) {
49
- updates.userCount = 0;
50
- }
51
-
28
+ if (!doc.exists) { updates.userCount = 0; }
52
29
  for (const cid of invalidCids) {
53
30
  const fieldPath = `users.${cid}`;
54
- if (!doc.exists || !doc.data().users || !doc.data().users[cid]) {
55
- updates[fieldPath] = true;
56
- newUsersCount++;
57
- }
58
- }
59
-
60
- if (newUsersCount > 0) {
61
- updates.userCount = FieldValue.increment(newUsersCount);
62
- transaction.set(targetDocRef, updates, { merge: true });
63
- }
64
- });
65
-
31
+ if (!doc.exists || !doc.data().users || !doc.data().users[cid]) {updates[fieldPath] = true; newUsersCount++; } }
32
+ if (newUsersCount > 0) { updates.userCount = FieldValue.increment(newUsersCount); transaction.set(targetDocRef, updates, { merge: true }); } });
66
33
  logger.log('SUCCESS', `Successfully stored ${invalidCids.length} invalid speculator IDs in document ${targetDocRef.id}.`);
67
-
68
34
  } catch (error) {
69
- logger.log('ERROR', 'FATAL Error in Invalid Speculator Handler', {
70
- errorMessage: error.message,
71
- errorStack: error.stack
72
- });
35
+ logger.log('ERROR', 'FATAL Error in Invalid Speculator Handler', { errorMessage: error.message, errorStack: error.stack });
73
36
  throw error;
74
37
  }
75
38
  };
@@ -105,8 +105,4 @@ async function dispatchDiscovery(userType, candidates, config, dependencies) {
105
105
  logger.log('SUCCESS', `[Orchestrator Helpers] Dispatched ${totalCidsPublished} CIDs in ${tasks.length} tasks, grouped into ${messagesPublished} Pub/Sub messages for ${userType} discovery.`);
106
106
  }
107
107
 
108
- module.exports = {
109
- checkDiscoveryNeed,
110
- getDiscoveryCandidates,
111
- dispatchDiscovery,
112
- };
108
+ module.exports = { checkDiscoveryNeed, getDiscoveryCandidates, dispatchDiscovery };
@@ -61,9 +61,4 @@ async function runUpdates(userType, updateConfig, globalConfig, deps) {
61
61
  logger.log('SUCCESS', `[Orchestrator] Dispatched update tasks for ${userType}.`);
62
62
  }
63
63
 
64
- module.exports = {
65
- runDiscoveryOrchestrator,
66
- runUpdateOrchestrator,
67
- runDiscovery,
68
- runUpdates
69
- };
64
+ module.exports = { runDiscoveryOrchestrator, runUpdateOrchestrator, runDiscovery, runUpdates };
@@ -4,15 +4,10 @@
4
4
  * candle API into the new sharded `asset_prices` collection.
5
5
  */
6
6
 
7
- const { FieldValue } = require('@google-cloud/firestore'); // Todo inject this
8
-
9
- const pLimit = require('p-limit'); // TODO inject this
10
-
11
- // How many tickers to fetch in parallel
7
+ const { FieldValue } = require('@google-cloud/firestore');
8
+ const pLimit = require('p-limit');
12
9
  const CONCURRENT_REQUESTS = 10;
13
- // How many days of history to fetch
14
10
  const DAYS_TO_FETCH = 365;
15
- // How many tickers to group into one Firestore document
16
11
  const SHARD_SIZE = 40;
17
12
 
18
13
  /**
@@ -20,97 +15,46 @@ const SHARD_SIZE = 40;
20
15
  * @param {object} config - Configuration object.
21
16
  * @param {object} dependencies - Contains db, logger, headerManager, proxyManager, calculationUtils.
22
17
  */
23
- // --- MODIFIED: Removed calculationUtils, as it's in dependencies ---
24
18
  exports.runBackfillAssetPrices = async (config, dependencies) => {
25
19
  const { db, logger, headerManager, proxyManager, calculationUtils } = dependencies;
26
- const { loadInstrumentMappings } = calculationUtils; // <-- Get function from dependencies
27
- // --- END MODIFIED ---
28
-
20
+ const { loadInstrumentMappings } = calculationUtils;
29
21
  logger.log('INFO', '[PriceBackfill] Starting historical price backfill...');
30
-
31
22
  let mappings;
32
23
  try {
33
- // --- MODIFIED: Use the injected utils ---
34
24
  mappings = await loadInstrumentMappings();
35
- // --- END MODIFIED ---
36
- if (!mappings || !mappings.instrumentToTicker) {
37
- throw new Error("Failed to load instrument mappings.");
38
- }
39
- } catch (e) {
40
- logger.log('ERROR', '[PriceBackfill] Could not load instrument mappings.', { err: e.message });
41
- return;
42
- }
43
-
25
+ if (!mappings || !mappings.instrumentToTicker) { throw new Error("Failed to load instrument mappings."); }
26
+ } catch (e) { logger.log('ERROR', '[PriceBackfill] Could not load instrument mappings.', { err: e.message }); return; }
44
27
  const instrumentIds = Object.keys(mappings.instrumentToTicker);
45
28
  logger.log('INFO', `[PriceBackfill] Found ${instrumentIds.length} instruments to backfill.`);
46
-
47
29
  const limit = pLimit(CONCURRENT_REQUESTS);
48
30
  let successCount = 0;
49
31
  let errorCount = 0;
50
-
51
32
  const promises = instrumentIds.map(instrumentId => {
52
33
  return limit(async () => {
53
34
  try {
54
35
  const ticker = mappings.instrumentToTicker[instrumentId] || `unknown_${instrumentId}`;
55
- const url = `https://candle.etoro.com/candles/asc.json/OneDay/${DAYS_TO_FETCH}/${instrumentId}`; //TODO implement config value
56
-
36
+ const url = `https://candle.etoro.com/candles/asc.json/OneDay/${DAYS_TO_FETCH}/${instrumentId}`;
57
37
  const selectedHeader = await headerManager.selectHeader();
58
38
  let wasSuccess = false;
59
-
60
- const response = await proxyManager.fetch(url, {
61
- headers: selectedHeader.header,
62
- timeout: 20000
63
- });
64
-
65
- if (!response.ok) {
66
- throw new Error(`API error ${response.status} for instrument ${instrumentId}`);
67
- }
39
+ const response = await proxyManager.fetch(url, { headers: selectedHeader.header, timeout: 20000 });
40
+ if (!response.ok) { throw new Error(`API error ${response.status} for instrument ${instrumentId}`); }
68
41
  wasSuccess = true;
69
42
  headerManager.updatePerformance(selectedHeader.id, wasSuccess);
70
-
71
43
  const data = await response.json();
72
44
  const candles = data?.Candles?.[0]?.Candles;
73
-
74
- if (!Array.isArray(candles) || candles.length === 0) {
75
- logger.log('WARN', `[PriceBackfill] No candle data returned for ${ticker} (${instrumentId})`);
76
- return;
77
- }
78
-
79
- // Format data as a map
45
+ if (!Array.isArray(candles) || candles.length === 0) { logger.log('WARN', `[PriceBackfill] No candle data returned for ${ticker} (${instrumentId})`); return; }
80
46
  const prices = {};
81
- for (const candle of candles) {
82
- const dateKey = candle.FromDate.substring(0, 10);
83
- prices[dateKey] = candle.Close;
84
- }
85
-
86
- // Determine shard ID
47
+ for (const candle of candles) { const dateKey = candle.FromDate.substring(0, 10); prices[dateKey] = candle.Close; }
87
48
  const shardId = `shard_${parseInt(instrumentId, 10) % SHARD_SIZE}`;
88
- const docRef = db.collection('asset_prices').doc(shardId); // TODO implement config value
89
-
90
- const payload = {
91
- [instrumentId]: {
92
- ticker: ticker,
93
- prices: prices,
94
- lastUpdated: FieldValue.serverTimestamp()
95
- }
96
- };
97
-
98
- // Write to Firestore
49
+ const docRef = db.collection('asset_prices').doc(shardId);
50
+ const payload = { [instrumentId]: { ticker: ticker, prices: prices, lastUpdated: FieldValue.serverTimestamp() } };
99
51
  await docRef.set(payload, { merge: true });
100
52
  logger.log('TRACE', `[PriceBackfill] Successfully stored data for ${ticker} (${instrumentId}) in ${shardId}`);
101
53
  successCount++;
102
-
103
54
  } catch (err) {
104
55
  logger.log('ERROR', `[PriceBackfill] Failed to process instrument ${instrumentId}`, { err: err.message });
105
- errorCount++;
106
- }
107
- });
108
- });
109
-
56
+ errorCount++; } }); });
110
57
  await Promise.all(promises);
111
-
112
- // Flush any remaining header updates
113
58
  await headerManager.flushPerformanceUpdates();
114
-
115
59
  logger.log('SUCCESS', `[PriceBackfill] Backfill complete. Success: ${successCount}, Failed: ${errorCount}`);
116
60
  };
@@ -13,50 +13,18 @@
13
13
  exports.runSocialOrchestrator = async (config, dependencies) => {
14
14
  const { logger, pubsubUtils } = dependencies;
15
15
  logger.log('INFO', '[SocialOrchestrator] Starting social post fetching orchestration...');
16
-
17
- // Validate configuration
18
- if (!config.targetTickerIds || !Array.isArray(config.targetTickerIds) || !config.socialFetchTaskTopicName || !config.socialFetchTimeWindowHours) {
19
- logger.log('ERROR', '[SocialOrchestrator] Missing required configuration: targetTickerIds (array), socialFetchTaskTopicName, or socialFetchTimeWindowHours.');
20
- throw new Error('Missing required configuration for Social Orchestrator.');
21
- }
22
-
16
+ if (!config.targetTickerIds || !Array.isArray(config.targetTickerIds) || !config.socialFetchTaskTopicName || !config.socialFetchTimeWindowHours) { logger.log('ERROR', '[SocialOrchestrator] Missing required configuration: targetTickerIds (array), socialFetchTaskTopicName, or socialFetchTimeWindowHours.'); throw new Error('Missing required configuration for Social Orchestrator.'); }
23
17
  try {
24
18
  const tasks = [];
25
19
  const { targetTickerIds, socialFetchTimeWindowHours } = config;
26
-
27
- // Calculate the 'since' timestamp
28
20
  const sinceTimestamp = new Date();
29
21
  sinceTimestamp.setHours(sinceTimestamp.getHours() - socialFetchTimeWindowHours);
30
-
31
- // Add a 15-minute buffer to ensure overlap
32
22
  sinceTimestamp.setMinutes(sinceTimestamp.getMinutes() - 15);
33
-
34
23
  const sinceISO = sinceTimestamp.toISOString();
35
-
36
- for (const tickerId of targetTickerIds) {
37
- tasks.push({
38
- tickerId: String(tickerId), // Ensure it's a string
39
- since: sinceISO
40
- });
41
- }
42
-
43
- if (tasks.length === 0) {
44
- logger.log('WARN', '[SocialOrchestrator] No target tickers found to process.');
45
- return { success: true, message: "No target tickers configured." };
46
- }
47
-
48
- // Use pubsubUtils to batch publish all ticker tasks
49
- await pubsubUtils.batchPublishTasks(dependencies, {
50
- topicName: config.socialFetchTaskTopicName,
51
- tasks: tasks,
52
- taskType: 'social-fetch-task'
53
- });
54
-
24
+ for (const tickerId of targetTickerIds) { tasks.push({ tickerId: String(tickerId), since: sinceISO }); }
25
+ if (tasks.length === 0) { logger.log('WARN', '[SocialOrchestrator] No target tickers found to process.'); return { success: true, message: "No target tickers configured." }; }
26
+ await pubsubUtils.batchPublishTasks(dependencies, { topicName: config.socialFetchTaskTopicName, tasks: tasks, taskType: 'social-fetch-task' });
55
27
  logger.log('SUCCESS', `[SocialOrchestrator] Successfully published ${tasks.length} social fetch tasks for window >= ${sinceISO}.`);
56
28
  return { success: true, tasksQueued: tasks.length };
57
-
58
- } catch (error) {
59
- logger.log('ERROR', '[SocialOrchestrator] Fatal error during orchestration.', { errorMessage: error.message, errorStack: error.stack });
60
- throw error;
61
- }
29
+ } catch (error) { logger.log('ERROR', '[SocialOrchestrator] Fatal error during orchestration.', { errorMessage: error.message, errorStack: error.stack }); throw error; }
62
30
  };