bulltrackers-module 1.0.104 → 1.0.106
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.MD +222 -222
- package/functions/appscript-api/helpers/errors.js +19 -19
- package/functions/appscript-api/index.js +58 -58
- package/functions/computation-system/helpers/orchestration_helpers.js +647 -113
- package/functions/computation-system/utils/data_loader.js +191 -191
- package/functions/computation-system/utils/utils.js +149 -254
- package/functions/core/utils/firestore_utils.js +433 -433
- package/functions/core/utils/pubsub_utils.js +53 -53
- package/functions/dispatcher/helpers/dispatch_helpers.js +47 -47
- package/functions/dispatcher/index.js +52 -52
- package/functions/etoro-price-fetcher/helpers/handler_helpers.js +124 -124
- package/functions/fetch-insights/helpers/handler_helpers.js +91 -91
- package/functions/generic-api/helpers/api_helpers.js +379 -379
- package/functions/generic-api/index.js +150 -150
- package/functions/invalid-speculator-handler/helpers/handler_helpers.js +75 -75
- package/functions/orchestrator/helpers/discovery_helpers.js +226 -226
- package/functions/orchestrator/helpers/update_helpers.js +92 -92
- package/functions/orchestrator/index.js +147 -147
- package/functions/price-backfill/helpers/handler_helpers.js +116 -123
- package/functions/social-orchestrator/helpers/orchestrator_helpers.js +61 -61
- package/functions/social-task-handler/helpers/handler_helpers.js +288 -288
- package/functions/task-engine/handler_creator.js +78 -78
- package/functions/task-engine/helpers/discover_helpers.js +125 -125
- package/functions/task-engine/helpers/update_helpers.js +118 -118
- package/functions/task-engine/helpers/verify_helpers.js +162 -162
- package/functions/task-engine/utils/firestore_batch_manager.js +258 -258
- package/index.js +105 -113
- package/package.json +45 -45
- package/functions/computation-system/computation_dependencies.json +0 -120
- package/functions/computation-system/helpers/worker_helpers.js +0 -340
- package/functions/computation-system/utils/computation_state_manager.js +0 -178
- package/functions/computation-system/utils/dependency_graph.js +0 -191
- package/functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers.js +0 -160
|
/**
 * @fileoverview Core Pub/Sub utility functions.
 * REFACTORED: All functions are now stateless and receive dependencies.
 * 'pubsub' (PubSub instance) and 'logger' are passed via a 'dependencies' object.
 */

/**
 * Publishes an array of tasks to a specified Pub/Sub topic in batches.
 * Publishing is best-effort: an individual message failure is logged and
 * skipped, and only messages that actually published are counted in the
 * final SUCCESS log (the previous per-message .catch() swallowed errors,
 * so failed messages were counted as published).
 * @async
 * @param {object} dependencies - Contains pubsub, logger.
 * @param {object} config - Configuration object.
 * @param {string} config.topicName - The name of the Pub/Sub topic.
 * @param {Array<object>} config.tasks - The tasks to publish.
 * @param {string} config.taskType - A descriptor for the task type (for logging).
 * @param {number} [config.maxPubsubBatchSize=500] - Max messages to publish in one client batch.
 * @returns {Promise<void>}
 */
async function batchPublishTasks(dependencies, config) {
  const { pubsub, logger } = dependencies;
  const { topicName, tasks, taskType, maxPubsubBatchSize = 500 } = config;

  if (!tasks || tasks.length === 0) {
    logger.log('INFO',`[Core Utils] No ${taskType} tasks to publish to ${topicName}.`);
    return;
  }
  logger.log('INFO',`[Core Utils] Publishing ${tasks.length} ${taskType} tasks to ${topicName}...`);
  const topic = pubsub.topic(topicName);
  let messagesPublished = 0;
  let messagesFailed = 0;

  try {
    for (let i = 0; i < tasks.length; i += maxPubsubBatchSize) {
      const batchTasks = tasks.slice(i, i + maxPubsubBatchSize);
      // allSettled so one bad message does not abort the batch, while still
      // letting us distinguish real successes from failures.
      const outcomes = await Promise.allSettled(
        batchTasks.map(task => topic.publishMessage({ data: Buffer.from(JSON.stringify(task)) }))
      );
      outcomes.forEach((outcome, idx) => {
        if (outcome.status === 'fulfilled') {
          messagesPublished += 1;
        } else {
          messagesFailed += 1;
          logger.log('ERROR', `[Core Utils] Failed to publish single message for ${taskType}`, { error: outcome.reason?.message, task: batchTasks[idx] });
        }
      });
      // i is always a multiple of maxPubsubBatchSize, so this is the 1-based batch number.
      logger.log('TRACE', `[Core Utils] Published batch ${i / maxPubsubBatchSize + 1} for ${taskType} (${batchTasks.length} messages)`);
    }

    if (messagesFailed > 0) {
      logger.log('WARN', `[Core Utils] ${messagesFailed} ${taskType} tasks failed to publish to ${topicName}.`);
    }
    logger.log('SUCCESS', `[Core Utils] Finished publishing ${messagesPublished} ${taskType} tasks to ${topicName}.`);

  } catch (error) {
    logger.log('ERROR', `[Core Utils] Error during batch publishing of ${taskType} tasks to ${topicName}`, { errorMessage: error.message });
    throw error;
  }
}
|
50
|
+
|
|
51
|
+
module.exports = {
|
|
52
|
+
batchPublishTasks,
|
|
53
|
+
};
|
|
/**
 * @fileoverview Sub-pipe for dispatching tasks.
 * REFACTORED: Now stateless and receives dependencies.
 */

// Promise-based delay used to pace batches.
const sleep = (ms) => new Promise(resolve => setTimeout(resolve, ms));

/**
 * Sub-pipe: pipe.dispatcher.dispatchTasksInBatches
 * Publishes tasks to a Pub/Sub topic in fixed-size batches with a delay
 * between batches. A failed batch is logged and skipped (best-effort); the
 * returned count and the final SUCCESS log reflect only tasks/batches that
 * actually succeeded.
 * @param {Array} tasks - Array of tasks to publish.
 * @param {object} dependencies - Contains pubsub, logger.
 * @param {object} config - Configuration object.
 * @param {string} config.topicName - Target Pub/Sub topic name.
 * @param {number} config.batchSize - Number of tasks per batch.
 * @param {number} config.batchDelayMs - Delay between batches in milliseconds.
 * @returns {Promise<number>} Count of tasks successfully queued.
 */
async function dispatchTasksInBatches(tasks, dependencies, config) {
  const { pubsub, logger } = dependencies;
  const { topicName, batchSize, batchDelayMs } = config;
  const topic = pubsub.topic(topicName);
  let totalTasksQueued = 0;
  let batchesDispatched = 0;

  logger.log('INFO', `[Module Dispatcher] Received ${tasks.length} tasks. Creating batches...`);

  for (let i = 0; i < tasks.length; i += batchSize) {
    const batch = tasks.slice(i, i + batchSize);
    // i is always a multiple of batchSize, so this is the 1-based batch number.
    const batchNumber = i / batchSize + 1;

    try {
      await Promise.all(batch.map(task => topic.publishMessage({ json: task })));
      totalTasksQueued += batch.length;
      batchesDispatched += 1;
      logger.log('INFO', `[Module Dispatcher] Dispatched batch ${batchNumber} with ${batch.length} tasks.`);

      // Pace the publisher between batches; skip the delay after the last one.
      if (i + batchSize < tasks.length) {
        await sleep(batchDelayMs);
      }
    } catch (publishError) {
      // Best-effort: log the failed batch and continue with the next one.
      logger.log('ERROR', `[Module Dispatcher] Failed to publish batch ${batchNumber}. Error: ${publishError.message}`, { errorStack: publishError.stack });
    }
  }

  // FIX: report the number of batches that actually succeeded; the old log
  // claimed Math.ceil(tasks.length / batchSize) batches even when some failed.
  logger.log('SUCCESS', `[Module Dispatcher] Successfully dispatched ${totalTasksQueued} tasks in ${batchesDispatched} batches.`);
  return totalTasksQueued;
}
|
44
|
+
|
|
45
|
+
module.exports = {
|
|
46
|
+
dispatchTasksInBatches
|
|
47
|
+
};
|
|
/**
 * @fileoverview Main entry point for the Dispatcher function.
 * REFACTORED: This file now contains the main pipe function 'handleRequest'.
 */

// Direct require of the sub-pipe avoids the former circular dependency on ../../index.
const { dispatchTasksInBatches } = require('./helpers/dispatch_helpers');


/**
 * Main pipe: pipe.dispatcher.handleRequest
 * This is the Pub/Sub triggered handler. Decodes the base64 message payload,
 * validates the task list and config, then hands off to dispatchTasksInBatches.
 * Malformed messages are logged and acknowledged (return); config errors and
 * dispatch failures are rethrown so Pub/Sub can retry.
 * @param {object} message - The Pub/Sub message.
 * @param {object} context - The message context.
 * @param {object} config - The dispatcher-specific config.
 * @param {string} config.topicName - Target topic name.
 * @param {number} config.batchSize - Tasks per batch (must be a positive number).
 * @param {number} config.batchDelayMs - Delay between batches in ms (0 is valid).
 * @param {object} dependencies - Contains pubsub, logger.
 */
async function handleRequest(message, context, config, dependencies) {
  const { logger } = dependencies;
  try {
    if (!message.data) {
      logger.log('WARN', '[Module Dispatcher] Received message without data.');
      return;
    }
    const decodedMessage = JSON.parse(Buffer.from(message.data, 'base64').toString());
    const { tasks } = decodedMessage;

    if (!Array.isArray(tasks) || tasks.length === 0) {
      logger.log('WARN', '[Module Dispatcher] Received message with no valid tasks. Nothing to do.');
      return;
    }

    // FIX: validate numerically instead of by truthiness — the old check
    // (!config.batchDelayMs) rejected a perfectly valid delay of 0 ms.
    const hasValidConfig = config
      && config.topicName
      && typeof config.batchSize === 'number' && config.batchSize > 0
      && typeof config.batchDelayMs === 'number' && config.batchDelayMs >= 0;
    if (!hasValidConfig) {
      logger.log('ERROR', '[Module Dispatcher] Invalid configuration provided.', { config });
      throw new Error("Dispatcher module received invalid configuration.");
    }

    await dispatchTasksInBatches(tasks, dependencies, config);

  } catch (error) {
    logger.log('ERROR', '[Module Dispatcher] FATAL error processing message', { errorMessage: error.message, errorStack: error.stack });
    throw error;
  }
}
|
50
|
+
|
|
51
|
+
module.exports = {
|
|
52
|
+
handleRequest,
|
|
53
53
|
};
|
|
/**
 * @fileoverview Main pipe: pipe.maintenance.runFetchPrices
 * REFACTORED: Now writes to the new sharded `asset_prices` collection.
 * FIXED: Uses docRef.update() to correctly merge nested price maps
 * instead of docRef.set({ merge: true }) which creates flat keys.
 */
const { FieldValue } = require('@google-cloud/firestore');

// How many tickers to group into one Firestore document
const SHARD_SIZE = 40;

/**
 * Main pipe: pipe.maintenance.runFetchPrices
 *
 * Flow: select a rotating request header -> fetch the eToro closing-prices
 * endpoint through the proxy manager -> group each instrument's daily price
 * into one of SHARD_SIZE shard documents -> commit all shard updates in
 * parallel. Header performance is recorded and flushed in `finally`
 * regardless of success.
 *
 * @param {object} config - Configuration object. Must include `etoroApiUrl`.
 * @param {object} dependencies - Contains db, logger, headerManager, proxyManager.
 * @returns {Promise<{success: boolean, message: string, instrumentsProcessed?: number}>}
 * @throws Rethrows any fetch/parse/Firestore error after logging it.
 */
exports.fetchAndStorePrices = async (config, dependencies) => {
  const { db, logger, headerManager, proxyManager } = dependencies;

  logger.log('INFO', '[PriceFetcherHelpers] Starting Daily Closing Price Update...');
  let selectedHeader = null;
  // Feeds headerManager.updatePerformance in `finally`; flipped to true only
  // after an HTTP-OK response, so parse/Firestore failures still count as success
  // for the header's stats.
  let wasSuccessful = false;

  // --- NEW: Use the new config key, or fallback to the old one --- TODO Implement the config
  const priceCollectionName = 'asset_prices';

  try {
    if (!config.etoroApiUrl) {
      throw new Error("Missing required configuration: etoroApiUrl.");
    }

    selectedHeader = await headerManager.selectHeader();
    if (!selectedHeader || !selectedHeader.header) {
      throw new Error("Could not select a valid header for the request.");
    }

    const fetchOptions = {
      headers: selectedHeader.header,
      timeout: 60000
    };

    logger.log('INFO', `[PriceFetcherHelpers] Using header ID: ${selectedHeader.id}`);

    const response = await proxyManager.fetch(config.etoroApiUrl, fetchOptions);

    // Guard against proxyManager returning something that is not fetch-Response-shaped.
    if (!response || typeof response.text !== 'function') {
      throw new Error(`Invalid response structure received from proxy.`);
    }

    if (!response.ok) {
      const errorBody = await response.text();
      throw new Error(`API returned status ${response.status}: ${errorBody}`);
    }
    wasSuccessful = true;

    const results = await response.json();
    if (!Array.isArray(results)) {
      throw new Error('Invalid response format from API. Expected an array.');
    }

    // --- START MODIFICATION ---

    logger.log('INFO', `[PriceFetcherHelpers] Received ${results.length} instrument prices. Sharding...`);
    const shardUpdates = {}; // { "shard_0": { ... }, "shard_1": { ... } }

    for (const instrumentData of results) {
      const dailyData = instrumentData?.ClosingPrices?.Daily;
      const instrumentId = instrumentData.InstrumentId;

      // NOTE(review): truthy checks skip instrumentId 0 and a Price of exactly 0.
      // Presumably neither occurs in practice — confirm against the API contract.
      if (instrumentId && dailyData?.Price && dailyData?.Date) {
        const instrumentIdStr = String(instrumentId);
        // Date is assumed ISO-like; keep only the YYYY-MM-DD prefix as the map key.
        const dateKey = dailyData.Date.substring(0, 10);

        // Determine shard ID
        const shardId = `shard_${parseInt(instrumentIdStr, 10) % SHARD_SIZE}`;

        if (!shardUpdates[shardId]) {
          shardUpdates[shardId] = {};
        }

        // Use dot notation to define the update path
        const pricePath = `${instrumentIdStr}.prices.${dateKey}`;
        const updatePath = `${instrumentIdStr}.lastUpdated`;

        shardUpdates[shardId][pricePath] = dailyData.Price;
        shardUpdates[shardId][updatePath] = FieldValue.serverTimestamp();
      }
    }

    // Commit all shard updates in parallel
    const batchPromises = [];
    for (const shardId in shardUpdates) {
      const docRef = db.collection(priceCollectionName).doc(shardId);
      const payload = shardUpdates[shardId];

      // --- THIS IS THE FIX ---
      // Use .update() to correctly merge data into nested maps.
      // Using .set(payload, { merge: true }) creates the flat, broken keys.
      // NOTE(review): .update() fails with NOT_FOUND if a shard document does
      // not exist yet — confirm the shard docs are pre-created elsewhere.
      batchPromises.push(docRef.update(payload));
      // --- END FIX ---
    }

    // Promise.all is fail-fast: one rejected shard write aborts the whole run.
    await Promise.all(batchPromises);

    // --- END MODIFICATION ---

    const successMessage = `Successfully processed and saved daily prices for ${results.length} instruments to ${batchPromises.length} shards.`;
    logger.log('SUCCESS', `[PriceFetcherHelpers] ${successMessage}`);
    return { success: true, message: successMessage, instrumentsProcessed: results.length };

  } catch (error) {
    logger.log('ERROR', '[PriceFetcherHelpers] Fatal error during closing price update', {
      errorMessage: error.message,
      errorStack: error.stack,
      headerId: selectedHeader ? selectedHeader.id : 'not-selected'
    });
    throw error;
  } finally {
    // Always record how the selected header performed, even on failure paths.
    if (selectedHeader) {
      await headerManager.updatePerformance(selectedHeader.id, wasSuccessful);
      await headerManager.flushPerformanceUpdates();
    }
  }
};