bulltrackers-module 1.0.199 → 1.0.201

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,82 @@
1
+ /**
2
+ * FILENAME: bulltrackers-module/functions/computation-system/helpers/computation_dispatcher.js
3
+ * PURPOSE: Dispatches computation tasks to Pub/Sub for scalable execution.
4
+ */
5
+
6
+ const { getExpectedDateStrings } = require('../utils/utils.js');
7
+ const { groupByPass } = require('./orchestration_helpers.js');
8
+
9
+ const TOPIC_NAME = 'computation-tasks';
10
+
11
+ /**
12
+ * Dispatches computation tasks for a specific pass.
13
+ * Instead of running them, it queues them in Pub/Sub.
14
+ */
15
+ async function dispatchComputationPass(config, dependencies, computationManifest) {
16
+ const { logger, pubsubUtils } = dependencies;
17
+ const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
18
+
19
+ if (!passToRun) {
20
+ return logger.log('ERROR', '[Dispatcher] No pass defined (COMPUTATION_PASS_TO_RUN). Aborting.');
21
+ }
22
+
23
+ logger.log('INFO', `🚀 [Dispatcher] Preparing to dispatch PASS ${passToRun}...`);
24
+
25
+ // 1. Determine Date Range (Same logic as PassRunner)
26
+ // Hardcoded earliest dates - keep synced with PassRunner for now
27
+ const earliestDates = {
28
+ portfolio: new Date('2025-09-25T00:00:00Z'),
29
+ history: new Date('2025-11-05T00:00:00Z'),
30
+ social: new Date('2025-10-30T00:00:00Z'),
31
+ insights: new Date('2025-08-26T00:00:00Z'),
32
+ price: new Date('2025-08-01T00:00:00Z')
33
+ };
34
+ const passEarliestDate = Object.values(earliestDates).reduce((a, b) => a < b ? a : b);
35
+ const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
36
+
37
+ const allExpectedDates = getExpectedDateStrings(passEarliestDate, endDateUTC);
38
+
39
+ // 2. Validate Pass Existence
40
+ const passes = groupByPass(computationManifest);
41
+ const calcsInThisPass = passes[passToRun] || [];
42
+
43
+ if (!calcsInThisPass.length) {
44
+ return logger.log('WARN', `[Dispatcher] No calcs for Pass ${passToRun}. Exiting.`);
45
+ }
46
+
47
+ logger.log('INFO', `[Dispatcher] Found ${calcsInThisPass.length} calcs for Pass ${passToRun}. Target dates: ${allExpectedDates.length}`);
48
+
49
+ // 3. Dispatch Messages
50
+ let dispatchedCount = 0;
51
+ const BATCH_SIZE = 50; // Pub/Sub batch publishing size
52
+
53
+ // We can publish in parallel batches
54
+ const chunks = [];
55
+ for (let i = 0; i < allExpectedDates.length; i += BATCH_SIZE) {
56
+ chunks.push(allExpectedDates.slice(i, i + BATCH_SIZE));
57
+ }
58
+
59
+ for (const chunk of chunks) {
60
+ const messages = chunk.map(dateStr => ({
61
+ json: {
62
+ action: 'RUN_COMPUTATION_DATE',
63
+ date: dateStr,
64
+ pass: passToRun,
65
+ timestamp: Date.now()
66
+ }
67
+ }));
68
+
69
+ try {
70
+ await pubsubUtils.publishMessageBatch(TOPIC_NAME, messages);
71
+ dispatchedCount += messages.length;
72
+ logger.log('INFO', `[Dispatcher] Dispatched batch of ${messages.length} tasks.`);
73
+ } catch (err) {
74
+ logger.log('ERROR', `[Dispatcher] Failed to dispatch batch: ${err.message}`);
75
+ }
76
+ }
77
+
78
+ logger.log('INFO', `[Dispatcher] Finished dispatching. Total tasks: ${dispatchedCount}`);
79
+ return { dispatched: dispatchedCount };
80
+ }
81
+
82
+ module.exports = { dispatchComputationPass };
@@ -1,202 +1,158 @@
1
1
  /**
2
2
  * FILENAME: bulltrackers-module/functions/computation-system/helpers/computation_pass_runner.js
3
- * FIXED: Integrates 'runBatchPriceComputation' to prevent OOM on price calculations.
4
- * FIXED: Added try/catch around runBatchPriceComputation to prevent crash on failure.
3
+ * FIXED: 'runDateComputation' now executes ALL calculation types (Standard, Meta, AND Price).
5
4
  */
6
5
 
7
- const {
8
- groupByPass,
9
- checkRootDataAvailability,
10
- fetchExistingResults,
11
- fetchComputationStatus,
6
+ const {
7
+ groupByPass,
8
+ checkRootDataAvailability,
9
+ fetchExistingResults,
10
+ fetchComputationStatus,
12
11
  updateComputationStatus,
13
- runStandardComputationPass,
12
+ runStandardComputationPass,
14
13
  runMetaComputationPass,
15
14
  checkRootDependencies,
16
- runBatchPriceComputation
15
+ runBatchPriceComputation
17
16
  } = require('./orchestration_helpers.js');
18
17
 
19
18
  const { getExpectedDateStrings, normalizeName } = require('../utils/utils.js');
20
19
 
21
20
  const PARALLEL_BATCH_SIZE = 7;
22
21
 
22
+ /**
23
+ * LEGACY / MANUAL RUNNER
24
+ * (Kept for backward compatibility if you run the old HTTP endpoint directly)
25
+ */
23
26
  async function runComputationPass(config, dependencies, computationManifest) {
24
27
  const { logger } = dependencies;
25
- const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
26
- if (!passToRun)
27
- return logger.log('ERROR', '[PassRunner] No pass defined. Aborting.');
28
-
29
- logger.log('INFO', `🚀 Starting PASS ${passToRun} (Targeting /computation_status/{YYYY-MM-DD})...`);
28
+ const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
29
+ if (!passToRun) return logger.log('ERROR', '[PassRunner] No pass defined. Aborting.');
30
+
31
+ logger.log('INFO', `🚀 Starting PASS ${passToRun} (Legacy Mode)...`);
30
32
 
31
33
  // Hardcoded earliest dates
32
- const earliestDates = {
33
- portfolio: new Date('2025-09-25T00:00:00Z'),
34
- history: new Date('2025-11-05T00:00:00Z'),
35
- social: new Date('2025-10-30T00:00:00Z'),
36
- insights: new Date('2025-08-26T00:00:00Z'),
37
- price: new Date('2025-08-01T00:00:00Z') // This is slightly arbitrary, the true earliest date is 1 year ago, but there is no need to backfill that far so we just set it to be slightly earlier than the 2nd earliest computation.
34
+ const earliestDates = {
35
+ portfolio: new Date('2025-09-25T00:00:00Z'),
36
+ history: new Date('2025-11-05T00:00:00Z'),
37
+ social: new Date('2025-10-30T00:00:00Z'),
38
+ insights: new Date('2025-08-26T00:00:00Z'),
39
+ price: new Date('2025-08-01T00:00:00Z')
38
40
  };
39
- earliestDates.absoluteEarliest = Object.values(earliestDates).reduce((a,b) => a < b ? a : b);
41
+ earliestDates.absoluteEarliest = Object.values(earliestDates).reduce((a, b) => a < b ? a : b);
40
42
 
41
43
  const passes = groupByPass(computationManifest);
42
44
  const calcsInThisPass = passes[passToRun] || [];
43
45
 
44
- if (!calcsInThisPass.length)
46
+ if (!calcsInThisPass.length)
45
47
  return logger.log('WARN', `[PassRunner] No calcs for Pass ${passToRun}. Exiting.`);
46
-
47
- const passEarliestDate = earliestDates.absoluteEarliest;
48
- const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
49
- const allExpectedDates = getExpectedDateStrings(passEarliestDate, endDateUTC);
50
48
 
51
- // --- SEPARATION OF CONCERNS ---
52
- // Identify calculations that require the Optimized Price Batch Runner
53
- const priceBatchCalcs = calcsInThisPass.filter(c =>
54
- c.type === 'meta' &&
55
- c.rootDataDependencies &&
56
- c.rootDataDependencies.includes('price')
57
- );
49
+ const passEarliestDate = earliestDates.absoluteEarliest;
50
+ const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
51
+ const allExpectedDates = getExpectedDateStrings(passEarliestDate, endDateUTC);
58
52
 
59
- // Identify calculations for the Standard Date-Loop Runner
53
+ // Legacy Batch Optimization for Price (Only used in legacy loop)
54
+ const priceBatchCalcs = calcsInThisPass.filter(c => c.type === 'meta' && c.rootDataDependencies?.includes('price'));
60
55
  const standardAndOtherMetaCalcs = calcsInThisPass.filter(c => !priceBatchCalcs.includes(c));
61
56
 
62
-
63
- // ========================================================================
64
- // 1. EXECUTE OPTIMIZED PRICE BATCH (Shard-First)
65
- // ========================================================================
66
57
  if (priceBatchCalcs.length > 0) {
67
- logger.log('INFO', `[PassRunner] Detected ${priceBatchCalcs.length} Price-Meta calculations. Checking statuses...`);
68
-
69
58
  try {
70
- // Filter dates that actually need these calculations
71
- // We do a quick serial check of status docs to avoid re-running satisfied dates
72
- const datesNeedingPriceCalc = [];
73
-
74
- // Check statuses in chunks to avoid blowing up IO
75
- const STATUS_CHECK_CHUNK = 20;
76
- for (let i = 0; i < allExpectedDates.length; i += STATUS_CHECK_CHUNK) {
77
- const dateChunk = allExpectedDates.slice(i, i + STATUS_CHECK_CHUNK);
78
- await Promise.all(dateChunk.map(async (dateStr) => {
79
- const status = await fetchComputationStatus(dateStr, config, dependencies);
80
- // If ANY of the price calcs are missing/false, we run the batch for this date
81
- const needsRun = priceBatchCalcs.some(c => status[normalizeName(c.name)] !== true);
82
- if (needsRun) datesNeedingPriceCalc.push(dateStr);
83
- }));
84
- }
85
-
86
- if (datesNeedingPriceCalc.length > 0) {
87
- logger.log('INFO', `[PassRunner] >>> Starting Optimized Batch for ${datesNeedingPriceCalc.length} dates <<<`);
88
-
89
- // Execute the Shard-First Logic
90
- await runBatchPriceComputation(config, dependencies, datesNeedingPriceCalc, priceBatchCalcs);
91
-
92
- // Manually update statuses for these dates/calcs upon completion
93
- // (runBatchPriceComputation handles the results, but we must mark the status doc)
94
- logger.log('INFO', `[PassRunner] Updating status documents for batch...`);
95
-
96
- const BATCH_UPDATE_SIZE = 50;
97
- for (let i = 0; i < datesNeedingPriceCalc.length; i += BATCH_UPDATE_SIZE) {
98
- const updateChunk = datesNeedingPriceCalc.slice(i, i + BATCH_UPDATE_SIZE);
99
- await Promise.all(updateChunk.map(async (dateStr) => {
100
- const updates = {};
101
- priceBatchCalcs.forEach(c => updates[normalizeName(c.name)] = true);
102
- await updateComputationStatus(dateStr, updates, config, dependencies);
103
- }));
104
- }
105
- logger.log('INFO', `[PassRunner] >>> Optimized Batch Complete <<<`);
106
- } else {
107
- logger.log('INFO', `[PassRunner] All Price-Meta calculations are up to date.`);
108
- }
109
- } catch (batchError) {
110
- // FIX: Catch unexpected crashes in the optimized batch runner to allow standard calcs to proceed
111
- logger.log('ERROR', `[PassRunner] Optimized Price Batch Failed! Continuing to standard calculations.`, { errorMessage: batchError.message });
112
- }
59
+ await runBatchPriceComputation(config, dependencies, allExpectedDates, priceBatchCalcs); // Simplified for legacy
60
+ } catch (e) { logger.log('ERROR', 'Legacy Batch Price failed', e); }
113
61
  }
114
62
 
63
+ if (standardAndOtherMetaCalcs.length === 0) return;
115
64
 
116
- // ========================================================================
117
- // 2. EXECUTE STANDARD DATE LOOP (Date-First)
118
- // ========================================================================
119
- if (standardAndOtherMetaCalcs.length === 0) {
120
- logger.log('INFO', `[PassRunner] No other calculations remaining. Exiting.`);
121
- return;
65
+ for (let i = 0; i < allExpectedDates.length; i += PARALLEL_BATCH_SIZE) {
66
+ const batch = allExpectedDates.slice(i, i + PARALLEL_BATCH_SIZE);
67
+ await Promise.all(batch.map(dateStr => runDateComputation(dateStr, passToRun, standardAndOtherMetaCalcs, config, dependencies, computationManifest)));
122
68
  }
69
+ }
123
70
 
124
- const standardCalcs = standardAndOtherMetaCalcs.filter(c => c.type === 'standard');
125
- const metaCalcs = standardAndOtherMetaCalcs.filter(c => c.type === 'meta');
71
+ /**
72
+ * UPDATED: Isolated function to run computations for a single date.
73
+ * Used by the Pub/Sub Worker.
74
+ */
75
+ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, dependencies, computationManifest) {
76
+ const { logger } = dependencies;
77
+ const dateToProcess = new Date(dateStr + 'T00:00:00Z');
78
+
79
+ // 1. Fetch Status for THIS specific date only
80
+ const dailyStatus = await fetchComputationStatus(dateStr, config, dependencies);
81
+
82
+ // Helper: Check status
83
+ const shouldRun = (calc) => {
84
+ const cName = normalizeName(calc.name);
85
+ if (dailyStatus[cName] === true) return false;
86
+ if (calc.dependencies && calc.dependencies.length > 0) {
87
+ const missing = calc.dependencies.filter(depName => dailyStatus[normalizeName(depName)] !== true);
88
+ if (missing.length > 0) return false;
89
+ }
90
+ return true;
91
+ };
126
92
 
127
- // Process a single date
128
- const processDate = async (dateStr) => {
129
- const dateToProcess = new Date(dateStr + 'T00:00:00Z');
93
+ // --- FIX: Run ALL calc types (Standard, Meta, Price) ---
94
+ const calcsToAttempt = calcsInThisPass.filter(shouldRun);
130
95
 
131
- // 1. Fetch Status for THIS specific date only
132
- const dailyStatus = await fetchComputationStatus(dateStr, config, dependencies);
96
+ if (!calcsToAttempt.length) return null;
133
97
 
134
- // Helper: Check status
135
- const shouldRun = (calc) => {
136
- const cName = normalizeName(calc.name);
137
- if (dailyStatus[cName] === true) return false;
138
-
139
- if (calc.dependencies && calc.dependencies.length > 0) {
140
- const missing = calc.dependencies.filter(depName => dailyStatus[normalizeName(depName)] !== true);
141
- if (missing.length > 0) return false;
142
- }
143
- return true;
144
- };
98
+ // 2. Check Root Data Availability
99
+ const earliestDates = {
100
+ portfolio: new Date('2025-09-25T00:00:00Z'),
101
+ history: new Date('2025-11-05T00:00:00Z'),
102
+ social: new Date('2025-10-30T00:00:00Z'),
103
+ insights: new Date('2025-08-26T00:00:00Z'),
104
+ price: new Date('2025-08-01T00:00:00Z')
105
+ };
145
106
 
146
- const standardToRun = standardCalcs.filter(shouldRun);
147
- const metaToRun = metaCalcs.filter(shouldRun);
148
-
149
- if (!standardToRun.length && !metaToRun.length) return null;
107
+ const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
108
+ if (!rootData) {
109
+ logger.log('INFO', `[DateRunner] Root data missing for ${dateStr}. Skipping.`);
110
+ return null;
111
+ }
150
112
 
151
- // 2. Check Root Data Availability
152
- const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
153
- if (!rootData) return null;
113
+ // 3. Filter again based on Root Data availability
114
+ const runnableCalcs = calcsToAttempt.filter(c => checkRootDependencies(c, rootData.status).canRun);
154
115
 
155
- // 3. Filter again based on Root Data availability
156
- const finalStandardToRun = standardToRun.filter(c => checkRootDependencies(c, rootData.status).canRun);
157
- const finalMetaToRun = metaToRun.filter(c => checkRootDependencies(c, rootData.status).canRun);
116
+ if (!runnableCalcs.length) return null;
158
117
 
159
- if (!finalStandardToRun.length && !finalMetaToRun.length) return null;
118
+ // Split into Standard (Streaming) and Meta (Once-Per-Day/Price)
119
+ const standardToRun = runnableCalcs.filter(c => c.type === 'standard');
120
+ // Note: Meta includes Price calcs in this flow
121
+ const metaToRun = runnableCalcs.filter(c => c.type === 'meta');
160
122
 
161
- logger.log('INFO', `[PassRunner] Running ${dateStr}: ${finalStandardToRun.length} std, ${finalMetaToRun.length} meta`);
123
+ logger.log('INFO', `[DateRunner] Running ${dateStr}: ${standardToRun.length} std, ${metaToRun.length} meta`);
162
124
 
163
- const dateUpdates = {};
125
+ const dateUpdates = {};
164
126
 
165
- try {
166
- const calcsRunning = [...finalStandardToRun, ...finalMetaToRun];
167
- const existingResults = await fetchExistingResults(dateStr, calcsRunning, computationManifest, config, dependencies, false);
168
- const prevDate = new Date(dateToProcess); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
169
- const prevDateStr = prevDate.toISOString().slice(0, 10);
170
- const previousResults = await fetchExistingResults(prevDateStr, calcsRunning, computationManifest, config, dependencies, true);
171
-
172
- // Changed skipstatus write to false to ensure updates are recorded, allowing for proper tracking and avoiding re-computation in future passes. NOTE : Writing true here introduces significant bugs and should be avoided.
173
- if (finalStandardToRun.length) {
174
- const updates = await runStandardComputationPass(dateToProcess, finalStandardToRun, `Pass ${passToRun} (Std)`, config, dependencies, rootData, existingResults, previousResults, false);
175
- Object.assign(dateUpdates, updates);
176
- }
177
- if (finalMetaToRun.length) { // Again, writing true here introduces significant bugs and should be avoided.
178
- const updates = await runMetaComputationPass(dateToProcess, finalMetaToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, previousResults, rootData, false);
179
- Object.assign(dateUpdates, updates);
180
- }
181
- } catch (err) {
182
- logger.log('ERROR', `[PassRunner] FAILED Pass ${passToRun} for ${dateStr}`, { errorMessage: err.message });
183
- [...finalStandardToRun, ...finalMetaToRun].forEach(c => dateUpdates[normalizeName(c.name)] = false);
184
- }
127
+ try {
128
+ const calcsRunning = [...standardToRun, ...metaToRun];
185
129
 
186
- if (Object.keys(dateUpdates).length > 0) {
187
- await updateComputationStatus(dateStr, dateUpdates, config, dependencies);
188
- }
130
+ // Fetch dependencies (results from this day or yesterday)
131
+ const existingResults = await fetchExistingResults(dateStr, calcsRunning, computationManifest, config, dependencies, false);
132
+ const prevDate = new Date(dateToProcess); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
133
+ const prevDateStr = prevDate.toISOString().slice(0, 10);
134
+ const previousResults = await fetchExistingResults(prevDateStr, calcsRunning, computationManifest, config, dependencies, true);
189
135
 
190
- return { date: dateStr, updates: dateUpdates };
191
- };
136
+ if (standardToRun.length) {
137
+ const updates = await runStandardComputationPass(dateToProcess, standardToRun, `Pass ${passToRun} (Std)`, config, dependencies, rootData, existingResults, previousResults, false);
138
+ Object.assign(dateUpdates, updates);
139
+ }
140
+ if (metaToRun.length) {
141
+ // runMetaComputationPass uses the Controller, which handles Price Sharding logic internally for single dates.
142
+ const updates = await runMetaComputationPass(dateToProcess, metaToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, previousResults, rootData, false);
143
+ Object.assign(dateUpdates, updates);
144
+ }
145
+ } catch (err) {
146
+ logger.log('ERROR', `[DateRunner] FAILED Pass ${passToRun} for ${dateStr}`, { errorMessage: err.message });
147
+ [...standardToRun, ...metaToRun].forEach(c => dateUpdates[normalizeName(c.name)] = false);
148
+ throw err; // Re-throw to trigger Pub/Sub retry
149
+ }
192
150
 
193
- // Batch process dates
194
- for (let i = 0; i < allExpectedDates.length; i += PARALLEL_BATCH_SIZE) {
195
- const batch = allExpectedDates.slice(i, i + PARALLEL_BATCH_SIZE);
196
- await Promise.all(batch.map(processDate));
151
+ if (Object.keys(dateUpdates).length > 0) {
152
+ await updateComputationStatus(dateStr, dateUpdates, config, dependencies);
197
153
  }
198
-
199
- logger.log('INFO', `[PassRunner] Pass ${passToRun} orchestration finished.`);
154
+
155
+ return { date: dateStr, updates: dateUpdates };
200
156
  }
201
157
 
202
- module.exports = { runComputationPass };
158
+ module.exports = { runComputationPass, runDateComputation };
@@ -0,0 +1,56 @@
1
+ /**
2
+ * FILENAME: bulltrackers-module/functions/computation-system/helpers/computation_worker.js
3
+ * PURPOSE: Consumes computation tasks from Pub/Sub and executes them.
4
+ */
5
+
6
+ const { runDateComputation } = require('./computation_pass_runner.js');
7
+ const { groupByPass } = require('./orchestration_helpers.js');
8
+
9
+ /**
10
+ * Handles a single Pub/Sub message for a computation task.
11
+ */
12
+ async function handleComputationTask(message, config, dependencies, computationManifest) {
13
+ const { logger } = dependencies;
14
+
15
+ try {
16
+ const data = message.json || message; // Handle both raw payload and parsed JSON
17
+
18
+ if (data.action !== 'RUN_COMPUTATION_DATE') {
19
+ logger.log('WARN', `[Worker] Unknown action: ${data.action}. Ignoring.`);
20
+ return;
21
+ }
22
+
23
+ const { date, pass } = data;
24
+
25
+ if (!date || !pass) {
26
+ logger.log('ERROR', `[Worker] Missing date or pass in payload: ${JSON.stringify(data)}`);
27
+ return;
28
+ }
29
+
30
+ logger.log('INFO', `[Worker] Received task: Date=${date}, Pass=${pass}`);
31
+
32
+ // Resolve calculations for this pass
33
+ const passes = groupByPass(computationManifest);
34
+ const calcsInThisPass = passes[pass] || [];
35
+
36
+ if (!calcsInThisPass.length) {
37
+ logger.log('WARN', `[Worker] No calculations found for Pass ${pass}.`);
38
+ return;
39
+ }
40
+
41
+ // Execute the computation for this specific date
42
+ const result = await runDateComputation(date, pass, calcsInThisPass, config, dependencies, computationManifest);
43
+
44
+ if (result) {
45
+ logger.log('INFO', `[Worker] Successfully processed ${date} (Pass ${pass}). Updates: ${Object.keys(result.updates || {}).length}`);
46
+ } else {
47
+ logger.log('INFO', `[Worker] Processed ${date} (Pass ${pass}) - No action needed or data missing.`);
48
+ }
49
+
50
+ } catch (err) {
51
+ logger.log('ERROR', `[Worker] Fatal error processing task: ${err.message}`, { stack: err.stack });
52
+ throw err; // Re-throw to trigger Pub/Sub retry
53
+ }
54
+ }
55
+
56
+ module.exports = { handleComputationTask };
@@ -1,8 +1,8 @@
1
1
  /**
2
2
  * FILENAME: bulltrackers-module/functions/computation-system/helpers/orchestration_helpers.js
3
- * FIXED: TS Error (controller.loader.mappings)
4
- * ADDED: Smart Shard Lookup for specific tickers
5
- * OPTIMIZED: Added Concurrency (Parallel Commits & Pipelined Shards) for runBatchPriceComputation
3
+ * FIXED: Context math mapping in runBatchPriceComputation (resolves 'undefined' crash).
4
+ * IMPROVED: Explicit logging for every calculation run (Start, Success, Failure).
5
+ * OPTIMIZED: Parallel Commits & Pipelined Shards.
6
6
  */
7
7
 
8
8
  const { ComputationController } = require('../controllers/computation_controller');
@@ -14,7 +14,14 @@ const {
14
14
  getRelevantShardRefs, loadDataByRefs
15
15
  } = require('../utils/data_loader');
16
16
 
17
- const pLimit = require('p-limit'); // TODO, THIS OUGHT TO BE INJECTED.
17
+ // --- FIX 1: Import Math Layer Primitives for Correct Context Mapping ---
18
+ const {
19
+ DataExtractor, HistoryExtractor, MathPrimitives, Aggregators,
20
+ Validators, SCHEMAS, SignalPrimitives, DistributionAnalytics,
21
+ TimeSeries, priceExtractor
22
+ } = require('../layers/math_primitives.js');
23
+
24
+ const pLimit = require('p-limit');
18
25
 
19
26
  /**
20
27
  * Groups calculations from a manifest by their 'pass' property.
@@ -22,29 +29,23 @@ const pLimit = require('p-limit'); // TODO, THIS OUGHT TO BE INJECTED.
22
29
  function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[calc.pass] = acc[calc.pass] || []).push(calc); return acc; }, {}); }
23
30
 
24
31
  /**
25
- * --- NEW HELPER: PASSIVE DATA VALIDATION ---
32
+ * --- PASSIVE DATA VALIDATION ---
26
33
  * Scans a result set for suspicious patterns (e.g., a field is NULL for 100% of tickers).
27
- * Logs warnings but DOES NOT block the commit.
28
34
  */
29
- function validateResultPatterns(logger, calcName, results, category) { // TODO, THIS COULD BE MUCH MORE SOPHISTICATED, WE WILL NEVER FORCE FAIL A COMPUTATION REGARDLESS, BUT IT COULD BE A GREAT WARNING SYSTEM, USE GLOUD LOG SINKS TO DETECT PROBLEMS BASED ON THESE LOGS
30
- // 1. Skip Speculators (Too sparse, nulls are expected)
35
+ function validateResultPatterns(logger, calcName, results, category) {
31
36
  if (category === 'speculator' || category === 'speculators') return;
32
37
 
33
38
  const tickers = Object.keys(results);
34
39
  const totalItems = tickers.length;
35
40
 
36
- // 2. Need a decent sample size to judge patterns
37
41
  if (totalItems < 5) return;
38
42
 
39
- // 3. Get all keys from the first valid object
40
- // We assume schema is roughly consistent across tickers
41
43
  const sampleTicker = tickers.find(t => results[t] && typeof results[t] === 'object');
42
44
  if (!sampleTicker) return;
43
45
 
44
46
  const keys = Object.keys(results[sampleTicker]);
45
47
 
46
48
  keys.forEach(key => {
47
- // Skip internal keys or metadata
48
49
  if (key.startsWith('_')) return;
49
50
 
50
51
  let nullCount = 0;
@@ -58,14 +59,11 @@ function validateResultPatterns(logger, calcName, results, category) { // TODO,
58
59
  if (typeof val === 'number' && isNaN(val)) nanCount++;
59
60
  }
60
61
 
61
- // 4. Define Thresholds
62
- // If 100% of data is NaN or Undefined, that's almost certainly a bug.
63
62
  if (nanCount === totalItems) {
64
63
  logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is NaN for 100% of ${totalItems} items. Code bug likely.`);
65
64
  } else if (undefinedCount === totalItems) {
66
65
  logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is UNDEFINED for 100% of ${totalItems} items. Code bug likely.`);
67
66
  }
68
- // 5. Nulls are tricky. warn if >90%, but don't error (might be valid logic like "no shorts")
69
67
  else if (nullCount > (totalItems * 0.9)) {
70
68
  logger.log('WARN', `[DataQuality] Calc '${calcName}' field '${key}' is NULL for ${nullCount}/${totalItems} items. Check logic if this is unexpected.`);
71
69
  }
@@ -329,10 +327,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
329
327
  }
330
328
 
331
329
  if (Object.keys(standardRes).length) {
332
- // --- NEW: Run Passive Validation ---
333
- // We do this BEFORE marking it completed, but we do NOT stop the write.
334
330
  validateResultPatterns(deps.logger, name, standardRes, calc.manifest.category);
335
- // -----------------------------------
336
331
 
337
332
  standardRes._completed = true;
338
333
  writes.push({
@@ -388,10 +383,10 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
388
383
  * OPTIMIZED: Implements concurrency for both Shard Processing and Write Commits
389
384
  */
390
385
  async function runBatchPriceComputation(config, deps, dateStrings, calcs, targetTickers = []) {
391
- const { logger, db, calculationUtils } = deps; // Ensure calculationUtils is available for retry
386
+ const { logger, db, calculationUtils } = deps;
392
387
  const controller = new ComputationController(config, deps);
393
388
 
394
- // 1. FIX: Call loadMappings() correctly and get the result
389
+ // 1. Call loadMappings() correctly and get the result
395
390
  const mappings = await controller.loader.loadMappings();
396
391
 
397
392
  // 2. Resolve Shards (All or Subset)
@@ -413,12 +408,9 @@ async function runBatchPriceComputation(config, deps, dateStrings, calcs, target
413
408
  }
414
409
 
415
410
  // 3. Execution Planning
416
- // CONCURRENCY SETTING:
417
- // Limit outer concurrency (processing shard chunks) to 2 to prevent contention on daily result docs.
418
- // While Firestore handles concurrent writes to the same doc, limiting this avoids excessive retries.
419
411
  const OUTER_CONCURRENCY_LIMIT = 2;
420
412
  const SHARD_BATCH_SIZE = 20;
421
- const WRITE_BATCH_LIMIT = 50; // Keep write batch size small (payload safety)
413
+ const WRITE_BATCH_LIMIT = 50;
422
414
 
423
415
  logger.log('INFO', `[BatchPrice] Execution Plan: ${dateStrings.length} days, ${allShardRefs.length} shards. Concurrency: ${OUTER_CONCURRENCY_LIMIT}.`);
424
416
 
@@ -440,7 +432,6 @@ async function runBatchPriceComputation(config, deps, dateStrings, calcs, target
440
432
  // Optional Filtering for Subset Mode
441
433
  if (targetInstrumentIds.length > 0) {
442
434
  const requestedSet = new Set(targetInstrumentIds);
443
- // Iterate over the loaded data and delete anything we didn't ask for
444
435
  for (const loadedInstrumentId in pricesData) {
445
436
  if (!requestedSet.has(loadedInstrumentId)) {
446
437
  delete pricesData[loadedInstrumentId];
@@ -450,26 +441,44 @@ async function runBatchPriceComputation(config, deps, dateStrings, calcs, target
450
441
  const writes = [];
451
442
 
452
443
  // --- CALCULATION PHASE ---
453
- // This builds up the array of writes (one per date)
454
444
  for (const dateStr of dateStrings) {
445
+ // --- FIX 2: Manually map math primitives to their alias names ---
446
+ // This matches the ContextBuilder logic in ComputationController
447
+ // and fixes the "Cannot read properties of undefined (reading 'standardDeviation')" error.
455
448
  const context = {
456
449
  mappings,
457
450
  prices: { history: pricesData },
458
451
  date: { today: dateStr },
459
- math: require('../layers/math_primitives.js')
452
+ math: {
453
+ extract: DataExtractor,
454
+ history: HistoryExtractor,
455
+ compute: MathPrimitives,
456
+ aggregate: Aggregators,
457
+ validate: Validators,
458
+ signals: SignalPrimitives,
459
+ schemas: SCHEMAS,
460
+ distribution : DistributionAnalytics,
461
+ TimeSeries: TimeSeries,
462
+ priceExtractor : priceExtractor
463
+ }
460
464
  };
461
465
 
462
466
  for (const calcManifest of calcs) {
463
467
  try {
468
+ // --- LOGGING FIX: Log start of calculation ---
469
+ logger.log('INFO', `[BatchPrice] >> Running ${calcManifest.name} for ${dateStr}...`);
470
+
464
471
  const instance = new calcManifest.class();
465
472
  await instance.process(context);
466
473
  const result = await instance.getResult();
467
474
 
475
+ let hasContent = false;
468
476
  if (result && Object.keys(result).length > 0) {
469
477
  let dataToWrite = result;
470
478
  if (result.by_instrument) dataToWrite = result.by_instrument;
471
479
 
472
480
  if (Object.keys(dataToWrite).length > 0) {
481
+ hasContent = true;
473
482
  const docRef = db.collection(config.resultsCollection).doc(dateStr)
474
483
  .collection(config.resultsSubcollection).doc(calcManifest.category)
475
484
  .collection(config.computationsSubcollection).doc(normalizeName(calcManifest.name));
@@ -481,22 +490,28 @@ async function runBatchPriceComputation(config, deps, dateStrings, calcs, target
481
490
  });
482
491
  }
483
492
  }
493
+
494
+ // --- LOGGING FIX: Log success/completion ---
495
+ if (hasContent) {
496
+ logger.log('INFO', `[BatchPrice] \u2714 Finished ${calcManifest.name} for ${dateStr}. Found data.`);
497
+ } else {
498
+ logger.log('INFO', `[BatchPrice] - Finished ${calcManifest.name} for ${dateStr}. No result data.`);
499
+ }
500
+
484
501
  } catch (err) {
485
- logger.log('ERROR', `[BatchPrice] Calc ${calcManifest.name} failed for ${dateStr}`, { error: err.message });
502
+ // --- LOGGING FIX: Explicit failure log ---
503
+ logger.log('ERROR', `[BatchPrice] \u2716 Failed ${calcManifest.name} for ${dateStr}: ${err.message}`);
486
504
  }
487
505
  }
488
506
  }
489
507
 
490
508
  // --- PARALLEL COMMIT PHASE ---
491
- // Instead of committing sequentially via commitBatchInChunks, we process these writes in parallel.
492
- // Since each write targets a DIFFERENT date (different document), parallelizing this is safe and fast.
493
509
  if (writes.length > 0) {
494
510
  const commitBatches = [];
495
511
  for (let i = 0; i < writes.length; i += WRITE_BATCH_LIMIT) {
496
512
  commitBatches.push(writes.slice(i, i + WRITE_BATCH_LIMIT));
497
513
  }
498
514
 
499
- // Use a higher concurrency for commits since they target disjoint documents
500
515
  const commitLimit = pLimit(10);
501
516
 
502
517
  await Promise.all(commitBatches.map((batchWrites, bIndex) => commitLimit(async () => {
@@ -507,7 +522,6 @@ async function runBatchPriceComputation(config, deps, dateStrings, calcs, target
507
522
  await calculationUtils.withRetry(() => batch.commit(), `BatchPrice-C${index}-B${bIndex}`);
508
523
  } catch (commitErr) {
509
524
  logger.log('ERROR', `[BatchPrice] Commit failed for Chunk ${index} Batch ${bIndex}.`, { error: commitErr.message });
510
- // We log but don't throw, to allow other batches to succeed
511
525
  }
512
526
  })));
513
527
  }
@@ -1,36 +1,82 @@
1
1
  /**
2
2
  * @fileoverview Core Pub/Sub utility functions.
3
- * REFACTORED: All functions are now stateless and receive dependencies.
4
- * 'pubsub' (PubSub instance) and 'logger' are passed via a 'dependencies' object.
3
 * REFACTORED: Hybrid module supporting both stateless functions and a stateful class.
 * Exporting the PubSubUtils class resolves the earlier "PubSubUtils is not a constructor" error.
5
5
  */
6
6
 
7
7
  /**
8
- * Publishes an array of tasks to a specified Pub/Sub topic in batches.
9
- * @async
10
- * @param {object} dependencies - Contains pubsub, logger.
11
- * @param {object} config - Configuration object.
12
- * @param {string} config.topicName - The name of the Pub/Sub topic.
13
- * @param {Array<object>} config.tasks - The tasks to publish.
14
- * @param {string} config.taskType - A descriptor for the task type (for logging).
15
- * @param {number} [config.maxPubsubBatchSize=500] - Max messages to publish in one client batch.
16
- * @returns {Promise<void>}
8
+ * Stateless Function: Publishes tasks in batches.
9
+ * @param {object} dependencies - { pubsub, logger }
10
+ * @param {object} config - { topicName, tasks, taskType, maxPubsubBatchSize }
17
11
  */
18
12
  async function batchPublishTasks(dependencies, config) {
19
13
  const { pubsub, logger } = dependencies;
20
14
  const { topicName, tasks, taskType, maxPubsubBatchSize = 500 } = config;
21
- if (!tasks || tasks.length === 0) { logger.log('INFO',`[Core Utils] No ${taskType} tasks to publish to ${topicName}.`); return; }
22
- logger.log('INFO',`[Core Utils] Publishing ${tasks.length} ${taskType} tasks to ${topicName}...`);
15
+
16
+ if (!tasks || tasks.length === 0) {
17
+ logger.log('INFO', `[Core Utils] No ${taskType} tasks to publish to ${topicName}.`);
18
+ return;
19
+ }
20
+
21
+ logger.log('INFO', `[Core Utils] Publishing ${tasks.length} ${taskType} tasks to ${topicName}...`);
23
22
  const topic = pubsub.topic(topicName);
24
23
  let messagesPublished = 0;
24
+
25
25
  try {
26
- for (let i = 0; i < tasks.length; i += maxPubsubBatchSize) { const batchTasks = tasks.slice(i, i + maxPubsubBatchSize);
27
- const batchPromises = batchTasks.map(task => { const dataBuffer = Buffer.from(JSON.stringify(task));
28
- return topic.publishMessage({ data: dataBuffer }) .catch(err => logger.log('ERROR', `[Core Utils] Failed to publish single message for ${taskType}`, { error: err.message, task: task })); });
26
+ for (let i = 0; i < tasks.length; i += maxPubsubBatchSize) {
27
+ const batchTasks = tasks.slice(i, i + maxPubsubBatchSize);
28
+ const batchPromises = batchTasks.map(task => {
29
+ const dataBuffer = Buffer.from(JSON.stringify(task));
30
+ return topic.publishMessage({ data: dataBuffer })
31
+ .catch(err => logger.log('ERROR', `[Core Utils] Failed to publish single message for ${taskType}`, { error: err.message, task: task }));
32
+ });
33
+
29
34
  await Promise.all(batchPromises);
30
35
  messagesPublished += batchTasks.length;
31
- logger.log('TRACE', `[Core Utils] Published batch ${Math.ceil((i + 1) / maxPubsubBatchSize)} for ${taskType} (${batchTasks.length} messages)`); }
36
+ logger.log('TRACE', `[Core Utils] Published batch ${Math.ceil((i + 1) / maxPubsubBatchSize)} for ${taskType} (${batchTasks.length} messages)`);
37
+ }
32
38
  logger.log('SUCCESS', `[Core Utils] Finished publishing ${messagesPublished} ${taskType} tasks to ${topicName}.`);
33
- } catch (error) { logger.log('ERROR', `[Core Utils] Error during batch publishing of ${taskType} tasks to ${topicName}`, { errorMessage: error.message }); throw error; }
39
+ } catch (error) {
40
+ logger.log('ERROR', `[Core Utils] Error during batch publishing of ${taskType} tasks to ${topicName}`, { errorMessage: error.message });
41
+ throw error;
42
+ }
43
+ }
44
+
45
+ /**
46
+ * Stateful Class Wrapper
47
+ * Allows usage like: const utils = new PubSubUtils(deps); utils.batchPublishTasks(...)
48
+ */
49
+ class PubSubUtils {
50
+ constructor(dependencies) {
51
+ this.dependencies = dependencies;
52
+ }
53
+
54
+ /**
55
+ * Hybrid method: Supports both (config) and (dependencies, config) signatures.
56
+ */
57
+ async batchPublishTasks(arg1, arg2) {
58
+ // If called as (dependencies, config), use passed dependencies (Stateless/Legacy style)
59
+ if (arg2) {
60
+ return batchPublishTasks(arg1, arg2);
61
+ }
62
+ // If called as (config), use this.dependencies (Stateful style)
63
+ return batchPublishTasks(this.dependencies, arg1);
64
+ }
65
+
66
+ /**
67
+ * Helper for Computation System (Dispatcher)
68
+ * Maps (topic, messages) -> batchPublishTasks
69
+ */
70
+ async publishMessageBatch(topicName, messages) {
71
+ // Unpack {json: ...} structure if present
72
+ const tasks = messages.map(m => m.json || m);
73
+ const config = {
74
+ topicName,
75
+ tasks,
76
+ taskType: 'computation-batch'
77
+ };
78
+ return batchPublishTasks(this.dependencies, config);
79
+ }
34
80
  }
35
81
 
36
- module.exports = { batchPublishTasks };
82
+ module.exports = { batchPublishTasks, PubSubUtils };
package/index.js CHANGED
@@ -3,56 +3,77 @@
3
3
  * Export the pipes!
4
4
  */
5
5
 
6
- // Core
7
- const core = { IntelligentHeaderManager : require('./functions/core/utils/intelligent_header_manager') .IntelligentHeaderManager,
8
- IntelligentProxyManager : require('./functions/core/utils/intelligent_proxy_manager') .IntelligentProxyManager,
9
- FirestoreBatchManager : require('./functions/task-engine/utils/firestore_batch_manager') .FirestoreBatchManager,
10
- firestoreUtils : require('./functions/core/utils/firestore_utils'),
11
- pubsubUtils : require('./functions/core/utils/pubsub_utils') };
12
-
13
- // Orchestrator
14
- const orchestrator = { runDiscoveryOrchestrator : require('./functions/orchestrator/index') .runDiscoveryOrchestrator,
15
- runUpdateOrchestrator : require('./functions/orchestrator/index') .runUpdateOrchestrator,
16
- checkDiscoveryNeed : require('./functions/orchestrator/helpers/discovery_helpers') .checkDiscoveryNeed,
17
- getDiscoveryCandidates : require('./functions/orchestrator/helpers/discovery_helpers') .getDiscoveryCandidates,
18
- dispatchDiscovery : require('./functions/orchestrator/helpers/discovery_helpers') .dispatchDiscovery,
19
- getUpdateTargets : require('./functions/orchestrator/helpers/update_helpers') .getUpdateTargets,
20
- dispatchUpdates : require('./functions/orchestrator/helpers/update_helpers') .dispatchUpdates };
21
-
22
- // Dispatcher
23
- const dispatcher = { handleRequest : require('./functions/dispatcher/index') .handleRequest ,
24
- dispatchTasksInBatches : require('./functions/dispatcher/helpers/dispatch_helpers') .dispatchTasksInBatches };
25
-
26
- // Task Engine
27
- const taskEngine = { handleRequest : require('./functions/task-engine/handler_creator') .handleRequest ,
28
- handleDiscover : require('./functions/task-engine/helpers/discover_helpers') .handleDiscover,
29
- handleVerify : require('./functions/task-engine/helpers/verify_helpers') .handleVerify ,
30
- handleUpdate : require('./functions/task-engine/helpers/update_helpers') .handleUpdate };
6
+ // Import the PubSub Module
7
+ const pubsubModule = require('./functions/core/utils/pubsub_utils');
8
+
9
+ // Core
10
+ const core = {
11
+ IntelligentHeaderManager: require('./functions/core/utils/intelligent_header_manager').IntelligentHeaderManager,
12
+ IntelligentProxyManager: require('./functions/core/utils/intelligent_proxy_manager').IntelligentProxyManager,
13
+ FirestoreBatchManager: require('./functions/task-engine/utils/firestore_batch_manager').FirestoreBatchManager,
14
+ firestoreUtils: require('./functions/core/utils/firestore_utils'),
15
+
16
+ // EXPORT FIX:
17
+ pubsubUtils: pubsubModule, // Keeps stateless function access
18
+ PubSubUtils: pubsubModule.PubSubUtils // Exposes the Class for 'new pipe.core.PubSubUtils()'
19
+ };
20
+
21
+ // Orchestrator
22
+ const orchestrator = {
23
+ runDiscoveryOrchestrator: require('./functions/orchestrator/index').runDiscoveryOrchestrator,
24
+ runUpdateOrchestrator: require('./functions/orchestrator/index').runUpdateOrchestrator,
25
+ checkDiscoveryNeed: require('./functions/orchestrator/helpers/discovery_helpers').checkDiscoveryNeed,
26
+ getDiscoveryCandidates: require('./functions/orchestrator/helpers/discovery_helpers').getDiscoveryCandidates,
27
+ dispatchDiscovery: require('./functions/orchestrator/helpers/discovery_helpers').dispatchDiscovery,
28
+ getUpdateTargets: require('./functions/orchestrator/helpers/update_helpers').getUpdateTargets,
29
+ dispatchUpdates: require('./functions/orchestrator/helpers/update_helpers').dispatchUpdates
30
+ };
31
+
32
+ // Dispatcher
33
+ const dispatcher = {
34
+ handleRequest: require('./functions/dispatcher/index').handleRequest,
35
+ dispatchTasksInBatches: require('./functions/dispatcher/helpers/dispatch_helpers').dispatchTasksInBatches
36
+ };
37
+
38
+ // Task Engine
39
+ const taskEngine = {
40
+ handleRequest: require('./functions/task-engine/handler_creator').handleRequest,
41
+ handleDiscover: require('./functions/task-engine/helpers/discover_helpers').handleDiscover,
42
+ handleVerify: require('./functions/task-engine/helpers/verify_helpers').handleVerify,
43
+ handleUpdate: require('./functions/task-engine/helpers/update_helpers').handleUpdate
44
+ };
31
45
 
32
46
  // --- NEW IMPORT ---
33
47
  const { build: buildManifestFunc } = require('./functions/computation-system/helpers/computation_manifest_builder');
34
48
 
35
- // Computation System
36
- const computationSystem = { runComputationPass : require('./functions/computation-system/helpers/computation_pass_runner') .runComputationPass,
37
- dataLoader : require('./functions/computation-system/utils/data_loader'),
38
- computationUtils : require('./functions/computation-system/utils/utils'),
39
- buildManifest : buildManifestFunc
40
- };
41
-
42
- // API
43
- const api = { createApiApp : require('./functions/generic-api/index') .createApiApp,
44
- helpers : require('./functions/generic-api/helpers/api_helpers') };
45
-
46
- // Maintenance
47
- const maintenance = { runSpeculatorCleanup : require('./functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers') .runCleanup,
48
- handleInvalidSpeculator : require('./functions/invalid-speculator-handler/helpers/handler_helpers') .handleInvalidSpeculator,
49
- runFetchInsights : require('./functions/fetch-insights/helpers/handler_helpers') .fetchAndStoreInsights,
50
- runFetchPrices : require('./functions/etoro-price-fetcher/helpers/handler_helpers') .fetchAndStorePrices,
51
- runSocialOrchestrator : require('./functions/social-orchestrator/helpers/orchestrator_helpers') .runSocialOrchestrator,
52
- handleSocialTask : require('./functions/social-task-handler/helpers/handler_helpers') .handleSocialTask,
53
- runBackfillAssetPrices : require('./functions/price-backfill/helpers/handler_helpers') .runBackfillAssetPrices };
54
-
55
- // Proxy
56
- const proxy = { handlePost : require('./functions/appscript-api/index') .handlePost };
57
-
58
- module.exports = { pipe: { core, orchestrator, dispatcher, taskEngine, computationSystem, api, maintenance, proxy } };
49
+ // Computation System
50
+ const computationSystem = {
51
+ runComputationPass: require('./functions/computation-system/helpers/computation_pass_runner').runComputationPass,
52
+ dispatchComputationPass: require('./functions/computation-system/helpers/computation_dispatcher').dispatchComputationPass,
53
+ handleComputationTask: require('./functions/computation-system/helpers/computation_worker').handleComputationTask,
54
+ dataLoader: require('./functions/computation-system/utils/data_loader'),
55
+ computationUtils: require('./functions/computation-system/utils/utils'),
56
+ buildManifest: buildManifestFunc
57
+ };
58
+
59
+ // API
60
+ const api = {
61
+ createApiApp: require('./functions/generic-api/index').createApiApp,
62
+ helpers: require('./functions/generic-api/helpers/api_helpers')
63
+ };
64
+
65
+ // Maintenance
66
+ const maintenance = {
67
+ runSpeculatorCleanup: require('./functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers').runCleanup,
68
+ handleInvalidSpeculator: require('./functions/invalid-speculator-handler/helpers/handler_helpers').handleInvalidSpeculator,
69
+ runFetchInsights: require('./functions/fetch-insights/helpers/handler_helpers').fetchAndStoreInsights,
70
+ runFetchPrices: require('./functions/etoro-price-fetcher/helpers/handler_helpers').fetchAndStorePrices,
71
+ runSocialOrchestrator: require('./functions/social-orchestrator/helpers/orchestrator_helpers').runSocialOrchestrator,
72
+ handleSocialTask: require('./functions/social-task-handler/helpers/handler_helpers').handleSocialTask,
73
+ runBackfillAssetPrices: require('./functions/price-backfill/helpers/handler_helpers').runBackfillAssetPrices
74
+ };
75
+
76
+ // Proxy
77
+ const proxy = { handlePost: require('./functions/appscript-api/index').handlePost };
78
+
79
+ module.exports = { pipe: { core, orchestrator, dispatcher, taskEngine, computationSystem, api, maintenance, proxy } };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bulltrackers-module",
3
- "version": "1.0.199",
3
+ "version": "1.0.201",
4
4
  "description": "Helper Functions for Bulltrackers.",
5
5
  "main": "index.js",
6
6
  "files": [