bulltrackers-module 1.0.204 → 1.0.206
- package/functions/computation-system/helpers/computation_pass_runner.js +13 -13
- package/functions/computation-system/helpers/orchestration_helpers.js +249 -240
- package/functions/computation-system/layers/math_primitives.js +104 -36
- package/functions/task-engine/helpers/update_helpers.js +14 -82
- package/functions/task-engine/utils/task_engine_utils.js +26 -22
- package/package.json +1 -1
--- a/package/functions/computation-system/helpers/computation_pass_runner.js
+++ b/package/functions/computation-system/helpers/computation_pass_runner.js
@@ -25,7 +25,7 @@ const PARALLEL_BATCH_SIZE = 7;
  */
 async function runComputationPass(config, dependencies, computationManifest) {
   const { logger } = dependencies;
-  const passToRun
+  const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
   if (!passToRun) return logger.log('ERROR', '[PassRunner] No pass defined. Aborting.');

   logger.log('INFO', `🚀 Starting PASS ${passToRun} (Legacy Mode)...`);
@@ -33,21 +33,21 @@ async function runComputationPass(config, dependencies, computationManifest) {
   // Hardcoded earliest dates
   const earliestDates = {
     portfolio: new Date('2025-09-25T00:00:00Z'),
-    history:
-    social:
-    insights:
-    price:
+    history:  new Date('2025-11-05T00:00:00Z'),
+    social:   new Date('2025-10-30T00:00:00Z'),
+    insights: new Date('2025-08-26T00:00:00Z'),
+    price:    new Date('2025-08-01T00:00:00Z')
   };
   earliestDates.absoluteEarliest = Object.values(earliestDates).reduce((a, b) => a < b ? a : b);

-  const passes
+  const passes = groupByPass(computationManifest);
   const calcsInThisPass = passes[passToRun] || [];

   if (!calcsInThisPass.length)
     return logger.log('WARN', `[PassRunner] No calcs for Pass ${passToRun}. Exiting.`);

   const passEarliestDate = earliestDates.absoluteEarliest;
-  const endDateUTC
+  const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
   const allExpectedDates = getExpectedDateStrings(passEarliestDate, endDateUTC);

   // Legacy Batch Optimization for Price (Only used in legacy loop)
@@ -98,10 +98,10 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
   // 2. Check Root Data Availability
   const earliestDates = {
     portfolio: new Date('2025-09-25T00:00:00Z'),
-    history:
-    social:
-    insights:
-    price:
+    history:  new Date('2025-11-05T00:00:00Z'),
+    social:   new Date('2025-10-30T00:00:00Z'),
+    insights: new Date('2025-08-26T00:00:00Z'),
+    price:    new Date('2025-08-01T00:00:00Z')
   };

   const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
@@ -129,8 +129,8 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d

   // Fetch dependencies (results from this day or yesterday)
   const existingResults = await fetchExistingResults(dateStr, calcsRunning, computationManifest, config, dependencies, false);
-  const prevDate
-  const prevDateStr
+  const prevDate = new Date(dateToProcess); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
+  const prevDateStr = prevDate.toISOString().slice(0, 10);
   const previousResults = await fetchExistingResults(prevDateStr, calcsRunning, computationManifest, config, dependencies, true);

   if (standardToRun.length) {
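The same UTC "previous day" idiom recurs in several hunks in this release (here, in streamAndProcess, and in runStandardComputationPass below). A minimal standalone sketch of the pattern — the helper name is ours, not the package's:

```js
// Step back one calendar day in UTC and format as 'YYYY-MM-DD'.
// 'previousUtcDateString' is a hypothetical name for illustration only.
function previousUtcDateString(dateStr) {
  const d = new Date(dateStr + 'T00:00:00Z'); // pin parsing to UTC midnight
  d.setUTCDate(d.getUTCDate() - 1);           // rolls over months/years correctly
  return d.toISOString().slice(0, 10);
}

console.log(previousUtcDateString('2025-11-01')); // '2025-10-31'
```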
--- a/package/functions/computation-system/helpers/orchestration_helpers.js
+++ b/package/functions/computation-system/helpers/orchestration_helpers.js
@@ -1,27 +1,26 @@
 /**
  * FILENAME: bulltrackers-module/functions/computation-system/helpers/orchestration_helpers.js
- * FIXED:
- *
- *
+ * FIXED: 'commitResults' now isolates commits PER COMPUTATION.
+ * A single failure (e.g., size limit) will only fail that specific calculation,
+ * allowing others in the same pass/date to succeed and be recorded.
  */

 const { ComputationController } = require('../controllers/computation_controller');
 const { batchStoreSchemas } = require('../utils/schema_capture');
 const { normalizeName, commitBatchInChunks } = require('../utils/utils');
-const {
-  getPortfolioPartRefs, loadDailyInsights, loadDailySocialPostInsights,
+const {
+  getPortfolioPartRefs, loadDailyInsights, loadDailySocialPostInsights,
   getHistoryPartRefs, streamPortfolioData, streamHistoryData,
   getRelevantShardRefs, loadDataByRefs
 } = require('../utils/data_loader');

-
-
-
-
-  TimeSeries, priceExtractor
+const {
+  DataExtractor, HistoryExtractor, MathPrimitives, Aggregators,
+  Validators, SCHEMAS, SignalPrimitives, DistributionAnalytics,
+  TimeSeries, priceExtractor
 } = require('../layers/math_primitives.js');

-const pLimit = require('p-limit');
+const pLimit = require('p-limit');

 /**
  * Groups calculations from a manifest by their 'pass' property.
@@ -30,26 +29,25 @@ function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[ca

 /**
  * --- PASSIVE DATA VALIDATION ---
- * Scans a result set for suspicious patterns (e.g., a field is NULL for 100% of tickers).
  */
-function validateResultPatterns(logger, calcName, results, category) {
+function validateResultPatterns(logger, calcName, results, category) {
   if (category === 'speculator' || category === 'speculators') return;

-  const tickers
+  const tickers = Object.keys(results);
   const totalItems = tickers.length;
-
-  if (totalItems < 5) return;
+
+  if (totalItems < 5) return;

   const sampleTicker = tickers.find(t => results[t] && typeof results[t] === 'object');
   if (!sampleTicker) return;
-
+
   const keys = Object.keys(results[sampleTicker]);
-
+
   keys.forEach(key => {
     if (key.startsWith('_')) return;

-    let nullCount
-    let nanCount
+    let nullCount = 0;
+    let nanCount = 0;
     let undefinedCount = 0;

     for (const t of tickers) {
@@ -60,28 +58,25 @@ function validateResultPatterns(logger, calcName, results, category) {
     }

     if (nanCount === totalItems) {
-      logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is NaN for 100% of ${totalItems} items
+      logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is NaN for 100% of ${totalItems} items.`);
     } else if (undefinedCount === totalItems) {
-      logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is UNDEFINED for 100% of ${totalItems} items
-    }
+      logger.log('ERROR', `[DataQuality] Calc '${calcName}' field '${key}' is UNDEFINED for 100% of ${totalItems} items.`);
+    }
     else if (nullCount > (totalItems * 0.9)) {
-      logger.log('WARN', `[DataQuality] Calc '${calcName}' field '${key}' is NULL for ${nullCount}/${totalItems} items
+      logger.log('WARN', `[DataQuality] Calc '${calcName}' field '${key}' is NULL for ${nullCount}/${totalItems} items.`);
     }
   });
 }

-/**
- * Checks if all root data dependencies for a given calculation are met.
- */
 function checkRootDependencies(calcManifest, rootDataStatus) {
   const missing = [];
   if (!calcManifest.rootDataDependencies) return { canRun: true, missing };
   for (const dep of calcManifest.rootDataDependencies) {
-    if
-    else if (dep === 'insights'
-    else if (dep === 'social'
-    else if (dep === 'history'
-    else if (dep === 'price'
+    if      (dep === 'portfolio' && !rootDataStatus.hasPortfolio) missing.push('portfolio');
+    else if (dep === 'insights'  && !rootDataStatus.hasInsights)  missing.push('insights');
+    else if (dep === 'social'    && !rootDataStatus.hasSocial)    missing.push('social');
+    else if (dep === 'history'   && !rootDataStatus.hasHistory)   missing.push('history');
+    else if (dep === 'price'     && !rootDataStatus.hasPrices)    missing.push('price');
   }
   return { canRun: missing.length === 0, missing };
 }
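The restored checkRootDependencies branches read directly off the status flags produced by checkRootDataAvailability in the next hunk. A usage sketch with invented values:

```js
// Inputs mirror the shapes in the hunk above; the values are illustrative.
const status = { hasPortfolio: true, hasInsights: false, hasSocial: true, hasHistory: true, hasPrices: true };
const calc = { rootDataDependencies: ['portfolio', 'insights'] };
const { canRun, missing } = checkRootDependencies(calc, status);
console.log(canRun, missing); // false [ 'insights' ]
```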
@@ -89,30 +84,30 @@ function checkRootDependencies(calcManifest, rootDataStatus) {

 async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
   const { logger } = dependencies;
   const dateToProcess = new Date(dateStr + 'T00:00:00Z');
-  let portfolioRefs
-  let hasPortfolio
-  let insightsData
+  let portfolioRefs = [], historyRefs = [];
+  let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false, hasPrices = false;
+  let insightsData = null, socialData = null;

   try {
     const tasks = [];
-    if (dateToProcess >= earliestDates.portfolio) tasks.push(getPortfolioPartRefs(config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio
-    if (dateToProcess >= earliestDates.insights) tasks.push(loadDailyInsights(config, dependencies, dateStr).then(r => { insightsData
-    if (dateToProcess >= earliestDates.social) tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(r => { socialData
-    if (dateToProcess >= earliestDates.history) tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(r => { historyRefs
-
+    if (dateToProcess >= earliestDates.portfolio) tasks.push(getPortfolioPartRefs (config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio = !!r.length; }));
+    if (dateToProcess >= earliestDates.insights)  tasks.push(loadDailyInsights (config, dependencies, dateStr).then(r => { insightsData = r; hasInsights = !!r; }));
+    if (dateToProcess >= earliestDates.social)    tasks.push(loadDailySocialPostInsights (config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
+    if (dateToProcess >= earliestDates.history)   tasks.push(getHistoryPartRefs (config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));
+
     if (dateToProcess >= earliestDates.price) {
       tasks.push(checkPriceDataAvailability(config, dependencies).then(r => { hasPrices = r; }));
     }
-
+
     await Promise.all(tasks);
-
+
     if (!(hasPortfolio || hasInsights || hasSocial || hasHistory || hasPrices)) return null;
-
-    return {
-      portfolioRefs,
-      historyRefs,
-      todayInsights: insightsData,
-      todaySocialPostInsights: socialData,
+
+    return {
+      portfolioRefs,
+      historyRefs,
+      todayInsights: insightsData,
+      todaySocialPostInsights: socialData,
       status: { hasPortfolio, hasInsights, hasSocial, hasHistory, hasPrices }
     };

@@ -140,15 +135,15 @@ async function checkPriceDataAvailability(config, dependencies) {

 async function fetchComputationStatus(dateStr, config, { db }) {
   const collection = config.computationStatusCollection || 'computation_status';
-  const docRef
-  const snap
+  const docRef = db.collection(collection).doc(dateStr);
+  const snap = await docRef.get();
   return snap.exists ? snap.data() : {};
 }

 async function fetchGlobalComputationStatus(config, { db }) {
   const collection = config.computationStatusCollection || 'computation_status';
-  const docRef
-  const snap
+  const docRef = db.collection(collection).doc('global_status');
+  const snap = await docRef.get();
   return snap.exists ? snap.data() : {};
 }

@@ -156,13 +151,13 @@ async function updateComputationStatus(dateStr, updates, config, { db }) {
   if (!updates || Object.keys(updates).length === 0) return;
   const collection = config.computationStatusCollection || 'computation_status';
   const docRef = db.collection(collection).doc(dateStr);
-  await docRef.set(updates, { merge: true });
+  await docRef.set(updates, { merge: true });
 }

 async function updateGlobalComputationStatus(updatesByDate, config, { db }) {
   if (!updatesByDate || Object.keys(updatesByDate).length === 0) return;
   const collection = config.computationStatusCollection || 'computation_status';
-  const docRef
+  const docRef = db.collection(collection).doc('global_status');
   const flattenUpdates = {};
   for (const [date, statuses] of Object.entries(updatesByDate)) {
     for (const [calc, status] of Object.entries(statuses)) {
@@ -172,14 +167,14 @@ async function updateGlobalComputationStatus(updatesByDate, config, { db }) {
   try {
     await docRef.update(flattenUpdates);
   } catch (err) {
-    if (err.code === 5) {
-
-
-
-
-
+    if (err.code === 5) {
+      const deepObj = {};
+      for (const [date, statuses] of Object.entries(updatesByDate)) {
+        deepObj[date] = statuses;
+      }
+      await docRef.set(deepObj, { merge: true });
     } else {
-
+      throw err;
     }
   }
 }
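For context on the restored catch branch: Firestore surfaces gRPC status code 5 (NOT_FOUND) when update() targets a document that does not exist, so the fallback set(..., { merge: true }) creates the global_status document on first write. The same "update or create" pattern in isolation:

```js
// Sketch of the update-or-create fallback (docRef assumed to be a Firestore DocumentReference).
async function upsert(docRef, updates) {
  try {
    await docRef.update(updates);                 // rejects with err.code === 5 if doc is missing
  } catch (err) {
    if (err.code !== 5) throw err;                // 5 = NOT_FOUND
    await docRef.set(updates, { merge: true });   // create the document instead
  }
}
```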
@@ -188,7 +183,7 @@ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config,
   const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
   const calcsToFetch = new Set();
   for (const calc of calcsInPass) {
-    if (calc.dependencies)
+    if (calc.dependencies) { calc.dependencies.forEach(d => calcsToFetch.add(normalizeName(d))); }
     if (includeSelf && calc.isHistorical) { calcsToFetch.add(normalizeName(calc.name)); }
   }
   if (!calcsToFetch.size) return {};
@@ -197,23 +192,26 @@ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config,
   const names = [];
   for (const name of calcsToFetch) {
     const m = manifestMap.get(name);
-    if (m) {
-
-
-
+    if (m) {
+      docRefs.push(db.collection(config.resultsCollection).doc(dateStr)
+        .collection(config.resultsSubcollection).doc(m.category || 'unknown')
+        .collection(config.computationsSubcollection).doc(name));
+      names.push(name);
+    }
+  }
   if (docRefs.length) {
     const snaps = await db.getAll(...docRefs);
-    snaps.forEach((doc, i) => {
+    snaps.forEach((doc, i) => { if (doc.exists && doc.data()._completed) { fetched[names[i]] = doc.data(); } });
   }
   return fetched;
 }

 async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps) {
   const { logger } = deps;
-  const controller
-  const calcs
-  const streamingCalcs = calcs.filter(c =>
-    c.manifest.rootDataDependencies.includes('portfolio') ||
+  const controller = new ComputationController(config, deps);
+  const calcs = Object.values(state).filter(c => c && c.manifest);
+  const streamingCalcs = calcs.filter(c =>
+    c.manifest.rootDataDependencies.includes('portfolio') ||
     c.manifest.rootDataDependencies.includes('history')
   );

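The restored fetch path pairs every queued DocumentReference with its calc name so that db.getAll — a single batched read in @google-cloud/firestore — can be unpacked by index. A reduced sketch of the same idea, with illustrative collection names:

```js
// Batched point-reads: one RPC for many documents, filtered on the _completed flag.
const names = ['calcA', 'calcB']; // hypothetical computation names
const refs = names.map(n => db.collection('results').doc('2025-11-05').collection('computations').doc(n));
const snaps = await db.getAll(...refs);
const fetched = {};
snaps.forEach((doc, i) => { if (doc.exists && doc.data()._completed) fetched[names[i]] = doc.data(); });
```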
@@ -222,14 +220,14 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
   logger.log('INFO', `[${passName}] Streaming for ${streamingCalcs.length} computations...`);

   await controller.loader.loadMappings();
-  const prevDate
+  const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
   const prevDateStr = prevDate.toISOString().slice(0, 10);

-  const tP_iter
+  const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
   const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
-  const yP_iter
-  const needsTradingHistory
-  const tH_iter
+  const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
+  const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
+  const tH_iter = (needsTradingHistory && historyRefs) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;

   let yP_chunk = {};
   let tH_chunk = {};
@@ -238,14 +236,14 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
     if (yP_iter) yP_chunk = (await yP_iter.next()).value || {};
     if (tH_iter) tH_chunk = (await tH_iter.next()).value || {};

-    const promises = streamingCalcs.map(calc =>
+    const promises = streamingCalcs.map(calc =>
       controller.executor.executePerUser(
         calc,
         calc.manifest,
         dateStr,
         tP_chunk,
-        yP_chunk,
-        tH_chunk,
+        yP_chunk,
+        tH_chunk,
         fetchedDeps,
         previousFetchedDeps
       )
@@ -260,20 +258,20 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
   const logger = deps.logger;
   const fullRoot = { ...rootData };
   if (calcs.some(c => c.isHistorical)) {
-    const prev
+    const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
     const prevStr = prev.toISOString().slice(0, 10);
     fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
   }

   const state = {};
   for (const c of calcs) {
-    try {
-      const inst = new c.class();
-      inst.manifest = c;
-      state[normalizeName(c.name)] = inst;
+    try {
+      const inst = new c.class();
+      inst.manifest = c;
+      state[normalizeName(c.name)] = inst;
       logger.log('INFO', `${c.name} calculation running for ${dStr}`);
-    }
-    catch(e)
+    }
+    catch (e) { logger.log('WARN', `Failed to init ${c.name}`); }
   }

   await streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps);
@@ -282,8 +280,8 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r

 async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, previousFetchedDeps, rootData, skipStatusWrite = false) {
   const controller = new ComputationController(config, deps);
-  const dStr
-  const state
+  const dStr = date.toISOString().slice(0, 10);
+  const state = {};

   for (const mCalc of calcs) {
     try {
@@ -297,28 +295,46 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
   return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
 }

+/**
+ * --- REFACTORED: commitResults ---
+ * Commits results individually per calculation.
+ * If one calculation fails (e.g. size limit), others still succeed.
+ */
 async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
-  const
-  const
+  const successUpdates = {};
+  const schemas = [];

+  // Iterate PER CALCULATION to isolate failures
   for (const name in stateObj) {
     const calc = stateObj[name];
+    let hasData = false;
+
     try {
-      const result = await calc.getResult();
+      const result = await calc.getResult();
       if (!result) {
-        deps.logger.log('INFO', `${name}
+        deps.logger.log('INFO', `${name} for ${dStr}: Skipped (Empty Result)`);
         continue;
       }
-
-      const standardRes = {};
-      let hasData = false;

+      const standardRes = {};
+      const shardedWrites = [];
+      const calcWrites = []; // Accumulate all writes for THIS specific calculation
+
+      // 1. Separate Standard and Sharded Data
       for (const key in result) {
         if (key.startsWith('sharded_')) {
           const sData = result[key];
-
-
-
+          // sData structure: { CollectionName: { DocId: { ...data } } }
+          for (const colName in sData) {
+            const docsMap = sData[colName];
+            for (const docId in docsMap) {
+              // Support both full path or collection-relative path
+              const ref = docId.includes('/') ? deps.db.doc(docId) : deps.db.collection(colName).doc(docId);
+              shardedWrites.push({
+                ref,
+                data: { ...docsMap[docId], _completed: true }
+              });
+            }
+          }
         }
         if (Object.keys(sData).length > 0) hasData = true;
       } else {
@@ -326,70 +342,75 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
       }
     }

+      // 2. Prepare Standard Result Write
       if (Object.keys(standardRes).length) {
         validateResultPatterns(deps.logger, name, standardRes, calc.manifest.category);
-
         standardRes._completed = true;
-
-
-
-
+
+        const docRef = deps.db.collection(config.resultsCollection).doc(dStr)
+          .collection(config.resultsSubcollection).doc(calc.manifest.category)
+          .collection(config.computationsSubcollection).doc(name);
+
+        calcWrites.push({
+          ref: docRef,
          data: standardRes
        });
        hasData = true;
      }
-
+
+      // 3. Queue Schema (Safe to accumulate)
      if (calc.manifest.class.getSchema) {
        const { class: _cls, ...safeMetadata } = calc.manifest;
-        schemas.push({
-          name, category: calc.manifest.category, schema: calc.manifest.class.getSchema(), metadata: safeMetadata
+        schemas.push({
+          name, category: calc.manifest.category, schema: calc.manifest.class.getSchema(), metadata: safeMetadata
        });
      }
-
+
+      // 4. ATTEMPT COMMIT FOR THIS CALCULATION ONLY
      if (hasData) {
-
-
+        // Combine standard + sharded writes for this unit of work
+        const allWritesForCalc = [...calcWrites, ...shardedWrites];
+
+        if (allWritesForCalc.length > 0) {
+          await commitBatchInChunks(config, deps, allWritesForCalc, `${name} Results`);
+
+          // IF we get here, the commit succeeded.
+          successUpdates[name] = true;
+          deps.logger.log('INFO', `${name} for ${dStr}: \u2714 Success (Written)`);
+        } else {
+          deps.logger.log('INFO', `${name} for ${dStr}: - No Data to Write`);
+        }
      } else {
-        deps.logger.log('INFO', `${name}
+        deps.logger.log('INFO', `${name} for ${dStr}: - Empty`);
      }

-    } catch (e) {
-
-      deps.logger.log('
+    } catch (e) {
+      // CRITICAL: Catch errors here so the loop continues for other calculations
+      deps.logger.log('ERROR', `${name} for ${dStr}: \u2716 FAILED Commit: ${e.message}`);
+      // Do NOT add to successUpdates
    }
  }

-
-  if (
-  for (const col in sharded) {
-    const sWrites = [];
-    for (const id in sharded[col]) {
-      const ref = id.includes('/') ? deps.db.doc(id) : deps.db.collection(col).doc(id);
-      sWrites.push({ ref, data: { ...sharded[col][id], _completed: true } });
-    }
-    if (sWrites.length) await commitBatchInChunks(config, deps, sWrites, `${passName} Sharded ${col}`);
-  }
+  // Save Schemas (Best effort, isolated)
+  if (schemas.length) batchStoreSchemas(deps, config, schemas).catch(() => { });

+  // Update Status Document (Only for the ones that succeeded)
  if (!skipStatusWrite && Object.keys(successUpdates).length > 0) {
    await updateComputationStatus(dStr, successUpdates, config, deps);
-    deps.logger.log('INFO', `[${passName}] Updated status document for ${Object.keys(successUpdates).length} computations.`);
+    deps.logger.log('INFO', `[${passName}] Updated status document for ${Object.keys(successUpdates).length} successful computations.`);
  }
  return successUpdates;
 }

 /**
  * --- UPDATED: runBatchPriceComputation ---
- * Now supports subset/specific ticker execution via 'targetTickers'
- * OPTIMIZED: Implements concurrency for both Shard Processing and Write Commits
  */
 async function runBatchPriceComputation(config, deps, dateStrings, calcs, targetTickers = []) {
-  const { logger, db, calculationUtils } = deps;
+  const { logger, db, calculationUtils } = deps;
   const controller = new ComputationController(config, deps);
-
-
-
-
-  // 2. Resolve Shards (All or Subset)
+
+  const mappings = await controller.loader.loadMappings();
+
   let targetInstrumentIds = [];
   if (targetTickers && targetTickers.length > 0) {
     const tickerToInst = mappings.tickerToInstrument || {};
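The shape of the commitResults refactor above is worth isolating: each calculation's writes are committed inside that calculation's own try/catch, and only names that commit cleanly reach the status document. A minimal sketch of the pattern, with hypothetical names:

```js
// Per-item commit isolation: one oversized or failing batch no longer sinks the whole pass.
async function commitEach(writesByName, commitFn, logger) {
  const succeeded = {};
  for (const [name, writes] of Object.entries(writesByName)) {
    try {
      await commitFn(writes);                                      // may throw (e.g. size limit)
      succeeded[name] = true;
    } catch (e) {
      logger.log('ERROR', `${name}: commit failed: ${e.message}`); // log and keep going
    }
  }
  return succeeded;                                                // only these update the status doc
}
```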
@@ -399,22 +420,20 @@ async function runBatchPriceComputation(config, deps, dateStrings, calcs, target
       return;
     }
   }
-
+
   const allShardRefs = await getRelevantShardRefs(config, deps, targetInstrumentIds);
-
+
   if (!allShardRefs.length) {
     logger.log('WARN', '[BatchPrice] No relevant price shards found. Exiting.');
     return;
   }

-
-  const OUTER_CONCURRENCY_LIMIT = 2;
+  const OUTER_CONCURRENCY_LIMIT = 2;
   const SHARD_BATCH_SIZE = 20;
-  const WRITE_BATCH_LIMIT = 50;
+  const WRITE_BATCH_LIMIT = 50;

   logger.log('INFO', `[BatchPrice] Execution Plan: ${dateStrings.length} days, ${allShardRefs.length} shards. Concurrency: ${OUTER_CONCURRENCY_LIMIT}.`);

-  // 4. Create Chunks of Shards
   const shardChunks = [];
   for (let i = 0; i < allShardRefs.length; i += SHARD_BATCH_SIZE) {
     shardChunks.push(allShardRefs.slice(i, i + SHARD_BATCH_SIZE));
@@ -422,114 +441,104 @@ async function runBatchPriceComputation(config, deps, dateStrings, calcs, target

   const outerLimit = pLimit(OUTER_CONCURRENCY_LIMIT);

-
-
-
-
-
-
-
-
-
-
-
-
-
+  const chunkPromises = [];
+  for (let index = 0; index < shardChunks.length; index++) {
+    const shardChunkRefs = shardChunks[index];
+    chunkPromises.push(outerLimit(async () => {
+      try {
+        logger.log('INFO', `[BatchPrice] Processing chunk ${index + 1}/${shardChunks.length} (${shardChunkRefs.length} shards)...`);
+
+        const pricesData = await loadDataByRefs(config, deps, shardChunkRefs);
+
+        if (targetInstrumentIds.length > 0) {
+          const requestedSet = new Set(targetInstrumentIds);
+          for (const loadedInstrumentId in pricesData) {
+            if (!requestedSet.has(loadedInstrumentId)) {
+              delete pricesData[loadedInstrumentId];
+            }
           }
         }
-  }
-  const writes = [];
-
-  // --- CALCULATION PHASE ---
-  for (const dateStr of dateStrings) {
-    // --- FIX 2: Manually map math primitives to their alias names ---
-    // This matches the ContextBuilder logic in ComputationController
-    // and fixes the "Cannot read properties of undefined (reading 'standardDeviation')" error.
-    const context = {
-      mappings,
-      prices: { history: pricesData },
-      date: { today: dateStr },
-      math: {
-        extract: DataExtractor,
-        history: HistoryExtractor,
-        compute: MathPrimitives,
-        aggregate: Aggregators,
-        validate: Validators,
-        signals: SignalPrimitives,
-        schemas: SCHEMAS,
-        distribution : DistributionAnalytics,
-        TimeSeries: TimeSeries,
-        priceExtractor : priceExtractor
-      }
-    };
-
-    for (const calcManifest of calcs) {
-      try {
-        // --- LOGGING FIX: Log start of calculation ---
-        logger.log('INFO', `[BatchPrice] >> Running ${calcManifest.name} for ${dateStr}...`);
-
-        const instance = new calcManifest.class();
-        await instance.process(context);
-        const result = await instance.getResult();
-
-        let hasContent = false;
-        if (result && Object.keys(result).length > 0) {
-          let dataToWrite = result;
-          if (result.by_instrument) dataToWrite = result.by_instrument;
-
-          if (Object.keys(dataToWrite).length > 0) {
-            hasContent = true;
-            const docRef = db.collection(config.resultsCollection).doc(dateStr)
-              .collection(config.resultsSubcollection).doc(calcManifest.category)
-              .collection(config.computationsSubcollection).doc(normalizeName(calcManifest.name));
-
-            writes.push({
-              ref: docRef,
-              data: { ...dataToWrite, _completed: true },
-              options: { merge: true }
-            });
-          }
-        }

-
-
-
-
-
+        // We now accumulate writes per calc to allow partial success, though batching optimization is tricky here.
+        // For safety, let's keep the existing structure but wrap individual calc processing in try/catch
+        // inside the write phase if possible.
+        // However, runBatchPrice is optimized for BULK throughput.
+        // To prevent total failure, we will use a safe array.
+        const writes = [];
+
+        for (const dateStr of dateStrings) {
+          const context = {
+            mappings,
+            prices: { history: pricesData },
+            date: { today: dateStr },
+            math: {
+              extract: DataExtractor,
+              history: HistoryExtractor,
+              compute: MathPrimitives,
+              aggregate: Aggregators,
+              validate: Validators,
+              signals: SignalPrimitives,
+              schemas: SCHEMAS,
+              distribution: DistributionAnalytics,
+              TimeSeries: TimeSeries,
+              priceExtractor: priceExtractor
+            }
+          };
+
+          for (const calcManifest of calcs) {
+            try {
+              // logger.log('INFO', `[BatchPrice] >> Running ${calcManifest.name} for ${dateStr}...`); // Verbose
+              const instance = new calcManifest.class();
+              await instance.process(context);
+              const result = await instance.getResult();
+
+              if (result && Object.keys(result).length > 0) {
+                let dataToWrite = result;
+                if (result.by_instrument) dataToWrite = result.by_instrument;
+
+                if (Object.keys(dataToWrite).length > 0) {
+                  const docRef = db.collection(config.resultsCollection).doc(dateStr)
+                    .collection(config.resultsSubcollection).doc(calcManifest.category)
+                    .collection(config.computationsSubcollection).doc(normalizeName(calcManifest.name));
+
+                  writes.push({
+                    ref: docRef,
+                    data: { ...dataToWrite, _completed: true },
+                    options: { merge: true }
+                  });
+                }
+              }
+            } catch (err) {
+              logger.log('ERROR', `[BatchPrice] \u2716 Failed ${calcManifest.name} for ${dateStr}: ${err.message}`);
             }
           }
-        }
-
-      } catch (err) {
-        // --- LOGGING FIX: Explicit failure log ---
-        logger.log('ERROR', `[BatchPrice] \u2716 Failed ${calcManifest.name} for ${dateStr}: ${err.message}`);
-      }
-    }
-  }
-
-  // --- PARALLEL COMMIT PHASE ---
-  if (writes.length > 0) {
-    const commitBatches = [];
-    for (let i = 0; i < writes.length; i += WRITE_BATCH_LIMIT) {
-      commitBatches.push(writes.slice(i, i + WRITE_BATCH_LIMIT));
-    }
-
-
-
-
-        batchWrites.forEach(w => batch.set(w.ref, w.data, w.options));
-
-        try {
-          await calculationUtils.withRetry(() => batch.commit(), `BatchPrice-C${index}-B${bIndex}`);
-        } catch (commitErr) {
-          logger.log('ERROR', `[BatchPrice] Commit failed for Chunk ${index} Batch ${bIndex}.`, { error: commitErr.message });
+        if (writes.length > 0) {
+          const commitBatches = [];
+          for (let i = 0; i < writes.length; i += WRITE_BATCH_LIMIT) {
+            commitBatches.push(writes.slice(i, i + WRITE_BATCH_LIMIT));
          }
-      })));
-  }
-
-
-
-
+
+          const commitLimit = pLimit(10);
+
+          await Promise.all(commitBatches.map((batchWrites, bIndex) => commitLimit(async () => {
+            const batch = db.batch();
+            batchWrites.forEach(w => batch.set(w.ref, w.data, w.options));
+
+            try {
+              await calculationUtils.withRetry(() => batch.commit(), `BatchPrice-C${index}-B${bIndex}`);
+            } catch (commitErr) {
+              logger.log('ERROR', `[BatchPrice] Commit failed for Chunk ${index} Batch ${bIndex}.`, { error: commitErr.message });
+            }
+          })));
+        }
+
+      } catch (chunkErr) {
+        logger.log('ERROR', `[BatchPrice] Fatal error processing Chunk ${index}.`, { error: chunkErr.message });
+      }
+    }));
+  }

   await Promise.all(chunkPromises);
   logger.log('INFO', '[BatchPrice] Optimization pass complete.');
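The control flow above is a two-level p-limit fan-out: an outer limit of 2 bounds how many shard chunks are loaded and computed at once, and an inner limit of 10 bounds concurrent batch commits within each chunk. The skeleton, stripped of the domain logic (names illustrative):

```js
const pLimit = require('p-limit');

async function processChunks(chunks, buildBatches) {
  const outer = pLimit(2);                        // at most 2 chunks in flight
  await Promise.all(chunks.map((chunk, i) => outer(async () => {
    const batches = await buildBatches(chunk, i); // load shards, run calcs, slice writes
    const inner = pLimit(10);                     // at most 10 commits per chunk
    await Promise.all(batches.map(b => inner(() => b.commit())));
  })));
}
```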
@@ -541,9 +550,9 @@ module.exports = {
   checkRootDataAvailability,
   fetchExistingResults,
   fetchComputationStatus,
-  fetchGlobalComputationStatus,
-  updateComputationStatus,
-  updateGlobalComputationStatus,
+  fetchGlobalComputationStatus,
+  updateComputationStatus,
+  updateGlobalComputationStatus,
   runStandardComputationPass,
   runMetaComputationPass,
   runBatchPriceComputation
--- a/package/functions/computation-system/layers/math_primitives.js
+++ b/package/functions/computation-system/layers/math_primitives.js
@@ -95,8 +95,8 @@ class DataExtractor { // For generic access of data types
  * Extract Current Equity Value %.
  * Schema: 'Value' is the current value as a % of total portfolio equity.
  */
-  static getPositionValuePct(position) {
-    return position ? (position.Value || 0) : 0;
+  static getPositionValuePct(position) {
+    return position ? (position.Value || 0) : 0;
   }

 /**
@@ -262,39 +262,108 @@ class priceExtractor {
 }

 class HistoryExtractor {
-  // --- Schema Accessor (
+  // --- Schema Accessor (REFACTORED for Granular API) ---
   /**
    * Extracts the daily history snapshot from the User object.
-   *
+   * Returns the raw granular positions list.
    */
   static getDailyHistory(user) {
+    // The new API returns { PublicHistoryPositions: [...] }
     return user?.history?.today || null;
   }

-  // --- Data Extractors ---
+  // --- Data Extractors (ADAPTER PATTERN) ---
+  /**
+   * Adapts granular trade history into the legacy 'TradedAssets' format.
+   * Groups trades by InstrumentID and calculates average holding time.
+   */
   static getTradedAssets(historyDoc) {
-
-    return
+    const trades = historyDoc?.PublicHistoryPositions || [];
+    if (!trades.length) return [];
+
+    // Group by InstrumentID
+    const assetsMap = new Map();
+
+    for (const t of trades) {
+      const instId = t.InstrumentID;
+      if (!instId) continue;
+
+      if (!assetsMap.has(instId)) {
+        assetsMap.set(instId, {
+          instrumentId: instId,
+          totalDuration: 0,
+          count: 0
+        });
+      }
+
+      const asset = assetsMap.get(instId);
+
+      // Calculate Duration in Minutes
+      const open = new Date(t.OpenDateTime);
+      const close = new Date(t.CloseDateTime);
+      const durationMins = (close - open) / 60000; // ms -> min
+
+      if (durationMins > 0) {
+        asset.totalDuration += durationMins;
+        asset.count++;
+      }
+    }
+
+    // Convert Map to Array format expected by existing calculations
+    // (Returns objects with .instrumentId and .avgHoldingTimeInMinutes)
+    return Array.from(assetsMap.values()).map(a => ({
+      instrumentId: a.instrumentId,
+      avgHoldingTimeInMinutes: a.count > 0 ? (a.totalDuration / a.count) : 0
+    }));
   }

   static getInstrumentId(asset) {
     return asset ? asset.instrumentId : null;
   }

-  static getAvgHoldingTimeMinutes(asset) {
+  static getAvgHoldingTimeMinutes(asset) {
     return asset ? (asset.avgHoldingTimeInMinutes || 0) : 0;
   }

-
-
-
+  /**
+   * Adapts granular trade history into the legacy 'Summary' format.
+   * Calculates WinRatio, AvgProfit, etc. on the fly from the raw list.
+   */
+  static getSummary(historyDoc) {
+    const trades = historyDoc?.PublicHistoryPositions || [];
+    if (!trades.length) return null;
+
+    let totalTrades = trades.length;
+    let wins = 0;
+    let totalProf = 0;
+    let totalLoss = 0;
+    let profCount = 0;
+    let lossCount = 0;
+    let totalDur = 0;
+
+    for (const t of trades) {
+      // P&L Stats (NetProfit is %)
+      if (t.NetProfit > 0) {
+        wins++;
+        totalProf += t.NetProfit;
+        profCount++;
+      } else if (t.NetProfit < 0) {
+        totalLoss += t.NetProfit;
+        lossCount++;
+      }
+
+      // Duration Stats
+      const open = new Date(t.OpenDateTime);
+      const close = new Date(t.CloseDateTime);
+      totalDur += (close - open) / 60000; // ms -> min
+    }

-    return {
-      totalTrades:
-      winRatio:
-      avgProfitPct:
-      avgLossPct:
-      avgHoldingTimeInMinutes:
+    return {
+      totalTrades: totalTrades,
+      winRatio: totalTrades > 0 ? (wins / totalTrades) * 100 : 0,
+      avgProfitPct: profCount > 0 ? totalProf / profCount : 0,
+      avgLossPct: lossCount > 0 ? totalLoss / lossCount : 0,
+      avgHoldingTimeInMinutes: totalTrades > 0 ? totalDur / totalTrades : 0
     };
   }
 }
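A worked example of the two adapters, using the field names from the hunk (PublicHistoryPositions, InstrumentID, OpenDateTime, CloseDateTime, NetProfit) with invented values:

```js
const historyDoc = {
  PublicHistoryPositions: [
    { InstrumentID: 1001, OpenDateTime: '2025-11-01T10:00:00Z', CloseDateTime: '2025-11-01T16:00:00Z', NetProfit: 2.5 },
    { InstrumentID: 1001, OpenDateTime: '2025-11-02T10:00:00Z', CloseDateTime: '2025-11-02T12:00:00Z', NetProfit: -1.0 }
  ]
};

HistoryExtractor.getTradedAssets(historyDoc);
// -> [ { instrumentId: 1001, avgHoldingTimeInMinutes: 240 } ]   (360 min and 120 min, averaged)

HistoryExtractor.getSummary(historyDoc);
// -> { totalTrades: 2, winRatio: 50, avgProfitPct: 2.5, avgLossPct: -1, avgHoldingTimeInMinutes: 240 }
```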
@@ -354,7 +423,7 @@ class SignalPrimitives {

   static getPreviousState(previousComputed, calcName, ticker, fieldName = null) { // This is used for either fetching computations listed in getdependencies() OR self-history
     if (!previousComputed || !previousComputed[calcName]) return null; // Using this for self-history DOES NOT cause a circular dependency because we assign a special rule in orchestration_helpers
-    // Which handles the self-reference
+    // Which handles the self-reference.
     const tickerData = previousComputed[calcName][ticker];
     if (!tickerData) return null;

@@ -374,7 +443,7 @@ class MathPrimitives {
   static median(values) {
     if (!values || !values.length) return 0;
     const sorted = [...values].sort((a, b) => a - b);
-    const mid
+    const mid = Math.floor(sorted.length / 2);
     return sorted.length % 2 === 0
       ? (sorted[mid - 1] + sorted[mid]) / 2
       : sorted[mid];
@@ -382,7 +451,7 @@ class MathPrimitives {

   static standardDeviation(values) {
     if (!values || !values.length) return 0;
-    const avg
+    const avg = this.average(values);
     const squareDiffs = values.map(val => Math.pow((val || 0) - avg, 2));
     return Math.sqrt(this.average(squareDiffs));
   }
@@ -398,7 +467,7 @@ class MathPrimitives {
  * Where:
  *   b = ln(Barrier/Price)
  *   v = drift - 0.5 * volatility^2
- *
+ * @param {number} currentPrice - The current price of the asset
  * @param {number} barrierPrice - The target price (SL or TP)
  * @param {number} volatility - Annualized volatility (e.g., 0.40 for 40%)
  * @param {number} days - Number of days to forecast (e.g., 3)
@@ -408,9 +477,9 @@ class MathPrimitives {
   static calculateHitProbability(currentPrice, barrierPrice, volatility, days, drift = 0) { // https://www.ma.ic.ac.uk/~bin06/M3A22/m3f22chVII.pdf
     if (currentPrice <= 0 || barrierPrice <= 0 || volatility <= 0 || days <= 0) return 0;

-    const t
+    const t = days / 365.0; // Convert days to years
     const sigma = volatility;
-    const mu
+    const mu = drift;

     // The barrier in log-space
     const b = Math.log(barrierPrice / currentPrice);
@@ -418,7 +487,7 @@ class MathPrimitives {
     // Adjusted drift (nu)
     const nu = mu - 0.5 * Math.pow(sigma, 2);

-    const sqrtT
+    const sqrtT = Math.sqrt(t);
     const sigmaSqrtT = sigma * sqrtT;

     // Helper for Standard Normal CDF (Φ)
@@ -448,8 +517,7 @@ class MathPrimitives {

     // Calculate Probability
     // Note: If nu is 0, the second term simplifies significantly, but we keep full form.
-    const probability = normCDF(( -Math.abs(b) - nu * t ) / sigmaSqrtT) +
-      Math.exp((2 * nu * Math.abs(b)) / (sigma * sigma)) * normCDF(( -Math.abs(b) + nu * t ) / sigmaSqrtT);
+    const probability = normCDF(( -Math.abs(b) - nu * t ) / sigmaSqrtT) + Math.exp((2 * nu * Math.abs(b)) / (sigma * sigma)) * normCDF(( -Math.abs(b) + nu * t ) / sigmaSqrtT);

     return Math.min(Math.max(probability, 0), 1);
   }
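Written out, the joined one-liner is the closed-form first-passage (barrier-hit) probability that the doc comment above defines, clamped to [0, 1] on the following line:

$$
P(\text{hit}) \;=\; \Phi\!\left(\frac{-\lvert b\rvert - \nu t}{\sigma\sqrt{t}}\right) \;+\; e^{\,2\nu\lvert b\rvert/\sigma^{2}}\,\Phi\!\left(\frac{-\lvert b\rvert + \nu t}{\sigma\sqrt{t}}\right),
\qquad b = \ln\!\frac{B}{S_{0}},\quad \nu = \mu - \tfrac{1}{2}\sigma^{2},
$$

where $\Phi$ is the standard normal CDF implemented by the local normCDF helper, $t$ is the horizon in years, and $\sigma$ the annualized volatility.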
@@ -468,11 +536,11 @@ class MathPrimitives {
   static simulateGBM(currentPrice, volatility, days, simulations = 1000, drift = 0) {
     if (currentPrice <= 0 || volatility <= 0 || days <= 0) return new Float32Array(0);

-    const t
-    const sigma
-    const mu
+    const t = days / 365.0;
+    const sigma = volatility;
+    const mu = drift;
     const driftTerm = (mu - 0.5 * sigma * sigma) * t;
-    const volTerm
+    const volTerm = sigma * Math.sqrt(t);

     // Use Float32Array for memory efficiency with large simulation counts
     const results = new Float32Array(simulations);
@@ -481,7 +549,7 @@ class MathPrimitives {
       // Box-Muller transform for efficient standard normal distribution generation
       const u1 = Math.random();
       const u2 = Math.random();
-      const z
+      const z = Math.sqrt(-2.0 * Math.log(u1)) * Math.cos(2.0 * Math.PI * u2);

       // GBM Formula: St = S0 * exp((mu - 0.5*sigma^2)t + sigma*Wt)
       results[i] = currentPrice * Math.exp(driftTerm + volTerm * z);
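Usage sketch for the restored simulateGBM: draw terminal prices and read off an empirical band (the quantile helper is ours, not the package's):

```js
// 5%–95% terminal-price band for a 100-unit asset, 40% annualized vol, 30-day horizon.
const paths = MathPrimitives.simulateGBM(100, 0.40, 30, 10000);
const sorted = Float32Array.from(paths).sort();   // typed-array sort is numeric ascending
const q = p => sorted[Math.floor(p * (sorted.length - 1))];
console.log(q(0.05), q(0.95));
```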
@@ -643,14 +711,14 @@ class TimeSeries {
     let sumX = 0, sumY = 0, sumXY = 0, sumX2 = 0, sumY2 = 0;

     for (let i = 0; i < n; i++) {
-      sumX
-      sumY
+      sumX += x[i];
+      sumY += y[i];
       sumXY += x[i] * y[i];
       sumX2 += x[i] * x[i];
       sumY2 += y[i] * y[i];
     }

-    const numerator
+    const numerator = (n * sumXY) - (sumX * sumY);
     const denominator = Math.sqrt(((n * sumX2) - (sumX * sumX)) * ((n * sumY2) - (sumY * sumY)));

     return (denominator === 0) ? 0 : numerator / denominator;
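The restored accumulators compute the textbook single-pass Pearson correlation:

$$
r \;=\; \frac{n\sum_i x_i y_i \;-\; \sum_i x_i \sum_i y_i}{\sqrt{\bigl(n\sum_i x_i^{2} - (\sum_i x_i)^{2}\bigr)\bigl(n\sum_i y_i^{2} - (\sum_i y_i)^{2}\bigr)}},
$$

with the zero-denominator guard returning 0 for constant series.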
@@ -719,8 +787,8 @@ class DistributionAnalytics {

     let sumX = 0, sumY = 0, sumXY = 0, sumXX = 0, sumYY = 0;
     for (let i = 0; i < n; i++) {
-      sumX
-      sumY
+      sumX += xValues[i];
+      sumY += yValues[i];
       sumXY += xValues[i] * yValues[i];
       sumXX += xValues[i] * xValues[i];
       sumYY += yValues[i] * yValues[i];
--- a/package/functions/task-engine/helpers/update_helpers.js
+++ b/package/functions/task-engine/helpers/update_helpers.js
@@ -1,12 +1,12 @@
 /*
  * FILENAME: CloudFunctions/NpmWrappers/bulltrackers-module/functions/task-engine/helpers/update_helpers.js
+ * (OPTIMIZED V3: Removed obsolete username lookup logic)
  * (OPTIMIZED V2: Added "Circuit Breaker" for Proxy failures)
- * (OPTIMIZED V2: Downgraded verbose per-user logs to TRACE to save costs)
  * (REFACTORED: Concurrency set to 1, added fallback and verbose logging)
  */

 const { FieldValue } = require('@google-cloud/firestore');
-const
+const crypto = require('crypto');

 // --- CIRCUIT BREAKER STATE ---
 // Persists across function invocations in the same instance.
@@ -28,7 +28,6 @@ function recordProxyOutcome(success) {
   if (success) {
     if (_consecutiveProxyFailures > 0) {
       // Optional: Only log recovery to reduce noise
-      // console.log('[ProxyCircuit] Proxy recovered.');
     }
     _consecutiveProxyFailures = 0;
   } else {
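The circuit breaker referenced here is module-level state shared across invocations on the same function instance. Its threshold value is not visible in this diff, so the sketch below assumes one:

```js
// Assumed shape of the breaker implied by shouldTryProxy / recordProxyOutcome /
// MAX_PROXY_FAILURES in this file; the threshold 5 is an assumption, not from the diff.
let _consecutiveProxyFailures = 0;
const MAX_PROXY_FAILURES = 5;

function shouldTryProxy() {
  return _consecutiveProxyFailures < MAX_PROXY_FAILURES; // circuit opens after N straight failures
}

function recordProxyOutcome(success) {
  if (success) _consecutiveProxyFailures = 0;  // any success closes the circuit
  else _consecutiveProxyFailures++;
}
```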
@@ -36,86 +35,11 @@ function recordProxyOutcome(success) {
   }
 }

-/**
- * (REFACTORED: Concurrency set to 1, added fallback and verbose logging)
- */
-async function lookupUsernames(cids, { logger, headerManager, proxyManager }, config) {
-  if (!cids?.length) return [];
-  logger.log('INFO', `[lookupUsernames] Looking up usernames for ${cids.length} CIDs.`);
-
-  // --- Set concurrency to 1 because appscript gets really fucked up with undocumented rate limits if we try spam it concurrently, a shame but that's life. DO NOT CHANGE THIS
-  const limit = pLimit(1);
-  const { USERNAME_LOOKUP_BATCH_SIZE, ETORO_API_RANKINGS_URL } = config;
-  const batches = [];
-  for (let i = 0; i < cids.length; i += USERNAME_LOOKUP_BATCH_SIZE) { batches.push(cids.slice(i, i + USERNAME_LOOKUP_BATCH_SIZE).map(Number)); }
-
-  const batchPromises = batches.map((batch, index) => limit(async () => {
-    const batchId = `batch-${index + 1}`;
-    logger.log('TRACE', `[lookupUsernames/${batchId}] Processing batch of ${batch.length} CIDs...`); // DOWNGRADED TO TRACE
-
-    const header = await headerManager.selectHeader();
-    if (!header) { logger.log('ERROR', `[lookupUsernames/${batchId}] Could not select a header.`); return null; }
-
-    let wasSuccess = false;
-    let proxyUsed = false;
-    let response;
-    const url = `${ETORO_API_RANKINGS_URL}?Period=LastTwoYears`;
-    const options = { method: 'POST', headers: { ...header.header, 'Content-Type': 'application/json' }, body: JSON.stringify(batch) };
-
-    // --- 1. Try Proxy (Circuit Breaker Protected) ---
-    if (shouldTryProxy()) {
-      try {
-        logger.log('TRACE', `[lookupUsernames/${batchId}] Attempting fetch via AppScript proxy...`);
-        response = await proxyManager.fetch(url, options);
-        if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
-
-        wasSuccess = true;
-        proxyUsed = true;
-        recordProxyOutcome(true); // Reset failure count
-        logger.log('TRACE', `[lookupUsernames/${batchId}] AppScript proxy fetch successful.`); // DOWNGRADED TO TRACE
-
-      } catch (proxyError) {
-        recordProxyOutcome(false); // Increment failure count
-        logger.log('WARN', `[lookupUsernames/${batchId}] AppScript proxy fetch FAILED. Error: ${proxyError.message}. Failures: ${_consecutiveProxyFailures}/${MAX_PROXY_FAILURES}.`, { error: proxyError.message, source: 'AppScript' });
-        // Fall through to direct...
-      }
-    } else {
-      logger.log('TRACE', `[lookupUsernames/${batchId}] Circuit Breaker Open. Skipping Proxy.`);
-    }
-
-    // --- 2. Direct Fallback ---
-    if (!wasSuccess) {
-      try {
-        response = await fetch(url, options);
-        if (!response.ok) { const errorText = await response.text(); throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); }
-        logger.log('TRACE', `[lookupUsernames/${batchId}] Direct node-fetch fallback successful.`); // DOWNGRADED TO TRACE
-        wasSuccess = true; // It worked eventually
-      } catch (fallbackError) {
-        logger.log('ERROR', `[lookupUsernames/${batchId}] Direct node-fetch fallback FAILED. Giving up on this batch.`, { error: fallbackError.message, source: 'eToro/Network' });
-        return null; // Give up on this batch
-      }
-    }
-
-    if (proxyUsed) { headerManager.updatePerformance(header.id, wasSuccess); }
-
-    try {
-      const data = await response.json(); return data;
-    } catch (parseError) {
-      logger.log('ERROR', `[lookupUsernames/${batchId}] Failed to parse JSON response.`, { error: parseError.message }); return null;
-    }
-  }));
-
-  const results = await Promise.allSettled(batchPromises);
-  const allUsers = results.filter(r => r.status === 'fulfilled' && r.value && Array.isArray(r.value)).flatMap(r => r.value);
-  logger.log('INFO', `[lookupUsernames] Found ${allUsers.length} public users out of ${cids.length}.`);
-  return allUsers;
-}
-
-
 /**
  * (REFACTORED: Fully sequential, verbose logging, node-fetch fallback)
  */
-async function handleUpdate(task, taskId, { logger, headerManager, proxyManager, db, batchManager }, config
+async function handleUpdate(task, taskId, { logger, headerManager, proxyManager, db, batchManager }, config) {
+  // Note: 'username' param removed from signature as it is no longer needed.
   const { userId, instruments, instrumentId, userType } = task;
   const instrumentsToProcess = userType === 'speculator' ? (instruments || [instrumentId]) : [undefined];
   const today = new Date().toISOString().slice(0, 10);
@@ -137,7 +61,15 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
   if (!historyHeader) {
     logger.log('WARN', `[handleUpdate/${userId}] Could not select history header. Skipping history.`);
   } else {
-
+
+    // --- REFACTOR: New Granular API Logic ---
+    // No username required. Uses CID (userId) directly.
+    const d = new Date();
+    d.setFullYear(d.getFullYear() - 1);
+    const oneYearAgoStr = d.toISOString();
+    const uuid = crypto.randomUUID ? crypto.randomUUID() : '0205aca7-bd37-4884-8455-f28ce1add2de'; // Fallback for older nodes
+
+    const historyUrl = `https://www.etoro.com/sapi/trade-data-real/history/public/credit/flat?StartTime=${oneYearAgoStr}&PageNumber=1&ItemsPerPage=30000&PublicHistoryPortfolioFilter=&CID=${userId}&client_request_id=${uuid}`;
     const options = { headers: historyHeader.header };
     let response;

@@ -278,4 +210,4 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
   logger.log('TRACE', `[handleUpdate/${userId}] Update task finished successfully.`); // DOWNGRADED TO TRACE
 }

-module.exports = { handleUpdate
+module.exports = { handleUpdate };
--- a/package/functions/task-engine/utils/task_engine_utils.js
+++ b/package/functions/task-engine/utils/task_engine_utils.js
@@ -11,7 +11,7 @@

 const { handleDiscover } = require('../helpers/discover_helpers');
 const { handleVerify } = require('../helpers/verify_helpers');
-const { handleUpdate
+const { handleUpdate } = require('../helpers/update_helpers'); // Removed lookupUsernames import
 const pLimit = require('p-limit');

 /**
@@ -27,27 +27,25 @@ function parseTaskPayload(message, logger) {
 }

 /**
- * Sorts tasks into update
+ * Sorts tasks into update and other (discover/verify).
+ * REFACTORED: Simplified. No username lookup logic needed.
  */
 async function prepareTaskBatches(tasks, batchManager, logger) {
-  const tasksToRun = [],
-
+  const tasksToRun = [], otherTasks = [];
+
   for (const task of tasks) {
-    if (task.type === 'update') {
-
-
-    }
-
-
-
-
-
-
-
-
-    const foundUsers = await lookupUsernames([...cidsToLookup.keys()], dependencies, config); // <--- PASS FULL CONFIG
-    for (const u of foundUsers) { const cid = String(u.CID), username = u.Value.UserName; batchManager.addUsernameMapUpdate(cid, username); const task = cidsToLookup.get(cid); if (task) { tasksToRun.push({ task, username }); cidsToLookup.delete(cid); } }
-    if (cidsToLookup.size) logger.log('WARN', `[TaskEngine] Could not find ${cidsToLookup.size} usernames (likely private).`, { skippedCids: [...cidsToLookup.keys()] });
+    if (task.type === 'update') {
+      // New API uses CID (userId), so we push directly to run.
+      tasksToRun.push(task);
+    } else {
+      otherTasks.push(task);
+    }
+  }
+
+  // We explicitly return empty structures for compatibility if handler_creator expects them,
+  // though ideally handler_creator should also be simplified.
+  // For now, we return compatible object structure.
+  return { tasksToRun, cidsToLookup: new Map(), otherTasks };
 }

 /**
@@ -88,11 +86,16 @@ async function executeTasks(tasksToRun, otherTasks, dependencies, config, taskId
   }

   // 2. Queue 'update' tasks
-  for (const
+  for (const task of tasksToRun) {
+    // We unpack 'task' directly now, no wrapping object {task, username}
+    // However, we must ensure backward compatibility if the array was {task, username} before.
+    // In prepareTaskBatches above, we pushed raw 'task'.
+    // So we use 'task' directly.
+
     const subTaskId = `${task.type}-${task.userType || 'unknown'}-${task.userId}`;
     allTaskPromises.push(
       limit(() =>
-        handleUpdate(task, subTaskId, dependencies, config
+        handleUpdate(task, subTaskId, dependencies, config)
           .then(() => taskCounters.update++)
           .catch(err => {
             logger.log('ERROR', `[TaskEngine/${taskId}] Error in handleUpdate for ${task.userId}`, { errorMessage: err.message });
@@ -112,4 +115,5 @@ async function executeTasks(tasksToRun, otherTasks, dependencies, config, taskId
   );
 }

-
+// Note: runUsernameLookups removed from exports
+module.exports = { parseTaskPayload, prepareTaskBatches, executeTasks };