bulltrackers-module 1.0.194 → 1.0.196
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/controllers/computation_controller.js +4 -4
- package/functions/computation-system/helpers/computation_pass_runner.js +7 -7
- package/functions/computation-system/helpers/orchestration_helpers.js +9 -8
- package/functions/computation-system/layers/math_primitives.js +7 -1
- package/functions/computation-system/utils/data_loader.js +1 -1
- package/functions/computation-system/utils/schema_capture.js +41 -7
- package/functions/computation-system/utils/utils.js +21 -21
- package/functions/generic-api/helpers/api_helpers.js +171 -26
- package/functions/task-engine/helpers/update_helpers.js +162 -85
- package/package.json +1 -1
package/functions/computation-system/controllers/computation_controller.js:

@@ -3,8 +3,8 @@
  * V4.1: Supports Smart Shard Lookup via Wrapper
  */

-const { DataExtractor, HistoryExtractor, MathPrimitives, Aggregators, Validators, SCHEMAS, SignalPrimitives, DistributionAnalytics, TimeSeries, priceExtractor } = require('../layers/math_primitives');
-const { loadDailyInsights, loadDailySocialPostInsights, getRelevantShardRefs, getPriceShardRefs } = require('../utils/data_loader');
+const { DataExtractor, HistoryExtractor, MathPrimitives, Aggregators, Validators, SCHEMAS, SignalPrimitives, DistributionAnalytics, TimeSeries, priceExtractor } = require('../layers/math_primitives'); // TODO, SURELY THIS COULD BE DYNAMIC, DETECT WHAT IS EXPORTED?
+const { loadDailyInsights, loadDailySocialPostInsights, getRelevantShardRefs, getPriceShardRefs } = require('../utils/data_loader'); // THE IDEA IS THAT WE CAN PRODUCE NEW COMPUTATIONS IN MATH PRIMITIVES, AND NOT NEED TO UPDATE ANY OTHER CODE FILE

 class DataLoader {
   constructor(config, dependencies) {
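The new TODO comments describe collapsing these hand-maintained destructuring lists into a dynamic import of whatever math_primitives exports. A minimal sketch of that idea, not code from the package (the `mathLayer` name is illustrative):

    // Pull in every export from the math layer as one object, so a newly
    // added primitive class is usable without editing this require list.
    const mathLayer = require('../layers/math_primitives');

    // Spread the whole layer into a computation context instead of listing
    // DataExtractor, MathPrimitives, etc. one by one.
    const context = { ...mathLayer, date: { today: '2025-11-05' } };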
@@ -64,7 +64,7 @@ class DataLoader {
   }
 }

-class ContextBuilder {
+class ContextBuilder { //TODO, THE MATH EXTRACT HERE COULD SURELY BE DYNAMIC LISTED, JUST TAKE WHAT IS EXPORTED?
   static buildPerUserContext(options) {
     const { todayPortfolio, yesterdayPortfolio, todayHistory, yesterdayHistory, userId, userType, dateStr, metadata, mappings, insights, socialData, computedDependencies, previousComputedDependencies, config, deps } = options;
     return {
@@ -80,7 +80,7 @@ class ContextBuilder {
     };
   }

-  static buildMetaContext(options) {
+  static buildMetaContext(options) { //TODO, THE MATH EXTRACT HERE COULD SURELY BE DYNAMIC LISTED, JUST TAKE WHAT IS EXPORTED?
     const { dateStr, metadata, mappings, insights, socialData, prices, computedDependencies, previousComputedDependencies, config, deps } = options;
     return {
       date: { today: dateStr },
package/functions/computation-system/helpers/computation_pass_runner.js:

@@ -13,7 +13,7 @@ const {
   runStandardComputationPass,
   runMetaComputationPass,
   checkRootDependencies,
-  runBatchPriceComputation
+  runBatchPriceComputation
 } = require('./orchestration_helpers.js');

 const { getExpectedDateStrings, normalizeName } = require('../utils/utils.js');
@@ -31,10 +31,10 @@ async function runComputationPass(config, dependencies, computationManifest) {
   // Hardcoded earliest dates
   const earliestDates = {
     portfolio: new Date('2025-09-25T00:00:00Z'),
-    history:
-    social:
-    insights:
-    price:
+    history: new Date('2025-11-05T00:00:00Z'),
+    social: new Date('2025-10-30T00:00:00Z'),
+    insights: new Date('2025-08-26T00:00:00Z'),
+    price: new Date('2025-08-01T00:00:00Z') // This is slightly arbitrary, the true earliest date is 1 year ago, but there is no need to backfill that far so we just set it to be slightly earlier than the 2nd earliest computation.
   };
   earliestDates.absoluteEarliest = Object.values(earliestDates).reduce((a,b) => a < b ? a : b);

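The `reduce((a,b) => a < b ? a : b)` trick in the trailing context works because relational comparison on `Date` objects falls back to their numeric timestamps. A quick standalone check:

    const dates = [new Date('2025-09-25'), new Date('2025-08-01'), new Date('2025-11-05')];
    const earliest = dates.reduce((a, b) => (a < b ? a : b));
    console.log(earliest.toISOString().slice(0, 10)); // 2025-08-01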
@@ -169,12 +169,12 @@ async function runComputationPass(config, dependencies, computationManifest) {
     const prevDateStr = prevDate.toISOString().slice(0, 10);
     const previousResults = await fetchExistingResults(prevDateStr, calcsRunning, computationManifest, config, dependencies, true);

-    // Changed skipstatus write to false to ensure updates are recorded
+    // Changed skipstatus write to false to ensure updates are recorded, allowing for proper tracking and avoiding re-computation in future passes. NOTE : Writing true here introduces significant bugs and should be avoided.
     if (finalStandardToRun.length) {
       const updates = await runStandardComputationPass(dateToProcess, finalStandardToRun, `Pass ${passToRun} (Std)`, config, dependencies, rootData, existingResults, previousResults, false);
       Object.assign(dateUpdates, updates);
     }
-    if (finalMetaToRun.length) {
+    if (finalMetaToRun.length) { // Again, writing true here introduces significant bugs and should be avoided.
       const updates = await runMetaComputationPass(dateToProcess, finalMetaToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, previousResults, rootData, false);
       Object.assign(dateUpdates, updates);
     }
package/functions/computation-system/helpers/orchestration_helpers.js:

@@ -13,7 +13,8 @@ const {
   getHistoryPartRefs, streamPortfolioData, streamHistoryData,
   getRelevantShardRefs, loadDataByRefs
 } = require('../utils/data_loader');
-
+
+const pLimit = require('p-limit'); // TODO, THIS OUGHT TO BE INJECTED.

 /**
  * Groups calculations from a manifest by their 'pass' property.
@@ -25,7 +26,7 @@ function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[ca
  * Scans a result set for suspicious patterns (e.g., a field is NULL for 100% of tickers).
  * Logs warnings but DOES NOT block the commit.
  */
-function validateResultPatterns(logger, calcName, results, category) {
+function validateResultPatterns(logger, calcName, results, category) { // TODO, THIS COULD BE MUCH MORE SOPHISTICATED, WE WILL NEVER FORCE FAIL A COMPUTATION REGARDLESS, BUT IT COULD BE A GREAT WARNING SYSTEM, USE GLOUD LOG SINKS TO DETECT PROBLEMS BASED ON THESE LOGS
   // 1. Skip Speculators (Too sparse, nulls are expected)
   if (category === 'speculator' || category === 'speculators') return;

@@ -90,16 +91,16 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
 async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
   const { logger } = dependencies;
   const dateToProcess = new Date(dateStr + 'T00:00:00Z');
-  let portfolioRefs
-  let hasPortfolio
-  let insightsData
+  let portfolioRefs = [], historyRefs = [];
+  let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false, hasPrices = false;
+  let insightsData = null, socialData = null;

   try {
     const tasks = [];
     if (dateToProcess >= earliestDates.portfolio) tasks.push(getPortfolioPartRefs(config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio = !!r.length; }));
-    if (dateToProcess >= earliestDates.insights)
-    if (dateToProcess >= earliestDates.social)
-    if (dateToProcess >= earliestDates.history)
+    if (dateToProcess >= earliestDates.insights) tasks.push(loadDailyInsights(config, dependencies, dateStr).then(r => { insightsData = r; hasInsights = !!r; }));
+    if (dateToProcess >= earliestDates.social) tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
+    if (dateToProcess >= earliestDates.history) tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));

     if (dateToProcess >= earliestDates.price) {
       tasks.push(checkPriceDataAvailability(config, dependencies).then(r => { hasPrices = r; }));
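The pattern here pushes side-effecting `.then` tasks into an array; the outer flags are only safe to read after every task settles, which presumably happens via an `await Promise.all(tasks)` just past the lines shown. A condensed sketch of the pattern:

    async function demo() {
      let hasPortfolio = false;
      const tasks = [];
      // Each task sets an outer flag when it resolves.
      tasks.push(Promise.resolve(['shard-0']).then(r => { hasPortfolio = r.length > 0; }));
      await Promise.all(tasks); // flags are only valid after this line
      console.log(hasPortfolio); // true
    }
    demo();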
package/functions/computation-system/layers/math_primitives.js:

@@ -732,7 +732,13 @@ class DistributionAnalytics {
 }


-//
+// TODO, THIS EXPORT CAN SURELY BE DYNAMICALLY PRODUCED? TAKE WHATEVER CLASSES ARE DEFINED? THE IDEA IS THAT WE CAN SIMPLY PRODUCE NEW CLASSES AND NOT NEED TO MAKE ANY FURTHER CHANGES SO :
+// 1. DYNAMICALLY FINDS THE CLASS, EXPORTS HERE
+// 2. DYNAMICALLY IMPORTS INTO COMPUTATION CONTROLLER
+// 3. DYNAMICALLY POPULATES THE MATH VALUE OF THE COMPUTATION CONTROLLER FOR STANDARD AND META COMPUTATION CONTEXTBUILDERS
+// 4. GO AHEAD AND WRITE YOUR COMPUTATION FILES USING THE NEW MATH PRIMITIVES DEFINED AS NEEDED.
+// THIS THEN MEANS THE MATH PRIMTIVIES CAN BE UPDATED EASILY WITHOUT FURTHER CHANGES TO OTHER SYSTEMS AND AVOID COMPLICATED DEBUGGING OR FORGETFUL INTEGRATIONS.
+
 module.exports = {
   Aggregators,
   DataExtractor,
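On the export half of that TODO: plain Node.js has no reflection over "classes defined in this file", so the closest practical move is a single registry array that the export object is built from, which reduces the per-class cost to one line. A hedged sketch, not how the package currently does it:

    // Registry-style export: adding a class to the array is the only change
    // needed; module.exports is derived from it by class name.
    class Aggregators {}
    class DistributionAnalytics {}

    const primitiveClasses = [Aggregators, DistributionAnalytics];
    module.exports = Object.fromEntries(primitiveClasses.map(C => [C.name, C]));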
package/functions/computation-system/utils/data_loader.js:

@@ -177,7 +177,7 @@ async function getPriceShardRefs(config, deps) {
 async function ensurePriceShardIndex(config, deps) {
   const { db, logger } = deps;
   const metadataCol = config.metadataCollection || 'system_metadata';
-  const indexDocRef = db.collection(metadataCol).doc('price_shard_index');
+  const indexDocRef = db.collection(metadataCol).doc('price_shard_index'); // TODO. TEST THIS SHARD INDEX SYSTEM, CURRENTLY UNUSED IN COMPUTATIONS BUT IS EXTREMELY EFFICIENT AND GREAT FOR COST REDUCTION

   // 1. Try to fetch existing index
   const snap = await indexDocRef.get();
package/functions/computation-system/utils/schema_capture.js:

@@ -6,6 +6,7 @@
 /**
  * Batch store schemas for multiple computations.
  * This function now expects a fully-formed schema, not sample output.
+ * It strictly stamps a 'lastUpdated' field to support stale-schema filtering in the API.
  *
  * @param {object} dependencies - Contains db, logger
  * @param {object} config - Configuration object
@@ -13,16 +14,49 @@
  */
 async function batchStoreSchemas(dependencies, config, schemas) {
   const { db, logger } = dependencies;
-
+
+  if (config.captureSchemas === false) {
+    logger.log('INFO', '[SchemaCapture] Schema capture is disabled. Skipping.');
+    return;
+  }
+
   const batch = db.batch();
   const schemaCollection = config.schemaCollection || 'computation_schemas';
+  let validCount = 0;
+
   for (const item of schemas) {
-
-
-
-
-
-
+    try {
+      if (!item.schema) {
+        logger.log('WARN', `[SchemaCapture] No schema provided for ${item.name}. Skipping.`);
+        continue;
+      }
+
+      const docRef = db.collection(schemaCollection).doc(item.name);
+
+      // Critical: Always overwrite 'lastUpdated' to now
+      batch.set(docRef, {
+        computationName: item.name,
+        category: item.category,
+        schema: item.schema,
+        metadata: item.metadata || {},
+        lastUpdated: new Date()
+      }, { merge: true });
+
+      validCount++;
+
+    } catch (error) {
+      logger.log('WARN', `[SchemaCapture] Failed to add schema to batch for ${item.name}`, { errorMessage: error.message });
+    }
+  }
+
+  if (validCount > 0) {
+    try {
+      await batch.commit();
+      logger.log('INFO', `[SchemaCapture] Batch stored ${validCount} computation schemas`);
+    } catch (error) {
+      logger.log('ERROR', '[SchemaCapture] Failed to commit schema batch', { errorMessage: error.message });
+    }
+  }
 }

 module.exports = {
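One caveat the new code does not handle: a Firestore `WriteBatch` is capped at 500 operations, so a `schemas` array larger than that would make `batch.commit()` fail. A hedged sketch of a chunked variant (helper name is illustrative):

    // Commit schema writes in chunks of 500, the Firestore WriteBatch limit.
    async function commitInChunks(db, writes, chunkSize = 500) {
      for (let i = 0; i < writes.length; i += chunkSize) {
        const batch = db.batch();
        for (const w of writes.slice(i, i + chunkSize)) {
          batch.set(w.ref, w.data, { merge: true });
        }
        await batch.commit();
      }
    }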
package/functions/computation-system/utils/utils.js:

@@ -83,7 +83,7 @@ async function getEarliestDataDates(config, deps) {
     speculatorHistoryDate,
     insightsDate,
     socialDate,
-    priceDate
+    priceDate
   ] = await Promise.all([
     getFirstDateFromCollection(config, deps, config.normalUserPortfolioCollection),
     getFirstDateFromCollection(config, deps, config.speculatorPortfolioCollection),
@@ -91,7 +91,7 @@ async function getEarliestDataDates(config, deps) {
     getFirstDateFromCollection(config, deps, config.speculatorHistoryCollection),
     getFirstDateFromSimpleCollection(config, deps, config.insightsCollectionName),
     getFirstDateFromSimpleCollection(config, deps, config.socialInsightsCollectionName),
-    getFirstDateFromPriceCollection(config, deps) //
+    getFirstDateFromPriceCollection(config, deps) //TODO, Why no config.pricecollectionname here, looks ugly.
   ]);

   const getMinDate = (...dates) => {
@@ -101,35 +101,35 @@ async function getEarliestDataDates(config, deps) {
   };

   const earliestPortfolioDate = getMinDate(investorDate, speculatorDate);
-  const earliestHistoryDate
-  const earliestInsightsDate
-  const earliestSocialDate
-  const earliestPriceDate
-  const absoluteEarliest
+  const earliestHistoryDate = getMinDate(investorHistoryDate, speculatorHistoryDate);
+  const earliestInsightsDate = getMinDate(insightsDate);
+  const earliestSocialDate = getMinDate(socialDate);
+  const earliestPriceDate = getMinDate(priceDate);
+  const absoluteEarliest = getMinDate(
     earliestPortfolioDate,
     earliestHistoryDate,
     earliestInsightsDate,
     earliestSocialDate,
-    earliestPriceDate
+    earliestPriceDate
   );

   const fallbackDate = new Date(config.earliestComputationDate + 'T00:00:00Z' || '2023-01-01T00:00:00Z');

   const result = {
-    portfolio: earliestPortfolioDate
-    history: earliestHistoryDate
-    insights: earliestInsightsDate
-    social: earliestSocialDate
-    price: earliestPriceDate
-    absoluteEarliest: absoluteEarliest || fallbackDate
+    portfolio: earliestPortfolioDate || new Date('2999-12-31T00:00:00Z'),
+    history: earliestHistoryDate || new Date('2999-12-31T00:00:00Z'),
+    insights: earliestInsightsDate || new Date('2999-12-31T00:00:00Z'),
+    social: earliestSocialDate || new Date('2999-12-31T00:00:00Z'),
+    price: earliestPriceDate || new Date('2999-12-31T00:00:00Z'),
+    absoluteEarliest: absoluteEarliest || fallbackDate
   };

   logger.log('INFO', 'Earliest data availability map built:', {
-    portfolio:
-    history:
-    insights:
-    social:
-    price:
+    portfolio: result.portfolio.toISOString().slice(0, 10),
+    history: result.history.toISOString().slice(0, 10),
+    insights: result.insights.toISOString().slice(0, 10),
+    social: result.social.toISOString().slice(0, 10),
+    price: result.price.toISOString().slice(0, 10),
     absoluteEarliest: result.absoluteEarliest.toISOString().slice(0, 10)
   });

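One unchanged line worth flagging: in `new Date(config.earliestComputationDate + 'T00:00:00Z' || '2023-01-01T00:00:00Z')`, the `||` applies to the already-concatenated string, which is always truthy (even `'undefinedT00:00:00Z'`), so the `'2023-01-01'` fallback can never fire and a missing config value yields an Invalid Date. A corrected sketch:

    // Apply the fallback to the config value itself, not to the concatenation.
    const fallbackDate = new Date(
      (config.earliestComputationDate || '2023-01-01') + 'T00:00:00Z'
    );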
@@ -143,7 +143,7 @@ async function getEarliestDataDates(config, deps) {
 async function getFirstDateFromPriceCollection(config, deps) {
   const { db, logger, calculationUtils } = deps;
   const { withRetry } = calculationUtils;
-  const collection = config.priceCollection || 'asset_prices';
+  const collection = config.priceCollection || 'asset_prices'; // TODO This hardcode is right, but we should really be passing the config directly as other data sources do.

   try {
     logger.log('TRACE', `[getFirstDateFromPriceCollection] Querying ${collection}...`);
@@ -185,7 +185,7 @@ async function getFirstDateFromPriceCollection(config, deps) {
   });

   if (earliestDate) {
-    logger.log('TRACE', `[getFirstDateFromPriceCollection] Earliest price date: ${earliestDate.toISOString().slice(0, 10)}`);
+    logger.log('TRACE', `[getFirstDateFromPriceCollection] Earliest price date: ${earliestDate.toISOString().slice(0, 10)}`); // TODO, WTF IS THIS TS ERROR. Property 'toISOString' does not exist on type 'never'.ts(2339)
   }

   return earliestDate;
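On that `ts(2339)` TODO: this error typically appears when the checker infers a variable initialized to `null` as type `null`, cannot see the reassignment happening inside a callback (e.g., one passed to a retry wrapper), and then narrows `null` away inside the `if`, leaving `never`. A JSDoc annotation usually resolves it; a hedged sketch assuming `earliestDate` is declared near the query:

    /** @type {Date | null} */
    let earliestDate = null;
    // ...assignment inside a callback no longer collapses the type to 'never'
    if (earliestDate) {
      console.log(earliestDate.toISOString().slice(0, 10));
    }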
package/functions/generic-api/helpers/api_helpers.js:

@@ -2,7 +2,7 @@
  * @fileoverview API sub-pipes.
  * REFACTORED: Now stateless and receive dependencies.
  * NEW: getDynamicSchema now reads static schema.
- * NEW: createManifestHandler
+ * NEW: createManifestHandler filters out STALE schemas (>7 days old).
  */

 const { FieldPath } = require('@google-cloud/firestore');
@@ -15,13 +15,18 @@ const validateRequest = (query, config) => {
   if (!query.computations) return "Missing 'computations' parameter.";
   if (!query.startDate || !/^\d{4}-\d{2}-\d{2}$/.test(query.startDate)) return "Missing or invalid 'startDate'.";
   if (!query.endDate || !/^\d{4}-\d{2}-\d{2}$/.test(query.endDate)) return "Missing or invalid 'endDate'.";
+
   const start = new Date(query.startDate);
   const end = new Date(query.endDate);
+
   if (end < start) return "'endDate' must be after 'startDate'.";
+
   const maxDateRange = config.maxDateRange || 100;
   const diffTime = Math.abs(end - start);
   const diffDays = Math.ceil(diffTime / (1000 * 60 * 60 * 24)) + 1;
+
   if (diffDays > maxDateRange) return `Date range cannot exceed ${maxDateRange} days.`;
+
   return null;
 };

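The day count is inclusive of both endpoints because of the `+ 1`: startDate 2025-01-01 with endDate 2025-01-03 spans two days of milliseconds but counts as three calendar days. A quick sanity check:

    const start = new Date('2025-01-01');
    const end = new Date('2025-01-03');
    const diffDays = Math.ceil(Math.abs(end - start) / (1000 * 60 * 60 * 24)) + 1;
    console.log(diffDays); // 3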
@@ -34,8 +39,19 @@ const validateRequest = (query, config) => {
 const buildCalculationMap = (unifiedCalculations) => {
   const calcMap = {};
   for (const category in unifiedCalculations) {
-    for (const subKey in unifiedCalculations[category]) {
-
+    for (const subKey in unifiedCalculations[category]) {
+      const item = unifiedCalculations[category][subKey];
+      if (subKey === 'historical' && typeof item === 'object') {
+        for (const calcName in item) {
+          calcMap[calcName] = { category: category, class: item[calcName] };
+        }
+      } else if (typeof item === 'function') {
+        const calcName = subKey;
+        calcMap[calcName] = { category: category, class: item };
+      }
+    }
+  }
+  return calcMap;
 };

 /**
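From the restored traversal in buildCalculationMap above, the expected input is a category-keyed object whose values are either calculation classes directly or a 'historical' bucket of named classes; both flatten into name → { category, class }. A small illustration (the calculation names are hypothetical):

    class DailyPnl {}
    class LegacyPnl {}

    const unified = {
      portfolio: { DailyPnl, historical: { LegacyPnl } },
    };
    // buildCalculationMap(unified) yields:
    // { DailyPnl:  { category: 'portfolio', class: DailyPnl },
    //   LegacyPnl: { category: 'portfolio', class: LegacyPnl } }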
@@ -45,7 +61,10 @@ const getDateStringsInRange = (startDate, endDate) => {
   const dates = [];
   const current = new Date(startDate + 'T00:00:00Z');
   const end = new Date(endDate + 'T00:00:00Z');
-  while (current <= end) {
+  while (current <= end) {
+    dates.push(current.toISOString().slice(0, 10));
+    current.setUTCDate(current.getUTCDate() + 1);
+  }
   return dates;
 };

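With the loop body restored, the helper is inclusive on both ends, and `setUTCDate` handles month rollover:

    console.log(getDateStringsInRange('2025-01-30', '2025-02-01'));
    // ['2025-01-30', '2025-01-31', '2025-02-01']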
@@ -58,19 +77,42 @@ const fetchUnifiedData = async (config, dependencies, calcKeys, dateStrings, calcMap) => {
   const insightsCollection = config.unifiedInsightsCollection || 'unified_insights';
   const resultsSub = config.resultsSubcollection || 'results';
   const compsSub = config.computationsSubcollection || 'computations';
+
   try {
     for (const date of dateStrings) {
       response[date] = {};
       const docRefs = [];
       const keyPaths = [];
+
       for (const key of calcKeys) {
         const pathInfo = calcMap[key];
-        if (pathInfo) {
-
+        if (pathInfo) {
+          const docRef = db.collection(insightsCollection).doc(date)
+            .collection(resultsSub).doc(pathInfo.category)
+            .collection(compsSub).doc(key);
+          docRefs.push(docRef);
+          keyPaths.push(key);
+        } else {
+          logger.log('WARN', `[${date}] No path info found for computation key: ${key}`);
+        }
+      }
+
       if (docRefs.length === 0) continue;
+
       const snapshots = await db.getAll(...docRefs);
-      snapshots.forEach((doc, i) => {
-
+      snapshots.forEach((doc, i) => {
+        const key = keyPaths[i];
+        if (doc.exists) {
+          response[date][key] = doc.data();
+        } else {
+          response[date][key] = null;
+        }
+      });
+    }
+  } catch (error) {
+    logger.log('ERROR', 'API: Error fetching data from Firestore.', { errorMessage: error.message });
+    throw new Error('Failed to retrieve computation data.');
+  }
   return response;
 };

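The restored loop leans on `db.getAll(...docRefs)`, which the Firestore Node client provides to read many document references in a single round trip; missing documents come back as snapshots with `exists === false`, which is why the code can map them to `null` rather than throwing. A minimal standalone sketch:

    const { Firestore } = require('@google-cloud/firestore');
    const db = new Firestore();

    async function readMany(paths) {
      const snaps = await db.getAll(...paths.map(p => db.doc(p)));
      return snaps.map(s => (s.exists ? s.data() : null)); // null for absent docs
    }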
@@ -79,14 +121,30 @@ const fetchUnifiedData = async (config, dependencies, calcKeys, dateStrings, calcMap) => {
  */
 const createApiHandler = (config, dependencies, calcMap) => {
   const { logger } = dependencies;
-  return async (req, res) => {
-
+  return async (req, res) => {
+    const validationError = validateRequest(req.query, config);
+    if (validationError) {
+      logger.log('WARN', 'API Bad Request', { error: validationError, query: req.query });
+      return res.status(400).send({ status: 'error', message: validationError });
+    }
     try {
       const computationKeys = req.query.computations.split(',');
       const dateStrings = getDateStringsInRange(req.query.startDate, req.query.endDate);
       const data = await fetchUnifiedData(config, dependencies, computationKeys, dateStrings, calcMap);
-      res.status(200).send({
-
+      res.status(200).send({
+        status: 'success',
+        metadata: {
+          computations: computationKeys,
+          startDate: req.query.startDate,
+          endDate: req.query.endDate,
+        },
+        data,
+      });
+    } catch (error) {
+      logger.log('ERROR', 'API processing failed.', { errorMessage: error.message, stack: error.stack });
+      res.status(500).send({ status: 'error', message: 'An internal error occurred.' });
+    }
+  };
 };

 /**
@@ -99,21 +157,29 @@ function createStructureSnippet(data, maxKeys = 20) {
     if (typeof data === 'boolean') return true;
     return data;
   }
-  if (Array.isArray(data)) {
+  if (Array.isArray(data)) {
+    if (data.length === 0) return "<empty array>";
+    return [ createStructureSnippet(data[0], maxKeys) ];
+  }
   const newObj = {};
-  const keys = Object.keys(data)
+  const keys = Object.keys(data);
+
   if (keys.length > 0 && keys.every(k => k.match(/^[A-Z.]+$/) || k.includes('_') || k.match(/^[0-9]+$/))) {
     const exampleKey = keys[0];
     newObj[exampleKey] = createStructureSnippet(data[exampleKey], maxKeys);
     newObj["... (more items)"] = "...";
     return newObj;
   }
+
   if (keys.length > maxKeys) {
     const firstKey = keys[0] || "example_key";
     newObj[firstKey] = createStructureSnippet(data[firstKey], maxKeys);
     newObj[`... (${keys.length - 1} more keys)`] = "<object>";
   } else {
-    for (const key of keys) {
+    for (const key of keys) {
+      newObj[key] = createStructureSnippet(data[key], maxKeys);
+    }
+  }
   return newObj;
 }

@@ -130,11 +196,16 @@ async function getComputationStructure(computationName, calcMap, config, depende
   const resultsSub = config.resultsSubcollection || 'results';
   const compsSub = config.computationsSubcollection || 'computations';
   const computationQueryPath = `${category}.${computationName}`;
-  const dateQuery = db.collection(insightsCollection)
+  const dateQuery = db.collection(insightsCollection)
+    .where(computationQueryPath, '==', true)
+    .orderBy(FieldPath.documentId(), 'desc')
+    .limit(1);
   const dateSnapshot = await dateQuery.get();
   if (dateSnapshot.empty) { return { status: 'error', computation: computationName, message: `No computed data found. (Query path: ${computationQueryPath})` }; }
   const latestStoredDate = dateSnapshot.docs[0].id;
-  const docRef = db.collection(insightsCollection).doc(latestStoredDate)
+  const docRef = db.collection(insightsCollection).doc(latestStoredDate)
+    .collection(resultsSub).doc(category)
+    .collection(compsSub).doc(computationName);
   const doc = await docRef.get();
   if (!doc.exists) { return { status: 'error', computation: computationName, message: `Summary flag was present for ${latestStoredDate} but doc is missing.` }; }
   const fullData = doc.data();
@@ -152,30 +223,104 @@ async function getComputationStructure(computationName, calcMap, config, depende
  */
 async function getDynamicSchema(CalcClass, calcName) {
   if (CalcClass && typeof CalcClass.getSchema === 'function') {
-    try {
-
-
+    try {
+      return CalcClass.getSchema();
+    } catch (e) {
+      console.error(`Error running static getSchema() for ${calcName}: ${e.message}`);
+      return { "ERROR": `Failed to get static schema: ${e.message}` };
+    }
+  } else {
+    return { "ERROR": `Computation '${calcName}' does not have a static getSchema() method defined.` };
+  }
 }

 /**
- * --- NEW: MANIFEST API HANDLER ---
+ * --- NEW: MANIFEST API HANDLER (With Filtering) ---
 */
 const createManifestHandler = (config, dependencies, calcMap) => {
   const { db, logger } = dependencies;
   const schemaCollection = config.schemaCollection || 'computation_schemas';
+
   return async (req, res) => {
     try {
       logger.log('INFO', '[API /manifest] Fetching all computation schemas...');
       const snapshot = await db.collection(schemaCollection).get();
-
+
+      if (snapshot.empty) {
+        logger.log('WARN', '[API /manifest] No schemas found in collection.');
+        return res.status(404).send({ status: 'error', message: 'No computation schemas have been generated yet.' });
+      }
+
+      // --- FILTERING LOGIC ---
       const manifest = {};
-
-
+      const now = Date.now();
+      const MAX_AGE_MS = 7 * 24 * 60 * 60 * 1000; // 7 days
+
+      let activeCount = 0;
+      let staleCount = 0;
+
+      snapshot.forEach(doc => {
+        const data = doc.data();
+
+        // Safe Timestamp conversion
+        let lastUpdatedMs = 0;
+        if (data.lastUpdated && typeof data.lastUpdated.toMillis === 'function') {
+          lastUpdatedMs = data.lastUpdated.toMillis();
+        } else if (data.lastUpdated instanceof Date) {
+          lastUpdatedMs = data.lastUpdated.getTime();
+        } else {
+          // Fallback for very old records without a timestamp
+          lastUpdatedMs = 0;
+        }
+
+        // Exclude stale records
+        if ((now - lastUpdatedMs) < MAX_AGE_MS) {
+          manifest[doc.id] = {
+            category: data.category,
+            structure: data.schema,
+            metadata: data.metadata,
+            lastUpdated: data.lastUpdated
+          };
+          activeCount++;
+        } else {
+          staleCount++;
+        }
+      });
+
+      // Log filtering results
+      if (staleCount > 0) {
+        logger.log('INFO', `[API /manifest] Filtered out ${staleCount} stale schemas (older than 7 days). Returning ${activeCount} active.`);
+      }
+
+      res.status(200).send({
+        status: 'success',
+        summary: {
+          source: 'firestore_computation_schemas',
+          totalComputations: snapshot.size,
+          schemasAvailable: activeCount,
+          schemasFiltered: staleCount,
+          lastUpdated: Math.max(...Object.values(manifest).map(m =>
+            (m.lastUpdated && m.lastUpdated.toMillis) ? m.lastUpdated.toMillis() : 0
+          ), 0)
+        },
+        manifest: manifest
+      });
+
     } catch (error) {
       logger.log('ERROR', 'API /manifest handler failed.', { errorMessage: error.message, stack: error.stack });
-      res.status(500).send({ status: 'error', message: 'An internal error occurred.' });
+      res.status(500).send({ status: 'error', message: 'An internal error occurred.' });
+    }
+  };
 };


-module.exports = {
+module.exports = {
+  validateRequest,
+  buildCalculationMap,
+  fetchUnifiedData,
+  createApiHandler,
+  getComputationStructure,
+  getDynamicSchema,
+  createManifestHandler
+};
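Putting the manifest handler together, a successful response takes this shape (values illustrative):

    const exampleResponse = {
      status: 'success',
      summary: {
        source: 'firestore_computation_schemas',
        totalComputations: 42,  // every schema doc in the collection
        schemasAvailable: 40,   // updated within the last 7 days
        schemasFiltered: 2,     // stale entries excluded from the manifest
        lastUpdated: 1730764800000,
      },
      manifest: { /* name -> { category, structure, metadata, lastUpdated } */ },
    };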
package/functions/task-engine/helpers/update_helpers.js:

@@ -1,14 +1,41 @@
 /*
  * FILENAME: CloudFunctions/NpmWrappers/bulltrackers-module/functions/task-engine/helpers/update_helpers.js
- * (
- * (
- * (REFACTORED:
- * (FIXED: Corrected variable name 'instId' to 'instrumentId' in final timestamp loops)
+ * (OPTIMIZED V2: Added "Circuit Breaker" for Proxy failures)
+ * (OPTIMIZED V2: Downgraded verbose per-user logs to TRACE to save costs)
+ * (REFACTORED: Concurrency set to 1, added fallback and verbose logging)
 */

 const { FieldValue } = require('@google-cloud/firestore');
 const pLimit = require('p-limit');

+// --- CIRCUIT BREAKER STATE ---
+// Persists across function invocations in the same instance.
+// If the Proxy fails 3 times in a row, we stop trying it to save the 5s timeout cost.
+let _consecutiveProxyFailures = 0;
+const MAX_PROXY_FAILURES = 3;
+
+/**
+ * Helper to check if we should attempt the proxy
+ */
+function shouldTryProxy() {
+  return _consecutiveProxyFailures < MAX_PROXY_FAILURES;
+}
+
+/**
+ * Helper to record proxy result
+ */
+function recordProxyOutcome(success) {
+  if (success) {
+    if (_consecutiveProxyFailures > 0) {
+      // Optional: Only log recovery to reduce noise
+      // console.log('[ProxyCircuit] Proxy recovered.');
+    }
+    _consecutiveProxyFailures = 0;
+  } else {
+    _consecutiveProxyFailures++;
+  }
+}
+
 /**
  * (REFACTORED: Concurrency set to 1, added fallback and verbose logging)
 */
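Because `_consecutiveProxyFailures` lives at module scope, the breaker state survives for the lifetime of a warm Cloud Functions instance and resets on a cold start. Worth noting from the code as shown: there is no half-open timer, and since `shouldTryProxy()` gates every proxy attempt, once the breaker opens it can only close when the instance recycles. A condensed sketch of the call pattern the fetch sites below follow:

    // Intended usage around any proxied call (sketch, not package code):
    async function fetchWithBreaker(proxyFetch, directFetch) {
      if (shouldTryProxy()) {
        try {
          const res = await proxyFetch();
          recordProxyOutcome(true);   // success resets the failure counter
          return res;
        } catch (e) {
          recordProxyOutcome(false);  // the third straight failure opens the breaker
        }
      }
      return directFetch();           // breaker open, or the proxy just failed
    }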
@@ -21,35 +48,65 @@ async function lookupUsernames(cids, { logger, headerManager, proxyManager }, co
   const { USERNAME_LOOKUP_BATCH_SIZE, ETORO_API_RANKINGS_URL } = config;
   const batches = [];
   for (let i = 0; i < cids.length; i += USERNAME_LOOKUP_BATCH_SIZE) { batches.push(cids.slice(i, i + USERNAME_LOOKUP_BATCH_SIZE).map(Number)); }
-
-
+
+  const batchPromises = batches.map((batch, index) => limit(async () => {
+    const batchId = `batch-${index + 1}`;
+    logger.log('TRACE', `[lookupUsernames/${batchId}] Processing batch of ${batch.length} CIDs...`); // DOWNGRADED TO TRACE
+
     const header = await headerManager.selectHeader();
     if (!header) { logger.log('ERROR', `[lookupUsernames/${batchId}] Could not select a header.`); return null; }
+
     let wasSuccess = false;
-    let proxyUsed =
+    let proxyUsed = false;
     let response;
     const url = `${ETORO_API_RANKINGS_URL}?Period=LastTwoYears`;
     const options = { method: 'POST', headers: { ...header.header, 'Content-Type': 'application/json' }, body: JSON.stringify(batch) };
-
-
-
-
-
-
-
-
-
+
+    // --- 1. Try Proxy (Circuit Breaker Protected) ---
+    if (shouldTryProxy()) {
+      try {
+        logger.log('TRACE', `[lookupUsernames/${batchId}] Attempting fetch via AppScript proxy...`);
+        response = await proxyManager.fetch(url, options);
+        if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
+
+        wasSuccess = true;
+        proxyUsed = true;
+        recordProxyOutcome(true); // Reset failure count
+        logger.log('TRACE', `[lookupUsernames/${batchId}] AppScript proxy fetch successful.`); // DOWNGRADED TO TRACE
+
+      } catch (proxyError) {
+        recordProxyOutcome(false); // Increment failure count
+        logger.log('WARN', `[lookupUsernames/${batchId}] AppScript proxy fetch FAILED. Error: ${proxyError.message}. Failures: ${_consecutiveProxyFailures}/${MAX_PROXY_FAILURES}.`, { error: proxyError.message, source: 'AppScript' });
+        // Fall through to direct...
+      }
+    } else {
+      logger.log('TRACE', `[lookupUsernames/${batchId}] Circuit Breaker Open. Skipping Proxy.`);
+    }
+
+    // --- 2. Direct Fallback ---
+    if (!wasSuccess) {
+      try {
+        response = await fetch(url, options);
         if (!response.ok) { const errorText = await response.text(); throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); }
-        logger.log('
-
+        logger.log('TRACE', `[lookupUsernames/${batchId}] Direct node-fetch fallback successful.`); // DOWNGRADED TO TRACE
+        wasSuccess = true; // It worked eventually
+      } catch (fallbackError) {
+        logger.log('ERROR', `[lookupUsernames/${batchId}] Direct node-fetch fallback FAILED. Giving up on this batch.`, { error: fallbackError.message, source: 'eToro/Network' });
         return null; // Give up on this batch
       }
-    }
-
-
+    }
+
+    if (proxyUsed) { headerManager.updatePerformance(header.id, wasSuccess); }
+
+    try {
+      const data = await response.json(); return data;
+    } catch (parseError) {
+      logger.log('ERROR', `[lookupUsernames/${batchId}] Failed to parse JSON response.`, { error: parseError.message }); return null;
+    }
+  }));

   const results = await Promise.allSettled(batchPromises);
-  const allUsers = results
+  const allUsers = results.filter(r => r.status === 'fulfilled' && r.value && Array.isArray(r.value)).flatMap(r => r.value);
   logger.log('INFO', `[lookupUsernames] Found ${allUsers.length} public users out of ${cids.length}.`);
   return allUsers;
 }
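The restored `Promise.allSettled` + `filter` + `flatMap` combination tolerates individual batch failures: rejected or null-returning batches are dropped and the surviving arrays are concatenated. In isolation:

    async function demo() {
      const results = await Promise.allSettled([
        Promise.resolve([{ cid: 1 }, { cid: 2 }]),
        Promise.resolve(null),              // a batch that gave up
        Promise.reject(new Error('boom')),  // a batch that threw
      ]);
      const all = results
        .filter(r => r.status === 'fulfilled' && r.value && Array.isArray(r.value))
        .flatMap(r => r.value);
      console.log(all.length); // 2
    }
    demo();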
@@ -64,118 +121,140 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
   const today = new Date().toISOString().slice(0, 10);
   const portfolioBlockId = `${Math.floor(parseInt(userId) / 1000000)}M`;
   let isPrivate = false;
-
+
+  // DOWNGRADED TO TRACE
+  logger.log('TRACE', `[handleUpdate/${userId}] Starting update task. Type: ${userType}. Instruments: ${instrumentsToProcess.join(', ')}`);

   // --- 1. Process History Fetch (Sequentially) ---
   let historyHeader = null;
   let wasHistorySuccess = false;
-  let proxyUsedForHistory =
+  let proxyUsedForHistory = false;

   try {
     if (!batchManager.checkAndSetHistoryFetched(userId)) {
-      logger.log('
+      logger.log('TRACE', `[handleUpdate/${userId}] Attempting history fetch.`);
       historyHeader = await headerManager.selectHeader();
-      if (!historyHeader) {
+      if (!historyHeader) {
+        logger.log('WARN', `[handleUpdate/${userId}] Could not select history header. Skipping history.`);
       } else {
         const historyUrl = `${config.ETORO_API_USERSTATS_URL}${username}/trades/oneYearAgo?CopyAsAsset=true`;
         const options = { headers: historyHeader.header };
         let response;
-
-
-
-
-
-
-
-        proxyUsedForHistory = false;
-        try { response = await fetch(historyUrl, options);
-          if (!response.ok) { const errorText = await response.text();
-            throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); } // SHIT we failed here too
-          wasHistorySuccess = true; // Fallback succeeded, we are so smart
+
+        // --- PROXY ATTEMPT ---
+        if (shouldTryProxy()) {
+          try {
+            logger.log('TRACE', `[handleUpdate/${userId}] Attempting history fetch via AppScript proxy...`);
+            response = await proxyManager.fetch(historyUrl, options);
+            if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);

-
-
+            wasHistorySuccess = true;
+            proxyUsedForHistory = true;
+            recordProxyOutcome(true); // Reset
+
+          } catch (proxyError) {
+            recordProxyOutcome(false); // Count failure
+            logger.log('WARN', `[handleUpdate/${userId}] History fetch via AppScript proxy FAILED. Error: ${proxyError.message}. Failures: ${_consecutiveProxyFailures}/${MAX_PROXY_FAILURES}.`, { error: proxyError.message, source: 'AppScript' });
           }
         }

-
+        // --- DIRECT FALLBACK ---
+        if (!wasHistorySuccess) {
+          try {
+            response = await fetch(historyUrl, options);
+            if (!response.ok) { const errorText = await response.text(); throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); }
+            wasHistorySuccess = true;
+            logger.log('TRACE', `[handleUpdate/${userId}] History fetch direct success.`);
+          } catch (fallbackError) {
+            logger.log('ERROR', `[handleUpdate/${userId}] History fetch direct fallback FAILED.`, { error: fallbackError.message, source: 'eToro/Network' });
+            wasHistorySuccess = false;
+          }
+        }
+
+        if (wasHistorySuccess) {
           const data = await response.json();
-          await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType);
+          await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType);
+        }
       }
-    } else {
-
-
+    } else {
+      logger.log('TRACE', `[handleUpdate/${userId}] History fetch skipped (already fetched).`);
+    }
+  } catch (err) {
+    logger.log('ERROR', `[handleUpdate/${userId}] Unhandled error during history processing.`, { error: err.message }); wasHistorySuccess = false;
+  } finally {
+    if (historyHeader && proxyUsedForHistory) { headerManager.updatePerformance(historyHeader.id, wasHistorySuccess); }
+  }

   // --- 2. Process Portfolio Fetches (Sequentially) ---
-  logger.log('
+  logger.log('TRACE', `[handleUpdate/${userId}] Starting ${instrumentsToProcess.length} sequential portfolio fetches.`); // DOWNGRADED TO TRACE

   for (const instId of instrumentsToProcess) {
     if (isPrivate) {
-      logger.log('
+      logger.log('TRACE', `[handleUpdate/${userId}] Skipping remaining instruments (User Private).`);
       break;
     }

     const portfolioHeader = await headerManager.selectHeader();
-    if (!portfolioHeader) { logger.log('ERROR', `[handleUpdate/${userId}] Could not select portfolio header for instId ${instId}. Skipping this instrument.`);
-      continue;
-    }
+    if (!portfolioHeader) { logger.log('ERROR', `[handleUpdate/${userId}] Could not select portfolio header for instId ${instId}. Skipping this instrument.`); continue; }

     const portfolioUrl = userType === 'speculator' ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instId}` : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
-
     const options = { headers: portfolioHeader.header };
     let response;
     let wasPortfolioSuccess = false;
-    let proxyUsedForPortfolio =
-
-
-
-      logger.log('TRACE', `[handleUpdate/${userId}] Attempting portfolio fetch for instId ${instId} via AppScript proxy...`);
-      response = await proxyManager.fetch(portfolioUrl, options);
-      if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`); // SHIT we failed here
-      wasPortfolioSuccess = true; // Oh we are smart, worked first time.
-
-    } catch (proxyError) { // try fallback with local node fetch using GCP IP Pools
-      logger.log('WARN', `[handleUpdate/${userId}] Portfolio fetch for instId ${instId} via AppScript proxy FAILED. Error: ${proxyError.message}. Attempting direct node-fetch fallback.`, { error: proxyError.message, source: 'AppScript' });
-      proxyUsedForPortfolio = false; // We are not using Appscript proxy here as fallback is GCP based, so false
-
+    let proxyUsedForPortfolio = false;
+
+    // --- PROXY ATTEMPT ---
+    if (shouldTryProxy()) {
       try {
-
-
-
-        throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); // SHIT we failed here
-      }
-      wasPortfolioSuccess = true; // Fallback succeeded we are so smart
+        logger.log('TRACE', `[handleUpdate/${userId}] Attempting portfolio fetch via AppScript proxy...`);
+        response = await proxyManager.fetch(portfolioUrl, options);
+        if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);

+        wasPortfolioSuccess = true;
+        proxyUsedForPortfolio = true;
+        recordProxyOutcome(true); // Reset
+
+      } catch (proxyError) {
+        recordProxyOutcome(false); // Count failure
+        logger.log('WARN', `[handleUpdate/${userId}] Portfolio fetch via Proxy FAILED. Error: ${proxyError.message}. Failures: ${_consecutiveProxyFailures}/${MAX_PROXY_FAILURES}.`, { error: proxyError.message, source: 'AppScript' });
+      }
+    }
+
+    // --- DIRECT FALLBACK ---
+    if (!wasPortfolioSuccess) {
+      try {
+        response = await fetch(portfolioUrl, options);
+        if (!response.ok) { const errorText = await response.text(); throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); }
+        wasPortfolioSuccess = true;
+        logger.log('TRACE', `[handleUpdate/${userId}] Portfolio fetch direct success.`);
       } catch (fallbackError) {
-        logger.log('ERROR', `[handleUpdate/${userId}] Portfolio fetch
+        logger.log('ERROR', `[handleUpdate/${userId}] Portfolio fetch direct fallback FAILED.`, { error: fallbackError.message, source: 'eToro/Network' });
         wasPortfolioSuccess = false;
       }
     }

-    // --- 4. Process Portfolio Result
+    // --- 4. Process Portfolio Result ---
     if (wasPortfolioSuccess) {
       const body = await response.text();
-      if (body.includes("user is PRIVATE")) { isPrivate = true; logger.log('WARN', `[handleUpdate/${userId}] User is PRIVATE. Marking for removal.`);
-        break; // Stop processing more portfolios for this private user
-      }
+      if (body.includes("user is PRIVATE")) { isPrivate = true; logger.log('WARN', `[handleUpdate/${userId}] User is PRIVATE. Marking for removal.`); break; }

       try {
         const portfolioJson = JSON.parse(body);
         await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, portfolioJson, userType, instId);
-
-        } else { logger.log('INFO', `[handleUpdate/${userId}] Successfully processed full portfolio (normal user).`); } // Normal users
+        logger.log('TRACE', `[handleUpdate/${userId}] Portfolio processed successfully.`); // DOWNGRADED TO TRACE

-      } catch (parseError) {
-        wasPortfolioSuccess = false;
-        logger.log('ERROR', `[handleUpdate/${userId}] FAILED TO PARSE JSON RESPONSE
+      } catch (parseError) {
+        wasPortfolioSuccess = false;
+        logger.log('ERROR', `[handleUpdate/${userId}] FAILED TO PARSE JSON RESPONSE.`, { url: portfolioUrl, parseErrorMessage: parseError.message });
       }
-    } else {
+    } else {
+      logger.log('WARN', `[handleUpdate/${userId}] Portfolio fetch failed for instId ${instId}.`);
+    }

     if (proxyUsedForPortfolio) { headerManager.updatePerformance(portfolioHeader.id, wasPortfolioSuccess); }
   }

   // --- 5. Handle Private Users & Timestamps ---
-  // FIXED: Corrected variable naming here from 'instId' to 'instrumentId'
   if (isPrivate) {
     logger.log('WARN', `[handleUpdate/${userId}] Removing private user from updates.`);
     for (const instrumentId of instrumentsToProcess) {
@@ -190,15 +269,13 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
   }

   // If not private, update all timestamps
-  // FIXED: Corrected variable naming here from 'instId' to 'instrumentId'
   for (const instrumentId of instrumentsToProcess) {
     await batchManager.updateUserTimestamp(userId, userType, instrumentId);
   }

   if (userType === 'speculator') { await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6)); }

-  logger.log('
-  // 'finally' block for header flushing is handled by the main handler_creator.js
+  logger.log('TRACE', `[handleUpdate/${userId}] Update task finished successfully.`); // DOWNGRADED TO TRACE
 }

 module.exports = { handleUpdate, lookupUsernames };