bulltrackers-module 1.0.210 → 1.0.212
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/controllers/computation_controller.js +199 -188
- package/functions/computation-system/helpers/computation_dispatcher.js +90 -90
- package/functions/computation-system/helpers/computation_manifest_builder.js +323 -283
- package/functions/computation-system/helpers/computation_pass_runner.js +185 -157
- package/functions/computation-system/helpers/computation_worker.js +85 -85
- package/functions/computation-system/helpers/orchestration_helpers.js +542 -558
- package/functions/computation-system/layers/extractors.js +415 -0
- package/functions/computation-system/layers/index.js +40 -0
- package/functions/computation-system/layers/math_primitives.js +743 -743
- package/functions/computation-system/layers/mathematics.js +397 -0
- package/functions/computation-system/layers/profiling.js +287 -0
- package/functions/computation-system/layers/validators.js +170 -0
- package/functions/computation-system/utils/schema_capture.js +63 -63
- package/functions/computation-system/utils/utils.js +22 -1
- package/functions/task-engine/helpers/update_helpers.js +34 -12
- package/package.json +1 -1
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Validators Layer
|
|
3
|
+
* Schema validation logic adhering to schema.md production definitions.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const { SCHEMAS } = require('./profiling');
|
|
7
|
+
|
|
8
|
+
class Validators {

  /**
   * Validates a user portfolio document against the schema for its user type.
   *
   * Only the first position is inspected (spot-check) to keep validation cheap;
   * a structurally broken feed is assumed to be broken uniformly.
   *
   * @param {Object} portfolio - The portfolio object.
   * @param {string} userType - 'normal' or 'speculator'.
   * @returns {Object} { valid: boolean, errors: string[] }
   */
  static validatePortfolio(portfolio, userType) {
    if (!portfolio || typeof portfolio !== 'object') {
      return { valid: false, errors: ['Portfolio is null or invalid object'] };
    }

    const errors = [];
    const requireNumber = (value, message) => {
      if (typeof value !== 'number') errors.push(message);
    };

    if (userType === SCHEMAS.USER_TYPES.SPECULATOR) {
      // Speculator feeds expose raw public positions plus root-level aggregates.
      const positions = portfolio.PublicPositions;
      if (!Array.isArray(positions)) {
        errors.push('Speculator portfolio missing "PublicPositions" array');
      } else if (positions.length > 0) {
        const sample = positions[0];
        for (const field of ['InstrumentID', 'PositionID', 'NetProfit', 'Leverage', 'OpenRate']) {
          requireNumber(sample[field], `Speculator Position missing numeric "${field}"`);
        }
        if (typeof sample.IsBuy !== 'boolean') errors.push('Speculator Position missing boolean "IsBuy"');
      }

      // Root-level metrics (speculator specific).
      requireNumber(portfolio.NetProfit, 'Speculator portfolio missing root "NetProfit"');
      requireNumber(portfolio.Invested, 'Speculator portfolio missing root "Invested"');
    } else {
      // Any non-speculator type is treated as a normal (aggregated) portfolio.
      const positions = portfolio.AggregatedPositions;
      if (!Array.isArray(positions)) {
        errors.push('Normal portfolio missing "AggregatedPositions" array');
      } else if (positions.length > 0) {
        const sample = positions[0];
        for (const field of ['InstrumentID', 'NetProfit', 'Invested', 'Value']) {
          requireNumber(sample[field], `Normal Position missing numeric "${field}"`);
        }
        if (!sample.Direction) errors.push('Normal Position missing "Direction" string');
      }
    }

    return { valid: errors.length === 0, errors };
  }

  /**
   * Validates a trade-history document (closed positions).
   * @param {Object} historyDoc - The history document object.
   * @returns {Object} { valid: boolean, errors: string[] }
   */
  static validateTradeHistory(historyDoc) {
    if (!historyDoc || typeof historyDoc !== 'object') {
      return { valid: false, errors: ['History document is null'] };
    }

    const errors = [];
    const trades = historyDoc.PublicHistoryPositions;

    if (!Array.isArray(trades)) {
      errors.push('History missing "PublicHistoryPositions" array');
    } else if (trades.length > 0) {
      // Spot-check the first trade only.
      const sample = trades[0];
      for (const field of ['PositionID', 'CID', 'InstrumentID', 'CloseRate', 'CloseReason', 'NetProfit']) {
        if (typeof sample[field] !== 'number') errors.push(`History trade missing numeric "${field}"`);
      }
      for (const field of ['OpenDateTime', 'CloseDateTime']) {
        if (!sample[field]) errors.push(`History trade missing "${field}"`);
      }
    }

    return { valid: errors.length === 0, errors };
  }

  /**
   * Validates a social post document.
   * @param {Object} post - The social post object.
   * @returns {Object} { valid: boolean, errors: string[] }
   */
  static validateSocialPost(post) {
    if (!post) return { valid: false, errors: ['Post is null'] };

    const errors = [];
    for (const field of ['postOwnerId', 'fullText', 'createdAt']) {
      if (!post[field]) errors.push(`Post missing "${field}"`);
    }
    if (typeof post.likeCount !== 'number') errors.push('Post missing numeric "likeCount"');

    // The sentiment map must at minimum carry an overall label.
    if (typeof post.sentiment?.overallSentiment !== 'string') {
      errors.push('Post missing valid "sentiment.overallSentiment"');
    }

    return { valid: errors.length === 0, errors };
  }

  /**
   * Validates a single platform-ownership insight entry.
   * @param {Object} insight - A single insight item from the array.
   * @returns {Object} { valid: boolean, errors: string[] }
   */
  static validateInsight(insight) {
    if (!insight) return { valid: false, errors: ['Insight object is null'] };

    const errors = [];
    const numericChecks = [
      ['instrumentId', 'Insight missing numeric "instrumentId"'],
      ['total', 'Insight missing numeric "total" (Total Owners)'],
      ['percentage', 'Insight missing numeric "percentage" (Global Ownership)'],
      ['growth', 'Insight missing numeric "growth"'],
      // Buy/sell ownership split percentages.
      ['buy', 'Insight missing numeric "buy" %'],
      ['sell', 'Insight missing numeric "sell" %'],
    ];
    for (const [field, message] of numericChecks) {
      if (typeof insight[field] !== 'number') errors.push(message);
    }

    return { valid: errors.length === 0, errors };
  }

  /**
   * Validates an asset price shard for one instrument.
   * Expects a 'prices' map keyed by YYYY-MM-DD date strings with numeric values.
   * @param {Object} instrumentData - The price data map for a specific instrument ID.
   * @returns {Object} { valid: boolean, errors: string[] }
   */
  static validatePriceData(instrumentData) {
    if (!instrumentData) return { valid: false, errors: ['Price data is null'] };

    const errors = [];
    const { prices } = instrumentData;

    if (!prices || typeof prices !== 'object') {
      errors.push('Instrument price data missing "prices" map');
    } else {
      // Spot-check one entry: key format and value type.
      const [sampleKey] = Object.keys(prices);
      if (sampleKey !== undefined) {
        if (!/^\d{4}-\d{2}-\d{2}$/.test(sampleKey)) {
          errors.push(`Price map key "${sampleKey}" does not match YYYY-MM-DD format`);
        }
        if (typeof prices[sampleKey] !== 'number') {
          errors.push('Price value is not a number');
        }
      }
    }

    return { valid: errors.length === 0, errors };
  }
}
|
|
169
|
+
|
|
170
|
+
module.exports = { Validators };
|
|
@@ -1,64 +1,64 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @fileoverview Schema capture utility for computation outputs
|
|
3
|
-
* This module batches and stores pre-defined static schemas in Firestore.
|
|
4
|
-
*/
|
|
5
|
-
|
|
6
|
-
/**
|
|
7
|
-
* Batch store schemas for multiple computations.
|
|
8
|
-
* This function now expects a fully-formed schema, not sample output.
|
|
9
|
-
* It strictly stamps a 'lastUpdated' field to support stale-schema filtering in the API.
|
|
10
|
-
*
|
|
11
|
-
* @param {object} dependencies - Contains db, logger
|
|
12
|
-
* @param {object} config - Configuration object
|
|
13
|
-
* @param {Array} schemas - Array of {name, category, schema, metadata} objects
|
|
14
|
-
*/
|
|
15
|
-
async function batchStoreSchemas(dependencies, config, schemas) {
|
|
16
|
-
const { db, logger } = dependencies;
|
|
17
|
-
|
|
18
|
-
if (config.captureSchemas === false) {
|
|
19
|
-
logger.log('INFO', '[SchemaCapture] Schema capture is disabled. Skipping.');
|
|
20
|
-
return;
|
|
21
|
-
}
|
|
22
|
-
|
|
23
|
-
const batch = db.batch();
|
|
24
|
-
const schemaCollection = config.schemaCollection || 'computation_schemas';
|
|
25
|
-
let validCount = 0;
|
|
26
|
-
|
|
27
|
-
for (const item of schemas) {
|
|
28
|
-
try {
|
|
29
|
-
if (!item.schema) {
|
|
30
|
-
logger.log('WARN', `[SchemaCapture] No schema provided for ${item.name}. Skipping.`);
|
|
31
|
-
continue;
|
|
32
|
-
}
|
|
33
|
-
|
|
34
|
-
const docRef = db.collection(schemaCollection).doc(item.name);
|
|
35
|
-
|
|
36
|
-
// Critical: Always overwrite 'lastUpdated' to now
|
|
37
|
-
batch.set(docRef, {
|
|
38
|
-
computationName: item.name,
|
|
39
|
-
category: item.category,
|
|
40
|
-
schema: item.schema,
|
|
41
|
-
metadata: item.metadata || {},
|
|
42
|
-
lastUpdated: new Date()
|
|
43
|
-
}, { merge: true });
|
|
44
|
-
|
|
45
|
-
validCount++;
|
|
46
|
-
|
|
47
|
-
} catch (error) {
|
|
48
|
-
logger.log('WARN', `[SchemaCapture] Failed to add schema to batch for ${item.name}`, { errorMessage: error.message });
|
|
49
|
-
}
|
|
50
|
-
}
|
|
51
|
-
|
|
52
|
-
if (validCount > 0) {
|
|
53
|
-
try {
|
|
54
|
-
await batch.commit();
|
|
55
|
-
logger.log('INFO', `[SchemaCapture] Batch stored ${validCount} computation schemas`);
|
|
56
|
-
} catch (error) {
|
|
57
|
-
logger.log('ERROR', '[SchemaCapture] Failed to commit schema batch', { errorMessage: error.message });
|
|
58
|
-
}
|
|
59
|
-
}
|
|
60
|
-
}
|
|
61
|
-
|
|
62
|
-
module.exports = {
|
|
63
|
-
batchStoreSchemas
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Schema capture utility for computation outputs
|
|
3
|
+
* This module batches and stores pre-defined static schemas in Firestore.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
/**
 * Batch-writes pre-built computation schemas to Firestore.
 * Expects each entry to carry a fully-formed schema (not sample output) and
 * always stamps 'lastUpdated' so the API can filter out stale schemas.
 *
 * Individual write failures are logged and skipped (best-effort); a commit
 * failure is logged but never thrown.
 *
 * @param {object} dependencies - Contains db, logger
 * @param {object} config - Configuration object
 * @param {Array} schemas - Array of {name, category, schema, metadata} objects
 */
async function batchStoreSchemas(dependencies, config, schemas) {
  const { db, logger } = dependencies;

  if (config.captureSchemas === false) {
    logger.log('INFO', '[SchemaCapture] Schema capture is disabled. Skipping.');
    return;
  }

  const targetCollection = config.schemaCollection || 'computation_schemas';
  const batch = db.batch();
  let queued = 0;

  for (const entry of schemas) {
    try {
      if (!entry.schema) {
        logger.log('WARN', `[SchemaCapture] No schema provided for ${entry.name}. Skipping.`);
        continue;
      }

      // Merge-write so unrelated fields survive; 'lastUpdated' is always stamped fresh.
      batch.set(db.collection(targetCollection).doc(entry.name), {
        computationName: entry.name,
        category: entry.category,
        schema: entry.schema,
        metadata: entry.metadata || {},
        lastUpdated: new Date()
      }, { merge: true });

      queued += 1;
    } catch (error) {
      logger.log('WARN', `[SchemaCapture] Failed to add schema to batch for ${entry.name}`, { errorMessage: error.message });
    }
  }

  if (queued === 0) return;

  try {
    await batch.commit();
    logger.log('INFO', `[SchemaCapture] Batch stored ${queued} computation schemas`);
  } catch (error) {
    logger.log('ERROR', '[SchemaCapture] Failed to commit schema batch', { errorMessage: error.message });
  }
}
|
|
61
|
+
|
|
62
|
+
module.exports = {
|
|
63
|
+
batchStoreSchemas
|
|
64
64
|
};
|
|
@@ -2,13 +2,34 @@
|
|
|
2
2
|
* @fileoverview Computation system sub-pipes and utils.
|
|
3
3
|
* REFACTORED: Now stateless and receive dependencies where needed.
|
|
4
4
|
* FIXED: 'commitBatchInChunks' now respects Firestore 10MB size limit.
|
|
5
|
+
* NEW: Added 'generateCodeHash' for version control.
|
|
5
6
|
*/
|
|
6
7
|
|
|
7
8
|
const { FieldValue, FieldPath } = require('@google-cloud/firestore');
|
|
9
|
+
const crypto = require('crypto');
|
|
8
10
|
|
|
9
11
|
/** Stage 1: Normalize a calculation name to kebab-case (underscores → hyphens). */
function normalizeName(name) {
  return name.split('_').join('-');
}
|
|
11
13
|
|
|
14
|
+
/**
 * Generates a SHA-256 hash of a code string, ignoring comments and whitespace.
 * This effectively versions the logic: two sources that differ only in
 * comments or formatting hash identically.
 *
 * NOTE(review): the line-comment regex also strips `//` occurring inside
 * string literals (e.g. "https://..."), so such strings do not contribute
 * fully to the hash. Changing this would invalidate previously stored
 * hashes — confirm before altering.
 *
 * @param {string} codeString - The source code of the function/class.
 * @returns {string} The hex hash, or 'unknown' for empty/missing input.
 */
function generateCodeHash(codeString) {
  if (!codeString) return 'unknown';

  const normalized = codeString
    .replace(/\/\/.*$/gm, '')         // drop single-line comments
    .replace(/\/\*[\s\S]*?\*\//g, '') // drop multi-line comments
    .replace(/\s+/g, '');             // collapse all whitespace

  return crypto.createHash('sha256').update(normalized).digest('hex');
}
|
|
32
|
+
|
|
12
33
|
/** * Stage 2: Commit a batch of writes in chunks
|
|
13
34
|
* FIXED: Now splits batches by SIZE (9MB limit) and COUNT (450 docs)
|
|
14
35
|
* to prevent "Request payload size exceeds the limit" errors.
|
|
@@ -249,4 +270,4 @@ async function getFirstDateFromPriceCollection(config, deps) {
|
|
|
249
270
|
}
|
|
250
271
|
}
|
|
251
272
|
|
|
252
|
-
module.exports = { FieldValue, FieldPath, normalizeName, commitBatchInChunks, getExpectedDateStrings, getEarliestDataDates };
|
|
273
|
+
module.exports = { FieldValue, FieldPath, normalizeName, commitBatchInChunks, getExpectedDateStrings, getEarliestDataDates, generateCodeHash };
|
|
@@ -4,6 +4,8 @@
|
|
|
4
4
|
* (OPTIMIZED V3: Removed obsolete username lookup logic)
|
|
5
5
|
* (OPTIMIZED V2: Added "Circuit Breaker" for Proxy failures)
|
|
6
6
|
* (REFACTORED: Concurrency set to 1, added fallback and verbose logging)
|
|
7
|
+
* (FIXED: Improved logging clarity for Normal vs Speculator users)
|
|
8
|
+
* (FIXED: Final log now accurately reflects failure state)
|
|
7
9
|
*/
|
|
8
10
|
|
|
9
11
|
const { FieldValue } = require('@google-cloud/firestore');
|
|
@@ -66,6 +68,8 @@ function detectSpeculatorTargets(historyData, portfolioData) {
|
|
|
66
68
|
*/
|
|
67
69
|
async function handleUpdate(task, taskId, { logger, headerManager, proxyManager, db, batchManager, pubsub }, config) {
|
|
68
70
|
const { userId, instruments, instrumentId, userType } = task;
|
|
71
|
+
|
|
72
|
+
// Normalize the loop: Speculators get specific IDs, Normal users get [undefined] to trigger one pass.
|
|
69
73
|
const instrumentsToProcess = userType === 'speculator' ? (instruments || [instrumentId]) : [undefined];
|
|
70
74
|
const today = new Date().toISOString().slice(0, 10);
|
|
71
75
|
const portfolioBlockId = `${Math.floor(parseInt(userId) / 1000000)}M`;
|
|
@@ -75,7 +79,12 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
|
|
|
75
79
|
let capturedHistory = null;
|
|
76
80
|
let capturedPortfolio = null;
|
|
77
81
|
|
|
78
|
-
|
|
82
|
+
// Track overall success for the final log
|
|
83
|
+
let hasPortfolioErrors = false;
|
|
84
|
+
|
|
85
|
+
// FIX 1: Better Start Log
|
|
86
|
+
const scopeLog = userType === 'speculator' ? `Instruments: [${instrumentsToProcess.join(', ')}]` : 'Scope: Full Portfolio';
|
|
87
|
+
logger.log('TRACE', `[handleUpdate/${userId}] Starting update task. Type: ${userType}. ${scopeLog}`);
|
|
79
88
|
|
|
80
89
|
// --- 1. Process History Fetch (Sequentially) ---
|
|
81
90
|
let historyHeader = null;
|
|
@@ -148,13 +157,16 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
|
|
|
148
157
|
logger.log('TRACE', `[handleUpdate/${userId}] Starting ${instrumentsToProcess.length} sequential portfolio fetches.`);
|
|
149
158
|
|
|
150
159
|
for (const instId of instrumentsToProcess) {
|
|
160
|
+
// FIX 2: Define a clear scope name for logging
|
|
161
|
+
const scopeName = instId ? `Instrument ${instId}` : 'Full Portfolio';
|
|
162
|
+
|
|
151
163
|
if (isPrivate) {
|
|
152
|
-
logger.log('TRACE', `[handleUpdate/${userId}] Skipping
|
|
164
|
+
logger.log('TRACE', `[handleUpdate/${userId}] Skipping ${scopeName} (User Private).`);
|
|
153
165
|
break;
|
|
154
166
|
}
|
|
155
167
|
|
|
156
168
|
const portfolioHeader = await headerManager.selectHeader();
|
|
157
|
-
if (!portfolioHeader) { logger.log('ERROR', `[handleUpdate/${userId}] Could not select portfolio header for
|
|
169
|
+
if (!portfolioHeader) { logger.log('ERROR', `[handleUpdate/${userId}] Could not select portfolio header for ${scopeName}. Skipping.`); continue; }
|
|
158
170
|
|
|
159
171
|
const portfolioUrl = userType === 'speculator' ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instId}` : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
|
|
160
172
|
const options = { headers: portfolioHeader.header };
|
|
@@ -165,7 +177,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
|
|
|
165
177
|
// --- PROXY ATTEMPT ---
|
|
166
178
|
if (shouldTryProxy()) {
|
|
167
179
|
try {
|
|
168
|
-
logger.log('TRACE', `[handleUpdate/${userId}] Attempting
|
|
180
|
+
logger.log('TRACE', `[handleUpdate/${userId}] Attempting fetch for ${scopeName} via AppScript proxy...`);
|
|
169
181
|
response = await proxyManager.fetch(portfolioUrl, options);
|
|
170
182
|
if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
|
|
171
183
|
wasPortfolioSuccess = true;
|
|
@@ -174,7 +186,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
|
|
|
174
186
|
|
|
175
187
|
} catch (proxyError) {
|
|
176
188
|
recordProxyOutcome(false);
|
|
177
|
-
logger.log('WARN', `[handleUpdate/${userId}] Portfolio fetch via Proxy FAILED. Error: ${proxyError.message}. Failures: ${_consecutiveProxyFailures}/${MAX_PROXY_FAILURES}.`, { error: proxyError.message, source: 'AppScript' });
|
|
189
|
+
logger.log('WARN', `[handleUpdate/${userId}] Portfolio fetch for ${scopeName} via Proxy FAILED. Error: ${proxyError.message}. Failures: ${_consecutiveProxyFailures}/${MAX_PROXY_FAILURES}.`, { error: proxyError.message, source: 'AppScript' });
|
|
178
190
|
}
|
|
179
191
|
}
|
|
180
192
|
|
|
@@ -184,9 +196,9 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
|
|
|
184
196
|
response = await fetch(portfolioUrl, options);
|
|
185
197
|
if (!response.ok) { const errorText = await response.text(); throw new Error(`Direct fetch failed with status ${response.status}. Response: ${errorText.substring(0, 200)}`); }
|
|
186
198
|
wasPortfolioSuccess = true;
|
|
187
|
-
logger.log('TRACE', `[handleUpdate/${userId}] Portfolio fetch direct success.`);
|
|
199
|
+
logger.log('TRACE', `[handleUpdate/${userId}] Portfolio fetch for ${scopeName} direct success.`);
|
|
188
200
|
} catch (fallbackError) {
|
|
189
|
-
logger.log('ERROR', `[handleUpdate/${userId}] Portfolio fetch direct fallback FAILED.`, { error: fallbackError.message, source: 'eToro/Network' });
|
|
201
|
+
logger.log('ERROR', `[handleUpdate/${userId}] Portfolio fetch for ${scopeName} direct fallback FAILED.`, { error: fallbackError.message, source: 'eToro/Network' });
|
|
190
202
|
wasPortfolioSuccess = false;
|
|
191
203
|
}
|
|
192
204
|
}
|
|
@@ -200,14 +212,16 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
|
|
|
200
212
|
const portfolioJson = JSON.parse(body);
|
|
201
213
|
capturedPortfolio = portfolioJson; // Capture for detection
|
|
202
214
|
await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, portfolioJson, userType, instId);
|
|
203
|
-
logger.log('TRACE', `[handleUpdate/${userId}] Portfolio processed successfully.`);
|
|
215
|
+
logger.log('TRACE', `[handleUpdate/${userId}] Portfolio for ${scopeName} processed successfully.`);
|
|
204
216
|
|
|
205
217
|
} catch (parseError) {
|
|
206
218
|
wasPortfolioSuccess = false;
|
|
207
|
-
|
|
219
|
+
hasPortfolioErrors = true; // Mark error state
|
|
220
|
+
logger.log('ERROR', `[handleUpdate/${userId}] FAILED TO PARSE JSON RESPONSE for ${scopeName}.`, { url: portfolioUrl, parseErrorMessage: parseError.message });
|
|
208
221
|
}
|
|
209
222
|
} else {
|
|
210
|
-
|
|
223
|
+
hasPortfolioErrors = true; // Mark error state
|
|
224
|
+
logger.log('WARN', `[handleUpdate/${userId}] Portfolio fetch FAILED for ${scopeName}.`);
|
|
211
225
|
}
|
|
212
226
|
|
|
213
227
|
if (proxyUsedForPortfolio) { headerManager.updatePerformance(portfolioHeader.id, wasPortfolioSuccess); }
|
|
@@ -254,14 +268,22 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
|
|
|
254
268
|
return;
|
|
255
269
|
}
|
|
256
270
|
|
|
257
|
-
// If not private, update
|
|
271
|
+
// If not private AND no critical errors, update timestamps
|
|
272
|
+
// (We update timestamps even on partial failures for speculators to avoid infinite retry loops immediately,
|
|
273
|
+
// relying on the next scheduled run, but for Normal users, a failure usually means we should retry later.
|
|
274
|
+
// Current logic: Update timestamp to prevent immediate re-queueing.)
|
|
258
275
|
for (const instrumentId of instrumentsToProcess) {
|
|
259
276
|
await batchManager.updateUserTimestamp(userId, userType, instrumentId);
|
|
260
277
|
}
|
|
261
278
|
|
|
262
279
|
if (userType === 'speculator') { await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6)); }
|
|
263
280
|
|
|
264
|
-
|
|
281
|
+
// FIX 3: Honest Final Log
|
|
282
|
+
if (hasPortfolioErrors) {
|
|
283
|
+
logger.log('WARN', `[handleUpdate/${userId}] Update task finished with ERRORS. See logs above.`);
|
|
284
|
+
} else {
|
|
285
|
+
logger.log('TRACE', `[handleUpdate/${userId}] Update task finished successfully.`);
|
|
286
|
+
}
|
|
265
287
|
}
|
|
266
288
|
|
|
267
289
|
module.exports = { handleUpdate };
|