bulltrackers-module 1.0.214 → 1.0.216

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/computation-system/utils/utils.js CHANGED
@@ -1,8 +1,5 @@
  /**
- * @fileoverview Computation system sub-pipes and utils.
- * REFACTORED: Now stateless and receive dependencies where needed.
- * FIXED: 'commitBatchInChunks' now respects Firestore 10MB size limit.
- * NEW: Added 'generateCodeHash' for version control.
+ * FILENAME: computation-system/utils/utils.js
  */

  const { FieldValue, FieldPath } = require('@google-cloud/firestore');
@@ -12,27 +9,18 @@ const crypto = require('crypto');
  function normalizeName(name) { return name.replace(/_/g, '-'); }

  /**
- * Generates a SHA-256 hash of a code string, ignoring comments and whitespace.
- * This effectively versions the logic.
- * @param {string} codeString - The source code of the function/class.
- * @returns {string} The hex hash.
+ * Generates a SHA-256 hash of a code string.
  */
  function generateCodeHash(codeString) {
  if (!codeString) return 'unknown';
-
- // 1. Remove single-line comments (//...)
  let clean = codeString.replace(/\/\/.*$/gm, '');
- // 2. Remove multi-line comments (/*...*/)
  clean = clean.replace(/\/\*[\s\S]*?\*\//g, '');
- // 3. Remove all whitespace (spaces, tabs, newlines)
  clean = clean.replace(/\s+/g, '');
-
  return crypto.createHash('sha256').update(clean).digest('hex');
  }

  /** * Stage 2: Commit a batch of writes in chunks
- * FIXED: Now splits batches by SIZE (9MB limit) and COUNT (450 docs)
- * to prevent "Request payload size exceeds the limit" errors.
+ * FIXED: Now respects write.options (e.g. { merge: false }) to allow overwrites/deletes.
  */
  async function commitBatchInChunks(config, deps, writes, operationName) {
  const { db, logger, calculationUtils } = deps;
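The trimmed generateCodeHash docblock above does not change behavior: comments and whitespace are still stripped before hashing, so cosmetic edits to a function's source yield the same hash. A usage sketch (the add function is illustrative, not from the package):

const source = function add(a, b) {
  // comments do not affect the hash
  return a + b;
}.toString();

const v1 = generateCodeHash(source);
const v2 = generateCodeHash('function add(a,b){return a+b;}');
console.log(v1 === v2); // true: same logic, same 64-char hex digest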
@@ -43,17 +31,14 @@ async function commitBatchInChunks(config, deps, writes, operationName) {
  return;
  }

- // Firestore Constraints
- const MAX_BATCH_OPS = 300; // Safety limit (Max 500)
- const MAX_BATCH_BYTES = 9 * 1024 * 1024; // 9MB Safety limit (Max 10MB)
+ const MAX_BATCH_OPS = 300;
+ const MAX_BATCH_BYTES = 9 * 1024 * 1024;

  let currentBatch = db.batch();
  let currentOpsCount = 0;
  let currentBytesEst = 0;
  let batchIndex = 1;
- let totalChunks = 0; // We don't know total chunks in advance now due to dynamic sizing

- // Helper to commit the current batch and reset
  const commitAndReset = async () => {
  if (currentOpsCount > 0) {
  try {
@@ -74,30 +59,25 @@ async function commitBatchInChunks(config, deps, writes, operationName) {
  };

  for (const write of writes) {
- // 1. Estimate Size: JSON stringify is a decent proxy for Firestore payload size
- // We handle potential circular refs or failures gracefully by assuming a minimum size
  let docSize = 100;
- try {
- if (write.data) docSize = JSON.stringify(write.data).length;
- } catch (e) { /* ignore size check error */ }
+ try { if (write.data) docSize = JSON.stringify(write.data).length; } catch (e) { }

- // 2. Warn if a SINGLE document is approaching the 1MB limit
  if (docSize > 900 * 1024) {
- logger.log('WARN', `[${operationName}] Large document detected (~${(docSize / 1024).toFixed(2)} KB). This allows few ops per batch.`);
+ logger.log('WARN', `[${operationName}] Large document detected (~${(docSize / 1024).toFixed(2)} KB).`);
  }

- // 3. Check if adding this write would overflow the batch
  if ((currentOpsCount + 1 > MAX_BATCH_OPS) || (currentBytesEst + docSize > MAX_BATCH_BYTES)) {
  await commitAndReset();
  }

- // 4. Add to batch
- currentBatch.set(write.ref, write.data, { merge: true });
+ // USE PROVIDED OPTIONS OR DEFAULT TO MERGE: TRUE
+ const options = write.options || { merge: true };
+ currentBatch.set(write.ref, write.data, options);
+
  currentOpsCount++;
  currentBytesEst += docSize;
  }

- // 5. Commit remaining
  await commitAndReset();
  }

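The behavioral change in this hunk is the per-write options pass-through: callers can now force a full overwrite with { merge: false } instead of always merging. A hypothetical call (the collection and field names are invented for illustration):

const writes = [
  // Overwrites the whole document (new behavior):
  { ref: db.collection('daily_stats').doc('2024-01-01'), data: { total: 5 }, options: { merge: false } },
  // Omitting options keeps the old default of { merge: true }:
  { ref: db.collection('daily_stats').doc('2024-01-02'), data: { total: 7 } }
];
await commitBatchInChunks(config, deps, writes, 'DailyStats');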
@@ -112,10 +92,7 @@ function getExpectedDateStrings(startDate, endDate) {
  return dateStrings;
  }

- /**
- * --- NEW HELPER ---
- * Stage 4: Get the earliest date in a *flat* collection where doc IDs are dates.
- */
+ /** Stage 4: Get the earliest date in a *flat* collection where doc IDs are dates. */
  async function getFirstDateFromSimpleCollection(config, deps, collectionName) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
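The hunk cuts off before the body of getFirstDateFromSimpleCollection, but given the "doc IDs are dates" contract, the lookup can be sketched as follows (an assumed shape, not the package's verbatim code):

const snapshot = await withRetry(
  () => db.collection(collectionName).orderBy(FieldPath.documentId()).limit(1).get(),
  `GetFirstSimpleDate(${collectionName})`
);
// 'YYYY-MM-DD' doc IDs sort lexically in date order, so the first ID is the earliest date.
return snapshot.empty ? null : new Date(snapshot.docs[0].id + 'T00:00:00Z');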
@@ -149,22 +126,10 @@ async function getFirstDateFromCollection(config, deps, collectionName) {
  return earliestDate;
  }

- /** * --- MODIFIED FUNCTION ---
- * Stage 5: Determine the earliest date from *all* source data.
- */
+ /** Stage 5: Determine the earliest date from *all* source data. */
  async function getEarliestDataDates(config, deps) {
  const { logger } = deps;
- logger.log('INFO', 'Querying for earliest date from ALL source data collections...');
-
- const [
- investorDate,
- speculatorDate,
- investorHistoryDate,
- speculatorHistoryDate,
- insightsDate,
- socialDate,
- priceDate
- ] = await Promise.all([
+ const [ investorDate, speculatorDate, investorHistoryDate, speculatorHistoryDate, insightsDate, socialDate, priceDate ] = await Promise.all([
  getFirstDateFromCollection(config, deps, config.normalUserPortfolioCollection),
  getFirstDateFromCollection(config, deps, config.speculatorPortfolioCollection),
  getFirstDateFromCollection(config, deps, config.normalUserHistoryCollection),
@@ -185,17 +150,11 @@ async function getEarliestDataDates(config, deps) {
  const earliestInsightsDate = getMinDate(insightsDate);
  const earliestSocialDate = getMinDate(socialDate);
  const earliestPriceDate = getMinDate(priceDate);
- const absoluteEarliest = getMinDate(
- earliestPortfolioDate,
- earliestHistoryDate,
- earliestInsightsDate,
- earliestSocialDate,
- earliestPriceDate
- );
+ const absoluteEarliest = getMinDate(earliestPortfolioDate, earliestHistoryDate, earliestInsightsDate, earliestSocialDate, earliestPriceDate);

  const fallbackDate = new Date(config.earliestComputationDate + 'T00:00:00Z' || '2023-01-01T00:00:00Z');

- const result = {
+ return {
  portfolio: earliestPortfolioDate || new Date('2999-12-31T00:00:00Z'),
  history: earliestHistoryDate || new Date('2999-12-31T00:00:00Z'),
  insights: earliestInsightsDate || new Date('2999-12-31T00:00:00Z'),
@@ -203,71 +162,26 @@ async function getEarliestDataDates(config, deps) {
  price: earliestPriceDate || new Date('2999-12-31T00:00:00Z'),
  absoluteEarliest: absoluteEarliest || fallbackDate
  };
-
- logger.log('INFO', 'Earliest data availability map built:', {
- portfolio: result.portfolio.toISOString().slice(0, 10),
- history: result.history.toISOString().slice(0, 10),
- insights: result.insights.toISOString().slice(0, 10),
- social: result.social.toISOString().slice(0, 10),
- price: result.price.toISOString().slice(0, 10),
- absoluteEarliest: result.absoluteEarliest.toISOString().slice(0, 10)
- });
-
- return result;
  }

- /**
- * NEW HELPER: Get the earliest date from price collection
- */
  async function getFirstDateFromPriceCollection(config, deps) {
  const { db, logger, calculationUtils } = deps;
  const { withRetry } = calculationUtils;
  const collection = config.priceCollection || 'asset_prices';
-
  try {
- logger.log('TRACE', `[getFirstDateFromPriceCollection] Querying ${collection}...`);
-
- const snapshot = await withRetry(
- () => db.collection(collection).limit(10).get(),
- `GetPriceShards(${collection})`
- );
-
- if (snapshot.empty) {
- logger.log('WARN', `No price shards found in ${collection}`);
- return null;
- }
-
+ const snapshot = await withRetry(() => db.collection(collection).limit(10).get(), `GetPriceShards(${collection})`);
  let earliestDate = null;
-
  snapshot.forEach(doc => {
  const shardData = doc.data();
  for (const instrumentId in shardData) {
  const instrumentData = shardData[instrumentId];
  if (!instrumentData.prices) continue;
-
- const dates = Object.keys(instrumentData.prices)
- .filter(d => /^\d{4}-\d{2}-\d{2}$/.test(d))
- .sort();
-
- if (dates.length > 0) {
- const firstDate = new Date(dates[0] + 'T00:00:00Z');
- if (!earliestDate || firstDate < earliestDate) {
- earliestDate = firstDate;
- }
- }
+ const dates = Object.keys(instrumentData.prices).filter(d => /^\d{4}-\d{2}-\d{2}$/.test(d)).sort();
+ if (dates.length > 0) { const firstDate = new Date(dates[0] + 'T00:00:00Z'); if (!earliestDate || firstDate < earliestDate) earliestDate = firstDate; }
  }
  });
-
- if (earliestDate) {
- logger.log('TRACE', `[getFirstDateFromPriceCollection] Earliest price date: ${earliestDate.toISOString().slice(0, 10)}`);
- }
-
  return earliestDate;
-
- } catch (e) {
- logger.log('ERROR', `Failed to get earliest price date from ${collection}`, { errorMessage: e.message });
- return null;
- }
+ } catch (e) { logger.log('ERROR', `Failed to get earliest price date from ${collection}`, { errorMessage: e.message }); return null; }
  }

  module.exports = { FieldValue, FieldPath, normalizeName, commitBatchInChunks, getExpectedDateStrings, getEarliestDataDates, generateCodeHash };
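One caveat in the retained context line above: in `new Date(config.earliestComputationDate + 'T00:00:00Z' || '2023-01-01T00:00:00Z')` the `||` applies to the concatenation, which is always a truthy string, so the '2023-01-01' literal is unreachable and a missing config value produces an Invalid Date. Calling the exported helper looks roughly like this (deps wiring assumed; the return shape is taken from the diff):

const { getEarliestDataDates } = require('./computation-system/utils/utils');

const dates = await getEarliestDataDates(config, { db, logger, calculationUtils });
console.log(dates.absoluteEarliest.toISOString().slice(0, 10));
// Sources with no data report the sentinel date 2999-12-31.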
@@ -5,43 +5,26 @@
  */

  const { FieldValue } = require('@google-cloud/firestore');
- const crypto = require('crypto');
+ const crypto = require('crypto');

  // --- CIRCUIT BREAKER STATE ---
  let _consecutiveProxyFailures = 0;
- const MAX_PROXY_FAILURES = 3;
+ const MAX_PROXY_FAILURES = 3;

- function shouldTryProxy() {
- return _consecutiveProxyFailures < MAX_PROXY_FAILURES;
- }
+ function shouldTryProxy() { return _consecutiveProxyFailures < MAX_PROXY_FAILURES; }

  function recordProxyOutcome(success) {
- if (success) {
- _consecutiveProxyFailures = 0;
- } else {
- _consecutiveProxyFailures++;
- }
+ if (success) { _consecutiveProxyFailures = 0;
+ } else { _consecutiveProxyFailures++; }
  }

  function detectSpeculatorTargets(historyData, portfolioData) {
  if (!historyData?.PublicHistoryPositions || !portfolioData?.AggregatedPositions) return [];
-
  const leveragedAssets = new Set();
- for (const pos of historyData.PublicHistoryPositions) {
- if (pos.Leverage > 1 && pos.InstrumentID) {
- leveragedAssets.add(pos.InstrumentID);
- }
- }
-
+ for (const pos of historyData.PublicHistoryPositions) { if (pos.Leverage > 1 && pos.InstrumentID) { leveragedAssets.add(pos.InstrumentID); } }
  if (leveragedAssets.size === 0) return [];
-
  const targets = [];
- for (const pos of portfolioData.AggregatedPositions) {
- if (leveragedAssets.has(pos.InstrumentID)) {
- targets.push(pos.InstrumentID);
- }
- }
-
+ for (const pos of portfolioData.AggregatedPositions) { if (leveragedAssets.has(pos.InstrumentID)) { targets.push(pos.InstrumentID); } }
  return targets;
  }

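The circuit breaker above is module-level state shared by all tasks in the process: three consecutive proxy failures disable further proxy attempts until one success resets the counter. The contract in miniature:

recordProxyOutcome(false);
recordProxyOutcome(false);
recordProxyOutcome(false);     // counter reaches MAX_PROXY_FAILURES (3)
console.log(shouldTryProxy()); // false -> callers fall back to direct fetch
recordProxyOutcome(true);      // a single success resets the counter
console.log(shouldTryProxy()); // true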
@@ -49,37 +32,35 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  const { userId, instruments, instrumentId, userType } = task;

  const instrumentsToProcess = userType === 'speculator' ? (instruments || [instrumentId]) : [undefined];
- const today = new Date().toISOString().slice(0, 10);
- const portfolioBlockId = `${Math.floor(parseInt(userId) / 1000000)}M`;
- let isPrivate = false;
-
- let capturedHistory = null;
- let capturedPortfolio = null;
- let hasPortfolioErrors = false;
+ const today = new Date().toISOString().slice(0, 10);
+ const portfolioBlockId = `${Math.floor(parseInt(userId) / 1000000)}M`;
+ let isPrivate = false;
+ let capturedHistory = null;
+ let capturedPortfolio = null;
+ let hasPortfolioErrors = false;

  const scopeLog = userType === 'speculator' ? `Instruments: [${instrumentsToProcess.join(', ')}]` : 'Scope: Full Portfolio';
  logger.log('TRACE', `[handleUpdate/${userId}] Starting update task. Type: ${userType}. ${scopeLog}`);

  // --- 1. Process History Fetch (Sequentially) ---
- let historyHeader = null;
- let wasHistorySuccess = false;
+ let historyHeader = null;
+ let wasHistorySuccess = false;
  let proxyUsedForHistory = false;

  try {
  if (!batchManager.checkAndSetHistoryFetched(userId)) {
  logger.log('TRACE', `[handleUpdate/${userId}] Attempting history fetch.`);
  historyHeader = await headerManager.selectHeader();
- if (!historyHeader) {
- logger.log('WARN', `[handleUpdate/${userId}] Could not select history header. Skipping history.`);
+ if (!historyHeader) { logger.log('WARN', `[handleUpdate/${userId}] Could not select history header. Skipping history.`);
  } else {

  const d = new Date();
  d.setFullYear(d.getFullYear() - 1);
  const oneYearAgoStr = d.toISOString();
- const uuid = crypto.randomUUID ? crypto.randomUUID() : '0205aca7-bd37-4884-8455-f28ce1add2de';
+ const uuid = crypto.randomUUID ? crypto.randomUUID() : '0205aca7-bd37-4884-8455-f28ce1add2de';

  const historyUrl = `https://www.etoro.com/sapi/trade-data-real/history/public/credit/flat?StartTime=${oneYearAgoStr}&PageNumber=1&ItemsPerPage=30000&PublicHistoryPortfolioFilter=&CID=${userId}&client_request_id=${uuid}`;
- const options = { headers: historyHeader.header };
+ const options = { headers: historyHeader.header };
  let response;

  // --- PROXY ATTEMPT ---
@@ -88,7 +69,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  logger.log('TRACE', `[handleUpdate/${userId}] Attempting history fetch via AppScript proxy...`);
  response = await proxyManager.fetch(historyUrl, options);
  if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
- wasHistorySuccess = true;
+ wasHistorySuccess = true;
  proxyUsedForHistory = true;
  recordProxyOutcome(true);

@@ -113,9 +94,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,

  if (wasHistorySuccess) {
  const data = await response.json();
-
- // --- FILTER LOGIC FOR GRANULAR API ---
- // 0 = Manual, 1 = Stop Loss, 5 = Take Profit.
+
  const VALID_REASONS = [0, 1, 5];
  if (data.PublicHistoryPositions && Array.isArray(data.PublicHistoryPositions)) {
  const originalCount = data.PublicHistoryPositions.length;
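The deleted comment documented the reason codes behind VALID_REASONS (0 = Manual, 1 = Stop Loss, 5 = Take Profit). The filtering itself falls outside this hunk's context; it presumably looks something like the line below, where the CloseReason field name is a guess, not confirmed by the diff:

// Assumed continuation (not shown in the hunk); 'CloseReason' is an assumed field name.
data.PublicHistoryPositions = data.PublicHistoryPositions.filter(p => VALID_REASONS.includes(p.CloseReason));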
@@ -128,14 +107,9 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  await batchManager.addToTradingHistoryBatch(userId, portfolioBlockId, today, data, userType);
  }
  }
- } else {
- logger.log('TRACE', `[handleUpdate/${userId}] History fetch skipped (already fetched).`);
- }
- } catch (err) {
- logger.log('ERROR', `[handleUpdate/${userId}] Unhandled error during history processing.`, { error: err.message }); wasHistorySuccess = false;
- } finally {
- if (historyHeader && proxyUsedForHistory) { headerManager.updatePerformance(historyHeader.id, wasHistorySuccess); }
- }
+ } else { logger.log('TRACE', `[handleUpdate/${userId}] History fetch skipped (already fetched).`); }
+ } catch (err) { logger.log('ERROR', `[handleUpdate/${userId}] Unhandled error during history processing.`, { error: err.message }); wasHistorySuccess = false;
+ } finally { if (historyHeader && proxyUsedForHistory) { headerManager.updatePerformance(historyHeader.id, wasHistorySuccess); } }

  // --- 2. Process Portfolio Fetches (Sequentially) ---
  logger.log('TRACE', `[handleUpdate/${userId}] Starting ${instrumentsToProcess.length} sequential portfolio fetches.`);
@@ -154,7 +128,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  const portfolioUrl = userType === 'speculator' ? `${config.ETORO_API_POSITIONS_URL}?cid=${userId}&InstrumentID=${instId}` : `${config.ETORO_API_PORTFOLIO_URL}?cid=${userId}`;
  const options = { headers: portfolioHeader.header };
  let response;
- let wasPortfolioSuccess = false;
+ let wasPortfolioSuccess = false;
  let proxyUsedForPortfolio = false;

  if (shouldTryProxy()) {
@@ -162,7 +136,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  logger.log('TRACE', `[handleUpdate/${userId}] Attempting fetch for ${scopeName} via AppScript proxy...`);
  response = await proxyManager.fetch(portfolioUrl, options);
  if (!response.ok) throw new Error(`AppScript proxy failed with status ${response.status}`);
- wasPortfolioSuccess = true;
+ wasPortfolioSuccess = true;
  proxyUsedForPortfolio = true;
  recordProxyOutcome(true);

@@ -190,13 +164,13 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,

  try {
  const portfolioJson = JSON.parse(body);
- capturedPortfolio = portfolioJson;
+ capturedPortfolio = portfolioJson;
  await batchManager.addToPortfolioBatch(userId, portfolioBlockId, today, portfolioJson, userType, instId);
  logger.log('TRACE', `[handleUpdate/${userId}] Portfolio for ${scopeName} processed successfully.`);

  } catch (parseError) {
  wasPortfolioSuccess = false;
- hasPortfolioErrors = true;
+ hasPortfolioErrors = true;
  logger.log('ERROR', `[handleUpdate/${userId}] FAILED TO PARSE JSON RESPONSE for ${scopeName}.`, { url: portfolioUrl, parseErrorMessage: parseError.message });
  }
  } else {
@@ -210,8 +184,7 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  if (userType !== 'speculator' && capturedHistory && capturedPortfolio && pubsub && config.PUBSUB_TOPIC_TASK_ENGINE) {
  try {
  const speculatorAssets = detectSpeculatorTargets(capturedHistory, capturedPortfolio);
- if (speculatorAssets.length > 0) {
- logger.log('INFO', `[handleUpdate/${userId}] DETECTED SPECULATOR BEHAVIOR. Queuing ${speculatorAssets.length} targeted updates.`);
+ if (speculatorAssets.length > 0) { logger.log('INFO', `[handleUpdate/${userId}] DETECTED SPECULATOR BEHAVIOR. Queuing ${speculatorAssets.length} targeted updates.`);

  const newTasks = speculatorAssets.map(assetId => ({
  type: 'update',
@@ -223,16 +196,12 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  const dataBuffer = Buffer.from(JSON.stringify({ tasks: newTasks }));
  await pubsub.topic(config.PUBSUB_TOPIC_TASK_ENGINE).publishMessage({ data: dataBuffer });
  }
- } catch (detectionError) {
- logger.log('ERROR', `[handleUpdate/${userId}] Error during Speculator Detection.`, { error: detectionError.message });
- }
+ } catch (detectionError) { logger.log('ERROR', `[handleUpdate/${userId}] Error during Speculator Detection.`, { error: detectionError.message }); }
  }

  if (isPrivate) {
  logger.log('WARN', `[handleUpdate/${userId}] Removing private user from updates.`);
- for (const instrumentId of instrumentsToProcess) {
- await batchManager.deleteFromTimestampBatch(userId, userType, instrumentId);
- }
+ for (const instrumentId of instrumentsToProcess) { await batchManager.deleteFromTimestampBatch(userId, userType, instrumentId); }
  const blockCountsRef = db.doc(config.FIRESTORE_DOC_SPECULATOR_BLOCK_COUNTS);
  for (const instrumentId of instrumentsToProcess) {
  const incrementField = `counts.${instrumentId}_${Math.floor(userId/1e6)*1e6}`;
@@ -241,17 +210,12 @@ async function handleUpdate(task, taskId, { logger, headerManager, proxyManager,
  return;
  }

- for (const instrumentId of instrumentsToProcess) {
- await batchManager.updateUserTimestamp(userId, userType, instrumentId);
- }
+ for (const instrumentId of instrumentsToProcess) { await batchManager.updateUserTimestamp(userId, userType, instrumentId); }

  if (userType === 'speculator') { await batchManager.addSpeculatorTimestampFix(userId, String(Math.floor(userId/1e6)*1e6)); }
-
- if (hasPortfolioErrors) {
- logger.log('WARN', `[handleUpdate/${userId}] Update task finished with ERRORS. See logs above.`);
- } else {
- logger.log('TRACE', `[handleUpdate/${userId}] Update task finished successfully.`);
- }
+
+ if (hasPortfolioErrors) { logger.log('WARN', `[handleUpdate/${userId}] Update task finished with ERRORS. See logs above.`);
+ } else { logger.log('TRACE', `[handleUpdate/${userId}] Update task finished successfully.`); }
  }

  module.exports = { handleUpdate };
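For orientation, handleUpdate takes a task object and an injected dependency bag; the names below mirror what the function destructures and uses in this diff, while the concrete values are illustrative only:

await handleUpdate(
  { userId: '12345678', userType: 'speculator', instruments: [1001] },
  'task-001',
  { logger, headerManager, proxyManager, batchManager, config, pubsub, db }
);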
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bulltrackers-module",
- "version": "1.0.214",
+ "version": "1.0.216",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [