bulltrackers-module 1.0.612 → 1.0.614
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -74,6 +74,12 @@ const pageCollection = async (firestore, dateStr, computationName, userId, lookb
     if (results.length === 0) {
       throw new Error(`No page data found for User ID ${userId} in computation ${computationName} within the last ${lookbackDays} days`);
     }
+    // Sort results by date descending (newest first) so the latest data is always first
+    results.sort((a, b) => {
+      const dateA = new Date(a.date);
+      const dateB = new Date(b.date);
+      return dateB.getTime() - dateA.getTime(); // Descending order (newest first)
+    });
     return results;
   } catch (error) {
     console.error(`Error fetching page collection data: ${error}`);
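
The added block lets callers treat the first element of the returned array as the most recent entry; Array.prototype.sort mutates and returns the same array, so no copy is made. A minimal sketch of the same comparator in isolation, assuming each entry carries a parseable date string (the field name comes from the diff; the sample data is purely illustrative):

// Illustrative sample shaped like the `results` entries above.
const results = [
  { date: '2024-05-01', value: 1 },
  { date: '2024-05-03', value: 3 },
  { date: '2024-05-02', value: 2 },
];

// Same descending comparator as the diff: newest date first.
results.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());

console.log(results[0].date); // '2024-05-03' -- the latest entry is now always first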
@@ -147,8 +147,8 @@ router.get('/:piId/profile', async (req, res) => {
     const computationName = 'PopularInvestorProfileMetrics';
     const profileData = await pageCollection(db, targetDate, computationName, piId, parseInt(lookback));
 
-    // Extract computationDate from the latest data entry (first item
-    // The profileData array
+    // Extract computationDate from the latest data entry (first item after sorting by date descending)
+    // The profileData array is sorted by date descending (newest first) by pageCollection
     const computationDate = profileData && profileData.length > 0 ? profileData[0].date : null;
 
     res.json({
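
With pageCollection now returning newest-first data, reading profileData[0].date reliably yields the latest computation date. A hypothetical helper showing the same null-safe pattern (the name latestDate and the sample entries are illustrative, not part of the package):

// Hypothetical helper mirroring the route's null-safe extraction of the newest date.
function latestDate(entries) {
  return entries && entries.length > 0 ? entries[0].date : null;
}

console.log(latestDate([{ date: '2024-05-03' }, { date: '2024-05-01' }])); // '2024-05-03'
console.log(latestDate([]));   // null
console.log(latestDate(null)); // null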
@@ -5,7 +5,7 @@
  * UPDATED: Implemented TTL retention policy. Defaults to 90 days from the computation date.
  * UPDATED: Fixed issue where switching to 'isPage' mode didn't clean up old sharded/raw data.
  */
-const { commitBatchInChunks, generateDataHash } = require('../utils/utils')
+const { commitBatchInChunks, generateDataHash, FieldValue } = require('../utils/utils')
 const { updateComputationStatus } = require('./StatusRepository');
 const { batchStoreSchemas } = require('../utils/schema_capture');
 const { generateProcessId, PROCESS_TYPES } = require('../logger/logger');
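
The only change here adds FieldValue to the imports from ../utils/utils, presumably a re-export of Firestore's FieldValue sentinel; the diff does not show how it is used. A speculative sketch of one way it could support the 90-day TTL retention noted in the file header, assuming the Google Cloud Firestore client (the re-export, buildRetentionFields, expiresAt, and RETENTION_DAYS are all assumptions, not shown in the diff):

// Assumption: ../utils/utils re-exports Firestore primitives, e.g.
// const { FieldValue, Timestamp } = require('@google-cloud/firestore');
const { FieldValue, Timestamp } = require('@google-cloud/firestore');

const RETENTION_DAYS = 90; // matches the "90 days" note in the file header

// Compute a TTL timestamp 90 days after the computation date; a Firestore TTL
// policy on `expiresAt` would then delete the document automatically.
function buildRetentionFields(computationDate) {
  const expires = new Date(computationDate);
  expires.setDate(expires.getDate() + RETENTION_DAYS);
  return {
    updatedAt: FieldValue.serverTimestamp(), // illustrative use of FieldValue
    expiresAt: Timestamp.fromDate(expires),
  };
}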
@@ -15,7 +15,7 @@ const ContractValidator = require('./ContractValidator');
 const validationOverrides = require('../config/validation_overrides');
 const pLimit = require('p-limit');
 const zlib = require('zlib');
-
+;
 
 const NON_RETRYABLE_ERRORS = [ 'PERMISSION_DENIED', 'DATA_LOSS', 'FAILED_PRECONDITION' ];
 const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';