bulltrackers-module 1.0.707 → 1.0.710
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3025,29 +3025,27 @@ const getWatchlistTriggerCounts = async (db, userId, watchlistId) => {
     throw error;
   }
 };
-
 /**
- * Query PIs matching dynamic watchlist criteria
- *
- *
- *
- *
- *
- *
- *
- *
- * @param {Object} db - Firestore instance
+ * Query PIs matching dynamic watchlist criteria over a time range.
+ * * UPDATED LOGIC:
+ * Instead of only looking at the latest data snapshot, this function scans
+ * the entire requested time range (e.g. 7 days).
+ * * It returns any PI who met the criteria AT LEAST ONCE during the period.
+ * It provides a history of their matches, allowing the UI to show:
+ * - When they first appeared (firstMatchedAt)
+ * - If they are no longer matching (droppedOffAt)
+ * - Their value history over the period
+ * * @param {Object} db - Firestore instance
  * @param {string} computationName - Name of the computation to query
- * @param {Object} parameters - Threshold parameters
- * @param {string} timeRange - Time range
+ * @param {Object} parameters - Threshold parameters (e.g., {minChange: 1})
+ * @param {string} timeRange - Time range (today, last_7_days, last_30_days)
  * @param {number} limit - Maximum number of results
- * @returns {Promise<Object>} Matching PIs with their values
  */
 const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}, timeRange = 'last_7_days', limit = 100) => {
   try {
-    console.log(`[queryDynamicWatchlistMatches]
+    console.log(`[queryDynamicWatchlistMatches] Aggregating ${computationName} over ${timeRange}`);

-    // Determine
+    // 1. Determine Date Range
     const endDate = new Date();
     const startDate = new Date();

@@ -3055,42 +3053,35 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
       case 'today':
         // Just today
         break;
-      case 'last_7_days':
-        startDate.setDate(startDate.getDate() - 7);
-        break;
       case 'last_30_days':
         startDate.setDate(startDate.getDate() - 30);
         break;
+      case 'last_7_days':
       default:
         startDate.setDate(startDate.getDate() - 7);
     }

-    // Build list of dates to check (
+    // Build list of dates to check (Newest -> Oldest)
     const dates = [];
     for (let d = new Date(endDate); d >= startDate; d.setDate(d.getDate() - 1)) {
       dates.push(d.toISOString().split('T')[0]);
     }

-
-
-
-    let mostRecentDate = null;
-    let docRef = null;
-    let docSnapshot = null;
-
-    for (const dateStr of dates) {
+    // 2. Fetch Data for ALL Dates in Parallel
+    // We do not stop at the first match anymore.
+    const datePromises = dates.map(async (dateStr) => {
       try {
-        // Try alerts path first
-        docRef = db.collection('unified_insights')
+        // Try alerts path first (primary location for alert computations)
+        let docRef = db.collection('unified_insights')
           .doc(dateStr)
           .collection('results')
           .doc('alerts')
           .collection('computations')
           .doc(computationName);

-        docSnapshot = await docRef.get();
+        let docSnapshot = await docRef.get();

-        //
+        // Fallback to popular-investor path
         if (!docSnapshot.exists) {
           docRef = db.collection('unified_insights')
             .doc(dateStr)
@@ -3098,141 +3089,176 @@ const queryDynamicWatchlistMatches = async (db, computationName, parameters = {}
             .doc('popular-investor')
             .collection('computations')
             .doc(computationName);
-
           docSnapshot = await docRef.get();
         }
-
-        if (docSnapshot.exists)
-
-
-
+
+        if (!docSnapshot.exists) return null;
+
+        const docData = docSnapshot.data();
+        let dayData = {};
+
+        // Handle Sharding
+        if (docData._sharded === true) {
+          const shardsSnapshot = await docRef.collection('_shards').get();
+          for (const shardDoc of shardsSnapshot.docs) {
+            const shardContent = shardDoc.data();
+            Object.entries(shardContent).forEach(([key, value]) => {
+              if (!key.startsWith('_') && key !== 'cids' && /^\d+$/.test(key)) {
+                dayData[key] = value;
+              }
+            });
+          }
+        } else {
+          // Standard Document
+          Object.entries(docData).forEach(([key, value]) => {
+            if (!key.startsWith('_') && key !== 'cids' && /^\d+$/.test(key)) {
+              dayData[key] = value;
+            }
+          });
         }
-
-
-
+
+        return { date: dateStr, data: dayData };
+
+      } catch (err) {
+        console.warn(`[queryDynamicWatchlistMatches] Error fetching date ${dateStr}: ${err.message}`);
+        return null;
       }
-    }
-
-    //
-
-      console.log(`[queryDynamicWatchlistMatches] No computation data found for ${computationName} in date range`);
-      return {
-        success: true,
-        matches: [],
-        count: 0,
-        totalScanned: 0,
-        dateRange: {
-          start: startDate.toISOString().split('T')[0],
-          end: endDate.toISOString().split('T')[0]
-        },
-        dataDate: null,
-        computationName,
-        parameters,
-        message: `No computation data found for ${computationName} in the selected time range`
-      };
-    }
-
-    const docData = docSnapshot.data();
+    });
+
+    // Wait for all days to load
+    const rawResults = (await Promise.all(datePromises)).filter(r => r !== null);

-    //
-
+    // 3. Aggregate Matches Per User
+    // Map: piCid -> { firstMatchedAt, lastMatchedAt, history: [], ... }
+    const piAggregates = new Map();

-
-
-
-
-
-
-
-
-
-
-
-
+    // Process dates from Oldest -> Newest to build timeline correctly
+    // (rawResults is Newest->Oldest, so we reverse it)
+    const timeline = rawResults.reverse();
+
+    for (const dayEntry of timeline) {
+      const { date, data } = dayEntry;
+
+      for (const [piCidStr, piData] of Object.entries(data)) {
+        if (piData.error) continue;
+
+        const piCid = Number(piCidStr);
+
+        // Check if this PI matches the criteria ON THIS SPECIFIC DAY
+        const filterResult = checkPIMatchesCriteria(computationName, piData, parameters);
+
+        if (filterResult.passes) {
+          if (!piAggregates.has(piCid)) {
+            piAggregates.set(piCid, {
+              cid: piCid,
+              firstMatchedAt: date, // Earliest date in range they matched
+              lastMatchedAt: date, // Will update as we go forward
+              droppedOffAt: null, // Will calculate later
+              matchCount: 0,
+              history: [], // Array of { date, value, change }
+              latestData: null // Store latest raw data for metadata
+            });
+          }
+
+          const agg = piAggregates.get(piCid);
+          agg.lastMatchedAt = date;
+          agg.matchCount++;
+          agg.latestData = piData; // Update to keep the most recent data packet
+
+          agg.history.push({
+            date: date,
+            matched: true,
+            value: filterResult.matchValue,
+            change: filterResult.change
+          });
+        } else {
+          // PI exists in data but DOES NOT match criteria (e.g. Risk score dropped back down)
+          if (piAggregates.has(piCid)) {
+            const agg = piAggregates.get(piCid);
+            // Record the non-match event in history if they had previously matched
+            agg.history.push({
+              date: date,
+              matched: false,
+              value: filterResult.matchValue, // Value that failed criteria
+              change: filterResult.change
+            });
           }
-        });
-      }
-    } else {
-      // Data is in the document itself
-      Object.entries(docData).forEach(([key, value]) => {
-        if (key.startsWith('_') || key === 'cids' || key === 'metadata' || key === 'globalMetadata') return;
-        if (/^\d+$/.test(key)) {
-          allUserData[key] = value;
         }
-      });
-    }
-
-    const totalPIs = Object.keys(allUserData).length;
-    console.log(`[queryDynamicWatchlistMatches] Evaluating ${totalPIs} PIs from ${mostRecentDate} against criteria`);
-
-    // Filter PIs that match the criteria on this most recent date
-    const matchingPIs = [];
-    const cidsToLookup = [];
-
-    for (const [piCidStr, piData] of Object.entries(allUserData)) {
-      const piCid = Number(piCidStr);
-
-      // Skip error data
-      if (piData.error) continue;
-
-      // Check if PI matches the criteria
-      const filterResult = checkPIMatchesCriteria(computationName, piData, parameters);
-
-      if (filterResult.passes) {
-        cidsToLookup.push({ piCid, piData, filterResult });
       }
     }
-
-
-
-
-
-
+
+    // 4. Calculate Status (Dropped Off, Current) & Fetch Usernames
+    const results = [];
+    const todayStr = new Date().toISOString().split('T')[0];
+    const lastDataDate = rawResults.length > 0 ? rawResults[rawResults.length - 1].date : todayStr; // The "Current" date of the system
+
+    for (const [cid, agg] of piAggregates) {
+      // Determine if "Dropped Off"
+      // They dropped off if their last match was BEFORE the most recent data date we have
+      let isCurrent = agg.lastMatchedAt === lastDataDate;
+      let droppedOffAt = isCurrent ? null : agg.history[agg.history.length - 1]?.date;
+
+      // Fetch Username (Optimistic/Lazy)
+      let username = `PI-${cid}`;
       try {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        // Assuming this is cached or fast enough. For 1500 users this might be slow loop.
+        // In production, better to fetch all usernames in one batch or store in the computation data.
+        // Falling back to "Unknown" if costly, or rely on frontend to fetch details.
+        const piProfile = await fetchPopularInvestorMasterList(db, String(cid)).catch(() => null);
+        if (piProfile) username = piProfile.username;
+      } catch (e) {}
+
+      results.push({
+        cid: cid,
+        username: username,
+
+        // Aggregated Stats
+        firstMatchedAt: agg.firstMatchedAt,
+        lastMatchedAt: agg.lastMatchedAt,
+        droppedOffAt: droppedOffAt, // If set, they matched previously but don't match now
+        isCurrentlyMatching: isCurrent,
+        matchCount: agg.matchCount,
+
+        // Visualization Data
+        history: agg.history,
+
+        // Latest Snapshot Values (for sorting/display)
+        latestValue: agg.history[agg.history.length - 1]?.value,
+
+        // Metadata for debug/details
+        metadata: agg.latestData
       });
     }
-
-    // Sort
-
-
-
-
-
-
+
+    // 5. Sort Results
+    // Priority: Currently Matching > Recently Dropped Off
+    // Secondary: Match Value (High Risk/High Change first)
+    results.sort((a, b) => {
+      if (a.isCurrentlyMatching !== b.isCurrentlyMatching) {
+        return a.isCurrentlyMatching ? -1 : 1;
+      }
+      // If both current or both dropped, sort by latest value magnitude
+      return Math.abs(b.latestValue) - Math.abs(a.latestValue);
+    });
+
+    const limitedResults = results.slice(0, limit);
+
+    console.log(`[queryDynamicWatchlistMatches] Found ${results.length} unique PIs matching at least once.`);
+
     return {
       success: true,
-      matches:
-      count:
-
-      totalMatching: cidsToLookup.length,
+      matches: limitedResults,
+      count: limitedResults.length,
+      totalUniqueMatches: results.length,
       dateRange: {
         start: startDate.toISOString().split('T')[0],
         end: endDate.toISOString().split('T')[0]
       },
-      dataDate:
+      dataDate: lastDataDate, // Most recent date found in system
       computationName,
       parameters
     };
+
   } catch (error) {
     console.error(`[queryDynamicWatchlistMatches] Error: ${error.message}`, error);
     throw error;
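For readers of this diff: the new return payload is richer than the old `{ matches, count, totalMatching }` shape. Each match now carries the aggregation fields added above (firstMatchedAt, lastMatchedAt, droppedOffAt, isCurrentlyMatching, matchCount, history, latestValue, metadata), and the top level gains totalUniqueMatches and a concrete dataDate. The sketch below is a minimal, hypothetical consumer based only on that shape; the helper name renderWatchlistSummary and the computation name 'risk_score_change' are illustrative and not part of the package.

// Hypothetical consumer of the updated queryDynamicWatchlistMatches (sketch only;
// renderWatchlistSummary and 'risk_score_change' are illustrative names).
const renderWatchlistSummary = async (db) => {
  const result = await queryDynamicWatchlistMatches(
    db,
    'risk_score_change',   // computationName: example value, not a documented computation
    { minChange: 1 },      // parameters, mirroring the example in the updated JSDoc
    'last_7_days',
    50
  );

  // Split on the new isCurrentlyMatching flag; matches is pre-sorted with current PIs first.
  const current = result.matches.filter(m => m.isCurrentlyMatching);
  const droppedOff = result.matches.filter(m => !m.isCurrentlyMatching);

  console.log(`Data as of ${result.dataDate} (${result.dateRange.start} to ${result.dateRange.end})`);
  console.log(`${current.length} currently matching, ${droppedOff.length} dropped off, ${result.totalUniqueMatches} unique overall`);

  for (const m of current) {
    // m.history is an array of { date, matched, value, change } entries,
    // suitable for rendering a per-PI sparkline over the period.
    console.log(`${m.username}: first matched ${m.firstMatchedAt}, ${m.matchCount} matching days, latest value ${m.latestValue}`);
  }
};

Because the sort step above puts currently matching PIs first, the split preserves that ordering within each group.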
@@ -2,7 +2,7 @@
  * @fileoverview Executor for "Standard" (User-Level) calculations.
  * REFACTORED: Hoisted data loading, centralized Series/Root logic.
  * UPDATED: Calls getResult() on computations to trigger summary logging.
- * FIXED:
+ * FIXED: Removed redundant shard index increment that caused shard gaps (0, 2, 4...).
  */
 const { normalizeName, getEarliestDataDates } = require('../utils/utils');
 const { streamPortfolioData, streamHistoryData, getPortfolioPartRefs, getHistoryPartRefs } = require('../utils/data_loader');
@@ -279,7 +279,7 @@ class StandardExecutor {
         data = inst.results || {};
       }

-      // Track active calculations
+      // Track active calculations (for debugging/logging only now)
       if (Object.keys(data).length > 0) {
         activeCalcs.push(name);
       }
@@ -302,16 +302,10 @@ class StandardExecutor {

     const res = await commitResults(transformedState, dateStr, passName, config, deps, skipStatus, { flushMode: mode, shardIndexes: shardMap, isInitialWrite: isInitial });

-    // Update shardMap from result
+    // Update shardMap from result.
+    // ResultCommitter now returns the CORRECT nextShardIndex (e.g. if it wrote shard_0, it returns 1).
     if (res.shardIndexes) Object.assign(shardMap, res.shardIndexes);

-    // [FIX 2] Force increment shard indexes for active calculations.
-    // This ensures the NEXT batch writes to a NEW shard (e.g. results_1)
-    // instead of overwriting the current one (results_0).
-    activeCalcs.forEach(name => {
-      shardMap[name] = (shardMap[name] || 0) + 1;
-    });
-
     return res;
   }

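The block removed above is the source of the shard gaps called out in the updated @fileoverview: commitResults already returns the next shard index to use, so force-incrementing shardMap afterwards skipped every other shard (shard_0, shard_2, shard_4, ...). Below is a minimal sketch of the old behaviour, with a stand-in commitResults that mimics what the new comment says ResultCommitter returns; the calculation name, the stand-in, and the three-batch loop are illustrative only.

// Stand-in for the real ResultCommitter: writes shard_<n> and returns n + 1 as nextShardIndex.
const commitResults = (shardIndexes) => {
  const index = shardIndexes.calcA || 0;
  console.log(`writing shard_${index}`);
  return { shardIndexes: { calcA: index + 1 } };
};

const shardMap = {};
for (let batch = 0; batch < 3; batch++) {
  const res = commitResults(shardMap);
  Object.assign(shardMap, res.shardIndexes);   // committer says next index is 1, then 3, then 5
  shardMap.calcA = (shardMap.calcA || 0) + 1;  // old [FIX 2] increment on top of that -> gap
}
// Old code (pre-1.0.710) logs: writing shard_0, writing shard_2, writing shard_4
// With the redundant increment removed, the same loop logs shard_0, shard_1, shard_2.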