bulltrackers-module 1.0.7 → 1.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/helpers/orchestration_helpers.js +266 -0
- package/functions/computation-system/index.js +9 -0
- package/functions/computation-system/utils/data_loader.js +102 -0
- package/functions/computation-system/utils/utils.js +169 -0
- package/index.js +5 -3
- package/package.json +5 -3
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Core orchestration logic for the Computation System.
|
|
3
|
+
* Contains both the high-level pass orchestrator and the low-level
|
|
4
|
+
* function for processing a single date.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
const { FieldPath } = require('@google-cloud/firestore');
|
|
8
|
+
const { getPortfolioPartRefs, loadFullDayMap, loadDataByRefs } = require('../utils/data_loader.js');
|
|
9
|
+
const {
|
|
10
|
+
historicalCalculations, dailyCalculations, HISTORICAL_CALC_NAMES,
|
|
11
|
+
withRetry, getExpectedDateStrings, processJobsInParallel, getFirstDateFromSourceData,
|
|
12
|
+
commitBatchInChunks, unifiedUtils
|
|
13
|
+
} = require('../utils/utils.js');
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
// --- CORE CALCULATION HELPERS (Moved from original utils.js) ---
|
|
17
|
+
|
|
18
|
+
/** Initializes calculator instances from the provided calculation classes. */
|
|
19
|
+
/**
 * Builds a map of live calculator instances keyed by "category/calcName".
 * A class that throws during construction is recorded as null (so later
 * phases can skip it); a missing class is omitted entirely. Both cases are
 * reported via console.warn because no logger is passed to this helper.
 */
function initializeCalculators(calculationsToRun, sourcePackage) {
  const instances = {};
  for (const { category, calcName } of calculationsToRun) {
    const key = `${category}/${calcName}`;
    const Ctor = sourcePackage[category]?.[calcName];
    if (typeof Ctor !== 'function') {
      console.warn(`[Orchestrator] Calculation class not found: ${category}/${calcName}`);
      continue;
    }
    try {
      instances[key] = new Ctor();
    } catch (e) {
      console.warn(`[Orchestrator] Init failed for ${category}/${calcName}`, { errorMessage: e.message });
      instances[key] = null;
    }
  }
  return instances;
}
|
|
37
|
+
|
|
38
|
+
/** Streams day's data and calls the process() method on all calculators. */
|
|
39
|
+
/**
 * Streams the day's portfolio "part" documents in small batches and feeds
 * every user's portfolio through each active calculator's process() method.
 *
 * Historical calculators (per HISTORICAL_CALC_NAMES) receive
 * (today, yesterday, uid) and are skipped for users with no prior-day
 * snapshot. Daily calculators receive (today, uid, context) and are filtered
 * by user type: speculators only run 'speculators'/'sanity' categories,
 * normal users run everything except 'speculators'.
 * Per-user process() errors are logged and swallowed so one bad record
 * cannot abort the whole pass.
 */
async function streamAndProcess(firestore, dateStr, todayRefs, state, passName, logger, yesterdayPortfolios = {}) {
  logger.log('INFO', `[${passName}] Streaming ${todayRefs.length} 'today' part docs for ${dateStr}...`);
  const { instrumentToTicker, instrumentToSector } = await unifiedUtils.loadInstrumentMappings();
  const context = { instrumentMappings: instrumentToTicker, sectorMapping: instrumentToSector };

  const CHUNK = 10; // part files loaded per Firestore round-trip
  for (let offset = 0; offset < todayRefs.length; offset += CHUNK) {
    const chunk = await loadDataByRefs(firestore, todayRefs.slice(offset, offset + CHUNK));

    for (const [uid, portfolio] of Object.entries(chunk)) {
      if (!portfolio) continue;
      // Presence of PublicPositions marks the portfolio as a speculator's.
      const isSpeculator = Boolean(portfolio.PublicPositions);

      for (const [key, calc] of Object.entries(state)) {
        if (!calc || typeof calc.process !== 'function') continue;
        const [category, calcName] = key.split('/');

        let args;
        if (HISTORICAL_CALC_NAMES.has(calcName)) {
          const prior = yesterdayPortfolios[uid];
          if (!prior) continue; // historical calcs need both days of data
          args = [portfolio, prior, uid];
        } else {
          // Daily calcs only run for the matching audience.
          const wrongAudience = isSpeculator
            ? !['speculators', 'sanity'].includes(category)
            : category === 'speculators';
          if (wrongAudience) continue;
          args = [portfolio, uid, context];
        }

        try {
          // Promise.resolve supports both sync and async process() impls.
          await Promise.resolve(calc.process(...args));
        } catch (e) {
          logger.log('WARN', `Process error in ${key} for user ${uid}`, { err: e.message });
        }
      }
    }
  }
}
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
// --- LOW-LEVEL ORCHESTRATOR (Moved from pass_orchestrators.js) ---
|
|
86
|
+
|
|
87
|
+
/**
|
|
88
|
+
* Runs a set of calculations for a given date and writes the final results.
|
|
89
|
+
* @param {Firestore} firestore A Firestore client instance.
|
|
90
|
+
* @param {Function} logger A logger instance.
|
|
91
|
+
* @param {Date} dateToProcess - The date to run computations for.
|
|
92
|
+
* @param {Array} calculationsToRun - List of { category, calcName } to run.
|
|
93
|
+
* @param {String} passName - The name of the pass for logging.
|
|
94
|
+
* @param {Object} sourcePackage - The object containing calculation classes.
|
|
95
|
+
* @returns {Object} A summary of the execution.
|
|
96
|
+
*/
|
|
97
|
+
/**
 * Runs a set of calculations for a given date and writes the final results.
 *
 * Flow: (1) load today's part refs (and, when any calculation's process()
 * takes three arguments, the full prior-day map); (2) instantiate calculators
 * and stream every portfolio through them; (3) commit each calculator's
 * getResult() output individually so one bad calc cannot block the others,
 * and mark it complete on the date's summary doc via merge writes.
 *
 * @param {Firestore} firestore A Firestore client instance.
 * @param {Function} logger A logger instance.
 * @param {Date} dateToProcess - The date to run computations for.
 * @param {Array} calculationsToRun - List of { category, calcName } to run.
 * @param {String} passName - The name of the pass for logging.
 * @param {Object} sourcePackage - The object containing calculation classes.
 * @returns {Object} A summary of the execution.
 */
async function runUnifiedComputation(firestore, logger, dateToProcess, calculationsToRun, passName, sourcePackage) {
  const dateStr = dateToProcess.toISOString().slice(0, 10);
  logger.log('INFO', `[${passName}] Starting run for ${dateStr} with ${calculationsToRun.length} calcs.`);

  try {
    // --- 1. Data Loading ---
    const todayRefs = await getPortfolioPartRefs(firestore, dateStr);
    if (todayRefs.length === 0) {
      return { success: true, date: dateStr, message: "No portfolio data for today." };
    }

    // Only pay for loading yesterday's full day map when at least one
    // calculation's process() takes three args (today, yesterday, uid).
    // BUGFIX: `process` must be optionally chained — a calculation class
    // without a `process` method previously threw a TypeError here
    // (`.length` of undefined) and aborted the whole run.
    let yesterdayPortfolios = {};
    const requiresYesterday = calculationsToRun.some(
      c => sourcePackage[c.category]?.[c.calcName]?.prototype?.process?.length === 3
    );

    if (requiresYesterday) {
      const prev = new Date(dateToProcess);
      prev.setUTCDate(prev.getUTCDate() - 1);
      const prevStr = prev.toISOString().slice(0, 10);
      const yesterdayRefs = await getPortfolioPartRefs(firestore, prevStr);
      yesterdayPortfolios = await loadFullDayMap(firestore, yesterdayRefs);
    }

    // --- 2. In-Memory Processing ---
    const state = initializeCalculators(calculationsToRun, sourcePackage);
    await streamAndProcess(firestore, dateStr, todayRefs, state, passName, logger, yesterdayPortfolios);

    // --- 3. Write results PER-CALCULATION ---
    let successCount = 0;
    const resultsSubcollection = firestore.collection('unified_insights').doc(dateStr).collection('results');

    for (const key in state) {
      const calc = state[key];
      if (!calc || typeof calc.getResult !== 'function') continue;

      const [category, calcName] = key.split('/');
      const pendingWrites = [];
      const summaryData = {};

      try {
        const result = await Promise.resolve(calc.getResult());

        if (result && Object.keys(result).length > 0) {

          if (result.sharded_user_profitability) {
            // Sharded output: fan each non-empty shard map out to its own
            // top-level doc instead of the per-date results subcollection.
            for (const shardId in result.sharded_user_profitability) {
              const shardData = result.sharded_user_profitability[shardId];
              if (shardData && Object.keys(shardData).length > 0) {
                const shardRef = firestore.collection('unified_insights').doc(shardId);
                pendingWrites.push({ ref: shardRef, data: { profits: shardData } });
              }
            }
          } else {
            const computationDocRef = resultsSubcollection.doc(category)
              .collection('computations')
              .doc(calcName);
            pendingWrites.push({ ref: computationDocRef, data: result });
          }

          // Mark this calc complete on the date's summary doc (merge write),
          // which Pass 2 later reads to find still-missing calculations.
          if (!summaryData[category]) summaryData[category] = {};
          summaryData[category][calcName] = true;

          const docRef = firestore.collection('unified_insights').doc(dateStr);
          pendingWrites.push({ ref: docRef, data: summaryData });

          await commitBatchInChunks(
            firestore,
            pendingWrites,
            `Commit ${passName} ${dateStr} [${key}]`
          );
          successCount++;
        }
      } catch (e) {
        logger.log('ERROR', `[${passName}] getResult/Commit failed for ${key}`, { err: e.message });
      }
    }

    logger.log('SUCCESS', `[${passName}] Completed ${dateStr}. Success: ${successCount}/${calculationsToRun.length}.`);
    return { success: true, date: dateStr, successful: successCount, failed: calculationsToRun.length - successCount };

  } catch (err) {
    logger.log('ERROR', `[${passName}] Fatal error for ${dateStr}`, { errorMessage: err.message, stack: err.stack });
    return { success: false, date: dateStr, error: err.message };
  }
}
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
// --- HIGH-LEVEL ORCHESTRATOR (Moved from ComputationSystem.js) ---
|
|
184
|
+
|
|
185
|
+
/**
|
|
186
|
+
* Main entry point for the Unified Computation System logic.
|
|
187
|
+
* @param {Firestore} firestore A Firestore client instance.
|
|
188
|
+
* @param {Function} logger A logger instance.
|
|
189
|
+
* @returns {Promise<Object>} A summary of all passes.
|
|
190
|
+
*/
|
|
191
|
+
/**
 * Main entry point for the Unified Computation System logic.
 *
 * Pass 1 runs the daily-only calculations for dates that have no insights
 * doc at all; Pass 2 back-fills individual calculations still missing from
 * existing date docs, splitting them into historical vs. daily sub-runs.
 *
 * @param {Firestore} firestore A Firestore client instance.
 * @param {Function} logger A logger instance.
 * @returns {Promise<Object>} A summary of all passes.
 */
async function runComputationOrchestrator(firestore, logger) {
  const summary = { pass1_results: [], pass2_results: [] };
  const yesterday = new Date();
  yesterday.setUTCDate(yesterday.getUTCDate() - 1);
  const endDateUTC = new Date(Date.UTC(yesterday.getUTCFullYear(), yesterday.getUTCMonth(), yesterday.getUTCDate()));

  // --- Master Calculation Lists ---
  const masterHistoricalList = Object.entries(historicalCalculations).flatMap(([cat, calcs]) =>
    Object.keys(calcs).map(name => ({ category: cat, calcName: name }))
  );
  const masterDailyList = Object.entries(dailyCalculations).flatMap(([cat, calcs]) =>
    Object.keys(calcs).map(name => ({ category: cat, calcName: name }))
  );
  const masterFullList = [...masterHistoricalList, ...masterDailyList];

  // --- Date & Job Discovery ---
  const firstDate = await getFirstDateFromSourceData(firestore);
  const startDateUTC = new Date(Date.UTC(firstDate.getUTCFullYear(), firstDate.getUTCMonth(), firstDate.getUTCDate()));

  const allExpectedDates = getExpectedDateStrings(startDateUTC, endDateUTC);
  const insightDocs = await withRetry(() => firestore.collection('unified_insights').get(), "ListAllInsightDocs");
  const existingDateIds = new Set(insightDocs.docs.map(d => d.id).filter(id => /^\d{4}-\d{2}-\d{2}$/.test(id)));

  // --- PASS 1: Find and process entirely missing days (DAILY CALCS ONLY) ---
  const missingDates = allExpectedDates.filter(dateStr => !existingDateIds.has(dateStr));
  const pass1Jobs = missingDates.map(date => ({ date, missing: masterDailyList }));

  const pass1Results = await processJobsInParallel(
    pass1Jobs,
    (date, missing) => runUnifiedComputation(firestore, logger, date, missing, 'Pass 1 (Daily Only)', dailyCalculations),
    'Pass 1'
  );
  summary.pass1_results = pass1Results.map((r, i) => r.status === 'fulfilled' ? r.value : { success: false, date: pass1Jobs[i].date, error: r.reason?.message });

  // --- PASS 2: Find and process incomplete days ---
  // BUGFIX: Firestore rejects `in` filters with more than 30 values (and
  // throws on an empty array). The previous single query over the full
  // `allExpectedDates` list failed as soon as the date range grew past the
  // limit, so fetch the insight docs in chunks and merge the snapshots.
  const IN_QUERY_LIMIT = 30;
  const updatedInsightDocs = [];
  for (let i = 0; i < allExpectedDates.length; i += IN_QUERY_LIMIT) {
    const dateChunk = allExpectedDates.slice(i, i + IN_QUERY_LIMIT);
    const chunkSnap = await withRetry(
      () => firestore.collection('unified_insights').where(FieldPath.documentId(), 'in', dateChunk).get(),
      `ListAllInsightDocs-Pass2 (chunk ${Math.floor(i / IN_QUERY_LIMIT) + 1})`
    );
    updatedInsightDocs.push(...chunkSnap.docs);
  }

  const pass2Jobs = [];
  updatedInsightDocs.forEach(doc => {
    if (!/^\d{4}-\d{2}-\d{2}$/.test(doc.id)) return;

    // The summary doc stores { [category]: { [calcName]: true } } flags
    // written by runUnifiedComputation; anything unset is still missing.
    const data = doc.data();
    const missingCalcs = masterFullList.filter(({ category, calcName }) => !data?.[category]?.[calcName]);

    if (missingCalcs.length > 0) {
      pass2Jobs.push({ date: doc.id, missing: missingCalcs });
    }
  });

  const pass2Results = await processJobsInParallel(
    pass2Jobs,
    (date, missing) => {
      // Historical and daily calcs live in different source packages, so
      // each subset gets its own runUnifiedComputation call.
      const historicalMissing = missing.filter(c => HISTORICAL_CALC_NAMES.has(c.calcName));
      const dailyMissing = missing.filter(c => !HISTORICAL_CALC_NAMES.has(c.calcName));
      const promises = [];
      if (historicalMissing.length > 0) {
        promises.push(runUnifiedComputation(firestore, logger, date, historicalMissing, 'Pass 2 (Historical)', historicalCalculations));
      }
      if (dailyMissing.length > 0) {
        promises.push(runUnifiedComputation(firestore, logger, date, dailyMissing, 'Pass 2 (Daily)', dailyCalculations));
      }
      return Promise.all(promises);
    },
    'Pass 2'
  );
  summary.pass2_results = pass2Results.map((r, i) => r.status === 'fulfilled' ? r.value : { success: false, date: pass2Jobs[i].date, error: r.reason?.message });

  // --- FINISH ---
  // The wrapper function will log success and send the response.
  return summary;
}
|
|
262
|
+
|
|
263
|
+
// Public surface of this module. runUnifiedComputation, streamAndProcess and
// initializeCalculators are internal implementation details and stay private.
module.exports = { runComputationOrchestrator };
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Data loader for the Computation System.
|
|
3
|
+
* Provides functions for finding data references and loading data in chunks.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const { logger } = require("sharedsetup")(__filename);
|
|
7
|
+
// Import withRetry from the unified calculations package
|
|
8
|
+
const { withRetry } = require('aiden-shared-calculations-unified').utils;
|
|
9
|
+
|
|
10
|
+
const COLLECTIONS_TO_QUERY = ['NormalUserPortfolios', 'SpeculatorUserPortfolios'];
|
|
11
|
+
const PART_REF_BATCH_SIZE = 50; // Number of document snapshots to load at once
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* Gets a list of all portfolio "part" document references for a given date.
|
|
15
|
+
* This function is lightweight and only fetches references, not data.
|
|
16
|
+
*
|
|
17
|
+
* @param {Firestore} firestore A Firestore client instance.
|
|
18
|
+
* @param {string} dateString The date in YYYY-MM-DD format.
|
|
19
|
+
* @returns {Promise<Firestore.DocumentReference[]>} An array of DocumentReferences.
|
|
20
|
+
*/
|
|
21
|
+
/**
 * Gets a list of all portfolio "part" document references for a given date.
 * This function is lightweight and only fetches references, not data.
 *
 * IMPROVEMENT: the per-block `parts` listings are independent of each other,
 * so they are now issued concurrently with Promise.all instead of one
 * round-trip at a time. Promise.all preserves input order, so the returned
 * ref ordering matches the original sequential implementation.
 *
 * @param {Firestore} firestore A Firestore client instance.
 * @param {string} dateString The date in YYYY-MM-DD format.
 * @returns {Promise<Firestore.DocumentReference[]>} An array of DocumentReferences.
 */
async function getPortfolioPartRefs(firestore, dateString) {
  logger.log('INFO', `Getting portfolio part references for date: ${dateString}`);
  const allPartRefs = [];

  for (const collectionName of COLLECTIONS_TO_QUERY) {
    const blockDocsQuery = firestore.collection(collectionName);
    const blockDocRefs = await withRetry(
      () => blockDocsQuery.listDocuments(),
      `listDocuments(${collectionName})`
    );

    if (blockDocRefs.length === 0) {
      logger.log('WARN', `No block documents found in collection: ${collectionName}`);
      continue;
    }

    // List every block's parts in parallel; each listing keeps its own retry.
    const partLists = await Promise.all(
      blockDocRefs.map(blockDocRef => {
        const partsCollectionRef = blockDocRef.collection('snapshots').doc(dateString).collection('parts');
        return withRetry(
          () => partsCollectionRef.listDocuments(),
          `listDocuments(${partsCollectionRef.path})`
        );
      })
    );
    for (const partDocs of partLists) {
      allPartRefs.push(...partDocs);
    }
  }

  logger.log('INFO', `Found ${allPartRefs.length} part document references for ${dateString}.`);
  return allPartRefs;
}
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
/**
|
|
53
|
+
* Loads data from an array of DocumentReferences using firestore.getAll().
|
|
54
|
+
* Merges all data into a single map keyed by user ID.
|
|
55
|
+
*
|
|
56
|
+
* @param {Firestore} firestore A Firestore client instance.
|
|
57
|
+
* @param {Firestore.DocumentReference[]} refs An array of DocumentReferences to load.
|
|
58
|
+
* @returns {Promise<object>} A single map of { [userId]: portfolioData }.
|
|
59
|
+
*/
|
|
60
|
+
/**
 * Loads data from an array of DocumentReferences using firestore.getAll(),
 * batched to PART_REF_BATCH_SIZE refs per call, and merges every document's
 * fields into a single map keyed by user ID (later docs win on collisions).
 *
 * @param {Firestore} firestore A Firestore client instance.
 * @param {Firestore.DocumentReference[]} refs An array of DocumentReferences to load.
 * @returns {Promise<object>} A single map of { [userId]: portfolioData }.
 */
async function loadDataByRefs(firestore, refs) {
  if (!refs?.length) {
    return {};
  }

  // Pre-slice the refs so the loop body only deals with one batch at a time.
  const batches = [];
  for (let start = 0; start < refs.length; start += PART_REF_BATCH_SIZE) {
    batches.push(refs.slice(start, start + PART_REF_BATCH_SIZE));
  }

  const merged = {};
  for (const [batchIndex, batchRefs] of batches.entries()) {
    const snapshots = await withRetry(
      () => firestore.getAll(...batchRefs),
      `getAll(batch ${batchIndex})`
    );

    for (const snap of snapshots) {
      if (!snap.exists) continue;
      const data = snap.data();
      if (data && typeof data === 'object') {
        Object.assign(merged, data);
      } else {
        logger.log('WARN', `Document ${snap.id} exists but data is not an object. Data:`, data);
      }
    }
  }
  return merged;
}
|
|
88
|
+
|
|
89
|
+
/** Loads all portfolio data for a day by streaming part references. */
|
|
90
|
+
/** Loads every user's portfolio for one day by resolving all part references. */
async function loadFullDayMap(firestore, partRefs) {
  if (!partRefs.length) {
    return {};
  }
  logger.log('TRACE', `Loading full day map from ${partRefs.length} references...`);
  const dayMap = await loadDataByRefs(firestore, partRefs);
  logger.log('TRACE', `Full day map loaded with ${Object.keys(dayMap).length} users.`);
  return dayMap;
}
|
|
97
|
+
|
|
98
|
+
// Public API: reference discovery plus chunked data loading.
module.exports = { getPortfolioPartRefs, loadDataByRefs, loadFullDayMap };
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Helper functions for the Unified Computation System.
|
|
3
|
+
* This module imports all calculations and utilities from the single
|
|
4
|
+
* `aiden-shared-calculations-unified` package and categorizes them for the orchestrator.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
const { FieldValue, FieldPath } = require('@google-cloud/firestore');
|
|
8
|
+
const { logger } = require("sharedsetup")(__filename);
|
|
9
|
+
const { calculations, utils } = require('aiden-shared-calculations-unified');
|
|
10
|
+
|
|
11
|
+
const { withRetry } = utils; // Get withRetry from the package.
|
|
12
|
+
|
|
13
|
+
// --- Configuration ---
|
|
14
|
+
const BATCH_SIZE_LIMIT = 500;
|
|
15
|
+
const MAX_CONCURRENT_DATES = 3;
|
|
16
|
+
|
|
17
|
+
// --- Calculation Categorization ---
|
|
18
|
+
// Calculation names that compare today's snapshot against yesterday's; the
// orchestrator gives these a (today, yesterday, uid) process() signature and
// skips them for users with no prior-day data.
const HISTORICAL_CALC_NAMES = new Set([
  'paper-vs-diamond-hands',
  'smart-money-flow',
  'profitability-migration',
  'user-profitability-tracker',
  'sector-rotation',
  'crowd-conviction-score',
  'risk-appetite-change',
  'drawdown-response',
  'gain-response',
  'tsl-effectiveness',
  'position-count-pnl',
  'diversification-pnl',
]);
|
|
24
|
+
|
|
25
|
+
// Split the unified calculation catalog into two buckets with the same
// { category: { calcName: Class } } shape: historical calcs (need
// yesterday's data) vs. everything else (daily calcs).
const historicalCalculations = {};
const dailyCalculations = {};

for (const [category, calcs] of Object.entries(calculations)) {
  for (const [calcName, CalcClass] of Object.entries(calcs)) {
    const bucket = HISTORICAL_CALC_NAMES.has(calcName) ? historicalCalculations : dailyCalculations;
    if (!bucket[category]) bucket[category] = {};
    bucket[category][calcName] = CalcClass;
  }
}
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
// --- UTILITY FUNCTIONS ---
|
|
42
|
+
|
|
43
|
+
/** Commits Firestore batch writes in chunks to respect the 500 operation limit. */
|
|
44
|
+
/**
 * Commits an array of { ref, data } merge-writes in chunks of at most
 * BATCH_SIZE_LIMIT operations (Firestore's 500-op batch ceiling), retrying
 * each chunk's commit via withRetry.
 */
async function commitBatchInChunks(firestore, writes, operationName) {
  if (!writes.length) {
    logger.log('WARN', `[${operationName}] No writes to commit.`);
    return;
  }

  const totalChunks = Math.ceil(writes.length / BATCH_SIZE_LIMIT);
  for (let start = 0; start < writes.length; start += BATCH_SIZE_LIMIT) {
    const chunk = writes.slice(start, start + BATCH_SIZE_LIMIT);
    const batch = firestore.batch();
    for (const { ref, data } of chunk) {
      batch.set(ref, data, { merge: true });
    }

    const chunkNum = (start / BATCH_SIZE_LIMIT) + 1;
    await withRetry(
      () => batch.commit(),
      `${operationName} (Chunk ${chunkNum}/${totalChunks})`
    );
    logger.log('INFO', `[${operationName}] Committed chunk ${chunkNum}/${totalChunks} (${chunk.length} ops).`);
  }
}
|
|
63
|
+
|
|
64
|
+
// --- DATE & FLOW CONTROL HELPERS ---
|
|
65
|
+
|
|
66
|
+
/** Generates a list of date strings between a start and end date (inclusive). */
|
|
67
|
+
/** Returns every YYYY-MM-DD string from startDate to endDate, inclusive (UTC). */
function getExpectedDateStrings(startDate, endDate) {
  if (startDate > endDate) {
    return [];
  }
  const dateStrings = [];
  const cursor = new Date(startDate);
  while (cursor <= endDate) {
    dateStrings.push(cursor.toISOString().slice(0, 10));
    cursor.setUTCDate(cursor.getUTCDate() + 1);
  }
  return dateStrings;
}
|
|
76
|
+
|
|
77
|
+
/** Processes job objects in parallel batches. */
|
|
78
|
+
/**
 * Processes job objects ({ date, missing }) through taskFunction in batches
 * of MAX_CONCURRENT_DATES, oldest date first, returning the allSettled
 * outcomes in processing order.
 *
 * NOTE: `jobs` is deliberately sorted IN PLACE — callers pair the returned
 * results back up with this same (now sorted) array by index, so the
 * mutation is part of the contract. Do not "fix" it to sort a copy.
 */
async function processJobsInParallel(jobs, taskFunction, passName) {
  if (jobs.length === 0) {
    logger.log('INFO', `[${passName}] No jobs to process.`);
    return [];
  }

  logger.log('INFO', `[${passName}] Processing ${jobs.length} jobs with ${MAX_CONCURRENT_DATES} parallel workers.`);
  jobs.sort((a, b) => new Date(a.date) - new Date(b.date)); // Ensure chronological processing
  const settled = [];
  for (let offset = 0; offset < jobs.length; offset += MAX_CONCURRENT_DATES) {
    const batch = jobs.slice(offset, offset + MAX_CONCURRENT_DATES);
    const outcomes = await Promise.allSettled(
      batch.map(job => taskFunction(new Date(job.date), job.missing))
    );
    settled.push(...outcomes);
  }
  return settled;
}
|
|
93
|
+
|
|
94
|
+
/**
|
|
95
|
+
* Finds the earliest date document by searching inside the 'snapshots' subcollection
|
|
96
|
+
* of all block documents (e.g., '1M', '2M') within a given collection.
|
|
97
|
+
*/
|
|
98
|
+
/**
 * Finds the earliest dated snapshot across every block document (e.g. '1M',
 * '2M') in `collectionName`, by asking each block's 'snapshots' subcollection
 * for its lowest document ID at or after '2000-01-01'.
 * Returns a Date, or null when nothing is found or an error occurs.
 */
async function getFirstDateFromCollection(firestore, collectionName) {
  let earliestDate = null;
  const DATE_ID = /^\d{4}-\d{2}-\d{2}$/;
  try {
    const blockDocRefs = await withRetry(
      () => firestore.collection(collectionName).listDocuments(),
      `GetBlocks(${collectionName})`
    );

    if (blockDocRefs.length === 0) {
      logger.log('WARN', `No block documents found in collection: ${collectionName}`);
      return null;
    }

    for (const blockDocRef of blockDocRefs) {
      const snapshotQuery = blockDocRef.collection('snapshots')
        .where(FieldPath.documentId(), '>=', '2000-01-01')
        .orderBy(FieldPath.documentId(), 'asc')
        .limit(1);

      const snapshotSnap = await withRetry(
        () => snapshotQuery.get(),
        `GetEarliestSnapshot(${blockDocRef.path})`
      );

      if (snapshotSnap.empty) continue;

      const candidateId = snapshotSnap.docs[0].id;
      if (!DATE_ID.test(candidateId)) continue;

      // Doc IDs are ISO dates, so Date parsing here is UTC and unambiguous.
      const candidate = new Date(candidateId);
      if (earliestDate === null || candidate < earliestDate) {
        earliestDate = candidate;
      }
    }
  } catch (e) {
    logger.log('ERROR', `GetFirstDate failed for ${collectionName}`, { errorMessage: e.message });
  }
  return earliestDate;
}
|
|
134
|
+
|
|
135
|
+
/**
|
|
136
|
+
* Determines the absolute earliest date from source portfolio data collections.
|
|
137
|
+
*/
|
|
138
|
+
/**
 * Determines the absolute earliest date present in either source portfolio
 * collection, falling back to 30 days ago when neither has any data.
 *
 * IMPROVEMENT: the two collection scans are independent, so they now run
 * concurrently via Promise.all instead of back-to-back awaits (each scan
 * already handles its own errors and returns null on failure, so allSettled
 * is unnecessary).
 *
 * @param {Firestore} firestore A Firestore client instance.
 * @returns {Promise<Date>} The earliest source-data date (UTC).
 */
async function getFirstDateFromSourceData(firestore) {
  logger.log('INFO', 'Querying for the earliest date from source portfolio data...');

  const [investorDate, speculatorDate] = await Promise.all([
    getFirstDateFromCollection(firestore, 'NormalUserPortfolios'),
    getFirstDateFromCollection(firestore, 'SpeculatorUserPortfolios'),
  ]);

  let earliestDate;
  if (investorDate && speculatorDate) {
    earliestDate = investorDate < speculatorDate ? investorDate : speculatorDate;
  } else {
    earliestDate = investorDate || speculatorDate;
  }

  if (earliestDate) {
    logger.log('INFO', `Found earliest source data date: ${earliestDate.toISOString().slice(0, 10)}`);
    return earliestDate;
  }

  // Fallback if no source data is found at all.
  const d = new Date();
  d.setUTCDate(d.getUTCDate() - 30);
  logger.log('WARN', `No source data found. Defaulting first date to: ${d.toISOString().slice(0, 10)}`);
  return d;
}
|
|
163
|
+
|
|
164
|
+
module.exports = {
|
|
165
|
+
FieldValue, FieldPath,
|
|
166
|
+
historicalCalculations, dailyCalculations, unifiedUtils: utils, HISTORICAL_CALC_NAMES,
|
|
167
|
+
withRetry, commitBatchInChunks,
|
|
168
|
+
getExpectedDateStrings, processJobsInParallel, getFirstDateFromSourceData,
|
|
169
|
+
};
|
package/index.js
CHANGED
|
@@ -4,12 +4,14 @@
|
|
|
4
4
|
* to be shared across multiple Cloud Functions.
|
|
5
5
|
*/
|
|
6
6
|
|
|
7
|
-
const core = require('./functions/core
|
|
7
|
+
const core = require('./functions/core');
|
|
8
8
|
const Orchestrator = require('./functions/orchestrator');
|
|
9
|
-
const TaskEngine = require('./functions/task-engine');
|
|
9
|
+
const TaskEngine = require('./functions/task-engine');
|
|
10
|
+
const ComputationSystem = require('./functions/computation-system'); // Add this
|
|
10
11
|
|
|
11
12
|
module.exports = {
|
|
12
13
|
core,
|
|
13
14
|
Orchestrator,
|
|
14
|
-
TaskEngine,
|
|
15
|
+
TaskEngine,
|
|
16
|
+
ComputationSystem, // Add this
|
|
15
17
|
};
|
package/package.json
CHANGED
|
@@ -1,13 +1,14 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "bulltrackers-module",
|
|
3
|
-
"version": "1.0.
|
|
3
|
+
"version": "1.0.10",
|
|
4
4
|
"description": "Helper Functions for Bulltrackers.",
|
|
5
5
|
"main": "index.js",
|
|
6
6
|
"files": [
|
|
7
7
|
"index.js",
|
|
8
8
|
"functions/orchestrator/",
|
|
9
9
|
"functions/task-engine/",
|
|
10
|
-
"functions/core/"
|
|
10
|
+
"functions/core/",
|
|
11
|
+
"functions/computation-system/"
|
|
11
12
|
],
|
|
12
13
|
"scripts": {
|
|
13
14
|
"test": "echo \"Error: no test specified\" && exit 1"
|
|
@@ -22,7 +23,8 @@
|
|
|
22
23
|
"dependencies": {
|
|
23
24
|
"@google-cloud/firestore": "^7.11.3",
|
|
24
25
|
"sharedsetup": "latest",
|
|
25
|
-
"require-all": "^3.0.0"
|
|
26
|
+
"require-all": "^3.0.0",
|
|
27
|
+
"aiden-shared-calculations-unified": "1.0.0"
|
|
26
28
|
},
|
|
27
29
|
"engines": {
|
|
28
30
|
"node": ">=20"
|