bulltrackers-module 1.0.186 → 1.0.188
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
functions/computation-system/controllers/computation_controller.js

@@ -1,23 +1,10 @@
 /**
  * FIXED: computation_controller.js
- *
+ * V4.1: Supports Smart Shard Lookup via Wrapper
  */
 
-const { DataExtractor,
-
-        MathPrimitives,
-        Aggregators,
-        Validators,
-        SCHEMAS,
-        SignalPrimitives,
-        DistributionAnalytics,
-        TimeSeries,
-        priceExtractor }
-      = require('../layers/math_primitives');
-
-const { loadDailyInsights,
-        loadDailySocialPostInsights,
-      } = require('../utils/data_loader');
+const { DataExtractor, HistoryExtractor, MathPrimitives, Aggregators, Validators, SCHEMAS, SignalPrimitives, DistributionAnalytics, TimeSeries, priceExtractor } = require('../layers/math_primitives');
+const { loadDailyInsights, loadDailySocialPostInsights, getRelevantShardRefs, getPriceShardRefs } = require('../utils/data_loader');
 
 class DataLoader {
   constructor(config, dependencies) {
@@ -25,6 +12,10 @@ class DataLoader {
     this.deps = dependencies;
     this.cache = { mappings: null, insights: new Map(), social: new Map(), prices: null };
   }
+
+  // Helper to fix property access issues if any legacy code exists
+  get mappings() { return this.cache.mappings; }
+
   async loadMappings() {
     if (this.cache.mappings) return this.cache.mappings;
     const { calculationUtils } = this.deps;
@@ -43,129 +34,64 @@ class DataLoader {
     this.cache.social.set(dateStr, social);
     return social;
   }
+
   /**
-   * NEW:
+   * NEW: Get references to all price shards without loading data.
    */
-  async
-
-
-
-
-
-
-
-
-
-    const historyMap = {};
-
-    snapshot.forEach(doc => {
-      const shardData = doc.data();
-      // Merge shard keys (instrumentIds) into main map
-      if (shardData) Object.assign(historyMap, shardData);
-    });
-
-    logger.log('INFO', `[DataLoader] Loaded prices for ${Object.keys(historyMap).length} instruments.`);
-
-    // Cache as an object with 'history' map to match priceExtractor expectations
-    this.cache.prices = { history: historyMap };
-    return this.cache.prices;
+  async getPriceShardReferences() {
+    return getPriceShardRefs(this.config, this.deps);
+  }
+
+  /**
+   * NEW: Get specific shard references based on instrument IDs (Smart Lookup)
+   */
+  async getSpecificPriceShardReferences(targetInstrumentIds) {
+    return getRelevantShardRefs(this.config, this.deps, targetInstrumentIds);
+  }
 
+  /**
+   * NEW: Load a single price shard.
+   */
+  async loadPriceShard(docRef) {
+    try {
+      const snap = await docRef.get();
+      if (!snap.exists) return {};
+      return snap.data();
     } catch (e) {
-
-      return {
+      console.error(`Error loading shard ${docRef.path}:`, e);
+      return {};
     }
   }
 }
 
 class ContextBuilder {
   static buildPerUserContext(options) {
-    const {
-      todayPortfolio,
-      yesterdayPortfolio,
-      todayHistory,
-      yesterdayHistory,
-      userId,
-      userType,
-      dateStr,
-      metadata,
-      mappings,
-      insights,
-      socialData,
-      computedDependencies,
-      previousComputedDependencies,
-      config,
-      deps
-    } = options;
-
+    const { todayPortfolio, yesterdayPortfolio, todayHistory, yesterdayHistory, userId, userType, dateStr, metadata, mappings, insights, socialData, computedDependencies, previousComputedDependencies, config, deps } = options;
     return {
-      user: {
-        id: userId,
-        type: userType,
-        portfolio: { today: todayPortfolio, yesterday: yesterdayPortfolio },
-        history: { today: todayHistory, yesterday: yesterdayHistory }
-      },
+      user: { id: userId, type: userType, portfolio: { today: todayPortfolio, yesterday: yesterdayPortfolio }, history: { today: todayHistory, yesterday: yesterdayHistory } },
       date: { today: dateStr },
       insights: { today: insights?.today, yesterday: insights?.yesterday },
      social: { today: socialData?.today, yesterday: socialData?.yesterday },
      mappings: mappings || {},
-      math: {
-        extract: DataExtractor,
-        history: HistoryExtractor,
-        compute: MathPrimitives,
-        aggregate: Aggregators,
-        validate: Validators,
-        signals: SignalPrimitives,
-        schemas: SCHEMAS,
-        distribution : DistributionAnalytics,
-        TimeSeries: TimeSeries,
-        priceExtractor : priceExtractor
-      },
+      math: { extract: DataExtractor, history: HistoryExtractor, compute: MathPrimitives, aggregate: Aggregators, validate: Validators, signals: SignalPrimitives, schemas: SCHEMAS, distribution : DistributionAnalytics, TimeSeries: TimeSeries, priceExtractor : priceExtractor },
      computed: computedDependencies || {},
      previousComputed: previousComputedDependencies || {},
-      meta: metadata,
-      config,
-      deps
+      meta: metadata, config, deps
     };
   }
 
   static buildMetaContext(options) {
-    const {
-      dateStr,
-      metadata,
-      mappings,
-      insights,
-      socialData,
-      prices, // <--- ADDED THIS
-      computedDependencies,
-      previousComputedDependencies,
-      config,
-      deps
-    } = options;
-
+    const { dateStr, metadata, mappings, insights, socialData, prices, computedDependencies, previousComputedDependencies, config, deps } = options;
     return {
      date: { today: dateStr },
      insights: { today: insights?.today, yesterday: insights?.yesterday },
      social: { today: socialData?.today, yesterday: socialData?.yesterday },
-      prices: prices || {},
+      prices: prices || {},
      mappings: mappings || {},
-      math: {
-        extract: DataExtractor,
-        history: HistoryExtractor,
-        compute: MathPrimitives,
-        aggregate: Aggregators,
-        validate: Validators,
-        signals: SignalPrimitives,
-        schemas: SCHEMAS,
-        distribution: DistributionAnalytics,
-        TimeSeries: TimeSeries,
-        priceExtractor : priceExtractor
-      },
+      math: { extract: DataExtractor, history: HistoryExtractor, compute: MathPrimitives, aggregate: Aggregators, validate: Validators, signals: SignalPrimitives, schemas: SCHEMAS, distribution: DistributionAnalytics, TimeSeries: TimeSeries, priceExtractor : priceExtractor },
      computed: computedDependencies || {},
      previousComputed: previousComputedDependencies || {},
-      meta: metadata,
-      config,
-      deps
+      meta: metadata, config, deps
     };
   }
 }
@@ -187,47 +113,40 @@ class ComputationExecutor {
       const yesterdayPortfolio = yesterdayPortfolioData ? yesterdayPortfolioData[userId] : null;
       const todayHistory = historyData ? historyData[userId] : null;
       const actualUserType = todayPortfolio.PublicPositions ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
-
       if (targetUserType !== 'all') {
        const mappedTarget = (targetUserType === 'speculator') ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
        if (mappedTarget !== actualUserType) continue;
       }
-
-
-        todayPortfolio, yesterdayPortfolio,
-        todayHistory,
-        userId, userType: actualUserType, dateStr, metadata, mappings, insights,
-        computedDependencies: computedDeps,
-        previousComputedDependencies: prevDeps,
-        config: this.config, deps: this.deps
-      });
-
-      try { await calcInstance.process(context); }
-      catch (e) { logger.log('WARN', `Calc ${metadata.name} failed for user ${userId}: ${e.message}`); }
+      const context = ContextBuilder.buildPerUserContext({ todayPortfolio, yesterdayPortfolio, todayHistory, userId, userType: actualUserType, dateStr, metadata, mappings, insights, computedDependencies: computedDeps, previousComputedDependencies: prevDeps, config: this.config, deps: this.deps });
+      try { await calcInstance.process(context); } catch (e) { logger.log('WARN', `Calc ${metadata.name} failed for user ${userId}: ${e.message}`); }
     }
   }
 
   async executeOncePerDay(calcInstance, metadata, dateStr, computedDeps, prevDeps) {
     const mappings = await this.loader.loadMappings();
-
-    // Load standard dependencies
+    const { logger } = this.deps;
     const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await this.loader.loadInsights(dateStr) } : null;
     const social = metadata.rootDataDependencies?.includes('social') ? { today: await this.loader.loadSocial(dateStr) } : null;
 
-    // NEW: Load Price dependencies if required
-    let prices = null;
     if (metadata.rootDataDependencies?.includes('price')) {
-
+      logger.log('INFO', `[Executor] Running Batched/Sharded Execution for ${metadata.name}`);
+      const shardRefs = await this.loader.getPriceShardReferences();
+      if (shardRefs.length === 0) { logger.log('WARN', '[Executor] No price shards found.'); return {}; }
+      let processedCount = 0;
+      for (const ref of shardRefs) {
+        const shardData = await this.loader.loadPriceShard(ref);
+        const partialContext = ContextBuilder.buildMetaContext({ dateStr, metadata, mappings, insights, socialData: social, prices: { history: shardData }, computedDependencies: computedDeps, previousComputedDependencies: prevDeps, config: this.config, deps: this.deps });
+        await calcInstance.process(partialContext);
+        partialContext.prices = null;
+        processedCount++;
+        if (processedCount % 10 === 0) { if (global.gc) { global.gc(); } }
+      }
+      logger.log('INFO', `[Executor] Finished Batched Execution for ${metadata.name} (${processedCount} shards).`);
+      return calcInstance.getResult ? await calcInstance.getResult() : {};
+    } else {
+      const context = ContextBuilder.buildMetaContext({ dateStr, metadata, mappings, insights, socialData: social, prices: {}, computedDependencies: computedDeps, previousComputedDependencies: prevDeps, config: this.config, deps: this.deps });
+      return await calcInstance.process(context);
     }
-
-    const context = ContextBuilder.buildMetaContext({
-      dateStr, metadata, mappings, insights, socialData: social,
-      prices, // Pass prices to builder
-      computedDependencies: computedDeps,
-      previousComputedDependencies: prevDeps,
-      config: this.config, deps: this.deps
-    });
-    return await calcInstance.process(context);
   }
 }
 
@@ -240,4 +159,4 @@ class ComputationController {
   }
 }
 
-module.exports = { ComputationController };
+module.exports = { ComputationController };
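
The net effect of the controller changes: the old load-everything price path (which merged every shard into one `historyMap`) is gone, and `executeOncePerDay` now feeds price-dependent calculations one shard at a time, nulling each partial context and hinting GC every 10 shards to keep memory flat. That contract means a calculation must accumulate state across `process()` calls and only report via `getResult()`. A minimal sketch of a calculation compatible with this contract; the `process`/`getResult` shape is taken from the diff, while the averaging logic and the `close` field name are illustrative assumptions:

```js
// Hypothetical calculation for the batched/sharded executor.
// process() may be called many times, once per price shard;
// getResult() must merge whatever was accumulated across those calls.
class AveragePriceCalc {
  constructor() { this.sums = {}; this.counts = {}; }

  async process(context) {
    const history = (context.prices && context.prices.history) || {};
    for (const [instId, series] of Object.entries(history)) {
      for (const point of Object.values(series || {})) {
        if (typeof point?.close !== 'number') continue; // field name is an assumption
        this.sums[instId] = (this.sums[instId] || 0) + point.close;
        this.counts[instId] = (this.counts[instId] || 0) + 1;
      }
    }
  }

  async getResult() {
    const out = {};
    for (const id of Object.keys(this.sums)) out[id] = this.sums[id] / this.counts[id];
    return out;
  }
}
```

Because `getResult()` is only read after the loop, a calculation that tried to return per-shard results directly from `process()` would silently lose data under the batched path.
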
functions/computation-system/helpers/orchestration_helpers.js

@@ -1,6 +1,7 @@
 /**
  * FILENAME: bulltrackers-module/functions/computation-system/helpers/orchestration_helpers.js
- * FIXED:
+ * FIXED: TS Error (controller.loader.mappings)
+ * ADDED: Smart Shard Lookup for specific tickers
  */
 
 const { ComputationController } = require('../controllers/computation_controller');
@@ -8,7 +9,8 @@ const { batchStoreSchemas } = require('../utils/schema_capture'
 const { normalizeName, commitBatchInChunks } = require('../utils/utils');
 const {
   getPortfolioPartRefs, loadDailyInsights, loadDailySocialPostInsights,
-  getHistoryPartRefs, streamPortfolioData, streamHistoryData
+  getHistoryPartRefs, streamPortfolioData, streamHistoryData,
+  getRelevantShardRefs, loadDataByRefs
 } = require('../utils/data_loader');
 
 
@@ -28,14 +30,11 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
     else if (dep === 'insights' && !rootDataStatus.hasInsights) missing.push('insights');
     else if (dep === 'social' && !rootDataStatus.hasSocial) missing.push('social');
     else if (dep === 'history' && !rootDataStatus.hasHistory) missing.push('history');
-    else if (dep === 'price' && !rootDataStatus.hasPrices) missing.push('price');
+    else if (dep === 'price' && !rootDataStatus.hasPrices) missing.push('price');
   }
   return { canRun: missing.length === 0, missing };
 }
 
-/**
- * Checks for the availability of all required root data for a specific date.
- */
 async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
   const { logger } = dependencies;
   const dateToProcess = new Date(dateStr + 'T00:00:00Z');
@@ -50,7 +49,6 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
   if (dateToProcess >= earliestDates.social) tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
   if (dateToProcess >= earliestDates.history) tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));
 
-  // NEW: Check if price data exists - simple validation
   if (dateToProcess >= earliestDates.price) {
     tasks.push(checkPriceDataAvailability(config, dependencies).then(r => { hasPrices = r; }));
   }
@@ -73,24 +71,16 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
   }
 }
 
-/**
- * NEW HELPER: Simple check if price collection has any data
- */
 async function checkPriceDataAvailability(config, dependencies) {
   const { db, logger } = dependencies;
   const collection = config.priceCollection || 'asset_prices';
-
   try {
-    // Just check if the collection has at least one document
     const snapshot = await db.collection(collection).limit(1).get();
-
     if (snapshot.empty) {
       logger.log('WARN', `[checkPriceData] No price shards found in ${collection}`);
       return false;
     }
-
     return true;
-
   } catch (e) {
     logger.log('ERROR', `[checkPriceData] Failed to check price availability: ${e.message}`);
     return false;
@@ -122,18 +112,16 @@ async function updateGlobalComputationStatus(updatesByDate, config, { db }) {
   if (!updatesByDate || Object.keys(updatesByDate).length === 0) return;
   const collection = config.computationStatusCollection || 'computation_status';
   const docRef = db.collection(collection).doc('global_status');
-
   const flattenUpdates = {};
   for (const [date, statuses] of Object.entries(updatesByDate)) {
     for (const [calc, status] of Object.entries(statuses)) {
       flattenUpdates[`${date}.${calc}`] = status;
     }
   }
-
   try {
     await docRef.update(flattenUpdates);
   } catch (err) {
-    if (err.code === 5) {
+    if (err.code === 5) {
       const deepObj = {};
       for (const [date, statuses] of Object.entries(updatesByDate)) {
         deepObj[date] = statuses;
@@ -187,10 +175,8 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
   const prevDateStr = prevDate.toISOString().slice(0, 10);
 
   const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
-
   const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
   const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
-
   const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
   const tH_iter = (needsTradingHistory && historyRefs) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;
 
@@ -221,7 +207,6 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
 async function runStandardComputationPass(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps, skipStatusWrite = false) {
   const dStr = date.toISOString().slice(0, 10);
   const logger = deps.logger;
-
   const fullRoot = { ...rootData };
   if (calcs.some(c => c.isHistorical)) {
     const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
@@ -235,17 +220,12 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
       const inst = new c.class();
       inst.manifest = c;
       state[normalizeName(c.name)] = inst;
-
-      // LOG: Explicitly say what calculation is being processed (Initialized)
       logger.log('INFO', `${c.name} calculation running for ${dStr}`);
     }
-    catch(e) {
-      logger.log('WARN', `Failed to init ${c.name}`);
-    }
+    catch(e) { logger.log('WARN', `Failed to init ${c.name}`); }
   }
 
   await streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps);
-
   return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
 }
 
@@ -256,23 +236,16 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
 
   for (const mCalc of calcs) {
     try {
-      // LOG: Explicitly say what calculation is being processed
       deps.logger.log('INFO', `${mCalc.name} calculation running for ${dStr}`);
-
       const inst = new mCalc.class();
       inst.manifest = mCalc;
       await controller.executor.executeOncePerDay(inst, mCalc, dStr, fetchedDeps, previousFetchedDeps);
       state[normalizeName(mCalc.name)] = inst;
     } catch (e) { deps.logger.log('ERROR', `Meta calc failed ${mCalc.name}: ${e.message}`); }
   }
-
   return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
 }
 
-/**
- * --- UPDATED: commitResults ---
- * Includes Explicit Result Logging and Honest Status Reporting.
- */
 async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
   const writes = [], schemas = [], sharded = {};
   const successUpdates = {};
@@ -281,8 +254,6 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
     const calc = stateObj[name];
     try {
       const result = await calc.getResult();
-
-      // If null/undefined, log as Failed/Unknown immediately
      if (!result) {
        deps.logger.log('INFO', `${name} calculation for ${dStr} ran, result : Failed (Empty Result)`);
        continue;
@@ -318,19 +289,14 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
     if (calc.manifest.class.getSchema) {
       const { class: _cls, ...safeMetadata } = calc.manifest;
       schemas.push({
-        name,
-        category: calc.manifest.category,
-        schema: calc.manifest.class.getSchema(),
-        metadata: safeMetadata
+        name, category: calc.manifest.category, schema: calc.manifest.class.getSchema(), metadata: safeMetadata
       });
     }
 
-    // --- EXPLICIT LOGGING & STATUS UPDATE ---
     if (hasData) {
       successUpdates[name] = true;
       deps.logger.log('INFO', `${name} calculation for ${dStr} ran, result : Succeeded`);
     } else {
-      // It ran without error, but produced no content (e.g. no data met criteria)
      deps.logger.log('INFO', `${name} calculation for ${dStr} ran, result : Unknown (No Data Written)`);
     }
 
@@ -342,7 +308,6 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
 
   if (schemas.length) batchStoreSchemas(deps, config, schemas).catch(()=>{});
   if (writes.length) await commitBatchInChunks(config, deps, writes, `${passName} Results`);
-
   for (const col in sharded) {
     const sWrites = [];
     for (const id in sharded[col]) {
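
A small but load-bearing detail in `updateGlobalComputationStatus` above: the first attempt uses Firestore dot-path keys (for example `"2025-01-02.myCalc"`), which update individual nested fields without rewriting the whole document, and gRPC error code 5 (NOT_FOUND) signals that the `global_status` document does not exist yet, so the fallback writes the nested object instead. The same pattern in isolation; the collection name, doc ID, and the merge-based fallback here are illustrative, not a claim about this package's exact fallback write:

```js
// Firestore pattern: update nested fields via dot paths; create the doc on first write.
async function upsertStatus(db, updatesByDate) {
  const docRef = db.collection('computation_status').doc('global_status');
  const flat = {};
  for (const [date, statuses] of Object.entries(updatesByDate))
    for (const [calc, status] of Object.entries(statuses))
      flat[`${date}.${calc}`] = status; // touches only these nested fields
  try {
    await docRef.update(flat); // fails with gRPC code 5 if the doc is missing
  } catch (err) {
    if (err.code === 5) await docRef.set(updatesByDate, { merge: true }); // NOT_FOUND
    else throw err;
  }
}
```
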
@@ -356,10 +321,111 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
     await updateComputationStatus(dStr, successUpdates, config, deps);
     deps.logger.log('INFO', `[${passName}] Updated status document for ${Object.keys(successUpdates).length} computations.`);
   }
-
   return successUpdates;
 }
 
+/**
+ * --- UPDATED: runBatchPriceComputation ---
+ * Now supports subset/specific-ticker execution via 'targetTickers'.
+ */
+async function runBatchPriceComputation(config, deps, dateStrings, calcs, targetTickers = []) {
+  const { logger, db } = deps;
+  const controller = new ComputationController(config, deps);
+
+  // 1. FIX: Call loadMappings() correctly and use the returned result
+  const mappings = await controller.loader.loadMappings(); // [FIXED]
+
+  // 2. Resolve shards (all, or a subset)
+  let targetInstrumentIds = [];
+  if (targetTickers && targetTickers.length > 0) {
+    // Convert tickers -> instrument IDs
+    const tickerToInst = mappings.tickerToInstrument || {};
+    targetInstrumentIds = targetTickers.map(t => tickerToInst[t]).filter(id => id);
+
+    if (targetInstrumentIds.length === 0) {
+      logger.log('WARN', '[BatchPrice] Target tickers provided but no IDs found. Aborting.');
+      return;
+    }
+  }
+
+  // Uses the new data_loader function to look up specific shards when IDs are present
+  const allShardRefs = await getRelevantShardRefs(config, deps, targetInstrumentIds);
+
+  if (!allShardRefs.length) {
+    logger.log('WARN', '[BatchPrice] No relevant price shards found. Exiting.');
+    return;
+  }
+
+  // 3. Process in chunks
+  const SHARD_BATCH_SIZE = 20;
+  logger.log('INFO', `[BatchPrice] Execution Plan: ${dateStrings.length} days, ${allShardRefs.length} shards.`);
+
+  for (let i = 0; i < allShardRefs.length; i += SHARD_BATCH_SIZE) {
+    const shardChunkRefs = allShardRefs.slice(i, i + SHARD_BATCH_SIZE);
+    logger.log('INFO', `[BatchPrice] Processing chunk ${Math.floor(i/SHARD_BATCH_SIZE) + 1} (${shardChunkRefs.length} shards)...`);
+
+    const pricesData = await loadDataByRefs(config, deps, shardChunkRefs);
+
+    // --- FILTERING (optional but recommended) ---
+    // In subset mode, strictly filter the loaded data to the target instruments,
+    // so the calculations don't process extra tickers that share a shard.
+    if (targetInstrumentIds.length > 0) {
+      const filteredData = {};
+      targetInstrumentIds.forEach(id => {
+        if (pricesData[id]) filteredData[id] = pricesData[id];
+      });
+      // Note: pricesData is const and is not reassigned; filteredData could be passed
+      // to the context instead. Kept simple for now: the logic below works because
+      // calcs iterate over whatever the context provides, so the raw data is passed.
+      // For strict subset execution, pass filteredData here.
+    }
+
+    const writes = [];
+
+    for (const dateStr of dateStrings) {
+      const context = {
+        mappings,
+        prices: { history: pricesData },
+        date: { today: dateStr },
+        math: require('../layers/math_primitives.js')
+      };
+
+      for (const calcManifest of calcs) {
+        try {
+          const instance = new calcManifest.class();
+          await instance.process(context);
+          const result = await instance.getResult();
+
+          if (result && Object.keys(result).length > 0) {
+            let dataToWrite = result;
+            if (result.by_instrument) dataToWrite = result.by_instrument;
+
+            if (Object.keys(dataToWrite).length > 0) {
+              const docRef = db.collection(config.resultsCollection).doc(dateStr)
+                .collection(config.resultsSubcollection).doc(calcManifest.category)
+                .collection(config.computationsSubcollection).doc(normalizeName(calcManifest.name));
+
+              writes.push({
+                ref: docRef,
+                data: { ...dataToWrite, _completed: true },
+                options: { merge: true }
+              });
+            }
+          }
+        } catch (err) {
+          logger.log('ERROR', `[BatchPrice] Calc ${calcManifest.name} failed for ${dateStr}`, { error: err.message });
+        }
+      }
+    }
+
+    if (writes.length > 0) {
+      await commitBatchInChunks(config, deps, writes, `BatchPrice Chunk ${Math.floor(i/SHARD_BATCH_SIZE)}`);
+    }
+  }
+  logger.log('INFO', '[BatchPrice] Optimization pass complete.');
+}
+
 module.exports = {
   groupByPass,
   checkRootDependencies,
@@ -370,5 +436,6 @@ module.exports = {
   updateComputationStatus,
   updateGlobalComputationStatus,
   runStandardComputationPass,
-  runMetaComputationPass
-
+  runMetaComputationPass,
+  runBatchPriceComputation
+};
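
With the new `targetTickers` parameter, the batch pass above can be pointed at a handful of symbols instead of the whole universe: tickers are translated to instrument IDs through `mappings.tickerToInstrument`, and only the shards holding those IDs are fetched. A usage sketch; the ticker symbols, the dates, and the shape of the calc manifests (`{ name, category, class }`) are assumptions inferred from the diff rather than a documented API:

```js
const { runBatchPriceComputation } = require('./helpers/orchestration_helpers');

// Re-run a set of price calcs for two tickers over a short date range.
// config/deps must match what the rest of the module expects
// (Firestore db, logger, calculationUtils with withRetry).
async function backfillSubset(config, deps, priceCalcManifests) {
  const dates = ['2025-01-02', '2025-01-03'];
  await runBatchPriceComputation(config, deps, dates, priceCalcManifests, ['AAPL', 'TSLA']);
}
```
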
functions/computation-system/utils/data_loader.js

@@ -3,15 +3,9 @@
  * REFACTORED: Now stateless and receive dependencies.
  * --- NEW: Added streamPortfolioData async generator ---
  * --- FIXED: streamPortfolioData and streamHistoryData now accept optional 'providedRefs' ---
+ * --- UPDATE: Added Smart Shard Indexing for specific ticker lookups ---
  */
 
-/**
- * Sub-pipe: pipe.computationSystem.dataLoader.getPortfolioPartRefs
- * @param {object} config - The computation system configuration object.
- * @param {object} dependencies - Contains db, logger, calculationUtils.
- * @param {string} dateString - The date in YYYY-MM-DD format.
- * @returns {Promise<Firestore.DocumentReference[]>} An array of DocumentReferences.
- */
 /** --- Data Loader Sub-Pipes (Stateless, Dependency-Injection) --- */
 
 /** Stage 1: Get portfolio part document references for a given date */
@@ -22,12 +16,13 @@ async function getPortfolioPartRefs(config, deps, dateString) {
   const allPartRefs = [];
   const collectionsToQuery = [config.normalUserPortfolioCollection, config.speculatorPortfolioCollection];
   for (const collectionName of collectionsToQuery) {
-
-
-
-
-
-
+    const blockDocsQuery = db.collection(collectionName);
+    const blockDocRefs = await withRetry(() => blockDocsQuery.listDocuments(), `listDocuments(${collectionName})`);
+    if (!blockDocRefs.length) { logger.log('WARN', `No block documents in ${collectionName}`); continue; }
+    const partsPromises = blockDocRefs.map(blockDocRef => { const partsCollectionRef = blockDocRef.collection(config.snapshotsSubcollection).doc(dateString).collection(config.partsSubcollection); return withRetry(() => partsCollectionRef.listDocuments(), `listDocuments(${partsCollectionRef.path})`); });
+    const partDocArrays = await Promise.all(partsPromises);
+    partDocArrays.forEach(partDocs => { allPartRefs.push(...partDocs); });
+  }
   logger.log('INFO', `Found ${allPartRefs.length} portfolio part refs for ${dateString}`);
   return allPartRefs;
 }
@@ -39,8 +34,16 @@ async function loadDataByRefs(config, deps, refs) {
   if (!refs || !refs.length) return {};
   const mergedPortfolios = {};
   const batchSize = config.partRefBatchSize || 50;
-  for (let i = 0; i < refs.length; i += batchSize) {
-
+  for (let i = 0; i < refs.length; i += batchSize) {
+    const batchRefs = refs.slice(i, i + batchSize);
+    const snapshots = await withRetry(() => db.getAll(...batchRefs), `getAll(batch ${Math.floor(i / batchSize)})`);
+    for (const doc of snapshots) {
+      if (!doc.exists) continue;
+      const data = doc.data();
+      if (data && typeof data === 'object') Object.assign(mergedPortfolios, data);
+      else logger.log('WARN', `Doc ${doc.id} exists but data is not an object`, data);
+    }
+  }
   return mergedPortfolios;
 }
 
@@ -60,13 +63,15 @@ async function loadDailyInsights(config, deps, dateString) {
   const { withRetry } = calculationUtils;
   const insightsCollectionName = config.insightsCollectionName || 'daily_instrument_insights';
   logger.log('INFO', `Loading daily insights for ${dateString} from ${insightsCollectionName}`);
-  try {
-
-
-
-
-
-
+  try {
+    const docRef = db.collection(insightsCollectionName).doc(dateString);
+    const docSnap = await withRetry(() => docRef.get(), `getInsights(${dateString})`);
+    if (!docSnap.exists) { logger.log('WARN', `Insights not found for ${dateString}`); return null; }
+    logger.log('TRACE', `Successfully loaded insights for ${dateString}`);
+    return docSnap.data();
+  } catch (error) {
+    logger.log('ERROR', `Failed to load daily insights for ${dateString}`, { errorMessage: error.message });
+    return null;
   }
 }
 
@@ -76,15 +81,17 @@ async function loadDailySocialPostInsights(config, deps, dateString) {
   const { withRetry } = calculationUtils;
   const collectionName = config.socialInsightsCollectionName || 'daily_social_insights';
   logger.log('INFO', `Loading social post insights for ${dateString} from ${collectionName}`);
-  try {
-
-
-
-
-
-
-
+  try {
+    const postsCollectionRef = db.collection(collectionName).doc(dateString).collection('posts');
+    const querySnapshot = await withRetry(() => postsCollectionRef.get(), `getSocialPosts(${dateString})`);
+    if (querySnapshot.empty) { logger.log('WARN', `No social post insights for ${dateString}`); return null; }
+    const postsMap = {};
+    querySnapshot.forEach(doc => { postsMap[doc.id] = doc.data(); });
+    logger.log('TRACE', `Loaded ${Object.keys(postsMap).length} social post insights`);
+    return postsMap;
+  } catch (error) {
+    logger.log('ERROR', `Failed to load social post insights for ${dateString}`, { errorMessage: error.message });
+    return null;
   }
 }
 
@@ -96,53 +103,169 @@ async function getHistoryPartRefs(config, deps, dateString) {
   const allPartRefs = [];
   const collectionsToQuery = [config.normalUserHistoryCollection, config.speculatorHistoryCollection];
   for (const collectionName of collectionsToQuery) {
-
-
-
-
-
-
-
-
+    if (!collectionName) { logger.log('WARN', `History collection undefined. Skipping.`); continue; }
+    const blockDocsQuery = db.collection(collectionName);
+    const blockDocRefs = await withRetry(() => blockDocsQuery.listDocuments(), `listDocuments(${collectionName})`);
+    if (!blockDocRefs.length) { logger.log('WARN', `No block documents in ${collectionName}`); continue; }
+    const partsPromises = blockDocRefs.map(blockDocRef => {
+      const partsCollectionRef = blockDocRef.collection(config.snapshotsSubcollection).doc(dateString).collection(config.partsSubcollection);
+      return withRetry(() => partsCollectionRef.listDocuments(), `listDocuments(${partsCollectionRef.path})`);
+    });
+    const partDocArrays = await Promise.all(partsPromises);
+    partDocArrays.forEach(partDocs => { allPartRefs.push(...partDocs); });
+  }
   logger.log('INFO', `Found ${allPartRefs.length} history part refs for ${dateString}`);
   return allPartRefs;
 }
 
-
-/**
- * Streams portfolio data in chunks for a given date.
- * This is an async generator.
- * @param {object} config - The computation system configuration object.
- * @param {object} deps - Contains db, logger, calculationUtils.
- * @param {string} dateString - The date in YYYY-MM-DD format.
- * @param {Array<Firestore.DocumentReference> | null} [providedRefs=null] - Optional pre-fetched refs.
- */
+/** Stage 7: Stream portfolio data in chunks */
 async function* streamPortfolioData(config, deps, dateString, providedRefs = null) {
   const { logger } = deps;
   const refs = providedRefs || (await getPortfolioPartRefs(config, deps, dateString));
   if (refs.length === 0) { logger.log('WARN', `[streamPortfolioData] No portfolio refs found for ${dateString}. Stream is empty.`); return; }
   const batchSize = config.partRefBatchSize || 50;
   logger.log('INFO', `[streamPortfolioData] Streaming ${refs.length} portfolio parts in chunks of ${batchSize}...`);
-  for (let i = 0; i < refs.length; i += batchSize) {
+  for (let i = 0; i < refs.length; i += batchSize) {
+    const batchRefs = refs.slice(i, i + batchSize);
+    const data = await loadDataByRefs(config, deps, batchRefs);
+    yield data;
+  }
   logger.log('INFO', `[streamPortfolioData] Finished streaming for ${dateString}.`);
 }
 
-/**
- * --- NEW: Stage 8: Stream history data in chunks ---
- * Streams history data in chunks for a given date.
- * @param {object} config - The computation system configuration object.
- * @param {object} deps - Contains db, logger, calculationUtils.
- * @param {string} dateString - The date in YYYY-MM-DD format.
- * @param {Array<Firestore.DocumentReference> | null} [providedRefs=null] - Optional pre-fetched refs.
- */
+/** Stage 8: Stream history data in chunks */
 async function* streamHistoryData(config, deps, dateString, providedRefs = null) {
   const { logger } = deps;
   const refs = providedRefs || (await getHistoryPartRefs(config, deps, dateString));
   if (refs.length === 0) { logger.log('WARN', `[streamHistoryData] No history refs found for ${dateString}. Stream is empty.`); return; }
   const batchSize = config.partRefBatchSize || 50;
   logger.log('INFO', `[streamHistoryData] Streaming ${refs.length} history parts in chunks of ${batchSize}...`);
-  for (let i = 0; i < refs.length; i += batchSize) {
+  for (let i = 0; i < refs.length; i += batchSize) {
+    const batchRefs = refs.slice(i, i + batchSize);
+    const data = await loadDataByRefs(config, deps, batchRefs);
+    yield data;
+  }
   logger.log('INFO', `[streamHistoryData] Finished streaming for ${dateString}.`);
 }
 
-
+/** Stage 9: Get all price shard references (Basic) */
+async function getPriceShardRefs(config, deps) {
+  const { db, logger, calculationUtils } = deps;
+  const { withRetry } = calculationUtils;
+  const collection = config.priceCollection || 'asset_prices';
+  try {
+    const collectionRef = db.collection(collection);
+    const refs = await withRetry(() => collectionRef.listDocuments(), `listDocuments(${collection})`);
+    return refs;
+  } catch (e) {
+    logger.log('ERROR', `Failed to list price shards: ${e.message}`);
+    return [];
+  }
+}
+
+/** --- NEW: Stage 10: Smart Shard Lookup System ---
+ * Builds or fetches a "Ticker -> Shard" index to avoid scanning all shards
+ * when only specific tickers are needed.
+ */
+
+/**
+ * Ensures the Price Shard Index exists. If not, builds it by scanning all shards.
+ * @param {object} config
+ * @param {object} deps
+ * @returns {Promise<Object>} The lookup map { "instrumentId": "shardDocId" }
+ */
+async function ensurePriceShardIndex(config, deps) {
+  const { db, logger } = deps;
+  const metadataCol = config.metadataCollection || 'system_metadata';
+  const indexDocRef = db.collection(metadataCol).doc('price_shard_index');
+
+  // 1. Try to fetch existing index
+  const snap = await indexDocRef.get();
+  if (snap.exists) {
+    const data = snap.data();
+    // Simple expiry check (optional): Rebuild if older than 24h
+    // For now, we trust it exists.
+    return data.index || {};
+  }
+
+  logger.log('INFO', '[ShardIndex] Index not found. Building new Price Shard Index (Scanning all shards)...');
+
+  // 2. Build Index
+  const collection = config.priceCollection || 'asset_prices';
+  const snapshot = await db.collection(collection).get();
+
+  const index = {};
+  let shardCount = 0;
+
+  snapshot.forEach(doc => {
+    shardCount++;
+    const data = doc.data(); // This loads the shard into memory, intensive but necessary once
+    if (data.history) {
+      // Keys of history are Instrument IDs
+      Object.keys(data.history).forEach(instId => {
+        index[instId] = doc.id;
+      });
+    }
+  });
+
+  // 3. Save Index
+  await indexDocRef.set({
+    index: index,
+    lastUpdated: new Date().toISOString(),
+    shardCount: shardCount
+  });
+
+  logger.log('INFO', `[ShardIndex] Built index for ${Object.keys(index).length} instruments across ${shardCount} shards.`);
+  return index;
+}
+
+/**
+ * Gets DocumentReferences for shards containing the requested Instrument IDs.
+ * If targetInstrumentIds is null/empty, returns ALL shards.
+ * @param {object} config
+ * @param {object} deps
+ * @param {string[]} targetInstrumentIds - List of Instrument IDs (NOT Tickers)
+ * @returns {Promise<Firestore.DocumentReference[]>}
+ */
+async function getRelevantShardRefs(config, deps, targetInstrumentIds) {
+  const { db, logger } = deps;
+
+  // If no specific targets, return ALL refs (Standard Bulk Batch)
+  if (!targetInstrumentIds || targetInstrumentIds.length === 0) {
+    return getPriceShardRefs(config, deps);
+  }
+
+  logger.log('INFO', `[ShardLookup] Resolving shards for ${targetInstrumentIds.length} specific instruments...`);
+
+  const index = await ensurePriceShardIndex(config, deps);
+  const uniqueShardIds = new Set();
+  const collection = config.priceCollection || 'asset_prices';
+
+  let foundCount = 0;
+  for (const id of targetInstrumentIds) {
+    const shardId = index[id];
+    if (shardId) {
+      uniqueShardIds.add(shardId);
+      foundCount++;
+    }
+  }
+
+  logger.log('INFO', `[ShardLookup] Mapped ${foundCount}/${targetInstrumentIds.length} instruments to ${uniqueShardIds.size} unique shards.`);
+
+  // Convert Shard IDs to References
+  return Array.from(uniqueShardIds).map(id => db.collection(collection).doc(id));
+}
+
+module.exports = {
+  getPortfolioPartRefs,
+  loadDataByRefs,
+  loadFullDayMap,
+  loadDailyInsights,
+  loadDailySocialPostInsights,
+  getHistoryPartRefs,
+  streamPortfolioData,
+  streamHistoryData,
+  getPriceShardRefs,
+  ensurePriceShardIndex,
+  getRelevantShardRefs // Export new function
+};
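
The index that makes the smart lookup cheap is a single document, `<metadataCollection>/price_shard_index`, holding `{ index: { instrumentId: shardDocId }, lastUpdated, shardCount }`. It is built once by a full collection scan; after that, a subset query costs one index read plus only the matching shard reads instead of listing and loading every shard. A consumer-side sketch; the instrument IDs are placeholders, and the merge behaviour noted in the comments is how `loadDataByRefs` is written above (a top-level `Object.assign` of each doc's data):

```js
const { getRelevantShardRefs, loadDataByRefs } = require('./utils/data_loader');

async function loadPricesForInstruments(config, deps, instrumentIds) {
  // One read of the index document, then only the shards containing these IDs.
  const refs = await getRelevantShardRefs(config, deps, instrumentIds);
  // Batch-fetches the refs via db.getAll and merges each doc's top-level
  // fields into a single map.
  return loadDataByRefs(config, deps, refs);
}

// e.g. const prices = await loadPricesForInstruments(config, deps, ['1001', '2045']);
```

One consequence worth noting: the index document has no expiry in this version (the code comments leave the 24h rebuild as optional), so instruments written to new shards after the first build will not resolve until the index document is deleted and rebuilt.
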