bulltrackers-module 1.0.732 → 1.0.733
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/functions/orchestrator/index.js +19 -17
- package/index.js +8 -29
- package/package.json +1 -1
- package/functions/computation-system/WorkflowOrchestrator.js +0 -213
- package/functions/computation-system/config/monitoring_config.js +0 -31
- package/functions/computation-system/config/validation_overrides.js +0 -10
- package/functions/computation-system/context/ContextFactory.js +0 -143
- package/functions/computation-system/context/ManifestBuilder.js +0 -379
- package/functions/computation-system/data/AvailabilityChecker.js +0 -236
- package/functions/computation-system/data/CachedDataLoader.js +0 -325
- package/functions/computation-system/data/DependencyFetcher.js +0 -455
- package/functions/computation-system/executors/MetaExecutor.js +0 -279
- package/functions/computation-system/executors/PriceBatchExecutor.js +0 -108
- package/functions/computation-system/executors/StandardExecutor.js +0 -465
- package/functions/computation-system/helpers/computation_dispatcher.js +0 -750
- package/functions/computation-system/helpers/computation_worker.js +0 -375
- package/functions/computation-system/helpers/monitor.js +0 -64
- package/functions/computation-system/helpers/on_demand_helpers.js +0 -154
- package/functions/computation-system/layers/extractors.js +0 -1097
- package/functions/computation-system/layers/index.js +0 -40
- package/functions/computation-system/layers/mathematics.js +0 -522
- package/functions/computation-system/layers/profiling.js +0 -537
- package/functions/computation-system/layers/validators.js +0 -170
- package/functions/computation-system/legacy/AvailabilityCheckerOld.js +0 -388
- package/functions/computation-system/legacy/CachedDataLoaderOld.js +0 -357
- package/functions/computation-system/legacy/DependencyFetcherOld.js +0 -478
- package/functions/computation-system/legacy/MetaExecutorold.js +0 -364
- package/functions/computation-system/legacy/StandardExecutorold.js +0 -476
- package/functions/computation-system/legacy/computation_dispatcherold.js +0 -944
- package/functions/computation-system/logger/logger.js +0 -297
- package/functions/computation-system/persistence/ContractValidator.js +0 -81
- package/functions/computation-system/persistence/FirestoreUtils.js +0 -56
- package/functions/computation-system/persistence/ResultCommitter.js +0 -283
- package/functions/computation-system/persistence/ResultsValidator.js +0 -130
- package/functions/computation-system/persistence/RunRecorder.js +0 -142
- package/functions/computation-system/persistence/StatusRepository.js +0 -52
- package/functions/computation-system/reporter_epoch.js +0 -6
- package/functions/computation-system/scripts/UpdateContracts.js +0 -128
- package/functions/computation-system/services/SnapshotService.js +0 -148
- package/functions/computation-system/simulation/Fabricator.js +0 -285
- package/functions/computation-system/simulation/SeededRandom.js +0 -41
- package/functions/computation-system/simulation/SimRunner.js +0 -51
- package/functions/computation-system/system_epoch.js +0 -2
- package/functions/computation-system/tools/BuildReporter.js +0 -531
- package/functions/computation-system/tools/ContractDiscoverer.js +0 -144
- package/functions/computation-system/tools/DeploymentValidator.js +0 -536
- package/functions/computation-system/tools/FinalSweepReporter.js +0 -322
- package/functions/computation-system/topology/HashManager.js +0 -55
- package/functions/computation-system/topology/ManifestLoader.js +0 -47
- package/functions/computation-system/utils/data_loader.js +0 -675
- package/functions/computation-system/utils/schema_capture.js +0 -121
- package/functions/computation-system/utils/utils.js +0 -188
--- package/functions/computation-system/executors/MetaExecutor.js
+++ /dev/null
@@ -1,279 +0,0 @@
-/**
- * @fileoverview Executor for "Meta" (global) calculations.
- * REFACTORED: Applied DRY principles to Root Data and Series loading.
- */
-const { normalizeName } = require('../utils/utils');
-const { CachedDataLoader } = require('../data/CachedDataLoader');
-const { ContextFactory } = require('../context/ContextFactory');
-const { commitResults } = require('../persistence/ResultCommitter');
-const { fetchResultSeries } = require('../data/DependencyFetcher');
-const { getManifest } = require('../topology/ManifestLoader');
-
-// Helper to get calculations - prefer config.calculations, fallback to direct require
-function getCalculations(config) {
-    if (config && config.calculations) return config.calculations;
-    try { return require('aiden-shared-calculations-unified').calculations; }
-    catch (e) { return {}; }
-}
-
-class MetaExecutor {
-
-    // =========================================================================
-    // PRIMARY ENTRY POINT (Batch Execution)
-    // =========================================================================
-    static async run(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps) {
-        const { logger } = deps;
-        const dStr = date.toISOString().slice(0, 10);
-        const loader = new CachedDataLoader(config, deps);
-
-        // 1. Setup Manifest Lookup (use activeProductLines and calculations from config)
-        const allManifests = getManifest(config.activeProductLines || [], getCalculations(config), deps);
-        const manifestLookup = Object.fromEntries(allManifests.map(m => [normalizeName(m.name), m.category]));
-
-        // 2. Load Base Data (Always Required)
-        const [mappings, rankings, verifications, piMasterList] = await Promise.all([
-            loader.loadMappings(),
-            loader.loadRankings(dStr),
-            loader.loadVerifications(),
-            loader.loadPIMasterList() // <--- ADDED LOAD
-        ]);
-
-        // 3. Load Historical Rankings (if needed)
-        let rankingsYesterday = null;
-        if (calcs.some(c => c.isHistorical)) {
-            const prevStr = new Date(date.getTime() - 86400000).toISOString().slice(0, 10);
-            rankingsYesterday = await loader.loadRankings(prevStr);
-        }
-
-        // 4. Load Variable Root Data & Series (Refactored Helpers)
-        const variableRoots = await loadVariableRootData(loader, dStr, calcs, logger);
-        const seriesData = await loadSeriesData(loader, dStr, calcs, manifestLookup, config, deps);
-
-        // 5. Execution Loop
-        const state = {};
-        for (const c of calcs) {
-            const inst = new c.class();
-            inst.manifest = c;
-
-            const context = ContextFactory.buildMetaContext({
-                dateStr: dStr,
-                metadata: c,
-                mappings,
-                insights: { today: rootData.todayInsights },
-                socialData: { today: rootData.todaySocialPostInsights },
-                computedDependencies: fetchedDeps,
-                previousComputedDependencies: previousFetchedDeps,
-                config, deps,
-                allRankings: rankings,
-                allRankingsYesterday: rankingsYesterday,
-                allVerifications: verifications,
-                piMasterList, // <--- INJECTED HERE
-                // Spread variable roots directly (ratings, pageViews, etc.)
-                ...variableRoots,
-                seriesData
-            });
-
-            try {
-                const result = await inst.process(context);
-
-                // Fallback if the computation didn't set this.results internally
-                if (!inst.results) inst.results = result;
-
-                // Debug logging condensed
-                if (inst.results && Object.keys(inst.results).length === 0) {
-                    logger.log('WARN', `[MetaExecutor] ⚠️ ${c.name} produced EMPTY results.`);
-                }
-
-                state[c.name] = inst;
-            } catch (e) {
-                logger.log('ERROR', `[MetaExecutor] ❌ ${c.name} failed: ${e.message}`);
-            }
-        }
-
-        // Force 'isInitialWrite: true' for robust cleanup of old keys
-        return await commitResults(state, dStr, passName, config, deps, false, { isInitialWrite: true });
-    }
-
-    // =========================================================================
-    // SINGLE EXECUTION (Sharded/On-Demand)
-    // =========================================================================
-    static async executeOncePerDay(calcInstance, metadata, dateStr, computedDeps, prevDeps, config, deps, loader) {
-        const { logger } = deps;
-        const calcs = [metadata]; // Treat single as list for helpers
-
-        // Build manifestLookup using calculations from config (set in index.js)
-        const allManifests = getManifest(config.activeProductLines || [], getCalculations(config), deps);
-        const manifestLookup = Object.fromEntries(allManifests.map(m => [normalizeName(m.name), m.category]));
-
-        // 1. Load Data using Shared Helpers
-        const [mappings, rankings, variableRoots, seriesData, piMasterList] = await Promise.all([
-            loader.loadMappings(),
-            loader.loadRankings(dateStr),
-            loadVariableRootData(loader, dateStr, calcs, logger),
-            loadSeriesData(loader, dateStr, calcs, manifestLookup, config, deps),
-            loader.loadPIMasterList()
-        ]);
-
-        let rankingsYesterday = null;
-        if (metadata.isHistorical) {
-            const prevStr = new Date(new Date(dateStr).getTime() - 86400000).toISOString().slice(0, 10);
-            rankingsYesterday = await loader.loadRankings(prevStr);
-        }
-
-        const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await loader.loadInsights(dateStr) } : null;
-        const social = metadata.rootDataDependencies?.includes('social') ? { today: await loader.loadSocial(dateStr) } : null;
-
-        // 2. Build Context Base
-        const contextBase = {
-            dateStr, metadata, mappings, insights, socialData: social,
-            computedDependencies: computedDeps,
-            previousComputedDependencies: prevDeps, config, deps,
-            allRankings: rankings,
-            allRankingsYesterday: rankingsYesterday,
-            piMasterList,
-            ...variableRoots,
-            seriesData
-        };
-
-        // 3. Sharded Execution (Price) or Standard
-        if (metadata.rootDataDependencies?.includes('price')) {
-            logger.log('INFO', `[Executor] Running Batched/Sharded Execution for ${metadata.name}`);
-            const shardRefs = await loader.getPriceShardReferences();
-            if (!shardRefs.length) {
-                logger.log('WARN', '[Executor] No price shards found.');
-                return {};
-            }
-
-            const stats = { processedShards: 0, processedItems: 0 };
-            for (const ref of shardRefs) {
-                const shardData = await loader.loadPriceShard(ref);
-                await calcInstance.process(ContextFactory.buildMetaContext({
-                    ...contextBase,
-                    prices: { history: shardData }
-                }));
-
-                stats.processedShards++;
-                stats.processedItems += Object.keys(shardData).length;
-            }
-            calcInstance._executionStats = stats;
-            return calcInstance.getResult ? await calcInstance.getResult() : {};
-        }
-        else {
-            const res = await calcInstance.process(ContextFactory.buildMetaContext({
-                ...contextBase,
-                prices: {}
-            }));
-            calcInstance._executionStats = { processedItems: 1 };
-            return res;
-        }
-    }
-}
-
-// =============================================================================
-// INTERNAL HELPERS
-// =============================================================================
-
-/**
- * Loads variable root data types (Ratings, PageViews, Watchlist, Alerts)
- * based on calculation requirements. Handles strict vs. optional failures.
- */
-async function loadVariableRootData(loader, dateStr, calcs, logger) {
-    const requirements = {};
-    const results = { ratings: null, pageViews: null, watchlistMembership: null, alertHistory: null };
-
-    // Map internal key to Loader Method
-    const loaderMap = {
-        ratings: { method: 'loadRatings', resultKey: 'ratings' },
-        pageViews: { method: 'loadPageViews', resultKey: 'pageViews' },
-        watchlist: { method: 'loadWatchlistMembership', resultKey: 'watchlistMembership' },
-        alerts: { method: 'loadAlertHistory', resultKey: 'alertHistory' }
-    };
-
-    // 1. Analyze Requirements
-    for (const c of calcs) {
-        const deps = c.rootDataDependencies || [];
-        const strict = c.canHaveMissingRoots !== true;
-        deps.forEach(d => {
-            if (loaderMap[d]) {
-                if (!requirements[d]) requirements[d] = { strict: false };
-                if (strict) requirements[d].strict = true;
-            }
-        });
-    }
-
-    // 2. Fetch Data
-    const promises = Object.entries(requirements).map(async ([key, req]) => {
-        const { method, resultKey } = loaderMap[key];
-        try {
-            results[resultKey] = await loader[method](dateStr);
-        } catch (e) {
-            if (req.strict) throw new Error(`[MetaExecutor] Missing required root '${key}': ${e.message}`);
-            logger.log('WARN', `[MetaExecutor] Missing optional root '${key}'.`);
-            results[resultKey] = null;
-        }
-    });
-
-    await Promise.all(promises);
-    return results;
-}
-
-/**
- * Loads time-series data for both Root inputs and Computation results.
- */
-async function loadSeriesData(loader, dateStr, calcs, manifestLookup, config, deps) {
-    const rootRequests = {};
-    const depRequests = {}; // norm -> { days, originalName }
-
-    // 1. Aggregate Lookback Depths from dependencySeries config
-    for (const c of calcs) {
-        if (c.rootDataSeries) {
-            Object.entries(c.rootDataSeries).forEach(([type, val]) => {
-                const days = typeof val === 'object' ? val.lookback : val;
-                rootRequests[type] = Math.max(rootRequests[type] || 0, days);
-            });
-        }
-        if (c.dependencySeries) {
-            Object.entries(c.dependencySeries).forEach(([name, val]) => {
-                const days = typeof val === 'object' ? val.lookback : val;
-                const norm = normalizeName(name);
-                if (!depRequests[norm] || depRequests[norm].days < days) {
-                    depRequests[norm] = { days, originalName: name };
-                }
-            });
-        }
-    }
-
-    const seriesData = { root: {}, results: {} };
-
-    // 2. Fetch Root Series
-    const rootLoaders = {
-        alerts: 'loadAlertHistory',
-        insights: 'loadInsights',
-        ratings: 'loadRatings',
-        watchlist: 'loadWatchlistMembership',
-        rankings: 'loadRankings'
-    };
-
-    const rootPromises = Object.entries(rootRequests).map(async ([type, days]) => {
-        if (rootLoaders[type]) {
-            deps.logger.log('INFO', `[MetaExecutor] Loading ${days}-day series for Root '${type}'`);
-            const res = await loader.loadSeries(rootLoaders[type], dateStr, days);
-            seriesData.root[type] = res.data;
-        }
-    });
-
-    // 3. Fetch Dependency Series (category comes from manifestLookup)
-    const depEntries = Object.values(depRequests);
-    if (depEntries.length > 0) {
-        const depOriginalNames = depEntries.map(e => e.originalName);
-        const maxDays = Math.max(...depEntries.map(e => e.days));
-
-        deps.logger.log('INFO', `[MetaExecutor] Loading up to ${maxDays}-day series for Dependencies: ${depOriginalNames.join(', ')}`);
-        seriesData.results = await fetchResultSeries(dateStr, depOriginalNames, manifestLookup, config, deps, maxDays);
-    }
-
-    await Promise.all(rootPromises);
-    return seriesData;
-}
-
-module.exports = { MetaExecutor };
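For context on the removed `loadVariableRootData` helper: a root data type is loaded strictly whenever any requesting calculation does not set `canHaveMissingRoots`, so strict loads fail hard while optional loads degrade to `null`. A minimal standalone sketch of that aggregation rule, using a hypothetical generic `loader.load(key, dateStr)` API rather than the package's named loader methods:

```js
// Sketch only: same strict/optional aggregation rule as the removed helper.
// `loader.load(key, dateStr)` and the calc shape are assumptions for illustration.
async function loadRoots(loader, dateStr, calcs, logger) {
  const requirements = {};
  for (const c of calcs) {
    for (const d of c.rootDataDependencies || []) {
      if (!requirements[d]) requirements[d] = { strict: false };
      if (c.canHaveMissingRoots !== true) requirements[d].strict = true; // any strict consumer makes the root required
    }
  }

  const results = {};
  await Promise.all(Object.entries(requirements).map(async ([key, req]) => {
    try {
      results[key] = await loader.load(key, dateStr); // hypothetical loader API
    } catch (e) {
      if (req.strict) throw new Error(`Missing required root '${key}': ${e.message}`);
      logger.log('WARN', `Missing optional root '${key}'.`);
      results[key] = null; // optional root: degrade to null and continue
    }
  }));
  return results;
}
```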
--- package/functions/computation-system/executors/PriceBatchExecutor.js
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * @fileoverview Specialized Executor for Price-Dependent Batch computations.
- */
-const pLimit = require('p-limit');
-const { normalizeName } = require('../utils/utils');
-const { getRelevantShardRefs, loadDataByRefs } = require('../utils/data_loader');
-const { CachedDataLoader } = require('../data/CachedDataLoader');
-const mathLayer = require('../layers/index');
-const { LEGACY_MAPPING } = require('../topology/HashManager');
-
-async function runBatchPriceComputation(config, deps, dateStrings, calcs, targetTickers = []) {
-    const { logger, db, calculationUtils } = deps;
-    const cachedLoader = new CachedDataLoader(config, deps);
-    const mappings = await cachedLoader.loadMappings();
-
-    let targetInstrumentIds = [];
-    if (targetTickers && targetTickers.length > 0) {
-        const tickerToInst = mappings.tickerToInstrument || {};
-        targetInstrumentIds = targetTickers.map(t => tickerToInst[t]).filter(id => id);
-        if (targetInstrumentIds.length === 0) { logger.log('WARN', '[BatchPrice] Target tickers provided but no IDs found. Aborting.'); return; }
-    }
-
-    const allShardRefs = await getRelevantShardRefs(config, deps, targetInstrumentIds);
-    if (!allShardRefs.length) { logger.log('WARN', '[BatchPrice] No relevant price shards found. Exiting.'); return; }
-
-    const OUTER_CONCURRENCY_LIMIT = 2, SHARD_BATCH_SIZE = 20, WRITE_BATCH_LIMIT = 50;
-    logger.log('INFO', `[BatchPrice] Execution Plan: ${dateStrings.length} days, ${allShardRefs.length} shards. Concurrency: ${OUTER_CONCURRENCY_LIMIT}.`);
-
-    const shardChunks = [];
-    for (let i = 0; i < allShardRefs.length; i += SHARD_BATCH_SIZE) { shardChunks.push(allShardRefs.slice(i, i + SHARD_BATCH_SIZE)); }
-
-    const outerLimit = pLimit(OUTER_CONCURRENCY_LIMIT);
-    const chunkPromises = [];
-
-    for (let index = 0; index < shardChunks.length; index++) {
-        const shardChunkRefs = shardChunks[index];
-        chunkPromises.push(outerLimit(async () => {
-            try {
-                logger.log('INFO', `[BatchPrice] Processing chunk ${index + 1}/${shardChunks.length} (${shardChunkRefs.length} shards)...`);
-                const pricesData = await loadDataByRefs(config, deps, shardChunkRefs);
-                if (targetInstrumentIds.length > 0) {
-                    const requestedSet = new Set(targetInstrumentIds);
-                    for (const loadedInstrumentId in pricesData) {
-                        if (!requestedSet.has(loadedInstrumentId)) { delete pricesData[loadedInstrumentId]; }
-                    }
-                }
-
-                const writes = [];
-                for (const dateStr of dateStrings) {
-                    const dynamicMathContext = {};
-                    for (const [key, value] of Object.entries(mathLayer)) {
-                        dynamicMathContext[key] = value;
-                        if (LEGACY_MAPPING[key]) { dynamicMathContext[LEGACY_MAPPING[key]] = value; }
-                    }
-                    const context = { mappings, prices: { history: pricesData }, date: { today: dateStr }, math: dynamicMathContext };
-
-                    for (const calcManifest of calcs) {
-                        try {
-                            const instance = new calcManifest.class();
-                            await instance.process(context);
-                            const result = await instance.getResult();
-                            if (result && Object.keys(result).length > 0) {
-                                let dataToWrite = result;
-                                if (result.by_instrument) dataToWrite = result.by_instrument;
-                                if (Object.keys(dataToWrite).length > 0) {
-                                    const docRef = db.collection(config.resultsCollection)
-                                        .doc(dateStr)
-                                        .collection(config.resultsSubcollection)
-                                        .doc(calcManifest.category)
-                                        .collection(config.computationsSubcollection)
-                                        .doc(normalizeName(calcManifest.name));
-
-                                    writes.push({
-                                        ref: docRef,
-                                        data: {
-                                            ...dataToWrite,
-                                            _completed: true,
-                                            _lastUpdated: new Date().toISOString()
-                                        },
-                                        options: { merge: true }
-                                    });
-                                }
-                            }
-                        } catch (err) { logger.log('ERROR', `[BatchPrice] \u2716 Failed ${calcManifest.name} for ${dateStr}: ${err.message}`); }
-                    }
-                }
-
-                if (writes.length > 0) {
-                    const commitBatches = [];
-                    for (let i = 0; i < writes.length; i += WRITE_BATCH_LIMIT) { commitBatches.push(writes.slice(i, i + WRITE_BATCH_LIMIT)); }
-                    const commitLimit = pLimit(10);
-                    await Promise.all(commitBatches.map((batchWrites, bIndex) => commitLimit(async () => {
-                        const batch = db.batch(); batchWrites.forEach(w => batch.set(w.ref, w.data, w.options));
-                        try {
-                            await calculationUtils.withRetry(() => batch.commit(), `BatchPrice-C${index}-B${bIndex}`);
-                        } catch (commitErr) {
-                            logger.log('ERROR', `[BatchPrice] Commit failed for Chunk ${index} Batch ${bIndex}.`, { error: commitErr.message });
-                        }
-                    })));
-                }
-            } catch (chunkErr) { logger.log('ERROR', `[BatchPrice] Fatal error processing Chunk ${index}.`, { error: chunkErr.message }); }
-        }));
-    }
-    await Promise.all(chunkPromises);
-    logger.log('INFO', '[BatchPrice] Optimization pass complete.');
-}
-
-module.exports = { runBatchPriceComputation };
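The removed `runBatchPriceComputation` groups its Firestore writes into fixed-size batches (`WRITE_BATCH_LIMIT`) and commits them under a `p-limit` concurrency cap. A minimal sketch of that commit pattern, assuming `db` is a firebase-admin Firestore instance and `writes` is the `{ ref, data, options }` array built in the removed code; retry handling (the package's `calculationUtils.withRetry`) is omitted:

```js
const pLimit = require('p-limit');

// Sketch only: chunk writes into batches and commit with bounded concurrency.
// Default batchSize/concurrency mirror the constants in the removed executor.
async function commitInBatches(db, writes, { batchSize = 50, concurrency = 10 } = {}) {
  const batches = [];
  for (let i = 0; i < writes.length; i += batchSize) {
    batches.push(writes.slice(i, i + batchSize)); // one Firestore WriteBatch per slice
  }

  const limit = pLimit(concurrency);
  await Promise.all(batches.map(batchWrites => limit(async () => {
    const batch = db.batch();
    batchWrites.forEach(w => batch.set(w.ref, w.data, w.options)); // e.g. { merge: true }
    await batch.commit(); // the removed code wraps this call in a retry helper
  })));
}
```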