bulltrackers-module 1.0.172 → 1.0.174
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.MD +3 -179
- package/functions/computation-system/controllers/computation_controller.js +85 -89
- package/functions/computation-system/helpers/computation_manifest_builder.js +15 -20
- package/functions/computation-system/helpers/computation_pass_runner.js +68 -35
- package/functions/computation-system/helpers/orchestration_helpers.js +63 -58
- package/functions/computation-system/layers/math_primitives.js +445 -44
- package/index.js +1 -1
- package/package.json +1 -1
package/functions/computation-system/helpers/computation_pass_runner.js

@@ -1,8 +1,6 @@
 /**
  * FIXED: computation_pass_runner.js
- *
- * --- MODIFIED: Passes 'existingResults' to 'runStandardComputationPass'
- * to replicate the test harness's 7-argument signature. ---
+ * V3: Fetches Previous Day's Results to enable State Persistence.
  */

 const { groupByPass, checkRootDataAvailability, fetchExistingResults, runStandardComputationPass, runMetaComputationPass } = require('./orchestration_helpers.js');
@@ -11,63 +9,98 @@ const PARALLEL_BATCH_SIZE = 7;

 async function runComputationPass(config, dependencies, computationManifest) {
   const { logger } = dependencies;
-  const passToRun
-  if (!passToRun)
+  const passToRun = String(config.COMPUTATION_PASS_TO_RUN);
+  if (!passToRun)
+    return logger.log('ERROR', '[PassRunner] No pass defined. Aborting.');
+
   logger.log('INFO', `🚀 Starting PASS ${passToRun}...`);

-  const earliestDates
+  const earliestDates = { portfolio: new Date('2025-09-25T00:00:00Z'), history: new Date('2025-11-05T00:00:00Z'), social: new Date('2025-10-30T00:00:00Z'), insights: new Date('2025-08-26T00:00:00Z') };
   earliestDates.absoluteEarliest = Object.values(earliestDates).reduce((a,b) => a < b ? a : b);
-  logger.log('INFO', `Hardcoded earliest dates: ${Object.entries(earliestDates).map(([k,v]) => `${k}=${v.toISOString().slice(0,10)}`).join(', ')}`);

-  const passes
+  const passes = groupByPass(computationManifest);
   const calcsInThisPass = passes[passToRun] || [];
-
+
+  if (!calcsInThisPass.length)
+    return logger.log('WARN', `[PassRunner] No calcs for Pass ${passToRun}. Exiting.`);
+
   const calcEarliestDates = new Map();
-
-  const
-
-
-
+
+  for (const calc of calcsInThisPass) {
+    const deps = calc.rootDataDependencies || [];
+
+    if (!deps.length)
+      { calcEarliestDates.set(calc.name, earliestDates.absoluteEarliest); continue; }
+
+    const latestDep = new Date(Math.max(...deps.map(d => earliestDates[d]?.getTime() || 0)));
+    const calcDate = calc.isHistorical
+      ? new Date(latestDep.getTime() + 86400000)
+      : latestDep;
+
+    calcEarliestDates.set(calc.name, calcDate);
+  }

   const passEarliestDate = new Date(Math.min(...Array.from(calcEarliestDates.values()).map(d => d.getTime())));
-
-  const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
+  const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
   const allExpectedDates = getExpectedDateStrings(passEarliestDate, endDateUTC);
-  const standardCalcs
-  const metaCalcs
-
-  const
-
+  const standardCalcs = calcsInThisPass.filter(c => c.type === 'standard');
+  const metaCalcs = calcsInThisPass.filter(c => c.type === 'meta');
+
+  const checkDeps = (calc, rootData, existingResults, dateToProcess) => {
+    if (existingResults[calc.name])
+      return false;

-
-    if (missingRoot.length) return false;
+    const earliest = calcEarliestDates.get(calc.name);

-
+    if (earliest && dateToProcess < earliest)
+      return false;
+
+    const missingRoot = (calc.rootDataDependencies || []).filter(dep => !rootData.status[`has${dep[0].toUpperCase() + dep.slice(1)}`]);
+
+    if (missingRoot.length)
+      return false;
+
+    if (calc.type === 'meta')
+      { const missingComputed = (calc.dependencies || []).filter(d => !existingResults[d]);
+        if (missingComputed.length)
+          return false; }
+    return true;
+  };

   const processDate = async (dateStr) => {
     const dateToProcess = new Date(dateStr + 'T00:00:00Z');
     try {
       const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
       if (!rootData) return logger.log('WARN', `[PassRunner] Skipping ${dateStr}: No root data.`);
-
+
+      // 1. Fetch TODAY'S results (Check what is already done)
+      const existingResults = await fetchExistingResults(dateStr, calcsInThisPass, computationManifest, config, dependencies, false);
+
+      // 2. Fetch YESTERDAY'S results (For State Persistence) We calculate T-1 date string
+      const prevDate = new Date(dateToProcess); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
+      const prevDateStr = prevDate.toISOString().slice(0, 10);
+
+      // We pass 'true' to includeSelf, because we need the calc's OWN history (e.g. mimetic-latency needs previous mimetic-latency)
+      const previousResults = await fetchExistingResults(prevDateStr, calcsInThisPass, computationManifest, config, dependencies, true);
+
       const standardToRun = standardCalcs.filter(c => checkDeps(c, rootData, existingResults, dateToProcess));
       const metaToRun = metaCalcs.filter(c => checkDeps(c, rootData, existingResults, dateToProcess));
+
       if (!standardToRun.length && !metaToRun.length) return logger.log('INFO', `[PassRunner] All calcs complete for ${dateStr}. Skipping.`);
+
       logger.log('INFO', `[PassRunner] Running ${dateStr}: ${standardToRun.length} standard, ${metaToRun.length} meta`);

-      //
-
-
-
-      // --- END CHANGE ---
-
-      if (metaToRun.length) await runMetaComputationPass(dateToProcess, metaToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, rootData);
+      // Pass 'previousResults' to the runners
+      if (standardToRun.length) await runStandardComputationPass(dateToProcess, standardToRun, `Pass ${passToRun} (Standard)`, config, dependencies, rootData, existingResults, previousResults);
+      if (metaToRun.length) await runMetaComputationPass(dateToProcess, metaToRun, `Pass ${passToRun} (Meta)`, config, dependencies, existingResults, previousResults, rootData);
+
     } catch (err) { logger.log('ERROR', `[PassRunner] FAILED Pass ${passToRun} for ${dateStr}`, { errorMessage: err.message, stack: err.stack }); }
   };

-  for (let i = 0; i < allExpectedDates.length; i += PARALLEL_BATCH_SIZE) {
-
-
+  for (let i = 0; i < allExpectedDates.length; i += PARALLEL_BATCH_SIZE) {
+    const batch = allExpectedDates.slice(i, i + PARALLEL_BATCH_SIZE);
+    await Promise.all(batch.map(processDate));
+  }
   logger.log('INFO', `[PassRunner] Pass ${passToRun} orchestration finished.`);
 }

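For context on the state-persistence change above: the runner now passes a 'previousResults' map (yesterday's committed outputs, fetched with includeSelf = true) into runStandardComputationPass. Below is a minimal, hypothetical sketch of how an isHistorical calculation could consume that map; the class name, method name, result-key 'mimeticlatency', and value shapes are illustrative assumptions, not code from the package.

// Hypothetical illustration only -- assumes previousResults is keyed by normalized
// calc name and that per-user values are plain numbers.
class MimeticLatencyExample {
  constructor() { this.values = {}; }

  // Called per user with today's portfolio chunk and yesterday's fetched results.
  processUser(userId, todayPortfolio, previousResults) {
    const ownYesterday = (previousResults && previousResults['mimeticlatency']) || {};
    const carried = typeof ownYesterday[userId] === 'number' ? ownYesterday[userId] : 0;
    const todaySignal = todayPortfolio ? Object.keys(todayPortfolio).length : 0;
    // Exponential carry-over: yesterday's state decays, today's signal blends in.
    this.values[userId] = carried * 0.9 + todaySignal * 0.1;
  }

  async getResult() { return { ...this.values }; }
}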
package/functions/computation-system/helpers/orchestration_helpers.js

@@ -1,6 +1,7 @@
 /**
- *
- *
+ * FIXED: orchestration_helpers.js
+ * V3.2: Enabled streaming of Trading History data (tH_iter) for computations
+ * that require it, alongside Portfolio data.
  */

 const { ComputationController } = require('../controllers/computation_controller');
@@ -11,7 +12,7 @@ const {
 const { batchStoreSchemas } = require('../utils/schema_capture');
 const { normalizeName, commitBatchInChunks } = require('../utils/utils');

-// --- Helpers
+// --- Helpers ---

 function groupByPass(manifest) {
   return manifest.reduce((acc, calc) => { (acc[calc.pass] = acc[calc.pass] || []).push(calc); return acc; }, {});
@@ -21,10 +22,10 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
   const missing = [];
   if (!calcManifest.rootDataDependencies) return { canRun: true, missing };
   for (const dep of calcManifest.rootDataDependencies) {
-    if
-    else if (dep === 'insights'
-    else if (dep === 'social'
-    else if (dep === 'history'
+    if (dep === 'portfolio' && !rootDataStatus.hasPortfolio) missing.push('portfolio');
+    else if (dep === 'insights' && !rootDataStatus.hasInsights) missing.push('insights');
+    else if (dep === 'social' && !rootDataStatus.hasSocial) missing.push('social');
+    else if (dep === 'history' && !rootDataStatus.hasHistory) missing.push('history');
   }
   return { canRun: missing.length === 0, missing };
 }
@@ -33,8 +34,6 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
   const { logger } = dependencies;
   const dateToProcess = new Date(dateStr + 'T00:00:00Z');

-  // Quick check for existence of required root collections
-  // (Implementation preserved from original for consistency)
   let portfolioRefs = [], historyRefs = [];
   let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false;
   let insightsData = null, socialData = null;
@@ -65,13 +64,20 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
   }
 }

-
-
+// --- OPTIMIZED FETCH ---
+async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db }, includeSelf = false) {
   const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
   const calcsToFetch = new Set();
+
   for (const calc of calcsInPass) {
-    if (calc.dependencies)
+    if (calc.dependencies) {
+      calc.dependencies.forEach(d => calcsToFetch.add(normalizeName(d)));
+    }
+    if (includeSelf && calc.isHistorical) {
+      calcsToFetch.add(normalizeName(calc.name));
+    }
   }
+
   if (!calcsToFetch.size) return {};

   const fetched = {};
@@ -90,36 +96,35 @@ async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config,

   if (docRefs.length) {
     const snaps = await db.getAll(...docRefs);
-    snaps.forEach((doc, i) => {
+    snaps.forEach((doc, i) => {
+      if(doc.exists && doc.data()._completed) {
+        fetched[names[i]] = doc.data();
+      }
+    });
   }
   return fetched;
 }

-function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, existingResults, passToRun, dateStr, earliestDates) {
-
-
-    if (existingResults[c.name]) return false;
-    // Date check
+function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, existingResults, passToRun, dateStr, earliestDates) { // TODO passtorun is unused, why?
+  const filter = (c) => { // Since we are using toplogical sorting for deps, surely the pass being run is implicit
+    if (existingResults[c.name]) return false; // Am a bit confused here. Feel this may be a bug.
     let earliest = new Date('1970-01-01');
     (c.rootDataDependencies || []).forEach(d => { if(earliestDates[d] > earliest) earliest = earliestDates[d]; });
     if (c.isHistorical) earliest.setUTCDate(earliest.getUTCDate() + 1);
     if (new Date(dateStr) < earliest) return false;
-    // Data check
     if (!checkRootDependencies(c, rootDataStatus).canRun) return false;
-    // Dependency check
     if (c.type === 'meta' && c.dependencies && c.dependencies.some(d => !existingResults[normalizeName(d)])) return false;
     return true;
   };
   return { standardCalcsToRun: standardCalcs.filter(filter), metaCalcsToRun: metaCalcs.filter(filter) };
 }

-// ---
+// --- EXECUTION DELEGATES ---

-async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps) {
+async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps) {
   const { logger } = deps;
   const controller = new ComputationController(config, deps);

-  // 1. Filter for Standard Calculations (that need streaming)
   const calcs = Object.values(state).filter(c => c && c.manifest);
   const streamingCalcs = calcs.filter(c =>
     c.manifest.rootDataDependencies.includes('portfolio') ||
@@ -130,50 +135,55 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData

   logger.log('INFO', `[${passName}] Streaming for ${streamingCalcs.length} computations...`);

-
-  await controller.loader.loadMappings(); // Pre-cache mappings
+  await controller.loader.loadMappings();
   const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
   const prevDateStr = prevDate.toISOString().slice(0, 10);

+  // 1. Today's Portfolio Stream
   const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
-
-
-  const
+
+  // 2. Yesterday's Portfolio Stream (for 'isHistorical' calcs)
+  const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
+  const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs)
     ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs)
     : null;

+  // 3. Today's History Stream (NEW)
+  const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
+  const tH_iter = (needsTradingHistory && historyRefs)
+    ? streamHistoryData(config, deps, dateStr, historyRefs)
+    : null;
+
   let yP_chunk = {};
+  let tH_chunk = {};

-  // 3. Stream & Execute
   for await (const tP_chunk of tP_iter) {
     if (yP_iter) yP_chunk = (await yP_iter.next()).value || {};
+    if (tH_iter) tH_chunk = (await tH_iter.next()).value || {};

-    // Execute Batch
     const promises = streamingCalcs.map(calc =>
       controller.executor.executePerUser(
         calc,
         calc.manifest,
         dateStr,
-        tP_chunk,
-        yP_chunk,
-
+        tP_chunk,
+        yP_chunk, // Yesterday Portfolio
+        tH_chunk, // Today History (NEW ARGUMENT)
+        fetchedDeps,
+        previousFetchedDeps
       )
     );
     await Promise.all(promises);
   }
-
   logger.log('INFO', `[${passName}] Streaming complete.`);
 }

-// ---
+// --- RUNNERS ---

-async function runStandardComputationPass(date, calcs, passName, config, deps, rootData, fetchedDeps) {
+async function runStandardComputationPass(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps) {
   const dStr = date.toISOString().slice(0, 10);
   const logger = deps.logger;

-  // 1. Load additional historical context if needed (e.g. previous day's insights)
-  // Note: Portfolio/History streams are handled inside streamAndProcess
-  // We just need to attach references if they exist
   const fullRoot = { ...rootData };
   if (calcs.some(c => c.isHistorical)) {
     const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
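The streaming hunk above advances up to three async iterators in lockstep: today's portfolio stream drives the loop, while the optional yesterday-portfolio and today-history streams are pulled forward with .next() and fall back to an empty object when exhausted. A standalone sketch of that pattern follows; the generator here is a stand-in for illustration, not the package's streamPortfolioData/streamHistoryData.

// Generic illustration of the lockstep-iterator pattern used above.
async function* chunks(label, count) {
  for (let i = 0; i < count; i++) yield { [`${label}-${i}`]: true };
}

async function demoLockstep() {
  const primary = chunks('portfolio', 3);   // drives the for-await loop
  const secondary = chunks('history', 2);   // may be shorter; falls back to {}

  let secondaryChunk = {};
  for await (const primaryChunk of primary) {
    secondaryChunk = (await secondary.next()).value || {};
    console.log(Object.keys(primaryChunk), Object.keys(secondaryChunk));
  }
}

demoLockstep();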
@@ -181,7 +191,6 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
     fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
   }

-  // 2. Initialize Calculation Instances
   const state = {};
   for (const c of calcs) {
     try {
@@ -191,14 +200,11 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
     } catch(e) { logger.log('WARN', `Failed to init ${c.name}`); }
   }

-
-  await streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps);
-
-  // 4. Commit Results
+  await streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps);
   await commitResults(state, dStr, passName, config, deps);
 }

-async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, rootData) {
+async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, previousFetchedDeps, rootData) {
   const controller = new ComputationController(config, deps);
   const dStr = date.toISOString().slice(0, 10);
   const state = {};
@@ -207,36 +213,28 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
     try {
       const inst = new mCalc.class();
       inst.manifest = mCalc;
-
-      await controller.executor.executeOncePerDay(inst, mCalc, dStr, fetchedDeps);
-
+      await controller.executor.executeOncePerDay(inst, mCalc, dStr, fetchedDeps, previousFetchedDeps);
       state[normalizeName(mCalc.name)] = inst;
-    } catch (e) {
-      deps.logger.log('ERROR', `Meta calc failed ${mCalc.name}: ${e.message}`);
-    }
+    } catch (e) { deps.logger.log('ERROR', `Meta calc failed ${mCalc.name}: ${e.message}`); }
   }

   await commitResults(state, dStr, passName, config, deps);
 }

-// --- Commit Helper (Standardized) ---
 async function commitResults(stateObj, dStr, passName, config, deps) {
   const writes = [], schemas = [], sharded = {};
-
   for (const name in stateObj) {
     const calc = stateObj[name];
     try {
       const result = await calc.getResult();
       if (!result) continue;
-
       const standardRes = {};
       for (const key in result) {
-        if (key.startsWith('sharded_')) {
+        if (key.startsWith('sharded_')) { // TODO - This should iedally become redundant, computations themselves should NEVER return an object so large it requires sharding...
           const sData = result[key];
           for (const c in sData) { sharded[c] = sharded[c] || {}; Object.assign(sharded[c], sData[c]); }
         } else standardRes[key] = result[key];
       }
-
       if (Object.keys(standardRes).length) {
         standardRes._completed = true;
         writes.push({
@@ -246,9 +244,16 @@ async function commitResults(stateObj, dStr, passName, config, deps) {
           data: standardRes
         });
       }
-
       if (calc.manifest.class.getSchema) {
-
+        // FIX: Remove the 'class' property (function) because Firestore cannot store it. (We were literally submitting the entire JS class to firestore...)
+        const { class: _cls, ...safeMetadata } = calc.manifest;
+
+        schemas.push({
+          name,
+          category: calc.manifest.category,
+          schema: calc.manifest.class.getSchema(),
+          metadata: safeMetadata
+        });
       }
     } catch (e) { deps.logger.log('ERROR', `Commit failed ${name}: ${e.message}`); }
   }