bulltrackers-module 1.0.174 → 1.0.176
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.MD +1901 -2
- package/functions/computation-system/controllers/computation_controller.js +1 -1
- package/functions/computation-system/helpers/computation_manifest_builder.js +9 -39
- package/functions/computation-system/helpers/computation_pass_runner.js +96 -60
- package/functions/computation-system/helpers/orchestration_helpers.js +158 -96
- package/functions/task-engine/utils/firestore_batch_manager.js +224 -180
- package/package.json +1 -1
package/functions/computation-system/helpers/orchestration_helpers.js

@@ -1,23 +1,24 @@
 /**
- *
- * V3.2: Enabled streaming of Trading History data (tH_iter) for computations
- * that require it, alongside Portfolio data.
+ * FILENAME: bulltrackers-module/functions/computation-system/helpers/orchestration_helpers.js
  */
 
-const { ComputationController }
+const { ComputationController } = require('../controllers/computation_controller');
+const { batchStoreSchemas } = require('../utils/schema_capture');
+const { normalizeName, commitBatchInChunks } = require('../utils/utils');
 const {
   getPortfolioPartRefs, loadDailyInsights, loadDailySocialPostInsights,
   getHistoryPartRefs, streamPortfolioData, streamHistoryData
 } = require('../utils/data_loader');
-const { batchStoreSchemas } = require('../utils/schema_capture');
-const { normalizeName, commitBatchInChunks } = require('../utils/utils');
 
-// --- Helpers ---
 
-
-
-
+/**
+ * Groups calculations from a manifest by their 'pass' property.
+ */
+function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[calc.pass] = acc[calc.pass] || []).push(calc); return acc; }, {}); }
 
+/**
+ * Checks if all root data dependencies for a given calculation are met.
+ */
 function checkRootDependencies(calcManifest, rootDataStatus) {
   const missing = [];
   if (!calcManifest.rootDataDependencies) return { canRun: true, missing };
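Note: to illustrate the new `groupByPass` helper introduced in this hunk, here is a minimal sketch of the shape it produces for a hypothetical two-pass manifest (the calculation names are invented for the example):

```js
// Hypothetical manifest entries; only the 'pass' property matters to groupByPass.
const manifest = [
  { name: 'PortfolioValue', pass: 1 },
  { name: 'DailyReturn',    pass: 1 },
  { name: 'SectorRollup',   pass: 2 },
];

const byPass = groupByPass(manifest);
// byPass = {
//   '1': [ { name: 'PortfolioValue', pass: 1 }, { name: 'DailyReturn', pass: 1 } ],
//   '2': [ { name: 'SectorRollup', pass: 2 } ]
// }
```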
@@ -30,24 +31,22 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
   return { canRun: missing.length === 0, missing };
 }
 
+/**
+ * Checks for the availability of all required root data for a specific date.
+ */
 async function checkRootDataAvailability(dateStr, config, dependencies, earliestDates) {
-
+  // ... [Unchanged content of checkRootDataAvailability] ...
+  const { logger } = dependencies;
   const dateToProcess = new Date(dateStr + 'T00:00:00Z');
-
-  let
-  let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false;
-  let insightsData = null, socialData = null;
+  let portfolioRefs = [], historyRefs = [];
+  let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false, insightsData = null , socialData = null;
 
   try {
     const tasks = [];
-    if (dateToProcess >= earliestDates.portfolio)
-
-    if (dateToProcess >= earliestDates.
-
-    if (dateToProcess >= earliestDates.social)
-      tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
-    if (dateToProcess >= earliestDates.history)
-      tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));
+    if (dateToProcess >= earliestDates.portfolio) tasks.push(getPortfolioPartRefs(config, dependencies, dateStr).then(r => { portfolioRefs = r; hasPortfolio = !!r.length; }));
+    if (dateToProcess >= earliestDates.insights) tasks.push(loadDailyInsights(config, dependencies, dateStr).then(r => { insightsData = r; hasInsights = !!r; }));
+    if (dateToProcess >= earliestDates.social) tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(r => { socialData = r; hasSocial = !!r; }));
+    if (dateToProcess >= earliestDates.history) tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(r => { historyRefs = r; hasHistory = !!r.length; }));
 
     await Promise.all(tasks);
 
@@ -58,73 +57,114 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
       todayInsights: insightsData, todaySocialPostInsights: socialData,
       status: { hasPortfolio, hasInsights, hasSocial, hasHistory }
     };
+
   } catch (err) {
     logger.log('ERROR', `Error checking data: ${err.message}`);
     return null;
   }
 }
 
-
-
-
-
+/**
+ * --- DEPRECATED: Old per-date fetch ---
+ * Keeps compatibility but logic moves to fetchGlobalComputationStatus
+ */
+async function fetchComputationStatus(dateStr, config, { db }) {
+  const collection = config.computationStatusCollection || 'computation_status';
+  const docRef = db.collection(collection).doc(dateStr);
+  const snap = await docRef.get();
+  return snap.exists ? snap.data() : {};
+}
+
+/**
+ * --- NEW: Fetches the SINGLE GLOBAL status document ---
+ * Loads the entire history of statuses in one read.
+ */
+async function fetchGlobalComputationStatus(config, { db }) {
+  const collection = config.computationStatusCollection || 'computation_status';
+  const docRef = db.collection(collection).doc('global_status');
+  const snap = await docRef.get();
+  return snap.exists ? snap.data() : {};
+}
+
+/**
+ * --- DEPRECATED: Old per-date update ---
+ */
+async function updateComputationStatus(dateStr, updates, config, { db }) {
+  if (!updates || Object.keys(updates).length === 0) return;
+  const collection = config.computationStatusCollection || 'computation_status';
+  const docRef = db.collection(collection).doc(dateStr);
+  await docRef.set(updates, { merge: true });
+}
+
+/**
+ * --- NEW: Batch Updates to Global Document ---
+ * Accepts a map of { "YYYY-MM-DD": { calcName: true, ... } }
+ * and writes them using dot notation to avoid overwriting other dates.
+ */
+async function updateGlobalComputationStatus(updatesByDate, config, { db }) {
+  if (!updatesByDate || Object.keys(updatesByDate).length === 0) return;
+  const collection = config.computationStatusCollection || 'computation_status';
+  const docRef = db.collection(collection).doc('global_status');
 
-  for
-
-
-
-
-      calcsToFetch.add(normalizeName(calc.name));
+  // Flatten to dot notation for Firestore update: "2023-10-27.calcName": true
+  const flattenUpdates = {};
+  for (const [date, statuses] of Object.entries(updatesByDate)) {
+    for (const [calc, status] of Object.entries(statuses)) {
+      flattenUpdates[`${date}.${calc}`] = status;
     }
   }
 
+  try {
+    await docRef.update(flattenUpdates);
+  } catch (err) {
+    // If doc doesn't exist (first run), update fails. Use set({merge:true}).
+    if (err.code === 5) { // NOT_FOUND
+      const deepObj = {};
+      for (const [date, statuses] of Object.entries(updatesByDate)) {
+        deepObj[date] = statuses;
+      }
+      await docRef.set(deepObj, { merge: true });
+    } else {
+      throw err;
+    }
+  }
+}
+
+/**
+ * --- UPDATED: fetchExistingResults ---
+ * (Unchanged, keeps fetching results per date as this is heavy data)
+ */
+async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db }, includeSelf = false) {
+  const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
+  const calcsToFetch = new Set();
+  for (const calc of calcsInPass) {
+    if (calc.dependencies) { calc.dependencies.forEach(d => calcsToFetch.add(normalizeName(d))); }
+    if (includeSelf && calc.isHistorical) { calcsToFetch.add(normalizeName(calc.name)); }
+  }
   if (!calcsToFetch.size) return {};
-
   const fetched = {};
   const docRefs = [];
-  const names
-
+  const names = [];
   for (const name of calcsToFetch) {
     const m = manifestMap.get(name);
-    if (m) {
-
-
-
-      names.push(name);
-    }
-  }
-
+    if (m) { docRefs.push(db.collection(config.resultsCollection).doc(dateStr)
+        .collection(config.resultsSubcollection).doc(m.category || 'unknown')
+        .collection(config.computationsSubcollection).doc(name));
+      names.push(name); } }
   if (docRefs.length) {
     const snaps = await db.getAll(...docRefs);
-    snaps.forEach((doc, i) => {
-      if(doc.exists && doc.data()._completed) {
-        fetched[names[i]] = doc.data();
-      }
-    });
+    snaps.forEach((doc, i) => { if(doc.exists && doc.data()._completed) { fetched[names[i]] = doc.data(); } });
   }
   return fetched;
 }
 
-
-
-
-
-    (c.rootDataDependencies || []).forEach(d => { if(earliestDates[d] > earliest) earliest = earliestDates[d]; });
-    if (c.isHistorical) earliest.setUTCDate(earliest.getUTCDate() + 1);
-    if (new Date(dateStr) < earliest) return false;
-    if (!checkRootDependencies(c, rootDataStatus).canRun) return false;
-    if (c.type === 'meta' && c.dependencies && c.dependencies.some(d => !existingResults[normalizeName(d)])) return false;
-    return true;
-  };
-  return { standardCalcsToRun: standardCalcs.filter(filter), metaCalcsToRun: metaCalcs.filter(filter) };
-}
-
-// --- EXECUTION DELEGATES ---
-
+/**
+ * --- UPDATED: streamAndProcess ---
+ * (Unchanged)
+ */
 async function streamAndProcess(dateStr, state, passName, config, deps, rootData, portfolioRefs, historyRefs, fetchedDeps, previousFetchedDeps) {
   const { logger } = deps;
   const controller = new ComputationController(config, deps);
-
   const calcs = Object.values(state).filter(c => c && c.manifest);
   const streamingCalcs = calcs.filter(c =>
     c.manifest.rootDataDependencies.includes('portfolio') ||
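Note: to make the new global-status layout concrete, the sketch below (illustrative dates and calculation names, assuming a `config` object and an initialized Firestore `db` are in scope) shows the document shape that `fetchGlobalComputationStatus` reads and the dot-notation fields that `updateGlobalComputationStatus` writes:

```js
// Shape of computation_status/global_status after a few runs (illustrative data):
// {
//   "2023-10-26": { portfoliovalue: true, dailyreturn: true },
//   "2023-10-27": { portfoliovalue: true }
// }

// A caller passes per-date updates...
const updatesByDate = { "2023-10-27": { dailyreturn: true } };

// ...which updateGlobalComputationStatus flattens into dot-notation field paths,
// so only the named fields change and other dates in the document stay untouched:
// { "2023-10-27.dailyreturn": true }
//
// On the very first run the document does not exist yet, update() rejects with
// code 5 (NOT_FOUND), and the helper falls back to set(deepObj, { merge: true }).
await updateGlobalComputationStatus(updatesByDate, config, { db });
```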
@@ -139,20 +179,13 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
   const prevDate = new Date(dateStr + 'T00:00:00Z'); prevDate.setUTCDate(prevDate.getUTCDate() - 1);
   const prevDateStr = prevDate.toISOString().slice(0, 10);
 
-  // 1. Today's Portfolio Stream
   const tP_iter = streamPortfolioData(config, deps, dateStr, portfolioRefs);
 
-  // 2. Yesterday's Portfolio Stream (for 'isHistorical' calcs)
   const needsYesterdayPortfolio = streamingCalcs.some(c => c.manifest.isHistorical);
-  const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs)
-    ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs)
-    : null;
+  const yP_iter = (needsYesterdayPortfolio && rootData.yesterdayPortfolioRefs) ? streamPortfolioData(config, deps, prevDateStr, rootData.yesterdayPortfolioRefs) : null;
 
-  // 3. Today's History Stream (NEW)
   const needsTradingHistory = streamingCalcs.some(c => c.manifest.rootDataDependencies.includes('history'));
-  const tH_iter = (needsTradingHistory && historyRefs)
-    ? streamHistoryData(config, deps, dateStr, historyRefs)
-    : null;
+  const tH_iter = (needsTradingHistory && historyRefs) ? streamHistoryData(config, deps, dateStr, historyRefs) : null;
 
   let yP_chunk = {};
   let tH_chunk = {};
@@ -167,8 +200,8 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
           calc.manifest,
           dateStr,
           tP_chunk,
-          yP_chunk,
-          tH_chunk,
+          yP_chunk,
+          tH_chunk,
           fetchedDeps,
           previousFetchedDeps
         )
@@ -178,36 +211,47 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
   logger.log('INFO', `[${passName}] Streaming complete.`);
 }
 
-
-
-
+/**
+ * --- UPDATED: runStandardComputationPass ---
+ * Now accepts `skipStatusWrite` and returns `successUpdates`
+ */
+async function runStandardComputationPass(date, calcs, passName, config, deps, rootData, fetchedDeps, previousFetchedDeps, skipStatusWrite = false) {
   const dStr = date.toISOString().slice(0, 10);
   const logger = deps.logger;
 
   const fullRoot = { ...rootData };
   if (calcs.some(c => c.isHistorical)) {
-    const prev
+    const prev = new Date(date); prev.setUTCDate(prev.getUTCDate() - 1);
     const prevStr = prev.toISOString().slice(0, 10);
     fullRoot.yesterdayPortfolioRefs = await getPortfolioPartRefs(config, deps, prevStr);
   }
 
   const state = {};
   for (const c of calcs) {
-    try {
-      const inst = new c.class();
-      inst.manifest = c;
-      state[normalizeName(c.name)] = inst;
-    }
+    try {
+      const inst = new c.class();
+      inst.manifest = c;
+      state[normalizeName(c.name)] = inst;
+    }
+    catch(e) {
+      logger.log('WARN', `Failed to init ${c.name}`);
+    }
   }
 
   await streamAndProcess(dStr, state, passName, config, deps, fullRoot, rootData.portfolioRefs, rootData.historyRefs, fetchedDeps, previousFetchedDeps);
-
+
+  // Return the updates instead of just writing them
+  return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
 }
 
-
+/**
+ * --- UPDATED: runMetaComputationPass ---
+ * Now accepts `skipStatusWrite` and returns `successUpdates`
+ */
+async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, previousFetchedDeps, rootData, skipStatusWrite = false) {
   const controller = new ComputationController(config, deps);
-  const dStr
-  const state
+  const dStr = date.toISOString().slice(0, 10);
+  const state = {};
 
   for (const mCalc of calcs) {
     try {
@@ -218,11 +262,17 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
     } catch (e) { deps.logger.log('ERROR', `Meta calc failed ${mCalc.name}: ${e.message}`); }
   }
 
-  await commitResults(state, dStr, passName, config, deps);
+  return await commitResults(state, dStr, passName, config, deps, skipStatusWrite);
 }
 
-
+/**
+ * --- UPDATED: commitResults ---
+ * Added `skipStatusWrite` parameter. Returns `successUpdates`.
+ */
+async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
   const writes = [], schemas = [], sharded = {};
+  const successUpdates = {};
+
   for (const name in stateObj) {
     const calc = stateObj[name];
     try {
@@ -230,7 +280,7 @@ async function commitResults(stateObj, dStr, passName, config, deps) {
       if (!result) continue;
       const standardRes = {};
      for (const key in result) {
-        if (key.startsWith('sharded_')) {
+        if (key.startsWith('sharded_')) {
          const sData = result[key];
          for (const c in sData) { sharded[c] = sharded[c] || {}; Object.assign(sharded[c], sData[c]); }
        } else standardRes[key] = result[key];
@@ -245,9 +295,7 @@ async function commitResults(stateObj, dStr, passName, config, deps) {
         });
       }
       if (calc.manifest.class.getSchema) {
-        // FIX: Remove the 'class' property (function) because Firestore cannot store it. (We were literally submitting the entire JS class to firestore...)
         const { class: _cls, ...safeMetadata } = calc.manifest;
-
         schemas.push({
           name,
           category: calc.manifest.category,
@@ -255,6 +303,9 @@ async function commitResults(stateObj, dStr, passName, config, deps) {
           metadata: safeMetadata
         });
       }
+
+      successUpdates[name] = true;
+
     } catch (e) { deps.logger.log('ERROR', `Commit failed ${name}: ${e.message}`); }
   }
 
@@ -269,13 +320,24 @@ async function commitResults(stateObj, dStr, passName, config, deps) {
     }
     if (sWrites.length) await commitBatchInChunks(config, deps, sWrites, `${passName} Sharded ${col}`);
   }
+
+  if (!skipStatusWrite && Object.keys(successUpdates).length > 0) {
+    await updateComputationStatus(dStr, successUpdates, config, deps);
+    deps.logger.log('INFO', `[${passName}] Updated status document for ${Object.keys(successUpdates).length} computations.`);
+  }
+
+  return successUpdates;
 }
 
 module.exports = {
   groupByPass,
+  checkRootDependencies,
   checkRootDataAvailability,
   fetchExistingResults,
-
+  fetchComputationStatus,
+  fetchGlobalComputationStatus,
+  updateComputationStatus,
+  updateGlobalComputationStatus,
   runStandardComputationPass,
   runMetaComputationPass
 };
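Note: the new `skipStatusWrite` flag and the returned `successUpdates` imply a batching pattern on the caller's side. The sketch below is one hypothetical orchestration loop built on the exported helpers; the `runAllPasses` wrapper, its arguments, and the omission of meta passes are assumptions for illustration, not part of this diff:

```js
// Hypothetical orchestration sketch using the exported helpers; `dates`,
// `manifest`, and `earliestDates` are supplied by the caller, and `deps`
// carries the Firestore `db` plus a `logger`, as elsewhere in this module.
async function runAllPasses(dates, manifest, earliestDates, config, deps) {
  const passes = groupByPass(manifest);
  const statusUpdatesByDate = {};

  for (const dateStr of dates) {
    const date = new Date(dateStr + 'T00:00:00Z');
    const rootData = await checkRootDataAvailability(dateStr, config, deps, earliestDates);
    if (!rootData) continue;

    for (const [passName, calcs] of Object.entries(passes)) {
      const fetchedDeps = await fetchExistingResults(dateStr, calcs, manifest, config, deps);
      // skipStatusWrite = true: commitResults returns successUpdates instead of
      // writing the per-date status document itself. Meta passes would be run
      // analogously via runMetaComputationPass.
      const successUpdates = await runStandardComputationPass(
        date, calcs, passName, config, deps, rootData, fetchedDeps, {}, true);
      statusUpdatesByDate[dateStr] = { ...(statusUpdatesByDate[dateStr] || {}), ...successUpdates };
    }
  }

  // One batched write to computation_status/global_status instead of one write per date.
  await updateGlobalComputationStatus(statusUpdatesByDate, config, deps);
}
```

Under this pattern the per-date status documents are never touched: the single `global_status` document absorbs one batched write per run, which is what the dot-notation flattening in `updateGlobalComputationStatus` is designed to support.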