bulltrackers-module 1.0.146 → 1.0.148

@@ -2,6 +2,8 @@ const { FieldPath } = require('@google-cloud/firestore');
2
2
  // --- MODIFIED: Import streamPortfolioData ---
3
3
  const { getPortfolioPartRefs, loadFullDayMap, loadDataByRefs, loadDailyInsights, loadDailySocialPostInsights, getHistoryPartRefs, streamPortfolioData, streamHistoryData } = require('../utils/data_loader.js');
4
4
  const { normalizeName, commitBatchInChunks } = require('../utils/utils.js');
5
+ // --- CHANGED: We only need batchStoreSchemas now ---
6
+ const { batchStoreSchemas } = require('../utils/schema_capture');
5
7
 
6
8
  /** Stage 1: Group manifest by pass number */
7
9
  function groupByPass(manifest) { return manifest.reduce((acc, calc) => { (acc[calc.pass] = acc[calc.pass] || []).push(calc); return acc; }, {}); }
@@ -20,7 +22,7 @@ function checkRootDependencies(calcManifest, rootDataStatus) {
20
22
  else if (dep === 'history' && !rootDataStatus.hasHistory) missing.push('history');
21
23
  }
22
24
  return { canRun: missing.length === 0, missing };
23
- }
25
+ }
24
26
 
25
27
  /** * --- MODIFIED: Uses earliestDates map to avoid unnecessary queries ---
26
28
  * Stage 3: Check root data availability for a date
@@ -33,24 +35,17 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
33
35
  let hasPortfolio = false, hasInsights = false, hasSocial = false, hasHistory = false;
34
36
  try {
35
37
  const tasks = [];
36
- if (dateToProcess >= earliestDates.portfolio)
37
- {tasks.push(getPortfolioPartRefs(config, dependencies, dateStr).then(res => {portfolioRefs = res;hasPortfolio = !!(res?.length);}));}
38
- if (dateToProcess >= earliestDates.insights) {
39
- tasks.push(loadDailyInsights(config, dependencies, dateStr).then(res => {insightsData = res;hasInsights = !!res;}));}
40
- if (dateToProcess >= earliestDates.social) {
41
- tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(res => {socialData = res;hasSocial = !!res;}));}
42
- if (dateToProcess >= earliestDates.history) {
43
- tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(res => {historyRefs = res;hasHistory = !!(res?.length);}));}
38
+ if (dateToProcess >= earliestDates.portfolio) {tasks.push(getPortfolioPartRefs(config, dependencies, dateStr).then(res => {portfolioRefs = res;hasPortfolio = !!(res?.length);}));}
39
+ if (dateToProcess >= earliestDates.insights) {tasks.push(loadDailyInsights(config, dependencies, dateStr).then(res => {insightsData = res;hasInsights = !!res;}));}
40
+ if (dateToProcess >= earliestDates.social) {tasks.push(loadDailySocialPostInsights(config, dependencies, dateStr).then(res => {socialData = res;hasSocial = !!res;}));}
41
+ if (dateToProcess >= earliestDates.history) {tasks.push(getHistoryPartRefs(config, dependencies, dateStr).then(res => {historyRefs = res;hasHistory = !!(res?.length);}));}
44
42
  await Promise.all(tasks);
45
- logger.log('INFO', `[PassRunner] Data availability for ${dateStr}: P:${hasPortfolio}, I:${hasInsights}, S:${hasSocial}, H:${hasHistory}`);
46
-
43
 + logger.log('INFO', `[PassRunner] Data availability for ${dateStr}: P:${hasPortfolio}, I:${hasInsights}, S:${hasSocial}, H:${hasHistory}`);
47
44
  if (!(hasPortfolio || hasInsights || hasSocial || hasHistory)) { logger.log('WARN', `[PassRunner] No root data at all for ${dateStr}.`); return null; }
48
45
  return { portfolioRefs, todayInsights: insightsData, todaySocialPostInsights: socialData, historyRefs, status: { hasPortfolio, hasInsights, hasSocial, hasHistory } };
49
46
  } catch (err) { logger.log('ERROR', `[PassRunner] Error checking data for ${dateStr}`, { errorMessage: err.message }); return null; }
50
47
  }
51
48
 
52
-
53
-
54
49
  /** --- MODIFIED: Stage 4: Fetch ALL existing computed results for the pass ---
55
50
  * This function now checks for *all* calcs in the pass, not just meta-dependencies,
56
51
  * to enable skipping completed work.
@@ -58,22 +53,18 @@ async function checkRootDataAvailability(dateStr, config, dependencies, earliest
58
53
  async function fetchExistingResults(dateStr, calcsInPass, fullManifest, config, { db, logger }) {
59
54
  const manifestMap = new Map(fullManifest.map(c => [normalizeName(c.name), c]));
60
55
  const allCalcsInPass = new Set(calcsInPass.map(c => normalizeName(c.name)));
61
- if (!allCalcsInPass.size) return {};
62
- logger.log('INFO', `[PassRunner] Checking for ${allCalcsInPass.size} existing results for ${dateStr}...`);
56
+ if (!allCalcsInPass.size) return {}; logger.log('INFO', `[PassRunner] Checking for ${allCalcsInPass.size} existing results for ${dateStr}...`);
63
57
  const docRefs = [], depNames = [];
64
- for (const calcName of allCalcsInPass) {
65
- const calcManifest = manifestMap.get(calcName);
58
+ for (const calcName of allCalcsInPass) { const calcManifest = manifestMap.get(calcName);
66
59
  if (!calcManifest) { logger.log('ERROR', `[PassRunner] Missing manifest for ${calcName}`); continue; }
67
60
  docRefs.push(db.collection(config.resultsCollection).doc(dateStr).collection(config.resultsSubcollection).doc(calcManifest.category||'unknown').collection(config.computationsSubcollection).doc(calcName));
68
61
  depNames.push(calcName);
69
62
  }
70
- const fetched = {};
71
- if (docRefs.length) (await db.getAll(...docRefs)).forEach((doc,i)=>fetched[depNames[i]]=doc.exists?doc.data():null);
72
- return fetched;
63
+ const fetched = {}; if (docRefs.length) (await db.getAll(...docRefs)).forEach((doc,i)=>fetched[depNames[i]]=doc.exists?doc.data():null); return fetched;
73
64
  }
74
65
 
75
66
  /**
76
- * --- ENTIRELY REWRITTEN: Stage 5: Filter calculations ---
67
+ * --- Stage 5: Filter calculations ---
77
68
  * This function now implements your "even better design".
78
69
  * It calculates the *true earliest run date* for every calculation
79
70
  * and filters them out *before* the "Running..." log ever appears.
@@ -95,13 +86,10 @@ function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, existingRe
95
86
  };
96
87
  const filterCalc = (calc) => {
97
88
  if (existingResults[calc.name]) {logger.log('TRACE', `[Pass ${passToRun}] Skipping ${calc.name} for ${dateStr}. Result already exists.`); skipped.add(calc.name); return false;}
98
-
99
89
  const earliestRunDate = getTrueEarliestRunDate(calc);
100
90
  if (dateToProcess < earliestRunDate) {logger.log('TRACE', `[Pass ${passToRun}] Skipping ${calc.name} for ${dateStr}. Date is before true earliest run date (${earliestRunDate.toISOString().slice(0, 10)}).`); skipped.add(calc.name); return false; }
101
-
102
91
  const { canRun, missing: missingRoot } = checkRootDependencies(calc, rootDataStatus);
103
92
  if (!canRun) {logger.log('INFO', `[Pass ${passToRun}] Skipping ${calc.name} for ${dateStr}. Data missing for this date: [${missingRoot.join(', ')}]`);skipped.add(calc.name); return false;}
104
-
105
93
  if (calc.type === 'meta') { const missingDeps = (calc.dependencies || []).map(normalizeName).filter(d => !existingResults[d]); if (missingDeps.length > 0) { logger.log('WARN', `[Pass ${passToRun} Meta] Skipping ${calc.name} for ${dateStr}. Missing computed deps: [${missingDeps.join(', ')}]`); skipped.add(calc.name); return false;} }
106
94
  return true;
107
95
  };
@@ -110,7 +98,6 @@ function filterCalculations(standardCalcs, metaCalcs, rootDataStatus, existingRe
110
98
  return { standardCalcsToRun, metaCalcsToRun };
111
99
  }
112
100
 
113
-
114
101
  /** Stage 6: Initialize calculator instances */
115
102
  function initializeCalculators(calcs, logger) { const state = {}; for (const c of calcs) { const name=normalizeName(c.name), Cl=c.class; if(typeof Cl==='function') try { const inst=new Cl(); inst.manifest=c; state[name]=inst; } catch(e){logger.warn(`Init failed ${name}`,{errorMessage:e.message}); state[name]=null;} else {logger.warn(`Class missing ${name}`); state[name]=null;} } return state; }
116
103
 
@@ -120,16 +107,10 @@ function initializeCalculators(calcs, logger) { const state = {}; for (const c o
120
107
  async function loadHistoricalData(date, calcs, config, deps, rootData) {
121
108
  const { logger } = deps;
122
109
  const updated = {...rootData};
123
- const tasks = [];
124
-
125
- // --- REMOVED: needsYesterdayPortfolio ---
126
- // --- REMOVED: needsTodayHistory ---
127
- // --- REMOVED: needsYesterdayHistory ---
110
+ const tasks = [];
128
111
  const needsYesterdayInsights = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('insights'));
129
112
  const needsYesterdaySocial = calcs.some(c => c.isHistorical && c.rootDataDependencies.includes('social'));
130
113
 
131
- // --- REMOVED: All async tasks for portfolio and history data ---
132
-
133
114
  if(needsYesterdayInsights) {
134
115
  tasks.push((async()=>{ const prev=new Date(date); prev.setUTCDate(prev.getUTCDate()-1); const prevStr=prev.toISOString().slice(0,10);
135
116
  logger.log('INFO', `[PassRunner] Loading YESTERDAY insights data for ${prevStr}`);
@@ -140,7 +121,7 @@ async function loadHistoricalData(date, calcs, config, deps, rootData) {
140
121
  updated.yesterdaySocialPostInsights=await loadDailySocialPostInsights(config,deps,prevStr); })());}
141
122
 
142
123
  await Promise.all(tasks);
143
- return updated; // This no longer contains the large data maps
124
+ return updated;
144
125
  }
145
126
 
146
127
  /**
@@ -153,22 +134,17 @@ async function loadHistoricalData(date, calcs, config, deps, rootData) {
153
134
  */
154
135
  async function streamAndProcess(dateStr, state, passName, config, deps, rootData) {
155
136
  const { logger, calculationUtils } = deps;
156
-
157
- // --- MODIFIED: yesterdayPortfolios & todayHistoryData are no longer in rootData ---
158
137
  const { todayInsights, yesterdayInsights, todaySocialPostInsights, yesterdaySocialPostInsights } = rootData;
159
-
160
138
  const calcsThatStreamPortfolio = Object.values(state).filter(calc => calc && calc.manifest && (calc.manifest.rootDataDependencies.includes('portfolio') || calc.manifest.category === 'speculators'));
161
139
  const context={instrumentMappings:(await calculationUtils.loadInstrumentMappings()).instrumentToTicker, sectorMapping:(await calculationUtils.loadInstrumentMappings()).instrumentToSector, todayDateStr:dateStr, dependencies:deps, config};
162
140
  let firstUser=true;
163
141
 
164
- // --- (Non-streaming (insights/social) calculation logic remains unchanged) ---
165
142
  for(const name in state){
166
143
  const calc=state[name]; if(!calc||typeof calc.process!=='function') continue;
167
144
  const cat=calc.manifest.category;
168
145
  if(cat==='socialPosts'||cat==='insights') {
169
146
  if (firstUser) {
170
147
  logger.log('INFO', `[${passName}] Running non-streaming calc: ${name}`);
171
- // (Using 'null' for hT and hY as they aren't relevant for these calcs)
172
148
  let args=[null,null,null,{...context, userType: 'n/a'},todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,null,null];
173
149
  if(calc.manifest.isHistorical) {
174
150
  args=[null,null,null,{...context, userType: 'n/a'},todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,null,null];
@@ -177,7 +153,6 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
177
153
  }
178
154
  }
179
155
  }
180
- // --- (End of non-streaming calc logic) ---
181
156
 
182
157
  if (calcsThatStreamPortfolio.length === 0) {
183
158
  logger.log('INFO', `[${passName}] No portfolio-streaming calcs to run for ${dateStr}. Skipping stream.`);
@@ -186,7 +161,6 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
186
161
 
187
162
  logger.log('INFO', `[${passName}] Streaming portfolio & historical data for ${calcsThatStreamPortfolio.length} calcs...`);
188
163
 
189
- // --- NEW: Prepare iterators and maps for parallel streaming ---
190
164
  const prevDate = new Date(dateStr + 'T00:00:00Z');
191
165
  prevDate.setUTCDate(prevDate.getUTCDate() - 1);
192
166
  const prevDateStr = prevDate.toISOString().slice(0, 10);
@@ -194,15 +168,12 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
194
168
  const needsYesterdayPortfolio = Object.values(state).some(c => c && c.manifest.isHistorical && c.manifest.rootDataDependencies.includes('portfolio'));
195
169
  const needsTodayHistory = Object.values(state).some(c => c && c.manifest.rootDataDependencies.includes('history'));
196
170
 
197
- // Get the async iterators
198
171
  const yP_iterator = needsYesterdayPortfolio ? streamPortfolioData(config, deps, prevDateStr) : null;
199
172
  const hT_iterator = needsTodayHistory ? streamHistoryData(config, deps, dateStr) : null;
200
173
 
201
- // These maps will accumulate data chunk-by-chunk
202
174
  let yesterdayPortfolios = {};
203
175
  let todayHistoryData = {};
204
176
 
205
- // Load the FIRST chunk of historical data before the loop starts
206
177
  if (yP_iterator) {
207
178
  Object.assign(yesterdayPortfolios, (await yP_iterator.next()).value || {});
208
179
  logger.log('INFO', `[${passName}] Loaded first chunk of yesterday's portfolios.`);
@@ -211,12 +182,9 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
211
182
  Object.assign(todayHistoryData, (await hT_iterator.next()).value || {});
212
183
  logger.log('INFO', `[${passName}] Loaded first chunk of today's history.`);
213
184
  }
214
-
215
- // --- MODIFIED: Main streaming loop (driven by TODAY's portfolio stream) ---
185
+
216
186
  for await (const chunk of streamPortfolioData(config, deps, dateStr)) {
217
187
 
218
- // --- NEW: Load the NEXT chunk of historical data ---
219
- // This keeps the historical maps populated as the main stream progresses
220
188
  if (yP_iterator) {
221
189
  Object.assign(yesterdayPortfolios, (await yP_iterator.next()).value || {});
222
190
  }
@@ -224,12 +192,11 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
224
192
  Object.assign(todayHistoryData, (await hT_iterator.next()).value || {});
225
193
  }
226
194
 
227
- for(const uid in chunk){ // Iterate through today's portfolio chunk
195
+ for(const uid in chunk){
228
196
  const p = chunk[uid]; if(!p) continue;
229
197
  const userType=p.PublicPositions?'speculator':'normal';
230
198
  context.userType=userType;
231
199
 
232
- // --- NEW: Look up corresponding historical data for THIS user ---
233
200
  const pY = yesterdayPortfolios[uid] || null; // Yesterday's Portfolio
234
201
  const hT = todayHistoryData[uid] || null; // Today's History
235
202
  // (Note: yesterdayHistoryData (hY) would require another stream if needed)
@@ -240,11 +207,9 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
240
207
 
241
208
  if(isSocialOrInsights) continue; // Skip non-streaming calcs
242
209
 
243
- // --- MODIFIED: Arguments now use streamed historical data (hT) ---
244
210
  let args=[p,null,uid,context,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,hT,null];
245
211
 
246
212
  if(isHistorical){
247
- // pY is now the streamed yesterday's portfolio for this user
248
213
  if(!pY && (cat !== 'behavioural' && name !== 'historical-performance-aggregator')) continue;
249
214
  args=[p,pY,uid,context,todayInsights,yesterdayInsights,todaySocialPostInsights,yesterdaySocialPostInsights,hT,null];
250
215
  }
@@ -255,7 +220,6 @@ async function streamAndProcess(dateStr, state, passName, config, deps, rootData
255
220
  }
256
221
  firstUser=false;
257
222
 
258
- // --- NEW: Clear this user from historical maps to free memory ---
259
223
  if (pY) { delete yesterdayPortfolios[uid]; }
260
224
  if (hT) { delete todayHistoryData[uid]; }
261
225
  }
@@ -268,18 +232,26 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
268
232
  const dStr = date.toISOString().slice(0, 10), logger = deps.logger;
269
233
  if (calcs.length === 0) {
270
234
  logger.log('INFO', `[${passName}] No standard calcs to run for ${dStr} after filtering.`);
271
- return; }
235
+ return;
236
+ }
272
237
  logger.log('INFO', `[${passName}] Running ${dStr} with ${calcs.length} calcs.`);
273
238
  const fullRoot = await loadHistoricalData(date, calcs, config, deps, rootData);
274
239
  const state = initializeCalculators(calcs, logger);
275
240
  await streamAndProcess(dStr, state, passName, config, deps, fullRoot);
241
+
276
242
  let success = 0;
243
+ const failedCalcs = [];
277
244
  const standardWrites = [];
278
245
  const shardedWrites = {};
246
+
247
+ // === NEW: Collect schemas ===
248
+ const schemasToStore = [];
249
+
279
250
  for (const name in state) {
280
251
  const calc = state[name];
281
252
  if (!calc || typeof calc.getResult !== 'function') continue;
282
- try {const result = await Promise.resolve(calc.getResult());
253
+ try {
254
+ const result = await Promise.resolve(calc.getResult());
283
255
  if (result && Object.keys(result).length > 0) {
284
256
  const standardResult = {};
285
257
  for (const key in result) {
@@ -297,18 +269,60 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
297
269
  const docRef = deps.db.collection(config.resultsCollection).doc(dStr)
298
270
  .collection(config.resultsSubcollection).doc(calc.manifest.category)
299
271
  .collection(config.computationsSubcollection).doc(name);
300
- console.log('Writing standard result for', name, 'on', dStr);
301
272
  standardWrites.push({ ref: docRef, data: standardResult });
302
273
  }
274
+
275
+ // === CHANGED: Capture static schema ===
276
+ const calcClass = calc.manifest.class;
277
+ let staticSchema = null;
278
+ if (calcClass && typeof calcClass.getSchema === 'function') {
279
+ try {
280
+ staticSchema = calcClass.getSchema();
281
+ } catch (e) {
282
+ logger.log('WARN', `[SchemaCapture] Failed to get static schema for ${name}`, { err: e.message });
283
+ }
284
+ } else {
285
+ logger.log('TRACE', `[SchemaCapture] No static schema found for ${name}. Skipping manifest entry.`);
286
+ }
287
+
288
+ if (staticSchema) {
289
+ schemasToStore.push({
290
+ name,
291
+ category: calc.manifest.category,
292
+ schema: staticSchema, // <-- Use the static schema
293
+ metadata: {
294
+ isHistorical: calc.manifest.isHistorical || false,
295
+ dependencies: calc.manifest.dependencies || [],
296
+ rootDataDependencies: calc.manifest.rootDataDependencies || [],
297
+ pass: calc.manifest.pass,
298
+ type: calc.manifest.type || 'standard'
299
+ }
300
+ });
301
+ }
302
+ // === END CHANGED SECTION ===
303
+
303
304
  success++;
304
305
  }
305
306
  } catch (e) {
306
307
  logger.log('ERROR', `getResult failed ${name} for ${dStr}`, { err: e.message, stack: e.stack });
308
+ failedCalcs.push(name);
307
309
  }
308
310
  }
311
+
312
+ // === NEW: Store schemas asynchronously (don't block computation) ===
313
+ if (schemasToStore.length > 0) {
314
+ // This function is now imported from the simplified schema_capture.js
315
+ batchStoreSchemas(deps, config, schemasToStore).catch(err => {
316
+ logger.log('WARN', '[SchemaCapture] Non-blocking schema storage failed', {
317
+ errorMessage: err.message
318
+ });
319
+ });
320
+ }
321
+
309
322
  if (standardWrites.length > 0) {
310
323
  await commitBatchInChunks(config, deps, standardWrites, `${passName} Standard ${dStr}`);
311
324
  }
325
+
312
326
  for (const docPath in shardedWrites) {
313
327
  const docData = shardedWrites[docPath];
314
328
  const shardedDocWrites = [];
@@ -330,10 +344,21 @@ async function runStandardComputationPass(date, calcs, passName, config, deps, r
330
344
  await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${docPath} ${dStr}`);
331
345
  }
332
346
  }
333
- logger.log(success === calcs.length ? 'SUCCESS' : 'WARN', `[${passName}] Completed ${dStr}. Success: ${success}/${calcs.length}`);
347
+
348
+ const logMetadata = {};
349
+ if (failedCalcs.length > 0) {
350
+ logMetadata.failedComputations = failedCalcs;
351
+ }
352
+ logger.log(
353
+ success === calcs.length ? 'SUCCESS' : 'WARN',
354
+ `[${passName}] Completed ${dStr}. Success: ${success}/${calcs.length}`,
355
+ logMetadata
356
+ );
334
357
  }
335
358
 
336
- /** Stage 10: Run meta computations */
359
+ /**
360
+ * Modified runMetaComputationPass with schema capture Stage 10
361
+ */
337
362
  async function runMetaComputationPass(date, calcs, passName, config, deps, fetchedDeps, rootData) {
338
363
  const dStr = date.toISOString().slice(0, 10), logger = deps.logger;
339
364
  if (calcs.length === 0) {
@@ -342,18 +367,34 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
342
367
  }
343
368
  logger.log('INFO', `[${passName}] Running ${dStr} with ${calcs.length} calcs.`);
344
369
  const fullRoot = await loadHistoricalData(date, calcs, config, deps, rootData);
370
+
345
371
  let success = 0;
372
+ const failedCalcs = [];
346
373
  const standardWrites = [];
347
374
  const shardedWrites = {};
375
+
376
+ // === NEW: Collect schemas ===
377
+ const schemasToStore = [];
378
+
348
379
  for (const mCalc of calcs) {
349
380
  const name = normalizeName(mCalc.name), Cl = mCalc.class;
350
- if (typeof Cl !== 'function') {logger.log('ERROR', `Invalid class ${name}`);continue;}
381
+ if (typeof Cl !== 'function') {
382
+ logger.log('ERROR', `Invalid class ${name}`);
383
+ failedCalcs.push(name);
384
+ continue;
385
+ }
351
386
  const inst = new Cl();
352
387
  try {
353
388
  const result = await Promise.resolve(inst.process(dStr, { ...deps, rootData: fullRoot }, config, fetchedDeps));
354
- if (result && Object.keys(result).length > 0) {const standardResult = {}; for (const key in result) {
355
- if (key.startsWith('sharded_')) {const shardedData = result[key];for (const collectionName in shardedData)
356
- {if (!shardedWrites[collectionName]) shardedWrites[collectionName] = {};Object.assign(shardedWrites[collectionName], shardedData[collectionName]);}
389
+ if (result && Object.keys(result).length > 0) {
390
+ const standardResult = {};
391
+ for (const key in result) {
392
+ if (key.startsWith('sharded_')) {
393
+ const shardedData = result[key];
394
+ for (const collectionName in shardedData) {
395
+ if (!shardedWrites[collectionName]) shardedWrites[collectionName] = {};
396
+ Object.assign(shardedWrites[collectionName], shardedData[collectionName]);
397
+ }
357
398
  } else {
358
399
  standardResult[key] = result[key];
359
400
  }
@@ -362,18 +403,60 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
362
403
  const docRef = deps.db.collection(config.resultsCollection).doc(dStr)
363
404
  .collection(config.resultsSubcollection).doc(mCalc.category)
364
405
  .collection(config.computationsSubcollection).doc(name);
365
- console.log('Writing standard result for', name, 'on', dStr);
366
406
  standardWrites.push({ ref: docRef, data: standardResult });
367
407
  }
408
+
409
+ // === CHANGED: Capture static schema ===
410
+ const calcClass = mCalc.class;
411
+ let staticSchema = null;
412
+ if (calcClass && typeof calcClass.getSchema === 'function') {
413
+ try {
414
+ staticSchema = calcClass.getSchema();
415
+ } catch (e) {
416
+ logger.log('WARN', `[SchemaCapture] Failed to get static schema for ${name}`, { err: e.message });
417
+ }
418
+ } else {
419
+ logger.log('TRACE', `[SchemaCapture] No static schema found for ${name}. Skipping manifest entry.`);
420
+ }
421
+
422
+ if (staticSchema) {
423
+ schemasToStore.push({
424
+ name,
425
+ category: mCalc.category,
426
+ schema: staticSchema, // <-- Use the static schema
427
+ metadata: {
428
+ isHistorical: mCalc.isHistorical || false,
429
+ dependencies: mCalc.dependencies || [],
430
+ rootDataDependencies: mCalc.rootDataDependencies || [],
431
+ pass: mCalc.pass,
432
+ type: 'meta'
433
+ }
434
+ });
435
+ }
436
+ // === END CHANGED SECTION ===
437
+
368
438
  success++;
369
439
  }
370
440
  } catch (e) {
371
441
  logger.log('ERROR', `Meta-calc failed ${name} for ${dStr}`, { err: e.message, stack: e.stack });
442
+ failedCalcs.push(name);
372
443
  }
373
444
  }
445
+
446
+ // === NEW: Store schemas asynchronously ===
447
+ if (schemasToStore.length > 0) {
448
+ // This function is now imported from the simplified schema_capture.js
449
+ batchStoreSchemas(deps, config, schemasToStore).catch(err => {
450
+ logger.log('WARN', '[SchemaCapture] Non-blocking schema storage failed', {
451
+ errorMessage: err.message
452
+ });
453
+ });
454
+ }
455
+
374
456
  if (standardWrites.length > 0) {
375
457
  await commitBatchInChunks(config, deps, standardWrites, `${passName} Meta ${dStr}`);
376
458
  }
459
+
377
460
  for (const collectionName in shardedWrites) {
378
461
  const docs = shardedWrites[collectionName];
379
462
  const shardedDocWrites = [];
@@ -387,7 +470,10 @@ async function runMetaComputationPass(date, calcs, passName, config, deps, fetch
387
470
  await commitBatchInChunks(config, deps, shardedDocWrites, `${passName} Sharded ${collectionName} ${dStr}`);
388
471
  }
389
472
  }
390
- logger.log(success === calcs.length ? 'SUCCESS' : 'WARN', `[${passName}] Completed ${dStr}. Success: ${success}/${calcs.length}`);
473
+
474
+ const logMetadata = {};
475
+ if (failedCalcs.length > 0) { logMetadata.failedComputations = failedCalcs; }
476
+ logger.log( success === calcs.length ? 'SUCCESS' : 'WARN', `[${passName}] Completed ${dStr}. Success: ${success}/${calcs.length}`, logMetadata );
391
477
  }
392
478
 
393
479
  module.exports = { groupByPass, checkRootDataAvailability, fetchExistingResults, filterCalculations, runStandardComputationPass, runMetaComputationPass };
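The schema-capture logic above only fires when a calculation class exposes a static getSchema() method. As a point of reference, a minimal sketch of that contract is shown below; the class name, fields, and simplified process() signature are illustrative assumptions, not code from this package:

  class ExampleCrowdFlowCalc {
    // Static, side-effect-free description of the result this calc writes per date.
    static getSchema() {
      return {
        net_crowd_flow_pct: 'number',
        topTickers: ['string']
      };
    }

    // The real pass runner passes additional insight/history arguments; trimmed here.
    process(todayPortfolio, yesterdayPortfolio, uid, context) {
      this.results = this.results || {};
      this.results[uid] = { net_crowd_flow_pct: 0, topTickers: [] };
    }

    getResult() { return this.results || {}; }
  }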
@@ -0,0 +1,64 @@
1
+ /**
2
+ * @fileoverview Schema capture utility for computation outputs
3
+ * This module batches and stores pre-defined static schemas in Firestore.
4
+ */
5
+
6
+ /**
7
+ * Batch store schemas for multiple computations.
8
+ * This function now expects a fully-formed schema, not sample output.
9
+ *
10
+ * @param {object} dependencies - Contains db, logger
11
+ * @param {object} config - Configuration object
12
+ * @param {Array} schemas - Array of {name, category, schema, metadata} objects
13
+ */
14
+ async function batchStoreSchemas(dependencies, config, schemas) {
15
+ const { db, logger } = dependencies;
16
+
17
+ // Check if schema capture is disabled
18
+ if (config.captureSchemas === false) {
19
+ logger.log('INFO', '[SchemaCapture] Schema capture is disabled. Skipping.');
20
+ return;
21
+ }
22
+
23
+ const batch = db.batch();
24
+ const schemaCollection = config.schemaCollection || 'computation_schemas';
25
+
26
+ for (const item of schemas) {
27
+ try {
28
+ // 'item.schema' is now the pre-built schema from static getSchema()
29
+ if (!item.schema) {
30
+ logger.log('WARN', `[SchemaCapture] No schema provided for ${item.name}. Skipping.`);
31
+ continue;
32
+ }
33
+
34
+ const docRef = db.collection(schemaCollection).doc(item.name);
35
+
36
+ batch.set(docRef, {
37
+ computationName: item.name,
38
+ category: item.category,
39
+ schema: item.schema, // Use the provided schema directly
40
+ metadata: item.metadata || {},
41
+ lastUpdated: new Date()
42
+ // 'sampleKeys' is removed as we no longer have the output
43
+ }, { merge: true });
44
+ } catch (error) {
45
+ logger.log('WARN', `[SchemaCapture] Failed to add schema to batch for ${item.name}`, {
46
+ errorMessage: error.message
47
+ });
48
+ }
49
+ }
50
+
51
+ try {
52
+ await batch.commit();
53
+ logger.log('INFO', `[SchemaCapture] Batch stored ${schemas.length} computation schemas`);
54
+ } catch (error) {
55
+ logger.log('ERROR', '[SchemaCapture] Failed to commit schema batch', {
56
+ errorMessage: error.message
57
+ });
58
+ }
59
+ }
60
+
61
+ module.exports = {
62
+ // generateSchema and storeComputationSchema are now obsolete
63
+ batchStoreSchemas
64
+ };
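Taken on its own, batchStoreSchemas receives the caller's dependencies, the config, and an array of pre-built schema entries. A minimal usage sketch, assuming db is a Firestore client and logger the project logger (the require path and the entry values are illustrative):

  const { batchStoreSchemas } = require('../utils/schema_capture');

  // Fire-and-forget, as the pass runners do: schema storage must never block computation.
  batchStoreSchemas(
    { db, logger },
    { schemaCollection: 'computation_schemas' }, // also the default when unset
    [{
      name: 'example-calc',
      category: 'insights',
      schema: { total: 'number' },
      metadata: { pass: 1, type: 'standard' }
    }]
  ).catch(err => logger.log('WARN', '[SchemaCapture] Non-blocking schema storage failed', { errorMessage: err.message }));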
@@ -1,63 +1,13 @@
1
1
  /**
2
2
  * @fileoverview API sub-pipes.
3
3
  * REFACTORED: Now stateless and receive dependencies.
4
- * NEW: Added getDynamicSchema to "test run" calculations
5
- * by mocking async dependencies.
4
+ * NEW: getDynamicSchema now reads static schema.
5
+ * NEW: createManifestHandler added to serve all schemas.
6
6
  */
7
7
 
8
8
  const { FieldPath } = require('@google-cloud/firestore');
9
- // --- NEW: Import calculation utils for mocking ---
10
- // We import 'aiden-shared-calculations-unified' to access its 'utils'
11
- const { utils } = require('aiden-shared-calculations-unified');
12
-
13
- // --- NEW: Store original utils ---
14
- const originalLoadMappings = utils.loadInstrumentMappings;
15
- const originalLoadPrices = utils.loadAllPriceData;
16
- const originalGetSectorMap = utils.getInstrumentSectorMap;
17
-
18
- // --- NEW: Define Mocks ---
19
- // This mock data will be "injected" into the calculations during the test run
20
- const mockMappings = { instrumentToTicker: { 1: 'TEST_TICKER', 2: 'ANOTHER' }, instrumentToSector: { 1: 'Test Sector', 2: 'Other' } };
21
- const mockPrices = { 1: { '2025-01-01': 100, '2025-01-02': 102 } };
22
-
23
- const mockPos = { InstrumentID: 1, NetProfit: 0.05, InvestedAmount: 50, Amount: 1000, Value: 55, Direction: 'Buy', IsBuy: true, PositionID: 123, OpenRate: 100, StopLossRate: 90, TakeProfitRate: 120, Leverage: 1, IsTslEnabled: false, OpenDateTime: '2025-01-01T12:00:00Z', CurrentRate: 105 };
24
- const mockToday = { AggregatedPositions: [mockPos, { ...mockPos, InstrumentID: 2 }], PublicPositions: [mockPos, { ...mockPos, InstrumentID: 2 }], PortfolioValue: 110 };
25
- const mockYesterday = { AggregatedPositions: [mockPos], PublicPositions: [mockPos], PortfolioValue: 100 };
26
- const mockInsights = { insights: [{ instrumentId: 1, total: 100, buy: 50, sell: 50 }] };
27
- const mockSocial = { 'post1': { tickers: ['TEST_TICKER'], sentiment: { overallSentiment: 'Bullish', topics: ['AI'] }, likeCount: 5, commentCount: 2, fullText: 'TEST_TICKER AI' } };
28
-
29
- // A mock context that's passed to process()
30
- const mockContext = {
31
- instrumentMappings: mockMappings.instrumentToTicker,
32
- sectorMapping: mockMappings.instrumentToSector,
33
- todayDateStr: '2025-01-02',
34
- yesterdayDateStr: '2025-01-01',
35
- dependencies: { // For meta-calcs that read other calc results
36
- db: { // Mock the DB to return fake data
37
- collection: function() { return this; },
38
- doc: function() { return this; },
39
- get: async () => ({
40
- exists: true,
41
- data: () => ({ /* mock data for meta-calc deps */
42
- 'asset-crowd-flow': { 'TEST_TICKER': { net_crowd_flow_pct: 1.5 } },
43
- 'social_sentiment_aggregation': { 'tickerSentiment': { 'TEST_TICKER': { sentimentRatio: 80 } } },
44
- 'daily_investor_scores': { 'user123': 8.5 }
45
- })
46
- }),
47
- getAll: async (...refs) => refs.map(ref => ({
48
- exists: true,
49
- data: () => ({ /* mock data for meta-calc deps */
50
- 'asset-crowd-flow': { 'TEST_TICKER': { net_crowd_flow_pct: 1.5 } },
51
- 'social_sentiment_aggregation': { 'tickerSentiment': { 'TEST_TICKER': { sentimentRatio: 80 } } }
52
- })
53
- }))
54
- },
55
- logger: { log: () => {} } // Suppress logs during test run
56
- },
57
- config: {} // For meta-calcs
58
- };
59
- // --- END NEW MOCKS ---
60
9
 
10
+ // --- All Mocks are REMOVED ---
61
11
 
62
12
  /**
63
13
  * Sub-pipe: pipe.api.helpers.validateRequest
@@ -81,6 +31,9 @@ const validateRequest = (query, config) => {
81
31
 
82
32
  /**
83
33
  * Sub-pipe: pipe.api.helpers.buildCalculationMap
34
+ * --- CRITICAL UPDATE ---
35
+ * This function now stores the class itself in the map,
36
+ * which is required by the /manifest/generate endpoint.
84
37
  */
85
38
  const buildCalculationMap = (unifiedCalculations) => {
86
39
  const calcMap = {};
@@ -91,13 +44,19 @@ const buildCalculationMap = (unifiedCalculations) => {
91
44
  // Handle historical subdirectory
92
45
  if (subKey === 'historical' && typeof item === 'object') {
93
46
  for (const calcName in item) {
94
- calcMap[calcName] = { category: category };
47
+ calcMap[calcName] = {
48
+ category: category,
49
+ class: item[calcName] // <-- Store the class
50
+ };
95
51
  }
96
52
  }
97
53
  // Handle regular daily/meta/social calc
98
54
  else if (typeof item === 'function') {
99
55
  const calcName = subKey;
100
- calcMap[calcName] = { category: category };
56
+ calcMap[calcName] = {
57
+ category: category,
58
+ class: item // <-- Store the class
59
+ };
101
60
  }
102
61
  }
103
62
  }
@@ -121,12 +80,6 @@ const getDateStringsInRange = (startDate, endDate) => {
121
80
 
122
81
  /**
123
82
  * Sub-pipe: pipe.api.helpers.fetchData
124
- * @param {object} config - The Generic API V2 configuration object.
125
- * @param {object} dependencies - Contains db, logger.
126
- * @param {string[]} calcKeys - Array of computation keys to fetch.
127
- * @param {string[]} dateStrings - Array of dates to fetch for.
128
- * @param {Object} calcMap - The pre-built calculation lookup map.
129
- * @returns {Promise<Object>} A nested object of [date][computationKey] = data.
130
83
  */
131
84
  const fetchUnifiedData = async (config, dependencies, calcKeys, dateStrings, calcMap) => {
132
85
  const { db, logger } = dependencies;
@@ -144,21 +97,16 @@ const fetchUnifiedData = async (config, dependencies, calcKeys, dateStrings, cal
144
97
  for (const key of calcKeys) {
145
98
  const pathInfo = calcMap[key];
146
99
  if (pathInfo) {
147
- // Use db from dependencies
148
100
  const docRef = db.collection(insightsCollection).doc(date)
149
101
  .collection(resultsSub).doc(pathInfo.category)
150
102
  .collection(compsSub).doc(key);
151
-
152
103
  docRefs.push(docRef);
153
104
  keyPaths.push(key);
154
105
  } else {
155
106
  logger.log('WARN', `[${date}] No path info found for computation key: ${key}`);
156
107
  }
157
108
  }
158
-
159
109
  if (docRefs.length === 0) continue;
160
-
161
- // Use db from dependencies
162
110
  const snapshots = await db.getAll(...docRefs);
163
111
  snapshots.forEach((doc, i) => {
164
112
  const key = keyPaths[i];
@@ -178,13 +126,9 @@ const fetchUnifiedData = async (config, dependencies, calcKeys, dateStrings, cal
178
126
 
179
127
  /**
180
128
  * Factory for the main API handler.
181
- * @param {object} config - The Generic API V2 configuration object.
182
- * @param {object} dependencies - Contains db, logger.
183
- * @param {Object} calcMap - The pre-built calculation lookup map.
184
- * @returns {Function} An async Express request handler.
185
129
  */
186
130
  const createApiHandler = (config, dependencies, calcMap) => {
187
- const { logger } = dependencies; // db is in dependencies
131
+ const { logger } = dependencies;
188
132
 
189
133
  return async (req, res) => {
190
134
  const validationError = validateRequest(req.query, config);
@@ -192,14 +136,10 @@ const createApiHandler = (config, dependencies, calcMap) => {
192
136
  logger.log('WARN', 'API Bad Request', { error: validationError, query: req.query });
193
137
  return res.status(400).send({ status: 'error', message: validationError });
194
138
  }
195
-
196
139
  try {
197
140
  const computationKeys = req.query.computations.split(',');
198
141
  const dateStrings = getDateStringsInRange(req.query.startDate, req.query.endDate);
199
-
200
- // Pass dependencies to sub-pipe
201
142
  const data = await fetchUnifiedData(config, dependencies, computationKeys, dateStrings, calcMap);
202
-
203
143
  res.status(200).send({
204
144
  status: 'success',
205
145
  metadata: {
@@ -221,31 +161,24 @@ const createApiHandler = (config, dependencies, calcMap) => {
221
161
  */
222
162
  function createStructureSnippet(data, maxKeys = 20) {
223
163
  if (data === null || typeof data !== 'object') {
224
- // Handle primitive types
225
164
  if (typeof data === 'number') return 0;
226
165
  if (typeof data === 'string') return "string";
227
166
  if (typeof data === 'boolean') return true;
228
167
  return data;
229
168
  }
230
169
  if (Array.isArray(data)) {
231
- if (data.length === 0) {
232
- return "<empty array>";
233
- }
234
- // Generalize array contents to just the first element's structure
170
+ if (data.length === 0) return "<empty array>";
235
171
  return [ createStructureSnippet(data[0], maxKeys) ];
236
172
  }
237
173
  const newObj = {};
238
174
  const keys = Object.keys(data);
239
175
 
240
- // Check if it's an "example" object (like { "AAPL": {...} })
241
- // This heuristic identifies keys that are all-caps or look like example tickers
242
176
  if (keys.length > 0 && keys.every(k => k.match(/^[A-Z.]+$/) || k.includes('_') || k.match(/^[0-9]+$/))) {
243
177
  const exampleKey = keys[0];
244
178
  newObj[exampleKey] = createStructureSnippet(data[exampleKey], maxKeys);
245
179
  newObj["... (more items)"] = "...";
246
180
  return newObj;
247
181
  }
248
-
249
182
  if (keys.length > maxKeys) {
250
183
  const firstKey = keys[0] || "example_key";
251
184
  newObj[firstKey] = createStructureSnippet(data[firstKey], maxKeys);
@@ -260,7 +193,6 @@ function createStructureSnippet(data, maxKeys = 20) {
260
193
 
261
194
  /**
262
195
  * Sub-pipe: pipe.api.helpers.getComputationStructure
263
- * (This is now a debug tool to check *live* data)
264
196
  */
265
197
  async function getComputationStructure(computationName, calcMap, config, dependencies) {
266
198
  const { db, logger } = dependencies;
@@ -270,40 +202,29 @@ async function getComputationStructure(computationName, calcMap, config, depende
270
202
  return { status: 'error', computation: computationName, message: `Computation not found in calculation map.` };
271
203
  }
272
204
  const { category } = pathInfo;
273
-
274
205
  const insightsCollection = config.unifiedInsightsCollection || 'unified_insights';
275
206
  const resultsSub = config.resultsSubcollection || 'results';
276
207
  const compsSub = config.computationsSubcollection || 'computations';
277
-
278
208
  const computationQueryPath = `${category}.${computationName}`;
279
- // Use db from dependencies
209
+
280
210
  const dateQuery = db.collection(insightsCollection)
281
211
  .where(computationQueryPath, '==', true)
282
212
  .orderBy(FieldPath.documentId(), 'desc')
283
213
  .limit(1);
284
-
285
214
  const dateSnapshot = await dateQuery.get();
286
-
287
215
  if (dateSnapshot.empty) {
288
216
  return { status: 'error', computation: computationName, message: `No computed data found. (Query path: ${computationQueryPath})` };
289
217
  }
290
-
291
218
  const latestStoredDate = dateSnapshot.docs[0].id;
292
-
293
- // Use db from dependencies
294
219
  const docRef = db.collection(insightsCollection).doc(latestStoredDate)
295
220
  .collection(resultsSub).doc(category)
296
221
  .collection(compsSub).doc(computationName);
297
-
298
222
  const doc = await docRef.get();
299
-
300
223
  if (!doc.exists) {
301
224
  return { status: 'error', computation: computationName, message: `Summary flag was present for ${latestStoredDate} but doc is missing.` };
302
225
  }
303
-
304
226
  const fullData = doc.data();
305
227
  const structureSnippet = createStructureSnippet(fullData);
306
-
307
228
  return {
308
229
  status: 'success',
309
230
  computation: computationName,
@@ -311,7 +232,6 @@ async function getComputationStructure(computationName, calcMap, config, depende
311
232
  latestStoredDate: latestStoredDate,
312
233
  structureSnippet: structureSnippet,
313
234
  };
314
-
315
235
  } catch (error) {
316
236
  logger.log('ERROR', `API /structure/${computationName} helper failed.`, { errorMessage: error.message });
317
237
  return { status: 'error', computation: computationName, message: error.message };
@@ -320,54 +240,73 @@ async function getComputationStructure(computationName, calcMap, config, depende
320
240
 
321
241
 
322
242
  /**
323
- * --- NEW: DYNAMIC SCHEMA GENERATION HARNESS ---
324
- * @param {class} CalcClass The calculation class to test.
325
- * @param {string} calcName The name of the calculation for logging.
326
- * @returns {Promise<object>} A snippet of the output structure.
243
+ * --- UPDATED: DYNAMIC SCHEMA GENERATION HARNESS ---
327
244
  */
328
245
  async function getDynamicSchema(CalcClass, calcName) {
329
- // 1. Apply Mocks (Monkey-Patching)
330
- utils.loadInstrumentMappings = async () => mockMappings;
331
- utils.loadAllPriceData = async () => mockPrices;
332
- utils.getInstrumentSectorMap = async () => mockMappings.instrumentToSector;
333
-
334
- let result = {};
335
- const calc = new CalcClass();
336
-
337
- try {
338
- // 2. Check for Meta-Calculation signature: process(dateStr, dependencies, config)
339
- const processStr = calc.process.toString();
340
- if (processStr.includes('dateStr') && processStr.includes('dependencies')) {
341
- // It's a meta-calc. Run its process() with mock dependencies
342
- result = await calc.process('2025-01-02', mockContext.dependencies, mockContext.config);
343
- } else {
344
- // It's a standard calculation. Run process() + getResult()
345
- await calc.process(
346
- mockToday,
347
- mockYesterday,
348
- 'test-user-123',
349
- mockContext,
350
- mockInsights, // todayInsights
351
- mockInsights, // yesterdayInsights
352
- mockSocial, // todaySocial
353
- mockSocial // yesterdaySocial
354
- );
355
- result = await calc.getResult();
246
+ if (CalcClass && typeof CalcClass.getSchema === 'function') {
247
+ try {
248
+ return CalcClass.getSchema();
249
+ } catch (e) {
250
+ console.error(`Error running static getSchema() for ${calcName}: ${e.message}`);
251
+ return { "ERROR": `Failed to get static schema: ${e.message}` };
356
252
  }
357
- } catch (e) {
358
- console.error(`Error running schema test for ${calcName}: ${e.message}`);
359
- result = { "ERROR": `Failed to generate schema: ${e.message}` };
360
- } finally {
361
- // 3. Restore Original Functions
362
- utils.loadInstrumentMappings = originalLoadMappings;
363
- utils.loadAllPriceData = originalLoadPrices;
364
- utils.getInstrumentSectorMap = originalGetSectorMap;
253
+ } else {
254
+ return { "ERROR": `Computation '${calcName}' does not have a static getSchema() method defined.` };
365
255
  }
366
-
367
- // 4. Sanitize the result to just a "structure"
368
- return createStructureSnippet(result);
369
256
  }
370
- // --- END NEW HARNESS ---
257
+ // --- END UPDATED HARNESS ---
258
+
259
+
260
+ /**
261
+ * --- NEW: MANIFEST API HANDLER ---
262
+ */
263
+ const createManifestHandler = (config, dependencies, calcMap) => {
264
+ const { db, logger } = dependencies;
265
+ const schemaCollection = config.schemaCollection || 'computation_schemas';
266
+
267
+ return async (req, res) => {
268
+ try {
269
+ logger.log('INFO', '[API /manifest] Fetching all computation schemas...');
270
+ const snapshot = await db.collection(schemaCollection).get();
271
+ if (snapshot.empty) {
272
+ logger.log('WARN', '[API /manifest] No schemas found in collection.');
273
+ return res.status(404).send({ status: 'error', message: 'No computation schemas have been generated yet.' });
274
+ }
275
+
276
+ const manifest = {};
277
+ snapshot.forEach(doc => {
278
+ const data = doc.data();
279
+ manifest[doc.id] = {
280
+ // --- CHANGED: Return the structure consistent with your file ---
281
+ category: data.category,
282
+ structure: data.schema, // Use 'structure' key
283
+ metadata: data.metadata,
284
+ lastUpdated: data.lastUpdated
285
+ };
286
+ });
287
+
288
+ res.status(200).send({
289
+ status: 'success',
290
+ // --- CHANGED: Use the structure from your file ---
291
+ summary: {
292
+ source: 'firestore_computation_schemas',
293
+ totalComputations: snapshot.size,
294
+ schemasAvailable: snapshot.size,
295
+ schemasFailed: 0,
296
+ lastUpdated: Math.max(...Object.values(manifest).map(m =>
297
+ m.lastUpdated ? m.lastUpdated.toMillis() : 0
298
+ ))
299
+ },
300
+ manifest: manifest
301
+ });
302
+
303
+ } catch (error) {
304
+ logger.log('ERROR', 'API /manifest handler failed.', { errorMessage: error.message, stack: error.stack });
305
+ res.status(500).send({ status: 'error', message: 'An internal error occurred.' });
306
+ }
307
+ };
308
+ };
309
+ // --- END NEW HANDLER ---
371
310
 
372
311
 
373
312
  module.exports = {
@@ -376,5 +315,6 @@ module.exports = {
376
315
  fetchUnifiedData,
377
316
  createApiHandler,
378
317
  getComputationStructure,
379
- getDynamicSchema // <-- EXPORT NEW HELPER
318
+ getDynamicSchema,
319
+ createManifestHandler // <-- EXPORT NEW HANDLER
380
320
  };
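With createManifestHandler wired to GET /manifest, the endpoint returns whatever has been written to the schema collection rather than running any calculation. An illustrative response shape follows; the computation name, schema, and timestamps are made-up values, and the per-entry lastUpdated is shown in simplified form rather than as a serialized Firestore Timestamp:

  {
    "status": "success",
    "summary": {
      "source": "firestore_computation_schemas",
      "totalComputations": 1,
      "schemasAvailable": 1,
      "schemasFailed": 0,
      "lastUpdated": 1735689600000
    },
    "manifest": {
      "example-calc": {
        "category": "insights",
        "structure": { "total": "number" },
        "metadata": { "pass": 1, "type": "standard" },
        "lastUpdated": "2025-01-01T00:00:00.000Z"
      }
    }
  }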
@@ -1,8 +1,8 @@
1
1
  /**
2
2
  * @fileoverview Main entry point for the Generic API module.
3
3
  * Exports the 'createApiApp' main pipe function.
4
- * REFACTORED: /manifest endpoint now uses a dynamic "test harness"
5
- * to generate the schema without modifying calculation files.
4
+ * REFACTORED: /manifest endpoint now reads static schemas from Firestore.
5
+ * REFACTORED: /manifest/generate endpoint now reads static schema from class.
6
6
  */
7
7
 
8
8
  const express = require('express');
@@ -12,7 +12,8 @@ const {
12
12
  buildCalculationMap,
13
13
  createApiHandler,
14
14
  getComputationStructure,
15
- getDynamicSchema // <-- IMPORT NEW HELPER
15
+ createManifestHandler, // <-- IMPORT NEW HANDLER
16
+ getDynamicSchema // <-- This helper's behavior has changed
16
17
  } = require('./helpers/api_helpers.js');
17
18
 
18
19
  /**
@@ -25,9 +26,9 @@ const {
25
26
  */
26
27
  function createApiApp(config, dependencies, unifiedCalculations) {
27
28
  const app = express();
28
- const { logger } = dependencies; // db is in dependencies
29
+ const { logger, db } = dependencies;
29
30
 
30
- // --- Pre-compute Calculation Map ---
31
+ // --- Pre-compute Calculation Map (now includes classes) ---
31
32
  const calcMap = buildCalculationMap(unifiedCalculations);
32
33
 
33
34
  // --- Middleware ---
@@ -60,90 +61,148 @@ function createApiApp(config, dependencies, unifiedCalculations) {
60
61
  // --- Debug Endpoint to get *stored* structure from Firestore ---
61
62
  app.get('/structure/:computationName', async (req, res) => {
62
63
  const { computationName } = req.params;
63
-
64
- // Call sub-pipe, passing dependencies
65
64
  const result = await getComputationStructure(computationName, calcMap, config, dependencies);
66
-
67
65
  if (result.status === 'error') {
68
66
  const statusCode = result.message.includes('not found') ? 404 : 500;
69
67
  return res.status(statusCode).send(result);
70
68
  }
71
-
72
69
  res.status(200).send(result);
73
70
  });
74
71
 
75
- // --- NEW: Fully Refactored Manifest Endpoint (Dynamic Test Run) ---
76
- app.get('/manifest', async (req, res) => {
77
- logger.log('INFO', 'API /manifest dynamic generation starting...');
72
+ /**
73
+ * ---!!!--- REPLACED SECTION ---!!!---
74
+ * This route now uses the createManifestHandler to serve the
75
+ * pre-generated schemas from Firestore.
76
+ */
77
+ app.get('/manifest', createManifestHandler(config, dependencies, calcMap));
78
+
79
+
80
+ /**
81
+ * ---!!!--- REPLACED SECTION ---!!!---
82
+ * This endpoint now reads the *static* getSchema() method from a class
83
+ * and stores that in Firestore. It no longer does runtime inference.
84
+ */
85
+ app.post('/manifest/generate/:computationName', async (req, res) => {
86
+ const { computationName } = req.params;
87
+ logger.log('INFO', `Manual static schema generation requested for: ${computationName}`);
88
+
78
89
  try {
79
- const manifest = {};
80
- let successCount = 0;
81
- const errors = [];
90
+ // 1. Find the calculation class from the calcMap
91
+ const calcInfo = calcMap[computationName];
82
92
 
83
- // This logic iterates through the calculation module structure
84
- for (const category in unifiedCalculations) {
85
- for (const subKey in unifiedCalculations[category]) {
86
- const item = unifiedCalculations[category][subKey];
87
- let calcName = null;
88
- let CalcClass = null;
89
-
90
- // Handle nested 'historical' directory
91
- if (subKey === 'historical' && typeof item === 'object') {
92
- for (const name in item) {
93
- calcName = name;
94
- CalcClass = item[name];
95
- if (CalcClass && typeof CalcClass === 'function') {
96
- try {
97
- manifest[calcName] = {
98
- category: category,
99
- structure: await getDynamicSchema(CalcClass, calcName) // <-- DYNAMIC CALL
100
- };
101
- successCount++;
102
- } catch (e) { errors.push(`${category}/${calcName}: ${e.message}`); }
103
- }
104
- }
105
- }
106
- // Handle regular calc at root of category
107
- else if (typeof item === 'function') {
108
- calcName = subKey;
109
- CalcClass = item;
110
- if (CalcClass && typeof CalcClass === 'function') {
111
- try {
112
- manifest[calcName] = {
113
- category: category,
114
- structure: await getDynamicSchema(CalcClass, calcName) // <-- DYNAMIC CALL
115
- };
116
- successCount++;
117
- } catch (e) { errors.push(`${category}/${calcName}: ${e.message}`); }
118
- }
119
- }
120
- }
93
+ if (!calcInfo || !calcInfo.class) {
94
+ return res.status(404).send({
95
+ status: 'error',
96
+ message: `Computation '${computationName}' not found or has no class in calculation map.`
97
+ });
121
98
  }
99
+
100
+ const targetCalcClass = calcInfo.class;
101
+ const targetCategory = calcInfo.category;
122
102
 
123
- const totalComputations = Object.keys(calcMap).length;
124
- logger.log('INFO', `API /manifest complete. Generated schema for ${successCount}/${totalComputations} computations.`);
103
+ // 2. Use the getDynamicSchema helper (which now just reads the static method)
104
+ const schemaStructure = await getDynamicSchema(targetCalcClass, computationName);
105
+
106
+ if (schemaStructure.ERROR) {
107
+ return res.status(400).send({
108
+ status: 'error',
109
+ message: `Failed to get static schema: ${schemaStructure.ERROR}`
110
+ });
111
+ }
125
112
 
113
+ // 3. Import the new batchStoreSchemas utility
114
+ const { batchStoreSchemas } = require('../computation-system/utils/schema_capture');
115
+
116
+ // 4. Get metadata (as much as we can from the class)
117
+ const metadata = {
118
+ isHistorical: !!(targetCalcClass.toString().includes('yesterdayPortfolio')),
119
+ dependencies: (typeof targetCalcClass.getDependencies === 'function') ? targetCalcClass.getDependencies() : [],
120
+ rootDataDependencies: [], // Cannot be known here
121
+ pass: 'unknown', // Cannot be known here
122
+ type: (targetCategory === 'meta' || targetCategory === 'socialPosts') ? targetCategory : 'standard',
123
+ note: "Manually generated via API"
124
+ };
125
+
126
+ // 5. Store the schema in Firestore
127
+ await batchStoreSchemas(
128
+ dependencies,
129
+ config,
130
+ [{
131
+ name: computationName,
132
+ category: targetCategory,
133
+ schema: schemaStructure, // Pass the static schema
134
+ metadata: metadata
135
+ }]
136
+ );
137
+
138
+ // 6. Respond with the schema
126
139
  res.status(200).send({
127
140
  status: 'success',
128
- summary: {
129
- source: 'computation_module_dynamic_test',
130
- totalComputations: totalComputations,
131
- schemasGenerated: successCount,
132
- schemasFailed: errors.length,
133
- },
134
- manifest: manifest,
135
- errors: errors.length > 0 ? errors : undefined,
141
+ message: `Static schema read and stored for ${computationName}`,
142
+ computation: computationName,
143
+ category: targetCategory,
144
+ schema: schemaStructure
145
+ });
146
+
147
+ } catch (error) {
148
+ logger.log('ERROR', `Failed to generate schema for ${computationName}`, {
149
+ errorMessage: error.message,
150
+ stack: error.stack
136
151
  });
152
 + res.status(500).send({
153
+ status: 'error',
154
+ message: `Failed to generate/store schema: ${error.message}`
155
+ });
156
+ }
157
+ });
137
158
 
159
+ /**
160
+ * This endpoint is fine as-is. It reads from the Firestore
161
+ * collection that the /manifest and /manifest/generate routes populate.
162
+ */
163
+ app.get('/manifest/:computationName', async (req, res) => {
164
+ const { computationName } = req.params;
165
+
166
+ try {
167
+ const schemaCollection = config.schemaCollection || 'computation_schemas';
168
+ const schemaDoc = await db.collection(schemaCollection).doc(computationName).get();
169
+
170
+ if (!schemaDoc.exists) {
171
+ return res.status(404).send({
172
+ status: 'error',
173
+ message: `Schema not found for computation: ${computationName}`,
174
+ hint: 'Try running the computation system or use POST /manifest/generate/:computationName'
175
+ });
176
+ }
177
+
178
+ const data = schemaDoc.data();
179
+
180
+ // --- Use the same response structure as /manifest ---
181
+ res.status(200).send({
182
+ status: 'success',
183
+ computation: computationName,
184
+ category: data.category,
185
+ structure: data.schema,
186
+ metadata: data.metadata || {},
187
+ lastUpdated: data.lastUpdated
188
+ });
189
+
138
190
  } catch (error) {
139
- logger.log('ERROR', `API /manifest failed unexpectedly.`, { errorMessage: error.message, stack: error.stack });
140
- res.status(500).send({ status: 'error', message: `An internal error occurred while building the manifest.` });
191
+ logger.log('ERROR', `Failed to fetch schema for ${computationName}`, {
192
+ errorMessage: error.message
193
+ });
194
+ res.status(500).send({
195
+ status: 'error',
196
+ message: 'An internal error occurred.'
197
+ });
141
198
  }
142
199
  });
143
200
 
201
+ // --- THIS MUST BE THE LAST LINE OF THE FUNCTION ---
144
202
  return app;
145
203
  }
146
204
 
205
+
147
206
  module.exports = {
148
207
  createApiApp,
149
208
  // Exporting helpers so they can be part of the pipe.api.helpers object
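Finally, the two new manifest routes can be exercised with plain HTTP calls. A sketch assuming a Node 18+ context (global fetch, top-level await) and that the app listens locally; the host, port, and 'example-calc' name are placeholders:

  // POST /manifest/generate/:computationName reads the class's static getSchema(),
  // stores it via batchStoreSchemas, and echoes the schema back.
  const gen = await fetch('http://localhost:8080/manifest/generate/example-calc', { method: 'POST' });
  console.log((await gen.json()).schema);

  // GET /manifest/:computationName then serves the stored schema document.
  const one = await fetch('http://localhost:8080/manifest/example-calc');
  console.log((await one.json()).structure);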
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bulltrackers-module",
3
- "version": "1.0.146",
3
+ "version": "1.0.148",
4
4
  "description": "Helper Functions for Bulltrackers.",
5
5
  "main": "index.js",
6
6
  "files": [