bulltrackers-module 1.0.770 → 1.0.772

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,228 @@
1
/**
 * @fileoverview Sector Correlations (Debug Version)
 *
 * Computes the correlation matrix for all sectors from equal-weighted daily
 * log-returns of their constituent instruments.
 * INCLUDES DEBUG LOGGING to verify data availability.
 */
const { Computation } = require('../framework');

class SectorCorrelations extends Computation {
  /**
   * Framework configuration: declares the input tables (with lookbacks and
   * field projections) and the storage targets for this computation.
   * @returns {object} config consumed by the computation framework
   */
  static getConfig() {
    return {
      name: 'SectorCorrelations',
      description: 'Computes correlation matrix of sectors (Best Effort)',
      type: 'global',
      category: 'market_analysis',
      isHistorical: false,

      requires: {
        'asset_prices': {
          lookback: 30,
          mandatory: true,
          // FIX 1: Explicitly request 'close' to match your schema
          fields: ['instrument_id', 'date', 'close']
        },
        'ticker_mappings': {
          lookback: 0,
          mandatory: true,
          fields: ['instrument_id', 'ticker']
        },
        'sector_mappings': {
          lookback: 0,
          mandatory: true,
          fields: ['symbol', 'sector']
        }
      },

      storage: {
        bigquery: true,
        firestore: { enabled: false }
      }
    };
  }

  /**
   * Builds the sector-by-sector correlation matrix and stores it under the
   * '_global' entity via setResult.
   * @param {object} context - framework context; uses `data` (fetched rows
   *                           keyed by table name) and `targetDate`.
   */
  async process(context) {
    const { data, targetDate } = context;

    // ===========================================================================
    // DEBUG: INSPECT INCOMING DATA
    // ===========================================================================
    const toArray = (input) => Array.isArray(input) ? input : Object.values(input).flat();
    const prices = toArray(data['asset_prices'] || []);

    console.log(`[DEBUG] SectorCorrelations Processing...`);
    console.log(`[DEBUG] Target Date: ${targetDate}`);
    console.log(`[DEBUG] Asset Prices Loaded: ${prices.length}`);

    if (prices.length > 0) {
      console.log(`[DEBUG] First Price Row:`, JSON.stringify(prices[0]));
      // FIX: optional-chain — a malformed first row with a null/absent `date`
      // previously threw here and killed the whole computation.
      const sampleDate = prices[0].date?.value ?? prices[0].date;
      console.log(`[DEBUG] Sample Date from Row 0: ${sampleDate}`);
    } else {
      console.warn(`[DEBUG] WARNING: No asset prices returned from DB. Check your Test Date vs Data Dates.`);
    }

    // ===========================================================================
    // 1. BUILD SECTOR MAPPING
    // ===========================================================================
    const tickerMap = new Map();
    toArray(data['ticker_mappings'] || []).forEach(row => {
      if (row.instrument_id && row.ticker) {
        tickerMap.set(String(row.instrument_id), row.ticker.toUpperCase());
      }
    });

    const symbolSectorMap = new Map();
    const allSectors = new Set();

    toArray(data['sector_mappings'] || []).forEach(row => {
      // 'N/A' and 'Other' are catch-all buckets with no economic meaning.
      // FIX: exclude 'Other' here so the debug counters, the return series
      // and the published sector list all agree (previously the debug pass
      // skipped 'Other' but the real computation pass included it).
      if (row.symbol && row.sector && row.sector !== 'N/A' && row.sector !== 'Other') {
        symbolSectorMap.set(row.symbol.toUpperCase(), row.sector);
        allSectors.add(row.sector);
      }
    });

    const sectorList = Array.from(allSectors).sort();

    // ===========================================================================
    // 2. PROCESS PRICES
    // ===========================================================================
    // FIX: the original made TWO passes over `prices` — one purely to produce
    // the debug counters (then threw the work away), and a second to group
    // rows for the return calculation, with a different sector filter. This
    // single pass groups AND counts with one consistent filter.
    const instrumentPrices = {}; // { instrument_id: [priceRow, ...] }
    let processedRows = 0;
    let matchedRows = 0;

    prices.forEach(p => {
      processedRows++;
      if (!p.instrument_id || !p.date) return;

      // FIX 2: Handle 'close' vs 'close_price' safely.
      // `??` (not `||`) so a legitimate close of 0 is not mistaken for "missing".
      const closePrice = p.close ?? p.close_price;

      if (closePrice === undefined || closePrice === null) {
        // If the first row fails, log it
        if (processedRows === 1) console.error(`[DEBUG] Row 1 missing 'close' value. Keys: ${Object.keys(p).join(',')}`);
        return;
      }

      const ticker = tickerMap.get(String(p.instrument_id));
      if (!ticker) return; // Instrument not in mapping

      const sector = symbolSectorMap.get(ticker);
      if (!sector) return; // No meaningful sector ('N/A'/'Other' filtered above)

      matchedRows++;

      const id = String(p.instrument_id);
      if (!instrumentPrices[id]) instrumentPrices[id] = [];
      instrumentPrices[id].push(p);
    });

    console.log(`[DEBUG] Rows Processed: ${processedRows}`);
    console.log(`[DEBUG] Rows Matched to Sector: ${matchedRows} (Low match count = Ticker/Sector Map issue)`);

    // Daily log-returns grouped by date, then sector:
    // { 'YYYY-MM-DD': { SectorA: [ret1, ret2, ...] } }
    const instrumentReturns = {};

    // Normalizes a date cell to 'YYYY-MM-DD'. BigQuery DATE cells arrive as
    // objects with a `.value` string; plain strings/Dates are handled too.
    const toDateKey = (raw) => {
      const d = raw?.value ?? raw;
      return typeof d === 'string' ? d.split('T')[0] : new Date(d).toISOString().split('T')[0];
    };

    Object.entries(instrumentPrices).forEach(([id, pList]) => {
      // Grouping pass guarantees every id here maps to a real sector.
      const sector = symbolSectorMap.get(tickerMap.get(id));
      if (!sector) return;

      // Sort by Date (returns must be computed on consecutive sessions)
      pList.sort((a, b) => new Date(a.date?.value ?? a.date) - new Date(b.date?.value ?? b.date));

      for (let i = 1; i < pList.length; i++) {
        // FIX 2 APPLIED HERE: prefer 'close', fall back to 'close_price',
        // without `||` swallowing a zero close.
        const prev = Number(pList[i - 1].close ?? pList[i - 1].close_price);
        const curr = Number(pList[i].close ?? pList[i].close_price);

        const date = toDateKey(pList[i].date);

        // Non-positive prices cannot produce a log-return; skip them.
        if (prev > 0 && curr > 0) {
          const ret = Math.log(curr / prev);
          if (!instrumentReturns[date]) instrumentReturns[date] = {};
          if (!instrumentReturns[date][sector]) instrumentReturns[date][sector] = [];
          instrumentReturns[date][sector].push(ret);
        }
      }
    });

    // ===========================================================================
    // 3. ALIGN TIMELINES & COMPUTE MATRIX
    // ===========================================================================
    const validDates = Object.keys(instrumentReturns).sort();
    console.log(`[DEBUG] Valid Data Dates Found: ${validDates.length}`);
    if (validDates.length > 0) {
      console.log(`[DEBUG] Date Range: ${validDates[0]} to ${validDates[validDates.length - 1]}`);
    }

    // Equal-weight average return per sector per date; 0 when a sector has no
    // observations that day, so every series has identical length `n`.
    const sectorHistory = {};
    sectorList.forEach(s => sectorHistory[s] = []);

    validDates.forEach(date => {
      sectorList.forEach(s => {
        const rets = instrumentReturns[date][s];
        if (rets && rets.length > 0) {
          const avg = rets.reduce((a, b) => a + b, 0) / rets.length;
          sectorHistory[s].push(avg);
        } else {
          sectorHistory[s].push(0);
        }
      });
    });

    const matrix = {};
    const n = validDates.length;

    // Init Identity (self-correlation is 1, everything else defaults to 0)
    sectorList.forEach(s1 => {
      matrix[s1] = {};
      sectorList.forEach(s2 => {
        matrix[s1][s2] = (s1 === s2) ? 1.0 : 0.0;
      });
    });

    if (n > 1) {
      for (let i = 0; i < sectorList.length; i++) {
        const s1 = sectorList[i];
        for (let j = i + 1; j < sectorList.length; j++) {
          const s2 = sectorList[j];
          const corr = this._calculateCorrelation(sectorHistory[s1], sectorHistory[s2], n);
          if (!isNaN(corr)) {
            // FIX: round once and mirror into both symmetric cells.
            const rounded = Number(corr.toFixed(4));
            matrix[s1][s2] = rounded;
            matrix[s2][s1] = rounded;
          }
        }
      }
    } else {
      console.warn(`[SectorCorrelations] Only ${n} valid data points found. Returning Identity Matrix.`);
    }

    this.setResult('_global', {
      correlationMatrix: matrix,
      sectors: sectorList,
      computedAt: targetDate,
      dataPoints: n
    });
  }

  /**
   * Pearson correlation of the first `n` points of series x and y.
   * @param {number[]} x - first return series (length >= n)
   * @param {number[]} y - second return series (length >= n)
   * @param {number} n - number of aligned observations to use
   * @returns {number} correlation in [-1, 1]; 0 when n < 2 or either series
   *                   has zero variance (correlation undefined).
   */
  _calculateCorrelation(x, y, n) {
    if (n < 2) return 0;
    let sumX = 0, sumY = 0, sumXY = 0, sumX2 = 0, sumY2 = 0;
    for (let i = 0; i < n; i++) {
      sumX += x[i]; sumY += y[i];
      sumXY += x[i] * y[i];
      sumX2 += x[i] * x[i]; sumY2 += y[i] * y[i];
    }
    const numerator = (n * sumXY) - (sumX * sumY);
    const termX = (n * sumX2 - sumX * sumX);
    const termY = (n * sumY2 - sumY * sumY);
    // <= 0 guards both zero variance and tiny negative values from FP error.
    if (termX <= 0 || termY <= 0) return 0;
    return numerator / Math.sqrt(termX * termY);
  }
}

module.exports = SectorCorrelations;
@@ -6,37 +6,20 @@ class SignedInUserMirrorHistory extends Computation {
6
6
  name: 'SignedInUserMirrorHistory',
7
7
  type: 'per-entity',
8
8
  category: 'signed_in_user',
9
-
9
+
10
10
  // CRITICAL: Must be historical to accumulate the "Past" list over time
11
11
  // without needing huge lookbacks.
12
- isHistorical: true,
13
-
12
+ isHistorical: true,
13
+
14
14
  requires: {
15
- 'portfolio_snapshots': {
16
- lookback: 0,
17
- mandatory: true,
18
- fields: ['user_id', 'portfolio_data', 'date']
19
- },
20
- // Reduced from 90 to 30 to comply with DAG limits
21
- 'trade_history_snapshots': {
22
- lookback: 30,
23
- mandatory: false,
24
- fields: ['user_id', 'history_data']
25
- },
26
- 'pi_master_list': {
27
- lookback: 1,
28
- mandatory: false,
29
- fields: ['cid', 'username']
30
- }
15
+ 'portfolio_snapshots': { lookback: 0, mandatory: true, fields: ['user_id', 'portfolio_data', 'date'] },
16
+ 'trade_history_snapshots': { lookback: 1, mandatory: false, fields: ['user_id', 'history_data'] },
17
+ 'pi_master_list': { lookback: 1, mandatory: false, fields: ['cid', 'username'] }
31
18
  },
32
-
19
+
33
20
  storage: {
34
21
  bigquery: true,
35
- firestore: {
36
- enabled: true,
37
- path: 'users/{entityId}/mirror_history',
38
- merge: true
39
- }
22
+ firestore: { enabled: false }
40
23
  }
41
24
  };
42
25
  }
@@ -48,7 +31,7 @@ class SignedInUserMirrorHistory extends Computation {
48
31
  const masterList = data['pi_master_list'] || [];
49
32
  const usernameMap = new Map();
50
33
  const toArray = (input) => Array.isArray(input) ? input : Object.values(input || {});
51
-
34
+
52
35
  toArray(masterList).forEach(row => {
53
36
  if (row.cid && row.username) usernameMap.set(String(row.cid), row.username);
54
37
  });
@@ -56,11 +39,11 @@ class SignedInUserMirrorHistory extends Computation {
56
39
  // 2. Determine CURRENT Mirrors (From Today's Portfolio)
57
40
  const currentMap = new Map();
58
41
  const portfolioRow = data['portfolio_snapshots'];
59
-
42
+
60
43
  if (portfolioRow) {
61
44
  const pData = rules.portfolio.extractPortfolioData(portfolioRow);
62
- const rawMirrors = rules.portfolio.extractMirrors(pData);
63
-
45
+ const rawMirrors = rules.portfolio.extractMirrors(pData);
46
+
64
47
  rawMirrors.forEach(m => {
65
48
  const cid = String(m.ParentCID || m.MirrorID);
66
49
  if (cid && cid !== '0') {
@@ -70,7 +53,7 @@ class SignedInUserMirrorHistory extends Computation {
70
53
  invested: m.Invested || 0,
71
54
  profit: m.NetProfit || 0,
72
55
  status: 'active',
73
- startedAt: m.InitDate || null
56
+ startedAt: m.InitDate || null
74
57
  });
75
58
  }
76
59
  });
@@ -100,7 +83,7 @@ class SignedInUserMirrorHistory extends Computation {
100
83
  });
101
84
  }
102
85
 
103
- // C. Supplement with Trade History (Only look back 30 days for metadata)
86
+ // C. Supplement with Trade History (Snapshot contains rolling 365d data)
104
87
  // This catches any explicit "Stop Copy" events that might provide better metadata
105
88
  const historyRows = data['trade_history_snapshots'] || [];
106
89
  historyRows.forEach(row => {
@@ -108,7 +91,7 @@ class SignedInUserMirrorHistory extends Computation {
108
91
  trades.forEach(trade => {
109
92
  const parentCID = trade.ParentCID || trade.MirrorID || trade.CopyTraderID;
110
93
  const cidStr = String(parentCID);
111
-
94
+
112
95
  // If we found a past interaction that isn't currently active
113
96
  if (parentCID && cidStr !== '0' && !currentMap.has(cidStr)) {
114
97
  if (!pastMap.has(cidStr)) {
@@ -13,7 +13,7 @@ class SignedInUserProfileMetrics extends Computation {
13
13
  type: 'per-entity',
14
14
  category: 'signed_in_user',
15
15
  isHistorical: true,
16
-
16
+
17
17
  requires: {
18
18
  // --- Core Data (Drivers) ---
19
19
  // [CRITICAL] Filters restrict execution to Signed-In Users only
@@ -21,7 +21,7 @@ class SignedInUserProfileMetrics extends Computation {
21
21
  lookback: 30,
22
22
  mandatory: true,
23
23
  fields: ['user_id', 'portfolio_data', 'date'],
24
- filter: { user_type: 'SIGNED_IN_USER' }
24
+ filter: { user_type: 'SIGNED_IN_USER' }
25
25
  },
26
26
  'trade_history_snapshots': {
27
27
  lookback: 30,
@@ -35,7 +35,7 @@ class SignedInUserProfileMetrics extends Computation {
35
35
  fields: ['user_id', 'posts_data', 'date'],
36
36
  filter: { user_type: 'SIGNED_IN_USER' }
37
37
  },
38
-
38
+
39
39
  // --- Reference Data (Lookups) ---
40
40
  // Used only to enrich PIs found in the user's copy list
41
41
  'pi_rankings': {
@@ -53,7 +53,7 @@ class SignedInUserProfileMetrics extends Computation {
53
53
  mandatory: false,
54
54
  fields: ['pi_id', 'average_rating', 'date']
55
55
  },
56
-
56
+
57
57
  // --- Mappings ---
58
58
  'ticker_mappings': { mandatory: false },
59
59
  'sector_mappings': { mandatory: false }
@@ -63,7 +63,7 @@ class SignedInUserProfileMetrics extends Computation {
63
63
  bigquery: true,
64
64
  firestore: {
65
65
  enabled: true,
66
- path: 'user_profiles/{entityId}/metrics/{date}', // Standardized path
66
+ path: 'SignedInUserProfiles/{entityId}/metrics/{date}', // Standardized path
67
67
  merge: true
68
68
  }
69
69
  },
@@ -142,7 +142,7 @@ class SignedInUserProfileMetrics extends Computation {
142
142
  const portfolios = sortAsc(getEntityRows(data['portfolio_snapshots']));
143
143
  const historyData = sortAsc(getEntityRows(data['trade_history_snapshots']));
144
144
  const socialData = sortAsc(getEntityRows(data['social_post_snapshots']));
145
-
145
+
146
146
  const currentPortfolio = portfolios.length > 0 ? portfolios[portfolios.length - 1] : null;
147
147
 
148
148
  // ==========================================================================================
@@ -189,7 +189,7 @@ class SignedInUserProfileMetrics extends Computation {
189
189
  trades.forEach(t => {
190
190
  const closeDate = rules.trades.getCloseDate(t);
191
191
  if (!closeDate) return;
192
-
192
+
193
193
  const dKey = closeDate.toISOString().split('T')[0];
194
194
  const profit = rules.trades.getNetProfit(t);
195
195
 
@@ -202,21 +202,21 @@ class SignedInUserProfileMetrics extends Computation {
202
202
  // Stats for Aggregate Graphics
203
203
  if (profit > 0) { winLoss.wins++; winLoss.profit += profit; }
204
204
  else if (profit < 0) { winLoss.losses++; winLoss.loss += profit; }
205
-
205
+
206
206
  dailyPnL.set(dKey, (dailyPnL.get(dKey) || 0) + profit);
207
207
  });
208
208
  });
209
209
 
210
210
  result.profitablePositions.data = Array.from(tradeStats.values())
211
- .sort((a,b) => a.date.localeCompare(b.date))
211
+ .sort((a, b) => a.date.localeCompare(b.date))
212
212
  .slice(-30);
213
-
213
+
214
214
  result.performanceGraphics.data = {
215
215
  winRate: (winLoss.wins + winLoss.losses) > 0 ? Number(((winLoss.wins / (winLoss.wins + winLoss.losses)) * 100).toFixed(2)) : 0,
216
216
  avgWin: winLoss.wins > 0 ? Number((winLoss.profit / winLoss.wins).toFixed(2)) : 0,
217
217
  avgLoss: winLoss.losses > 0 ? Number((winLoss.loss / winLoss.losses).toFixed(2)) : 0,
218
218
  profitFactor: winLoss.losses !== 0 ? Number((Math.abs(winLoss.profit / winLoss.loss)).toFixed(2)) : (winLoss.profit > 0 ? Infinity : 0),
219
- dailyPnL: Array.from(dailyPnL.entries()).map(([date, pnl]) => ({ date, pnl: Number(pnl.toFixed(2)) })).sort((a,b) => a.date.localeCompare(b.date)).slice(-20)
219
+ dailyPnL: Array.from(dailyPnL.entries()).map(([date, pnl]) => ({ date, pnl: Number(pnl.toFixed(2)) })).sort((a, b) => a.date.localeCompare(b.date)).slice(-20)
220
220
  };
221
221
 
222
222
  // ==========================================================================================
@@ -225,10 +225,10 @@ class SignedInUserProfileMetrics extends Computation {
225
225
  if (currentPortfolio) {
226
226
  const pData = rules.portfolio.extractPortfolioData(currentPortfolio);
227
227
  const positions = rules.portfolio.extractPositions(pData);
228
-
228
+
229
229
  // Extract Mirrors using Rules (assumes rule exists, or manual extraction if needed)
230
230
  // If rules.portfolio doesn't have extractMirrors, we can look at pData.AggregatedMirrors directly
231
- const mirrors = pData.AggregatedMirrors || [];
231
+ const mirrors = pData.AggregatedMirrors || [];
232
232
 
233
233
  let totalInv = 0, totalProf = 0;
234
234
  const secExp = {}, assetExp = {}, secProfits = {};
@@ -239,18 +239,18 @@ class SignedInUserProfileMetrics extends Computation {
239
239
  const inv = rules.portfolio.getInvested(pos);
240
240
  const profPct = rules.portfolio.getNetProfit(pos);
241
241
  const profVal = inv * (profPct / 100);
242
-
243
- totalInv += inv;
242
+
243
+ totalInv += inv;
244
244
  totalProf += profVal;
245
-
246
- const ticker = resolveTicker(id);
245
+
246
+ const ticker = resolveTicker(id);
247
247
  const sector = resolveSector(id);
248
-
248
+
249
249
  secExp[sector] = (secExp[sector] || 0) + inv;
250
250
  assetExp[ticker] = (assetExp[ticker] || 0) + inv;
251
251
 
252
252
  if (!secProfits[sector]) secProfits[sector] = { profit: 0, weight: 0 };
253
- secProfits[sector].profit += profVal;
253
+ secProfits[sector].profit += profVal;
254
254
  secProfits[sector].weight += inv;
255
255
  });
256
256
 
@@ -261,13 +261,13 @@ class SignedInUserProfileMetrics extends Computation {
261
261
  const cid = String(m.ParentCID || m.CID); // Adjust based on exact schema
262
262
  const inv = m.Invested || m.Amount || 0;
263
263
  const profPct = m.NetProfit || 0;
264
-
265
- totalInv += inv;
264
+
265
+ totalInv += inv;
266
266
  totalProf += (inv * (profPct / 100));
267
267
 
268
268
  // LOOKUP: Get PI details from the global reference data
269
269
  const rank = globalRankings.find(r => String(r.pi_id || r.CustomerId) === cid);
270
-
270
+
271
271
  result.copiedPIs.data.push({
272
272
  cid,
273
273
  username: resolveUsername(cid),
@@ -277,8 +277,8 @@ class SignedInUserProfileMetrics extends Computation {
277
277
  pendingClosure: m.PendingForClosure === true,
278
278
  isRanked: !!rank,
279
279
  // Enrich with Reference Data
280
- rankData: rank ? {
281
- riskScore: rules.rankings.getRiskScore(rank),
280
+ rankData: rank ? {
281
+ riskScore: rules.rankings.getRiskScore(rank),
282
282
  gain: rules.rankings.getTotalGain(rank),
283
283
  aum: rules.rankings.getAUMTier(rank)
284
284
  } : null
@@ -300,20 +300,20 @@ class SignedInUserProfileMetrics extends Computation {
300
300
  if (p > bestP) { bestP = p; bestS = sec; }
301
301
  if (p < worstP) { worstP = p; worstS = sec; }
302
302
  });
303
- result.sectorPerformance = {
304
- bestSector: bestS,
305
- worstSector: worstS,
303
+ result.sectorPerformance = {
304
+ bestSector: bestS,
305
+ worstSector: worstS,
306
306
  bestSectorProfit: bestS ? Number(bestP.toFixed(2)) : 0,
307
307
  worstSectorProfit: worstS ? Number(worstP.toFixed(2)) : 0
308
308
  };
309
-
309
+
310
310
  // Exposure Charts
311
311
  if (totalInv > 0) {
312
- Object.keys(secExp).forEach(k => result.sectorExposure.data[k] = Number(((secExp[k]/totalInv)*100).toFixed(2)));
312
+ Object.keys(secExp).forEach(k => result.sectorExposure.data[k] = Number(((secExp[k] / totalInv) * 100).toFixed(2)));
313
313
  Object.entries(assetExp)
314
- .sort((a,b) => b[1] - a[1])
314
+ .sort((a, b) => b[1] - a[1])
315
315
  .slice(0, 10)
316
- .forEach(([k,v]) => result.assetExposure.data[k] = Number(((v/totalInv)*100).toFixed(2)));
316
+ .forEach(([k, v]) => result.assetExposure.data[k] = Number(((v / totalInv) * 100).toFixed(2)));
317
317
  }
318
318
  }
319
319
 
@@ -93,7 +93,6 @@ class RunAnalyzer {
93
93
 
94
94
  // 5. Dependency Checks
95
95
  const missingDeps = [];
96
- let hasDataDrift = false;
97
96
 
98
97
  for (const dep of dependencies) {
99
98
  const depEntry = this.manifestMap.get(dep);
@@ -107,8 +106,10 @@ class RunAnalyzer {
107
106
  } else if (stored?.dependencyResultHashes) {
108
107
  // Check for data drift
109
108
  const lastSeenResultHash = stored.dependencyResultHashes[dep];
109
+
110
+ // Compare the hash of the dependency when WE ran vs what it is NOW
110
111
  if (lastSeenResultHash && depStatus.resultHash !== lastSeenResultHash) {
111
- hasDataDrift = true;
112
+
112
113
  if (!needsRun) {
113
114
  needsRun = true;
114
115
  runReason = `Dependency data changed: ${dep}`;
@@ -10,7 +10,6 @@ const crypto = require('crypto');
10
10
  const { MaterializedViewManager } = require('./MaterializedViewManager');
11
11
 
12
12
  // SAFETY CONFIGURATION
13
- // You can move these to your main config file if preferred
14
13
  const DEFAULT_SAFETY_LIMIT_GB = 10; // Max GB per query
15
14
  const MAX_LOOKBACK_DAYS = 60;
16
15
  const BATCH_GROWTH_WARNING_THRESHOLD = 5;
@@ -269,6 +268,7 @@ class DataFetcher {
269
268
 
270
269
  const tableConfig = this.tables[table] || {};
271
270
  const { dateField, entityField, dataField } = tableConfig;
271
+ console.log(`[DEBUG] fetchBatched for '${table}'. Config Found: ${!!this.tables[table]}. entityField: ${entityField}`);
272
272
  const physicalTable = tableConfig.tableName || table;
273
273
 
274
274
  const query = await this.queryBuilder.build({
@@ -17,7 +17,7 @@ class StateRepository {
17
17
  constructor(config, logger = null) {
18
18
  this.config = config;
19
19
  this.logger = logger || console;
20
-
20
+
21
21
  this.bigquery = new BigQuery({
22
22
  projectId: config.bigquery.projectId,
23
23
  location: config.bigquery.location
@@ -156,6 +156,8 @@ class StateRepository {
156
156
  location: this.config.bigquery.location
157
157
  });
158
158
 
159
+ this._log('INFO', `getResult('${computationName}', '${dateStr}') table=${table} rows=${rows.length}`);
160
+
159
161
  if (rows.length === 0) {
160
162
  this.resultCache.set(cacheKey, null);
161
163
  return null;
@@ -213,8 +215,8 @@ class StateRepository {
213
215
 
214
216
  const [rows] = await this.bigquery.query({
215
217
  query,
216
- params: {
217
- targetDate: dateStr,
218
+ params: {
219
+ targetDate: dateStr,
218
220
  compName: computationName.toLowerCase(),
219
221
  entityId: String(entityId)
220
222
  },
@@ -243,11 +245,11 @@ class StateRepository {
243
245
  */
244
246
  async getBatchEntityResults(dateStr, computationName, entityIds) {
245
247
  if (!entityIds || entityIds.length === 0) return {};
246
-
248
+
247
249
  const cacheKeyPrefix = `${dateStr}:${computationName.toLowerCase()}`;
248
250
  const results = {};
249
251
  const uncachedIds = [];
250
-
252
+
251
253
  // Check cache first
252
254
  for (const entityId of entityIds) {
253
255
  const key = `${cacheKeyPrefix}:${entityId}`;
@@ -257,9 +259,9 @@ class StateRepository {
257
259
  uncachedIds.push(entityId);
258
260
  }
259
261
  }
260
-
262
+
261
263
  if (uncachedIds.length === 0) return results;
262
-
264
+
263
265
  // Fetch uncached in batch
264
266
  try {
265
267
  const table = this.config.resultStore?.table || 'computation_results';
@@ -275,8 +277,8 @@ class StateRepository {
275
277
 
276
278
  const [rows] = await this.bigquery.query({
277
279
  query,
278
- params: {
279
- targetDate: dateStr,
280
+ params: {
281
+ targetDate: dateStr,
280
282
  compName: computationName.toLowerCase(),
281
283
  entityIds: uncachedIds.map(String)
282
284
  },
@@ -290,11 +292,11 @@ class StateRepository {
290
292
  if (typeof data === 'string') {
291
293
  try { data = JSON.parse(data); } catch (e) { /* keep */ }
292
294
  }
293
-
295
+
294
296
  results[entityId] = data;
295
297
  this.resultCache.set(`${cacheKeyPrefix}:${entityId}`, data);
296
298
  }
297
-
299
+
298
300
  return results;
299
301
  } catch (e) {
300
302
  this._log('ERROR', `Batch fetch failed: ${e.message}`);