bulltrackers-module 1.0.733 → 1.0.734
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.
- package/functions/computation-system-v2/README.md +152 -0
- package/functions/computation-system-v2/computations/PopularInvestorProfileMetrics.js +720 -0
- package/functions/computation-system-v2/computations/PopularInvestorRiskAssessment.js +176 -0
- package/functions/computation-system-v2/computations/PopularInvestorRiskMetrics.js +294 -0
- package/functions/computation-system-v2/computations/TestComputation.js +46 -0
- package/functions/computation-system-v2/computations/UserPortfolioSummary.js +172 -0
- package/functions/computation-system-v2/config/bulltrackers.config.js +317 -0
- package/functions/computation-system-v2/framework/core/Computation.js +73 -0
- package/functions/computation-system-v2/framework/core/Manifest.js +223 -0
- package/functions/computation-system-v2/framework/core/RuleInjector.js +53 -0
- package/functions/computation-system-v2/framework/core/Rules.js +231 -0
- package/functions/computation-system-v2/framework/core/RunAnalyzer.js +163 -0
- package/functions/computation-system-v2/framework/cost/CostTracker.js +154 -0
- package/functions/computation-system-v2/framework/data/DataFetcher.js +399 -0
- package/functions/computation-system-v2/framework/data/QueryBuilder.js +232 -0
- package/functions/computation-system-v2/framework/data/SchemaRegistry.js +287 -0
- package/functions/computation-system-v2/framework/execution/Orchestrator.js +498 -0
- package/functions/computation-system-v2/framework/execution/TaskRunner.js +35 -0
- package/functions/computation-system-v2/framework/execution/middleware/CostTrackerMiddleware.js +32 -0
- package/functions/computation-system-v2/framework/execution/middleware/LineageMiddleware.js +32 -0
- package/functions/computation-system-v2/framework/execution/middleware/Middleware.js +14 -0
- package/functions/computation-system-v2/framework/execution/middleware/ProfilerMiddleware.js +47 -0
- package/functions/computation-system-v2/framework/index.js +45 -0
- package/functions/computation-system-v2/framework/lineage/LineageTracker.js +147 -0
- package/functions/computation-system-v2/framework/monitoring/Profiler.js +80 -0
- package/functions/computation-system-v2/framework/resilience/Checkpointer.js +66 -0
- package/functions/computation-system-v2/framework/scheduling/ScheduleValidator.js +327 -0
- package/functions/computation-system-v2/framework/storage/StateRepository.js +286 -0
- package/functions/computation-system-v2/framework/storage/StorageManager.js +469 -0
- package/functions/computation-system-v2/framework/storage/index.js +9 -0
- package/functions/computation-system-v2/framework/testing/ComputationTester.js +86 -0
- package/functions/computation-system-v2/framework/utils/Graph.js +205 -0
- package/functions/computation-system-v2/handlers/dispatcher.js +109 -0
- package/functions/computation-system-v2/handlers/index.js +23 -0
- package/functions/computation-system-v2/handlers/onDemand.js +289 -0
- package/functions/computation-system-v2/handlers/scheduler.js +327 -0
- package/functions/computation-system-v2/index.js +163 -0
- package/functions/computation-system-v2/rules/index.js +49 -0
- package/functions/computation-system-v2/rules/instruments.js +465 -0
- package/functions/computation-system-v2/rules/metrics.js +304 -0
- package/functions/computation-system-v2/rules/portfolio.js +534 -0
- package/functions/computation-system-v2/rules/rankings.js +655 -0
- package/functions/computation-system-v2/rules/social.js +562 -0
- package/functions/computation-system-v2/rules/trades.js +545 -0
- package/functions/computation-system-v2/scripts/migrate-sectors.js +73 -0
- package/functions/computation-system-v2/test/test-dispatcher.js +317 -0
- package/functions/computation-system-v2/test/test-framework.js +500 -0
- package/functions/computation-system-v2/test/test-real-execution.js +166 -0
- package/functions/computation-system-v2/test/test-real-integration.js +194 -0
- package/functions/computation-system-v2/test/test-refactor-e2e.js +131 -0
- package/functions/computation-system-v2/test/test-results.json +31 -0
- package/functions/computation-system-v2/test/test-risk-metrics-computation.js +329 -0
- package/functions/computation-system-v2/test/test-scheduler.js +204 -0
- package/functions/computation-system-v2/test/test-storage.js +449 -0
- package/functions/orchestrator/index.js +18 -26
- package/package.json +3 -2
package/functions/computation-system-v2/framework/storage/StateRepository.js
@@ -0,0 +1,286 @@
+/**
+ * @fileoverview State Repository
+ * * Manages the read-access to the system's state:
+ * 1. Loading daily execution status (hashes, timestamps)
+ * 2. Loading previous results (for dependencies and history)
+ * 3. Caching results for performance
+ */
+
+const { BigQuery } = require('@google-cloud/bigquery');
+
+class StateRepository {
+  /**
+   * @param {Object} config - System configuration
+   * @param {Object} [logger] - Logger instance
+   */
+  constructor(config, logger = null) {
+    this.config = config;
+    this.logger = logger || console;
+
+    this.bigquery = new BigQuery({
+      projectId: config.bigquery.projectId,
+      location: config.bigquery.location
+    });
+
+    // Caches
+    this.statusCache = new Map(); // dateStr -> Map<compName, statusObj>
+    this.resultCache = new Map(); // key -> resultObj
+  }
+
+  /**
+   * Load status of all computations for a given date.
+   * @param {string} dateStr - Target date (YYYY-MM-DD)
+   * @returns {Promise<Map<string, Object>>} Map of computation name -> status
+   */
+  async getDailyStatus(dateStr) {
+    if (this.statusCache.has(dateStr)) {
+      return this.statusCache.get(dateStr);
+    }
+
+    const statusMap = new Map();
+    const table = this.config.resultStore?.table || 'computation_results';
+    const fullTable = `\`${this.config.bigquery.projectId}.${this.config.bigquery.dataset}.${table}\``;
+
+    try {
+      const query = `
+        SELECT
+          computation_name,
+          ANY_VALUE(code_hash) as code_hash,
+          ANY_VALUE(result_hash) as result_hash,
+          ANY_VALUE(dependency_result_hashes) as dependency_result_hashes,
+          COUNT(*) as entity_count,
+          MAX(updated_at) as updated_at
+        FROM ${fullTable}
+        WHERE date = @targetDate
+        GROUP BY computation_name
+      `;
+
+      const [rows] = await this.bigquery.query({
+        query,
+        params: { targetDate: dateStr },
+        location: this.config.bigquery.location
+      });
+
+      for (const row of rows) {
+        let depResultHashes = row.dependency_result_hashes;
+        if (typeof depResultHashes === 'string') {
+          try { depResultHashes = JSON.parse(depResultHashes); } catch (e) { depResultHashes = {}; }
+        }
+
+        statusMap.set(row.computation_name.toLowerCase(), {
+          hash: row.code_hash,
+          resultHash: row.result_hash,
+          dependencyResultHashes: depResultHashes || {},
+          entityCount: parseInt(row.entity_count, 10),
+          updatedAt: row.updated_at
+        });
+      }
+    } catch (e) {
+      if (e.message?.includes('Not found') || e.code === 404) {
+        this._log('DEBUG', `No status found for ${dateStr} (table may not exist)`);
+      } else {
+        this._log('WARN', `Failed to load status for ${dateStr}: ${e.message}`);
+      }
+    }
+
+    this.statusCache.set(dateStr, statusMap);
+    return statusMap;
+  }
+
+  /**
+   * Update the local status cache after a write.
+   * @param {string} dateStr
+   * @param {string} name
+   * @param {Object} statusObj
+   */
+  updateStatusCache(dateStr, name, statusObj) {
+    const status = this.statusCache.get(dateStr) || new Map();
+    status.set(name.toLowerCase(), statusObj);
+    this.statusCache.set(dateStr, status);
+  }
+
+  /**
+   * Load a computation result (global or all entities).
+   */
+  async getResult(dateStr, computationName) {
+    const cacheKey = `${dateStr}:${computationName.toLowerCase()}`;
+    if (this.resultCache.has(cacheKey)) {
+      return this.resultCache.get(cacheKey);
+    }
+
+    try {
+      const table = this.config.resultStore?.table || 'computation_results';
+      const fullTable = `\`${this.config.bigquery.projectId}.${this.config.bigquery.dataset}.${table}\``;
+
+      const query = `
+        SELECT entity_id, result_data
+        FROM ${fullTable}
+        WHERE date = @targetDate AND computation_name = @compName
+      `;
+
+      const [rows] = await this.bigquery.query({
+        query,
+        params: { targetDate: dateStr, compName: computationName.toLowerCase() },
+        location: this.config.bigquery.location
+      });
+
+      if (rows.length === 0) {
+        this.resultCache.set(cacheKey, null);
+        return null;
+      }
+
+      const resultMap = {};
+      for (const row of rows) {
+        const entityId = row.entity_id || '_global';
+        let data = row.result_data;
+        if (typeof data === 'string') {
+          try { data = JSON.parse(data); } catch (e) { /* keep as string */ }
+        }
+        resultMap[entityId] = data;
+      }
+
+      this.resultCache.set(cacheKey, resultMap);
+      return resultMap;
+    } catch (e) {
+      if (!e.message?.includes('Not found')) {
+        this._log('WARN', `Failed to load result for ${computationName}: ${e.message}`);
+      }
+      return null;
+    }
+  }
+
+  /**
+   * Load a specific entity's result (memory efficient).
+   */
+  async getEntityResult(dateStr, computationName, entityId) {
+    // Check full cache first
+    const fullCacheKey = `${dateStr}:${computationName.toLowerCase()}`;
+    if (this.resultCache.has(fullCacheKey)) {
+      const fullResult = this.resultCache.get(fullCacheKey);
+      return fullResult?.[entityId] || null;
+    }
+
+    // Check specific cache
+    const entityCacheKey = `${fullCacheKey}:${entityId}`;
+    if (this.resultCache.has(entityCacheKey)) {
+      return this.resultCache.get(entityCacheKey);
+    }
+
+    try {
+      const table = this.config.resultStore?.table || 'computation_results';
+      const fullTable = `\`${this.config.bigquery.projectId}.${this.config.bigquery.dataset}.${table}\``;
+
+      const query = `
+        SELECT result_data
+        FROM ${fullTable}
+        WHERE date = @targetDate
+          AND computation_name = @compName
+          AND entity_id = @entityId
+        LIMIT 1
+      `;
+
+      const [rows] = await this.bigquery.query({
+        query,
+        params: {
+          targetDate: dateStr,
+          compName: computationName.toLowerCase(),
+          entityId: String(entityId)
+        },
+        location: this.config.bigquery.location
+      });
+
+      if (rows.length === 0) {
+        this.resultCache.set(entityCacheKey, null);
+        return null;
+      }
+
+      let data = rows[0].result_data;
+      if (typeof data === 'string') {
+        try { data = JSON.parse(data); } catch (e) { /* keep */ }
+      }
+
+      this.resultCache.set(entityCacheKey, data);
+      return data;
+    } catch (e) {
+      return null;
+    }
+  }
+
+  /**
+   * Get multiple entity results in a single query (batch lazy load)
+   * FIXED: This solves the N+1 problem by allowing the Executor to fetch dependencies
+   * for an entire processing batch in one go.
+   */
+  async getBatchEntityResults(dateStr, computationName, entityIds) {
+    if (!entityIds || entityIds.length === 0) return {};
+
+    const cacheKeyPrefix = `${dateStr}:${computationName.toLowerCase()}`;
+    const results = {};
+    const uncachedIds = [];
+
+    // Check cache first
+    for (const entityId of entityIds) {
+      const key = `${cacheKeyPrefix}:${entityId}`;
+      if (this.resultCache.has(key)) {
+        results[entityId] = this.resultCache.get(key);
+      } else {
+        uncachedIds.push(entityId);
+      }
+    }
+
+    if (uncachedIds.length === 0) return results;
+
+    // Fetch uncached in batch
+    try {
+      const table = this.config.resultStore?.table || 'computation_results';
+      const fullTable = `\`${this.config.bigquery.projectId}.${this.config.bigquery.dataset}.${table}\``;
+
+      const query = `
+        SELECT entity_id, result_data
+        FROM ${fullTable}
+        WHERE date = @targetDate
+          AND computation_name = @compName
+          AND entity_id IN UNNEST(@entityIds)
+      `;
+
+      const [rows] = await this.bigquery.query({
+        query,
+        params: {
+          targetDate: dateStr,
+          compName: computationName.toLowerCase(),
+          entityIds: uncachedIds.map(String)
+        },
+        location: this.config.bigquery.location
+      });
+
+      // Parse and cache
+      for (const row of rows) {
+        const entityId = row.entity_id;
+        let data = row.result_data;
+        if (typeof data === 'string') {
+          try { data = JSON.parse(data); } catch (e) { /* keep */ }
+        }
+
+        results[entityId] = data;
+        this.resultCache.set(`${cacheKeyPrefix}:${entityId}`, data);
+      }
+
+      return results;
+    } catch (e) {
+      this._log('ERROR', `Batch fetch failed: ${e.message}`);
+      return results;
+    }
+  }
+
+  cacheResult(dateStr, computationName, result) {
+    const cacheKey = `${dateStr}:${computationName.toLowerCase()}`;
+    this.resultCache.set(cacheKey, result);
+  }
+
+  _log(level, message) {
+    if (this.logger?.log) this.logger.log(level, `[StateRepo] ${message}`);
+    else console.log(`[${level}] [StateRepo] ${message}`);
+  }
+}
+
+module.exports = { StateRepository };
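For orientation, here is a minimal usage sketch of the StateRepository added above. It is not part of the package diff: the require path, project and dataset names, date, and entity IDs are illustrative assumptions; the config shape simply mirrors the fields the class reads (bigquery.projectId, bigquery.dataset, bigquery.location, and the optional resultStore.table).

// Illustrative usage sketch (not part of the package diff).
// Assumes GCP credentials are available and the results table exists.
const { StateRepository } = require('./functions/computation-system-v2/framework/storage/StateRepository'); // hypothetical require path

const config = {
  bigquery: { projectId: 'my-project', dataset: 'bulltrackers', location: 'US' }, // hypothetical values
  resultStore: { table: 'computation_results' }                                   // same default the class falls back to
};

async function main() {
  const repo = new StateRepository(config); // logger defaults to console

  // Daily status map, keyed by lowercased computation name; cached per date after the first query.
  const status = await repo.getDailyStatus('2025-06-01');
  console.log(status.get('popularinvestorriskmetrics'));

  // Batched dependency fetch: one BigQuery query for the whole entity list instead of N single lookups.
  const deps = await repo.getBatchEntityResults('2025-06-01', 'UserPortfolioSummary', ['1001', '1002']);
  console.log(Object.keys(deps));
}

main().catch(console.error);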