bulltrackers-module 1.0.230 → 1.0.232
- package/functions/computation-system/WorkflowOrchestrator.js +10 -30
- package/functions/computation-system/persistence/ResultCommitter.js +10 -10
- package/functions/generic-api/helpers/api_helpers.js +165 -52
- package/functions/generic-api/index.js +124 -76
- package/package.json +1 -1
- package/functions/computation-system/controllers/computation_controller.js +0 -146
package/functions/computation-system/WorkflowOrchestrator.js

@@ -1,6 +1,6 @@
 /**
  * @fileoverview Main Orchestrator. Coordinates the topological execution.
- * UPDATED:
+ * UPDATED: Removed legacy boolean 'true' logic.
  */
 const { normalizeName } = require('./utils/utils');
 const { checkRootDataAvailability } = require('./data/AvailabilityChecker');

@@ -10,7 +10,6 @@ const { StandardExecutor } = require('./executor
 const { MetaExecutor } = require('./executors/MetaExecutor');
 const { generateProcessId, PROCESS_TYPES } = require('./logger/logger');

-// New Status Constant
 const STATUS_IMPOSSIBLE = 'IMPOSSIBLE';

 function groupByPass(manifest) {

@@ -20,21 +19,14 @@ function groupByPass(manifest) {
   }, {});
 }

-/**
- * Performs strict analysis of what can run.
- * IMPOSSIBLE LOGIC:
- * 1. If Root Data is missing AND Date != Today -> IMPOSSIBLE.
- * 2. If Dependency is IMPOSSIBLE -> IMPOSSIBLE.
- * 3. IMPOSSIBLE items are written to DB to prevent future retries.
- */
 function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus, manifestMap) {
   const report = {
     runnable: [],
-    blocked: [],
-    impossible: [],
-    failedDependency: [],
-    reRuns: [],
-    skipped: []
+    blocked: [],
+    impossible: [],
+    failedDependency: [],
+    reRuns: [],
+    skipped: []
   };

   const isTargetToday = (dateStr === new Date().toISOString().slice(0, 10));

@@ -44,9 +36,7 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus, manifestMap) {
     const storedDepHash = dailyStatus[norm];
     const depManifest = manifestMap.get(norm);

-    // Check 1: Is dependency IMPOSSIBLE? (Logic handled in main loop, but safe to check here)
     if (storedDepHash === STATUS_IMPOSSIBLE) return false;
-
     if (!storedDepHash) return false;
     if (!depManifest) return false;
     if (storedDepHash !== depManifest.hash) return false;

@@ -59,7 +49,7 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus, manifestMap) {
     const storedHash = dailyStatus[cName];
     const currentHash = calc.hash;

-    // 1. Check
+    // 1. Check Impossible
     if (storedHash === STATUS_IMPOSSIBLE) {
       report.skipped.push({ name: cName, reason: 'Permanently Impossible' });
       continue;

@@ -78,7 +68,6 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus, manifestMap) {
     }

     if (missingRoots.length > 0) {
-      // LOGIC: If date is NOT today, missing root data is fatal and permanent.
       if (!isTargetToday) {
         report.impossible.push({ name: cName, reason: `Missing Root Data: ${missingRoots.join(', ')} (Historical)` });
       } else {

@@ -95,10 +84,8 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus, manifestMap) {
     for (const dep of calc.dependencies) {
       const normDep = normalizeName(dep);

-      // Check if the dependency is marked IMPOSSIBLE in the DB
       if (dailyStatus[normDep] === STATUS_IMPOSSIBLE) {
         dependencyIsImpossible = true;
-        // We can break early, if one input is impossible, the result is impossible.
         break;
       }

@@ -109,7 +96,6 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus, manifestMap) {
     }

     if (dependencyIsImpossible) {
-      // Propagate the Impossible Status
       report.impossible.push({ name: cName, reason: 'Dependency is Impossible' });
       continue;
     }

@@ -119,14 +105,13 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus, manifestMap) {
       continue;
     }

-    // 4. Hash / State Check
-    if (!storedHash || storedHash === false) {
+    // 4. Hash / State Check (Legacy 'true' logic removed)
+    if (!storedHash || storedHash === false) {
       report.runnable.push(calc);
     } else if (storedHash !== currentHash) {
       report.reRuns.push({ name: cName, oldHash: storedHash, newHash: currentHash });
-    } else if (storedHash === true) {
-      report.reRuns.push({ name: cName, reason: 'Legacy Upgrade' });
     } else {
+      // Stored Hash === Current Hash
       report.skipped.push({ name: cName });
     }
   }

@@ -162,7 +147,6 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
   if (logger && typeof logger.logDateAnalysis === 'function') {
     logger.logDateAnalysis(dateStr, analysisReport);
   } else {
-    // Safe fallback
     const logMsg = `[Analysis] Date: ${dateStr} | Runnable: ${analysisReport.runnable.length} | Blocked: ${analysisReport.blocked.length} | Impossible: ${analysisReport.impossible.length}`;
     if (logger && logger.info) logger.info(logMsg);
     else console.log(logMsg);

@@ -170,12 +154,8 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d

   // 5. UPDATE STATUS FOR NON-RUNNABLE ITEMS
   const statusUpdates = {};
-
-  // A. Mark BLOCKED as 'false' (Transient Failure)
   analysisReport.blocked.forEach(item => statusUpdates[item.name] = false);
   analysisReport.failedDependency.forEach(item => statusUpdates[item.name] = false);
-
-  // B. Mark IMPOSSIBLE as 'IMPOSSIBLE' (Permanent Failure - Overwrites existing status)
   analysisReport.impossible.forEach(item => statusUpdates[item.name] = STATUS_IMPOSSIBLE);

   if (Object.keys(statusUpdates).length > 0) {
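Note on the status values above: each computation/date cell in the daily status document now carries one of three kinds of value, and the analysis loop branches on them. A minimal sketch of that decision (illustrative only; interpretStatus is not a function in the package):

    const STATUS_IMPOSSIBLE = 'IMPOSSIBLE';

    function interpretStatus(storedHash, currentHash) {
      if (storedHash === STATUS_IMPOSSIBLE) return 'skip-forever'; // permanent: never retried
      if (!storedHash || storedHash === false) return 'run';       // never ran, or transient failure
      if (storedHash !== currentHash) return 're-run';             // calc code changed since last success
      return 'skip';                                               // stored hash matches current hash
    }

With the dedicated 'Legacy Upgrade' branch removed, a legacy boolean true simply fails the hash comparison and is queued as an ordinary re-run.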
package/functions/computation-system/persistence/ResultCommitter.js

@@ -45,12 +45,19 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
       await commitBatchInChunks(config, deps, updates, `${name} Results`);

       // Structured Storage Log
-      logger
+      if (logger && logger.logStorage) {
+        logger.logStorage(pid, name, dStr, mainDocRef.path, totalSize, isSharded);
+      }

-
+      // Update success tracking
+      if (calc.manifest.hash) {
+        successUpdates[name] = calc.manifest.hash;
+      }
     }
   } catch (e) {
-    logger.log
+    if (logger && logger.log) {
+      logger.log('ERROR', `Commit failed for ${name}`, { processId: pid, error: e.message });
+    }
   }
 }

@@ -62,10 +69,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
   return successUpdates;
 }

-// ... rest of file (calculateFirestoreBytes, prepareAutoShardedWrites) remains same ...
-// Just ensure prepareAutoShardedWrites uses the provided logger if it logs internal warnings.
 function calculateFirestoreBytes(value) {
-  // ... same as before
   if (value === null) return 1;
   if (value === undefined) return 0;
   if (typeof value === 'boolean') return 1;

@@ -79,8 +83,6 @@ function calculateFirestoreBytes(value) {
 }

 async function prepareAutoShardedWrites(result, docRef, logger) {
-  // ... same logic, just ensure existing logs inside here use the logger properly if needed
-  // Copied from previous logic, essentially checks size > 900KB and splits
   const SAFETY_THRESHOLD_BYTES = 1000 * 1024;
   const OVERHEAD_ALLOWANCE = 20 * 1024;
   const CHUNK_LIMIT = SAFETY_THRESHOLD_BYTES - OVERHEAD_ALLOWANCE;

@@ -94,8 +96,6 @@ async function prepareAutoShardedWrites(result, docRef, logger) {

   if ((totalSize + docPathSize) < CHUNK_LIMIT) { const data = { ...result, _completed: true, _sharded: false }; return [{ ref: docRef, data, options: { merge: true } }]; }

-  // Note: We don't log "Sharding..." here anymore because we log the structured event in commitResults
-
   for (const [key, value] of Object.entries(result)) {
     if (key.startsWith('_')) continue;
     const keySize = Buffer.byteLength(key, 'utf8') + 1;
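For context on the sharding constants above: Firestore documents max out at roughly 1 MiB, so the committer keeps a safety margin and splits any larger result across shard documents by top-level key. A rough sketch of the threshold decision (illustrative; needsSharding is not part of the package):

    const SAFETY_THRESHOLD_BYTES = 1000 * 1024; // just under Firestore's ~1 MiB document limit
    const OVERHEAD_ALLOWANCE = 20 * 1024;       // headroom for the doc path and metadata fields
    const CHUNK_LIMIT = SAFETY_THRESHOLD_BYTES - OVERHEAD_ALLOWANCE;

    function needsSharding(totalSize, docPathSize) {
      // Mirrors the check in prepareAutoShardedWrites: a single merge write
      // is used only when the payload plus its path fits under the limit.
      return (totalSize + docPathSize) >= CHUNK_LIMIT;
    }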
package/functions/generic-api/helpers/api_helpers.js

@@ -1,40 +1,121 @@
 /**
  * @fileoverview API sub-pipes.
- * REFACTORED:
- * NEW: getDynamicSchema now reads static schema.
- * NEW: createManifestHandler filters out STALE schemas (>7 days old).
+ * REFACTORED: Implements Status-Based Availability Caching and Smart Date Resolution.
  */

 const { FieldPath } = require('@google-cloud/firestore');

+// --- AVAILABILITY CACHE ---
+// Maintains a map of which computations are available on which dates.
+class AvailabilityCache {
+  constructor(db, logger, ttlMs = 5 * 60 * 1000) { // 5 Minute TTL
+    this.db = db;
+    this.logger = logger;
+    this.ttlMs = ttlMs;
+    this.cache = null; // { "calcName": ["2023-10-01", "2023-09-30"] } (Sorted DESC)
+    this.lastFetched = 0;
+    this.statusCollection = 'computation_status';
+  }
+
+  async getMap() {
+    const now = Date.now();
+    if (this.cache && (now - this.lastFetched < this.ttlMs)) {
+      return this.cache;
+    }
+
+    this.logger.log('INFO', '[AvailabilityCache] Refreshing availability map from Firestore...');
+
+    // Fetch last 400 days of status to build the map
+    // We only fetch keys and small status objects, so this is relatively cheap.
+    const snapshot = await this.db.collection(this.statusCollection)
+      .orderBy(FieldPath.documentId(), 'desc')
+      .limit(400)
+      .get();
+
+    const newMap = {};
+
+    snapshot.forEach(doc => {
+      const dateStr = doc.id;
+      const statusData = doc.data();
+
+      // Regex to validate date format YYYY-MM-DD
+      if (!/^\d{4}-\d{2}-\d{2}$/.test(dateStr)) return;
+
+      for (const [calcName, status] of Object.entries(statusData)) {
+        // We consider it available if status is truthy and NOT 'IMPOSSIBLE'
+        if (status && status !== 'IMPOSSIBLE') {
+          if (!newMap[calcName]) newMap[calcName] = [];
+          newMap[calcName].push(dateStr);
+        }
+      }
+    });
+
+    this.cache = newMap;
+    this.lastFetched = now;
+    this.logger.log('INFO', `[AvailabilityCache] Refreshed. Tracked ${Object.keys(newMap).length} computations.`);
+    return this.cache;
+  }
+}

 /**
- *
+ * Helper: Resolve which dates to fetch based on mode and availability.
  */
-
-
-
-
-
-
-
-
-  if (end < start) return "'endDate' must be after 'startDate'.";
+async function resolveTargetDates(availabilityCache, computationKeys, mode, limit) {
+  const map = await availabilityCache.getMap();
+  const datesToFetch = new Set();
+
+  // 1. Identify all available dates for the requested computations
+  // We union the dates: if ANY requested calc is available on a date, we consider that date "relevant".
+  // (Alternatively, we could intersect, but union allows sparse data return).
+  const relevantDatesSet = new Set();

-
-
-
+  computationKeys.forEach(key => {
+    const dates = map[key] || [];
+    dates.forEach(d => relevantDatesSet.add(d));
+  });
+
+  // Sort descending
+  const sortedDates = Array.from(relevantDatesSet).sort((a, b) => b.localeCompare(a));
+
+  if (sortedDates.length === 0) return [];
+
+  if (mode === 'latest') {
+    // Return only the most recent date found
+    return [sortedDates[0]];
+  }

-  if (
+  if (mode === 'series') {
+    // Return the last N available dates
+    return sortedDates.slice(0, limit);
+  }
+
+  return [];
+}
+
+/**
+ * Sub-pipe: pipe.api.helpers.validateRequest
+ */
+const validateRequest = (query) => {
+  if (!query.computations) return "Missing 'computations' parameter.";

+  // New optional params, but computations is mandatory
+  const allowedModes = ['latest', 'series'];
+  if (query.mode && !allowedModes.includes(query.mode)) {
+    return "Invalid 'mode'. Must be 'latest' or 'series'.";
+  }
+
+  if (query.mode === 'series') {
+    const limit = parseInt(query.limit);
+    if (query.limit && (isNaN(limit) || limit < 1 || limit > 365)) {
+      return "Invalid 'limit'. Must be between 1 and 365.";
+    }
+  }
+
   return null;
 };

 /**
  * Sub-pipe: pipe.api.helpers.buildCalculationMap
- * --- CRITICAL UPDATE ---
- * This function now stores the class itself in the map,
- * which is required by the /manifest/generate endpoint.
  */
 const buildCalculationMap = (unifiedCalculations) => {
   const calcMap = {};
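Usage of the new cache, for orientation (a sketch under assumptions: db is a Firestore instance, logger exposes .log(level, msg), and 'someCalc' is a made-up computation name):

    const cache = new AvailabilityCache(db, logger);   // default 5-minute TTL
    const map = await cache.getMap();                  // { calcName: ['2023-10-01', '2023-09-30', ...] }
    const newest = (map['someCalc'] || [])[0];         // date lists are sorted descending

Repeated calls within the TTL return the in-memory map without touching Firestore; the first call after expiry re-reads up to 400 recent status documents.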
@@ -55,21 +136,8 @@ const buildCalculationMap = (unifiedCalculations) => {
 };

 /**
- *
-
-const getDateStringsInRange = (startDate, endDate) => {
-  const dates = [];
-  const current = new Date(startDate + 'T00:00:00Z');
-  const end = new Date(endDate + 'T00:00:00Z');
-  while (current <= end) {
-    dates.push(current.toISOString().slice(0, 10));
-    current.setUTCDate(current.getUTCDate() + 1);
-  }
-  return dates;
-};
-
-/**
- * Sub-pipe: pipe.api.helpers.fetchData
+ * Sub-pipe: pipe.api.helpers.fetchUnifiedData
+ * UPDATED: Uses specific date list derived from availability.
  */
 const fetchUnifiedData = async (config, dependencies, calcKeys, dateStrings, calcMap) => {
   const { db, logger } = dependencies;

@@ -78,11 +146,14 @@ const fetchUnifiedData = async (config, dependencies, calcKeys, dateStrings, calcMap) => {
   const resultsSub = config.resultsSubcollection || 'results';
   const compsSub = config.computationsSubcollection || 'computations';

+  if (dateStrings.length === 0) return {};
+
   try {
+    const readPromises = [];
+
+    // Prepare all reads
     for (const date of dateStrings) {
-      response[date] = {};
-      const docRefs = [];
-      const keyPaths = [];
+      response[date] = {}; // Init structure

       for (const key of calcKeys) {
         const pathInfo = calcMap[key];

@@ -90,25 +161,34 @@ const fetchUnifiedData = async (config, dependencies, calcKeys, dateStrings, calcMap) => {
           const docRef = db.collection(insightsCollection).doc(date)
             .collection(resultsSub).doc(pathInfo.category)
             .collection(compsSub).doc(key);
-
-
-        } else {
-          logger.log('WARN', `[${date}] No path info found for computation key: ${key}`);
+
+          readPromises.push({ date, key, ref: docRef });
         }
       }
+    }
+
+    if (readPromises.length === 0) return response;
+
+    // Batch reads in chunks (Firestore getAll limit is usually high, but let's be safe)
+    const CHUNK_SIZE = 100;
+    for (let i = 0; i < readPromises.length; i += CHUNK_SIZE) {
+      const chunk = readPromises.slice(i, i + CHUNK_SIZE);
+      const refs = chunk.map(item => item.ref);

-
+      const snapshots = await db.getAll(...refs);

-
-
-      const key = keyPaths[i];
+      snapshots.forEach((doc, idx) => {
+        const { date, key } = chunk[idx];
         if (doc.exists) {
           response[date][key] = doc.data();
         } else {
+          // Start sparse: don't populate nulls to keep payload small?
+          // Or populate null to indicate "checked but missing"
           response[date][key] = null;
         }
       });
     }
+
   } catch (error) {
     logger.log('ERROR', 'API: Error fetching data from Firestore.', { errorMessage: error.message });
     throw new Error('Failed to retrieve computation data.');

@@ -118,26 +198,59 @@ const fetchUnifiedData = async (config, dependencies, calcKeys, dateStrings, calcMap) => {

 /**
  * Factory for the main API handler.
+ * UPDATED: Uses AvailabilityCache to determine dates.
  */
 const createApiHandler = (config, dependencies, calcMap) => {
-  const { logger } = dependencies;
+  const { logger, db } = dependencies;
+
+  // Instantiate Cache (Singleton scope within the closure)
+  const availabilityCache = new AvailabilityCache(db, logger);
+
   return async (req, res) => {
-    const validationError = validateRequest(req.query
+    const validationError = validateRequest(req.query);
     if (validationError) {
       logger.log('WARN', 'API Bad Request', { error: validationError, query: req.query });
       return res.status(400).send({ status: 'error', message: validationError });
     }
+
     try {
       const computationKeys = req.query.computations.split(',');
-      const
+      const mode = req.query.mode || 'latest'; // Default to latest
+      const limit = parseInt(req.query.limit) || 30; // Default 30 days for series
+
+      // 1. Resolve Dates based on Availability
+      const dateStrings = await resolveTargetDates(availabilityCache, computationKeys, mode, limit);
+
+      if (dateStrings.length === 0) {
+        return res.status(200).send({
+          status: 'success',
+          metadata: {
+            computations: computationKeys,
+            mode,
+            count: 0,
+            dates: []
+          },
+          data: {}
+        });
+      }
+
+      // 2. Fetch Data for these dates
       const data = await fetchUnifiedData(config, dependencies, computationKeys, dateStrings, calcMap);
+
+      // 3. Cleanup sparse dates (optional: remove dates where all requested keys are null)
+      // For now, we return what was fetched.
+
       res.set('Cache-Control', 'public, max-age=300, s-maxage=3600');
       res.status(200).send({
         status: 'success',
         metadata: {
           computations: computationKeys,
-
-
+          mode,
+          limit: mode === 'series' ? limit : 1,
+          dateRange: {
+            start: dateStrings[dateStrings.length - 1], // Oldest
+            end: dateStrings[0] // Newest
+          }
         },
         data,
       });

@@ -148,6 +261,7 @@ const createApiHandler = (config, dependencies, calcMap) => {
   };
 };

+// ... (Previous Helper Functions: getComputationStructure, getDynamicSchema, createManifestHandler stay the same) ...
 /**
  * Internal helper for snippet generation.
  */

@@ -315,7 +429,6 @@ const createManifestHandler = (config, dependencies, calcMap) => {
   };
 };

-
 module.exports = {
   validateRequest,
   buildCalculationMap,
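For reference, a hypothetical round-trip against the refactored handler (names and dates are made up; the response shape follows the metadata/data construction above):

    // GET /?computations=someCalc,otherCalc&mode=series&limit=7
    {
      "status": "success",
      "metadata": {
        "computations": ["someCalc", "otherCalc"],
        "mode": "series",
        "limit": 7,
        "dateRange": { "start": "2023-09-25", "end": "2023-10-01" }
      },
      "data": {
        "2023-10-01": { "someCalc": { ... }, "otherCalc": null },
        "2023-09-30": { "someCalc": { ... }, "otherCalc": { ... } }
      }
    }

A null entry means the date was resolved as available for at least one requested computation, but this particular document did not exist.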
package/functions/generic-api/index.js

@@ -1,142 +1,190 @@
 /**
  * @fileoverview Main entry point for the Generic API module.
- *
- * REFACTORED:
- * REFACTORED: /manifest/generate endpoint now reads static schema from class.
- *
- * --- MODIFIED: Added in-memory cache wrapper for the main API handler ---
+ * Export the 'createApiApp' main pipe function.
+ * REFACTORED: API V3 - Status-Aware Data Fetching.
  */

 const express = require('express');
 const cors = require('cors');
-const {
-const { buildCalculationMap, createApiHandler, getComputationStructure,createManifestHandler, getDynamicSchema } = require('./helpers/api_helpers.js');
+const { buildCalculationMap, createApiHandler, getComputationStructure, createManifestHandler, getDynamicSchema } = require('./helpers/api_helpers.js');

 /**
- *
- *
- * to any Express request handler.
- * @param {function} handler - The original (req, res) handler to wrap.
- * @param {object} dependencies - { logger }
- * @returns {function} The new (req, res) handler with caching logic.
+ * In-Memory Cache Handler
+ * Wrapper that adds TTL cache to GET requests.
  */
 const createCacheHandler = (handler, { logger }) => {
-  // 1. Cache
   const CACHE = {};
-  const CACHE_TTL_MS = 10 * 60 * 1000;
-
-  return async (req, res) => {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  const CACHE_TTL_MS = 10 * 60 * 1000; // 10 Minutes
+
+  return async (req, res) => {
+    // Cache Key now includes mode and limit
+    const cacheKey = req.url;
+    const now = Date.now();
+
+    if (CACHE[cacheKey] && (now - CACHE[cacheKey].timestamp) < CACHE_TTL_MS) {
+      logger.log('INFO', `[API] Cache HIT for ${cacheKey}`);
+      return res.status(CACHE[cacheKey].status).send(CACHE[cacheKey].data);
+    }
+
+    logger.log('INFO', `[API] Cache MISS for ${cacheKey}`);
+
+    const originalSend = res.send;
+    const originalStatus = res.status;
+    let capturedData = null;
+    let capturedStatus = 200;
+
+    res.status = (statusCode) => {
+      capturedStatus = statusCode;
+      return originalStatus.call(res, statusCode);
+    };
+
+    res.send = (data) => {
+      capturedData = data;
+      return originalSend.call(res, data);
+    };
+
+    await handler(req, res);
+
+    if (capturedStatus === 200 && capturedData) {
+      logger.log('INFO', `[API] Caching new entry for ${cacheKey}`);
+      CACHE[cacheKey] = {
+        data: capturedData,
+        status: capturedStatus,
+        timestamp: now
+      };
+    }
+  };
 };


 /**
  * Main pipe: pipe.api.createApiApp
- * Creates and configures the Express app for the Generic API.
- * @param {object} config - The Generic API V2 configuration object.
- * @param {object} dependencies - Shared dependencies { db, logger }.
- * @param {Object} unifiedCalculations - The calculations manifest from 'aiden-shared-calculations-unified'.
- * @returns {express.Application} The configured Express app.
  */
 function createApiApp(config, dependencies, unifiedCalculations) {
   const app = express();
   const { logger, db } = dependencies;

-  //
+  // Build Calc Map once
   const calcMap = buildCalculationMap(unifiedCalculations);

-  //
+  // Middleware
   app.use(cors({ origin: true }));
   app.use(express.json());

-  // --- Main API Endpoint ---
+  // --- Main API V3 Endpoint ---
+  // createApiHandler now initializes the AvailabilityCache internally
   const originalApiHandler = createApiHandler(config, dependencies, calcMap);
   const cachedApiHandler = createCacheHandler(originalApiHandler, dependencies);
+
+  // This handler now supports ?mode=latest and ?mode=series&limit=X
   app.get('/', cachedApiHandler);

-  //
+  // Health Check
   app.get('/health', (req, res) => { res.status(200).send('OK'); });

-  //
+  // Debug: List keys
   app.get('/list-computations', (req, res) => {
-    try {
-
-
-
+    try {
+      const computationKeys = Object.keys(calcMap);
+      res.status(200).send({
+        status: 'success',
+        count: computationKeys.length,
+        computations: computationKeys.sort(),
+      });
+    } catch (error) {
+      logger.log('ERROR', 'API /list-computations failed.', { errorMessage: error.message });
+      res.status(500).send({ status: 'error', message: 'An internal error occurred.' });
+    }
+  });

-  //
+  // Structure Inspection
   app.get('/structure/:computationName', async (req, res) => {
     const { computationName } = req.params;
     const result = await getComputationStructure(computationName, calcMap, config, dependencies);
-    if (result.status === 'error') {
-
-
+    if (result.status === 'error') {
+      const statusCode = result.message.includes('not found') ? 404 : 500;
+      return res.status(statusCode).send(result);
+    }
+    res.status(200).send(result);
+  });
+
+  // Manifests (Schema Generation)
   app.get('/manifest', createManifestHandler(config, dependencies, calcMap));
+
+  // Manual Schema Gen Trigger
   app.post('/manifest/generate/:computationName', async (req, res) => {
     const { computationName } = req.params;
     logger.log('INFO', `Manual static schema generation requested for: ${computationName}`);

     try {
-      // 1. Find the calculation class from the calcMap
       const calcInfo = calcMap[computationName];
-      if (!calcInfo || !calcInfo.class) {
+      if (!calcInfo || !calcInfo.class) {
+        return res.status(404).send({ status: 'error', message: `Computation '${computationName}' not found.` });
+      }
       const targetCalcClass = calcInfo.class;
       const targetCategory = calcInfo.category;

-      // 2. Use the getDynamicSchema helper (which now just reads the static method)
       const schemaStructure = await getDynamicSchema(targetCalcClass, computationName);
-      if (schemaStructure.ERROR) {
+      if (schemaStructure.ERROR) {
+        return res.status(400).send({ status: 'error', message: `Failed to get static schema: ${schemaStructure.ERROR}` });
+      }

-      // 3. Import the new batchStoreSchemas utility
       const { batchStoreSchemas } = require('../computation-system/utils/schema_capture.js');

-
-
-
-
-
-
-
-
+      const metadata = {
+        isHistorical: !!(targetCalcClass.toString().includes('yesterdayPortfolio')),
+        dependencies: (typeof targetCalcClass.getDependencies === 'function') ? targetCalcClass.getDependencies() : [],
+        rootDataDependencies: [],
+        type: (targetCategory === 'meta' || targetCategory === 'socialPosts') ? targetCategory : 'standard',
+        note: "Manually generated via API"
+      };
+
+      await batchStoreSchemas(dependencies, config, [{
+        name: computationName,
+        category: targetCategory,
+        schema: schemaStructure,
+        metadata: metadata
+      }]);
+
+      res.status(200).send({
+        status: 'success',
+        message: `Static schema read and stored for ${computationName}`,
+        computation: computationName,
+        category: targetCategory,
+        schema: schemaStructure
+      });

     } catch (error) {
-      logger.log('ERROR', `Failed to generate schema for ${computationName}`, { errorMessage: error.message
-      res.status(
+      logger.log('ERROR', `Failed to generate schema for ${computationName}`, { errorMessage: error.message });
+      res.status(500).send({ status: 'error', message: `Failed: ${error.message}` });
     }
   });

-
- * This endpoint is fine as-is. It reads from the Firestore
- * collection that the /manifest and /manifest/generate routes populate.
- */
+  // Single Manifest Get
   app.get('/manifest/:computationName', async (req, res) => {
     const { computationName } = req.params;
     try {
       const schemaCollection = config.schemaCollection || 'computation_schemas';
       const schemaDoc = await db.collection(schemaCollection).doc(computationName).get();
-      if (!schemaDoc.exists) {
+      if (!schemaDoc.exists) {
+        return res.status(404).send({ status: 'error', message: `Schema not found for ${computationName}` });
+      }
       const data = schemaDoc.data();
-      res.status(200).send({
-
+      res.status(200).send({
+        status: 'success',
+        computation: computationName,
+        category: data.category,
+        structure: data.schema,
+        metadata: data.metadata || {},
+        lastUpdated: data.lastUpdated
+      });
+    } catch (error) {
+      logger.log('ERROR', `Failed to fetch schema for ${computationName}`, { errorMessage: error.message });
+      res.status(500).send({ status: 'error', message: 'An internal error occurred.' });
+    }
+  });
+
   return app;
 }

-
 module.exports = { createApiApp, helpers: require('./helpers/api_helpers.js') };
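How the exported factory might be wired up (a minimal sketch, assuming a Firestore db, a logger with .log(level, msg, meta), and a calculations manifest object; the require path and config keys shown are illustrative):

    const { createApiApp } = require('bulltrackers-module/functions/generic-api');

    const app = createApiApp(
      { resultsSubcollection: 'results', computationsSubcollection: 'computations' }, // config
      { db, logger },                                                                  // shared dependencies
      unifiedCalculations                                                              // calculations manifest
    );
    app.listen(8080);

Note that createCacheHandler keys its 10-minute in-memory cache on req.url, so ?mode=latest and ?mode=series&limit=7 are cached independently, on top of the Cache-Control: public, max-age=300, s-maxage=3600 header set per response.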
package/package.json
CHANGED (+1 -1: version bumped from 1.0.230 to 1.0.232)
package/functions/computation-system/controllers/computation_controller.js

@@ -1,146 +0,0 @@
-/**
- * FIXED: computation_controller.js
- * V5.1: Exports LEGACY_MAPPING for Manifest Builder
- */
-
-// Load all layers dynamically from the index
-const mathLayer = require('../layers/index');
-const { loadDailyInsights, loadDailySocialPostInsights, getRelevantShardRefs, getPriceShardRefs } = require('../utils/data_loader');
-
-// Legacy Keys Mapping (Ensures backward compatibility with existing Calculations)
-// Maps the new modular class names to the property names expected by existing code (e.g. math.extract)
-const LEGACY_MAPPING = {
-  DataExtractor: 'extract',
-  HistoryExtractor: 'history',
-  MathPrimitives: 'compute',
-  Aggregators: 'aggregate',
-  Validators: 'validate',
-  SignalPrimitives: 'signals',
-  SCHEMAS: 'schemas',
-  DistributionAnalytics: 'distribution',
-  TimeSeries: 'TimeSeries',
-  priceExtractor: 'priceExtractor',
-  InsightsExtractor: 'insights',
-  UserClassifier: 'classifier',
-  Psychometrics: 'psychometrics',
-  CognitiveBiases: 'bias',
-  SkillAttribution: 'skill',
-  ExecutionAnalytics: 'execution',
-  AdaptiveAnalytics: 'adaptive'
-};
-
-class DataLoader {
-  constructor(config, dependencies) { this.config = config; this.deps = dependencies; this.cache = { mappings: null, insights: new Map(), social: new Map(), prices: null }; }
-  get mappings() { return this.cache.mappings; }
-  async loadMappings() { if (this.cache.mappings) return this.cache.mappings; const { calculationUtils } = this.deps; this.cache.mappings = await calculationUtils.loadInstrumentMappings(); return this.cache.mappings; }
-  async loadInsights(dateStr) { if (this.cache.insights.has(dateStr)) return this.cache.insights.get(dateStr); const insights = await loadDailyInsights(this.config, this.deps, dateStr); this.cache.insights.set(dateStr, insights); return insights; }
-  async loadSocial(dateStr) { if (this.cache.social.has(dateStr)) return this.cache.social.get(dateStr); const social = await loadDailySocialPostInsights(this.config, this.deps, dateStr); this.cache.social.set(dateStr, social); return social; }
-  async getPriceShardReferences() { return getPriceShardRefs(this.config, this.deps); }
-  async getSpecificPriceShardReferences(targetInstrumentIds) { return getRelevantShardRefs(this.config, this.deps, targetInstrumentIds); }
-  async loadPriceShard(docRef) { try { const snap = await docRef.get(); if (!snap.exists) return {}; return snap.data(); } catch (e) { console.error(`Error loading shard ${docRef.path}:`, e); return {}; } }
-}
-
-class ContextBuilder {
-  static buildMathContext() {
-    const mathContext = {};
-    for (const [key, value] of Object.entries(mathLayer)) { mathContext[key] = value; const legacyKey = LEGACY_MAPPING[key]; if (legacyKey) { mathContext[legacyKey] = value; } }
-    return mathContext;
-  }
-  static buildPerUserContext(options) {
-    const { todayPortfolio, yesterdayPortfolio, todayHistory, yesterdayHistory, userId, userType, dateStr, metadata, mappings, insights, socialData, computedDependencies, previousComputedDependencies, config, deps } = options;
-    return {
-      user: { id: userId, type: userType, portfolio: { today: todayPortfolio, yesterday: yesterdayPortfolio }, history: { today: todayHistory, yesterday: yesterdayHistory } },
-      date: { today: dateStr },
-      insights: { today: insights?.today, yesterday: insights?.yesterday },
-      social: { today: socialData?.today, yesterday: socialData?.yesterday },
-      mappings: mappings || {},
-      math: ContextBuilder.buildMathContext(),
-      computed: computedDependencies || {},
-      previousComputed: previousComputedDependencies || {},
-      meta: metadata, config, deps
-    };
-  }
-
-  static buildMetaContext(options) {
-    const { dateStr, metadata, mappings, insights, socialData, prices, computedDependencies, previousComputedDependencies, config, deps } = options;
-    return {
-      date: { today: dateStr },
-      insights: { today: insights?.today, yesterday: insights?.yesterday },
-      social: { today: socialData?.today, yesterday: socialData?.yesterday },
-      prices: prices || {},
-      mappings: mappings || {},
-      math: ContextBuilder.buildMathContext(),
-      computed: computedDependencies || {},
-      previousComputed: previousComputedDependencies || {},
-      meta: metadata, config, deps
-    };
-  }
-}
-
-class ComputationExecutor {
-  constructor(config, dependencies, dataLoader) {
-    this.config = config;
-    this.deps = dependencies;
-    this.loader = dataLoader;
-  }
-
-  async executePerUser(calcInstance, metadata, dateStr, portfolioData, yesterdayPortfolioData, historyData, computedDeps, prevDeps) {
-    const { logger } = this.deps;
-    const targetUserType = metadata.userType;
-    const mappings = await this.loader.loadMappings();
-    const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await this.loader.loadInsights(dateStr) } : null;
-
-    // Access SCHEMAS dynamically from the loaded layer
-    const SCHEMAS = mathLayer.SCHEMAS;
-
-    for (const [userId, todayPortfolio] of Object.entries(portfolioData)) {
-      const yesterdayPortfolio = yesterdayPortfolioData ? yesterdayPortfolioData[userId] : null;
-      const todayHistory = historyData ? historyData[userId] : null;
-      const actualUserType = todayPortfolio.PublicPositions ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
-      if (targetUserType !== 'all') {
-        const mappedTarget = (targetUserType === 'speculator') ? SCHEMAS.USER_TYPES.SPECULATOR : SCHEMAS.USER_TYPES.NORMAL;
-        if (mappedTarget !== actualUserType) continue;
-      }
-      const context = ContextBuilder.buildPerUserContext({ todayPortfolio, yesterdayPortfolio, todayHistory, userId, userType: actualUserType, dateStr, metadata, mappings, insights, computedDependencies: computedDeps, previousComputedDependencies: prevDeps, config: this.config, deps: this.deps });
-      try { await calcInstance.process(context); } catch (e) { logger.log('WARN', `Calc ${metadata.name} failed for user ${userId}: ${e.message}`); }
-    }
-  }
-
-  async executeOncePerDay(calcInstance, metadata, dateStr, computedDeps, prevDeps) {
-    const mappings = await this.loader.loadMappings();
-    const { logger } = this.deps;
-    const insights = metadata.rootDataDependencies?.includes('insights') ? { today: await this.loader.loadInsights(dateStr) } : null;
-    const social = metadata.rootDataDependencies?.includes('social') ? { today: await this.loader.loadSocial(dateStr) } : null;
-
-    if (metadata.rootDataDependencies?.includes('price')) {
-      logger.log('INFO', `[Executor] Running Batched/Sharded Execution for ${metadata.name}`);
-      const shardRefs = await this.loader.getPriceShardReferences();
-      if (shardRefs.length === 0) { logger.log('WARN', '[Executor] No price shards found.'); return {}; }
-      let processedCount = 0;
-      for (const ref of shardRefs) {
-        const shardData = await this.loader.loadPriceShard(ref);
-        const partialContext = ContextBuilder.buildMetaContext({ dateStr, metadata, mappings, insights, socialData: social, prices: { history: shardData }, computedDependencies: computedDeps, previousComputedDependencies: prevDeps, config: this.config, deps: this.deps });
-        await calcInstance.process(partialContext);
-        partialContext.prices = null;
-        processedCount++;
-        if (processedCount % 10 === 0) { if (global.gc) { global.gc(); } }
-      }
-      logger.log('INFO', `[Executor] Finished Batched Execution for ${metadata.name} (${processedCount} shards).`);
-      return calcInstance.getResult ? await calcInstance.getResult() : {};
-    } else {
-      const context = ContextBuilder.buildMetaContext({ dateStr, metadata, mappings, insights, socialData: social, prices: {}, computedDependencies: computedDeps, previousComputedDependencies: prevDeps, config: this.config, deps: this.deps });
-      return await calcInstance.process(context);
-    }
-  }
-}
-
-class ComputationController {
-  constructor(config, dependencies) {
-    this.config = config;
-    this.deps = dependencies;
-    this.loader = new DataLoader(config, dependencies);
-    this.executor = new ComputationExecutor(config, dependencies, this.loader);
-  }
-}
-
-module.exports = { ComputationController, LEGACY_MAPPING };
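One migration note on the deletion above: buildMathContext exposed every math layer class under both its modular name and a legacy alias, so under the old controller the following held (sketch of the removed behavior):

    const ctx = ContextBuilder.buildMathContext();
    ctx.extract === ctx.DataExtractor;   // true — 'extract' was the legacy alias
    ctx.compute === ctx.MathPrimitives;  // true

Code that still reaches for the legacy alias names (math.extract, math.compute, and so on) or imports LEGACY_MAPPING from this controller will no longer resolve them from this module as of 1.0.232.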