bulltrackers-module 1.0.218 → 1.0.220
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/WorkflowOrchestrator.js +153 -0
- package/functions/computation-system/context/ContextFactory.js +63 -0
- package/functions/computation-system/context/ManifestBuilder.js +240 -0
- package/functions/computation-system/controllers/computation_controller.js +12 -4
- package/functions/computation-system/data/AvailabilityChecker.js +75 -0
- package/functions/computation-system/data/CachedDataLoader.js +63 -0
- package/functions/computation-system/data/DependencyFetcher.js +70 -0
- package/functions/computation-system/executors/MetaExecutor.js +68 -0
- package/functions/computation-system/executors/PriceBatchExecutor.js +99 -0
- package/functions/computation-system/executors/StandardExecutor.js +115 -0
- package/functions/computation-system/helpers/computation_dispatcher.js +3 -3
- package/functions/computation-system/helpers/computation_worker.js +44 -18
- package/functions/computation-system/layers/extractors.js +9 -3
- package/functions/computation-system/layers/mathematics.js +1 -1
- package/functions/computation-system/persistence/FirestoreUtils.js +64 -0
- package/functions/computation-system/persistence/ResultCommitter.js +118 -0
- package/functions/computation-system/persistence/StatusRepository.js +23 -0
- package/functions/computation-system/topology/HashManager.js +35 -0
- package/functions/computation-system/utils/utils.js +38 -10
- package/index.js +8 -3
- package/package.json +1 -1
- package/functions/computation-system/helpers/computation_manifest_builder.js +0 -291
- package/functions/computation-system/helpers/computation_pass_runner.js +0 -129
- package/functions/computation-system/helpers/orchestration_helpers.js +0 -352
package/functions/computation-system/persistence/ResultCommitter.js
ADDED
@@ -0,0 +1,118 @@
+/**
+ * @fileoverview Handles saving computation results with transparent auto-sharding.
+ */
+const { commitBatchInChunks } = require('./FirestoreUtils');
+const { updateComputationStatus } = require('./StatusRepository');
+const { batchStoreSchemas } = require('../utils/schema_capture');
+
+async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false) {
+  const successUpdates = {};
+  const schemas = [];
+
+  for (const name in stateObj) {
+    const calc = stateObj[name];
+    try {
+      const result = await calc.getResult();
+      if (!result) {
+        deps.logger.log('INFO', `${name} for ${dStr}: Skipped (Empty Result)`);
+        continue;
+      }
+
+      const mainDocRef = deps.db.collection(config.resultsCollection)
+        .doc(dStr)
+        .collection(config.resultsSubcollection)
+        .doc(calc.manifest.category)
+        .collection(config.computationsSubcollection)
+        .doc(name);
+
+      const updates = await prepareAutoShardedWrites(result, mainDocRef, deps.logger);
+
+      if (calc.manifest.class.getSchema) {
+        const { class: _cls, ...safeMetadata } = calc.manifest;
+        schemas.push({
+          name,
+          category: calc.manifest.category,
+          schema: calc.manifest.class.getSchema(),
+          metadata: safeMetadata
+        });
+      }
+
+      if (updates.length > 0) {
+        await commitBatchInChunks(config, deps, updates, `${name} Results`);
+        successUpdates[name] = calc.manifest.hash || true;
+        const isSharded = updates.some(u => u.data._sharded === true);
+        deps.logger.log('INFO', `${name} for ${dStr}: \u2714 Success (Written ${isSharded ? 'Sharded' : 'Standard'})`);
+      } else {
+        deps.logger.log('INFO', `${name} for ${dStr}: - Empty Data`);
+      }
+    } catch (e) {
+      deps.logger.log('ERROR', `${name} for ${dStr}: \u2716 FAILED Commit: ${e.message}`);
+    }
+  }
+
+  if (schemas.length) batchStoreSchemas(deps, config, schemas).catch(() => {});
+
+  if (!skipStatusWrite && Object.keys(successUpdates).length > 0) {
+    await updateComputationStatus(dStr, successUpdates, config, deps);
+    deps.logger.log('INFO', `[${passName}] Updated status document for ${Object.keys(successUpdates).length} successful computations.`);
+  }
+  return successUpdates;
+}
+
+function calculateFirestoreBytes(value) {
+  if (value === null) return 1;
+  if (value === undefined) return 0;
+  if (typeof value === 'boolean') return 1;
+  if (typeof value === 'number') return 8;
+  if (typeof value === 'string') return Buffer.byteLength(value, 'utf8') + 1;
+  if (value instanceof Date) return 8;
+  if (value.constructor && value.constructor.name === 'DocumentReference') { return Buffer.byteLength(value.path, 'utf8') + 16; }
+  if (Array.isArray(value)) { let sum = 0; for (const item of value) sum += calculateFirestoreBytes(item); return sum; }
+  if (typeof value === 'object') { let sum = 0; for (const k in value) { if (Object.prototype.hasOwnProperty.call(value, k)) { sum += (Buffer.byteLength(k, 'utf8') + 1) + calculateFirestoreBytes(value[k]); } } return sum; }
+  return 0;
+}
+
+async function prepareAutoShardedWrites(result, docRef, logger) {
+  const SAFETY_THRESHOLD_BYTES = 1000 * 1024; // 1MB Limit
+  const OVERHEAD_ALLOWANCE = 20 * 1024;
+  const CHUNK_LIMIT = SAFETY_THRESHOLD_BYTES - OVERHEAD_ALLOWANCE;
+  const totalSize = calculateFirestoreBytes(result);
+  const docPathSize = Buffer.byteLength(docRef.path, 'utf8') + 16;
+
+  if ((totalSize + docPathSize) < CHUNK_LIMIT) {
+    const data = { ...result, _completed: true, _sharded: false };
+    return [{ ref: docRef, data, options: { merge: true } }];
+  }
+
+  logger.log('INFO', `[AutoShard] Result size ~${Math.round(totalSize / 1024)}KB exceeds limit. Sharding...`);
+  const writes = [];
+  const shardCollection = docRef.collection('_shards');
+  let currentChunk = {};
+  let currentChunkSize = 0;
+  let shardIndex = 0;
+
+  for (const [key, value] of Object.entries(result)) {
+    if (key.startsWith('_')) continue;
+    const keySize = Buffer.byteLength(key, 'utf8') + 1;
+    const valueSize = calculateFirestoreBytes(value);
+    const itemSize = keySize + valueSize;
+
+    if (currentChunkSize + itemSize > CHUNK_LIMIT) {
+      writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: currentChunk, options: { merge: false } });
+      shardIndex++;
+      currentChunk = {};
+      currentChunkSize = 0;
+    }
+    currentChunk[key] = value;
+    currentChunkSize += itemSize;
+  }
+  if (Object.keys(currentChunk).length > 0) {
+    writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: currentChunk, options: { merge: false } });
+  }
+
+  const pointerData = { _completed: true, _sharded: true, _shardCount: shardIndex + 1, _lastUpdated: new Date().toISOString() };
+  writes.push({ ref: docRef, data: pointerData, options: { merge: false } });
+  return writes;
+}
+
+module.exports = { commitResults };
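Note: results written by `prepareAutoShardedWrites` come back in two shapes. Small results live inline on the main document (`_sharded: false`), while large ones leave only a pointer document plus a `_shards` subcollection. Below is a minimal read-side sketch assuming the Firebase Admin SDK; `readAutoShardedResult` is a hypothetical helper, not part of this package:

```js
// Hypothetical reader for documents written by commitResults() above.
async function readAutoShardedResult(docRef) {
  const snap = await docRef.get();
  if (!snap.exists) return null;

  const data = snap.data();
  if (!data._sharded) return data; // small result: fields stored inline

  // Pointer document: merge every shard's fields back into one object.
  const shards = await docRef.collection('_shards').get();
  const merged = {};
  shards.forEach(shardSnap => Object.assign(merged, shardSnap.data()));
  return merged;
}
```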
package/functions/computation-system/persistence/StatusRepository.js
ADDED
@@ -0,0 +1,23 @@
+/**
+ * @fileoverview Manages computation status tracking in Firestore.
+ */
+
+async function fetchComputationStatus(dateStr, config, { db }) {
+  const key = dateStr;
+  if (!key) throw new Error('fetchStatus requires a key');
+  const collection = config.computationStatusCollection || 'computation_status';
+  const docRef = db.collection(collection).doc(key);
+  const snap = await docRef.get();
+  return snap.exists ? snap.data() : {};
+}
+
+async function updateComputationStatus(dateStr, updates, config, { db }) {
+  if (!dateStr) throw new Error('updateStatus requires a key');
+  if (!updates || Object.keys(updates).length === 0) return;
+  const collection = config.computationStatusCollection || 'computation_status';
+  const docRef = db.collection(collection).doc(dateStr);
+  await docRef.set(updates, { merge: true });
+  return true;
+}
+
+module.exports = { fetchComputationStatus, updateComputationStatus };
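A usage sketch for the two repository functions, assuming a standard `firebase-admin` setup; the date key and hash values are illustrative:

```js
const admin = require('firebase-admin');
admin.initializeApp();

const deps = { db: admin.firestore() };
const config = { computationStatusCollection: 'computation_status' };

// Record two successful computations for a day, then read the status back.
async function example() {
  await updateComputationStatus('2024-01-15', { 'calc-a': 'abc123', 'calc-b': true }, config, deps);
  const status = await fetchComputationStatus('2024-01-15', config, deps);
  console.log(Object.keys(status)); // ['calc-a', 'calc-b']
}
```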
package/functions/computation-system/topology/HashManager.js
ADDED
@@ -0,0 +1,35 @@
+/**
+ * @fileoverview Manages code versioning, hashing, and legacy mappings.
+ */
+const crypto = require('crypto');
+
+// Legacy Keys Mapping (Ensures backward compatibility)
+const LEGACY_MAPPING = {
+  DataExtractor: 'extract',
+  HistoryExtractor: 'history',
+  MathPrimitives: 'compute',
+  Aggregators: 'aggregate',
+  Validators: 'validate',
+  SignalPrimitives: 'signals',
+  SCHEMAS: 'schemas',
+  DistributionAnalytics: 'distribution',
+  TimeSeries: 'TimeSeries',
+  priceExtractor: 'priceExtractor',
+  InsightsExtractor: 'insights',
+  UserClassifier: 'classifier',
+  Psychometrics: 'psychometrics',
+  CognitiveBiases: 'bias',
+  SkillAttribution: 'skill',
+  ExecutionAnalytics: 'execution',
+  AdaptiveAnalytics: 'adaptive'
+};
+
+function generateCodeHash(codeString) {
+  if (!codeString) return 'unknown';
+  let clean = codeString.replace(/\/\/.*$/gm, '');
+  clean = clean.replace(/\/\*[\s\S]*?\*\//g, '');
+  clean = clean.replace(/\s+/g, '');
+  return crypto.createHash('sha256').update(clean).digest('hex');
+}
+
+module.exports = { LEGACY_MAPPING, generateCodeHash };
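Because `generateCodeHash` strips comments and whitespace before hashing, cosmetic edits do not invalidate cached results, while behavioral changes do. A small sketch (the require path is assumed):

```js
const { generateCodeHash } = require('./HashManager');

const a = generateCodeHash('function f(x) { return x + 1; }');
const b = generateCodeHash('function f(x) {\n  // add one\n  return x + 1;\n}');
console.log(a === b); // true: comments and whitespace are ignored

const c = generateCodeHash('function f(x) { return x + 2; }');
console.log(a === c); // false: the code itself changed
```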
package/functions/computation-system/utils/utils.js
CHANGED
@@ -19,12 +19,34 @@ function generateCodeHash(codeString) {
   return crypto.createHash('sha256').update(clean).digest('hex');
 }
 
+/**
+ * Executes a function with exponential backoff retry logic.
+ * @param {Function} fn - Async function to execute
+ * @param {string} operationName - Label for logging
+ * @param {number} maxRetries - Max attempts (default 3)
+ */
+async function withRetry(fn, operationName, maxRetries = 3) {
+  let attempt = 0;
+  while (attempt < maxRetries) {
+    try {
+      return await fn();
+    } catch (error) {
+      attempt++;
+      console.warn(`[Retry] ${operationName} failed (Attempt ${attempt}/${maxRetries}): ${error.message}`);
+      if (attempt >= maxRetries) throw error;
+      // Exponential backoff: 1s, 2s, 4s...
+      await new Promise(resolve => setTimeout(resolve, 1000 * Math.pow(2, attempt - 1)));
+    }
+  }
+}
+
 /** * Stage 2: Commit a batch of writes in chunks
  * FIXED: Now respects write.options (e.g. { merge: false }) to allow overwrites/deletes.
  */
 async function commitBatchInChunks(config, deps, writes, operationName) {
-  const { db, logger
-
+  const { db, logger } = deps;
+  // Use the local withRetry if not provided in deps
+  const retryFn = (deps.calculationUtils && deps.calculationUtils.withRetry) ? deps.calculationUtils.withRetry : withRetry;
 
   if (!writes || !writes.length) {
     logger.log('WARN', `[${operationName}] No writes to commit.`);
@@ -42,7 +64,7 @@ async function commitBatchInChunks(config, deps, writes, operationName) {
   const commitAndReset = async () => {
     if (currentOpsCount > 0) {
       try {
-        await
+        await retryFn(
           () => currentBatch.commit(),
           `${operationName} (Chunk ${batchIndex})`
         );
@@ -94,8 +116,7 @@ function getExpectedDateStrings(startDate, endDate) {
 
 /** Stage 4: Get the earliest date in a *flat* collection where doc IDs are dates. */
 async function getFirstDateFromSimpleCollection(config, deps, collectionName) {
-  const { db, logger
-  const { withRetry } = calculationUtils;
+  const { db, logger } = deps;
   try {
     if (!collectionName) { logger.log('WARN', `[Core Utils] Collection name not provided for simple date query.`); return null; }
     const query = db.collection(collectionName).where(FieldPath.documentId(), '>=', '2000-01-01').orderBy(FieldPath.documentId(), 'asc').limit(1);
@@ -107,8 +128,7 @@ async function getFirstDateFromSimpleCollection(config, deps, collectionName) {
 
 /** Stage 4: Get the earliest date in a sharded collection */
 async function getFirstDateFromCollection(config, deps, collectionName) {
-  const { db, logger
-  const { withRetry } = calculationUtils;
+  const { db, logger } = deps;
   let earliestDate = null;
   try {
     if (!collectionName) { logger.log('WARN', `[Core Utils] Collection name not provided for sharded date query.`); return null; }
@@ -165,8 +185,7 @@ async function getEarliestDataDates(config, deps) {
 }
 
 async function getFirstDateFromPriceCollection(config, deps) {
-  const { db, logger
-  const { withRetry } = calculationUtils;
+  const { db, logger } = deps;
   const collection = config.priceCollection || 'asset_prices';
   try {
     const snapshot = await withRetry(() => db.collection(collection).limit(10).get(), `GetPriceShards(${collection})`);
@@ -184,4 +203,13 @@ async function getFirstDateFromPriceCollection(config, deps) {
   } catch (e) { logger.log('ERROR', `Failed to get earliest price date from ${collection}`, { errorMessage: e.message }); return null; }
 }
 
-module.exports = {
+module.exports = {
+  FieldValue,
+  FieldPath,
+  normalizeName,
+  commitBatchInChunks,
+  getExpectedDateStrings,
+  getEarliestDataDates,
+  generateCodeHash,
+  withRetry // Exported here
+};
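The newly exported `withRetry` wraps any async operation; callers pass a thunk so each attempt re-executes the operation. A sketch (the require path, collection, and document names are illustrative):

```js
const { withRetry } = require('./utils');

// Retries a flaky Firestore read up to 3 times, sleeping 1s then 2s between attempts.
async function loadPriceDoc(db) {
  const snap = await withRetry(
    () => db.collection('asset_prices').doc('2024-01-15').get(),
    'LoadPriceDoc'
  );
  return snap.exists ? snap.data() : null;
}
```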
package/index.js
CHANGED
@@ -43,14 +43,19 @@ const taskEngine = {
   handleUpdate: require('./functions/task-engine/helpers/update_helpers').handleUpdate
 };
 
-// ---
-const { build: buildManifestFunc } = require('./functions/computation-system/
+// --- UPDATED IMPORT: Point to the new Context Domain ---
+const { build: buildManifestFunc } = require('./functions/computation-system/context/ManifestBuilder');
 
 // Computation System
 const computationSystem = {
-
+  // UPDATED: Point to the new Workflow Orchestrator
+  runComputationPass: require('./functions/computation-system/WorkflowOrchestrator').runComputationPass,
+
+  // These helpers wrap the Orchestrator, so they stay, but we updated their internals (see below)
   dispatchComputationPass: require('./functions/computation-system/helpers/computation_dispatcher').dispatchComputationPass,
   handleComputationTask: require('./functions/computation-system/helpers/computation_worker').handleComputationTask,
+
+  // Utils
   dataLoader: require('./functions/computation-system/utils/data_loader'),
   computationUtils: require('./functions/computation-system/utils/utils'),
   buildManifest: buildManifestFunc
package/package.json
CHANGED

package/functions/computation-system/helpers/computation_manifest_builder.js
REMOVED
@@ -1,291 +0,0 @@
-/**
- * @fileoverview
- * Dynamic Manifest Builder (v6 - Merkle Tree Dependency Hashing)
- *
- * KEY FEATURES:
- * 1. Smart Layer Hashing: Detects used layers (Math, Extractors) to avoid stale helper code.
- * 2. Cascading Invalidation (Merkle Hashing):
- *    The final hash of a computation is derived from:
- *    [Own Code] + [Layer States] + [Hashes of all Dependencies]
- *    This guarantees that if Calculation A is updated, Calculation B (which depends on A)
- *    will automatically generate a new hash, forcing the system to re-run it.
- */
-
-const { generateCodeHash } = require('../utils/utils');
-
-// 1. Import Layers directly to generate their "State Hashes"
-const MathematicsLayer = require('../layers/mathematics');
-const ExtractorsLayer = require('../layers/extractors');
-const ProfilingLayer = require('../layers/profiling');
-const ValidatorsLayer = require('../layers/validators');
-
-/* --------------------------------------------------
- * 1. Layer Hash Generation
- * -------------------------------------------------- */
-
-function generateLayerHash(layerExports, layerName) {
-  const keys = Object.keys(layerExports).sort();
-  let combinedSource = `LAYER:${layerName}`;
-
-  for (const key of keys) {
-    const item = layerExports[key];
-    if (typeof item === 'function') { combinedSource += item.toString();
-    } else if (typeof item === 'object' && item !== null) { combinedSource += JSON.stringify(item);
-    } else { combinedSource += String(item); }
-  }
-  return generateCodeHash(combinedSource);
-}
-
-// Pre-compute layer hashes at startup
-const LAYER_HASHES = {
-  'mathematics': generateLayerHash(MathematicsLayer, 'mathematics'),
-  'extractors': generateLayerHash(ExtractorsLayer, 'extractors'),
-  'profiling': generateLayerHash(ProfilingLayer, 'profiling'),
-  'validators': generateLayerHash(ValidatorsLayer, 'validators')
-};
-
-// Map code patterns to Layer dependencies
-const LAYER_TRIGGERS = {
-  'mathematics': [
-    'math.compute', 'MathPrimitives',
-    'math.signals', 'SignalPrimitives', 'signals.',
-    'math.aggregate', 'Aggregators',
-    'math.timeseries', 'TimeSeries', 'timeSeries.',
-    'math.distribution', 'DistributionAnalytics', 'distribution.',
-    'math.financial', 'FinancialEngineering'
-  ],
-  'extractors': [
-    'math.extract', 'DataExtractor',
-    'math.history', 'HistoryExtractor',
-    'math.prices', 'priceExtractor',
-    'math.insights', 'InsightsExtractor', 'insights.',
-    'math.tradeSeries', 'TradeSeriesBuilder'
-  ],
-  'profiling': [
-    'math.profiling', 'SCHEMAS',
-    'math.classifier', 'UserClassifier',
-    'math.psychometrics', 'Psychometrics',
-    'math.bias', 'CognitiveBiases',
-    'math.skill', 'SkillAttribution'
-  ],
-  'validators': [
-    'math.validate', 'Validators'
-  ]
-};
-
-/* --------------------------------------------------
- * Pretty Console Helpers
- * -------------------------------------------------- */
-const log = {
-  info: (msg) => console.log('ℹ︎ ' + msg),
-  step: (msg) => console.log('› ' + msg),
-  warn: (msg) => console.warn('⚠︎ ' + msg),
-  success: (msg) => console.log('✔︎ ' + msg),
-  error: (msg) => console.error('✖ ' + msg),
-  fatal: (msg) => { console.error('✖ FATAL ✖ ' + msg); console.error('✖ FATAL ✖ Manifest build FAILED.'); },
-  divider: (label) => { const line = ''.padEnd(60, '─'); console.log(`\n${line}\n${label}\n${line}\n`); },
-};
-
-/* --------------------------------------------------
- * Helper Utilities
- * -------------------------------------------------- */
-
-const normalizeName = (name) => { if (typeof name !== 'string') return name; return name.trim().replace(/,$/, '').replace(/_/g, '-').toLowerCase(); };
-
-function suggestClosest(name, candidates, n = 3) {
-  const levenshtein = (a = '', b = '') => {
-    const m = a.length, n = b.length;
-    if (!m) return n; if (!n) return m;
-    const dp = Array.from({ length: m + 1 }, (_, i) => Array(n + 1).fill(i));
-    for (let j = 0; j <= n; j++) dp[0][j] = j;
-    for (let i = 1; i <= m; i++)
-      for (let j = 1; j <= n; j++)
-        dp[i][j] = a[i - 1] === b[j - 1] ? dp[i - 1][j - 1] : Math.min(dp[i - 1][j - 1], dp[i][j - 1], dp[i - 1][j]) + 1;
-    return dp[m][n];
-  };
-  const scores = candidates.map(c => [c, levenshtein(name, c)]);
-  scores.sort((a, b) => a[1] - b[1]);
-  return scores.slice(0, n).map(s => s[0]);
-}
-
-function getDependencySet(endpoints, adjacencyList) {
-  const required = new Set(endpoints);
-  const queue = [...endpoints];
-  while (queue.length > 0) { const calcName = queue.shift(); const dependencies = adjacencyList.get(calcName);
-    if (dependencies) { for (const dep of dependencies) { if (!required.has(dep)) { required.add(dep); queue.push(dep); } } } }
-  return required;
-}
-
-/* --------------------------------------------------
- * Core Manifest Builder
- * -------------------------------------------------- */
-
-function buildManifest(productLinesToRun = [], calculations) {
-  log.divider('Building Dynamic Manifest (Merkle Hashing)');
-  log.info(`Target Product Lines: [${productLinesToRun.join(', ')}]`);
-
-  const manifestMap = new Map();
-  const adjacency = new Map();
-  const reverseAdjacency = new Map();
-  const inDegree = new Map();
-  let hasFatalError = false;
-
-  /* ---------------- 1. Load All Calculations (Phase 1: Intrinsic Hash) ---------------- */
-  log.step('Loading and validating all calculation classes…');
-
-  function processCalc(Class, name, folderName) {
-    if (!Class || typeof Class !== 'function') return;
-    const normalizedName = normalizeName(name);
-
-    if (typeof Class.getMetadata !== 'function') { log.fatal(`Calculation "${normalizedName}" is missing static getMetadata().`); hasFatalError = true; return; }
-    if (typeof Class.getDependencies !== 'function') { log.fatal(`Calculation "${normalizedName}" is missing static getDependencies().`); hasFatalError = true; return; }
-
-    const metadata = Class.getMetadata();
-    const dependencies = Class.getDependencies().map(normalizeName);
-
-    const codeStr = Class.toString();
-    if (metadata.isHistorical === true && !codeStr.includes('yesterday') && !codeStr.includes('previousComputed')) {
-      log.warn(`Calculation "${normalizedName}" marked 'isHistorical' but no 'previousComputed' state reference found.`);
-    }
-
-    let finalCategory = folderName === 'core' && metadata.category ? metadata.category : folderName;
-
-    // --- PHASE 1: INTRINSIC HASH (Code + Layers) ---
-    // We do NOT include dependencies yet.
-    let compositeHashString = generateCodeHash(codeStr);
-    const usedLayers = [];
-
-    // Check for specific layer usage
-    for (const [layerName, triggers] of Object.entries(LAYER_TRIGGERS)) {
-      if (triggers.some(trigger => codeStr.includes(trigger))) { compositeHashString += LAYER_HASHES[layerName]; usedLayers.push(layerName); }
-    }
-
-    // Safe Mode Fallback
-    let isSafeMode = false;
-    if (usedLayers.length === 0) {
-      isSafeMode = true;
-      Object.values(LAYER_HASHES).forEach(h => compositeHashString += h);
-    }
-
-    const baseHash = generateCodeHash(compositeHashString);
-
-    const manifestEntry = {
-      name: normalizedName,
-      class: Class,
-      category: finalCategory,
-      sourcePackage: folderName,
-      type: metadata.type,
-      isHistorical: metadata.isHistorical,
-      rootDataDependencies: metadata.rootDataDependencies || [],
-      userType: metadata.userType,
-      dependencies: dependencies,
-      pass: 0,
-      hash: baseHash,
-      debugUsedLayers: isSafeMode ? ['ALL (Safe Mode)'] : usedLayers
-    };
-
-    manifestMap.set(normalizedName, manifestEntry);
-    adjacency.set(normalizedName, dependencies);
-    inDegree.set(normalizedName, dependencies.length);
-    dependencies.forEach(dep => { if (!reverseAdjacency.has(dep)) reverseAdjacency.set(dep, []); reverseAdjacency.get(dep).push(normalizedName); });
-  }
-
-  if (!calculations || typeof calculations !== 'object') {
-    throw new Error('Manifest build failed: Invalid calculations object.');
-  }
-
-  for (const folderName in calculations) {
-    if (folderName === 'legacy') continue;
-    const group = calculations[folderName];
-    for (const key in group) {
-      const entry = group[key];
-      if (typeof entry === 'function') { processCalc(entry, key, folderName); }
-    }
-  }
-
-  if (hasFatalError) { throw new Error('Manifest build failed due to missing static methods.'); }
-  log.success(`Loaded ${manifestMap.size} calculations.`);
-
-  /* ---------------- 2. Validate Dependency Links ---------------- */
-  const allNames = new Set(manifestMap.keys());
-  let invalidLinks = false;
-  for (const [name, entry] of manifestMap) {
-    for (const dep of entry.dependencies) {
-      if (!allNames.has(dep)) {
-        invalidLinks = true;
-        const guesses = suggestClosest(dep, Array.from(allNames));
-        log.error(`${name} depends on unknown calculation "${dep}". Did you mean: ${guesses.join(', ')}?`);
-      }
-      if (dep === name) {
-        invalidLinks = true;
-        log.error(`${name} has a circular dependency on *itself*!`);
-      }
-    }
-  }
-  if (invalidLinks) { throw new Error('Manifest validation failed.'); }
-
-  /* ---------------- 3. Filter for Product Lines ---------------- */
-  const productLineEndpoints = [];
-  for (const [name, entry] of manifestMap.entries()) { if (productLinesToRun.includes(entry.category)) { productLineEndpoints.push(name); } }
-  for (const [name, entry] of manifestMap.entries()) { if (entry.sourcePackage === 'core') { productLineEndpoints.push(name); } }
-
-  const requiredCalcs = getDependencySet(productLineEndpoints, adjacency);
-  log.info(`Filtered down to ${requiredCalcs.size} active calculations.`);
-
-  const filteredManifestMap = new Map();
-  const filteredInDegree = new Map();
-  const filteredReverseAdjacency = new Map();
-
-  for (const name of requiredCalcs) { filteredManifestMap.set(name, manifestMap.get(name)); filteredInDegree.set(name, inDegree.get(name));
-    const consumers = (reverseAdjacency.get(name) || []).filter(consumer => requiredCalcs.has(consumer)); filteredReverseAdjacency.set(name, consumers); }
-
-  /* ---------------- 4. Topological Sort ---------------- */
-  const sortedManifest = [];
-  const queue = [];
-  let maxPass = 0;
-
-  for (const [name, degree] of filteredInDegree) { if (degree === 0) { queue.push(name); filteredManifestMap.get(name).pass = 1; maxPass = 1; } }
-  queue.sort();
-  while (queue.length) {
-    const currentName = queue.shift();
-    const currentEntry = filteredManifestMap.get(currentName);
-    sortedManifest.push(currentEntry);
-
-    for (const neighborName of (filteredReverseAdjacency.get(currentName) || [])) { const newDegree = filteredInDegree.get(neighborName) - 1; filteredInDegree.set(neighborName, newDegree);
-      const neighborEntry = filteredManifestMap.get(neighborName);
-      if (neighborEntry.pass <= currentEntry.pass) { neighborEntry.pass = currentEntry.pass + 1; if (neighborEntry.pass > maxPass) maxPass = neighborEntry.pass; }
-      if (newDegree === 0) { queue.push(neighborName); } }
-    queue.sort(); }
-
-  if (sortedManifest.length !== filteredManifestMap.size) {
-    throw new Error('Circular dependency detected. Manifest build failed.'); }
-
-  /* ---------------- 5. Phase 2: Cascading Dependency Hashing ---------------- */
-  log.step('Computing Cascading Merkle Hashes...');
-
-  for (const entry of sortedManifest) {
-    // Start with the intrinsic hash (Code + Layers)
-    let dependencySignature = entry.hash;
-
-    if (entry.dependencies && entry.dependencies.length > 0) {
-      const depHashes = entry.dependencies.map(depName => { const depEntry = filteredManifestMap.get(depName); if (!depEntry) return ''; return depEntry.hash; }).join('|');
-      dependencySignature += `|DEPS:${depHashes}`;
-    }
-    // Generate the Final Smart Hash
-    entry.hash = generateCodeHash(dependencySignature);
-  }
-
-  log.success(`Total passes required: ${maxPass}`);
-  return sortedManifest;
-}
-
-function build(productLinesToRun, calculations) {
-  try {
-    return buildManifest(productLinesToRun, calculations);
-  } catch (error) {
-    log.error(error.message);
-    return null;
-  }
-}
-
-module.exports = { build };