bulltrackers-module 1.0.281 → 1.0.283
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/computation-system/onboarding.md +154 -869
- package/functions/computation-system/persistence/ContractValidator.js +81 -0
- package/functions/computation-system/persistence/ResultCommitter.js +73 -13
- package/functions/computation-system/scripts/UpdateContracts.js +128 -0
- package/functions/computation-system/simulation/Fabricator.js +285 -0
- package/functions/computation-system/simulation/SeededRandom.js +41 -0
- package/functions/computation-system/simulation/SimRunner.js +51 -0
- package/functions/computation-system/tools/BuildReporter.js +199 -159
- package/functions/computation-system/tools/ContractDiscoverer.js +144 -0
- package/package.json +1 -1
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
/**
 * @fileoverview Enforces the contracts discovered by the offline tool.
 * Designed to be permissive with volatility ("Anomalies") but strict with logic ("Violations").
 */
class ContractValidator {

  /**
   * Validates a production result (a single item or a User -> Result batch map)
   * against a learned contract. Stops at the FIRST violation found.
   *
   * @param {Object} result - The production output (single item or batch).
   * @param {Object} contract - The loaded contract JSON.
   * @returns {{valid: boolean, reason?: string}} valid=false carries a human-readable reason.
   */
  static validate(result, contract) {
    // No data or no contract => nothing to enforce (permissive by design).
    if (!result || !contract) return { valid: true };

    // Handle Batches (StandardExecutor produces map of User -> Result)
    const items = Object.values(result);
    if (items.length === 0) return { valid: true };

    // For "Cohort" logic, we check all items because one bad apple spoils the average.
    for (const item of items) {
      const check = this._validateItem(item, contract);
      if (!check.valid) return check;
    }

    return { valid: true };
  }

  /**
   * Validates one item against the contract's three gates, in order:
   * 1. structure (requiredKeys), 2. hard numeric bounds, 3. soft statistical bounds.
   */
  static _validateItem(item, contract) {
    // [FIX] A null/undefined item in the batch used to crash the structure
    // check (`null[key]` throws TypeError). Treat it as missing every key.
    if (item === null || item === undefined) {
      const firstRequired = contract.requiredKeys && contract.requiredKeys[0];
      return firstRequired
        ? { valid: false, reason: `Schema Violation: Missing key '${firstRequired}'` }
        : { valid: true };
    }

    // 1. Structure Check
    if (contract.requiredKeys) {
      for (const key of contract.requiredKeys) {
        if (item[key] === undefined) {
          return { valid: false, reason: `Schema Violation: Missing key '${key}'` };
        }
      }
    }

    // 2. Numeric Physics Check (Hard Bounds)
    if (contract.numericBounds) {
      for (const [key, bounds] of Object.entries(contract.numericBounds)) {
        const val = item[key];
        if (typeof val !== 'number') continue;

        if (val < bounds.min) {
          return { valid: false, reason: `Physics Violation: ${key} (${val}) is below hard limit ${bounds.min}` };
        }
        if (val > bounds.max) {
          return { valid: false, reason: `Physics Violation: ${key} (${val}) is above hard limit ${bounds.max}` };
        }
      }
    }

    // 3. Statistical Sanity Check (Soft Bounds)
    // We generally DO NOT BLOCK on this for financial data, unless it's egregious.
    // We block if it's "Mathematically Impossible" based on the distribution.
    if (contract.distributions) {
      for (const [key, dist] of Object.entries(contract.distributions)) {
        const val = item[key];
        if (typeof val !== 'number') continue;

        // [FIX] A degenerate distribution (stdDev <= 0, e.g. a field that was
        // constant during discovery) previously divided by zero, turning ANY
        // material deviation into "Infinity sigmas" and blocking valid data.
        if (typeof dist.stdDev !== 'number' || !(dist.stdDev > 0)) continue;

        const diff = Math.abs(val - dist.mean);
        const sigmas = diff / dist.stdDev;

        // 15 Sigma is our "Ridiculousness Threshold".
        // Even crypto doesn't move 15 standard deviations in one calculation step
        // unless the data is corrupt (e.g. integer overflow, or bad scraping).
        if (sigmas > 15 && diff > 1.0) { // Ensure diff is material
          return {
            valid: false,
            reason: `Statistical Impossibility: ${key} is ${sigmas.toFixed(1)} sigmas from mean. Value: ${val}, Mean: ${dist.mean}`
          };
        }
      }
    }

    return { valid: true };
  }
}
|
|
80
|
+
|
|
81
|
+
module.exports = ContractValidator;
|
|
@@ -4,12 +4,15 @@
|
|
|
4
4
|
* UPDATED: Implements Content-Based Hashing (ResultHash) for dependency short-circuiting.
|
|
5
5
|
* UPDATED: Auto-enforces Weekend Mode validation.
|
|
6
6
|
* UPDATED: Implements "Initial Write" logic to wipe stale data/shards on a fresh run.
|
|
7
|
+
* UPDATED: Implements "Contract Validation" (Semantic Gates) to block logical violations.
|
|
8
|
+
* OPTIMIZED: Fetches pre-calculated 'simHash' from Registry (removes expensive simulation step).
|
|
7
9
|
*/
|
|
8
10
|
const { commitBatchInChunks, generateDataHash } = require('../utils/utils');
|
|
9
11
|
const { updateComputationStatus } = require('./StatusRepository');
|
|
10
12
|
const { batchStoreSchemas } = require('../utils/schema_capture');
|
|
11
13
|
const { generateProcessId, PROCESS_TYPES } = require('../logger/logger');
|
|
12
14
|
const { HeuristicValidator } = require('./ResultsValidator');
|
|
15
|
+
const ContractValidator = require('./ContractValidator'); // [NEW]
|
|
13
16
|
const validationOverrides = require('../config/validation_overrides');
|
|
14
17
|
const pLimit = require('p-limit');
|
|
15
18
|
const zlib = require('zlib');
|
|
@@ -18,6 +21,9 @@ const NON_RETRYABLE_ERRORS = [
|
|
|
18
21
|
'PERMISSION_DENIED', 'DATA_LOSS', 'FAILED_PRECONDITION'
|
|
19
22
|
];
|
|
20
23
|
|
|
24
|
+
const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';
|
|
25
|
+
const CONTRACTS_COLLECTION = 'system_contracts'; // [NEW]
|
|
26
|
+
|
|
21
27
|
/**
|
|
22
28
|
* Commits results to Firestore.
|
|
23
29
|
*/
|
|
@@ -31,12 +37,16 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
|
|
|
31
37
|
|
|
32
38
|
// Options defaults
|
|
33
39
|
const flushMode = options.flushMode || 'STANDARD';
|
|
34
|
-
const isInitialWrite = options.isInitialWrite === true;
|
|
40
|
+
const isInitialWrite = options.isInitialWrite === true;
|
|
35
41
|
const shardIndexes = options.shardIndexes || {};
|
|
36
42
|
const nextShardIndexes = {};
|
|
37
43
|
|
|
38
44
|
const fanOutLimit = pLimit(10);
|
|
39
45
|
|
|
46
|
+
// [NEW] Bulk fetch contracts for all calcs in this batch to minimize latency
|
|
47
|
+
// This prevents N+1 reads during the loop
|
|
48
|
+
const contractMap = await fetchContracts(db, Object.keys(stateObj));
|
|
49
|
+
|
|
40
50
|
for (const name in stateObj) {
|
|
41
51
|
const calc = stateObj[name];
|
|
42
52
|
const execStats = calc._executionStats || { processedUsers: 0, skippedUsers: 0 };
|
|
@@ -65,7 +75,23 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
|
|
|
65
75
|
};
|
|
66
76
|
}
|
|
67
77
|
|
|
68
|
-
//
|
|
78
|
+
// 1. SEMANTIC GATE (CONTRACT VALIDATION) [NEW]
|
|
79
|
+
// We run this BEFORE Heuristics because it catches "Logic Bugs" vs "Data Noise"
|
|
80
|
+
const contract = contractMap[name];
|
|
81
|
+
if (contract) {
|
|
82
|
+
const contractCheck = ContractValidator.validate(result, contract);
|
|
83
|
+
if (!contractCheck.valid) {
|
|
84
|
+
// STOP THE CASCADE: Fail this specific calculation
|
|
85
|
+
runMetrics.validation.isValid = false;
|
|
86
|
+
runMetrics.validation.anomalies.push(contractCheck.reason);
|
|
87
|
+
|
|
88
|
+
const semanticError = new Error(contractCheck.reason);
|
|
89
|
+
semanticError.stage = 'SEMANTIC_GATE';
|
|
90
|
+
throw semanticError;
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
// 2. HEURISTIC VALIDATION (Data Integrity)
|
|
69
95
|
if (result && Object.keys(result).length > 0) {
|
|
70
96
|
const healthCheck = HeuristicValidator.analyze(calc.manifest.name, result, dStr, effectiveOverrides);
|
|
71
97
|
if (!healthCheck.valid) {
|
|
@@ -80,6 +106,21 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
|
|
|
80
106
|
const isEmpty = !result || (typeof result === 'object' && Object.keys(result).length === 0);
|
|
81
107
|
const resultHash = isEmpty ? 'empty' : generateDataHash(result);
|
|
82
108
|
|
|
109
|
+
// [OPTIMIZATION] FETCH SimHash from Registry (Do NOT Calculate)
|
|
110
|
+
let simHash = null;
|
|
111
|
+
if (calc.manifest.hash && flushMode !== 'INTERMEDIATE') {
|
|
112
|
+
try {
|
|
113
|
+
const regDoc = await db.collection(SIMHASH_REGISTRY_COLLECTION).doc(calc.manifest.hash).get();
|
|
114
|
+
if (regDoc.exists) {
|
|
115
|
+
simHash = regDoc.data().simHash;
|
|
116
|
+
} else {
|
|
117
|
+
logger.log('WARN', `[ResultCommitter] SimHash not found in registry for ${name}.`);
|
|
118
|
+
}
|
|
119
|
+
} catch (regErr) {
|
|
120
|
+
logger.log('WARN', `[ResultCommitter] Failed to read SimHash registry: ${regErr.message}`);
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
|
|
83
124
|
if (isEmpty) {
|
|
84
125
|
if (flushMode === 'INTERMEDIATE') {
|
|
85
126
|
nextShardIndexes[name] = currentShardIndex;
|
|
@@ -88,6 +129,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
|
|
|
88
129
|
if (calc.manifest.hash) {
|
|
89
130
|
successUpdates[name] = {
|
|
90
131
|
hash: calc.manifest.hash,
|
|
132
|
+
simHash: simHash,
|
|
91
133
|
resultHash: resultHash,
|
|
92
134
|
dependencyResultHashes: calc.manifest.dependencyResultHashes || {},
|
|
93
135
|
category: calc.manifest.category,
|
|
@@ -115,8 +157,6 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
|
|
|
115
157
|
.collection(config.computationsSubcollection)
|
|
116
158
|
.doc(name);
|
|
117
159
|
|
|
118
|
-
// Note: Multi-date fan-out rarely hits sharding, and tracking isInitialWrite per-date is complex.
|
|
119
|
-
// We assume standard merging here.
|
|
120
160
|
await writeSingleResult(dailyData, historicalDocRef, name, historicalDate, logger, config, deps, 0, 'STANDARD', false);
|
|
121
161
|
}));
|
|
122
162
|
await Promise.all(datePromises);
|
|
@@ -124,6 +164,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
|
|
|
124
164
|
if (calc.manifest.hash) {
|
|
125
165
|
successUpdates[name] = {
|
|
126
166
|
hash: calc.manifest.hash,
|
|
167
|
+
simHash: simHash,
|
|
127
168
|
resultHash: resultHash,
|
|
128
169
|
dependencyResultHashes: calc.manifest.dependencyResultHashes || {},
|
|
129
170
|
category: calc.manifest.category,
|
|
@@ -151,6 +192,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
|
|
|
151
192
|
if (calc.manifest.hash) {
|
|
152
193
|
successUpdates[name] = {
|
|
153
194
|
hash: calc.manifest.hash,
|
|
195
|
+
simHash: simHash,
|
|
154
196
|
resultHash: resultHash,
|
|
155
197
|
dependencyResultHashes: calc.manifest.dependencyResultHashes || {},
|
|
156
198
|
category: calc.manifest.category,
|
|
@@ -186,12 +228,34 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
|
|
|
186
228
|
return { successUpdates, failureReport, shardIndexes: nextShardIndexes };
|
|
187
229
|
}
|
|
188
230
|
|
|
231
|
+
/**
 * [NEW] Bulk-loads behavioral contracts for a set of calculations in a single
 * `db.getAll()` round-trip. Best-effort: on a read failure it logs a warning
 * and returns whatever was collected, so commits are never blocked by this.
 *
 * @param {Object} db - Firestore instance.
 * @param {string[]} calcNames - Calculation names (used as contract doc IDs).
 * @returns {Promise<Object>} Map of calcName -> contract data (missing docs omitted).
 */
async function fetchContracts(db, calcNames) {
  const contractsByName = {};
  if (!calcNames || calcNames.length === 0) return contractsByName;

  // In a high-throughput system these could live in an in-memory LRU cache;
  // for now a single batched Firestore read keeps latency low (avoids N+1).
  const docRefs = calcNames.map((calcName) =>
    db.collection(CONTRACTS_COLLECTION).doc(calcName)
  );

  try {
    const snapshots = await db.getAll(...docRefs);
    for (const snapshot of snapshots) {
      if (snapshot.exists) {
        contractsByName[snapshot.id] = snapshot.data();
      }
    }
  } catch (e) {
    console.warn(`[ResultCommitter] Failed to fetch contracts batch: ${e.message}`);
  }
  return contractsByName;
}
|
|
254
|
+
|
|
189
255
|
async function writeSingleResult(result, docRef, name, dateContext, logger, config, deps, startShardIndex = 0, flushMode = 'STANDARD', isInitialWrite = false) {
|
|
190
256
|
|
|
191
|
-
//
|
|
192
|
-
// If this is the initial write of a run, we verify the existing state to prevent "Ghost Data".
|
|
257
|
+
// Transition & Cleanup Logic
|
|
193
258
|
let wasSharded = false;
|
|
194
|
-
let hadRootData = false;
|
|
195
259
|
let shouldWipeShards = false;
|
|
196
260
|
|
|
197
261
|
// Default: Merge updates. But if Initial Write, overwrite (merge: false) to clear stale fields.
|
|
@@ -203,11 +267,7 @@ async function writeSingleResult(result, docRef, name, dateContext, logger, conf
|
|
|
203
267
|
if (currentSnap.exists) {
|
|
204
268
|
const d = currentSnap.data();
|
|
205
269
|
wasSharded = (d._sharded === true);
|
|
206
|
-
// If it was sharded, we MUST wipe the old shards because we are re-writing from scratch.
|
|
207
|
-
// Even if we write new shards, we want to ensure shard_10 doesn't persist if we only write up to shard_5.
|
|
208
270
|
if (wasSharded) shouldWipeShards = true;
|
|
209
|
-
|
|
210
|
-
// If it wasn't sharded, it had root data. overwriting (merge: false) handles that automatically.
|
|
211
271
|
}
|
|
212
272
|
} catch (e) { /* ignore read error */ }
|
|
213
273
|
}
|
|
@@ -276,7 +336,7 @@ async function writeSingleResult(result, docRef, name, dateContext, logger, conf
|
|
|
276
336
|
try {
|
|
277
337
|
const updates = await prepareAutoShardedWrites(result, docRef, logger, constraints.bytes, constraints.keys, startShardIndex, flushMode);
|
|
278
338
|
|
|
279
|
-
//
|
|
339
|
+
// Inject Cleanup Ops
|
|
280
340
|
if (shouldWipeShards) {
|
|
281
341
|
logger.log('INFO', `[Cleanup] ${name}: Wiping old shards before Write (Initial).`);
|
|
282
342
|
const shardCol = docRef.collection('_shards');
|
|
@@ -410,4 +470,4 @@ function calculateFirestoreBytes(value) {
|
|
|
410
470
|
if (typeof value === 'object') { let sum = 0; for (const k in value) { if (Object.prototype.hasOwnProperty.call(value, k)) { sum += (Buffer.byteLength(k, 'utf8') + 1) + calculateFirestoreBytes(value[k]); } } return sum; } return 0;
|
|
411
471
|
}
|
|
412
472
|
|
|
413
|
-
module.exports = { commitResults };
|
|
473
|
+
module.exports = { commitResults };
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
/**
 * @fileoverview Discovery Script: UpdateContracts.js
 * Runs offline simulations to "learn" the behavioral contracts of all calculations.
 * Saves these contracts to Firestore for the Runtime Enforcer (ResultCommitter) to use.
 *
 * USAGE:
 *   node computation-system/scripts/UpdateContracts.js [--calc=CalcName]
 */

// NOTE(review): 'path' is required but not referenced below — confirm it is
// still needed or remove it.
const path = require('path');
const admin = require('firebase-admin');

// Initialize Firebase (Standard Env Check).
// Both branches call initializeApp() with defaults; the env check only
// controls whether the local-dev warning is printed first.
if (!admin.apps.length) {
  if (process.env.GOOGLE_APPLICATION_CREDENTIALS) {
    admin.initializeApp();
  } else {
    // Fallback for local dev if key path isn't set in env
    console.warn("⚠️ No GOOGLE_APPLICATION_CREDENTIALS. Attempting default init...");
    admin.initializeApp();
  }
}

// Project modules are required AFTER Firebase init; db is the Firestore
// handle shared by the discovery loop below.
const db = admin.firestore();
const { StructuredLogger } = require('../logger/logger');
const { getManifest } = require('../topology/ManifestLoader');
const ContractDiscoverer = require('../tools/ContractDiscoverer');

// Load Calculations Package — fatal if unavailable, since there is nothing
// to discover contracts for without it.
let calculationPackage;
try {
  // Adjust path if necessary for your local monorepo structure
  calculationPackage = require('aiden-shared-calculations-unified');
} catch (e) {
  console.error("FATAL: Could not load 'aiden-shared-calculations-unified'. Ensure you are in the correct directory or npm link is active.");
  process.exit(1);
}

// Firestore collection the runtime enforcer (ResultCommitter) reads from.
const CONTRACTS_COLLECTION = 'system_contracts';
|
|
39
|
+
|
|
40
|
+
/**
 * Entry point: discovers the behavioral contract for every calculation in the
 * manifest (or a single one selected via --calc=Name) and persists each
 * contract to the CONTRACTS_COLLECTION in Firestore.
 */
async function main() {
  const logger = new StructuredLogger({ enableConsole: true });

  // 1. Setup Dependencies — ManifestLoader/Discoverer receive a mock deps object.
  const mockDeps = {
    db,
    logger,
    // Mock specific utils if needed by your calculations during instantiation
    calculationUtils: {
      loadInstrumentMappings: async () => ({ instrumentToTicker: {}, tickerToInstrument: {} })
    }
  };

  console.log("🚀 Starting Contract Discovery...");

  // 2. Load Manifest and index it by calculation name.
  const manifest = getManifest([], calculationPackage.calculations, mockDeps);
  const manifestMap = new Map(manifest.map((entry) => [entry.name, entry]));

  console.log(`ℹ️ Loaded manifest with ${manifest.length} calculations.`);

  // 3. Filter Target (Optional CLI Arg)
  const cliArg = process.argv.find((arg) => arg.startsWith('--calc='));
  const requestedName = cliArg ? cliArg.split('=')[1] : null;

  let calcsToProcess = manifest;
  if (requestedName) {
    calcsToProcess = manifest.filter(
      (entry) => entry.name.toLowerCase() === requestedName.toLowerCase()
    );
    if (calcsToProcess.length === 0) {
      console.error(`❌ Calculation '${requestedName}' not found.`);
      process.exit(1);
    }
  }

  // 4. Run Discovery Loop
  let successCount = 0;
  let skipCount = 0;

  for (const calc of calcsToProcess) {
    // Skip computations that don't produce data (like aggregators without schema)
    if (!calc.class.getSchema && !calc.dependencies) {
      console.log(`⏭️ Skipping ${calc.name} (No schema/outputs to analyze).`);
      skipCount += 1;
      continue;
    }

    try {
      // A. Discover contract via simulation — 50 iterations gives a
      //    statistically significant sample.
      const contract = await ContractDiscoverer.generateContract(calc, manifestMap, 50);

      if (!contract) {
        console.warn(`⚠️ [EMPTY] No contract generated for ${calc.name} (Insufficient data/samples).`);
        skipCount += 1;
        continue;
      }

      // B. Enrich with metadata on a FRESH object (avoid mutating the
      //    inferred shape, which upsets type checking).
      const finalContract = {
        ...contract,
        lastUpdated: new Date(),
        generatedBy: 'UpdateContracts.js',
        version: '1.0'
      };

      // C. Persist to Firestore keyed by calculation name.
      await db.collection(CONTRACTS_COLLECTION).doc(calc.name).set(finalContract);
      console.log(`✅ [SAVED] Contract for ${calc.name}`);
      successCount += 1;
    } catch (err) {
      console.error(`❌ [ERROR] Failed to generate contract for ${calc.name}:`, err.message);
    }
  }

  console.log("\n============================================");
  console.log(`🎉 Discovery Complete.`);
  console.log(`   Updated: ${successCount}`);
  console.log(`   Skipped: ${skipCount}`);
  console.log("============================================");
}
|
|
124
|
+
|
|
125
|
+
// Top-level runner: surface any unhandled failure and exit non-zero so CI /
// cron wrappers notice the script did not complete.
main().catch((fatalError) => {
  console.error("FATAL SCRIPT ERROR:", fatalError);
  process.exit(1);
});
|
|
@@ -0,0 +1,285 @@
|
|
|
1
|
+
/**
 * @fileoverview Generates deterministic mock contexts for Simulation Hashing.
 * STRICTLY ALIGNED WITH SCHEMA.MD (Production V2 Schemas).
 * UPGRADED: Supports Iteration (Seed Rotation) and Volume Scaling for Arrays.
 *
 * Determinism contract: every random value is drawn from a SeededRandom
 * keyed on (calcName, iteration), so identical inputs fabricate identical
 * contexts. The exact ORDER of rng calls is therefore part of the output —
 * do not reorder statements in this file without expecting hashes to change.
 */
const SeededRandom = require('./SeededRandom');
const { ContextFactory } = require('../context/ContextFactory');

// Fixed vocabularies keep fabricated keys/labels stable across runs.
const FAKE_SECTORS = ['Technology', 'Healthcare', 'Financials', 'Energy', 'Crypto', 'Consumer Discretionary'];
const FAKE_TICKERS = ['AAPL', 'GOOGL', 'MSFT', 'TSLA', 'AMZN', 'BTC', 'ETH', 'NVDA', 'META', 'AMD'];
const FAKE_TOPICS = ['AI', 'Earnings', 'Fed', 'Crypto', 'Macro'];

class Fabricator {
  /**
   * @param {string} calcName - Seed root; each calculation gets its own RNG stream.
   */
  constructor(calcName) {
    this.baseSeed = calcName;
    // Primary RNG for high-level structure
    this.rng = new SeededRandom(calcName);
  }

  /**
   * Generates a context for a specific user iteration.
   * @param {Object} calcManifest - Manifest entry of the calculation being simulated.
   * @param {Map} dependenciesManifest - name -> manifest entry; used to fake dependency outputs from their schemas.
   * @param {number} iteration - The index of the user in the batch (0, 1, 2...).
   * @returns {Promise<Object>} Per-user context built by ContextFactory.
   */
  async generateContext(calcManifest, dependenciesManifest, iteration = 0) {
    // [CRITICAL] Rotate the RNG state based on iteration so User 1 != User 2
    this.rng = new SeededRandom(`${this.baseSeed}_ITER_${iteration}`);

    const FIXED_DATE = '2025-01-01'; // Fixed simulation date

    // 1. Generate Root Data
    const user = this._generateUser(calcManifest.userType, iteration);
    const insights = this._generateInsights(FIXED_DATE);

    // 2. Generate Mock Dependencies (The "Schema Faking" Part)
    const computed = {};
    if (calcManifest.dependencies) {
      for (const depName of calcManifest.dependencies) {
        const depEntry = dependenciesManifest.get(depName);
        if (depEntry && depEntry.class && depEntry.class.getSchema) {
          const schema = depEntry.class.getSchema();
          // [VOLUME UPGRADE] Dependencies usually represent aggregate data.
          computed[depName] = this._fakeFromSchema(schema, true);
        } else {
          // No schema available — dependency is faked as an empty object.
          computed[depName] = {};
        }
      }
    }

    return ContextFactory.buildPerUserContext({
      userId: user.id,
      userType: user.type,
      dateStr: FIXED_DATE,
      todayPortfolio: user.portfolio.today,
      yesterdayPortfolio: user.portfolio.yesterday,
      todayHistory: user.history.today,
      yesterdayHistory: user.history.yesterday,
      metadata: calcManifest,
      mappings: {
        instrumentToTicker: this._generateMappings(),
        instrumentToSector: this._generateSectorMappings()
      },
      insights: { today: insights },
      socialData: { today: this._generateSocial(FIXED_DATE) },
      computedDependencies: computed,
      config: {},
      deps: { logger: { log: () => {} } } // no-op logger for silent simulation
    });
  }

  // --- Schema Faker Logic (Unchanged) ---

  /**
   * Recursively fabricates a value matching a JSON-schema-like descriptor.
   * @param {Object} schema - Supports type: object (properties/patternProperties), array, number, string, boolean.
   * @param {boolean} isHighVolume - Scales pattern-key and array counts up to mimic aggregate data.
   */
  _fakeFromSchema(schema, isHighVolume = false) {
    if (!schema) return {};
    if (schema.type === 'object') {
      const obj = {};
      if (schema.properties) {
        for (const [key, propSchema] of Object.entries(schema.properties)) {
          obj[key] = this._fakeFromSchema(propSchema, isHighVolume);
        }
      }
      if (schema.patternProperties) {
        const count = isHighVolume ? this.rng.range(20, 50) : 3;
        const propSchema = Object.values(schema.patternProperties)[0];
        for (let i = 0; i < count; i++) {
          // Use deterministic ticker keys for stability
          const key = `${this.rng.choice(FAKE_TICKERS)}`;
          // Note: In real scenarios tickers are unique, so we might need a suffix if count > tickers.length
          // NOTE(review): when count <= FAKE_TICKERS.length, rng.choice can still
          // repeat a ticker and overwrite an entry, yielding fewer than `count`
          // keys — confirm this is acceptable for the simulation.
          const safeKey = count > FAKE_TICKERS.length ? `${key}_${i}` : key;
          obj[safeKey] = this._fakeFromSchema(propSchema, isHighVolume);
        }
      }
      return obj;
    }
    if (schema.type === 'array') {
      const min = isHighVolume ? 50 : 1;
      const max = isHighVolume ? 150 : 5;
      const len = this.rng.range(min, max);
      return Array.from({ length: len }, () => this._fakeFromSchema(schema.items, isHighVolume));
    }
    // NOTE(review): rounds to 4 decimals BEFORE scaling by 100, so values are
    // multiples of 0.01 — confirm vs. (next() * 100).toFixed(4) if finer
    // granularity was intended.
    if (schema.type === 'number') return parseFloat(this.rng.next().toFixed(4)) * 100;
    if (schema.type === 'string') return "SIMULATED_STRING";
    if (schema.type === 'boolean') return this.rng.bool();
    return null;
  }

  // --- PROD ALIGNED GENERATORS ---

  /**
   * Builds a fake user with today/yesterday portfolio + history snapshots.
   * @param {string} type - 'speculator' selects the speculator portfolio shape; anything else gets the normal shape.
   * @param {number} iteration - Offset added to the base numeric user ID.
   */
  _generateUser(type, iteration) {
    const userId = 1000000 + iteration; // Numeric ID to match Schema
    const isSpeculator = (type === 'speculator');

    return {
      id: String(userId),
      type: type || 'all',
      portfolio: {
        today: isSpeculator ? this._genSpecPortfolio(userId) : this._genNormalPortfolio(userId),
        yesterday: isSpeculator ? this._genSpecPortfolio(userId) : this._genNormalPortfolio(userId)
      },
      history: {
        today: { PublicHistoryPositions: this._genHistoryTrades(userId) },
        yesterday: { PublicHistoryPositions: this._genHistoryTrades(userId) }
      }
    };
  }

  // Schema 2: Speculator User Portfolio
  // Equity is derived from Invested and NetProfit so the three stay consistent.
  _genSpecPortfolio(userId) {
    const invested = this.rng.range(5000, 50000);
    const netProfit = this.rng.range(-20, 30);
    const equity = invested * (1 + (netProfit / 100));

    return {
      AverageOpen: this.rng.range(100, 3000),
      Equity: parseFloat(equity.toFixed(4)),
      Invested: parseFloat(invested.toFixed(4)),
      NetProfit: parseFloat(netProfit.toFixed(4)),
      PublicPositions: Array.from({ length: this.rng.range(2, 10) }, (_, i) => {
        const openRate = this.rng.range(50, 500);
        const isBuy = this.rng.bool();
        return {
          Amount: parseFloat(this.rng.range(100, 1000).toFixed(4)),
          CID: userId,
          // CurrentRate drifts up to ±5% around OpenRate
          CurrentRate: parseFloat((openRate * (1 + (this.rng.next() - 0.5) * 0.1)).toFixed(2)),
          InstrumentID: 100 + (i % 20),
          IsBuy: isBuy,
          IsTslEnabled: this.rng.bool(0.1),
          Leverage: this.rng.choice([1, 2, 5, 10, 20]),
          MirrorID: 0,
          NetProfit: parseFloat(this.rng.range(-50, 50).toFixed(4)),
          OpenDateTime: '2024-12-01T10:00:00Z',
          OpenRate: parseFloat(openRate.toFixed(2)),
          ParentPositionID: 0,
          PipDifference: this.rng.range(-100, 100),
          PositionID: 3000000000 + i,
          StopLossRate: 0.01,
          TakeProfitRate: 0
        };
      })
    };
  }

  // Schema 1: Normal User Portfolio
  // NOTE(review): userId parameter is currently unused here — confirm.
  _genNormalPortfolio(userId) {
    const positions = Array.from({ length: this.rng.range(3, 12) }, (_, i) => ({
      Direction: "Buy",
      InstrumentID: 100 + (i % 20),
      Invested: parseFloat(this.rng.range(5, 20).toFixed(4)), // Percent
      NetProfit: parseFloat(this.rng.range(-30, 40).toFixed(4)),
      Value: parseFloat(this.rng.range(5, 25).toFixed(4)) // Percent (Invested + PnL approx)
    }));

    // [CRITICAL] DataExtractor.getPortfolioDailyPnl uses AggregatedPositionsByInstrumentTypeID
    // We must generate this aggregation or PnL calcs return 0.
    const aggByType = positions.map(p => ({
      Direction: p.Direction,
      InstrumentTypeID: 5, // Stock
      Invested: p.Invested,
      NetProfit: p.NetProfit,
      Value: p.Value
    }));

    return {
      AggregatedMirrors: [],
      AggregatedPositions: positions,
      AggregatedPositionsByInstrumentTypeID: aggByType, // Required for PnL
      AggregatedPositionsByStockIndustryID: [],
      CreditByRealizedEquity: 0,
      CreditByUnrealizedEquity: 0
    };
  }

  // Schema 3: Trade History — closed positions with fixed open/close timestamps
  // and randomized profit/leverage/close-reason fields.
  _genHistoryTrades(userId) {
    return Array.from({ length: this.rng.range(5, 30) }, (_, i) => ({
      PositionID: 4000000000 + i,
      CID: userId,
      OpenDateTime: '2024-12-01T10:00:00Z',
      OpenRate: 100.50,
      InstrumentID: 100 + (i % 20),
      IsBuy: this.rng.bool(),
      MirrorID: 0,
      ParentPositionID: 0,
      CloseDateTime: '2024-12-02T10:00:00Z',
      CloseRate: 110.20,
      CloseReason: this.rng.choice([1, 5, 0]), // 1=SL, 5=TP, 0=Manual
      ParentCID: userId,
      NetProfit: parseFloat(this.rng.range(-50, 50).toFixed(4)),
      Leverage: this.rng.choice([1, 2, 5])
    }));
  }

  // Schema 5: Insights
  // [CRITICAL FIX] Must return object { fetchedAt, insights: [] }, not just array.
  _generateInsights(dateStr) {
    const insightsArray = Array.from({ length: 50 }, (_, i) => ({
      instrumentId: 100 + i,
      total: this.rng.range(100, 50000), // Total owners
      percentage: this.rng.next() * 0.05, // % of brokerage
      growth: parseFloat((this.rng.next() * 10 - 5).toFixed(4)),
      buy: this.rng.range(20, 95),
      sell: 0, // Will calculate below
      prevBuy: this.rng.range(20, 95),
      prevSell: 0
    }));

    // Fix sell/prevSell math so buy+sell always sums to 100
    insightsArray.forEach(i => {
      i.sell = 100 - i.buy;
      i.prevSell = 100 - i.prevBuy;
    });

    return {
      fetchedAt: `${dateStr}T12:00:00Z`,
      insights: insightsArray
    };
  }

  // Schema 4: Social Post Data
  // Returns Map: { "postId": { ... } }
  _generateSocial(dateStr) {
    const posts = {};
    const count = this.rng.range(5, 20);

    for(let i=0; i<count; i++) {
      // Random suffix keeps IDs unique within a batch
      // (presumably rng.next() is a [0,1) float — verify against SeededRandom)
      const id = `post_${i}_${this.rng.next().toString(36).substring(7)}`;
      const ticker = this.rng.choice(FAKE_TICKERS);

      posts[id] = {
        commentCount: this.rng.range(0, 50),
        createdAt: `${dateStr}T09:00:00Z`,
        fetchedAt: `${dateStr}T10:00:00Z`,
        fullText: `$${ticker} is looking bullish today!`,
        language: 'en-gb',
        likeCount: this.rng.range(0, 200),
        postOwnerId: String(this.rng.range(100000, 999999)),
        sentiment: {
          overallSentiment: this.rng.choice(['Bullish', 'Bearish', 'Neutral']),
          topics: [this.rng.choice(FAKE_TOPICS)]
        },
        textSnippet: `$${ticker} is looking...`,
        tickers: [ticker]
      };
    }
    return posts;
  }

  // Maps instrument IDs 100..149 onto the ticker vocabulary, cycling as needed.
  _generateMappings() {
    const map = {};
    // Map ID 100-150 to FAKE_TICKERS deterministically
    for(let i=0; i<50; i++) {
      // cycle through tickers
      map[100+i] = FAKE_TICKERS[i % FAKE_TICKERS.length];
    }
    return map;
  }

  // Same cycling scheme as _generateMappings, but onto sector names.
  _generateSectorMappings() {
    const map = {};
    for(let i=0; i<50; i++) {
      map[100+i] = FAKE_SECTORS[i % FAKE_SECTORS.length];
    }
    return map;
  }
}

module.exports = Fabricator;
|