bulltrackers-module 1.0.732 → 1.0.733
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/orchestrator/index.js +19 -17
- package/index.js +8 -29
- package/package.json +1 -1
- package/functions/computation-system/WorkflowOrchestrator.js +0 -213
- package/functions/computation-system/config/monitoring_config.js +0 -31
- package/functions/computation-system/config/validation_overrides.js +0 -10
- package/functions/computation-system/context/ContextFactory.js +0 -143
- package/functions/computation-system/context/ManifestBuilder.js +0 -379
- package/functions/computation-system/data/AvailabilityChecker.js +0 -236
- package/functions/computation-system/data/CachedDataLoader.js +0 -325
- package/functions/computation-system/data/DependencyFetcher.js +0 -455
- package/functions/computation-system/executors/MetaExecutor.js +0 -279
- package/functions/computation-system/executors/PriceBatchExecutor.js +0 -108
- package/functions/computation-system/executors/StandardExecutor.js +0 -465
- package/functions/computation-system/helpers/computation_dispatcher.js +0 -750
- package/functions/computation-system/helpers/computation_worker.js +0 -375
- package/functions/computation-system/helpers/monitor.js +0 -64
- package/functions/computation-system/helpers/on_demand_helpers.js +0 -154
- package/functions/computation-system/layers/extractors.js +0 -1097
- package/functions/computation-system/layers/index.js +0 -40
- package/functions/computation-system/layers/mathematics.js +0 -522
- package/functions/computation-system/layers/profiling.js +0 -537
- package/functions/computation-system/layers/validators.js +0 -170
- package/functions/computation-system/legacy/AvailabilityCheckerOld.js +0 -388
- package/functions/computation-system/legacy/CachedDataLoaderOld.js +0 -357
- package/functions/computation-system/legacy/DependencyFetcherOld.js +0 -478
- package/functions/computation-system/legacy/MetaExecutorold.js +0 -364
- package/functions/computation-system/legacy/StandardExecutorold.js +0 -476
- package/functions/computation-system/legacy/computation_dispatcherold.js +0 -944
- package/functions/computation-system/logger/logger.js +0 -297
- package/functions/computation-system/persistence/ContractValidator.js +0 -81
- package/functions/computation-system/persistence/FirestoreUtils.js +0 -56
- package/functions/computation-system/persistence/ResultCommitter.js +0 -283
- package/functions/computation-system/persistence/ResultsValidator.js +0 -130
- package/functions/computation-system/persistence/RunRecorder.js +0 -142
- package/functions/computation-system/persistence/StatusRepository.js +0 -52
- package/functions/computation-system/reporter_epoch.js +0 -6
- package/functions/computation-system/scripts/UpdateContracts.js +0 -128
- package/functions/computation-system/services/SnapshotService.js +0 -148
- package/functions/computation-system/simulation/Fabricator.js +0 -285
- package/functions/computation-system/simulation/SeededRandom.js +0 -41
- package/functions/computation-system/simulation/SimRunner.js +0 -51
- package/functions/computation-system/system_epoch.js +0 -2
- package/functions/computation-system/tools/BuildReporter.js +0 -531
- package/functions/computation-system/tools/ContractDiscoverer.js +0 -144
- package/functions/computation-system/tools/DeploymentValidator.js +0 -536
- package/functions/computation-system/tools/FinalSweepReporter.js +0 -322
- package/functions/computation-system/topology/HashManager.js +0 -55
- package/functions/computation-system/topology/ManifestLoader.js +0 -47
- package/functions/computation-system/utils/data_loader.js +0 -675
- package/functions/computation-system/utils/schema_capture.js +0 -121
- package/functions/computation-system/utils/utils.js +0 -188
package/functions/computation-system/scripts/UpdateContracts.js
@@ -1,128 +0,0 @@
-/**
- * @fileoverview Discovery Script: UpdateContracts.js
- * Runs offline simulations to "learn" the behavioral contracts of all calculations.
- * Saves these contracts to Firestore for the Runtime Enforcer (ResultCommitter) to use.
- * * USAGE:
- * node computation-system/scripts/UpdateContracts.js [--calc=CalcName]
- */
-
-const path = require('path');
-const admin = require('firebase-admin');
-
-// Initialize Firebase (Standard Env Check)
-if (!admin.apps.length) {
-  if (process.env.GOOGLE_APPLICATION_CREDENTIALS) {
-    admin.initializeApp();
-  } else {
-    // Fallback for local dev if key path isn't set in env
-    console.warn("⚠️ No GOOGLE_APPLICATION_CREDENTIALS. Attempting default init...");
-    admin.initializeApp();
-  }
-}
-
-const db = admin.firestore();
-const { StructuredLogger } = require('../logger/logger');
-const { getManifest } = require('../topology/ManifestLoader');
-const ContractDiscoverer = require('../tools/ContractDiscoverer');
-
-// Load Calculations Package
-let calculationPackage;
-try {
-  // Adjust path if necessary for your local monorepo structure
-  calculationPackage = require('aiden-shared-calculations-unified');
-} catch (e) {
-  console.error("FATAL: Could not load 'aiden-shared-calculations-unified'. Ensure you are in the correct directory or npm link is active.");
-  process.exit(1);
-}
-
-const CONTRACTS_COLLECTION = 'system_contracts';
-
-async function main() {
-  const logger = new StructuredLogger({ enableConsole: true });
-
-  // 1. Setup Dependencies
-  // The ManifestLoader and Discoverer need a mock dependency object
-  const mockDeps = {
-    db,
-    logger,
-    // Mock specific utils if needed by your calculations during instantiation
-    calculationUtils: {
-      loadInstrumentMappings: async () => ({ instrumentToTicker: {}, tickerToInstrument: {} })
-    }
-  };
-
-  console.log("🚀 Starting Contract Discovery...");
-
-  // 2. Load Manifest
-  const calculations = calculationPackage.calculations;
-  const manifest = getManifest([], calculations, mockDeps);
-  const manifestMap = new Map(manifest.map(c => [c.name, c]));
-
-  console.log(`ℹ️ Loaded manifest with ${manifest.length} calculations.`);
-
-  // 3. Filter Target (Optional CLI Arg)
-  const targetArg = process.argv.find(a => a.startsWith('--calc='));
-  const targetName = targetArg ? targetArg.split('=')[1] : null;
-
-  let calcsToProcess = manifest;
-  if (targetName) {
-    calcsToProcess = manifest.filter(c => c.name.toLowerCase() === targetName.toLowerCase());
-    if (calcsToProcess.length === 0) {
-      console.error(`❌ Calculation '${targetName}' not found.`);
-      process.exit(1);
-    }
-  }
-
-  // 4. Run Discovery Loop
-  let successCount = 0;
-  let skipCount = 0;
-
-  for (const calc of calcsToProcess) {
-    // Skip computations that don't produce data (like aggregators without schema)
-    if (!calc.class.getSchema && !calc.dependencies) {
-      console.log(`⏭️ Skipping ${calc.name} (No schema/outputs to analyze).`);
-      skipCount++;
-      continue;
-    }
-
-    try {
-      // A. Discover Contract via Simulation
-      // We run 50 iterations to get a statistically significant sample
-      const contract = await ContractDiscoverer.generateContract(calc, manifestMap, 50);
-
-      if (contract) {
-        // B. Enrich with Metadata
-        // FIX: Create a NEW object to satisfy Type Checking (avoid mutating the inferred shape)
-        const finalContract = {
-          ...contract,
-          lastUpdated: new Date(),
-          generatedBy: 'UpdateContracts.js',
-          version: '1.0'
-        };
-
-        // C. Save to Firestore
-        // Use finalContract instead of contract
-        await db.collection(CONTRACTS_COLLECTION).doc(calc.name).set(finalContract);
-        console.log(`✅ [SAVED] Contract for ${calc.name}`);
-        successCount++;
-      } else {
-        console.warn(`⚠️ [EMPTY] No contract generated for ${calc.name} (Insufficient data/samples).`);
-        skipCount++;
-      }
-
-    } catch (err) {
-      console.error(`❌ [ERROR] Failed to generate contract for ${calc.name}:`, err.message);
    }
-  }
-
-  console.log("\n============================================");
-  console.log(`🎉 Discovery Complete.`);
-  console.log(`   Updated: ${successCount}`);
-  console.log(`   Skipped: ${skipCount}`);
-  console.log("============================================");
-}
-
-main().catch(err => {
-  console.error("FATAL SCRIPT ERROR:", err);
-  process.exit(1);
-});
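
For reference, each contract the script saved lands in the system_contracts collection under the calculation's name, enriched with lastUpdated, generatedBy, and version. A minimal sketch of how a consumer such as the ResultCommitter mentioned in the file header might read one back, assuming firebase-admin is initialized as in the script above; 'ExamplePnlCalc' is a hypothetical calculation name:

    // Sketch only; 'ExamplePnlCalc' is a hypothetical calculation name.
    const admin = require('firebase-admin');
    if (!admin.apps.length) admin.initializeApp();
    const db = admin.firestore();

    async function readContract(calcName) {
      const snap = await db.collection('system_contracts').doc(calcName).get();
      if (!snap.exists) return null;
      // Fields added by UpdateContracts.js: lastUpdated, generatedBy, version,
      // plus whatever ContractDiscoverer.generateContract produced.
      return snap.data();
    }

    readContract('ExamplePnlCalc').then(contract => console.log(contract));
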
package/functions/computation-system/services/SnapshotService.js
@@ -1,148 +0,0 @@
-/**
- * @fileoverview Snapshot Service.
- * Creates a complete "Frozen State" of the day's data in GCS.
- * UPDATED: Now snapshots ALL data types (Ratings, Verification, Metadata, etc).
- */
-const zlib = require('zlib');
-const { Storage } = require('@google-cloud/storage');
-const storage = new Storage();
-const dataLoader = require('../utils/data_loader');
-
-async function generateDailySnapshots(dateStr, config, deps) {
-  const { logger } = deps;
-  const bucketName = config.gcsBucketName || 'bulltrackers';
-  const bucket = storage.bucket(bucketName);
-
-  // Quick check: if all main snapshots exist, skip entirely
-  const mainFiles = [
-    `${dateStr}/snapshots/portfolios.json.gz`,
-    `${dateStr}/snapshots/social.json.gz`,
-    `${dateStr}/snapshots/history.jsonl.gz`,
-    `${dateStr}/snapshots/ratings.json.gz`,
-    `${dateStr}/snapshots/rankings.json.gz`
-  ];
-
-  if (!config.forceSnapshot) {
-    const existenceChecks = await Promise.all(mainFiles.map(path => bucket.file(path).exists()));
-    const allExist = existenceChecks.every(([exists]) => exists);
-
-    if (allExist) {
-      logger.log('INFO', `[SnapshotService] ⏭️ All snapshots already exist for ${dateStr}, skipping`);
-      return { status: 'SKIPPED', date: dateStr, reason: 'all_exist' };
-    }
-  }
-
-  logger.log('INFO', `[SnapshotService] 📸 Starting Full System Snapshot for ${dateStr}`);
-
-  // parallelize independent fetches
-  await Promise.all([
-    snapshotPortfolios(dateStr, bucket, config, deps),   // Heavy
-    snapshotSocial(dateStr, bucket, config, deps),       // Heavy
-    snapshotHistory(dateStr, bucket, config, deps),      // Heavy (JSONL)
-    snapshotRatings(dateStr, bucket, config, deps),      // Sharded
-    snapshotVerification(dateStr, bucket, config, deps), // Collection Group
-    snapshotRankings(dateStr, bucket, config, deps),     // Single Doc
-    snapshotMetadata(dateStr, bucket, config, deps)      // Small Docs (Insights, Alerts, Watchlist)
-  ]);
-
-  logger.log('INFO', `[SnapshotService] ✅ Full System Snapshot Complete for ${dateStr}`);
-  return { status: 'OK', date: dateStr };
-}
-
-// --- HEAVY DATA HANDLERS (As before) ---
-
-async function snapshotPortfolios(dateStr, bucket, config, deps) {
-  const file = bucket.file(`${dateStr}/snapshots/portfolios.json.gz`);
-  if ((await file.exists())[0] && !config.forceSnapshot) return;
-
-  // Fetch ALL types (PI + SignedIn) to ensure complete context
-  const refs = await dataLoader.getPortfolioPartRefs(config, deps, dateStr, ['ALL']);
-  const data = await dataLoader.loadDataByRefs(config, deps, refs);
-  if (Object.keys(data).length > 0) {
-    await file.save(JSON.stringify(data), { gzip: true });
-    deps.logger.log('INFO', `[Snapshot] Saved ${Object.keys(data).length} portfolios.`);
-  }
-}
-
-async function snapshotSocial(dateStr, bucket, config, deps) {
-  const file = bucket.file(`${dateStr}/snapshots/social.json.gz`);
-  if ((await file.exists())[0] && !config.forceSnapshot) return;
-
-  const data = await dataLoader.loadDailySocialPostInsights(config, deps, dateStr);
-  await file.save(JSON.stringify(data), { gzip: true });
-  deps.logger.log('INFO', `[Snapshot] Saved Social Data.`);
-}
-
-async function snapshotHistory(dateStr, bucket, config, deps) {
-  const file = bucket.file(`${dateStr}/snapshots/history.jsonl.gz`);
-  if ((await file.exists())[0] && !config.forceSnapshot) return;
-
-  const refs = await dataLoader.getHistoryPartRefs(config, deps, dateStr, ['ALL']);
-  const gcsStream = file.createWriteStream({ gzip: true });
-
-  // Stream line-by-line
-  const BATCH_SIZE = 10;
-  for (let i = 0; i < refs.length; i += BATCH_SIZE) {
-    const batchRefs = refs.slice(i, i + BATCH_SIZE);
-    const dataMap = await dataLoader.loadDataByRefs(config, deps, batchRefs);
-    let chunk = '';
-    for (const [uid, h] of Object.entries(dataMap)) chunk += JSON.stringify({ [uid]: h }) + '\n';
-    if (!gcsStream.write(chunk)) await new Promise(r => gcsStream.once('drain', r));
-  }
-  gcsStream.end();
-  await new Promise((resolve, reject) => { gcsStream.on('finish', resolve); gcsStream.on('error', reject); });
-  deps.logger.log('INFO', `[Snapshot] Saved History (JSONL).`);
-}
-
-// --- NEW HANDLERS (The "All Data" Expansion) ---
-
-async function snapshotRatings(dateStr, bucket, config, deps) {
-  const file = bucket.file(`${dateStr}/snapshots/ratings.json.gz`);
-  if ((await file.exists())[0] && !config.forceSnapshot) return;
-
-  // Load RAW ratings from Firestore shards
-  const data = await dataLoader.loadPIRatings(config, deps, dateStr);
-  await file.save(JSON.stringify(data), { gzip: true });
-  deps.logger.log('INFO', `[Snapshot] Saved Ratings for ${Object.keys(data).length} PIs.`);
-}
-
-async function snapshotVerification(dateStr, bucket, config, deps) {
-  const file = bucket.file(`${dateStr}/snapshots/verification.json.gz`);
-  if ((await file.exists())[0] && !config.forceSnapshot) return;
-
-  // Load global verification profiles (expensive scan)
-  const data = await dataLoader.loadVerificationProfiles(config, deps);
-  await file.save(JSON.stringify(data), { gzip: true });
-  deps.logger.log('INFO', `[Snapshot] Saved ${Object.keys(data).length} Verification Profiles.`);
-}
-
-async function snapshotRankings(dateStr, bucket, config, deps) {
-  const file = bucket.file(`${dateStr}/snapshots/rankings.json.gz`);
-  if ((await file.exists())[0] && !config.forceSnapshot) return;
-
-  const data = await dataLoader.loadPopularInvestorRankings(config, deps, dateStr);
-  if (data) await file.save(JSON.stringify(data), { gzip: true });
-  deps.logger.log('INFO', `[Snapshot] Saved Rankings.`);
-}
-
-async function snapshotMetadata(dateStr, bucket, config, deps) {
-  // Bundle small files into one "metadata.json" or keep separate. Separate is safer for loaders.
-  const ops = [
-    { name: 'insights', fn: () => dataLoader.loadDailyInsights(config, deps, dateStr) },
-    { name: 'page_views', fn: () => dataLoader.loadPIPageViews(config, deps, dateStr) },
-    { name: 'watchlist', fn: () => dataLoader.loadWatchlistMembership(config, deps, dateStr) },
-    { name: 'alerts', fn: () => dataLoader.loadPIAlertHistory(config, deps, dateStr) },
-    { name: 'master_list', fn: () => dataLoader.loadPopularInvestorMasterList(config, deps) } // Not date bound usually, but good to snapshot state
-  ];
-
-  for (const op of ops) {
-    const file = bucket.file(`${dateStr}/snapshots/${op.name}.json.gz`);
-    if ((await file.exists())[0] && !config.forceSnapshot) continue;
-
-    const data = await op.fn();
-    if (data) await file.save(JSON.stringify(data), { gzip: true });
-  }
-  deps.logger.log('INFO', `[Snapshot] Saved Metadata files.`);
-}
-
-module.exports = { generateDailySnapshots };
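
The removed service exposed a single entry point, generateDailySnapshots(dateStr, config, deps), writing gzipped objects under <bucket>/<dateStr>/snapshots/ (portfolios.json.gz, social.json.gz, history.jsonl.gz, ratings.json.gz, rankings.json.gz, and the metadata files). A minimal sketch of the call shape against 1.0.732, where the module still exists; the logger below is a stand-in, and in production deps also carries whatever handles data_loader needs (Firestore, etc.):

    // Sketch only, against 1.0.732. deps is reduced to a console logger here;
    // the real deps object also carries the data_loader dependencies.
    const { generateDailySnapshots } = require('./functions/computation-system/services/SnapshotService');

    const config = { gcsBucketName: 'bulltrackers', forceSnapshot: false };
    const deps = { logger: { log: (level, msg) => console.log(`[${level}] ${msg}`) } };

    generateDailySnapshots('2025-01-01', config, deps)
      .then(res => console.log(res))    // e.g. { status: 'OK', date: '2025-01-01' } or a SKIPPED result
      .catch(err => console.error(err));
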
package/functions/computation-system/simulation/Fabricator.js
@@ -1,285 +0,0 @@
-/**
- * @fileoverview Generates deterministic mock contexts for Simulation Hashing.
- * STRICTLY ALIGNED WITH SCHEMA.MD (Production V2 Schemas).
- * UPGRADED: Supports Iteration (Seed Rotation) and Volume Scaling for Arrays.
- */
-const SeededRandom = require('./SeededRandom');
-const { ContextFactory } = require('../context/ContextFactory');
-
-const FAKE_SECTORS = ['Technology', 'Healthcare', 'Financials', 'Energy', 'Crypto', 'Consumer Discretionary'];
-const FAKE_TICKERS = ['AAPL', 'GOOGL', 'MSFT', 'TSLA', 'AMZN', 'BTC', 'ETH', 'NVDA', 'META', 'AMD'];
-const FAKE_TOPICS = ['AI', 'Earnings', 'Fed', 'Crypto', 'Macro'];
-
-class Fabricator {
-  constructor(calcName) {
-    this.baseSeed = calcName;
-    // Primary RNG for high-level structure
-    this.rng = new SeededRandom(calcName);
-  }
-
-  /**
-   * Generates a context for a specific user iteration.
-   * @param {number} iteration - The index of the user in the batch (0, 1, 2...).
-   */
-  async generateContext(calcManifest, dependenciesManifest, iteration = 0) {
-    // [CRITICAL] Rotate the RNG state based on iteration so User 1 != User 2
-    this.rng = new SeededRandom(`${this.baseSeed}_ITER_${iteration}`);
-
-    const FIXED_DATE = '2025-01-01'; // Fixed simulation date
-
-    // 1. Generate Root Data
-    const user = this._generateUser(calcManifest.userType, iteration);
-    const insights = this._generateInsights(FIXED_DATE);
-
-    // 2. Generate Mock Dependencies (The "Schema Faking" Part)
-    const computed = {};
-    if (calcManifest.dependencies) {
-      for (const depName of calcManifest.dependencies) {
-        const depEntry = dependenciesManifest.get(depName);
-        if (depEntry && depEntry.class && depEntry.class.getSchema) {
-          const schema = depEntry.class.getSchema();
-          // [VOLUME UPGRADE] Dependencies usually represent aggregate data.
-          computed[depName] = this._fakeFromSchema(schema, true);
-        } else {
-          computed[depName] = {};
-        }
-      }
-    }
-
-    return ContextFactory.buildPerUserContext({
-      userId: user.id,
-      userType: user.type,
-      dateStr: FIXED_DATE,
-      todayPortfolio: user.portfolio.today,
-      yesterdayPortfolio: user.portfolio.yesterday,
-      todayHistory: user.history.today,
-      yesterdayHistory: user.history.yesterday,
-      metadata: calcManifest,
-      mappings: {
-        instrumentToTicker: this._generateMappings(),
-        instrumentToSector: this._generateSectorMappings()
-      },
-      insights: { today: insights },
-      socialData: { today: this._generateSocial(FIXED_DATE) },
-      computedDependencies: computed,
-      config: {},
-      deps: { logger: { log: () => {} } }
-    });
-  }
-
-  // --- Schema Faker Logic (Unchanged) ---
-  _fakeFromSchema(schema, isHighVolume = false) {
-    if (!schema) return {};
-    if (schema.type === 'object') {
-      const obj = {};
-      if (schema.properties) {
-        for (const [key, propSchema] of Object.entries(schema.properties)) {
-          obj[key] = this._fakeFromSchema(propSchema, isHighVolume);
-        }
-      }
-      if (schema.patternProperties) {
-        const count = isHighVolume ? this.rng.range(20, 50) : 3;
-        const propSchema = Object.values(schema.patternProperties)[0];
-        for (let i = 0; i < count; i++) {
-          // Use deterministic ticker keys for stability
-          const key = `${this.rng.choice(FAKE_TICKERS)}`;
-          // Note: In real scenarios tickers are unique, so we might need a suffix if count > tickers.length
-          const safeKey = count > FAKE_TICKERS.length ? `${key}_${i}` : key;
-          obj[safeKey] = this._fakeFromSchema(propSchema, isHighVolume);
-        }
-      }
-      return obj;
-    }
-    if (schema.type === 'array') {
-      const min = isHighVolume ? 50 : 1;
-      const max = isHighVolume ? 150 : 5;
-      const len = this.rng.range(min, max);
-      return Array.from({ length: len }, () => this._fakeFromSchema(schema.items, isHighVolume));
-    }
-    if (schema.type === 'number') return parseFloat(this.rng.next().toFixed(4)) * 100;
-    if (schema.type === 'string') return "SIMULATED_STRING";
-    if (schema.type === 'boolean') return this.rng.bool();
-    return null;
-  }
-
-  // --- PROD ALIGNED GENERATORS ---
-
-  _generateUser(type, iteration) {
-    const userId = 1000000 + iteration; // Numeric ID to match Schema
-    const isSpeculator = (type === 'speculator');
-
-    return {
-      id: String(userId),
-      type: type || 'all',
-      portfolio: {
-        today: isSpeculator ? this._genSpecPortfolio(userId) : this._genNormalPortfolio(userId),
-        yesterday: isSpeculator ? this._genSpecPortfolio(userId) : this._genNormalPortfolio(userId)
-      },
-      history: {
-        today: { PublicHistoryPositions: this._genHistoryTrades(userId) },
-        yesterday: { PublicHistoryPositions: this._genHistoryTrades(userId) }
-      }
-    };
-  }
-
-  // Schema 2: Speculator User Portfolio
-  _genSpecPortfolio(userId) {
-    const invested = this.rng.range(5000, 50000);
-    const netProfit = this.rng.range(-20, 30);
-    const equity = invested * (1 + (netProfit / 100));
-
-    return {
-      AverageOpen: this.rng.range(100, 3000),
-      Equity: parseFloat(equity.toFixed(4)),
-      Invested: parseFloat(invested.toFixed(4)),
-      NetProfit: parseFloat(netProfit.toFixed(4)),
-      PublicPositions: Array.from({ length: this.rng.range(2, 10) }, (_, i) => {
-        const openRate = this.rng.range(50, 500);
-        const isBuy = this.rng.bool();
-        return {
-          Amount: parseFloat(this.rng.range(100, 1000).toFixed(4)),
-          CID: userId,
-          CurrentRate: parseFloat((openRate * (1 + (this.rng.next() - 0.5) * 0.1)).toFixed(2)),
-          InstrumentID: 100 + (i % 20),
-          IsBuy: isBuy,
-          IsTslEnabled: this.rng.bool(0.1),
-          Leverage: this.rng.choice([1, 2, 5, 10, 20]),
-          MirrorID: 0,
-          NetProfit: parseFloat(this.rng.range(-50, 50).toFixed(4)),
-          OpenDateTime: '2024-12-01T10:00:00Z',
-          OpenRate: parseFloat(openRate.toFixed(2)),
-          ParentPositionID: 0,
-          PipDifference: this.rng.range(-100, 100),
-          PositionID: 3000000000 + i,
-          StopLossRate: 0.01,
-          TakeProfitRate: 0
-        };
-      })
-    };
-  }
-
-  // Schema 1: Normal User Portfolio
-  _genNormalPortfolio(userId) {
-    const positions = Array.from({ length: this.rng.range(3, 12) }, (_, i) => ({
-      Direction: "Buy",
-      InstrumentID: 100 + (i % 20),
-      Invested: parseFloat(this.rng.range(5, 20).toFixed(4)), // Percent
-      NetProfit: parseFloat(this.rng.range(-30, 40).toFixed(4)),
-      Value: parseFloat(this.rng.range(5, 25).toFixed(4)) // Percent (Invested + PnL approx)
-    }));
-
-    // [CRITICAL] DataExtractor.getPortfolioDailyPnl uses AggregatedPositionsByInstrumentTypeID
-    // We must generate this aggregation or PnL calcs return 0.
-    const aggByType = positions.map(p => ({
-      Direction: p.Direction,
-      InstrumentTypeID: 5, // Stock
-      Invested: p.Invested,
-      NetProfit: p.NetProfit,
-      Value: p.Value
-    }));
-
-    return {
-      AggregatedMirrors: [],
-      AggregatedPositions: positions,
-      AggregatedPositionsByInstrumentTypeID: aggByType, // Required for PnL
-      AggregatedPositionsByStockIndustryID: [],
-      CreditByRealizedEquity: 0,
-      CreditByUnrealizedEquity: 0
-    };
-  }
-
-  // Schema 3: Trade History
-  _genHistoryTrades(userId) {
-    return Array.from({ length: this.rng.range(5, 30) }, (_, i) => ({
-      PositionID: 4000000000 + i,
-      CID: userId,
-      OpenDateTime: '2024-12-01T10:00:00Z',
-      OpenRate: 100.50,
-      InstrumentID: 100 + (i % 20),
-      IsBuy: this.rng.bool(),
-      MirrorID: 0,
-      ParentPositionID: 0,
-      CloseDateTime: '2024-12-02T10:00:00Z',
-      CloseRate: 110.20,
-      CloseReason: this.rng.choice([1, 5, 0]), // 1=SL, 5=TP, 0=Manual
-      ParentCID: userId,
-      NetProfit: parseFloat(this.rng.range(-50, 50).toFixed(4)),
-      Leverage: this.rng.choice([1, 2, 5])
-    }));
-  }
-
-  // Schema 5: Insights
-  // [CRITICAL FIX] Must return object { fetchedAt, insights: [] }, not just array.
-  _generateInsights(dateStr) {
-    const insightsArray = Array.from({ length: 50 }, (_, i) => ({
-      instrumentId: 100 + i,
-      total: this.rng.range(100, 50000), // Total owners
-      percentage: this.rng.next() * 0.05, // % of brokerage
-      growth: parseFloat((this.rng.next() * 10 - 5).toFixed(4)),
-      buy: this.rng.range(20, 95),
-      sell: 0, // Will calculate below
-      prevBuy: this.rng.range(20, 95),
-      prevSell: 0
-    }));
-
-    // Fix sell/prevSell math
-    insightsArray.forEach(i => {
-      i.sell = 100 - i.buy;
-      i.prevSell = 100 - i.prevBuy;
-    });
-
-    return {
-      fetchedAt: `${dateStr}T12:00:00Z`,
-      insights: insightsArray
-    };
-  }
-
-  // Schema 4: Social Post Data
-  // Returns Map: { "postId": { ... } }
-  _generateSocial(dateStr) {
-    const posts = {};
-    const count = this.rng.range(5, 20);
-
-    for(let i=0; i<count; i++) {
-      const id = `post_${i}_${this.rng.next().toString(36).substring(7)}`;
-      const ticker = this.rng.choice(FAKE_TICKERS);
-
-      posts[id] = {
-        commentCount: this.rng.range(0, 50),
-        createdAt: `${dateStr}T09:00:00Z`,
-        fetchedAt: `${dateStr}T10:00:00Z`,
-        fullText: `$${ticker} is looking bullish today!`,
-        language: 'en-gb',
-        likeCount: this.rng.range(0, 200),
-        postOwnerId: String(this.rng.range(100000, 999999)),
-        sentiment: {
-          overallSentiment: this.rng.choice(['Bullish', 'Bearish', 'Neutral']),
-          topics: [this.rng.choice(FAKE_TOPICS)]
-        },
-        textSnippet: `$${ticker} is looking...`,
-        tickers: [ticker]
-      };
-    }
-    return posts;
-  }
-
-  _generateMappings() {
-    const map = {};
-    // Map ID 100-150 to FAKE_TICKERS deterministically
-    for(let i=0; i<50; i++) {
-      // cycle through tickers
-      map[100+i] = FAKE_TICKERS[i % FAKE_TICKERS.length];
-    }
-    return map;
-  }
-
-  _generateSectorMappings() {
-    const map = {};
-    for(let i=0; i<50; i++) {
-      map[100+i] = FAKE_SECTORS[i % FAKE_SECTORS.length];
-    }
-    return map;
-  }
-}
-
-module.exports = Fabricator;
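
Fabricator's determinism is the point: the same calculation name and iteration index always reproduce the same mock user, portfolio, and dependency data, so the behavioral hash computed downstream is stable. A minimal sketch of driving it directly against 1.0.732; the manifest entry below is a hypothetical stand-in for a real entry from the calculations manifest:

    // Sketch only, against 1.0.732. 'ExamplePnlCalc' and its manifest entry are hypothetical.
    const Fabricator = require('./functions/computation-system/simulation/Fabricator');

    const calcManifest = { name: 'ExamplePnlCalc', userType: 'speculator', dependencies: [] };

    (async () => {
      const fab = new Fabricator(calcManifest.name);
      // Iteration 0 and 1 simulate two different users; re-running either iteration
      // with the same calculation name regenerates the exact same context.
      const ctx0 = await fab.generateContext(calcManifest, new Map(), 0);
      const ctx1 = await fab.generateContext(calcManifest, new Map(), 1);
      console.log(Object.keys(ctx0), Object.keys(ctx1)); // inspect the fabricated context shape
    })();
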
package/functions/computation-system/simulation/SeededRandom.js
@@ -1,41 +0,0 @@
-/**
- * @fileoverview Deterministic Pseudo-Random Number Generator (LCG).
- * Ensures that for a given seed, the sequence of numbers is identical across runs.
- */
-class SeededRandom {
-  constructor(seedString) {
-    this.state = this._stringToSeed(seedString);
-  }
-
-  _stringToSeed(str) {
-    let h = 2166136261 >>> 0;
-    for (let i = 0; i < str.length; i++) {
-      h = Math.imul(h ^ str.charCodeAt(i), 16777619);
-    }
-    return h >>> 0;
-  }
-
-  /** Returns a float between 0 and 1 */
-  next() {
-    this.state = (Math.imul(48271, this.state) % 2147483647);
-    return (this.state - 1) / 2147483646;
-  }
-
-  /** Returns an integer between min and max (inclusive) */
-  range(min, max) {
-    return Math.floor(this.next() * (max - min + 1)) + min;
-  }
-
-  /** Returns a random element from an array */
-  choice(arr) {
-    if (!arr || arr.length === 0) return null;
-    return arr[this.range(0, arr.length - 1)];
-  }
-
-  /** Returns a boolean based on probability */
-  bool(probability = 0.5) {
-    return this.next() < probability;
-  }
-}
-
-module.exports = SeededRandom;
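
SeededRandom hashes the seed string with an FNV-1a-style loop and then iterates a small multiplicative congruential generator, so identical seeds replay identical sequences. A minimal sketch of that property, against 1.0.732 where the module still exists:

    // Sketch only, against 1.0.732; the seed string is an illustrative example.
    const SeededRandom = require('./functions/computation-system/simulation/SeededRandom');

    const a = new SeededRandom('ExamplePnlCalc_ITER_0');
    const b = new SeededRandom('ExamplePnlCalc_ITER_0');

    console.log(a.next() === b.next());                         // true: same seed, same sequence
    console.log(a.range(1, 10) === b.range(1, 10));             // true
    console.log(a.choice(['x', 'y']) === b.choice(['x', 'y'])); // true
    console.log(a.bool() === b.bool());                         // true
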
package/functions/computation-system/simulation/SimRunner.js
@@ -1,51 +0,0 @@
-/**
- * @fileoverview Runner for Behavioral Hashing (SimHash).
- * Executes a calculation against a fabricated, deterministic context.
- */
-const Fabricator = require('./Fabricator');
-const { generateDataHash } = require('../utils/utils');
-
-class SimRunner {
-  /**
-   * Runs the simulation for a specific calculation.
-   * @param {Object} calcManifest - The manifest entry for the calculation.
-   * @param {Map} fullManifestMap - Map of all manifests (to look up dependencies).
-   * @returns {Promise<string>} The SimHash (SHA256 of the output).
-   */
-  static async run(calcManifest, fullManifestMap) {
-    try {
-      const fabricator = new Fabricator(calcManifest.name);
-
-      // 1. Generate Deterministic Context
-      const context = await fabricator.generateContext(calcManifest, fullManifestMap);
-
-      // 2. Instantiate
-      const instance = new calcManifest.class();
-
-      // 3. Process
-      await instance.process(context);
-
-      // 4. Get Result
-      // Note: If the calculation uses internal state buffering (like `results` property),
-      // getResult() usually returns that.
-      let result = null;
-      if (instance.getResult) {
-        result = await instance.getResult();
-      } else {
-        result = instance.result || instance.results || {};
-      }
-
-      // 5. Sanitize & Hash
-      // We strip any non-deterministic keys if they leak (like timestamps generated inside process)
-      // But ideally, the context mocking prevents this.
-      return generateDataHash(result);
-
-    } catch (e) {
-      console.error(`[SimRunner] Simulation failed for ${calcManifest.name}:`, e);
-      // If simulation crashes, we return a hash of the error to safely trigger a re-run
-      return generateDataHash({ error: e.message });
-    }
-  }
-}
-
-module.exports = SimRunner;
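
SimRunner ties the pieces above together: it fabricates a deterministic context, runs the calculation's process() against it, and hashes whatever getResult() (or the result/results property) exposes, so any behavioral change in a calculation surfaces as a different SimHash. A minimal sketch against 1.0.732; the calculation class below is a hypothetical stand-in for a real manifest entry:

    // Sketch only, against 1.0.732. ExamplePnlCalc is a hypothetical calculation class.
    const SimRunner = require('./functions/computation-system/simulation/SimRunner');

    class ExamplePnlCalc {
      async process(context) { this.result = { processed: true }; }
      getResult() { return this.result; }
    }

    const calcManifest = { name: 'ExamplePnlCalc', userType: 'speculator', dependencies: [], class: ExamplePnlCalc };

    (async () => {
      const hashA = await SimRunner.run(calcManifest, new Map());
      const hashB = await SimRunner.run(calcManifest, new Map());
      console.log(hashA === hashB); // true: unchanged code + fabricated context => identical SimHash
    })();
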