bulltrackers-module 1.0.280 → 1.0.282
This diff compares publicly available package versions as released to their public registry. It is provided for informational purposes only.
- package/functions/computation-system/executors/MetaExecutor.js +6 -8
- package/functions/computation-system/executors/StandardExecutor.js +20 -43
- package/functions/computation-system/onboarding.md +154 -869
- package/functions/computation-system/persistence/ResultCommitter.js +83 -16
- package/functions/computation-system/simulation/Fabricator.js +285 -0
- package/functions/computation-system/simulation/SeededRandom.js +41 -0
- package/functions/computation-system/simulation/SimRunner.js +51 -0
- package/functions/computation-system/tools/BuildReporter.js +147 -161
- package/functions/computation-system/utils/utils.js +13 -2
- package/package.json +1 -1
package/functions/computation-system/persistence/ResultCommitter.js

@@ -2,7 +2,9 @@
  * @fileoverview Handles saving computation results with observability and Smart Cleanup.
  * UPDATED: Implements GZIP Compression for efficient storage.
  * UPDATED: Implements Content-Based Hashing (ResultHash) for dependency short-circuiting.
- * UPDATED: Auto-enforces Weekend Mode validation
+ * UPDATED: Auto-enforces Weekend Mode validation.
+ * UPDATED: Implements "Initial Write" logic to wipe stale data/shards on a fresh run.
+ * OPTIMIZED: Fetches pre-calculated 'simHash' from Registry (removes expensive simulation step).
  */
 const { commitBatchInChunks, generateDataHash } = require('../utils/utils');
 const { updateComputationStatus } = require('./StatusRepository');
@@ -17,6 +19,8 @@ const NON_RETRYABLE_ERRORS = [
   'PERMISSION_DENIED', 'DATA_LOSS', 'FAILED_PRECONDITION'
 ];
 
+const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';
+
 /**
  * Commits results to Firestore.
  */
@@ -30,6 +34,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
 
   // Options defaults
   const flushMode = options.flushMode || 'STANDARD';
+  const isInitialWrite = options.isInitialWrite === true;
   const shardIndexes = options.shardIndexes || {};
   const nextShardIndexes = {};
 
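
For reference, the options bag consumed by commitResults now carries the new flag alongside the existing ones (field names are taken from the hunks above; the grouping below is illustrative):

// Options recognized by commitResults, per this diff:
const options = {
  flushMode: 'STANDARD',   // or 'INTERMEDIATE' for mid-run flushes
  isInitialWrite: true,    // [NEW] first flush of a fresh run: overwrite root + wipe stale shards
  shardIndexes: {}         // resume points carried between INTERMEDIATE flushes
};
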
@@ -50,29 +55,22 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
     const result = await calc.getResult();
     const configOverrides = validationOverrides[calc.manifest.name] || {};
 
-    // --- [NEW] AUTO-ENFORCE WEEKEND MODE FOR PRICE-ONLY CALCS ---
-    // If a calculation depends SOLELY on 'price', we assume market closures
-    // will cause 0s/Flatlines on weekends, so we enforce lenient validation.
     const dataDeps = calc.manifest.rootDataDependencies || [];
     const isPriceOnly = (dataDeps.length === 1 && dataDeps[0] === 'price');
 
     let effectiveOverrides = { ...configOverrides };
 
     if (isPriceOnly && !effectiveOverrides.weekend) {
-      // Apply strict leniency for weekend/holiday price actions
       effectiveOverrides.weekend = {
         maxZeroPct: 100,
         maxFlatlinePct: 100,
         maxNullPct: 100
       };
     }
-    // -----------------------------------------------------------
 
     // Validation
     if (result && Object.keys(result).length > 0) {
-      // [FIX] Added 'dStr' as 3rd argument to match HeuristicValidator signature
       const healthCheck = HeuristicValidator.analyze(calc.manifest.name, result, dStr, effectiveOverrides);
-
       if (!healthCheck.valid) {
         runMetrics.validation.isValid = false;
         runMetrics.validation.anomalies.push(healthCheck.reason);
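
The auto-enforcement above boils down to a small predicate. A minimal standalone sketch (manifest fields are from the diff; the helper name is ours):

// Hypothetical helper mirroring the weekend-mode rule above.
function resolveWeekendOverrides(manifest, configOverrides = {}) {
  const dataDeps = manifest.rootDataDependencies || [];
  const isPriceOnly = dataDeps.length === 1 && dataDeps[0] === 'price';
  const effective = { ...configOverrides };
  if (isPriceOnly && !effective.weekend) {
    // Markets close on weekends, so zeros/flatlines/nulls are expected for price-only calcs.
    effective.weekend = { maxZeroPct: 100, maxFlatlinePct: 100, maxNullPct: 100 };
  }
  return effective;
}

console.log(resolveWeekendOverrides({ rootDataDependencies: ['price'] }).weekend.maxZeroPct); // 100
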
@@ -83,11 +81,27 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
     }
 
     const isEmpty = !result || (typeof result === 'object' && Object.keys(result).length === 0);
-
-    // Calculate Result Hash (Content-Based)
     const resultHash = isEmpty ? 'empty' : generateDataHash(result);
 
-    //
+    // [OPTIMIZATION] FETCH SimHash from Registry (Do NOT Calculate)
+    let simHash = null;
+    if (calc.manifest.hash && flushMode !== 'INTERMEDIATE') {
+      try {
+        // Fast O(1) lookup using Code Hash
+        // We simply check if the BuildReporter has already stamped this code version
+        const regDoc = await db.collection(SIMHASH_REGISTRY_COLLECTION).doc(calc.manifest.hash).get();
+        if (regDoc.exists) {
+          simHash = regDoc.data().simHash;
+        } else {
+          // Fallback: This happens if BuildReporter didn't run or is out of sync.
+          // We do NOT run SimRunner here to protect production performance.
+          logger.log('WARN', `[ResultCommitter] SimHash not found in registry for ${name} (Hash: ${calc.manifest.hash}). Is BuildReporter skipped?`);
+        }
+      } catch (regErr) {
+        logger.log('WARN', `[ResultCommitter] Failed to read SimHash registry: ${regErr.message}`);
+      }
+    }
+
     if (isEmpty) {
       if (flushMode === 'INTERMEDIATE') {
         nextShardIndexes[name] = currentShardIndex;
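
Isolated, the registry lookup is a single keyed read. A minimal sketch with firebase-admin, assuming an initialized app (the collection name and { simHash } document shape come from the diff):

const admin = require('firebase-admin');

admin.initializeApp();
const db = admin.firestore();

async function lookupSimHash(codeHash) {
  const doc = await db.collection('system_simhash_registry').doc(codeHash).get();
  // null signals that BuildReporter has not stamped this code version yet.
  return doc.exists ? doc.data().simHash : null;
}
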
@@ -96,6 +110,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
       if (calc.manifest.hash) {
         successUpdates[name] = {
           hash: calc.manifest.hash,
+          simHash: simHash, // [NEW] Populated from Registry
           resultHash: resultHash,
           dependencyResultHashes: calc.manifest.dependencyResultHashes || {},
           category: calc.manifest.category,
@@ -123,13 +138,14 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
           .collection(config.computationsSubcollection)
           .doc(name);
 
-        await writeSingleResult(dailyData, historicalDocRef, name, historicalDate, logger, config, deps, 0, 'STANDARD');
+        await writeSingleResult(dailyData, historicalDocRef, name, historicalDate, logger, config, deps, 0, 'STANDARD', false);
       }));
       await Promise.all(datePromises);
 
       if (calc.manifest.hash) {
         successUpdates[name] = {
           hash: calc.manifest.hash,
+          simHash: simHash, // [NEW] Populated from Registry
           resultHash: resultHash,
           dependencyResultHashes: calc.manifest.dependencyResultHashes || {},
           category: calc.manifest.category,
@@ -146,7 +162,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
       .collection(config.computationsSubcollection)
       .doc(name);
 
-    const writeStats = await writeSingleResult(result, mainDocRef, name, dStr, logger, config, deps, currentShardIndex, flushMode);
+    const writeStats = await writeSingleResult(result, mainDocRef, name, dStr, logger, config, deps, currentShardIndex, flushMode, isInitialWrite);
 
     runMetrics.storage.sizeBytes = writeStats.totalSize;
     runMetrics.storage.isSharded = writeStats.isSharded;
@@ -157,6 +173,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
     if (calc.manifest.hash) {
       successUpdates[name] = {
         hash: calc.manifest.hash,
+        simHash: simHash, // [NEW] Populated from Registry
        resultHash: resultHash,
         dependencyResultHashes: calc.manifest.dependencyResultHashes || {},
         category: calc.manifest.category,
@@ -192,8 +209,26 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
   return { successUpdates, failureReport, shardIndexes: nextShardIndexes };
 }
 
-async function writeSingleResult(result, docRef, name, dateContext, logger, config, deps, startShardIndex = 0, flushMode = 'STANDARD') {
+async function writeSingleResult(result, docRef, name, dateContext, logger, config, deps, startShardIndex = 0, flushMode = 'STANDARD', isInitialWrite = false) {
 
+  // Transition & Cleanup Logic
+  let wasSharded = false;
+  let shouldWipeShards = false;
+
+  // Default: Merge updates. But if Initial Write, overwrite (merge: false) to clear stale fields.
+  let rootMergeOption = !isInitialWrite;
+
+  if (isInitialWrite) {
+    try {
+      const currentSnap = await docRef.get();
+      if (currentSnap.exists) {
+        const d = currentSnap.data();
+        wasSharded = (d._sharded === true);
+        if (wasSharded) shouldWipeShards = true;
+      }
+    } catch (e) { /* ignore read error */ }
+  }
+
   // --- COMPRESSION STRATEGY ---
   try {
     const jsonString = JSON.stringify(result);
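
The rootMergeOption computed above leans on Firestore set() semantics: { merge: true } preserves document fields absent from the payload, while { merge: false } replaces the document wholesale, which is what clears stale fields on an initial write. A toy illustration, assuming a DocumentReference docRef:

async function demoMergeSemantics(docRef) {
  await docRef.set({ a: 1, b: 2 });             // doc = { a: 1, b: 2 }
  await docRef.set({ a: 9 }, { merge: true });  // doc = { a: 9, b: 2 } (b survives)
  await docRef.set({ a: 9 }, { merge: false }); // doc = { a: 9 }       (stale b is gone)
}
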
@@ -212,7 +247,22 @@ async function writeSingleResult(result, docRef, name, dateContext, logger, conf
       payload: compressedBuffer
     };
 
-
+    // Cleanup: If it was sharded, or if we are wiping shards on initial write
+    if (shouldWipeShards) {
+      logger.log('INFO', `[Cleanup] ${name}: Wiping old shards before Compressed Write.`);
+      const updates = [];
+      const shardCol = docRef.collection('_shards');
+      const shardDocs = await shardCol.listDocuments();
+      shardDocs.forEach(d => updates.push({ type: 'DELETE', ref: d }));
+
+      // Root update with merge: false (overwrites everything)
+      updates.push({ ref: docRef, data: compressedPayload, options: { merge: false } });
+
+      await commitBatchInChunks(config, deps, updates, `${name}::Cleanup+Compress`);
+    } else {
+      // Standard update (respecting calculated rootMergeOption)
+      await docRef.set(compressedPayload, { merge: rootMergeOption });
+    }
 
     return {
       totalSize: compressedBuffer.length,
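
The compressed branch stores a single document whose payload field is a Buffer. A round-trip sketch with Node's zlib (gzip is named in the file header; any marker field beyond 'payload' is our assumption):

const zlib = require('zlib');

const result = { AAPL: { score: 1.23 }, MSFT: { score: -0.4 } };
const compressedPayload = {
  _compressed: true, // assumed marker; only 'payload' is visible in this hunk
  payload: zlib.gzipSync(JSON.stringify(result))
};

const restored = JSON.parse(zlib.gunzipSync(compressedPayload.payload).toString('utf8'));
console.log(restored.AAPL.score); // 1.23
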
@@ -242,12 +292,29 @@ async function writeSingleResult(result, docRef, name, dateContext, logger, conf
     const constraints = strategies[attempt];
     try {
       const updates = await prepareAutoShardedWrites(result, docRef, logger, constraints.bytes, constraints.keys, startShardIndex, flushMode);
-      const pointer = updates.find(u => u.data._completed !== undefined || u.data._sharded !== undefined);
 
+      // Inject Cleanup Ops
+      if (shouldWipeShards) {
+        logger.log('INFO', `[Cleanup] ${name}: Wiping old shards before Write (Initial).`);
+        const shardCol = docRef.collection('_shards');
+        const shardDocs = await shardCol.listDocuments();
+        // Prepend DELETEs
+        shardDocs.forEach(d => updates.unshift({ type: 'DELETE', ref: d }));
+        shouldWipeShards = false; // Done for this loop
+      }
+
+      // Ensure the root document write respects our merge option
+      const rootUpdate = updates.find(u => u.ref.path === docRef.path && u.type !== 'DELETE');
+      if (rootUpdate) {
+        rootUpdate.options = { merge: rootMergeOption };
+      }
+
+      const pointer = updates.find(u => u.data && (u.data._completed !== undefined || u.data._sharded !== undefined));
       finalStats.totalSize = updates.reduce((acc, u) => acc + (u.data ? JSON.stringify(u.data).length : 0), 0);
 
       let maxIndex = startShardIndex;
       updates.forEach(u => {
+        if (u.type === 'DELETE') return;
         const segs = u.ref.path.split('/');
         const last = segs[segs.length - 1];
         if (last.startsWith('shard_')) {
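
commitBatchInChunks itself is not shown in this diff, but the new entries imply a contract of { type: 'DELETE', ref } and { ref, data, options } ops. A hedged sketch of such a committer, built on Firestore's 500-op batch limit (not the package's actual helper):

async function commitBatchInChunksSketch(db, updates, chunkSize = 500) {
  for (let i = 0; i < updates.length; i += chunkSize) {
    const batch = db.batch();
    for (const u of updates.slice(i, i + chunkSize)) {
      if (u.type === 'DELETE') batch.delete(u.ref);               // shard wipe ops
      else batch.set(u.ref, u.data, u.options || { merge: true }); // root/shard writes
    }
    await batch.commit();
  }
}
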
package/functions/computation-system/simulation/Fabricator.js

@@ -0,0 +1,285 @@
+/**
+ * @fileoverview Generates deterministic mock contexts for Simulation Hashing.
+ * STRICTLY ALIGNED WITH SCHEMA.MD (Production V2 Schemas).
+ * UPGRADED: Supports Iteration (Seed Rotation) and Volume Scaling for Arrays.
+ */
+const SeededRandom = require('./SeededRandom');
+const { ContextFactory } = require('../context/ContextFactory');
+
+const FAKE_SECTORS = ['Technology', 'Healthcare', 'Financials', 'Energy', 'Crypto', 'Consumer Discretionary'];
+const FAKE_TICKERS = ['AAPL', 'GOOGL', 'MSFT', 'TSLA', 'AMZN', 'BTC', 'ETH', 'NVDA', 'META', 'AMD'];
+const FAKE_TOPICS = ['AI', 'Earnings', 'Fed', 'Crypto', 'Macro'];
+
+class Fabricator {
+  constructor(calcName) {
+    this.baseSeed = calcName;
+    // Primary RNG for high-level structure
+    this.rng = new SeededRandom(calcName);
+  }
+
+  /**
+   * Generates a context for a specific user iteration.
+   * @param {number} iteration - The index of the user in the batch (0, 1, 2...).
+   */
+  async generateContext(calcManifest, dependenciesManifest, iteration = 0) {
+    // [CRITICAL] Rotate the RNG state based on iteration so User 1 != User 2
+    this.rng = new SeededRandom(`${this.baseSeed}_ITER_${iteration}`);
+
+    const FIXED_DATE = '2025-01-01'; // Fixed simulation date
+
+    // 1. Generate Root Data
+    const user = this._generateUser(calcManifest.userType, iteration);
+    const insights = this._generateInsights(FIXED_DATE);
+
+    // 2. Generate Mock Dependencies (The "Schema Faking" Part)
+    const computed = {};
+    if (calcManifest.dependencies) {
+      for (const depName of calcManifest.dependencies) {
+        const depEntry = dependenciesManifest.get(depName);
+        if (depEntry && depEntry.class && depEntry.class.getSchema) {
+          const schema = depEntry.class.getSchema();
+          // [VOLUME UPGRADE] Dependencies usually represent aggregate data.
+          computed[depName] = this._fakeFromSchema(schema, true);
+        } else {
+          computed[depName] = {};
+        }
+      }
+    }
+
+    return ContextFactory.buildPerUserContext({
+      userId: user.id,
+      userType: user.type,
+      dateStr: FIXED_DATE,
+      todayPortfolio: user.portfolio.today,
+      yesterdayPortfolio: user.portfolio.yesterday,
+      todayHistory: user.history.today,
+      yesterdayHistory: user.history.yesterday,
+      metadata: calcManifest,
+      mappings: {
+        instrumentToTicker: this._generateMappings(),
+        instrumentToSector: this._generateSectorMappings()
+      },
+      insights: { today: insights },
+      socialData: { today: this._generateSocial(FIXED_DATE) },
+      computedDependencies: computed,
+      config: {},
+      deps: { logger: { log: () => {} } }
+    });
+  }
+
+  // --- Schema Faker Logic (Unchanged) ---
+  _fakeFromSchema(schema, isHighVolume = false) {
+    if (!schema) return {};
+    if (schema.type === 'object') {
+      const obj = {};
+      if (schema.properties) {
+        for (const [key, propSchema] of Object.entries(schema.properties)) {
+          obj[key] = this._fakeFromSchema(propSchema, isHighVolume);
+        }
+      }
+      if (schema.patternProperties) {
+        const count = isHighVolume ? this.rng.range(20, 50) : 3;
+        const propSchema = Object.values(schema.patternProperties)[0];
+        for (let i = 0; i < count; i++) {
+          // Use deterministic ticker keys for stability
+          const key = `${this.rng.choice(FAKE_TICKERS)}`;
+          // Note: In real scenarios tickers are unique, so we might need a suffix if count > tickers.length
+          const safeKey = count > FAKE_TICKERS.length ? `${key}_${i}` : key;
+          obj[safeKey] = this._fakeFromSchema(propSchema, isHighVolume);
+        }
+      }
+      return obj;
+    }
+    if (schema.type === 'array') {
+      const min = isHighVolume ? 50 : 1;
+      const max = isHighVolume ? 150 : 5;
+      const len = this.rng.range(min, max);
+      return Array.from({ length: len }, () => this._fakeFromSchema(schema.items, isHighVolume));
+    }
+    if (schema.type === 'number') return parseFloat(this.rng.next().toFixed(4)) * 100;
+    if (schema.type === 'string') return "SIMULATED_STRING";
+    if (schema.type === 'boolean') return this.rng.bool();
+    return null;
+  }
+
+  // --- PROD ALIGNED GENERATORS ---
+
+  _generateUser(type, iteration) {
+    const userId = 1000000 + iteration; // Numeric ID to match Schema
+    const isSpeculator = (type === 'speculator');
+
+    return {
+      id: String(userId),
+      type: type || 'all',
+      portfolio: {
+        today: isSpeculator ? this._genSpecPortfolio(userId) : this._genNormalPortfolio(userId),
+        yesterday: isSpeculator ? this._genSpecPortfolio(userId) : this._genNormalPortfolio(userId)
+      },
+      history: {
+        today: { PublicHistoryPositions: this._genHistoryTrades(userId) },
+        yesterday: { PublicHistoryPositions: this._genHistoryTrades(userId) }
+      }
+    };
+  }
+
+  // Schema 2: Speculator User Portfolio
+  _genSpecPortfolio(userId) {
+    const invested = this.rng.range(5000, 50000);
+    const netProfit = this.rng.range(-20, 30);
+    const equity = invested * (1 + (netProfit / 100));
+
+    return {
+      AverageOpen: this.rng.range(100, 3000),
+      Equity: parseFloat(equity.toFixed(4)),
+      Invested: parseFloat(invested.toFixed(4)),
+      NetProfit: parseFloat(netProfit.toFixed(4)),
+      PublicPositions: Array.from({ length: this.rng.range(2, 10) }, (_, i) => {
+        const openRate = this.rng.range(50, 500);
+        const isBuy = this.rng.bool();
+        return {
+          Amount: parseFloat(this.rng.range(100, 1000).toFixed(4)),
+          CID: userId,
+          CurrentRate: parseFloat((openRate * (1 + (this.rng.next() - 0.5) * 0.1)).toFixed(2)),
+          InstrumentID: 100 + (i % 20),
+          IsBuy: isBuy,
+          IsTslEnabled: this.rng.bool(0.1),
+          Leverage: this.rng.choice([1, 2, 5, 10, 20]),
+          MirrorID: 0,
+          NetProfit: parseFloat(this.rng.range(-50, 50).toFixed(4)),
+          OpenDateTime: '2024-12-01T10:00:00Z',
+          OpenRate: parseFloat(openRate.toFixed(2)),
+          ParentPositionID: 0,
+          PipDifference: this.rng.range(-100, 100),
+          PositionID: 3000000000 + i,
+          StopLossRate: 0.01,
+          TakeProfitRate: 0
+        };
+      })
+    };
+  }
+
+  // Schema 1: Normal User Portfolio
+  _genNormalPortfolio(userId) {
+    const positions = Array.from({ length: this.rng.range(3, 12) }, (_, i) => ({
+      Direction: "Buy",
+      InstrumentID: 100 + (i % 20),
+      Invested: parseFloat(this.rng.range(5, 20).toFixed(4)), // Percent
+      NetProfit: parseFloat(this.rng.range(-30, 40).toFixed(4)),
+      Value: parseFloat(this.rng.range(5, 25).toFixed(4)) // Percent (Invested + PnL approx)
+    }));
+
+    // [CRITICAL] DataExtractor.getPortfolioDailyPnl uses AggregatedPositionsByInstrumentTypeID
+    // We must generate this aggregation or PnL calcs return 0.
+    const aggByType = positions.map(p => ({
+      Direction: p.Direction,
+      InstrumentTypeID: 5, // Stock
+      Invested: p.Invested,
+      NetProfit: p.NetProfit,
+      Value: p.Value
+    }));
+
+    return {
+      AggregatedMirrors: [],
+      AggregatedPositions: positions,
+      AggregatedPositionsByInstrumentTypeID: aggByType, // Required for PnL
+      AggregatedPositionsByStockIndustryID: [],
+      CreditByRealizedEquity: 0,
+      CreditByUnrealizedEquity: 0
+    };
+  }
+
+  // Schema 3: Trade History
+  _genHistoryTrades(userId) {
+    return Array.from({ length: this.rng.range(5, 30) }, (_, i) => ({
+      PositionID: 4000000000 + i,
+      CID: userId,
+      OpenDateTime: '2024-12-01T10:00:00Z',
+      OpenRate: 100.50,
+      InstrumentID: 100 + (i % 20),
+      IsBuy: this.rng.bool(),
+      MirrorID: 0,
+      ParentPositionID: 0,
+      CloseDateTime: '2024-12-02T10:00:00Z',
+      CloseRate: 110.20,
+      CloseReason: this.rng.choice([1, 5, 0]), // 1=SL, 5=TP, 0=Manual
+      ParentCID: userId,
+      NetProfit: parseFloat(this.rng.range(-50, 50).toFixed(4)),
+      Leverage: this.rng.choice([1, 2, 5])
+    }));
+  }
+
+  // Schema 5: Insights
+  // [CRITICAL FIX] Must return object { fetchedAt, insights: [] }, not just array.
+  _generateInsights(dateStr) {
+    const insightsArray = Array.from({ length: 50 }, (_, i) => ({
+      instrumentId: 100 + i,
+      total: this.rng.range(100, 50000), // Total owners
+      percentage: this.rng.next() * 0.05, // % of brokerage
+      growth: parseFloat((this.rng.next() * 10 - 5).toFixed(4)),
+      buy: this.rng.range(20, 95),
+      sell: 0, // Will calculate below
+      prevBuy: this.rng.range(20, 95),
+      prevSell: 0
+    }));
+
+    // Fix sell/prevSell math
+    insightsArray.forEach(i => {
+      i.sell = 100 - i.buy;
+      i.prevSell = 100 - i.prevBuy;
+    });
+
+    return {
+      fetchedAt: `${dateStr}T12:00:00Z`,
+      insights: insightsArray
+    };
+  }
+
+  // Schema 4: Social Post Data
+  // Returns Map: { "postId": { ... } }
+  _generateSocial(dateStr) {
+    const posts = {};
+    const count = this.rng.range(5, 20);
+
+    for(let i=0; i<count; i++) {
+      const id = `post_${i}_${this.rng.next().toString(36).substring(7)}`;
+      const ticker = this.rng.choice(FAKE_TICKERS);
+
+      posts[id] = {
+        commentCount: this.rng.range(0, 50),
+        createdAt: `${dateStr}T09:00:00Z`,
+        fetchedAt: `${dateStr}T10:00:00Z`,
+        fullText: `$${ticker} is looking bullish today!`,
+        language: 'en-gb',
+        likeCount: this.rng.range(0, 200),
+        postOwnerId: String(this.rng.range(100000, 999999)),
+        sentiment: {
+          overallSentiment: this.rng.choice(['Bullish', 'Bearish', 'Neutral']),
+          topics: [this.rng.choice(FAKE_TOPICS)]
+        },
+        textSnippet: `$${ticker} is looking...`,
+        tickers: [ticker]
+      };
+    }
+    return posts;
+  }
+
+  _generateMappings() {
+    const map = {};
+    // Map ID 100-150 to FAKE_TICKERS deterministically
+    for(let i=0; i<50; i++) {
+      // cycle through tickers
+      map[100+i] = FAKE_TICKERS[i % FAKE_TICKERS.length];
+    }
+    return map;
+  }
+
+  _generateSectorMappings() {
+    const map = {};
+    for(let i=0; i<50; i++) {
+      map[100+i] = FAKE_SECTORS[i % FAKE_SECTORS.length];
+    }
+    return map;
+  }
+}
+
+module.exports = Fabricator;
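
Usage sketch: one Fabricator per calculation, re-seeded per iteration, so user 0 and user 1 differ but every run reproduces them exactly (the calc name below is hypothetical):

const Fabricator = require('./Fabricator');

async function demoFabricator(calcManifest, manifestMap) {
  const fab = new Fabricator('portfolioPnl'); // hypothetical calc name; it seeds the RNG
  const ctx0 = await fab.generateContext(calcManifest, manifestMap, 0);
  const ctx1 = await fab.generateContext(calcManifest, manifestMap, 1);
  // Same name + same iteration => identical context on every run,
  // which is what keeps the resulting SimHash stable across builds.
  return [ctx0, ctx1];
}
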
package/functions/computation-system/simulation/SeededRandom.js

@@ -0,0 +1,41 @@
+/**
+ * @fileoverview Deterministic Pseudo-Random Number Generator (LCG).
+ * Ensures that for a given seed, the sequence of numbers is identical across runs.
+ */
+class SeededRandom {
+  constructor(seedString) {
+    this.state = this._stringToSeed(seedString);
+  }
+
+  _stringToSeed(str) {
+    let h = 2166136261 >>> 0;
+    for (let i = 0; i < str.length; i++) {
+      h = Math.imul(h ^ str.charCodeAt(i), 16777619);
+    }
+    return h >>> 0;
+  }
+
+  /** Returns a float between 0 and 1 */
+  next() {
+    this.state = (Math.imul(48271, this.state) % 2147483647);
+    return (this.state - 1) / 2147483646;
+  }
+
+  /** Returns an integer between min and max (inclusive) */
+  range(min, max) {
+    return Math.floor(this.next() * (max - min + 1)) + min;
+  }
+
+  /** Returns a random element from an array */
+  choice(arr) {
+    if (!arr || arr.length === 0) return null;
+    return arr[this.range(0, arr.length - 1)];
+  }
+
+  /** Returns a boolean based on probability */
+  bool(probability = 0.5) {
+    return this.next() < probability;
+  }
+}
+
+module.exports = SeededRandom;
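
A quick check of the determinism the file header promises: two generators built from the same seed walk the same sequence.

const SeededRandom = require('./SeededRandom');

const a = new SeededRandom('myCalc');
const b = new SeededRandom('myCalc');
console.log(a.next() === b.next());             // true: same seed, same sequence
console.log(a.range(1, 10) === b.range(1, 10)); // true: derived helpers stay in lockstep
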
package/functions/computation-system/simulation/SimRunner.js

@@ -0,0 +1,51 @@
+/**
+ * @fileoverview Runner for Behavioral Hashing (SimHash).
+ * Executes a calculation against a fabricated, deterministic context.
+ */
+const Fabricator = require('./Fabricator');
+const { generateDataHash } = require('../utils/utils');
+
+class SimRunner {
+  /**
+   * Runs the simulation for a specific calculation.
+   * @param {Object} calcManifest - The manifest entry for the calculation.
+   * @param {Map} fullManifestMap - Map of all manifests (to look up dependencies).
+   * @returns {Promise<string>} The SimHash (SHA256 of the output).
+   */
+  static async run(calcManifest, fullManifestMap) {
+    try {
+      const fabricator = new Fabricator(calcManifest.name);
+
+      // 1. Generate Deterministic Context
+      const context = await fabricator.generateContext(calcManifest, fullManifestMap);
+
+      // 2. Instantiate
+      const instance = new calcManifest.class();
+
+      // 3. Process
+      await instance.process(context);
+
+      // 4. Get Result
+      // Note: If the calculation uses internal state buffering (like `results` property),
+      // getResult() usually returns that.
+      let result = null;
+      if (instance.getResult) {
+        result = await instance.getResult();
+      } else {
+        result = instance.result || instance.results || {};
+      }
+
+      // 5. Sanitize & Hash
+      // We strip any non-deterministic keys if they leak (like timestamps generated inside process)
+      // But ideally, the context mocking prevents this.
+      return generateDataHash(result);
+
+    } catch (e) {
+      console.error(`[SimRunner] Simulation failed for ${calcManifest.name}:`, e);
+      // If simulation crashes, we return a hash of the error to safely trigger a re-run
+      return generateDataHash({ error: e.message });
+    }
+  }
+}
+
+module.exports = SimRunner;
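
ResultCommitter only reads the SimHash registry; something has to write it. A hedged sketch of how a build step might stamp a SimHash (the collection name and manifest.hash key come from this diff; the write itself is our assumption, since BuildReporter's hunks are not shown in this section):

const SimRunner = require('./SimRunner');

async function stampSimHash(db, calcManifest, fullManifestMap) {
  const simHash = await SimRunner.run(calcManifest, fullManifestMap);
  // Keyed by code hash so ResultCommitter's registry lookup stays O(1).
  await db.collection('system_simhash_registry').doc(calcManifest.hash).set({ simHash });
  return simHash;
}
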