bulltrackers-module 1.0.732 → 1.0.733
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/orchestrator/index.js +19 -17
- package/index.js +8 -29
- package/package.json +1 -1
- package/functions/computation-system/WorkflowOrchestrator.js +0 -213
- package/functions/computation-system/config/monitoring_config.js +0 -31
- package/functions/computation-system/config/validation_overrides.js +0 -10
- package/functions/computation-system/context/ContextFactory.js +0 -143
- package/functions/computation-system/context/ManifestBuilder.js +0 -379
- package/functions/computation-system/data/AvailabilityChecker.js +0 -236
- package/functions/computation-system/data/CachedDataLoader.js +0 -325
- package/functions/computation-system/data/DependencyFetcher.js +0 -455
- package/functions/computation-system/executors/MetaExecutor.js +0 -279
- package/functions/computation-system/executors/PriceBatchExecutor.js +0 -108
- package/functions/computation-system/executors/StandardExecutor.js +0 -465
- package/functions/computation-system/helpers/computation_dispatcher.js +0 -750
- package/functions/computation-system/helpers/computation_worker.js +0 -375
- package/functions/computation-system/helpers/monitor.js +0 -64
- package/functions/computation-system/helpers/on_demand_helpers.js +0 -154
- package/functions/computation-system/layers/extractors.js +0 -1097
- package/functions/computation-system/layers/index.js +0 -40
- package/functions/computation-system/layers/mathematics.js +0 -522
- package/functions/computation-system/layers/profiling.js +0 -537
- package/functions/computation-system/layers/validators.js +0 -170
- package/functions/computation-system/legacy/AvailabilityCheckerOld.js +0 -388
- package/functions/computation-system/legacy/CachedDataLoaderOld.js +0 -357
- package/functions/computation-system/legacy/DependencyFetcherOld.js +0 -478
- package/functions/computation-system/legacy/MetaExecutorold.js +0 -364
- package/functions/computation-system/legacy/StandardExecutorold.js +0 -476
- package/functions/computation-system/legacy/computation_dispatcherold.js +0 -944
- package/functions/computation-system/logger/logger.js +0 -297
- package/functions/computation-system/persistence/ContractValidator.js +0 -81
- package/functions/computation-system/persistence/FirestoreUtils.js +0 -56
- package/functions/computation-system/persistence/ResultCommitter.js +0 -283
- package/functions/computation-system/persistence/ResultsValidator.js +0 -130
- package/functions/computation-system/persistence/RunRecorder.js +0 -142
- package/functions/computation-system/persistence/StatusRepository.js +0 -52
- package/functions/computation-system/reporter_epoch.js +0 -6
- package/functions/computation-system/scripts/UpdateContracts.js +0 -128
- package/functions/computation-system/services/SnapshotService.js +0 -148
- package/functions/computation-system/simulation/Fabricator.js +0 -285
- package/functions/computation-system/simulation/SeededRandom.js +0 -41
- package/functions/computation-system/simulation/SimRunner.js +0 -51
- package/functions/computation-system/system_epoch.js +0 -2
- package/functions/computation-system/tools/BuildReporter.js +0 -531
- package/functions/computation-system/tools/ContractDiscoverer.js +0 -144
- package/functions/computation-system/tools/DeploymentValidator.js +0 -536
- package/functions/computation-system/tools/FinalSweepReporter.js +0 -322
- package/functions/computation-system/topology/HashManager.js +0 -55
- package/functions/computation-system/topology/ManifestLoader.js +0 -47
- package/functions/computation-system/utils/data_loader.js +0 -675
- package/functions/computation-system/utils/schema_capture.js +0 -121
- package/functions/computation-system/utils/utils.js +0 -188
|
@@ -1,40 +0,0 @@
|
|
|
1
|
-
/**
 * @fileoverview Layers Barrel File
 * Aggregates all mathematical, extractor, and profiling primitives.
 * Utilizes require-all to dynamically load new modules added to this directory.
 */

const requireAll = require('require-all');

// Core modules are required explicitly so their load order — and their
// presence — is guaranteed.
const profiling = require('./profiling');
const extractors = require('./extractors');
const mathematics = require('./mathematics');
const validators = require('./validators');

// Future-proofing: any OTHER .js file dropped into this directory is picked
// up automatically; the barrel itself and the core modules are excluded.
const dynamicModules = requireAll({
  dirname: __dirname,
  filter: /^(?!index\.js$|profiling\.js$|extractors\.js$|mathematics\.js$|validators\.js$).+\.js$/,
  resolve: (mod) => mod
});

// Core exports are merged first, then each dynamic module's exports are
// layered on top — so later (dynamic) names win on key collision.
const merged = Object.assign({}, profiling, extractors, mathematics, validators);
for (const moduleExports of Object.values(dynamicModules)) {
  Object.assign(merged, moduleExports);
}

module.exports = merged;
|
|
@@ -1,522 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @fileoverview Mathematics Layer
|
|
3
|
-
* Core mathematical functions, statistics, and signal primitives.
|
|
4
|
-
*/
|
|
5
|
-
|
|
6
|
-
const { DataExtractor } = require('./extractors');
|
|
7
|
-
const { SCHEMAS } = require('./profiling');
|
|
8
|
-
|
|
9
|
-
/**
 * Core mathematical primitives: summary statistics, barrier-hit probability,
 * and Monte-Carlo (GBM) price simulation.
 */
class MathPrimitives {
    /**
     * Arithmetic mean.
     * @param {Array<number>} values
     * @returns {number} 0 for null/empty input.
     */
    static average(values) {
        if (!values || !values.length) return 0;
        return values.reduce((a, b) => a + b, 0) / values.length;
    }

    /**
     * Median (the input array is copied, not mutated).
     * @param {Array<number>} values
     * @returns {number} 0 for null/empty input.
     */
    static median(values) {
        if (!values || !values.length) return 0;
        const sorted = [...values].sort((a, b) => a - b);
        const mid = Math.floor(sorted.length / 2);
        return sorted.length % 2 === 0
            ? (sorted[mid - 1] + sorted[mid]) / 2
            : sorted[mid];
    }

    /**
     * Population standard deviation (divides by N, not N-1).
     * Null/undefined entries are treated as 0.
     * @param {Array<number>} values
     * @returns {number} 0 for null/empty input.
     */
    static standardDeviation(values) {
        if (!values || !values.length) return 0;
        const avg = this.average(values);
        const squareDiffs = values.map(val => Math.pow((val || 0) - avg, 2));
        return Math.sqrt(this.average(squareDiffs));
    }

    /** Binary bucketing: 'winner' strictly above threshold, 'loser' otherwise. */
    static bucketBinary(value, threshold = 0) {
        return value > threshold ? 'winner' : 'loser';
    }

    /**
     * Probability that a GBM price path touches `barrierPrice` within `days`,
     * via the closed-form first-passage formula and an Abramowitz–Stegun
     * polynomial approximation of the normal CDF.
     * @param {number} currentPrice - Spot price (> 0).
     * @param {number} barrierPrice - Barrier level (> 0).
     * @param {number} volatility - Annualized volatility (> 0).
     * @param {number} days - Horizon in calendar days (> 0).
     * @param {number} [drift=0] - Annualized drift.
     * @returns {number} Probability clamped to [0, 1]; 0 for invalid input.
     */
    static calculateHitProbability(currentPrice, barrierPrice, volatility, days, drift = 0) {
        if (currentPrice <= 0 || barrierPrice <= 0 || volatility <= 0 || days <= 0) return 0;

        // FIX: the original guard tested
        //   (currentPrice > barrierPrice && barrierPrice > currentPrice) ||
        //   (currentPrice < barrierPrice && barrierPrice < currentPrice)
        // which is contradictory and therefore unreachable dead code. The
        // evident intent is "barrier already at the spot ⇒ hit immediately".
        if (currentPrice === barrierPrice) return 1.0;

        const t = days / 365.0;
        const sigma = volatility;
        const mu = drift;
        const b = Math.log(barrierPrice / currentPrice);
        const nu = mu - 0.5 * Math.pow(sigma, 2);
        const sqrtT = Math.sqrt(t);
        const sigmaSqrtT = sigma * sqrtT;

        // Abramowitz & Stegun polynomial approximation of the standard
        // normal CDF (accurate to ~1e-7).
        const normCDF = (x) => {
            const s = 1 / (1 + 0.2316419 * Math.abs(x));
            const d = 0.3989423 * Math.exp(-x * x / 2);
            const prob = d * s * (0.3193815 + s * (-0.3565638 + s * (1.781478 + s * (-1.821256 + s * 1.330274))));
            return x > 0 ? 1 - prob : prob;
        };

        const term1 = (b - nu * t) / sigmaSqrtT;
        const term2 = (2 * nu * b) / (sigma * sigma);
        const term3 = (b + nu * t) / sigmaSqrtT;

        const probability = normCDF(-term3) + Math.exp(term2) * normCDF(-term1);

        // FIX: Math.exp(term2) can overflow; guard so NaN never escapes
        // the clamp (NaN survives Math.min/Math.max).
        if (Number.isNaN(probability)) return 0;
        return Math.min(Math.max(probability, 0), 1);
    }

    /**
     * Monte-Carlo terminal prices under Geometric Brownian Motion.
     * @param {number} currentPrice - Spot price (> 0).
     * @param {number} volatility - Annualized volatility (> 0).
     * @param {number} days - Horizon in days (> 0).
     * @param {number} [simulations=1000]
     * @param {number} [drift=0]
     * @returns {Float32Array} One terminal price per simulation; empty on bad input.
     */
    static simulateGBM(currentPrice, volatility, days, simulations = 1000, drift = 0) {
        if (currentPrice <= 0 || volatility <= 0 || days <= 0) return new Float32Array(0);

        const t = days / 365.0;
        const sigma = volatility;
        const mu = drift;
        const driftTerm = (mu - 0.5 * sigma * sigma) * t;
        const volTerm = sigma * Math.sqrt(t);

        const results = new Float32Array(simulations);

        for (let i = 0; i < simulations; i++) {
            // Box–Muller transform.
            // FIX: Math.random() is in [0, 1) and can be exactly 0, making
            // Math.log(u1) = -Infinity; 1 - Math.random() is in (0, 1].
            const u1 = 1 - Math.random();
            const u2 = Math.random();
            const z = Math.sqrt(-2.0 * Math.log(u1)) * Math.cos(2.0 * Math.PI * u2);
            results[i] = currentPrice * Math.exp(driftTerm + volTerm * z);
        }
        return results;
    }

    /**
     * Expected fraction of users whose hypothetical PnL falls below their
     * capitulation threshold, averaged over simulated price scenarios.
     * @param {Array<number>|Float32Array} pricePaths - Simulated terminal prices.
     * @param {Array<{entryPrice:number, thresholdPct:number}>} userProfiles
     * @returns {number} Average capitulation ratio in [0, 1]; 0 on empty input.
     */
    static simulatePopulationBreakdown(pricePaths, userProfiles) {
        if (!pricePaths.length || !userProfiles.length) return 0;

        let totalBreakdownEvents = 0;
        const totalSims = pricePaths.length;
        const totalUsers = userProfiles.length;

        for (let i = 0; i < totalSims; i++) {
            const simPrice = pricePaths[i];
            let capitulatedUsersInScenario = 0;

            for (let j = 0; j < totalUsers; j++) {
                const user = userProfiles[j];
                const hypotheticalPnL = ((simPrice - user.entryPrice) / user.entryPrice) * 100;

                if (hypotheticalPnL < user.thresholdPct) {
                    capitulatedUsersInScenario++;
                }
            }

            totalBreakdownEvents += (capitulatedUsersInScenario / totalUsers);
        }

        return totalBreakdownEvents / totalSims;
    }
}
|
|
113
|
-
|
|
114
|
-
/**
 * Portfolio/risk metrics built on top of MathPrimitives.
 */
class FinancialEngineering {
    /**
     * Sortino ratio: mean excess return over downside deviation.
     * @param {Array<number>} returns
     * @param {number} [targetReturn=0] - Minimum acceptable return.
     * @returns {number} 0 for < 2 returns or when there is no downside.
     */
    static sortinoRatio(returns, targetReturn = 0) {
        if (!returns || returns.length < 2) return 0;
        const avgReturn = MathPrimitives.average(returns);
        // Only shortfalls below the target contribute to downside variance.
        let downsideSumSq = 0;
        for (const r of returns) {
            const shortfall = Math.min(0, r - targetReturn);
            downsideSumSq += shortfall * shortfall;
        }
        const downsideDev = Math.sqrt(downsideSumSq / returns.length);
        return downsideDev === 0 ? 0 : (avgReturn - targetReturn) / downsideDev;
    }

    /**
     * Kelly criterion: optimal bet fraction, floored at 0 (never short).
     * @param {number} winRatio - Win percentage (0-100).
     * @param {number} avgWinPct - Average winning trade size (%).
     * @param {number} avgLossPct - Average losing trade size (%); sign ignored.
     * @returns {number} Fraction in [0, 1); 0 when losses are zero-magnitude.
     */
    static kellyCriterion(winRatio, avgWinPct, avgLossPct) {
        const p = winRatio / 100;
        const q = 1 - p;
        const lossMag = Math.abs(avgLossPct);
        if (lossMag === 0) return 0;
        const payoff = avgWinPct / lossMag;
        const fraction = (payoff * p - q) / payoff;
        return Math.max(0, fraction);
    }
}
|
|
135
|
-
|
|
136
|
-
/**
 * Time-series diagnostics: long-memory estimation (Hurst) and spectral
 * analysis (FFT).
 */
class TimeSeriesAnalysis {
    /**
     * Hurst exponent via a single-window rescaled-range (R/S) estimate on
     * log returns. > 0.5 suggests trending, < 0.5 mean reversion.
     * @param {Array<number>} series - Price series (length >= 10).
     * @returns {number} Estimate clamped to [0, 1]; 0.5 when undecidable.
     */
    static hurstExponent(series) {
        if (!series || series.length < 10) return 0.5;
        const logReturns = [];
        for (let i = 1; i < series.length; i++) {
            logReturns.push(Math.log(series[i] / series[i - 1]));
        }
        const mean = MathPrimitives.average(logReturns);
        const stdDev = MathPrimitives.standardDeviation(logReturns);
        if (stdDev === 0) return 0.5;

        // Cumulative deviations from the mean define the range R.
        let running = 0;
        const cumulative = logReturns.map(r => running += (r - mean));
        const range = Math.max(...cumulative) - Math.min(...cumulative);

        const rescaledRange = range / stdDev;
        const estimate = Math.log(rescaledRange) / Math.log(logReturns.length);
        return Math.min(1, Math.max(0, estimate));
    }

    /**
     * UPGRADE: Fast Fourier Transform (Cooley-Tukey Algorithm) - O(n log n)
     * Useful for detecting cycles in price data.
     * Inputs whose length is not a power of two are zero-padded.
     * @param {Array<number>} data - Input signal
     * @returns {Array<Object>} Array of { real, imag } complex numbers
     */
    static fft(data) {
        const n = data.length;
        if (n <= 1) return data.map(v => ({ real: v, imag: 0 }));

        // Zero-pad to the next power of two so the radix-2 split is valid.
        if ((n & (n - 1)) !== 0) {
            const size = Math.pow(2, Math.ceil(Math.log2(n)));
            const padded = new Array(size).fill(0);
            for (let i = 0; i < n; i++) padded[i] = data[i];
            return this.fft(padded);
        }

        // Split into even/odd-indexed halves and recurse.
        const evens = data.filter((_, idx) => idx % 2 === 0);
        const odds = data.filter((_, idx) => idx % 2 === 1);
        const lo = this.fft(evens);
        const hi = this.fft(odds);

        const out = new Array(n);
        const half = n / 2;
        for (let k = 0; k < half; k++) {
            // Twiddle factor e^(-2πik/n), applied to the odd half.
            const angle = -2 * Math.PI * k / n;
            const wr = Math.cos(angle);
            const wi = Math.sin(angle);
            const tr = wr * hi[k].real - wi * hi[k].imag;
            const ti = wr * hi[k].imag + wi * hi[k].real;

            out[k] = { real: lo[k].real + tr, imag: lo[k].imag + ti };
            out[k + half] = { real: lo[k].real - tr, imag: lo[k].imag - ti };
        }
        return out;
    }
}
|
|
208
|
-
|
|
209
|
-
/**
 * Helpers for reading computed-signal dependency maps shaped as
 * dependencies[calcName][ticker][fieldName].
 */
class SignalPrimitives {
    /**
     * Numeric field lookup with a fallback for missing or non-numeric values.
     * @returns {number} The field's value, or `fallback`.
     */
    static getMetric(dependencies, calcName, ticker, fieldName, fallback = 0) {
        if (!dependencies || !dependencies[calcName]) return fallback;
        const row = dependencies[calcName][ticker];
        if (!row) return fallback;
        const value = row[fieldName];
        return typeof value === 'number' ? value : fallback;
    }

    /**
     * Union of ticker keys across the named calculation result maps.
     * @returns {Array<string>} Empty when dependencies is null.
     */
    static getUnionKeys(dependencies, calcNames) {
        if (!dependencies) return [];
        const union = new Set();
        for (const name of calcNames) {
            const resultObj = dependencies[name];
            if (resultObj && typeof resultObj === 'object') {
                for (const key of Object.keys(resultObj)) union.add(key);
            }
        }
        return [...union];
    }

    /** Squash a value into (-scale, scale) via tanh; 0 maps to 0. */
    static normalizeTanh(value, scale = 10, sensitivity = 10.0) {
        return value === 0 ? 0 : Math.tanh(value / sensitivity) * scale;
    }

    /** Z-score; 0 when stdDev is missing or zero. */
    static normalizeZScore(value, mean, stdDev) {
        return (!stdDev || stdDev === 0) ? 0 : (value - mean) / stdDev;
    }

    /** Difference of two values, treating null/undefined as 0. */
    static divergence(valueA, valueB) {
        return (valueA || 0) - (valueB || 0);
    }

    /**
     * Previous run's value for a calc/ticker.
     * @returns {*} The named field when fieldName is given, otherwise the
     *   whole ticker row; null when the calc or ticker is absent.
     */
    static getPreviousState(previousComputed, calcName, ticker, fieldName = null) {
        const calcResults = previousComputed ? previousComputed[calcName] : null;
        if (!calcResults) return null;
        const row = calcResults[ticker];
        if (!row) return null;
        return fieldName ? row[fieldName] : row;
    }
}
|
|
252
|
-
|
|
253
|
-
/**
 * Population-level aggregation over user portfolios.
 * Depends on DataExtractor (./extractors) and SCHEMAS (./profiling).
 */
class Aggregators {
    /**
     * Groups user ids into per-ticker winner/loser buckets by net PnL.
     * @param {Object} usersData - userId -> portfolio document.
     * @param {Object} tickerMap - instrumentId -> ticker symbol.
     * @returns {Object} ticker -> { winners: string[], losers: string[] }
     */
    static bucketUsersByPnlPerAsset(usersData, tickerMap) {
        const perTicker = new Map();
        for (const [userId, portfolio] of Object.entries(usersData)) {
            // Portfolios exposing PublicPositions are treated as speculators.
            const kind = portfolio.PublicPositions
                ? SCHEMAS.USER_TYPES.SPECULATOR
                : SCHEMAS.USER_TYPES.NORMAL;
            for (const position of DataExtractor.getPositions(portfolio, kind)) {
                const instrumentId = DataExtractor.getInstrumentId(position);
                const pnl = DataExtractor.getNetProfit(position);
                // Flat or unidentifiable positions carry no signal.
                if (!instrumentId || pnl === 0) continue;
                const ticker = tickerMap[instrumentId];
                if (!ticker) continue; // unmapped instrument
                if (!perTicker.has(ticker)) perTicker.set(ticker, { winners: [], losers: [] });
                const bucket = perTicker.get(ticker);
                (pnl > 0 ? bucket.winners : bucket.losers).push(userId);
            }
        }
        return Object.fromEntries(perTicker);
    }

    /**
     * Exposure-weighted average PnL across positions.
     * Positions with non-positive weight are ignored.
     * @returns {number} 0 when there are no positions or no total weight.
     */
    static getWeightedSentiment(positions) {
        if (!positions || positions.length === 0) return 0;
        let weightedSum = 0;
        let weightTotal = 0;
        for (const position of positions) {
            const pnl = DataExtractor.getNetProfit(position);
            const weight = DataExtractor.getPositionWeight(position);
            if (weight > 0) {
                weightedSum += pnl * weight;
                weightTotal += weight;
            }
        }
        return weightTotal === 0 ? 0 : weightedSum / weightTotal;
    }
}
|
|
290
|
-
|
|
291
|
-
/** Incremental and windowed time-series statistics. */
class TimeSeries {
    /**
     * One exponential-moving-average step over mean and variance.
     * Null/undefined/NaN observations leave the state unchanged.
     * @param {number} value - New observation.
     * @param {?{mean:number, variance:number}} state - Previous state (null starts fresh).
     * @param {number} [alpha=0.1] - Smoothing factor.
     * @returns {{mean:number, variance:number}}
     */
    static updateEMAState(value, state, alpha = 0.1) {
        // Falsy stored values fall back to the fresh-state defaults (0 / 1).
        const mean = state ? (state.mean || 0) : 0;
        const variance = state ? (state.variance || 1) : 1;
        if (value === undefined || value === null || isNaN(value)) {
            return { mean, variance }; // ignore bad observations
        }
        const delta = value - mean;
        return {
            mean: mean + (alpha * delta),
            variance: (1 - alpha) * (variance + (alpha * delta * delta))
        };
    }

    /**
     * Pearson correlation coefficient of two equal-length series.
     * @returns {number} 0 on mismatched lengths, empty, or zero variance.
     */
    static pearsonCorrelation(x, y) {
        if (!x || !y || x.length !== y.length || x.length === 0) return 0;
        const n = x.length;
        let sx = 0;
        let sy = 0;
        let sxy = 0;
        let sxx = 0;
        let syy = 0;
        for (let i = 0; i < n; i++) {
            sx += x[i];
            sy += y[i];
            sxy += x[i] * y[i];
            sxx += x[i] * x[i];
            syy += y[i] * y[i];
        }
        const numerator = (n * sxy) - (sx * sy);
        const denominator = Math.sqrt(((n * sxx) - (sx * sx)) * ((n * syy) - (sy * sy)));
        return denominator === 0 ? 0 : numerator / denominator;
    }

    /**
     * UPGRADE: Sliding Window Min/Max using Monotonic Queue - O(n)
     * Calculates rolling min/max for a stream of numbers efficiently.
     * @param {Array<number>} data - Input series
     * @param {number} windowSize - The rolling window size
     * @returns {Object} { min: Array, max: Array }
     */
    static slidingWindowExtrema(data, windowSize) {
        if (!data || data.length === 0) return { min: [], max: [] };
        const mins = [];
        const maxs = [];
        const minIdx = []; // deque of indices, values increasing
        const maxIdx = []; // deque of indices, values decreasing

        for (let i = 0; i < data.length; i++) {
            // Evict indices that slid out of the window.
            if (minIdx.length && minIdx[0] <= i - windowSize) minIdx.shift();
            if (maxIdx.length && maxIdx[0] <= i - windowSize) maxIdx.shift();

            // Restore monotonicity before admitting index i.
            while (minIdx.length && data[minIdx[minIdx.length - 1]] >= data[i]) minIdx.pop();
            while (maxIdx.length && data[maxIdx[maxIdx.length - 1]] <= data[i]) maxIdx.pop();

            minIdx.push(i);
            maxIdx.push(i);

            // Emit once the first full window is available.
            if (i >= windowSize - 1) {
                mins.push(data[minIdx[0]]);
                maxs.push(data[maxIdx[0]]);
            }
        }
        return { min: mins, max: maxs };
    }
}
|
|
360
|
-
|
|
361
|
-
/**
 * Distribution-level analytics: kernel density estimation, numeric
 * integration of density curves, and simple linear regression.
 */
class DistributionAnalytics {
    /**
     * Gaussian kernel density estimate over weighted points.
     * @param {Array<{value:number, weight:number}>} data
     * @param {number} bandwidth - Kernel bandwidth (smoothing parameter).
     * @param {number} [steps=60] - Number of evaluation intervals.
     * @returns {Array<{price:number, density:number}>} Only points with density > 0.
     */
    static computeKDE(data, bandwidth, steps = 60) {
        if (!data || data.length === 0) return [];
        let min = Infinity, max = -Infinity;
        for (const p of data) {
            if (p.value < min) min = p.value;
            if (p.value > max) max = p.value;
        }
        // Extend the evaluation grid 3 bandwidths past the data range so the
        // tails of the outermost kernels are captured.
        min -= bandwidth * 3;
        max += bandwidth * 3;
        const stepSize = (max - min) / steps;
        const curve = [];
        for (let i = 0; i <= steps; i++) {
            const x = min + (i * stepSize);
            let density = 0;
            for (const p of data) {
                const diff = (x - p.value);
                // Beyond 3 bandwidths the Gaussian kernel is negligible.
                if (Math.abs(diff) > bandwidth * 3) continue;
                const u = diff / bandwidth;
                const k = 0.39894228 * Math.exp(-0.5 * u * u); // 1/sqrt(2π) · e^(-u²/2)
                density += (p.weight * k) / bandwidth;
            }
            if (density > 0) curve.push({ price: x, density });
        }
        return curve;
    }

    /**
     * Trapezoidal integral of a density curve over [startPrice, endPrice].
     * Only segments fully inside the interval contribute.
     * @returns {number} 0 for null/non-array input.
     */
    static integrateProfile(curve, startPrice, endPrice) {
        if (!curve || !Array.isArray(curve)) return 0;
        let sum = 0;
        for (let i = 0; i < curve.length - 1; i++) {
            const p1 = curve[i];
            const p2 = curve[i + 1];
            if (p1.price >= startPrice && p2.price <= endPrice) {
                sum += (p2.price - p1.price) * ((p1.density + p2.density) / 2);
            }
        }
        return sum;
    }

    /**
     * Ordinary least-squares fit.
     * FIX: the original returned { slope, n } on the success path while the
     * guard path returned { slope: 0, r2: 0 } — so callers reading .r2 got
     * undefined whenever the fit succeeded. Both paths now return
     * { slope, r2, n }. Also guards the zero x-variance case (all x equal),
     * which previously produced NaN/Infinity slopes.
     * @param {Array<number>} xValues
     * @param {Array<number>} yValues
     * @returns {{slope:number, r2:number, n:number}}
     */
    static linearRegression(xValues, yValues) {
        if (!xValues || !yValues) return { slope: 0, r2: 0, n: 0 };
        const n = xValues.length;
        if (n !== yValues.length || n < 2) return { slope: 0, r2: 0, n };
        let sumX = 0, sumY = 0, sumXY = 0, sumXX = 0, sumYY = 0;
        for (let i = 0; i < n; i++) {
            sumX += xValues[i]; sumY += yValues[i];
            sumXY += xValues[i] * yValues[i];
            sumXX += xValues[i] * xValues[i]; sumYY += yValues[i] * yValues[i];
        }
        const covXY = n * sumXY - sumX * sumY;
        const varX = n * sumXX - sumX * sumX;
        const varY = n * sumYY - sumY * sumY;
        if (varX === 0) return { slope: 0, r2: 0, n }; // vertical line: undefined slope
        const slope = covXY / varX;
        // r² = cov² / (varX · varY); a flat y-series has no explainable variance.
        const r2 = varY === 0 ? 0 : (covXY * covXY) / (varX * varY);
        return { slope, r2, n };
    }
}
|
|
414
|
-
|
|
415
|
-
/**
|
|
416
|
-
* file: computation-system/layers/mathematics.js
|
|
417
|
-
* [Previous content remains, adding LinearAlgebra class]
|
|
418
|
-
*/
|
|
419
|
-
|
|
420
|
-
/**
 * Dense linear-algebra helpers used for multivariate anomaly detection:
 * covariance matrix -> matrix inverse -> Mahalanobis distance.
 */
class LinearAlgebra {
    /**
     * Calculates the Covariance Matrix and Mean Vector for a dataset
     * (sample covariance, N-1 denominator).
     * @param {Array<Array<number>>} data - Rows are observations, Cols are features
     * @returns {Object} { matrix: Array<Array<number>>, means: Array<number> }
     */
    static covarianceMatrix(data) {
        if (!data || data.length === 0) return { matrix: [], means: [] };
        const rows = data.length;
        const cols = data[0].length;

        // Per-feature means.
        const means = new Array(cols).fill(0);
        for (const row of data) {
            for (let j = 0; j < cols; j++) means[j] += row[j];
        }
        for (let j = 0; j < cols; j++) means[j] /= rows;

        // Cov(i,j) = Σ_k (data[k][i] - mean_i)(data[k][j] - mean_j) / (N-1)
        const denom = rows > 1 ? rows - 1 : 1;
        const matrix = Array(cols).fill(0).map(() => Array(cols).fill(0));
        for (let i = 0; i < cols; i++) {
            for (let j = 0; j < cols; j++) {
                let acc = 0;
                for (let k = 0; k < rows; k++) {
                    acc += (data[k][i] - means[i]) * (data[k][j] - means[j]);
                }
                matrix[i][j] = acc / denom;
            }
        }
        return { matrix, means };
    }

    /**
     * Inverts a square matrix via Gauss-Jordan elimination.
     * No row pivoting is attempted: a (near-)zero diagonal entry means the
     * features are (nearly) linearly dependent, and null is returned.
     * @param {Array<Array<number>>} M - Square Matrix (not mutated).
     * @returns {?Array<Array<number>>} The inverse, or null.
     */
    static invertMatrix(M) {
        if (!M || M.length === 0) return null;
        const n = M.length;

        // Work on a copy so the caller's matrix is untouched; track the
        // identity alongside it (conceptually the augmented matrix [M | I]).
        const work = M.map(row => [...row]);
        const inverse = Array(n).fill(0).map((_, i) =>
            Array(n).fill(0).map((_, j) => (i === j ? 1 : 0)));

        for (let col = 0; col < n; col++) {
            const pivot = work[col][col];
            if (Math.abs(pivot) < 1e-10) return null; // singular

            // Scale the pivot row to put 1 on the diagonal.
            for (let j = 0; j < n; j++) {
                work[col][j] /= pivot;
                inverse[col][j] /= pivot;
            }

            // Zero out this column in every other row.
            for (let row = 0; row < n; row++) {
                if (row === col) continue;
                const factor = work[row][col];
                for (let j = 0; j < n; j++) {
                    work[row][j] -= factor * work[col][j];
                    inverse[row][j] -= factor * inverse[col][j];
                }
            }
        }
        return inverse;
    }

    /**
     * Calculates Mahalanobis Distance
     * D = sqrt( (x - μ)^T * Σ^-1 * (x - μ) )
     * @param {Array<number>} vector - The current day's feature vector
     * @param {Array<number>} means - The baseline mean vector
     * @param {Array<Array<number>>} inverseCovariance - The inverted covariance matrix
     * @returns {number} 0 when the inverse is missing or dimensions mismatch.
     */
    static mahalanobisDistance(vector, means, inverseCovariance) {
        if (!inverseCovariance || vector.length !== means.length) return 0;
        const n = vector.length;
        const diff = vector.map((v, i) => v - means[i]);

        let quadForm = 0;
        for (let i = 0; i < n; i++) {
            let dot = 0;
            for (let j = 0; j < n; j++) {
                dot += diff[j] * inverseCovariance[j][i];
            }
            quadForm += dot * diff[i];
        }

        // Numerical noise can push the quadratic form slightly negative.
        return Math.sqrt(Math.max(0, quadForm));
    }
}
|
|
519
|
-
|
|
520
|
-
// ... existing exports ...
|
|
521
|
-
module.exports = { MathPrimitives, SignalPrimitives, Aggregators, TimeSeries, DistributionAnalytics, FinancialEngineering, TimeSeriesAnalysis, LinearAlgebra };
|
|
522
|
-
|