datly 0.0.1 → 0.0.2
This diff shows the changes between publicly released versions of the package, as published to its public registry. It is provided for informational purposes only.
- package/dist/datly.cjs +1 -0
- package/dist/datly.mjs +1 -0
- package/dist/datly.umd.js +1 -1
- package/dist/datly.umd.js.map +1 -0
- package/package.json +24 -11
- package/src/core/dataLoader.js +407 -0
- package/src/core/utils.js +306 -0
- package/src/core/validator.js +205 -0
- package/src/dataviz/index.js +1566 -0
- package/src/descriptive/centralTendency.js +208 -0
- package/src/descriptive/dispersion.js +273 -0
- package/src/descriptive/position.js +268 -0
- package/src/descriptive/shape.js +336 -0
- package/src/index.js +480 -0
- package/src/inferential/confidenceIntervals.js +561 -0
- package/src/inferential/hypothesisTesting.js +527 -0
- package/src/inferential/normalityTests.js +587 -0
- package/src/insights/autoAnalyser.js +685 -0
- package/src/insights/interpreter.js +543 -0
- package/src/insights/patternDetector.js +897 -0
- package/src/insights/reportGenerator.js +1072 -0
- package/src/ml/ClassificationMetrics.js +336 -0
- package/src/ml/DecisionTree.js +412 -0
- package/src/ml/KNearestNeighbors.js +317 -0
- package/src/ml/LinearRegression.js +179 -0
- package/src/ml/LogisticRegression.js +396 -0
- package/src/ml/MachineLearning.js +490 -0
- package/src/ml/NaiveBayes.js +296 -0
- package/src/ml/RandomForest.js +323 -0
- package/src/ml/SupportVectorMachine.js +299 -0
- package/src/ml/baseModel.js +106 -0
- package/src/multivariate/correlation.js +653 -0
- package/src/multivariate/regression.js +660 -0
package/src/inferential/hypothesisTesting.js
@@ -0,0 +1,527 @@
+class HypothesisTesting {
+  tTest(sample1, sample2, type = 'two-sample', alpha = 0.05) {
+    const validSample1 = sample1.filter(val =>
+      typeof val === 'number' && !isNaN(val) && isFinite(val)
+    );
+
+    if (validSample1.length < 2) {
+      throw new Error('Sample 1 must have at least 2 valid values');
+    }
+
+    switch (type) {
+      case 'one-sample':
+        return this.oneSampleTTest(validSample1, sample2, alpha);
+      case 'two-sample':
+        const validSample2 = sample2.filter(val =>
+          typeof val === 'number' && !isNaN(val) && isFinite(val)
+        );
+        if (validSample2.length < 2) {
+          throw new Error('Sample 2 must have at least 2 valid values');
+        }
+        return this.twoSampleTTest(validSample1, validSample2, alpha);
+      case 'paired':
+        if (sample1.length !== sample2.length) {
+          throw new Error('Paired samples must have the same length');
+        }
+        return this.pairedTTest(validSample1, sample2, alpha);
+      default:
+        throw new Error('Unknown t-test type. Use: one-sample, two-sample, or paired');
+    }
+  }
+
+  oneSampleTTest(sample, mu0, alpha = 0.05) {
+    const n = sample.length;
+    const mean = sample.reduce((sum, val) => sum + val, 0) / n;
+    const variance = sample.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / (n - 1);
+    const stdError = Math.sqrt(variance / n);
+
+    if (stdError === 0) {
+      throw new Error('Cannot perform t-test when standard error is zero');
+    }
+
+    const tStat = (mean - mu0) / stdError;
+    const df = n - 1;
+    const pValue = 2 * (1 - this.tCDF(Math.abs(tStat), df));
+
+    return {
+      type: 'one-sample',
+      statistic: tStat,
+      pValue: pValue,
+      degreesOfFreedom: df,
+      sampleMean: mean,
+      hypothesizedMean: mu0,
+      standardError: stdError,
+      criticalValue: this.tInverse(1 - alpha / 2, df),
+      significant: pValue < alpha,
+      alpha: alpha
+    };
+  }
+
+  twoSampleTTest(sample1, sample2, alpha = 0.05, equalVariances = false) {
+    const n1 = sample1.length;
+    const n2 = sample2.length;
+
+    const mean1 = sample1.reduce((sum, val) => sum + val, 0) / n1;
+    const mean2 = sample2.reduce((sum, val) => sum + val, 0) / n2;
+
+    const var1 = sample1.reduce((sum, val) => sum + Math.pow(val - mean1, 2), 0) / (n1 - 1);
+    const var2 = sample2.reduce((sum, val) => sum + Math.pow(val - mean2, 2), 0) / (n2 - 1);
+
+    let tStat, df, stdError;
+
+    if (equalVariances) {
+      const pooledVar = ((n1 - 1) * var1 + (n2 - 1) * var2) / (n1 + n2 - 2);
+      stdError = Math.sqrt(pooledVar * (1 / n1 + 1 / n2));
+      df = n1 + n2 - 2;
+    } else {
+      stdError = Math.sqrt(var1 / n1 + var2 / n2);
+      df = Math.pow(var1 / n1 + var2 / n2, 2) /
+        (Math.pow(var1 / n1, 2) / (n1 - 1) + Math.pow(var2 / n2, 2) / (n2 - 1));
+    }
+
+    if (stdError === 0) {
+      throw new Error('Cannot perform t-test when standard error is zero');
+    }
+
+    tStat = (mean1 - mean2) / stdError;
+    const pValue = 2 * (1 - this.tCDF(Math.abs(tStat), df));
+
+    return {
+      type: 'two-sample',
+      statistic: tStat,
+      pValue: pValue,
+      degreesOfFreedom: df,
+      sample1Mean: mean1,
+      sample2Mean: mean2,
+      meanDifference: mean1 - mean2,
+      standardError: stdError,
+      equalVariances: equalVariances,
+      criticalValue: this.tInverse(1 - alpha / 2, df),
+      significant: pValue < alpha,
+      alpha: alpha
+    };
+  }
+
+  pairedTTest(sample1, sample2, alpha = 0.05) {
+    if (sample1.length !== sample2.length) {
+      throw new Error('Paired samples must have equal length');
+    }
+
+    const differences = [];
+    for (let i = 0; i < sample1.length; i++) {
+      if (typeof sample1[i] === 'number' && typeof sample2[i] === 'number' &&
+          !isNaN(sample1[i]) && !isNaN(sample2[i]) &&
+          isFinite(sample1[i]) && isFinite(sample2[i])) {
+        differences.push(sample1[i] - sample2[i]);
+      }
+    }
+
+    if (differences.length < 2) {
+      throw new Error('Need at least 2 valid paired observations');
+    }
+
+    return this.oneSampleTTest(differences, 0, alpha);
+  }
+
+  zTest(sample, populationMean, populationStd, alpha = 0.05) {
+    const validSample = sample.filter(val =>
+      typeof val === 'number' && !isNaN(val) && isFinite(val)
+    );
+
+    if (validSample.length === 0) {
+      throw new Error('No valid numeric values found');
+    }
+
+    if (populationStd <= 0) {
+      throw new Error('Population standard deviation must be positive');
+    }
+
+    const n = validSample.length;
+    const sampleMean = validSample.reduce((sum, val) => sum + val, 0) / n;
+    const standardError = populationStd / Math.sqrt(n);
+    const zStat = (sampleMean - populationMean) / standardError;
+    const pValue = 2 * (1 - this.normalCDF(Math.abs(zStat)));
+
+    return {
+      type: 'z-test',
+      statistic: zStat,
+      pValue: pValue,
+      sampleMean: sampleMean,
+      populationMean: populationMean,
+      populationStd: populationStd,
+      sampleSize: n,
+      standardError: standardError,
+      criticalValue: this.normalInverse(1 - alpha / 2),
+      significant: pValue < alpha,
+      alpha: alpha
+    };
+  }
+
+  anovaTest(groups, alpha = 0.05) {
+    if (!Array.isArray(groups) || groups.length < 2) {
+      throw new Error('ANOVA requires at least 2 groups');
+    }
+
+    const validGroups = groups.map(group =>
+      group.filter(val => typeof val === 'number' && !isNaN(val) && isFinite(val))
+    );
+
+    validGroups.forEach((group, index) => {
+      if (group.length < 2) {
+        throw new Error(`Group ${index + 1} must have at least 2 valid values`);
+      }
+    });
+
+    const k = validGroups.length;
+    const groupMeans = validGroups.map(group =>
+      group.reduce((sum, val) => sum + val, 0) / group.length
+    );
+    const groupSizes = validGroups.map(group => group.length);
+    const totalN = groupSizes.reduce((sum, n) => sum + n, 0);
+
+    const allValues = validGroups.flat();
+    const grandMean = allValues.reduce((sum, val) => sum + val, 0) / totalN;
+
+    const ssb = validGroups.reduce((sum, group, index) => {
+      return sum + groupSizes[index] * Math.pow(groupMeans[index] - grandMean, 2);
+    }, 0);
+
+    const ssw = validGroups.reduce((sum, group, index) => {
+      const groupMean = groupMeans[index];
+      return sum + group.reduce((groupSum, val) =>
+        groupSum + Math.pow(val - groupMean, 2), 0
+      );
+    }, 0);
+
+    const dfBetween = k - 1;
+    const dfWithin = totalN - k;
+    const msb = ssb / dfBetween;
+    const msw = ssw / dfWithin;
+
+    if (msw === 0) {
+      throw new Error('Cannot perform ANOVA when within-group variance is zero');
+    }
+
+    const fStat = msb / msw;
+    const pValue = 1 - this.fCDF(fStat, dfBetween, dfWithin);
+
+    return {
+      type: 'one-way-anova',
+      statistic: fStat,
+      pValue: pValue,
+      dfBetween: dfBetween,
+      dfWithin: dfWithin,
+      sumOfSquaresBetween: ssb,
+      sumOfSquaresWithin: ssw,
+      meanSquareBetween: msb,
+      meanSquareWithin: msw,
+      groupMeans: groupMeans,
+      grandMean: grandMean,
+      significant: pValue < alpha,
+      alpha: alpha
+    };
+  }
+
+  chiSquareTest(col1, col2, alpha = 0.05) {
+    if (col1.length !== col2.length) {
+      throw new Error('Columns must have the same length');
+    }
+
+    const contingencyResult = this.createContingencyTable(col1, col2);
+    const { table, totals, rows, columns } = contingencyResult;
+
+    let chiSquareStat = 0;
+    const expected = {};
+
+    rows.forEach(row => {
+      expected[row] = {};
+      columns.forEach(col => {
+        const expectedFreq = (totals.row[row] * totals.col[col]) / totals.grand;
+        expected[row][col] = expectedFreq;
+
+        if (expectedFreq < 5) {
+          console.warn(`Warning: Expected frequency (${expectedFreq.toFixed(2)}) is less than 5`);
+        }
+
+        const observed = table[row][col];
+        chiSquareStat += Math.pow(observed - expectedFreq, 2) / expectedFreq;
+      });
+    });
+
+    const df = (rows.length - 1) * (columns.length - 1);
+    const pValue = 1 - this.chiSquareCDF(chiSquareStat, df);
+
+    return {
+      type: 'chi-square-independence',
+      statistic: chiSquareStat,
+      pValue: pValue,
+      degreesOfFreedom: df,
+      observed: table,
+      expected: expected,
+      significant: pValue < alpha,
+      alpha: alpha,
+      cramersV: this.cramersV(chiSquareStat, totals.grand, Math.min(rows.length, columns.length))
+    };
+  }
+
+  createContingencyTable(col1, col2) {
+    const uniqueCol1 = [...new Set(col1)];
+    const uniqueCol2 = [...new Set(col2)];
+
+    const table = {};
+    const totals = { row: {}, col: {}, grand: 0 };
+
+    uniqueCol1.forEach(val1 => {
+      table[val1] = {};
+      totals.row[val1] = 0;
+    });
+
+    uniqueCol2.forEach(val2 => {
+      totals.col[val2] = 0;
+    });
+
+    for (let i = 0; i < col1.length; i++) {
+      const val1 = col1[i];
+      const val2 = col2[i];
+
+      if (!table[val1][val2]) {
+        table[val1][val2] = 0;
+      }
+
+      table[val1][val2]++;
+      totals.row[val1]++;
+      totals.col[val2]++;
+      totals.grand++;
+    }
+
+    uniqueCol1.forEach(val1 => {
+      uniqueCol2.forEach(val2 => {
+        if (!table[val1][val2]) {
+          table[val1][val2] = 0;
+        }
+      });
+    });
+
+    return { table, totals, rows: uniqueCol1, columns: uniqueCol2 };
+  }
+
+  cramersV(chiSquare, n, minDimension) {
+    return Math.sqrt(chiSquare / (n * (minDimension - 1)));
+  }
+
+  mannWhitneyTest(sample1, sample2, alpha = 0.05) {
+    const validSample1 = sample1.filter(val =>
+      typeof val === 'number' && !isNaN(val) && isFinite(val)
+    );
+    const validSample2 = sample2.filter(val =>
+      typeof val === 'number' && !isNaN(val) && isFinite(val)
+    );
+
+    if (validSample1.length === 0 || validSample2.length === 0) {
+      throw new Error('Both samples must contain valid numeric values');
+    }
+
+    const n1 = validSample1.length;
+    const n2 = validSample2.length;
+
+    const combined = [
+      ...validSample1.map(val => ({ value: val, group: 1 })),
+      ...validSample2.map(val => ({ value: val, group: 2 }))
+    ];
+
+    combined.sort((a, b) => a.value - b.value);
+
+    let currentRank = 1;
+    for (let i = 0; i < combined.length; i++) {
+      const tiedValues = [combined[i]];
+      while (i + 1 < combined.length && combined[i + 1].value === combined[i].value) {
+        i++;
+        tiedValues.push(combined[i]);
+      }
+
+      const avgRank = (currentRank + currentRank + tiedValues.length - 1) / 2;
+      tiedValues.forEach(item => item.rank = avgRank);
+      currentRank += tiedValues.length;
+    }
+
+    const r1 = combined.filter(item => item.group === 1)
+      .reduce((sum, item) => sum + item.rank, 0);
+
+    const u1 = r1 - (n1 * (n1 + 1)) / 2;
+    const u2 = n1 * n2 - u1;
+    const uStat = Math.min(u1, u2);
+
+    const meanU = (n1 * n2) / 2;
+    const stdU = Math.sqrt((n1 * n2 * (n1 + n2 + 1)) / 12);
+    const zStat = (uStat - meanU) / stdU;
+    const pValue = 2 * (1 - this.normalCDF(Math.abs(zStat)));
+
+    return {
+      type: 'mann-whitney-u',
+      statistic: uStat,
+      u1: u1,
+      u2: u2,
+      zStatistic: zStat,
+      pValue: pValue,
+      sample1Size: n1,
+      sample2Size: n2,
+      significant: pValue < alpha,
+      alpha: alpha
+    };
+  }
+
+  tCDF(t, df) {
+    const x = df / (t * t + df);
+    return 1 - 0.5 * this.incompleteBeta(df / 2, 0.5, x);
+  }
+
+  tInverse(p, df) {
+    if (p <= 0 || p >= 1) return NaN;
+
+    let t = this.normalInverse(p);
+    const c1 = t / 4;
+    const c2 = (5 * t * t * t + 16 * t) / 96;
+    const c3 = (3 * t * t * t * t * t + 19 * t * t * t + 17 * t) / 384;
+
+    return t + c1 / df + c2 / (df * df) + c3 / (df * df * df);
+  }
+
+  normalCDF(z) {
+    return 0.5 * (1 + this.erf(z / Math.sqrt(2)));
+  }
+
+  normalInverse(p) {
+    if (p <= 0 || p >= 1) throw new Error('p must be between 0 and 1');
+
+    const a = [-3.969683028665376e+01, 2.209460984245205e+02, -2.759285104469687e+02,
+      1.383577518672690e+02, -3.066479806614716e+01, 2.506628277459239e+00];
+    const b = [-5.447609879822406e+01, 1.615858368580409e+02, -1.556989798598866e+02,
+      6.680131188771972e+01, -1.328068155288572e+01, 1];
+
+    if (p > 0.5) return -this.normalInverse(1 - p);
+
+    const q = Math.sqrt(-2 * Math.log(p));
+    let num = a[5];
+    let den = b[5];
+
+    for (let i = 4; i >= 0; i--) {
+      num = num * q + a[i];
+      den = den * q + b[i];
+    }
+
+    return num / den;
+  }
+
+  fCDF(f, df1, df2) {
+    const x = df2 / (df2 + df1 * f);
+    return 1 - this.incompleteBeta(df2 / 2, df1 / 2, x);
+  }
+
+  chiSquareCDF(x, df) {
+    if (x <= 0) return 0;
+    return this.incompleteGamma(df / 2, x / 2) / this.gamma(df / 2);
+  }
+
+  erf(x) {
+    const a1 = 0.254829592;
+    const a2 = -0.284496736;
+    const a3 = 1.421413741;
+    const a4 = -1.453152027;
+    const a5 = 1.061405429;
+    const p = 0.3275911;
+
+    const sign = x < 0 ? -1 : 1;
+    x = Math.abs(x);
+
+    const t = 1 / (1 + p * x);
+    const y = 1 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.exp(-x * x);
+
+    return sign * y;
+  }
+
+  incompleteBeta(a, b, x) {
+    if (x <= 0) return 0;
+    if (x >= 1) return 1;
+
+    const bt = Math.exp(this.logGamma(a + b) - this.logGamma(a) - this.logGamma(b) +
+      a * Math.log(x) + b * Math.log(1 - x));
+
+    if (x < (a + 1) / (a + b + 2)) {
+      return bt * this.continuedFractionBeta(a, b, x) / a;
+    } else {
+      return 1 - bt * this.continuedFractionBeta(b, a, 1 - x) / b;
+    }
+  }
+
+  continuedFractionBeta(a, b, x) {
+    const qab = a + b;
+    const qap = a + 1;
+    const qam = a - 1;
+    let c = 1;
+    let d = 1 - qab * x / qap;
+
+    if (Math.abs(d) < 1e-30) d = 1e-30;
+    d = 1 / d;
+    let h = d;
+
+    for (let m = 1; m <= 100; m++) {
+      const m2 = 2 * m;
+      let aa = m * (b - m) * x / ((qam + m2) * (a + m2));
+      d = 1 + aa * d;
+      if (Math.abs(d) < 1e-30) d = 1e-30;
+      c = 1 + aa / c;
+      if (Math.abs(c) < 1e-30) c = 1e-30;
+      d = 1 / d;
+      h *= d * c;
+
+      aa = -(a + m) * (qab + m) * x / ((a + m2) * (qap + m2));
+      d = 1 + aa * d;
+      if (Math.abs(d) < 1e-30) d = 1e-30;
+      c = 1 + aa / c;
+      if (Math.abs(c) < 1e-30) c = 1e-30;
+      d = 1 / d;
+      const del = d * c;
+      h *= del;
+
+      if (Math.abs(del - 1) < 1e-12) break;
+    }
+
+    return h;
+  }
+
+  logGamma(x) {
+    const cof = [76.18009172947146, -86.50532032941677, 24.01409824083091,
+      -1.231739572450155, 0.001208650973866179, -0.000005395239384953];
+    let ser = 1.000000000190015;
+
+    const xx = x;
+    let y = x;
+    let tmp = x + 5.5;
+    tmp -= (x + 0.5) * Math.log(tmp);
+
+    for (let j = 0; j < 6; j++) ser += cof[j] / ++y;
+
+    return -tmp + Math.log(2.5066282746310005 * ser / xx);
+  }
+
+  incompleteGamma(a, x) {
+    if (x <= 0) return 0;
+
+    let sum = 1;
+    let term = 1;
+
+    for (let n = 1; n < 100; n++) {
+      term *= x / (a + n - 1);
+      sum += term;
+      if (Math.abs(term) < 1e-12) break;
+    }
+
+    return Math.pow(x, a) * Math.exp(-x) * sum;
+  }
+
+  gamma(x) {
+    return Math.exp(this.logGamma(x));
+  }
+}
+
+export default HypothesisTesting;
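For orientation, here is a minimal usage sketch of the `HypothesisTesting` class added in this file. It is illustrative only: the import path and the sample data are assumptions for the sketch, not part of the package's documented API.

```js
// Illustrative sketch only: the import path assumes direct access to the
// source file inside the installed package; the data below is invented.
import HypothesisTesting from 'datly/src/inferential/hypothesisTesting.js';

const ht = new HypothesisTesting();

// Two-sample t-test; equalVariances defaults to false (Welch's t-test).
const groupA = [12.1, 11.8, 12.4, 12.0, 11.9, 12.3];
const groupB = [12.6, 12.9, 12.5, 12.8, 12.7, 13.0];
const t = ht.tTest(groupA, groupB, 'two-sample', 0.05);
console.log(t.statistic, t.pValue, t.significant);

// One-way ANOVA across three groups.
const f = ht.anovaTest([groupA, groupB, [11.5, 11.7, 11.6, 11.4]]);
console.log(f.statistic, f.pValue, f.significant);

// Chi-square test of independence on two categorical columns.
// (With this few observations, the class warns that expected
// frequencies fall below 5.)
const chi = ht.chiSquareTest(
  ['yes', 'no', 'yes', 'yes', 'no', 'no'],
  ['left', 'left', 'right', 'right', 'left', 'right']
);
console.log(chi.statistic, chi.pValue, chi.cramersV);
```

`tTest` also accepts `'one-sample'` (passing the hypothesized mean as the second argument) and `'paired'` types, mirroring the switch at the top of the file.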