deepbox 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (173) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +344 -0
  3. package/dist/CSRMatrix-CwGwQRea.d.cts +219 -0
  4. package/dist/CSRMatrix-KzNt6QpS.d.ts +219 -0
  5. package/dist/Tensor-BQLk1ltW.d.cts +147 -0
  6. package/dist/Tensor-g8mUClel.d.ts +147 -0
  7. package/dist/chunk-4S73VUBD.js +677 -0
  8. package/dist/chunk-4S73VUBD.js.map +1 -0
  9. package/dist/chunk-5R4S63PF.js +2925 -0
  10. package/dist/chunk-5R4S63PF.js.map +1 -0
  11. package/dist/chunk-6AE5FKKQ.cjs +9264 -0
  12. package/dist/chunk-6AE5FKKQ.cjs.map +1 -0
  13. package/dist/chunk-AD436M45.js +3854 -0
  14. package/dist/chunk-AD436M45.js.map +1 -0
  15. package/dist/chunk-ALS7ETWZ.cjs +4263 -0
  16. package/dist/chunk-ALS7ETWZ.cjs.map +1 -0
  17. package/dist/chunk-AU7XHGKJ.js +2092 -0
  18. package/dist/chunk-AU7XHGKJ.js.map +1 -0
  19. package/dist/chunk-B5TNKUEY.js +1481 -0
  20. package/dist/chunk-B5TNKUEY.js.map +1 -0
  21. package/dist/chunk-BCR7G3A6.js +9136 -0
  22. package/dist/chunk-BCR7G3A6.js.map +1 -0
  23. package/dist/chunk-C4PKXY74.cjs +1917 -0
  24. package/dist/chunk-C4PKXY74.cjs.map +1 -0
  25. package/dist/chunk-DWZY6PIP.cjs +6400 -0
  26. package/dist/chunk-DWZY6PIP.cjs.map +1 -0
  27. package/dist/chunk-E3EU5FZO.cjs +2113 -0
  28. package/dist/chunk-E3EU5FZO.cjs.map +1 -0
  29. package/dist/chunk-F3JWBINJ.js +1054 -0
  30. package/dist/chunk-F3JWBINJ.js.map +1 -0
  31. package/dist/chunk-FJYLIGJX.js +1940 -0
  32. package/dist/chunk-FJYLIGJX.js.map +1 -0
  33. package/dist/chunk-JSCDE774.cjs +729 -0
  34. package/dist/chunk-JSCDE774.cjs.map +1 -0
  35. package/dist/chunk-LWECRCW2.cjs +2412 -0
  36. package/dist/chunk-LWECRCW2.cjs.map +1 -0
  37. package/dist/chunk-MLBMYKCG.js +6379 -0
  38. package/dist/chunk-MLBMYKCG.js.map +1 -0
  39. package/dist/chunk-OX6QXFMV.cjs +3874 -0
  40. package/dist/chunk-OX6QXFMV.cjs.map +1 -0
  41. package/dist/chunk-PHV2DKRS.cjs +1072 -0
  42. package/dist/chunk-PHV2DKRS.cjs.map +1 -0
  43. package/dist/chunk-PL7TAYKI.js +4056 -0
  44. package/dist/chunk-PL7TAYKI.js.map +1 -0
  45. package/dist/chunk-PR647I7R.js +1898 -0
  46. package/dist/chunk-PR647I7R.js.map +1 -0
  47. package/dist/chunk-QERHVCHC.cjs +2960 -0
  48. package/dist/chunk-QERHVCHC.cjs.map +1 -0
  49. package/dist/chunk-XEG44RF6.cjs +1514 -0
  50. package/dist/chunk-XEG44RF6.cjs.map +1 -0
  51. package/dist/chunk-XMWVME2W.js +2377 -0
  52. package/dist/chunk-XMWVME2W.js.map +1 -0
  53. package/dist/chunk-ZB75FESB.cjs +1979 -0
  54. package/dist/chunk-ZB75FESB.cjs.map +1 -0
  55. package/dist/chunk-ZLW62TJG.cjs +4061 -0
  56. package/dist/chunk-ZLW62TJG.cjs.map +1 -0
  57. package/dist/chunk-ZXKBDFP3.js +4235 -0
  58. package/dist/chunk-ZXKBDFP3.js.map +1 -0
  59. package/dist/core/index.cjs +204 -0
  60. package/dist/core/index.cjs.map +1 -0
  61. package/dist/core/index.d.cts +2 -0
  62. package/dist/core/index.d.ts +2 -0
  63. package/dist/core/index.js +3 -0
  64. package/dist/core/index.js.map +1 -0
  65. package/dist/dataframe/index.cjs +22 -0
  66. package/dist/dataframe/index.cjs.map +1 -0
  67. package/dist/dataframe/index.d.cts +3 -0
  68. package/dist/dataframe/index.d.ts +3 -0
  69. package/dist/dataframe/index.js +5 -0
  70. package/dist/dataframe/index.js.map +1 -0
  71. package/dist/datasets/index.cjs +134 -0
  72. package/dist/datasets/index.cjs.map +1 -0
  73. package/dist/datasets/index.d.cts +3 -0
  74. package/dist/datasets/index.d.ts +3 -0
  75. package/dist/datasets/index.js +5 -0
  76. package/dist/datasets/index.js.map +1 -0
  77. package/dist/index-74AB8Cyh.d.cts +1126 -0
  78. package/dist/index-9oQx1HgV.d.cts +1180 -0
  79. package/dist/index-BJY2SI4i.d.ts +483 -0
  80. package/dist/index-BWGhrDlr.d.ts +733 -0
  81. package/dist/index-B_DK4FKY.d.cts +242 -0
  82. package/dist/index-BbA2Gxfl.d.ts +456 -0
  83. package/dist/index-BgHYAoSS.d.cts +837 -0
  84. package/dist/index-BndMbqsM.d.ts +1439 -0
  85. package/dist/index-C1mfVYoo.d.ts +2517 -0
  86. package/dist/index-CCvlwAmL.d.cts +809 -0
  87. package/dist/index-CDw5CnOU.d.ts +785 -0
  88. package/dist/index-Cn3SdB0O.d.ts +1126 -0
  89. package/dist/index-CrqLlS-a.d.ts +776 -0
  90. package/dist/index-D61yaSMY.d.cts +483 -0
  91. package/dist/index-D9Loo1_A.d.cts +2517 -0
  92. package/dist/index-DIT_OO9C.d.cts +785 -0
  93. package/dist/index-DIp_RrRt.d.ts +242 -0
  94. package/dist/index-DbultU6X.d.cts +1427 -0
  95. package/dist/index-DmEg_LCm.d.cts +776 -0
  96. package/dist/index-DoPWVxPo.d.cts +1439 -0
  97. package/dist/index-DuCxd-8d.d.ts +837 -0
  98. package/dist/index-Dx42TZaY.d.ts +809 -0
  99. package/dist/index-DyZ4QQf5.d.cts +456 -0
  100. package/dist/index-GFAVyOWO.d.ts +1427 -0
  101. package/dist/index-WHQLn0e8.d.cts +733 -0
  102. package/dist/index-ZtI1Iy4L.d.ts +1180 -0
  103. package/dist/index-eJgeni9c.d.cts +1911 -0
  104. package/dist/index-tk4lSYod.d.ts +1911 -0
  105. package/dist/index.cjs +72 -0
  106. package/dist/index.cjs.map +1 -0
  107. package/dist/index.d.cts +17 -0
  108. package/dist/index.d.ts +17 -0
  109. package/dist/index.js +15 -0
  110. package/dist/index.js.map +1 -0
  111. package/dist/linalg/index.cjs +86 -0
  112. package/dist/linalg/index.cjs.map +1 -0
  113. package/dist/linalg/index.d.cts +3 -0
  114. package/dist/linalg/index.d.ts +3 -0
  115. package/dist/linalg/index.js +5 -0
  116. package/dist/linalg/index.js.map +1 -0
  117. package/dist/metrics/index.cjs +158 -0
  118. package/dist/metrics/index.cjs.map +1 -0
  119. package/dist/metrics/index.d.cts +3 -0
  120. package/dist/metrics/index.d.ts +3 -0
  121. package/dist/metrics/index.js +5 -0
  122. package/dist/metrics/index.js.map +1 -0
  123. package/dist/ml/index.cjs +87 -0
  124. package/dist/ml/index.cjs.map +1 -0
  125. package/dist/ml/index.d.cts +3 -0
  126. package/dist/ml/index.d.ts +3 -0
  127. package/dist/ml/index.js +6 -0
  128. package/dist/ml/index.js.map +1 -0
  129. package/dist/ndarray/index.cjs +501 -0
  130. package/dist/ndarray/index.cjs.map +1 -0
  131. package/dist/ndarray/index.d.cts +5 -0
  132. package/dist/ndarray/index.d.ts +5 -0
  133. package/dist/ndarray/index.js +4 -0
  134. package/dist/ndarray/index.js.map +1 -0
  135. package/dist/nn/index.cjs +142 -0
  136. package/dist/nn/index.cjs.map +1 -0
  137. package/dist/nn/index.d.cts +6 -0
  138. package/dist/nn/index.d.ts +6 -0
  139. package/dist/nn/index.js +5 -0
  140. package/dist/nn/index.js.map +1 -0
  141. package/dist/optim/index.cjs +77 -0
  142. package/dist/optim/index.cjs.map +1 -0
  143. package/dist/optim/index.d.cts +4 -0
  144. package/dist/optim/index.d.ts +4 -0
  145. package/dist/optim/index.js +4 -0
  146. package/dist/optim/index.js.map +1 -0
  147. package/dist/plot/index.cjs +114 -0
  148. package/dist/plot/index.cjs.map +1 -0
  149. package/dist/plot/index.d.cts +6 -0
  150. package/dist/plot/index.d.ts +6 -0
  151. package/dist/plot/index.js +5 -0
  152. package/dist/plot/index.js.map +1 -0
  153. package/dist/preprocess/index.cjs +82 -0
  154. package/dist/preprocess/index.cjs.map +1 -0
  155. package/dist/preprocess/index.d.cts +4 -0
  156. package/dist/preprocess/index.d.ts +4 -0
  157. package/dist/preprocess/index.js +5 -0
  158. package/dist/preprocess/index.js.map +1 -0
  159. package/dist/random/index.cjs +74 -0
  160. package/dist/random/index.cjs.map +1 -0
  161. package/dist/random/index.d.cts +3 -0
  162. package/dist/random/index.d.ts +3 -0
  163. package/dist/random/index.js +5 -0
  164. package/dist/random/index.js.map +1 -0
  165. package/dist/stats/index.cjs +142 -0
  166. package/dist/stats/index.cjs.map +1 -0
  167. package/dist/stats/index.d.cts +3 -0
  168. package/dist/stats/index.d.ts +3 -0
  169. package/dist/stats/index.js +5 -0
  170. package/dist/stats/index.js.map +1 -0
  171. package/dist/tensor-B96jjJLQ.d.cts +205 -0
  172. package/dist/tensor-B96jjJLQ.d.ts +205 -0
  173. package/package.json +226 -0
@@ -0,0 +1,2377 @@
1
+ import { tensor, Tensor } from './chunk-BCR7G3A6.js';
2
+ import { __export, InvalidParameterError, ShapeError, DTypeError, getElementAsNumber, normalizeAxis, validateShape } from './chunk-4S73VUBD.js';
3
+
4
+ // src/stats/index.ts
5
// Public export registry for the stats module. The bundler-generated
// __export helper (imported from chunk-4S73VUBD.js) defines each key on
// stats_exports as a live getter, so the bindings resolve lazily to the
// functions defined below in this chunk.
var stats_exports = {};
__export(stats_exports, {
  anderson: () => anderson,
  bartlett: () => bartlett,
  chisquare: () => chisquare,
  corrcoef: () => corrcoef,
  cov: () => cov,
  f_oneway: () => f_oneway,
  friedmanchisquare: () => friedmanchisquare,
  geometricMean: () => geometricMean,
  harmonicMean: () => harmonicMean,
  kendalltau: () => kendalltau,
  kruskal: () => kruskal,
  kstest: () => kstest,
  kurtosis: () => kurtosis,
  levene: () => levene,
  mannwhitneyu: () => mannwhitneyu,
  mean: () => mean,
  median: () => median,
  mode: () => mode,
  moment: () => moment,
  normaltest: () => normaltest,
  pearsonr: () => pearsonr,
  percentile: () => percentile,
  quantile: () => quantile,
  shapiro: () => shapiro,
  skewness: () => skewness,
  spearmanr: () => spearmanr,
  std: () => std,
  trimMean: () => trimMean,
  ttest_1samp: () => ttest_1samp,
  ttest_ind: () => ttest_ind,
  ttest_rel: () => ttest_rel,
  variance: () => variance,
  wilcoxon: () => wilcoxon
});
41
+
42
+ // src/stats/_internal.ts
43
/**
 * Normalize an axis specification (a single axis or a list of axes) into a
 * sorted, de-duplicated array of non-negative axis indices for a tensor of
 * rank `ndim`. An undefined axis means "reduce everything" and yields [].
 * @param {number|number[]|undefined} axis - raw axis spec (may be negative)
 * @param {number} ndim - tensor rank used to resolve negative axes
 * @returns {number[]} unique normalized axes in ascending order
 */
function normalizeAxes(axis, ndim) {
  if (axis === undefined) return [];
  const requested = Array.isArray(axis) ? axis : [axis];
  const seen = new Set();
  const unique = [];
  for (const candidate of requested) {
    const normalized = normalizeAxis(candidate, ndim);
    if (seen.has(normalized)) continue;
    seen.add(normalized);
    unique.push(normalized);
  }
  unique.sort((lhs, rhs) => lhs - rhs);
  return unique;
}
57
/**
 * Compute the output shape of reducing `shape` over the (already normalized)
 * `axes`. With keepdims the reduced dimensions are kept as 1; otherwise they
 * are dropped entirely. An empty axes list means a full reduction.
 * @param {number[]} shape - input tensor shape
 * @param {number[]} axes - normalized axes to reduce over
 * @param {boolean} keepdims - keep reduced dims as size-1 entries
 * @returns {number[]} validated output shape
 * @throws {ShapeError} if a shape entry is missing
 */
function reducedShape(shape, axes, keepdims) {
  if (axes.length === 0) {
    // Full reduction: scalar shape, or all-ones with keepdims.
    return keepdims ? new Array(shape.length).fill(1) : [];
  }
  const dropped = new Set(axes);
  const result = [];
  for (let i = 0; i < shape.length; i++) {
    const dim = shape[i];
    if (dim === undefined) throw new ShapeError("Internal error: missing shape dimension");
    if (!dropped.has(i)) {
      result.push(dim);
    } else if (keepdims) {
      result.push(1);
    }
  }
  validateShape(result);
  return result;
}
75
/**
 * Row-major (C-order) strides for `shape`: the last axis has stride 1 and
 * each earlier axis strides over the product of the later dimensions.
 * @param {number[]} shape
 * @returns {number[]} strides, same length as shape
 */
function computeStrides(shape) {
  const rank = shape.length;
  const strides = new Array(rank);
  let step = 1;
  for (let axis = rank - 1; axis >= 0; axis--) {
    strides[axis] = step;
    step *= shape[axis] ?? 0;
  }
  return strides;
}
84
/**
 * Guard that two tensors hold the same number of elements.
 * @param {{size: number}} a - first tensor
 * @param {{size: number}} b - second tensor
 * @param {string} name - caller name used in the error message
 * @throws {InvalidParameterError} when the element counts differ
 */
function assertSameSize(a, b, name) {
  if (a.size === b.size) return;
  throw new InvalidParameterError(
    `${name}: tensors must have the same number of elements; got ${a.size} and ${b.size}`,
    "size",
    { a: a.size, b: b.size }
  );
}
93
/**
 * Read the numeric element at a raw data offset, rejecting string-typed
 * tensors (dtype "string" or plain-Array backing store).
 * @throws {DTypeError} for string-dtype tensors
 */
function getNumberAt(t, offset) {
  const isStringBacked = t.dtype === "string" || Array.isArray(t.data);
  if (isStringBacked) {
    throw new DTypeError("operation not supported for string dtype");
  }
  return getElementAsNumber(t.data, offset);
}
99
/**
 * Compute 1-based fractional (average) ranks of `values`; tied values all
 * receive the mean of the ranks they span. Also accumulates the tie
 * correction term sum(t^3 - t) over tie groups, used by rank-based tests.
 * @param {ArrayLike<number>} values
 * @returns {{ranks: Float64Array, tieSum: number}}
 */
function rankData(values) {
  const n = values.length;
  const ranks = new Float64Array(n);
  if (n === 0) return { ranks, tieSum: 0 };
  // Sort index/value pairs so tied runs become contiguous.
  const order = Array.from(values, (v, i) => ({ v, i }));
  order.sort((lhs, rhs) => lhs.v - rhs.v);
  let tieSum = 0;
  let start = 0;
  while (start < n) {
    let end = start + 1;
    while (end < n && order[end]?.v === order[start]?.v) end++;
    const groupSize = end - start;
    // Average of ranks (start+1) .. end, shared by the whole tie group.
    const sharedRank = (start + 1 + end) / 2;
    for (let k = start; k < end; k++) {
      const original = order[k]?.i;
      if (original !== undefined) {
        ranks[original] = sharedRank;
      }
    }
    if (groupSize > 1) tieSum += groupSize * groupSize * groupSize - groupSize;
    start = end;
  }
  return { ranks, tieSum };
}
123
/**
 * Visit every element of the tensor-like `t` in row-major order, invoking
 * fn(dataOffset, index) for each. NOTE: `index` is a reused scratch array
 * (mutated between calls) — callers must copy it to retain a snapshot.
 * Empty tensors produce no calls; 0-d tensors produce one call with [].
 * @param {{size:number,ndim:number,shape:number[],strides:number[],offset:number}} t
 * @param {(offset: number, index: number[]) => void} fn
 */
function forEachIndexOffset(t, fn) {
  if (t.size === 0) return;
  if (t.ndim === 0) {
    fn(t.offset, []);
    return;
  }
  const rank = t.ndim;
  const shape = t.shape;
  const strides = t.strides;
  const index = new Array(rank).fill(0);
  let offset = t.offset;
  for (;;) {
    fn(offset, index);
    // Odometer increment: bump the last axis and carry into earlier ones.
    let axis = rank - 1;
    for (;;) {
      index[axis] = (index[axis] ?? 0) + 1;
      offset += strides[axis] ?? 0;
      if ((index[axis] ?? 0) < (shape[axis] ?? 0)) break;
      // Rewind this axis and carry to the next-outer axis.
      offset -= (index[axis] ?? 0) * (strides[axis] ?? 0);
      index[axis] = 0;
      axis--;
      if (axis < 0) return;
    }
  }
}
148
// Mean reduction over the given axes (or over all elements when axis is
// undefined), always producing a float64 tensor on the same device.
// Throws when the reduction would cover zero elements (mean undefined).
function reduceMean(t, axis, keepdims) {
  const axes = normalizeAxes(axis, t.ndim);
  if (axes.length === 0) {
    // Full reduction to a scalar (or an all-ones shape with keepdims).
    if (t.size === 0) {
      throw new InvalidParameterError("mean() requires at least one element", "size", t.size);
    }
    let sum = 0;
    forEachIndexOffset(t, (off) => {
      sum += getNumberAt(t, off);
    });
    const out = new Float64Array(1);
    out[0] = sum / t.size;
    const outShape2 = keepdims ? new Array(t.ndim).fill(1) : [];
    return Tensor.fromTypedArray({
      data: out,
      shape: outShape2,
      dtype: "float64",
      device: t.device
    });
  }
  const outShape = reducedShape(t.shape, axes, keepdims);
  const outStrides = computeStrides(outShape);
  const outSize = outShape.reduce((a, b) => a * b, 1);
  const sums = new Float64Array(outSize);
  const reduce = new Set(axes);
  // Number of input elements folded into each output cell.
  const reduceCount = axes.reduce((acc, ax) => acc * (t.shape[ax] ?? 0), 1);
  if (reduceCount === 0) {
    throw new InvalidParameterError(
      "mean() reduction over empty axis is undefined",
      "reduceCount",
      reduceCount
    );
  }
  // Accumulate sums, mapping each input index to a flat output index by
  // zeroing (keepdims) or skipping (otherwise) the reduced axes.
  forEachIndexOffset(t, (off, idx) => {
    let outFlat = 0;
    if (keepdims) {
      for (let i = 0; i < t.ndim; i++) {
        const s = outStrides[i] ?? 0;
        const v = reduce.has(i) ? 0 : idx[i] ?? 0;
        outFlat += v * s;
      }
    } else {
      // oi walks the output axes, which exclude the reduced input axes.
      let oi = 0;
      for (let i = 0; i < t.ndim; i++) {
        if (reduce.has(i)) continue;
        outFlat += (idx[i] ?? 0) * (outStrides[oi] ?? 0);
        oi++;
      }
    }
    sums[outFlat] = (sums[outFlat] ?? 0) + getNumberAt(t, off);
  });
  // Divide every accumulated sum by the per-cell element count.
  for (let i = 0; i < sums.length; i++) {
    sums[i] = (sums[i] ?? 0) / reduceCount;
  }
  return Tensor.fromTypedArray({
    data: sums,
    shape: outShape,
    dtype: "float64",
    device: t.device
  });
}
209
// Variance reduction with `ddof` delta degrees of freedom (divisor n - ddof),
// over the given axes or over all elements when axis is undefined.
// Uses Welford's single-pass algorithm for numerical stability.
// Throws for string dtype, empty reductions, negative ddof, or ddof >= n.
function reduceVariance(t, axis, keepdims, ddof) {
  const axes = normalizeAxes(axis, t.ndim);
  if (t.dtype === "string") {
    throw new DTypeError("variance() not supported for string dtype");
  }
  if (axes.length === 0) {
    // Full reduction to a scalar (or all-ones shape with keepdims).
    if (t.size === 0) {
      throw new InvalidParameterError("variance() requires at least one element", "size", t.size);
    }
    if (ddof < 0) {
      throw new InvalidParameterError("ddof must be non-negative", "ddof", ddof);
    }
    if (t.size <= ddof) {
      throw new InvalidParameterError(
        `ddof=${ddof} >= size=${t.size}, variance undefined`,
        "ddof",
        ddof
      );
    }
    // Welford accumulation: running mean plus sum of squared deviations (m2).
    let mean2 = 0;
    let m2 = 0;
    let n = 0;
    forEachIndexOffset(t, (off) => {
      const x = getNumberAt(t, off);
      n++;
      const delta = x - mean2;
      mean2 += delta / n;
      const delta2 = x - mean2;
      m2 += delta * delta2;
    });
    const out2 = new Float64Array(1);
    out2[0] = m2 / (n - ddof);
    const outShape2 = keepdims ? new Array(t.ndim).fill(1) : [];
    return Tensor.fromTypedArray({
      data: out2,
      shape: outShape2,
      dtype: "float64",
      device: t.device
    });
  }
  const outShape = reducedShape(t.shape, axes, keepdims);
  const outStrides = computeStrides(outShape);
  const outSize = outShape.reduce((a, b) => a * b, 1);
  const reduce = new Set(axes);
  // Number of input elements folded into each output cell.
  const reduceCount = axes.reduce((acc, ax) => acc * (t.shape[ax] ?? 0), 1);
  if (reduceCount === 0) {
    throw new InvalidParameterError(
      "variance() reduction over empty axis is undefined",
      "reduceCount",
      reduceCount
    );
  }
  if (ddof < 0) {
    throw new InvalidParameterError("ddof must be non-negative", "ddof", ddof);
  }
  if (reduceCount <= ddof) {
    throw new InvalidParameterError(
      `ddof=${ddof} >= reduced size=${reduceCount}, variance undefined`,
      "ddof",
      ddof
    );
  }
  // Per-output-cell Welford state: running mean, m2, and element count.
  const means = new Float64Array(outSize);
  const m2s = new Float64Array(outSize);
  const counts = new Int32Array(outSize);
  forEachIndexOffset(t, (off, idx) => {
    // Map the input index to a flat output index (reduced axes collapse).
    let outFlat = 0;
    if (keepdims) {
      for (let i = 0; i < t.ndim; i++) {
        const s = outStrides[i] ?? 0;
        const v = reduce.has(i) ? 0 : idx[i] ?? 0;
        outFlat += v * s;
      }
    } else {
      let oi = 0;
      for (let i = 0; i < t.ndim; i++) {
        if (reduce.has(i)) continue;
        outFlat += (idx[i] ?? 0) * (outStrides[oi] ?? 0);
        oi++;
      }
    }
    // Welford update for this output cell.
    const x = getNumberAt(t, off);
    const n = (counts[outFlat] ?? 0) + 1;
    counts[outFlat] = n;
    const mean2 = means[outFlat] ?? 0;
    const delta = x - mean2;
    const nextMean = mean2 + delta / n;
    means[outFlat] = nextMean;
    const delta2 = x - nextMean;
    m2s[outFlat] = (m2s[outFlat] ?? 0) + delta * delta2;
  });
  // Finalize: variance = m2 / (n - ddof) per cell.
  const out = new Float64Array(outSize);
  for (let i = 0; i < outSize; i++) {
    const n = counts[i] ?? 0;
    out[i] = (m2s[i] ?? 0) / (n - ddof);
  }
  return Tensor.fromTypedArray({
    data: out,
    shape: outShape,
    dtype: "float64",
    device: t.device
  });
}
312
// Lanczos approximation coefficients (g = 7, 9 terms) used by logGamma.
// The last two entries are ~9.98e-6 and ~1.51e-7, written in
// integer-mantissa exponent form by the bundler's number minification.
var LANCZOS_COEFFS = [
  0.9999999999998099,
  676.5203681218851,
  -1259.1392167224028,
  771.3234287776531,
  -176.6150291621406,
  12.507343278686905,
  -0.13857109526572012,
  9984369578019572e-21,
  15056327351493116e-23
];
323
/**
 * Natural logarithm of the gamma function, ln Γ(z), via the Lanczos
 * approximation (g = 7) with the reflection formula for z < 0.5.
 * @param {number} z
 * @returns {number} ln Γ(z)
 */
function logGamma(z) {
  if (z < 0.5) {
    // Reflection: Γ(z) Γ(1 - z) = π / sin(πz), taken in log space.
    return Math.log(Math.PI) - Math.log(Math.sin(Math.PI * z)) - logGamma(1 - z);
  }
  const shifted = z - 1;
  let series = LANCZOS_COEFFS[0];
  for (let i = 1; i < LANCZOS_COEFFS.length; i++) {
    series += (LANCZOS_COEFFS[i] ?? 0) / (shifted + i);
  }
  const t = shifted + 7.5;
  return 0.5 * Math.log(2 * Math.PI) + (shifted + 0.5) * Math.log(t) - t + Math.log(series);
}
335
// Continued-fraction expansion for the regularized incomplete beta function,
// evaluated with the modified Lentz method. Internal helper for
// regularizedIncompleteBeta, which performs all argument validation.
function betacf(a, b, x) {
  const MAX_ITER = 200;
  const EPS = 3e-14; // relative convergence tolerance on each CF step
  const FPMIN = 1e-300; // floor to keep divisors away from zero
  const qab = a + b;
  const qap = a + 1;
  const qam = a - 1;
  let c = 1;
  // First step of Lentz's method.
  let d = 1 - qab * x / qap;
  if (Math.abs(d) < FPMIN) d = FPMIN;
  d = 1 / d;
  let h = d;
  for (let m = 1; m <= MAX_ITER; m++) {
    const m2 = 2 * m;
    // Even-numbered continued-fraction coefficient.
    let aa = m * (b - m) * x / ((qam + m2) * (a + m2));
    d = 1 + aa * d;
    if (Math.abs(d) < FPMIN) d = FPMIN;
    c = 1 + aa / c;
    if (Math.abs(c) < FPMIN) c = FPMIN;
    d = 1 / d;
    h *= d * c;
    // Odd-numbered continued-fraction coefficient.
    aa = -(a + m) * (qab + m) * x / ((a + m2) * (qap + m2));
    d = 1 + aa * d;
    if (Math.abs(d) < FPMIN) d = FPMIN;
    c = 1 + aa / c;
    if (Math.abs(c) < FPMIN) c = FPMIN;
    d = 1 / d;
    const del = d * c;
    h *= del;
    // Converged when the multiplicative step is within EPS of 1.
    if (Math.abs(del - 1) < EPS) break;
  }
  return h;
}
368
/**
 * Regularized incomplete beta function I_x(a, b), computed from the
 * continued-fraction helper betacf. Chooses between the direct expansion and
 * the symmetry I_x(a, b) = 1 - I_{1-x}(b, a) for faster convergence.
 * @param {number} a - first shape parameter, > 0
 * @param {number} b - second shape parameter, > 0
 * @param {number} x - evaluation point in [0, 1]
 * @returns {number} I_x(a, b)
 * @throws {InvalidParameterError} for invalid a, b, or x
 */
function regularizedIncompleteBeta(a, b, x) {
  if (!Number.isFinite(a) || a <= 0) {
    throw new InvalidParameterError("a must be > 0", "a", a);
  }
  if (!Number.isFinite(b) || b <= 0) {
    throw new InvalidParameterError("b must be > 0", "b", b);
  }
  if (x < 0 || x > 1) {
    throw new InvalidParameterError("x must be in [0,1]", "x", x);
  }
  if (x === 0) return 0;
  if (x === 1) return 1;
  // Prefactor x^a (1-x)^b / B(a, b), assembled in log space for stability.
  const lnBt = logGamma(a + b) - logGamma(a) - logGamma(b) + a * Math.log(x) + b * Math.log(1 - x);
  const bt = Math.exp(lnBt);
  const directBranch = x < (a + 1) / (a + b + 2);
  if (directBranch) {
    return bt * betacf(a, b, x) / a;
  }
  return 1 - bt * betacf(b, a, 1 - x) / b;
}
387
// Regularized lower incomplete gamma function P(s, x).
// For x < s + 1 the power-series expansion converges quickly; otherwise the
// continued fraction for the upper function Q(s, x) is evaluated (modified
// Lentz method) and P = 1 - Q is returned.
function regularizedLowerIncompleteGamma(s, x) {
  if (!Number.isFinite(s) || s <= 0) {
    throw new InvalidParameterError("s must be > 0", "s", s);
  }
  if (x < 0) throw new InvalidParameterError("x must be >= 0", "x", x);
  if (x === 0) return 0;
  const ITMAX = 200;
  const EPS = 3e-14; // relative convergence tolerance
  const FPMIN = 1e-300; // floor to keep divisors away from zero
  if (x < s + 1) {
    // Series representation: sum x^n / (s (s+1) ... (s+n)).
    let sum = 1 / s;
    let del = sum;
    let ap = s;
    for (let n = 1; n <= ITMAX; n++) {
      ap += 1;
      del *= x / ap;
      sum += del;
      if (Math.abs(del) < Math.abs(sum) * EPS) break;
    }
    // Multiply by the prefactor e^{-x} x^s / Γ(s), in log space.
    return sum * Math.exp(-x + s * Math.log(x) - logGamma(s));
  }
  // Continued fraction for Q(s, x), modified Lentz method.
  let b = x + 1 - s;
  let c = 1 / FPMIN;
  let d = 1 / b;
  let h = d;
  for (let i = 1; i <= ITMAX; i++) {
    const an = -i * (i - s);
    b += 2;
    d = an * d + b;
    if (Math.abs(d) < FPMIN) d = FPMIN;
    c = b + an / c;
    if (Math.abs(c) < FPMIN) c = FPMIN;
    d = 1 / d;
    const del = d * c;
    h *= del;
    if (Math.abs(del - 1) < EPS) break;
  }
  // Q(s, x) with its prefactor, then return the lower function P = 1 - Q.
  const q = Math.exp(-x + s * Math.log(x) - logGamma(s)) * h;
  return 1 - q;
}
427
/**
 * Standard normal CDF Φ(x), using Winitzki's erf approximation (a = 0.147).
 * Absolute error is on the order of 1e-4 — sufficient for the p-values
 * computed in this module.
 * @param {number} x
 * @returns {number} Φ(x) in [0, 1]
 */
function normalCdf(x) {
  const a = 0.147;
  const u = Math.abs(x) / Math.SQRT2;
  const u2 = u * u;
  const ratio = (4 / Math.PI + a * u2) / (1 + a * u2);
  const erfMagnitude = Math.sqrt(1 - Math.exp(-u2 * ratio));
  const erf = x < 0 ? -erfMagnitude : erfMagnitude;
  return 0.5 * (1 + erf);
}
435
/**
 * CDF of Student's t distribution with `df` degrees of freedom, expressed
 * through the regularized incomplete beta function I_x(df/2, 1/2) with
 * x = df / (df + t^2). NaN propagates; ±Infinity map to 0 and 1.
 * @param {number} t - test statistic
 * @param {number} df - degrees of freedom, > 0 and finite
 * @returns {number} P(T <= t)
 * @throws {InvalidParameterError} when df is not a positive finite number
 */
function studentTCdf(t, df) {
  if (!Number.isFinite(df) || df <= 0) {
    throw new InvalidParameterError("df must be > 0", "df", df);
  }
  if (Number.isNaN(t)) return NaN;
  if (!Number.isFinite(t)) return t < 0 ? 0 : 1;
  const x = df / (df + t * t);
  // One-sided tail probability of |t|.
  const tail = 0.5 * regularizedIncompleteBeta(df / 2, 0.5, x);
  return t >= 0 ? 1 - tail : tail;
}
448
/**
 * CDF of the chi-squared distribution with `k` degrees of freedom, via the
 * regularized lower incomplete gamma function P(k/2, x/2).
 * NaN propagates; Infinity maps to 1; non-positive x maps to 0.
 * @param {number} x - evaluation point
 * @param {number} k - degrees of freedom, > 0 and finite
 * @returns {number} P(X <= x)
 * @throws {InvalidParameterError} when k is not a positive finite number
 */
function chiSquareCdf(x, k) {
  if (!(Number.isFinite(k) && k > 0)) {
    throw new InvalidParameterError("degrees of freedom must be > 0", "k", k);
  }
  if (Number.isNaN(x)) return NaN;
  if (x === Infinity) return 1;
  return x > 0 ? regularizedLowerIncompleteGamma(k / 2, x / 2) : 0;
}
457
/**
 * CDF of the F distribution with (dfn, dfd) degrees of freedom, via the
 * regularized incomplete beta function I_z(dfn/2, dfd/2) with
 * z = dfn·x / (dfn·x + dfd). NaN propagates; Infinity maps to 1; x <= 0 to 0.
 * @param {number} x - evaluation point
 * @param {number} dfn - numerator degrees of freedom, > 0 and finite
 * @param {number} dfd - denominator degrees of freedom, > 0 and finite
 * @returns {number} P(F <= x)
 * @throws {InvalidParameterError} when either df is not positive and finite
 */
function fCdf(x, dfn, dfd) {
  if (!(Number.isFinite(dfn) && dfn > 0)) {
    throw new InvalidParameterError("degrees of freedom (dfn) must be > 0", "dfn", dfn);
  }
  if (!(Number.isFinite(dfd) && dfd > 0)) {
    throw new InvalidParameterError("degrees of freedom (dfd) must be > 0", "dfd", dfd);
  }
  if (Number.isNaN(x)) return NaN;
  if (x === Infinity) return 1;
  if (x <= 0) return 0;
  const z = dfn * x / (dfn * x + dfd);
  return regularizedIncompleteBeta(dfn / 2, dfd / 2, z);
}
470
+
471
+ // src/stats/correlation.ts
472
/**
 * Materialize a (possibly strided) numeric tensor into a contiguous
 * Float64Array in row-major element order.
 * @returns {Float64Array} dense copy with t.size elements
 */
function toDenseFlatArray(t) {
  const dense = new Float64Array(t.size);
  let cursor = 0;
  forEachIndexOffset(t, (off) => {
    dense[cursor] = getNumberAt(t, off);
    cursor++;
  });
  return dense;
}
481
/**
 * Pearson correlation coefficient of two equally sized dense numeric arrays.
 * @param {ArrayLike<number>} x
 * @param {ArrayLike<number>} y
 * @returns {number} r in [-1, 1]
 * @throws {InvalidParameterError} when either input has zero variance
 */
function pearsonFromDense(x, y) {
  const n = x.length;
  // First pass: means.
  let totalX = 0;
  let totalY = 0;
  for (let i = 0; i < n; i++) {
    totalX += x[i] ?? 0;
    totalY += y[i] ?? 0;
  }
  const meanX = totalX / n;
  const meanY = totalY / n;
  // Second pass: cross products and squared deviations.
  let cross = 0;
  let sqX = 0;
  let sqY = 0;
  for (let i = 0; i < n; i++) {
    const dx = (x[i] ?? 0) - meanX;
    const dy = (y[i] ?? 0) - meanY;
    cross += dx * dy;
    sqX += dx * dx;
    sqY += dy * dy;
  }
  const denominator = Math.sqrt(sqX * sqY);
  if (denominator === 0) {
    throw new InvalidParameterError(
      "pearsonr() is undefined for constant input",
      "input",
      "constant"
    );
  }
  return cross / denominator;
}
511
/**
 * Pearson correlation with a two-sided p-value from the t distribution
 * (df = n - 2). Returns [r, p]; p is NaN when n == 2 (zero df).
 * @returns {[number, number]} [correlation, two-sided p-value]
 * @throws {InvalidParameterError} for mismatched sizes or n < 2
 */
function pearsonr(x, y) {
  assertSameSize(x, y, "pearsonr");
  const n = x.size;
  if (n < 2) {
    throw new InvalidParameterError("pearsonr() requires at least 2 paired samples", "n", n);
  }
  const r = pearsonFromDense(toDenseFlatArray(x), toDenseFlatArray(y));
  const df = n - 2;
  if (df <= 0) {
    // Not enough degrees of freedom for a significance test.
    return [r, NaN];
  }
  const tStat = r * Math.sqrt(df / (1 - r * r));
  const pValue = 2 * (1 - studentTCdf(Math.abs(tStat), df));
  return [r, pValue];
}
528
/**
 * Spearman rank correlation: the Pearson correlation of the fractional
 * ranks of x and y. The p-value uses the t-distribution approximation with
 * df = n - 2 and is NaN when n == 2.
 * @returns {[number, number]} [rho, two-sided p-value]
 * @throws {InvalidParameterError} for mismatched sizes or n < 2
 */
function spearmanr(x, y) {
  assertSameSize(x, y, "spearmanr");
  const n = x.size;
  if (n < 2) {
    throw new InvalidParameterError("spearmanr() requires at least 2 paired samples", "n", n);
  }
  const xRanks = rankData(toDenseFlatArray(x)).ranks;
  const yRanks = rankData(toDenseFlatArray(y)).ranks;
  const rho = pearsonFromDense(xRanks, yRanks);
  const df = n - 2;
  if (df <= 0) {
    // Not enough degrees of freedom for a significance test.
    return [rho, NaN];
  }
  const tStat = rho * Math.sqrt(df / (1 - rho * rho));
  const pValue = 2 * (1 - studentTCdf(Math.abs(tStat), df));
  return [rho, pValue];
}
544
// Kendall's tau-b rank correlation with tie corrections, plus a two-sided
// p-value from the normal approximation to the distribution of S.
// Returns [tau, p]; tau is NaN when either variable is entirely tied.
// Pair comparison is O(n^2).
function kendalltau(x, y) {
  assertSameSize(x, y, "kendalltau");
  const n = x.size;
  if (n < 2) {
    throw new InvalidParameterError("kendalltau() requires at least 2 paired samples", "n", n);
  }
  const xd = toDenseFlatArray(x);
  const yd = toDenseFlatArray(y);
  // Count concordant/discordant pairs; pairs tied in either variable are
  // skipped here and handled via the tie-correction terms below.
  let concordant = 0;
  let discordant = 0;
  for (let i = 0; i < n - 1; i++) {
    const xi = xd[i] ?? 0;
    const yi = yd[i] ?? 0;
    for (let j = i + 1; j < n; j++) {
      const signX = Math.sign((xd[j] ?? 0) - xi);
      const signY = Math.sign((yd[j] ?? 0) - yi);
      if (signX === 0 || signY === 0) continue;
      if (signX === signY) concordant++;
      else discordant++;
    }
  }
  const n0 = n * (n - 1) / 2;
  // Per-variable tie statistics over tie groups of size t:
  // sumT = Σ t(t-1), sumT2 = Σ t(t-1)(2t+5), sumT3 = Σ t(t-1)(t-2).
  const tieSums = (vals) => {
    const sorted = Array.from(vals).sort((a, b) => a - b);
    let sumT = 0;
    let sumT2 = 0;
    let sumT3 = 0;
    for (let i = 0; i < sorted.length; ) {
      let j = i + 1;
      while (j < sorted.length && sorted[j] === sorted[i]) j++;
      const t = j - i;
      if (t > 1) {
        sumT += t * (t - 1);
        sumT2 += t * (t - 1) * (2 * t + 5);
        sumT3 += t * (t - 1) * (t - 2);
      }
      i = j;
    }
    return { nTies: sumT / 2, sumT, sumT2, sumT3 };
  };
  const tieX = tieSums(xd);
  const tieY = tieSums(yd);
  // tau-b denominator: sqrt((n0 - T_x)(n0 - T_y)).
  const denom = Math.sqrt((n0 - tieX.nTies) * (n0 - tieY.nTies));
  const s = concordant - discordant;
  const tau = denom === 0 ? NaN : s / denom;
  // Variance of S under H0 with tie corrections (normal approximation).
  let varS = (n * (n - 1) * (2 * n + 5) - tieX.sumT2 - tieY.sumT2) / 18 + tieX.sumT * tieY.sumT / (2 * n * (n - 1));
  if (n > 2) {
    // Third-order tie term is only defined for n > 2 (divides by n - 2).
    varS += tieX.sumT3 * tieY.sumT3 / (9 * n * (n - 1) * (n - 2));
  }
  const pValue = varS <= 0 ? NaN : 2 * (1 - normalCdf(Math.abs(s / Math.sqrt(varS))));
  return [tau, pValue];
}
596
// Correlation-coefficient matrix.
// - With two 1-D inputs: 2x2 matrix built from pearsonr (throws if either
//   input is constant, via pearsonFromDense).
// - With one 1-D input: trivial 1x1 matrix [[1]].
// - With one 2-D input: rows are observations, columns are variables;
//   returns the nVar x nVar correlation matrix (NaN where a variable has
//   zero variance).
function corrcoef(x, y) {
  if (y) {
    const [r] = pearsonr(x, y);
    return tensor([
      [1, r],
      [r, 1]
    ]);
  }
  if (x.ndim === 1) {
    if (x.size < 2) {
      throw new InvalidParameterError(
        "corrcoef() requires at least 2 observations",
        "nObs",
        x.size
      );
    }
    // A single variable is perfectly correlated with itself.
    return tensor([[1]]);
  }
  if (x.ndim !== 2) {
    throw new ShapeError("corrcoef() expects a 1D or 2D tensor");
  }
  const nObs = x.shape[0] ?? 0;
  const nVar = x.shape[1] ?? 0;
  if (nObs < 2) {
    throw new InvalidParameterError("corrcoef() requires at least 2 observations", "nObs", nObs);
  }
  const s0 = x.strides[0] ?? 0;
  const s1 = x.strides[1] ?? 0;
  const xOff = x.offset;
  const xData = x.data;
  // Fast path: index the typed array directly, skipping getNumberAt checks.
  // NOTE(review): only BigInt64Array is excluded here — presumably
  // BigUint64Array is not a supported dtype; verify, since direct BigInt
  // indexing would break the arithmetic below.
  const directAccess = !Array.isArray(xData) && !(xData instanceof BigInt64Array);
  // Column means.
  const means = new Float64Array(nVar);
  if (directAccess) {
    for (let j = 0; j < nVar; j++) {
      let s = 0;
      for (let i = 0; i < nObs; i++) {
        s += xData[xOff + i * s0 + j * s1];
      }
      means[j] = s / nObs;
    }
  } else {
    for (let j = 0; j < nVar; j++) {
      let s = 0;
      for (let i = 0; i < nObs; i++) {
        s += getNumberAt(x, xOff + i * s0 + j * s1);
      }
      means[j] = s / nObs;
    }
  }
  // Sample covariance matrix (ddof = 1), filling both triangles by symmetry.
  const cov2 = new Float64Array(nVar * nVar);
  const ddof = 1;
  if (directAccess) {
    for (let a = 0; a < nVar; a++) {
      const ma = means[a];
      for (let b = a; b < nVar; b++) {
        const mb = means[b];
        let s = 0;
        for (let i = 0; i < nObs; i++) {
          const base = xOff + i * s0;
          s += (xData[base + a * s1] - ma) * (xData[base + b * s1] - mb);
        }
        const v = s / (nObs - ddof);
        cov2[a * nVar + b] = v;
        cov2[b * nVar + a] = v;
      }
    }
  } else {
    for (let a = 0; a < nVar; a++) {
      for (let b = a; b < nVar; b++) {
        let s = 0;
        for (let i = 0; i < nObs; i++) {
          const offA = xOff + i * s0 + a * s1;
          const offB = xOff + i * s0 + b * s1;
          s += (getNumberAt(x, offA) - (means[a] ?? 0)) * (getNumberAt(x, offB) - (means[b] ?? 0));
        }
        const v = s / (nObs - ddof);
        cov2[a * nVar + b] = v;
        cov2[b * nVar + a] = v;
      }
    }
  }
  // Normalize covariance to correlation: r_ij = cov_ij / sqrt(var_i var_j);
  // NaN where a variable is constant (zero variance).
  const corr = new Float64Array(nVar * nVar);
  for (let i = 0; i < nVar; i++) {
    for (let j = 0; j < nVar; j++) {
      const v = cov2[i * nVar + j] ?? 0;
      const vi = cov2[i * nVar + i] ?? 0;
      const vj = cov2[j * nVar + j] ?? 0;
      const den = Math.sqrt(vi * vj);
      corr[i * nVar + j] = den === 0 ? NaN : v / den;
    }
  }
  return Tensor.fromTypedArray({
    data: corr,
    shape: [nVar, nVar],
    dtype: "float64",
    device: x.device
  });
}
694
// Covariance matrix with `ddof` delta degrees of freedom (divisor n - ddof,
// default 1 = sample covariance).
// - With two tensors: 2x2 matrix [[var(x), cov(x,y)], [cov(x,y), var(y)]]
//   over the flattened paired elements.
// - With one 1-D tensor: 1x1 matrix [[var(x)]].
// - With one 2-D tensor: rows are observations, columns are variables;
//   returns the nVar x nVar covariance matrix.
function cov(x, y, ddof = 1) {
  if (!Number.isFinite(ddof) || ddof < 0) {
    throw new InvalidParameterError("ddof must be a non-negative finite number", "ddof", ddof);
  }
  if (y) {
    // Paired two-sample form.
    assertSameSize(x, y, "cov");
    const n = x.size;
    if (n === 0) throw new InvalidParameterError("cov() requires at least one element", "n", n);
    if (n <= ddof)
      throw new InvalidParameterError(
        `ddof=${ddof} >= size=${n}, covariance undefined`,
        "ddof",
        ddof
      );
    const xd = toDenseFlatArray(x);
    const yd = toDenseFlatArray(y);
    let meanX = 0;
    let meanY = 0;
    for (let i = 0; i < n; i++) {
      meanX += xd[i] ?? 0;
      meanY += yd[i] ?? 0;
    }
    meanX /= n;
    meanY /= n;
    let varX = 0;
    let varY = 0;
    let covXY = 0;
    for (let i = 0; i < n; i++) {
      const dx = (xd[i] ?? 0) - meanX;
      const dy = (yd[i] ?? 0) - meanY;
      varX += dx * dx;
      varY += dy * dy;
      covXY += dx * dy;
    }
    varX /= n - ddof;
    varY /= n - ddof;
    covXY /= n - ddof;
    return tensor([
      [varX, covXY],
      [covXY, varY]
    ]);
  }
  if (x.ndim === 1) {
    // Single-variable form: 1x1 variance matrix.
    const n = x.size;
    if (n === 0) throw new InvalidParameterError("cov() requires at least one element", "n", n);
    if (ddof < 0) {
      throw new InvalidParameterError("ddof must be non-negative", "ddof", ddof);
    }
    if (n <= ddof)
      throw new InvalidParameterError(
        `ddof=${ddof} >= size=${n}, covariance undefined`,
        "ddof",
        ddof
      );
    const xd = toDenseFlatArray(x);
    let meanX = 0;
    for (let i = 0; i < n; i++) meanX += xd[i] ?? 0;
    meanX /= n;
    let varX = 0;
    for (let i = 0; i < n; i++) {
      const dx = (xd[i] ?? 0) - meanX;
      varX += dx * dx;
    }
    varX /= n - ddof;
    return tensor([[varX]]);
  }
  if (x.ndim !== 2) {
    throw new ShapeError("cov() expects a 1D or 2D tensor");
  }
  // 2-D form: rows = observations, columns = variables.
  const nObs = x.shape[0] ?? 0;
  const nVar = x.shape[1] ?? 0;
  if (nObs === 0)
    throw new InvalidParameterError("cov() requires at least one observation", "nObs", nObs);
  if (ddof < 0) {
    throw new InvalidParameterError("ddof must be non-negative", "ddof", ddof);
  }
  if (nObs <= ddof)
    throw new InvalidParameterError(
      `ddof=${ddof} >= nObs=${nObs}, covariance undefined`,
      "ddof",
      ddof
    );
  // Column means via strided access.
  const means = new Float64Array(nVar);
  for (let j = 0; j < nVar; j++) {
    let s = 0;
    for (let i = 0; i < nObs; i++) {
      const off = x.offset + i * (x.strides[0] ?? 0) + j * (x.strides[1] ?? 0);
      s += getNumberAt(x, off);
    }
    means[j] = s / nObs;
  }
  // Upper triangle of the covariance matrix, mirrored by symmetry.
  const out = new Float64Array(nVar * nVar);
  for (let a = 0; a < nVar; a++) {
    for (let b = a; b < nVar; b++) {
      let s = 0;
      for (let i = 0; i < nObs; i++) {
        const offA = x.offset + i * (x.strides[0] ?? 0) + a * (x.strides[1] ?? 0);
        const offB = x.offset + i * (x.strides[0] ?? 0) + b * (x.strides[1] ?? 0);
        s += (getNumberAt(x, offA) - (means[a] ?? 0)) * (getNumberAt(x, offB) - (means[b] ?? 0));
      }
      const v = s / (nObs - ddof);
      out[a * nVar + b] = v;
      out[b * nVar + a] = v;
    }
  }
  return Tensor.fromTypedArray({
    data: out,
    shape: [nVar, nVar],
    dtype: "float64",
    device: x.device
  });
}
806
+
807
+ // src/stats/descriptive.ts
808
/**
 * Arithmetic mean of `t`, optionally reduced along `axis`.
 * Thin forwarding wrapper around the shared mean-reduction kernel.
 *
 * @param t tensor to reduce
 * @param axis axis or axes to reduce over; reduces all elements when omitted
 * @param _keepdims when true, reduced axes are kept as size-1 dimensions
 * @returns tensor of means produced by `reduceMean`
 */
function mean(t, axis, _keepdims = false) {
  const reduced = reduceMean(t, axis, _keepdims);
  return reduced;
}
811
/**
 * Median of `t`, either over all elements (no axis) or independently per
 * reduction group along the given axes. Any NaN inside a group makes that
 * group's result NaN. Always returns a float64 tensor on `t`'s device.
 *
 * @param t input tensor
 * @param axis axis or axes to reduce; full reduction when omitted
 * @param _keepdims keep reduced axes as size-1 dimensions in the output
 * @throws InvalidParameterError on empty input or an empty reduction group
 */
function median(t, axis, _keepdims = false) {
  const axes = normalizeAxes(axis, t.ndim);
  if (axes.length === 0) {
    // Full reduction: collect every element, sort, pick the middle.
    if (t.size === 0) {
      throw new InvalidParameterError("median() requires at least one element", "size", t.size);
    }
    const values = [];
    let hasNaN = false;
    forEachIndexOffset(t, (off) => {
      const v = getNumberAt(t, off);
      if (Number.isNaN(v)) hasNaN = true;
      values.push(v);
    });
    // Scalar result shape: all-ones when keepdims, 0-d otherwise.
    const outShape2 = _keepdims ? new Array(t.ndim).fill(1) : [];
    if (hasNaN) {
      // NaN propagates: short-circuit before sorting (NaN breaks the comparator).
      const out3 = new Float64Array(1);
      out3[0] = Number.NaN;
      return Tensor.fromTypedArray({
        data: out3,
        shape: outShape2,
        dtype: "float64",
        device: t.device
      });
    }
    values.sort((a, b) => a - b);
    const mid = Math.floor(values.length / 2);
    // Even count: average of the two middle order statistics; odd: middle one.
    const result = values.length % 2 === 0 ? ((values[mid - 1] ?? 0) + (values[mid] ?? 0)) / 2 : values[mid] ?? 0;
    const out2 = new Float64Array(1);
    out2[0] = result;
    return Tensor.fromTypedArray({
      data: out2,
      shape: outShape2,
      dtype: "float64",
      device: t.device
    });
  }
  // Axis reduction: bucket the elements belonging to each output cell,
  // then take each bucket's median independently.
  const outShape = reducedShape(t.shape, axes, _keepdims);
  const outStrides = computeStrides(outShape);
  const outSize = outShape.reduce((a, b) => a * b, 1);
  const buckets = new Array(outSize);
  const nanFlags = new Array(outSize).fill(false);
  const reduce = new Set(axes);
  forEachIndexOffset(t, (off, idx) => {
    // Map this input index to the flat position of its output cell.
    let outFlat = 0;
    if (_keepdims) {
      // Reduced axes remain in the output as size-1 dims: index 0 there.
      for (let i = 0; i < t.ndim; i++) {
        const s = outStrides[i] ?? 0;
        const v = reduce.has(i) ? 0 : idx[i] ?? 0;
        outFlat += v * s;
      }
    } else {
      // Reduced axes are dropped: only non-reduced indices contribute.
      let oi = 0;
      for (let i = 0; i < t.ndim; i++) {
        if (reduce.has(i)) continue;
        outFlat += (idx[i] ?? 0) * (outStrides[oi] ?? 0);
        oi++;
      }
    }
    const val = getNumberAt(t, off);
    if (Number.isNaN(val)) {
      // Poison the whole group; its bucket contents no longer matter.
      nanFlags[outFlat] = true;
      return;
    }
    const arr = buckets[outFlat] ?? [];
    arr.push(val);
    buckets[outFlat] = arr;
  });
  const out = new Float64Array(outSize);
  for (let i = 0; i < outSize; i++) {
    if (nanFlags[i]) {
      out[i] = Number.NaN;
      continue;
    }
    const arr = buckets[i] ?? [];
    if (arr.length === 0) {
      // Happens when a reduced axis has extent 0.
      throw new InvalidParameterError(
        "median() reduction over empty axis is undefined",
        "axis",
        arr.length
      );
    }
    arr.sort((a, b) => a - b);
    const mid = Math.floor(arr.length / 2);
    out[i] = arr.length % 2 === 0 ? ((arr[mid - 1] ?? 0) + (arr[mid] ?? 0)) / 2 : arr[mid] ?? 0;
  }
  return Tensor.fromTypedArray({
    data: out,
    shape: outShape,
    dtype: "float64",
    device: t.device
  });
}
903
/**
 * Most frequent value of `t` (full reduction) or per reduction group along
 * `axis`. Ties are broken by choosing the smaller value. Any NaN in a group
 * makes that group's result NaN. Returns a float64 tensor.
 *
 * @param t input tensor
 * @param axis axis or axes to reduce; full reduction when omitted
 * @throws InvalidParameterError on empty input or an empty reduction group
 */
function mode(t, axis) {
  const axes = normalizeAxes(axis, t.ndim);
  if (axes.length === 0) {
    // Full reduction over every element.
    if (t.size === 0) {
      throw new InvalidParameterError("mode() requires at least one element", "size", t.size);
    }
    const freq = /* @__PURE__ */ new Map();
    let maxFreq = 0;
    let modeVal = Number.POSITIVE_INFINITY;
    let hasNaN = false;
    forEachIndexOffset(t, (off) => {
      const val = getNumberAt(t, off);
      if (Number.isNaN(val)) {
        hasNaN = true;
        return;
      }
      const count = (freq.get(val) ?? 0) + 1;
      freq.set(val, count);
      // Higher count wins; equal count prefers the smaller value.
      if (count > maxFreq || count === maxFreq && val < modeVal) {
        maxFreq = count;
        modeVal = val;
      }
    });
    if (hasNaN) {
      return tensor([Number.NaN]);
    }
    return tensor([modeVal]);
  }
  // Axis reduction: one frequency map per output cell, with a running
  // best (count, value) pair so no second pass over the maps is needed.
  const outShape = reducedShape(t.shape, axes, false);
  const outStrides = computeStrides(outShape);
  const outSize = outShape.reduce((a, b) => a * b, 1);
  const maps = new Array(outSize);
  const bestCounts = new Int32Array(outSize);
  const bestValues = new Float64Array(outSize);
  const nanFlags = new Array(outSize).fill(false);
  const reduce = new Set(axes);
  forEachIndexOffset(t, (off, idx) => {
    // Flat output position from the non-reduced index components.
    let outFlat = 0;
    let oi = 0;
    for (let i = 0; i < t.ndim; i++) {
      if (reduce.has(i)) continue;
      outFlat += (idx[i] ?? 0) * (outStrides[oi] ?? 0);
      oi++;
    }
    const val = getNumberAt(t, off);
    if (Number.isNaN(val)) {
      nanFlags[outFlat] = true;
      return;
    }
    const m = maps[outFlat] ?? /* @__PURE__ */ new Map();
    const next = (m.get(val) ?? 0) + 1;
    m.set(val, next);
    maps[outFlat] = m;
    const currentBestCount = bestCounts[outFlat] ?? 0;
    // NOTE: typed-array reads are never undefined, so the ?? fallback is
    // inert; the tie-break is still sound because `next === currentBestCount`
    // implies currentBestCount > 0, i.e. bestValues was already assigned.
    const currentBestValue = bestValues[outFlat] ?? Number.POSITIVE_INFINITY;
    if (next > currentBestCount || next === currentBestCount && val < currentBestValue) {
      bestCounts[outFlat] = next;
      bestValues[outFlat] = val;
    }
  });
  const out = new Float64Array(outSize);
  for (let i = 0; i < outSize; i++) {
    if (nanFlags[i]) {
      out[i] = Number.NaN;
      continue;
    }
    if ((bestCounts[i] ?? 0) === 0) {
      // No element ever landed in this cell (reduced axis of extent 0).
      throw new InvalidParameterError(
        "mode() reduction over empty axis is undefined",
        "axis",
        bestCounts[i] ?? 0
      );
    }
    out[i] = bestValues[i] ?? Number.NaN;
  }
  return Tensor.fromTypedArray({
    data: out,
    shape: outShape,
    dtype: "float64",
    device: t.device
  });
}
985
/**
 * Standard deviation of `t`: element-wise square root of the variance
 * reduction, preserving the reduced shape and device.
 *
 * @param t input tensor
 * @param axis axis or axes to reduce; full reduction when omitted
 * @param _keepdims keep reduced axes as size-1 dimensions
 * @param ddof delta degrees of freedom passed through to the variance kernel
 * @returns float64 tensor of standard deviations
 */
function std(t, axis, _keepdims = false, ddof = 0) {
  const varT = reduceVariance(t, axis, _keepdims, ddof);
  const data = new Float64Array(varT.size);
  for (let k = 0; k < varT.size; k++) {
    data[k] = Math.sqrt(getNumberAt(varT, varT.offset + k));
  }
  return Tensor.fromTypedArray({
    data,
    shape: varT.shape,
    dtype: "float64",
    device: varT.device
  });
}
998
/**
 * Variance of `t`, optionally reduced along `axis`.
 * Thin forwarding wrapper around the shared variance-reduction kernel.
 *
 * @param t input tensor
 * @param axis axis or axes to reduce; full reduction when omitted
 * @param _keepdims keep reduced axes as size-1 dimensions
 * @param ddof delta degrees of freedom (0 = population variance)
 * @returns tensor of variances produced by `reduceVariance`
 */
function variance(t, axis, _keepdims = false, ddof = 0) {
  const reduced = reduceVariance(t, axis, _keepdims, ddof);
  return reduced;
}
1001
/**
 * Sample skewness per reduction group: g1 = mean(z^3) with z-scores built
 * from the group mean and population (ddof=0) standard deviation. With
 * bias=false the G1 correction sqrt(n(n-1))/(n-2) is applied (NaN for n<3).
 * Constant or non-finite-variance groups yield NaN. Returns float64.
 *
 * @param t input tensor
 * @param axis axis or axes to reduce; full reduction when omitted
 * @param bias when false, apply the small-sample bias correction
 */
function skewness(t, axis, bias = true) {
  const mu = reduceMean(t, axis, false);
  const sigma2 = reduceVariance(t, axis, false, 0);
  const axes = normalizeAxes(axis, t.ndim);
  const reduce = new Set(axes);
  const outShape = reducedShape(t.shape, axes, false);
  const outStrides = computeStrides(outShape);
  const outSize = outShape.reduce((a, b) => a * b, 1);
  const sumCube = new Float64Array(outSize);
  const counts = new Int32Array(outSize);
  forEachIndexOffset(t, (off, idx) => {
    // Flat output position from the non-reduced index components.
    let outFlat = 0;
    let oi = 0;
    for (let i = 0; i < t.ndim; i++) {
      if (reduce.has(i)) continue;
      outFlat += (idx[i] ?? 0) * (outStrides[oi] ?? 0);
      oi++;
    }
    const m = getNumberAt(mu, mu.offset + outFlat);
    const v = Math.sqrt(getNumberAt(sigma2, sigma2.offset + outFlat));
    const x = getNumberAt(t, off);
    if (!Number.isFinite(v) || v === 0) {
      // Degenerate group (zero or non-finite spread): poison the
      // accumulator; NaN is absorbing for any later additions.
      sumCube[outFlat] = NaN;
    } else {
      const z = (x - m) / v;
      sumCube[outFlat] = (sumCube[outFlat] ?? 0) + z * z * z;
    }
    counts[outFlat] = (counts[outFlat] ?? 0) + 1;
  });
  const out = new Float64Array(outSize);
  for (let i = 0; i < outSize; i++) {
    const n = counts[i] ?? 0;
    if (n === 0) {
      // Empty group (reduced axis of extent 0).
      out[i] = NaN;
      continue;
    }
    let g1 = (sumCube[i] ?? NaN) / n;
    if (!bias) {
      if (n < 3) {
        // Bias correction is undefined below 3 samples.
        g1 = NaN;
      } else {
        g1 *= Math.sqrt(n * (n - 1)) / (n - 2);
      }
    }
    out[i] = g1;
  }
  return Tensor.fromTypedArray({
    data: out,
    shape: outShape,
    dtype: "float64",
    device: t.device
  });
}
1054
/**
 * Sample kurtosis per reduction group: g2 = mean(z^4) with z-scores built
 * from the group mean and population (ddof=0) standard deviation. With
 * fisher=true the result is excess kurtosis (g2 - 3). With bias=false the
 * small-sample correction ((n+1)*excess + 6)*(n-1)/((n-2)(n-3)) is applied
 * (NaN for n<4). Degenerate groups yield NaN. Returns float64.
 *
 * @param t input tensor
 * @param axis axis or axes to reduce; full reduction when omitted
 * @param fisher subtract 3 (normal distribution baseline) when true
 * @param bias when false, apply the small-sample bias correction
 */
function kurtosis(t, axis, fisher = true, bias = true) {
  const mu = reduceMean(t, axis, false);
  const sigma2 = reduceVariance(t, axis, false, 0);
  const axes = normalizeAxes(axis, t.ndim);
  const reduce = new Set(axes);
  const outShape = reducedShape(t.shape, axes, false);
  const outStrides = computeStrides(outShape);
  const outSize = outShape.reduce((a, b) => a * b, 1);
  const sumQuad = new Float64Array(outSize);
  const counts = new Int32Array(outSize);
  forEachIndexOffset(t, (off, idx) => {
    // Flat output position from the non-reduced index components.
    let outFlat = 0;
    let oi = 0;
    for (let i = 0; i < t.ndim; i++) {
      if (reduce.has(i)) continue;
      outFlat += (idx[i] ?? 0) * (outStrides[oi] ?? 0);
      oi++;
    }
    const m = getNumberAt(mu, mu.offset + outFlat);
    const v = Math.sqrt(getNumberAt(sigma2, sigma2.offset + outFlat));
    const x = getNumberAt(t, off);
    if (!Number.isFinite(v) || v === 0) {
      // Degenerate group: poison the accumulator (NaN is absorbing).
      sumQuad[outFlat] = NaN;
    } else {
      const z = (x - m) / v;
      sumQuad[outFlat] = (sumQuad[outFlat] ?? 0) + z ** 4;
    }
    counts[outFlat] = (counts[outFlat] ?? 0) + 1;
  });
  const out = new Float64Array(outSize);
  for (let i = 0; i < outSize; i++) {
    const n = counts[i] ?? 0;
    if (n === 0) {
      // Empty group (reduced axis of extent 0).
      out[i] = NaN;
      continue;
    }
    let g2 = (sumQuad[i] ?? NaN) / n;
    if (!bias) {
      if (n < 4) {
        // Bias correction is undefined below 4 samples.
        g2 = NaN;
      } else {
        // Correct the excess kurtosis, then restore the +3 baseline so the
        // fisher flag below works uniformly for both bias settings.
        const excess = g2 - 3;
        const adj = ((n + 1) * excess + 6) * ((n - 1) / ((n - 2) * (n - 3)));
        g2 = adj + 3;
      }
    }
    out[i] = fisher ? g2 - 3 : g2;
  }
  return Tensor.fromTypedArray({
    data: out,
    shape: outShape,
    dtype: "float64",
    device: t.device
  });
}
1109
/**
 * Quantile(s) of `t` using linear interpolation between order statistics
 * (index q*(len-1), numpy-style "linear" method). `q` may be a single
 * probability or an array; with an axis reduction the q values form a new
 * leading dimension of the result. NaN in a group makes that group NaN.
 *
 * @param t input tensor
 * @param q probability (or array of probabilities) in [0, 1]
 * @param axis axis or axes to reduce; full reduction when omitted
 * @throws InvalidParameterError for q outside [0,1], empty input, or an
 *   empty reduction group
 */
function quantile(t, q, axis) {
  const qVals = Array.isArray(q) ? q : [q];
  for (const v of qVals) {
    if (!Number.isFinite(v) || v < 0 || v > 1) {
      throw new InvalidParameterError("q must be in [0, 1]", "q", v);
    }
  }
  const axes = normalizeAxes(axis, t.ndim);
  const reduce = new Set(axes);
  if (axes.length === 0) {
    // Full reduction: one sorted array, one interpolation per q.
    if (t.size === 0) {
      throw new InvalidParameterError("quantile() requires at least one element", "size", t.size);
    }
    const arr = [];
    let hasNaN = false;
    forEachIndexOffset(t, (off) => {
      const v = getNumberAt(t, off);
      if (Number.isNaN(v)) hasNaN = true;
      arr.push(v);
    });
    if (hasNaN) {
      // NaN propagates to every requested quantile.
      return tensor(qVals.map(() => Number.NaN));
    }
    arr.sort((a, b) => a - b);
    const results = [];
    for (const qVal of qVals) {
      // Fractional rank -> linear interpolation between neighbours.
      const idx = qVal * (arr.length - 1);
      const lower = Math.floor(idx);
      const upper = Math.ceil(idx);
      const weight = idx - lower;
      results.push((arr[lower] ?? 0) * (1 - weight) + (arr[upper] ?? 0) * weight);
    }
    return tensor(results);
  }
  // Axis reduction: bucket elements per output cell, then interpolate each
  // bucket once per requested q.
  const outShape = reducedShape(t.shape, axes, false);
  const outStrides = computeStrides(outShape);
  const outSize = outShape.reduce((a, b) => a * b, 1);
  const buckets = new Array(outSize);
  const nanFlags = new Array(outSize).fill(false);
  forEachIndexOffset(t, (off, idx) => {
    // Flat output position from the non-reduced index components.
    let outFlat = 0;
    let oi = 0;
    for (let i = 0; i < t.ndim; i++) {
      if (reduce.has(i)) continue;
      outFlat += (idx[i] ?? 0) * (outStrides[oi] ?? 0);
      oi++;
    }
    const val = getNumberAt(t, off);
    if (Number.isNaN(val)) {
      nanFlags[outFlat] = true;
      return;
    }
    const arr = buckets[outFlat] ?? [];
    arr.push(val);
    buckets[outFlat] = arr;
  });
  // Result layout: q index is the leading axis, group index is the rest.
  const finalShape = [qVals.length, ...outShape];
  const finalSize = qVals.length * outSize;
  const out = new Float64Array(finalSize);
  for (let g = 0; g < outSize; g++) {
    if (nanFlags[g]) {
      for (let qi = 0; qi < qVals.length; qi++) {
        out[qi * outSize + g] = Number.NaN;
      }
      continue;
    }
    const arr = buckets[g] ?? [];
    if (arr.length === 0) {
      // Reduced axis of extent 0.
      throw new InvalidParameterError(
        "quantile() reduction over empty axis is undefined",
        "axis",
        arr.length
      );
    }
    arr.sort((a, b) => a - b);
    for (let qi = 0; qi < qVals.length; qi++) {
      const qVal = qVals[qi] ?? 0;
      const idx = qVal * (arr.length - 1);
      const lower = Math.floor(idx);
      const upper = Math.ceil(idx);
      const weight = idx - lower;
      out[qi * outSize + g] = (arr[lower] ?? 0) * (1 - weight) + (arr[upper] ?? 0) * weight;
    }
  }
  return Tensor.fromTypedArray({
    data: out,
    shape: finalShape,
    dtype: "float64",
    device: t.device
  });
}
1200
/**
 * Percentile(s) of `t`: validates q against [0, 100], rescales to [0, 1],
 * and delegates to `quantile` (linear interpolation, same axis semantics).
 *
 * @param t input tensor
 * @param q percentile (or array of percentiles) in [0, 100]
 * @param axis axis or axes to reduce; full reduction when omitted
 * @throws InvalidParameterError for q outside [0, 100]
 */
function percentile(t, q, axis) {
  const candidates = Array.isArray(q) ? q : [q];
  for (const v of candidates) {
    const valid = Number.isFinite(v) && v >= 0 && v <= 100;
    if (!valid) {
      throw new InvalidParameterError("q must be in [0, 100]", "q", v);
    }
  }
  const rescaled = Array.isArray(q) ? q.map((v) => v / 100) : q / 100;
  return quantile(t, rescaled, axis);
}
1210
/**
 * n-th central moment per reduction group: mean((x - mu)^n) with the group
 * mean mu. Empty groups yield NaN. Returns a float64 tensor.
 *
 * @param t input tensor
 * @param n moment order; must be a non-negative integer
 * @param axis axis or axes to reduce; full reduction when omitted
 * @throws InvalidParameterError when n is not a non-negative integer
 */
function moment(t, n, axis) {
  if (!Number.isFinite(n) || !Number.isInteger(n) || n < 0) {
    throw new InvalidParameterError("n must be a non-negative integer", "n", n);
  }
  const groupMeans = reduceMean(t, axis, false);
  const axisList = normalizeAxes(axis, t.ndim);
  const reducedAxes = new Set(axisList);
  const shapeOut = reducedShape(t.shape, axisList, false);
  const stridesOut = computeStrides(shapeOut);
  const total = shapeOut.reduce((p, d) => p * d, 1);
  const acc = new Float64Array(total);
  const tally = new Int32Array(total);
  forEachIndexOffset(t, (off, idx) => {
    // Flat output position from the non-reduced index components.
    let flat = 0;
    let k = 0;
    for (let d = 0; d < t.ndim; d++) {
      if (reducedAxes.has(d)) continue;
      flat += (idx[d] ?? 0) * (stridesOut[k] ?? 0);
      k++;
    }
    const mu = getNumberAt(groupMeans, groupMeans.offset + flat);
    acc[flat] = (acc[flat] ?? 0) + (getNumberAt(t, off) - mu) ** n;
    tally[flat] = (tally[flat] ?? 0) + 1;
  });
  const data = new Float64Array(total);
  for (let i = 0; i < total; i++) {
    const c = tally[i] ?? 0;
    data[i] = c === 0 ? NaN : (acc[i] ?? 0) / c;
  }
  return Tensor.fromTypedArray({
    data,
    shape: shapeOut,
    dtype: "float64",
    device: t.device
  });
}
1247
/**
 * Geometric mean per reduction group, computed as exp(mean(log(x))) for
 * numerical stability. All values must be strictly positive.
 *
 * @param t input tensor
 * @param axis axis or axes to reduce; full reduction when omitted
 * @throws InvalidParameterError on empty input, non-positive values, or an
 *   empty reduction group
 */
function geometricMean(t, axis) {
  const axisList = normalizeAxes(axis, t.ndim);
  if (axisList.length === 0 && t.size === 0) {
    throw new InvalidParameterError(
      "geometricMean() requires at least one element",
      "size",
      t.size
    );
  }
  const reducedAxes = new Set(axisList);
  const shapeOut = reducedShape(t.shape, axisList, false);
  const stridesOut = computeStrides(shapeOut);
  const total = shapeOut.reduce((p, d) => p * d, 1);
  const logSums = new Float64Array(total);
  const tally = new Int32Array(total);
  forEachIndexOffset(t, (off, idx) => {
    // Flat output position from the non-reduced index components.
    let flat = 0;
    let k = 0;
    for (let d = 0; d < t.ndim; d++) {
      if (reducedAxes.has(d)) continue;
      flat += (idx[d] ?? 0) * (stridesOut[k] ?? 0);
      k++;
    }
    const v = getNumberAt(t, off);
    if (v <= 0)
      throw new InvalidParameterError("geometricMean() requires all values to be > 0", "value", v);
    logSums[flat] = (logSums[flat] ?? 0) + Math.log(v);
    tally[flat] = (tally[flat] ?? 0) + 1;
  });
  const data = new Float64Array(total);
  for (let i = 0; i < total; i++) {
    const c = tally[i] ?? 0;
    if (c === 0) {
      throw new InvalidParameterError(
        "geometricMean() reduction over empty axis is undefined",
        "axis",
        c
      );
    }
    data[i] = Math.exp((logSums[i] ?? 0) / c);
  }
  return Tensor.fromTypedArray({
    data,
    shape: shapeOut,
    dtype: "float64",
    device: t.device
  });
}
1295
/**
 * Harmonic mean per reduction group: n / sum(1/x). All values must be
 * strictly positive.
 *
 * @param t input tensor
 * @param axis axis or axes to reduce; full reduction when omitted
 * @throws InvalidParameterError on empty input, non-positive values, or an
 *   empty reduction group
 */
function harmonicMean(t, axis) {
  const axisList = normalizeAxes(axis, t.ndim);
  if (axisList.length === 0 && t.size === 0) {
    throw new InvalidParameterError("harmonicMean() requires at least one element", "size", t.size);
  }
  const reducedAxes = new Set(axisList);
  const shapeOut = reducedShape(t.shape, axisList, false);
  const stridesOut = computeStrides(shapeOut);
  const total = shapeOut.reduce((p, d) => p * d, 1);
  const recipSums = new Float64Array(total);
  const tally = new Int32Array(total);
  forEachIndexOffset(t, (off, idx) => {
    // Flat output position from the non-reduced index components.
    let flat = 0;
    let k = 0;
    for (let d = 0; d < t.ndim; d++) {
      if (reducedAxes.has(d)) continue;
      flat += (idx[d] ?? 0) * (stridesOut[k] ?? 0);
      k++;
    }
    const v = getNumberAt(t, off);
    if (v <= 0) {
      throw new InvalidParameterError("harmonicMean() requires all values to be > 0", "value", v);
    }
    recipSums[flat] = (recipSums[flat] ?? 0) + 1 / v;
    tally[flat] = (tally[flat] ?? 0) + 1;
  });
  const data = new Float64Array(total);
  for (let i = 0; i < total; i++) {
    const c = tally[i] ?? 0;
    if (c === 0) {
      throw new InvalidParameterError(
        "harmonicMean() reduction over empty axis is undefined",
        "axis",
        c
      );
    }
    data[i] = c / (recipSums[i] ?? NaN);
  }
  return Tensor.fromTypedArray({
    data,
    shape: shapeOut,
    dtype: "float64",
    device: t.device
  });
}
1340
/**
 * Trimmed mean per reduction group: sort each group, drop
 * floor(len * proportiontocut) elements from each end, and average the rest.
 * Any NaN in a group makes that group's result NaN. Returns float64.
 *
 * @param t input tensor
 * @param proportiontocut fraction to cut from EACH tail; must be in [0, 0.5)
 * @param axis axis or axes to reduce; full reduction when omitted
 * @throws InvalidParameterError for an out-of-range proportiontocut, empty
 *   input, or an empty reduction group
 */
function trimMean(t, proportiontocut, axis) {
  if (!Number.isFinite(proportiontocut) || proportiontocut < 0 || proportiontocut >= 0.5) {
    throw new InvalidParameterError(
      "proportiontocut must be a finite number in range [0, 0.5)",
      "proportiontocut",
      proportiontocut
    );
  }
  const axes = normalizeAxes(axis, t.ndim);
  const reduce = new Set(axes);
  if (axes.length === 0) {
    // Full reduction over every element.
    const arr = [];
    let hasNaN = false;
    forEachIndexOffset(t, (off) => {
      const v = getNumberAt(t, off);
      if (Number.isNaN(v)) hasNaN = true;
      arr.push(v);
    });
    if (arr.length === 0)
      throw new InvalidParameterError(
        "trimMean() requires at least one element",
        "size",
        arr.length
      );
    if (hasNaN) {
      return tensor([Number.NaN]);
    }
    arr.sort((a, b) => a - b);
    // Symmetric trim: drop nTrim smallest and nTrim largest values.
    const nTrim = Math.floor(arr.length * proportiontocut);
    const trimmed = arr.slice(nTrim, arr.length - nTrim);
    const sum = trimmed.reduce((a, b) => a + b, 0);
    return tensor([sum / trimmed.length]);
  }
  // Axis reduction: bucket elements per output cell, trim + average each.
  const outShape = reducedShape(t.shape, axes, false);
  const outStrides = computeStrides(outShape);
  const outSize = outShape.reduce((a, b) => a * b, 1);
  const buckets = new Array(outSize);
  const nanFlags = new Array(outSize).fill(false);
  forEachIndexOffset(t, (off, idx) => {
    // Flat output position from the non-reduced index components.
    let outFlat = 0;
    let oi = 0;
    for (let i = 0; i < t.ndim; i++) {
      if (reduce.has(i)) continue;
      outFlat += (idx[i] ?? 0) * (outStrides[oi] ?? 0);
      oi++;
    }
    const val = getNumberAt(t, off);
    if (Number.isNaN(val)) {
      nanFlags[outFlat] = true;
      return;
    }
    const arr = buckets[outFlat] ?? [];
    arr.push(val);
    buckets[outFlat] = arr;
  });
  const out = new Float64Array(outSize);
  for (let i = 0; i < outSize; i++) {
    if (nanFlags[i]) {
      out[i] = Number.NaN;
      continue;
    }
    const arr = buckets[i] ?? [];
    if (arr.length === 0) {
      // Reduced axis of extent 0.
      throw new InvalidParameterError(
        "trimMean() reduction over empty axis is undefined",
        "axis",
        arr.length
      );
    }
    arr.sort((a, b) => a - b);
    const nTrim = Math.floor(arr.length * proportiontocut);
    const trimmed = arr.slice(nTrim, arr.length - nTrim);
    const sum = trimmed.reduce((a, b) => a + b, 0);
    out[i] = sum / trimmed.length;
  }
  return Tensor.fromTypedArray({
    data: out,
    shape: outShape,
    dtype: "float64",
    device: t.device
  });
}
1422
+
1423
+ // src/stats/tests.ts
1424
/**
 * Materialize a tensor's elements (in iteration order) into a dense
 * Float64Array sorted ascending. Returns an empty array for an empty tensor.
 *
 * @param x tensor whose elements are gathered
 * @returns sorted Float64Array of x's values
 */
function toDenseSortedArray1D(x) {
  if (x.size < 1) {
    return new Float64Array(0);
  }
  const values = new Float64Array(x.size);
  let cursor = 0;
  forEachIndexOffset(x, (off) => {
    values[cursor] = getNumberAt(x, off);
    cursor += 1;
  });
  values.sort((p, q) => p - q);
  return values;
}
1437
/**
 * Materialize a tensor's elements (in iteration order) into a dense
 * Float64Array without reordering.
 *
 * @param x tensor whose elements are gathered
 * @returns Float64Array of x's values in traversal order
 */
function toDenseArray1D(x) {
  const values = new Float64Array(x.size);
  let cursor = 0;
  forEachIndexOffset(x, (off) => {
    values[cursor++] = getNumberAt(x, off);
  });
  return values;
}
1446
/**
 * Single-pass Welford accumulation over an array of numbers.
 * Returns the running mean and M2 (sum of squared deviations from the
 * mean); variance = m2 / (n - ddof) can be derived by the caller.
 *
 * @param x non-empty array-like of numbers
 * @returns {{mean: number, m2: number}}
 * @throws InvalidParameterError when x is empty
 */
function meanAndM2(x) {
  if (x.length === 0) {
    throw new InvalidParameterError("expected at least one element", "length", x.length);
  }
  let runningMean = 0;
  let sumSqDev = 0;
  let seen = 0;
  for (const raw of x) {
    const v = raw ?? 0;
    seen += 1;
    // Welford update: fold v into the mean, then accumulate the product of
    // pre- and post-update deviations (numerically stable M2).
    const before = v - runningMean;
    runningMean += before / seen;
    sumSqDev += before * (v - runningMean);
  }
  return { mean: runningMean, m2: sumSqDev };
}
1462
/**
 * Shapiro-Wilk normality test for a SORTED sample `x` (ascending).
 * Returns { statistic: W, pvalue }. The structure (polynomial constants,
 * W-to-p transforms split at n=3 / n<=11 / n>11) appears to follow
 * Royston's AS R94 approximation — NOTE(review): confirm against the
 * published algorithm before relying on exact p-values.
 *
 * @param x sorted Float64Array sample, 3 <= n <= 5000
 * @throws InvalidParameterError for bad n, constant/near-constant data,
 *   or unsorted input
 */
function shapiroWilk(x) {
  const n = x.length;
  if (n < 3 || n > 5e3) {
    throw new InvalidParameterError("shapiro() sample size must be between 3 and 5000", "n", n);
  }
  const range = (x[n - 1] ?? 0) - (x[0] ?? 0);
  if (range === 0) {
    throw new InvalidParameterError("shapiro() all x values are identical", "range", range);
  }
  const small = 1e-19;
  if (range < small) {
    throw new InvalidParameterError("shapiro() range is too small", "range", range);
  }
  const nn2 = Math.floor(n / 2);
  // a[1..nn2]: approximate expected values / weights of normal order
  // statistics (index 0 unused).
  const a = new Float64Array(nn2 + 1);
  // Polynomial coefficients for the p-value approximations (small/large n).
  const g = [-2.273, 0.459];
  const c1 = [0, 0.221157, -0.147981, -2.07119, 4.434685, -2.706056];
  const c2 = [0, 0.042981, -0.293762, -1.752461, 5.682633, -3.582633];
  const c3 = [0.544, -0.39978, 0.025054, -6714e-7];
  const c4 = [1.3822, -0.77857, 0.062767, -20322e-7];
  const c5 = [-1.5861, -0.31082, -0.083751, 38915e-7];
  const c6 = [-0.4803, -0.082676, 30302e-7];
  // Horner evaluation of a polynomial with coefficients cc (low order first).
  const poly = (cc, x0) => {
    let p = cc[cc.length - 1] ?? 0;
    for (let j = cc.length - 2; j >= 0; j--) {
      p = p * x0 + (cc[j] ?? 0);
    }
    return p;
  };
  const sign = (v) => v === 0 ? 0 : v > 0 ? 1 : -1;
  const an = n;
  if (n === 3) {
    // Exact weight for the minimal sample size.
    a[1] = Math.SQRT1_2;
  } else {
    // Blom-style plotting positions inverted through the normal CDF by
    // bisection (80 iterations on [-10, 10]).
    const an25 = an + 0.25;
    let summ2 = 0;
    for (let i = 1; i <= nn2; i++) {
      const p = (i - 0.375) / an25;
      let lo = -10;
      let hi = 10;
      for (let it = 0; it < 80; it++) {
        const mid = (lo + hi) / 2;
        const cdf = normalCdf(mid);
        if (cdf < p) lo = mid;
        else hi = mid;
      }
      const z2 = (lo + hi) / 2;
      a[i] = z2;
      summ2 += z2 * z2;
    }
    summ2 *= 2;
    const ssumm2 = Math.sqrt(summ2);
    const rsn = 1 / Math.sqrt(an);
    // Correct the one or two extreme weights with polynomial adjustments,
    // then renormalize the remainder by `fac`.
    const a1 = poly(c1, rsn) - (a[1] ?? 0) / ssumm2;
    let i1;
    let fac;
    if (n > 5) {
      i1 = 3;
      const a2 = -((a[2] ?? 0) / ssumm2) + poly(c2, rsn);
      fac = Math.sqrt(
        (summ2 - 2 * (a[1] ?? 0) * (a[1] ?? 0) - 2 * (a[2] ?? 0) * (a[2] ?? 0)) / (1 - 2 * a1 * a1 - 2 * a2 * a2)
      );
      a[2] = a2;
    } else {
      i1 = 2;
      fac = Math.sqrt((summ2 - 2 * (a[1] ?? 0) * (a[1] ?? 0)) / (1 - 2 * a1 * a1));
    }
    a[1] = a1;
    for (let i = i1; i <= nn2; i++) {
      a[i] = -((a[i] ?? 0) / fac);
    }
  }
  // Accumulate the scaled sums sx (data) and sa (weights), verifying the
  // input is sorted as we go. Values are scaled by the range for stability.
  let xx = (x[0] ?? 0) / range;
  let sx = xx;
  let sa = -(a[1] ?? 0);
  for (let i = 1, j = n - 1; i < n; j--) {
    const xi = (x[i] ?? 0) / range;
    if (xx - xi > small) {
      throw new InvalidParameterError("shapiro() data is not sorted", "data", "unsorted");
    }
    sx += xi;
    i++;
    if (i !== j) {
      sa += sign(i - j) * (a[Math.min(i, j)] ?? 0);
    }
    xx = xi;
  }
  sa /= n;
  sx /= n;
  // W statistic as a squared correlation: ssa/ssx are the centered sums of
  // squares, sax the cross term; w = 1 - (1 - r^2) form below.
  let ssa = 0;
  let ssx = 0;
  let sax = 0;
  for (let i = 0, j = n - 1; i < n; i++, j--) {
    const asa = i !== j ? sign(i - j) * (a[1 + Math.min(i, j)] ?? 0) - sa : -sa;
    const xsx = (x[i] ?? 0) / range - sx;
    ssa += asa * asa;
    ssx += xsx * xsx;
    sax += asa * xsx;
  }
  const ssassx = Math.sqrt(ssa * ssx);
  const w1 = (ssassx - sax) * (ssassx + sax) / (ssa * ssx);
  const w = 1 - w1;
  if (n === 3) {
    // n = 3 has a closed-form p-value; clamp to [0, 1].
    const pi6 = 1.90985931710274;
    const stqr = 1.0471975511966;
    let pw = pi6 * (Math.asin(Math.sqrt(w)) - stqr);
    if (pw < 0) pw = 0;
    if (pw > 1) pw = 1;
    return { statistic: w, pvalue: pw };
  }
  // Otherwise map log(1 - W) to an (approximately) normal deviate using
  // size-dependent location/scale polynomials, then read the p-value off
  // the normal CDF.
  const y = Math.log(w1);
  const lnN = Math.log(an);
  let m;
  let s;
  if (n <= 11) {
    const gamma = poly(g, an);
    if (y >= gamma) {
      // Beyond the approximation's support: report p = 0.
      return { statistic: w, pvalue: 0 };
    }
    const yy = -Math.log(gamma - y);
    m = poly(c3, an);
    s = Math.exp(poly(c4, an));
    const z2 = (yy - m) / s;
    return { statistic: w, pvalue: normalCdf(z2) };
  }
  m = poly(c5, lnN);
  s = Math.exp(poly(c6, lnN));
  const z = (y - m) / s;
  return { statistic: w, pvalue: normalCdf(z) };
}
1592
/**
 * One-sample two-sided t-test of the mean of `a` against `popmean`.
 * Returns { statistic, pvalue } with df = n - 1.
 *
 * @param a tensor of observations (materialized densely)
 * @param popmean hypothesized population mean
 * @throws InvalidParameterError for n < 2 or constant input
 */
function ttest_1samp(a, popmean) {
  const sample = toDenseSortedArray1D(a);
  const n = sample.length;
  if (n < 2) {
    throw new InvalidParameterError("ttest_1samp() requires at least 2 samples", "n", n);
  }
  let avg = 0;
  for (const v of sample) avg += v;
  avg /= n;
  let sqDev = 0;
  for (const v of sample) {
    const d = v - avg;
    sqDev += d * d;
  }
  // Sample (ddof = 1) standard deviation.
  const sd = Math.sqrt(sqDev / (n - 1));
  if (sd === 0) {
    throw new InvalidParameterError("ttest_1samp() is undefined for constant input", "std", sd);
  }
  const statistic = (avg - popmean) / (sd / Math.sqrt(n));
  const df = n - 1;
  const pvalue = 2 * (1 - studentTCdf(Math.abs(statistic), df));
  return { statistic, pvalue };
}
1616
/**
 * Two-sided t-test for the means of two independent samples.
 * equalVar=true uses Student's pooled-variance form (df = na + nb - 2);
 * equalVar=false uses Welch's form with the Welch-Satterthwaite df.
 *
 * @param a first sample tensor
 * @param b second sample tensor
 * @param equalVar assume equal population variances when true
 * @returns {{statistic: number, pvalue: number}}
 * @throws InvalidParameterError when either group has < 2 samples or the
 *   standard error is zero (constant input)
 */
function ttest_ind(a, b, equalVar = true) {
  const sampleA = toDenseSortedArray1D(a);
  const sampleB = toDenseSortedArray1D(b);
  const countA = sampleA.length;
  const countB = sampleB.length;
  if (countA < 2 || countB < 2) {
    throw new InvalidParameterError("ttest_ind() requires at least 2 samples in each group", "n", {
      na: countA,
      nb: countB
    });
  }
  let avgA = 0;
  for (const v of sampleA) avgA += v;
  let avgB = 0;
  for (const v of sampleB) avgB += v;
  avgA /= countA;
  avgB /= countB;
  let sqDevA = 0;
  for (const v of sampleA) {
    const d = v - avgA;
    sqDevA += d * d;
  }
  let sqDevB = 0;
  for (const v of sampleB) {
    const d = v - avgB;
    sqDevB += d * d;
  }
  const varA = sqDevA / (countA - 1);
  const varB = sqDevB / (countB - 1);
  let statistic;
  let df;
  if (equalVar) {
    // Student's t: pooled variance estimate shared by both groups.
    const pooled = ((countA - 1) * varA + (countB - 1) * varB) / (countA + countB - 2);
    const se = Math.sqrt(pooled * (1 / countA + 1 / countB));
    if (se === 0)
      throw new InvalidParameterError(
        "ttest_ind() is undefined for constant input",
        "denom",
        se
      );
    statistic = (avgA - avgB) / se;
    df = countA + countB - 2;
  } else {
    // Welch's t: per-group variances, fractional degrees of freedom.
    const se = Math.sqrt(varA / countA + varB / countB);
    if (se === 0)
      throw new InvalidParameterError(
        "ttest_ind() is undefined for constant input",
        "denom",
        se
      );
    statistic = (avgA - avgB) / se;
    df = (varA / countA + varB / countB) ** 2 / ((varA / countA) ** 2 / (countA - 1) + (varB / countB) ** 2 / (countB - 1));
  }
  const pvalue = 2 * (1 - studentTCdf(Math.abs(statistic), df));
  return { statistic, pvalue };
}
1672
/**
 * Paired (related-samples) two-sided t-test: a one-sample t-test of the
 * element-wise differences a - b against zero, df = n - 1.
 *
 * @param a first paired sample tensor
 * @param b second paired sample tensor (same size as a)
 * @returns {{statistic: number, pvalue: number}}
 * @throws InvalidParameterError for mismatched sizes, n < 2, or constant
 *   differences
 */
function ttest_rel(a, b) {
  if (a.size !== b.size) {
    throw new InvalidParameterError("ttest_rel() requires paired samples of equal length", "size", {
      a: a.size,
      b: b.size
    });
  }
  const n = a.size;
  if (n < 2) {
    throw new InvalidParameterError("ttest_rel() requires at least 2 paired samples", "n", n);
  }
  // Materialize both tensors in traversal order, then difference pairwise.
  const left = new Float64Array(n);
  let w = 0;
  forEachIndexOffset(a, (off) => {
    left[w] = getNumberAt(a, off);
    w++;
  });
  const right = new Float64Array(n);
  w = 0;
  forEachIndexOffset(b, (off) => {
    right[w] = getNumberAt(b, off);
    w++;
  });
  const deltas = new Float64Array(n);
  for (let k = 0; k < n; k++) deltas[k] = (left[k] ?? 0) - (right[k] ?? 0);
  let avg = 0;
  for (let k = 0; k < n; k++) avg += deltas[k] ?? 0;
  avg /= n;
  let sqDev = 0;
  for (let k = 0; k < n; k++) {
    const d = (deltas[k] ?? 0) - avg;
    sqDev += d * d;
  }
  // Sample (ddof = 1) standard deviation of the differences.
  const sd = Math.sqrt(sqDev / (n - 1));
  if (sd === 0) {
    throw new InvalidParameterError(
      "ttest_rel() is undefined for constant differences",
      "stdDiff",
      sd
    );
  }
  const statistic = avg / (sd / Math.sqrt(n));
  const df = n - 1;
  const pvalue = 2 * (1 - studentTCdf(Math.abs(statistic), df));
  return { statistic, pvalue };
}
1720
/**
 * Chi-square goodness-of-fit test. When f_exp is omitted, expected
 * frequencies are uniform (sumObs / n). Observed and expected totals must
 * agree within sqrt(machine-epsilon) relative tolerance. df = n - 1.
 *
 * @param f_obs tensor of observed frequencies (finite, >= 0)
 * @param f_exp optional tensor of expected frequencies (finite, > 0)
 * @returns {{statistic: number, pvalue: number}}
 * @throws InvalidParameterError for invalid frequencies, mismatched sizes,
 *   mismatched totals, or fewer than 2 categories
 */
function chisquare(f_obs, f_exp) {
  const obs = toDenseArray1D(f_obs);
  const n = obs.length;
  if (n < 1) {
    throw new InvalidParameterError("chisquare() requires at least one observed value", "n", n);
  }
  let chiSq = 0;
  let sumObs = 0;
  // Validate observed frequencies and total them in one pass.
  for (let i = 0; i < n; i++) {
    const v = obs[i] ?? 0;
    if (!Number.isFinite(v) || v < 0) {
      throw new InvalidParameterError(
        "chisquare() observed frequencies must be finite and >= 0",
        "f_obs",
        v
      );
    }
    sumObs += v;
  }
  if (f_exp && f_obs.size !== f_exp.size) {
    throw new InvalidParameterError(
      "Observed and expected frequency arrays must have the same length",
      "size",
      { f_obs: f_obs.size, f_exp: f_exp.size }
    );
  }
  if (!f_exp) {
    // No expected frequencies given: assume a uniform distribution.
    const expected = sumObs / n;
    if (!Number.isFinite(expected) || expected <= 0) {
      throw new InvalidParameterError(
        "chisquare() expected frequencies must be finite and > 0",
        "expected",
        expected
      );
    }
    for (let i = 0; i < n; i++) {
      const v = obs[i] ?? 0;
      chiSq += (v - expected) ** 2 / expected;
    }
  } else {
    const exp = toDenseArray1D(f_exp);
    let sumExp = 0;
    for (let i = 0; i < n; i++) {
      const v = exp[i] ?? 0;
      if (!Number.isFinite(v) || v <= 0) {
        throw new InvalidParameterError(
          "chisquare() expected frequencies must be finite and > 0",
          "f_exp",
          v
        );
      }
      sumExp += v;
    }
    // The test statistic is only meaningful when both distributions
    // describe the same total mass; compare with a relative tolerance.
    const rtol = Math.sqrt(Number.EPSILON);
    const denom = Math.max(Math.abs(sumObs), Math.abs(sumExp));
    if (Math.abs(sumObs - sumExp) > rtol * denom) {
      throw new InvalidParameterError(
        "chisquare() expected and observed frequencies must sum to the same value",
        "sum",
        { f_obs: sumObs, f_exp: sumExp }
      );
    }
    // Pearson's statistic: sum of (O - E)^2 / E.
    for (let i = 0; i < n; i++) {
      const vObs = obs[i] ?? 0;
      const vExp = exp[i] ?? 0;
      chiSq += (vObs - vExp) ** 2 / vExp;
    }
  }
  const df = n - 1;
  if (df < 1) {
    throw new InvalidParameterError(
      "chisquare() requires at least 2 categories (df must be >= 1)",
      "df",
      df
    );
  }
  // Upper-tail probability under the chi-square distribution with df dof.
  const pvalue = 1 - chiSquareCdf(chiSq, df);
  return { statistic: chiSq, pvalue };
}
1799
/**
 * One-sample Kolmogorov–Smirnov test.
 *
 * @param data sample values (tensor-like, flattened and sorted)
 * @param cdf either the string "norm" (standard normal) or a CDF callback
 * @returns {{statistic: number, pvalue: number}} KS statistic D and the
 *   asymptotic two-sided p-value from the Kolmogorov series
 * @throws InvalidParameterError on empty input or an unsupported
 *   distribution name
 */
function kstest(data, cdf) {
  const sorted = toDenseSortedArray1D(data);
  const n = sorted.length;
  if (n === 0) {
    throw new InvalidParameterError("kstest() requires at least one element", "n", n);
  }
  if (typeof cdf === "string" && cdf !== "norm") {
    throw new InvalidParameterError(
      `Unsupported distribution: '${cdf}'. Supported distributions: 'norm'`,
      "cdf",
      cdf
    );
  }
  const cdfFn = typeof cdf === "string" ? (v) => normalCdf(v) : cdf;
  // D = max over i of both one-sided discrepancies D+ and D-.
  let statistic = 0;
  for (let i = 0; i < n; i++) {
    const value = sorted[i] ?? 0;
    const f = cdfFn(value);
    statistic = Math.max(statistic, (i + 1) / n - f, f - i / n);
  }
  // Kolmogorov asymptotic series: p = 2 * sum_{k>=1} (-1)^{k-1} e^{-2 k^2 D^2 n}.
  let series = 0;
  for (let k = 1; k < 200; k++) {
    const term = Math.exp(-2 * k * k * statistic * statistic * n);
    series += k % 2 === 1 ? term : -term;
    if (term < 1e-12) break;
  }
  const pvalue = Math.max(0, Math.min(1, 2 * series));
  return { statistic, pvalue };
}
1830
/**
 * Omnibus normality test combining skewness and kurtosis z-scores
 * (the formulas match the D'Agostino–Pearson K^2 construction).
 *
 * @param a sample values (tensor-like, flattened and sorted)
 * @returns {{statistic: number, pvalue: number}} K^2 statistic and its
 *   right-tail p-value against chi-square with 2 degrees of freedom
 * @throws InvalidParameterError when fewer than 8 samples are given or the
 *   input is constant (zero standard deviation)
 */
function normaltest(a) {
  const x = toDenseSortedArray1D(a);
  const n = x.length;
  // The kurtosis approximation below needs n >= 8 to be meaningful.
  if (n < 8) {
    throw new InvalidParameterError("normaltest() requires at least 8 samples", "n", n);
  }
  const { mean: mean2, m2 } = meanAndM2(x);
  // m2n is the biased (population) variance: sum of squared deviations / n.
  const m2n = m2 / n;
  const std2 = Math.sqrt(m2n);
  if (!Number.isFinite(std2) || std2 === 0) {
    throw new InvalidParameterError("normaltest() is undefined for constant input", "std", std2);
  }
  // Third and fourth central moments.
  let m3 = 0;
  let m4 = 0;
  for (let i = 0; i < n; i++) {
    const d = (x[i] ?? 0) - mean2;
    m3 += d * d * d;
    m4 += d * d * d * d;
  }
  // Biased sample skewness g1 and (non-excess) kurtosis g2.
  const skew = m3 / n / m2n ** 1.5;
  const kurt = m4 / n / (m2n * m2n);
  // --- Skewness transform: map g1 to an approximately standard-normal z1
  // via the Johnson SU transformation (D'Agostino 1970 style constants).
  const y = skew * Math.sqrt((n + 1) * (n + 3) / (6 * (n - 2)));
  const beta2 = 3 * (n * n + 27 * n - 70) * (n + 1) * (n + 3) / ((n - 2) * (n + 5) * (n + 7) * (n + 9));
  const w2 = -1 + Math.sqrt(2 * (beta2 - 1));
  const delta = 1 / Math.sqrt(0.5 * Math.log(w2));
  const alpha = Math.sqrt(2 / (w2 - 1));
  const yScaled = y / alpha;
  // log(y + sqrt(y^2 + 1)) is asinh(y): the SU normalizing transform.
  const z1 = delta * Math.log(yScaled + Math.sqrt(yScaled * yScaled + 1));
  // --- Kurtosis transform: standardize g2, then apply the Anscombe–Glynn
  // cube-root normalization to get z2.
  const e = 3 * (n - 1) / (n + 1);
  const varb2 = 24 * n * (n - 2) * (n - 3) / ((n + 1) * (n + 1) * (n + 3) * (n + 5));
  const xval = (kurt - e) / Math.sqrt(varb2);
  const sqrtbeta1 = 6 * (n * n - 5 * n + 2) / ((n + 7) * (n + 9)) * Math.sqrt(6 * (n + 3) * (n + 5) / (n * (n - 2) * (n - 3)));
  const aTerm = 6 + 8 / sqrtbeta1 * (2 / sqrtbeta1 + Math.sqrt(1 + 4 / (sqrtbeta1 * sqrtbeta1)));
  const term1 = 1 - 2 / (9 * aTerm);
  const denom = 1 + xval * Math.sqrt(2 / (aTerm - 4));
  // Guard the signed cube root against a zero denominator (z2 becomes NaN).
  const term2 = denom === 0 ? Number.NaN : Math.sign(denom) * ((1 - 2 / aTerm) / Math.abs(denom)) ** (1 / 3);
  const z2 = (term1 - term2) / Math.sqrt(2 / (9 * aTerm));
  // K^2 = z1^2 + z2^2 ~ chi-square(2) under the null of normality.
  const k2 = z1 * z1 + z2 * z2;
  const pvalue = 1 - chiSquareCdf(k2, 2);
  return { statistic: k2, pvalue };
}
1871
/**
 * Shapiro–Wilk normality test.
 *
 * @param x sample values (tensor-like, flattened and sorted)
 * @returns the result of `shapiroWilk` on the sorted sample
 */
function shapiro(x) {
  // Delegate: the sorted dense sample is all shapiroWilk needs.
  return shapiroWilk(toDenseSortedArray1D(x));
}
1875
/**
 * Anderson–Darling test for normality.
 *
 * @param x sample values (tensor-like, flattened and sorted)
 * @returns {{statistic: number, critical_values: number[], significance_level: number[]}}
 *   the A^2 statistic with small-sample-adjusted critical values at the
 *   15%, 10%, 5%, 2.5% and 1% significance levels
 * @throws InvalidParameterError on empty or constant input
 */
function anderson(x) {
  const sorted = toDenseSortedArray1D(x);
  const n = sorted.length;
  if (n < 1) {
    throw new InvalidParameterError("anderson() requires at least one element", "n", n);
  }
  const { mean: mean2, m2 } = meanAndM2(sorted);
  // Unbiased sample variance; NaN for n == 1 (caught by the std check below).
  const variance2 = n > 1 ? m2 / (n - 1) : NaN;
  let std2 = Math.sqrt(variance2);
  if (!Number.isFinite(std2) || std2 === 0) {
    throw new InvalidParameterError("anderson() is undefined for constant input", "std", std2);
  }
  // For very small samples, replace the scale estimate with a robust
  // IQR-based one (IQR / 1.349 estimates sigma for a normal distribution),
  // provided it is finite and positive.
  if (n < 10) {
    // Linear-interpolation quantile over the already-sorted data.
    const quantile2 = (q) => {
      if (n === 1) return sorted[0] ?? 0;
      const pos = (n - 1) * q;
      const lo = Math.floor(pos);
      const hi = Math.ceil(pos);
      const v0 = sorted[lo] ?? 0;
      const v1 = sorted[hi] ?? v0;
      return v0 + (pos - lo) * (v1 - v0);
    };
    const q1 = quantile2(0.25);
    const q3 = quantile2(0.75);
    const iqr = q3 - q1;
    const robust = iqr / 1.349;
    if (Number.isFinite(robust) && robust > 0) {
      std2 = robust;
    }
  }
  // A^2 = -n - (1/n) * sum (2i - 1) [ln Phi(z_i) + ln(1 - Phi(z_{n+1-i}))],
  // with Phi clamped away from 0 and 1 so the logs stay finite.
  let A2 = 0;
  for (let i = 0; i < n; i++) {
    const zi = ((sorted[i] ?? 0) - mean2) / std2;
    const zj = ((sorted[n - 1 - i] ?? 0) - mean2) / std2;
    const PhiI = Math.max(1e-300, Math.min(1 - 1e-16, normalCdf(zi)));
    const PhiJ = Math.max(1e-300, Math.min(1 - 1e-16, normalCdf(zj)));
    A2 += (2 * (i + 1) - 1) * (Math.log(PhiI) + Math.log(1 - PhiJ));
  }
  A2 = -n - A2 / n;
  // Asymptotic critical values for the normal case, scaled by a
  // small-sample correction factor and rounded to 3 decimals.
  const baseCritical = [0.576, 0.656, 0.787, 0.918, 1.092];
  const factor = 1 + 4 / n - 25 / (n * n);
  const critical_values = baseCritical.map((v) => Math.round(v / factor * 1e3) / 1e3);
  return {
    statistic: A2,
    critical_values,
    significance_level: [0.15, 0.1, 0.05, 0.025, 0.01]
  };
}
1923
/**
 * Mann–Whitney U test for two independent samples (two-sided,
 * normal approximation with tie correction).
 *
 * @param x first sample (tensor-like)
 * @param y second sample (tensor-like)
 * @returns {{statistic: number, pvalue: number}} the smaller U and its
 *   two-sided p-value (NaN when the variance of U is non-positive)
 * @throws InvalidParameterError when either sample is empty
 */
function mannwhitneyu(x, y) {
  const sizeX = x.size;
  const sizeY = y.size;
  if (sizeX < 1 || sizeY < 1) {
    throw new InvalidParameterError("Both samples must be non-empty", "size", {
      x: sizeX,
      y: sizeY
    });
  }
  // Pool both samples; x occupies the first sizeX slots.
  const total = sizeX + sizeY;
  const pooled = new Float64Array(total);
  pooled.set(toDenseArray1D(x), 0);
  pooled.set(toDenseArray1D(y), sizeX);
  const { ranks, tieSum } = rankData(pooled);
  let rankSumX = 0;
  for (let i = 0; i < sizeX; i++) {
    rankSumX += ranks[i] ?? 0;
  }
  const U1 = rankSumX - sizeX * (sizeX + 1) / 2;
  const U2 = sizeX * sizeY - U1;
  const U = Math.min(U1, U2);
  const meanU = sizeX * sizeY / 2;
  // Tie-corrected variance of U under the null.
  const tieAdj = total > 1 ? tieSum / (total * (total - 1)) : 0;
  const varU = sizeX * sizeY * (total + 1 - tieAdj) / 12;
  if (varU <= 0) {
    return { statistic: U, pvalue: NaN };
  }
  // Continuity correction (toward the mean) only for larger samples.
  let continuity = 0;
  if (total > 20) {
    if (U < meanU) continuity = 0.5;
    else if (U > meanU) continuity = -0.5;
  }
  const z = (U - meanU + continuity) / Math.sqrt(varU);
  const pvalue = 2 * (1 - normalCdf(Math.abs(z)));
  return { statistic: U, pvalue };
}
1959
/**
 * Wilcoxon signed-rank test (two-sided, normal approximation with
 * continuity and tie corrections).
 *
 * @param x sample (tensor-like); when `y` is given, paired differences
 *   x - y are tested, otherwise x itself is tested against zero
 * @param y optional paired sample of the same length as x
 * @returns {{statistic: number, pvalue: number}} W+ (sum of positive
 *   ranks) and its two-sided p-value (NaN when variance is non-positive)
 * @throws InvalidParameterError on length mismatch or when every
 *   difference is exactly zero
 */
function wilcoxon(x, y) {
  const size = x.size;
  // Zero differences are dropped before ranking, as is conventional.
  const nonZero = [];
  if (y) {
    if (x.size !== y.size) {
      throw new InvalidParameterError("Paired samples must have equal length", "size", {
        x: x.size,
        y: y.size
      });
    }
    const xs = toDenseArray1D(x);
    const ys = toDenseArray1D(y);
    for (let i = 0; i < size; i++) {
      const d = (xs[i] ?? 0) - (ys[i] ?? 0);
      if (d !== 0) nonZero.push(d);
    }
  } else {
    const xs = toDenseArray1D(x);
    for (let i = 0; i < size; i++) {
      const v = xs[i] ?? 0;
      if (v !== 0) nonZero.push(v);
    }
  }
  if (nonZero.length === 0) {
    throw new InvalidParameterError(
      "wilcoxon() is undefined when all differences are zero",
      "diffs",
      nonZero.length
    );
  }
  // Rank by magnitude, then sum the ranks of positive differences.
  const m = nonZero.length;
  const magnitudes = new Float64Array(m);
  for (let i = 0; i < m; i++) {
    magnitudes[i] = Math.abs(nonZero[i] ?? 0);
  }
  const { ranks, tieSum } = rankData(magnitudes);
  let wPlus = 0;
  for (let i = 0; i < m; i++) {
    if ((nonZero[i] ?? 0) > 0) wPlus += ranks[i] ?? 0;
  }
  const meanW = m * (m + 1) / 4;
  // Null variance of W+ with the standard tie correction (tieSum / 48).
  const varW = m * (m + 1) * (2 * m + 1) / 24 - tieSum / 48;
  if (varW <= 0) {
    return { statistic: wPlus, pvalue: NaN };
  }
  // Continuity correction (toward the mean) only for larger samples.
  let continuity = 0;
  if (m > 20) {
    if (wPlus < meanW) continuity = 0.5;
    else if (wPlus > meanW) continuity = -0.5;
  }
  const z = (wPlus - meanW + continuity) / Math.sqrt(varW);
  const pvalue = 2 * (1 - normalCdf(Math.abs(z)));
  return { statistic: wPlus, pvalue };
}
2013
/**
 * Kruskal–Wallis H-test for independent samples (tie-corrected,
 * chi-square approximation with k - 1 degrees of freedom).
 *
 * @param samples two or more non-empty samples (tensor-like)
 * @returns {{statistic: number, pvalue: number}}
 * @throws InvalidParameterError with fewer than 2 groups, an empty group,
 *   or when all pooled values are identical (tie correction reaches 0)
 */
function kruskal(...samples) {
  const groupCount = samples.length;
  if (groupCount < 2) {
    throw new InvalidParameterError("kruskal() requires at least 2 groups", "k", groupCount);
  }
  const sizes = new Array(groupCount);
  const validatedSamples = new Array(groupCount);
  let total = 0;
  for (let g = 0; g < groupCount; g++) {
    const sample = samples[g];
    if (!sample || sample.size < 1) {
      throw new InvalidParameterError("kruskal() requires non-empty samples", "size", {
        group: g,
        size: sample?.size ?? 0
      });
    }
    validatedSamples[g] = sample;
    sizes[g] = sample.size;
    total += sample.size;
  }
  // Pool every observation, remembering which group each came from.
  const pooled = new Float64Array(total);
  const groupOf = new Int32Array(total);
  let cursor = 0;
  for (let g = 0; g < groupCount; g++) {
    const sample = validatedSamples[g];
    if (!sample) {
      // Unreachable after validation above; kept as a defensive guard.
      throw new InvalidParameterError("kruskal() requires non-empty samples", "sample", g);
    }
    const values = toDenseArray1D(sample);
    for (let i = 0; i < values.length; i++) {
      pooled[cursor] = values[i] ?? 0;
      groupOf[cursor] = g;
      cursor++;
    }
  }
  const { ranks, tieSum } = rankData(pooled);
  // Per-group rank sums.
  const rankSums = new Float64Array(groupCount);
  for (let i = 0; i < total; i++) {
    const g = groupOf[i] ?? 0;
    rankSums[g] = (rankSums[g] ?? 0) + (ranks[i] ?? 0);
  }
  // H = 12 / (N(N+1)) * sum(R_g^2 / n_g) - 3(N+1), then tie-corrected.
  let H = 0;
  for (let g = 0; g < groupCount; g++) {
    const rs = rankSums[g] ?? 0;
    H += rs * rs / (sizes[g] ?? 1);
  }
  H = 12 / (total * (total + 1)) * H - 3 * (total + 1);
  const tieCorrection = total > 1 ? 1 - tieSum / (total * total * total - total) : 1;
  if (tieCorrection <= 0) {
    throw new InvalidParameterError(
      "kruskal() is undefined when all numbers are identical",
      "tieCorrection",
      tieCorrection
    );
  }
  H /= tieCorrection;
  const df = groupCount - 1;
  const pvalue = 1 - chiSquareCdf(H, df);
  return { statistic: H, pvalue };
}
2075
/**
 * Friedman chi-square test for repeated measures (tie-corrected,
 * chi-square approximation with k - 1 degrees of freedom).
 *
 * @param samples three or more related samples of identical length
 * @returns {{statistic: number, pvalue: number}}
 * @throws InvalidParameterError with fewer than 3 samples, empty samples,
 *   mismatched lengths, or when every block is constant (tie correction 0)
 */
function friedmanchisquare(...samples) {
  const k = samples.length;
  if (k < 3) {
    throw new InvalidParameterError(
      "friedmanchisquare() requires at least 3 related samples",
      "k",
      k
    );
  }
  const n = samples[0]?.size ?? 0;
  if (n < 1) {
    throw new InvalidParameterError(
      "friedmanchisquare() requires all samples to be non-empty",
      "n",
      n
    );
  }
  // All samples must match the first sample's length.
  for (let i = 1; i < k; i++) {
    const sample = samples[i];
    if (sample && sample.size !== n) {
      throw new InvalidParameterError(
        "All samples must have the same length for Friedman test",
        "size",
        { expected: n, got: sample.size, sampleIndex: i }
      );
    }
  }
  const dense = samples.map(
    (sample) => sample ? toDenseArray1D(sample) : new Float64Array(0)
  );
  // Rank within each block (row across treatments), accumulating the
  // per-treatment rank sums and total tie statistic.
  const rankSums = new Float64Array(k);
  let tieSum = 0;
  const block = new Float64Array(k);
  for (let row = 0; row < n; row++) {
    for (let j = 0; j < k; j++) {
      block[j] = dense[j]?.[row] ?? 0;
    }
    const ranked = rankData(block);
    tieSum += ranked.tieSum;
    for (let j = 0; j < k; j++) {
      rankSums[j] = (rankSums[j] ?? 0) + (ranked.ranks[j] ?? 0);
    }
  }
  // chi^2 = 12 / (n k (k+1)) * sum(R_j^2) - 3 n (k+1), then tie-corrected.
  let chiSq = 0;
  for (let j = 0; j < k; j++) {
    const rs = rankSums[j] ?? 0;
    chiSq += rs * rs;
  }
  chiSq = 12 / (n * k * (k + 1)) * chiSq - 3 * n * (k + 1);
  const tieCorrection = n > 0 ? 1 - tieSum / (n * k * (k * k - 1)) : 1;
  if (tieCorrection <= 0) {
    throw new InvalidParameterError(
      "friedmanchisquare() is undefined when all numbers are identical within blocks",
      "tieCorrection",
      tieCorrection
    );
  }
  chiSq /= tieCorrection;
  const df = k - 1;
  const pvalue = 1 - chiSquareCdf(chiSq, df);
  return { statistic: chiSq, pvalue };
}
2139
/**
 * Levene test for equality of variances across groups.
 *
 * Transforms each observation into its absolute deviation from a group
 * center, then runs a one-way ANOVA on those deviations.
 *
 * @param center "mean", "median", or anything else for a 10%-trimmed mean
 *   (the Brown–Forsythe variant is center === "median")
 * @param samples two or more groups (tensor-like), each with >= 2 values
 * @returns {{statistic: number, pvalue: number}} W statistic and its
 *   right-tail p-value against F(k - 1, N - k); returns
 *   {Infinity, 0} when the within-group deviation sum is exactly zero
 * @throws InvalidParameterError with fewer than 2 groups, empty or
 *   single-element groups, or when N - k <= 0
 */
function levene(center, ...samples) {
  const k = samples.length;
  if (k < 2) {
    throw new InvalidParameterError("levene() requires at least 2 groups", "k", k);
  }
  const groups = [];
  const centers = [];
  for (let g = 0; g < k; g++) {
    const sample = samples[g];
    if (!sample || sample.size === 0) {
      throw new InvalidParameterError("levene() requires all groups to be non-empty", "groupSize", {
        group: g,
        size: sample?.size ?? 0
      });
    }
    // Sorted dense copy: sorting enables the median / trimmed-mean centers.
    const arr = toDenseSortedArray1D(sample);
    if (arr.length < 2) {
      throw new InvalidParameterError(
        "levene() requires at least 2 samples per group",
        "groupSize",
        arr.length
      );
    }
    groups.push(arr);
    if (center === "mean") {
      let sum = 0;
      for (let i = 0; i < arr.length; i++) sum += arr[i] ?? 0;
      centers.push(sum / arr.length);
    } else if (center === "median") {
      const mid = Math.floor(arr.length / 2);
      if (arr.length % 2 === 0) {
        centers.push(((arr[mid - 1] ?? 0) + (arr[mid] ?? 0)) / 2);
      } else {
        centers.push(arr[mid] ?? 0);
      }
    } else {
      // Default: 10%-trimmed mean (drops floor(0.1 * n) values per tail).
      const trimCount = Math.floor(arr.length * 0.1);
      let sum = 0;
      const n = arr.length - 2 * trimCount;
      for (let i = trimCount; i < arr.length - trimCount; i++) {
        sum += arr[i] ?? 0;
      }
      centers.push(sum / n);
    }
  }
  // Z holds |x - center| per group; groupMeansZ their per-group means.
  const Z = [];
  const groupMeansZ = [];
  let N = 0;
  let grandSumZ = 0;
  for (let g = 0; g < groups.length; g++) {
    const arr = groups[g];
    if (!arr) continue;
    const c = centers[g] ?? 0;
    const zArr = new Float64Array(arr.length);
    let sumZ = 0;
    for (let i = 0; i < arr.length; i++) {
      const absVal = Math.abs((arr[i] ?? 0) - c);
      zArr[i] = absVal;
      sumZ += absVal;
    }
    Z.push(zArr);
    groupMeansZ.push(sumZ / arr.length);
    N += arr.length;
    grandSumZ += sumZ;
  }
  const grandMeanZ = grandSumZ / N;
  // One-way ANOVA on the deviations: between- and within-group sums of squares.
  let SSB = 0;
  let SSW = 0;
  for (let g = 0; g < Z.length; g++) {
    const zArr = Z[g];
    if (!zArr) continue;
    const n = zArr.length;
    SSB += n * ((groupMeansZ[g] ?? 0) - grandMeanZ) ** 2;
    for (let i = 0; i < n; i++) {
      SSW += ((zArr[i] ?? 0) - (groupMeansZ[g] ?? 0)) ** 2;
    }
  }
  const dfB = k - 1;
  const dfW = N - k;
  if (dfW <= 0) {
    throw new InvalidParameterError(
      "levene() requires more total observations than groups",
      "dfW",
      dfW
    );
  }
  // Degenerate case: every deviation equals its group mean deviation.
  if (SSW === 0) {
    return { statistic: Infinity, pvalue: 0 };
  }
  const W = SSB / dfB / (SSW / dfW);
  const pvalue = 1 - fCdf(W, dfB, dfW);
  return { statistic: W, pvalue };
}
2232
/**
 * Bartlett test for equality of variances across groups
 * (chi-square approximation with k - 1 degrees of freedom).
 *
 * @param samples two or more groups (tensor-like), each with >= 2 values
 * @returns {{statistic: number, pvalue: number}}
 * @throws InvalidParameterError with fewer than 2 groups, empty or
 *   single-element groups, or when any group has zero variance
 */
function bartlett(...samples) {
  const k = samples.length;
  if (k < 2) {
    throw new InvalidParameterError("bartlett() requires at least 2 groups", "k", k);
  }
  const variances = [];
  const sizes = [];
  let N = 0;
  for (let g = 0; g < k; g++) {
    const sample = samples[g];
    if (!sample || sample.size === 0) {
      throw new InvalidParameterError(
        "bartlett() requires all groups to be non-empty",
        "groupSize",
        { group: g, size: sample?.size ?? 0 }
      );
    }
    const values = toDenseSortedArray1D(sample);
    const n = values.length;
    if (n < 2) {
      throw new InvalidParameterError(
        "bartlett() requires at least 2 samples per group",
        "groupSize",
        n
      );
    }
    // Unbiased sample variance via a two-pass mean / sum-of-squares.
    let mu = 0;
    for (let i = 0; i < n; i++) mu += values[i] ?? 0;
    mu /= n;
    let ss = 0;
    for (let i = 0; i < n; i++) {
      const d = (values[i] ?? 0) - mu;
      ss += d * d;
    }
    variances.push(ss / (n - 1));
    sizes.push(n);
    N += n;
  }
  // log(variance) below requires strictly positive variances.
  for (let g = 0; g < k; g++) {
    if ((variances[g] ?? 0) === 0) {
      throw new InvalidParameterError(
        "bartlett() is undefined when a group has zero variance",
        "variance",
        variances[g]
      );
    }
  }
  // Accumulate pooled variance numerator, weighted log-variances, and the
  // sum of inverse per-group degrees of freedom in a single pass.
  let pooledNumerator = 0;
  let sumLogVar = 0;
  let sumInvDf = 0;
  for (let g = 0; g < k; g++) {
    const df = (sizes[g] ?? 1) - 1;
    pooledNumerator += df * (variances[g] ?? 1);
    sumLogVar += df * Math.log(variances[g] ?? 1);
    sumInvDf += 1 / df;
  }
  const pooledVariance = pooledNumerator / (N - k);
  const T = (N - k) * Math.log(pooledVariance) - sumLogVar;
  // Bartlett's correction factor C.
  const C = 1 + 1 / (3 * (k - 1)) * (sumInvDf - 1 / (N - k));
  const chiSq = T / C;
  const df = k - 1;
  const pvalue = 1 - chiSquareCdf(chiSq, df);
  return { statistic: chiSq, pvalue };
}
2300
/**
 * One-way ANOVA F-test across two or more independent groups.
 *
 * Fix: the original validated every group for non-emptiness twice (an
 * identical check-and-throw appeared in both the validation and the
 * accumulation loop); validation now happens exactly once, up front,
 * with the same error type and message.
 *
 * @param samples two or more non-empty groups (tensor-like)
 * @returns {{statistic: number, pvalue: number}} F statistic and its
 *   right-tail p-value against F(k - 1, N - k); when the within-group
 *   mean square is zero, returns {NaN, NaN} if the between-group mean
 *   square is also zero, otherwise {Infinity, 0}
 * @throws InvalidParameterError with fewer than 2 groups, an empty group,
 *   or when N - k <= 0
 */
function f_oneway(...samples) {
  const k = samples.length;
  if (k < 2) {
    throw new InvalidParameterError("f_oneway() requires at least 2 groups", "groups", k);
  }
  // Validate all groups before densifying any data, so the error reports
  // the first empty group regardless of position.
  for (let g = 0; g < k; g++) {
    const sample = samples[g];
    if (!sample || sample.size === 0) {
      throw new InvalidParameterError(
        "f_oneway() requires all groups to be non-empty",
        "groupSize",
        { group: g, size: sample?.size ?? 0 }
      );
    }
  }
  // Densify each group and compute its size and mean.
  let N = 0;
  const means = [];
  const sizes = [];
  const groups = [];
  for (let g = 0; g < k; g++) {
    const arr = toDenseArray1D(samples[g]);
    const n = arr.length;
    groups.push(arr);
    sizes.push(n);
    N += n;
    let sum = 0;
    for (let i = 0; i < n; i++) {
      sum += arr[i] ?? 0;
    }
    means.push(sum / n);
  }
  // Grand mean is the size-weighted average of group means.
  let grandSum = 0;
  for (let g = 0; g < k; g++) {
    grandSum += (means[g] ?? 0) * (sizes[g] ?? 0);
  }
  const grandMean = grandSum / N;
  // Between-group (SSB) and within-group (SSW) sums of squares.
  let SSB = 0;
  let SSW = 0;
  for (let g = 0; g < groups.length; g++) {
    const arr = groups[g];
    if (!arr) continue;
    const n = arr.length;
    SSB += n * ((means[g] ?? 0) - grandMean) ** 2;
    for (let i = 0; i < n; i++) {
      SSW += ((arr[i] ?? 0) - (means[g] ?? 0)) ** 2;
    }
  }
  const dfB = k - 1;
  const dfW = N - k;
  if (dfW <= 0) {
    throw new InvalidParameterError(
      "f_oneway() requires at least one group with more than one sample",
      "dfW",
      dfW
    );
  }
  const MSB = SSB / dfB;
  const MSW = SSW / dfW;
  if (MSW === 0) {
    // No within-group variation: F is undefined (NaN) when the groups are
    // also identical to each other, otherwise infinite.
    const F2 = MSB === 0 ? NaN : Infinity;
    return { statistic: F2, pvalue: MSB === 0 ? NaN : 0 };
  }
  const F = MSB / MSW;
  const pvalue = 1 - fCdf(F, dfB, dfW);
  return { statistic: F, pvalue };
}
2374
+
2375
+ export { anderson, bartlett, chisquare, corrcoef, cov, f_oneway, friedmanchisquare, geometricMean, harmonicMean, kendalltau, kruskal, kstest, kurtosis, levene, mannwhitneyu, mean, median, mode, moment, normaltest, pearsonr, percentile, quantile, shapiro, skewness, spearmanr, stats_exports, std, trimMean, ttest_1samp, ttest_ind, ttest_rel, variance, wilcoxon };
2376
+ //# sourceMappingURL=chunk-XMWVME2W.js.map