bun-scikit 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +187 -0
  3. package/binding.gyp +21 -0
  4. package/docs/README.md +7 -0
  5. package/docs/native-abi.md +53 -0
  6. package/index.ts +1 -0
  7. package/package.json +76 -0
  8. package/scripts/build-node-addon.ts +26 -0
  9. package/scripts/build-zig-kernels.ts +50 -0
  10. package/scripts/check-api-docs-coverage.ts +52 -0
  11. package/scripts/check-benchmark-health.ts +140 -0
  12. package/scripts/install-native.ts +160 -0
  13. package/scripts/package-native-artifacts.ts +62 -0
  14. package/scripts/sync-benchmark-readme.ts +181 -0
  15. package/scripts/update-benchmark-history.ts +91 -0
  16. package/src/ensemble/RandomForestClassifier.ts +136 -0
  17. package/src/ensemble/RandomForestRegressor.ts +136 -0
  18. package/src/index.ts +32 -0
  19. package/src/linear_model/LinearRegression.ts +136 -0
  20. package/src/linear_model/LogisticRegression.ts +260 -0
  21. package/src/linear_model/SGDClassifier.ts +161 -0
  22. package/src/linear_model/SGDRegressor.ts +104 -0
  23. package/src/metrics/classification.ts +294 -0
  24. package/src/metrics/regression.ts +51 -0
  25. package/src/model_selection/GridSearchCV.ts +244 -0
  26. package/src/model_selection/KFold.ts +82 -0
  27. package/src/model_selection/RepeatedKFold.ts +49 -0
  28. package/src/model_selection/RepeatedStratifiedKFold.ts +50 -0
  29. package/src/model_selection/StratifiedKFold.ts +112 -0
  30. package/src/model_selection/StratifiedShuffleSplit.ts +211 -0
  31. package/src/model_selection/crossValScore.ts +165 -0
  32. package/src/model_selection/trainTestSplit.ts +82 -0
  33. package/src/naive_bayes/GaussianNB.ts +148 -0
  34. package/src/native/node-addon/bun_scikit_addon.cpp +450 -0
  35. package/src/native/zigKernels.ts +576 -0
  36. package/src/neighbors/KNeighborsClassifier.ts +85 -0
  37. package/src/pipeline/ColumnTransformer.ts +203 -0
  38. package/src/pipeline/FeatureUnion.ts +123 -0
  39. package/src/pipeline/Pipeline.ts +168 -0
  40. package/src/preprocessing/MinMaxScaler.ts +113 -0
  41. package/src/preprocessing/OneHotEncoder.ts +91 -0
  42. package/src/preprocessing/PolynomialFeatures.ts +158 -0
  43. package/src/preprocessing/RobustScaler.ts +149 -0
  44. package/src/preprocessing/SimpleImputer.ts +150 -0
  45. package/src/preprocessing/StandardScaler.ts +92 -0
  46. package/src/svm/LinearSVC.ts +117 -0
  47. package/src/tree/DecisionTreeClassifier.ts +394 -0
  48. package/src/tree/DecisionTreeRegressor.ts +407 -0
  49. package/src/types.ts +18 -0
  50. package/src/utils/linalg.ts +209 -0
  51. package/src/utils/validation.ts +78 -0
  52. package/zig/kernels.zig +1327 -0
@@ -0,0 +1,136 @@
1
+ import type { Matrix, RegressionModel, Vector } from "../types";
2
+ import { r2Score } from "../metrics/regression";
3
+ import { DecisionTreeRegressor } from "../tree/DecisionTreeRegressor";
4
+ import { assertFiniteVector, validateRegressionInputs } from "../utils/validation";
5
+ import type { MaxFeaturesOption } from "../tree/DecisionTreeClassifier";
6
+
7
/**
 * Construction options for the RandomForestRegressor ensemble.
 */
export interface RandomForestRegressorOptions {
  /** Number of trees in the forest (default 50; must be a positive integer). */
  nEstimators?: number;
  /** Maximum depth of each individual tree (default 12). */
  maxDepth?: number;
  /** Minimum number of samples required to split an internal node (default 2). */
  minSamplesSplit?: number;
  /** Minimum number of samples required at a leaf node (default 1). */
  minSamplesLeaf?: number;
  /** Feature-subsampling strategy passed to each tree (default "sqrt"). */
  maxFeatures?: MaxFeaturesOption;
  /** When true (default), each tree trains on rows sampled with replacement. */
  bootstrap?: boolean;
  /** Seed for deterministic bootstrap draws and per-tree seeds; omit for Math.random. */
  randomState?: number;
}
16
+
17
+ function mulberry32(seed: number): () => number {
18
+ let state = seed >>> 0;
19
+ return () => {
20
+ state += 0x6d2b79f5;
21
+ let t = Math.imul(state ^ (state >>> 15), 1 | state);
22
+ t ^= t + Math.imul(t ^ (t >>> 7), 61 | t);
23
+ return ((t ^ (t >>> 14)) >>> 0) / 4294967296;
24
+ };
25
+ }
26
+
27
+ export class RandomForestRegressor implements RegressionModel {
28
+ private readonly nEstimators: number;
29
+ private readonly maxDepth?: number;
30
+ private readonly minSamplesSplit?: number;
31
+ private readonly minSamplesLeaf?: number;
32
+ private readonly maxFeatures: MaxFeaturesOption;
33
+ private readonly bootstrap: boolean;
34
+ private readonly randomState?: number;
35
+ private trees: DecisionTreeRegressor[] = [];
36
+
37
+ constructor(options: RandomForestRegressorOptions = {}) {
38
+ this.nEstimators = options.nEstimators ?? 50;
39
+ this.maxDepth = options.maxDepth ?? 12;
40
+ this.minSamplesSplit = options.minSamplesSplit ?? 2;
41
+ this.minSamplesLeaf = options.minSamplesLeaf ?? 1;
42
+ this.maxFeatures = options.maxFeatures ?? "sqrt";
43
+ this.bootstrap = options.bootstrap ?? true;
44
+ this.randomState = options.randomState;
45
+
46
+ if (!Number.isInteger(this.nEstimators) || this.nEstimators < 1) {
47
+ throw new Error(`nEstimators must be a positive integer. Got ${this.nEstimators}.`);
48
+ }
49
+ }
50
+
51
+ fit(X: Matrix, y: Vector): this {
52
+ validateRegressionInputs(X, y);
53
+
54
+ const sampleCount = X.length;
55
+ const featureCount = X[0].length;
56
+ const random = this.randomState === undefined ? Math.random : mulberry32(this.randomState);
57
+ const flattenedX = this.flattenTrainingMatrix(X, sampleCount, featureCount);
58
+ const yValues = this.toFloat64Vector(y);
59
+ this.trees = new Array(this.nEstimators);
60
+
61
+ for (let estimatorIndex = 0; estimatorIndex < this.nEstimators; estimatorIndex += 1) {
62
+ const sampleIndices = new Uint32Array(sampleCount);
63
+ if (this.bootstrap) {
64
+ for (let i = 0; i < sampleCount; i += 1) {
65
+ sampleIndices[i] = Math.floor(random() * sampleCount);
66
+ }
67
+ } else {
68
+ for (let i = 0; i < sampleCount; i += 1) {
69
+ sampleIndices[i] = i;
70
+ }
71
+ }
72
+
73
+ const tree = new DecisionTreeRegressor({
74
+ maxDepth: this.maxDepth,
75
+ minSamplesSplit: this.minSamplesSplit,
76
+ minSamplesLeaf: this.minSamplesLeaf,
77
+ maxFeatures: this.maxFeatures,
78
+ randomState:
79
+ this.randomState === undefined ? undefined : this.randomState + estimatorIndex + 1,
80
+ });
81
+ tree.fit(X, y, sampleIndices, true, flattenedX, yValues);
82
+ this.trees[estimatorIndex] = tree;
83
+ }
84
+
85
+ return this;
86
+ }
87
+
88
+ predict(X: Matrix): Vector {
89
+ if (this.trees.length === 0) {
90
+ throw new Error("RandomForestRegressor has not been fitted.");
91
+ }
92
+
93
+ const treePredictions = this.trees.map((tree) => tree.predict(X));
94
+ const sampleCount = X.length;
95
+ const predictions = new Array<number>(sampleCount).fill(0);
96
+
97
+ for (let sampleIndex = 0; sampleIndex < sampleCount; sampleIndex += 1) {
98
+ let sum = 0;
99
+ for (let treeIndex = 0; treeIndex < treePredictions.length; treeIndex += 1) {
100
+ sum += treePredictions[treeIndex][sampleIndex];
101
+ }
102
+ predictions[sampleIndex] = sum / this.trees.length;
103
+ }
104
+
105
+ return predictions;
106
+ }
107
+
108
+ score(X: Matrix, y: Vector): number {
109
+ assertFiniteVector(y);
110
+ return r2Score(y, this.predict(X));
111
+ }
112
+
113
+ private flattenTrainingMatrix(
114
+ X: Matrix,
115
+ sampleCount: number,
116
+ featureCount: number,
117
+ ): Float64Array {
118
+ const flattened = new Float64Array(sampleCount * featureCount);
119
+ for (let i = 0; i < sampleCount; i += 1) {
120
+ const row = X[i];
121
+ const rowOffset = i * featureCount;
122
+ for (let j = 0; j < featureCount; j += 1) {
123
+ flattened[rowOffset + j] = row[j];
124
+ }
125
+ }
126
+ return flattened;
127
+ }
128
+
129
+ private toFloat64Vector(y: Vector): Float64Array {
130
+ const out = new Float64Array(y.length);
131
+ for (let i = 0; i < y.length; i += 1) {
132
+ out[i] = y[i];
133
+ }
134
+ return out;
135
+ }
136
+ }
package/src/index.ts ADDED
@@ -0,0 +1,32 @@
1
// Public API surface of bun-scikit: every estimator, transformer, splitter,
// and metric is re-exported here so consumers can import from the package root.

// Shared type aliases (Matrix, Vector, model interfaces).
export * from "./types";

// Preprocessing transformers.
export * from "./preprocessing/StandardScaler";
export * from "./preprocessing/MinMaxScaler";
export * from "./preprocessing/RobustScaler";
export * from "./preprocessing/PolynomialFeatures";
export * from "./preprocessing/SimpleImputer";
export * from "./preprocessing/OneHotEncoder";
// Linear models.
export * from "./linear_model/LinearRegression";
export * from "./linear_model/LogisticRegression";
export * from "./linear_model/SGDClassifier";
export * from "./linear_model/SGDRegressor";
// Other estimator families.
export * from "./neighbors/KNeighborsClassifier";
export * from "./naive_bayes/GaussianNB";
export * from "./svm/LinearSVC";
export * from "./tree/DecisionTreeClassifier";
export * from "./tree/DecisionTreeRegressor";
export * from "./ensemble/RandomForestClassifier";
export * from "./ensemble/RandomForestRegressor";
// Model selection: splitters, cross-validation, and hyper-parameter search.
export * from "./model_selection/trainTestSplit";
export * from "./model_selection/KFold";
export * from "./model_selection/StratifiedKFold";
export * from "./model_selection/StratifiedShuffleSplit";
export * from "./model_selection/RepeatedKFold";
export * from "./model_selection/RepeatedStratifiedKFold";
export * from "./model_selection/crossValScore";
export * from "./model_selection/GridSearchCV";
// Composition utilities.
export * from "./pipeline/Pipeline";
export * from "./pipeline/ColumnTransformer";
export * from "./pipeline/FeatureUnion";
// Evaluation metrics.
export * from "./metrics/regression";
export * from "./metrics/classification";
@@ -0,0 +1,136 @@
1
+ import type { Matrix, RegressionModel, Vector } from "../types";
2
+ import { r2Score } from "../metrics/regression";
3
+ import { dot } from "../utils/linalg";
4
+ import { getZigKernels } from "../native/zigKernels";
5
+ import {
6
+ assertConsistentRowSize,
7
+ assertFiniteMatrix,
8
+ assertFiniteVector,
9
+ validateRegressionInputs,
10
+ } from "../utils/validation";
11
+
12
/** Construction options for LinearRegression. */
export interface LinearRegressionOptions {
  /** Whether to fit an intercept term (default true). */
  fitIntercept?: boolean;
  /** Solver name; only "normal" (normal-equation) is supported (default "normal"). */
  solver?: "normal";
}
16
+
17
+ export class LinearRegression implements RegressionModel {
18
+ coef_: Vector = [];
19
+ intercept_ = 0;
20
+ fitBackend_: "zig" = "zig";
21
+ fitBackendLibrary_: string | null = null;
22
+
23
+ private readonly fitIntercept: boolean;
24
+ private readonly solver: "normal";
25
+ private isFitted = false;
26
+
27
+ constructor(options: LinearRegressionOptions = {}) {
28
+ this.fitIntercept = options.fitIntercept ?? true;
29
+ this.solver = options.solver ?? "normal";
30
+ }
31
+
32
+ fit(X: Matrix, y: Vector): this {
33
+ validateRegressionInputs(X, y);
34
+ if (this.solver !== "normal") {
35
+ throw new Error("LinearRegression solver 'normal' is required in zig-only mode.");
36
+ }
37
+ this.fitNormalEquationNative(X, y);
38
+
39
+ this.isFitted = true;
40
+ return this;
41
+ }
42
+
43
+ predict(X: Matrix): Vector {
44
+ if (!this.isFitted) {
45
+ throw new Error("LinearRegression has not been fitted.");
46
+ }
47
+
48
+ assertConsistentRowSize(X);
49
+ assertFiniteMatrix(X);
50
+
51
+ if (X[0].length !== this.coef_.length) {
52
+ throw new Error(
53
+ `Feature size mismatch. Expected ${this.coef_.length}, got ${X[0].length}.`,
54
+ );
55
+ }
56
+
57
+ return X.map((row) => this.intercept_ + dot(row, this.coef_));
58
+ }
59
+
60
+ score(X: Matrix, y: Vector): number {
61
+ assertFiniteVector(y);
62
+ return r2Score(y, this.predict(X));
63
+ }
64
+
65
+ private fitNormalEquationNative(X: Matrix, y: Vector): void {
66
+ const kernels = getZigKernels();
67
+ if (!kernels) {
68
+ throw new Error(
69
+ "LinearRegression requires native Zig kernels. Build them with `bun run native:build`.",
70
+ );
71
+ }
72
+ if (
73
+ !kernels.linearModelCreate ||
74
+ !kernels.linearModelDestroy ||
75
+ !kernels.linearModelFit ||
76
+ !kernels.linearModelCopyCoefficients ||
77
+ !kernels.linearModelGetIntercept
78
+ ) {
79
+ throw new Error("Native linear model symbols are not available.");
80
+ }
81
+
82
+ const nSamples = X.length;
83
+ const nFeatures = X[0].length;
84
+ const flattenedX = this.flattenMatrix(X);
85
+ const yBuffer = this.toFloat64Vector(y);
86
+ const handle = kernels.linearModelCreate(nFeatures, this.fitIntercept ? 1 : 0);
87
+ if (handle === 0n) {
88
+ throw new Error("Failed to create native linear model handle.");
89
+ }
90
+
91
+ try {
92
+ const fitStatus = kernels.linearModelFit(handle, flattenedX, yBuffer, nSamples, 1e-8);
93
+ if (fitStatus !== 1) {
94
+ throw new Error("Native linear model fit failed.");
95
+ }
96
+
97
+ const coefficients = new Float64Array(nFeatures);
98
+ const copied = kernels.linearModelCopyCoefficients(handle, coefficients);
99
+ if (copied !== 1) {
100
+ throw new Error("Failed to copy native linear coefficients.");
101
+ }
102
+
103
+ this.coef_ = Array.from(coefficients);
104
+ this.intercept_ = kernels.linearModelGetIntercept(handle);
105
+ this.fitBackend_ = "zig";
106
+ this.fitBackendLibrary_ = kernels.libraryPath;
107
+ } catch (error) {
108
+ kernels.linearModelDestroy(handle);
109
+ throw error;
110
+ }
111
+ kernels.linearModelDestroy(handle);
112
+ }
113
+
114
+ private flattenMatrix(X: Matrix): Float64Array {
115
+ const rowCount = X.length;
116
+ const featureCount = X[0].length;
117
+ const flattenedX = new Float64Array(rowCount * featureCount);
118
+ for (let i = 0; i < rowCount; i += 1) {
119
+ const row = X[i];
120
+ const rowOffset = i * featureCount;
121
+ for (let j = 0; j < featureCount; j += 1) {
122
+ flattenedX[rowOffset + j] = row[j];
123
+ }
124
+ }
125
+ return flattenedX;
126
+ }
127
+
128
+ private toFloat64Vector(y: Vector): Float64Array {
129
+ const yBuffer = new Float64Array(y.length);
130
+ for (let i = 0; i < y.length; i += 1) {
131
+ yBuffer[i] = y[i];
132
+ }
133
+ return yBuffer;
134
+ }
135
+
136
+ }
@@ -0,0 +1,260 @@
1
+ import type { ClassificationModel, Matrix, Vector } from "../types";
2
+ import { dot } from "../utils/linalg";
3
+ import {
4
+ assertConsistentRowSize,
5
+ assertFiniteMatrix,
6
+ assertFiniteVector,
7
+ validateClassificationInputs,
8
+ } from "../utils/validation";
9
+ import { accuracyScore } from "../metrics/classification";
10
+ import { getZigKernels } from "../native/zigKernels";
11
+
12
/** Construction options for LogisticRegression. */
export interface LogisticRegressionOptions {
  /** Whether to fit an intercept term (default true). */
  fitIntercept?: boolean;
  /** Optimizer: plain gradient descent ("gd", default) or L-BFGS ("lbfgs"). */
  solver?: "gd" | "lbfgs";
  /** Step size for the "gd" solver (default 0.1). */
  learningRate?: number;
  /** Maximum optimizer iterations/epochs (default 20_000). */
  maxIter?: number;
  /** Convergence tolerance on parameter updates (default 1e-8). */
  tolerance?: number;
  /** L2 regularization strength (default 0, i.e. unregularized). */
  l2?: number;
  /** History size for the L-BFGS solver (default 7). */
  lbfgsMemory?: number;
}
21
+
22
+ function sigmoid(z: number): number {
23
+ if (z >= 0) {
24
+ const expNeg = Math.exp(-z);
25
+ return 1 / (1 + expNeg);
26
+ }
27
+ const expPos = Math.exp(z);
28
+ return expPos / (1 + expPos);
29
+ }
30
+
31
+ export class LogisticRegression implements ClassificationModel {
32
+ coef_: Vector = [];
33
+ intercept_ = 0;
34
+ classes_: Vector = [0, 1];
35
+ fitBackend_: "zig" = "zig";
36
+ fitBackendLibrary_: string | null = null;
37
+
38
+ private readonly fitIntercept: boolean;
39
+ private readonly solver: "gd" | "lbfgs";
40
+ private readonly learningRate: number;
41
+ private readonly maxIter: number;
42
+ private readonly tolerance: number;
43
+ private readonly l2: number;
44
+ private readonly lbfgsMemory: number;
45
+ private isFitted = false;
46
+
47
+ constructor(options: LogisticRegressionOptions = {}) {
48
+ this.fitIntercept = options.fitIntercept ?? true;
49
+ this.solver = options.solver ?? "gd";
50
+ this.learningRate = options.learningRate ?? 0.1;
51
+ this.maxIter = options.maxIter ?? 20_000;
52
+ this.tolerance = options.tolerance ?? 1e-8;
53
+ this.l2 = options.l2 ?? 0;
54
+ this.lbfgsMemory = options.lbfgsMemory ?? 7;
55
+ }
56
+
57
+ fit(X: Matrix, y: Vector): this {
58
+ validateClassificationInputs(X, y);
59
+
60
+ const nSamples = X.length;
61
+ const nFeatures = X[0].length;
62
+ const flattenedX = this.flattenMatrix(X);
63
+ const yBuffer = this.toFloat64Vector(y);
64
+ const coefficients = new Float64Array(nFeatures);
65
+ const gradients = new Float64Array(nFeatures);
66
+ const intercept = new Float64Array(1);
67
+
68
+ const kernels = getZigKernels();
69
+ if (!kernels) {
70
+ throw new Error(
71
+ "LogisticRegression requires native Zig kernels. Build them with `bun run native:build`.",
72
+ );
73
+ }
74
+
75
+ if (
76
+ kernels?.logisticModelCreate &&
77
+ kernels.logisticModelDestroy &&
78
+ kernels.logisticModelCopyCoefficients &&
79
+ kernels.logisticModelGetIntercept
80
+ ) {
81
+ const fitNative =
82
+ this.solver === "lbfgs" ? kernels.logisticModelFitLbfgs : kernels.logisticModelFit;
83
+ if (!fitNative) {
84
+ throw new Error(
85
+ `LogisticRegression solver '${this.solver}' is unavailable in native kernels.`,
86
+ );
87
+ }
88
+
89
+ const handle = kernels.logisticModelCreate(nFeatures, this.fitIntercept ? 1 : 0);
90
+ if (handle === 0n) {
91
+ throw new Error("Failed to create native logistic model handle.");
92
+ }
93
+
94
+ try {
95
+ if (!fitNative) {
96
+ throw new Error("Native solver function is unavailable.");
97
+ }
98
+ const epochsRan =
99
+ this.solver === "lbfgs"
100
+ ? kernels.logisticModelFitLbfgs!(
101
+ handle,
102
+ flattenedX,
103
+ yBuffer,
104
+ nSamples,
105
+ this.maxIter,
106
+ this.tolerance,
107
+ this.l2,
108
+ this.lbfgsMemory,
109
+ )
110
+ : kernels.logisticModelFit!(
111
+ handle,
112
+ flattenedX,
113
+ yBuffer,
114
+ nSamples,
115
+ this.learningRate,
116
+ this.l2,
117
+ this.maxIter,
118
+ this.tolerance,
119
+ );
120
+ if (epochsRan === 0n && this.maxIter > 0) {
121
+ throw new Error("Native logistic model fit failed.");
122
+ }
123
+
124
+ const copied = kernels.logisticModelCopyCoefficients(handle, coefficients);
125
+ if (copied !== 1) {
126
+ throw new Error("Failed to copy native logistic coefficients.");
127
+ }
128
+
129
+ this.fitBackend_ = "zig";
130
+ this.fitBackendLibrary_ = kernels.libraryPath;
131
+ this.coef_ = Array.from(coefficients);
132
+ this.intercept_ = kernels.logisticModelGetIntercept(handle);
133
+ this.isFitted = true;
134
+ kernels.logisticModelDestroy(handle);
135
+ return this;
136
+ } catch (error) {
137
+ kernels.logisticModelDestroy(handle);
138
+ throw error;
139
+ }
140
+ }
141
+
142
+ if (this.solver === "lbfgs") {
143
+ throw new Error("LogisticRegression solver 'lbfgs' requires native model-handle kernels.");
144
+ }
145
+
146
+ if (kernels?.logisticTrainEpoch) {
147
+ if (kernels.logisticTrainEpochs) {
148
+ kernels.logisticTrainEpochs(
149
+ flattenedX,
150
+ yBuffer,
151
+ nSamples,
152
+ nFeatures,
153
+ coefficients,
154
+ intercept,
155
+ gradients,
156
+ this.learningRate,
157
+ this.l2,
158
+ this.fitIntercept ? 1 : 0,
159
+ this.maxIter,
160
+ this.tolerance,
161
+ );
162
+ } else {
163
+ for (let iter = 0; iter < this.maxIter; iter += 1) {
164
+ const maxUpdate = kernels.logisticTrainEpoch(
165
+ flattenedX,
166
+ yBuffer,
167
+ nSamples,
168
+ nFeatures,
169
+ coefficients,
170
+ intercept,
171
+ gradients,
172
+ this.learningRate,
173
+ this.l2,
174
+ this.fitIntercept ? 1 : 0,
175
+ );
176
+
177
+ if (maxUpdate < this.tolerance) {
178
+ break;
179
+ }
180
+ }
181
+ }
182
+
183
+ this.fitBackend_ = "zig";
184
+ this.fitBackendLibrary_ = kernels.libraryPath;
185
+ this.coef_ = Array.from(coefficients);
186
+ this.intercept_ = intercept[0];
187
+ this.isFitted = true;
188
+ return this;
189
+ }
190
+
191
+ throw new Error(
192
+ "Native logistic kernels are unavailable. Rebuild with `bun run native:build` and ensure model-handle or epoch kernels are exported.",
193
+ );
194
+ }
195
+
196
+ predictProba(X: Matrix): Matrix {
197
+ if (!this.isFitted) {
198
+ throw new Error("LogisticRegression has not been fitted.");
199
+ }
200
+
201
+ assertConsistentRowSize(X);
202
+ assertFiniteMatrix(X);
203
+
204
+ if (X[0].length !== this.coef_.length) {
205
+ throw new Error(
206
+ `Feature size mismatch. Expected ${this.coef_.length}, got ${X[0].length}.`,
207
+ );
208
+ }
209
+
210
+ return X.map((row) => {
211
+ const positive = sigmoid(this.intercept_ + dot(row, this.coef_));
212
+ return [1 - positive, positive];
213
+ });
214
+ }
215
+
216
+ predict(X: Matrix): Vector {
217
+ if (!this.isFitted) {
218
+ throw new Error("LogisticRegression has not been fitted.");
219
+ }
220
+
221
+ assertConsistentRowSize(X);
222
+ assertFiniteMatrix(X);
223
+
224
+ if (X[0].length !== this.coef_.length) {
225
+ throw new Error(
226
+ `Feature size mismatch. Expected ${this.coef_.length}, got ${X[0].length}.`,
227
+ );
228
+ }
229
+
230
+ return this.predictProba(X).map((pair) => (pair[1] >= 0.5 ? 1 : 0));
231
+ }
232
+
233
+ score(X: Matrix, y: Vector): number {
234
+ assertFiniteVector(y);
235
+ return accuracyScore(y, this.predict(X));
236
+ }
237
+
238
+ private flattenMatrix(X: Matrix): Float64Array {
239
+ const rowCount = X.length;
240
+ const featureCount = X[0].length;
241
+ const flattenedX = new Float64Array(rowCount * featureCount);
242
+ for (let i = 0; i < rowCount; i += 1) {
243
+ const row = X[i];
244
+ const rowOffset = i * featureCount;
245
+ for (let j = 0; j < featureCount; j += 1) {
246
+ flattenedX[rowOffset + j] = row[j];
247
+ }
248
+ }
249
+ return flattenedX;
250
+ }
251
+
252
+ private toFloat64Vector(y: Vector): Float64Array {
253
+ const yBuffer = new Float64Array(y.length);
254
+ for (let i = 0; i < y.length; i += 1) {
255
+ yBuffer[i] = y[i];
256
+ }
257
+ return yBuffer;
258
+ }
259
+
260
+ }