@mni-ml/framework 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2) hide show
  1. package/README.md +117 -0
  2. package/package.json +3 -2
package/README.md ADDED
@@ -0,0 +1,117 @@
1
+ # @mni-ml/framework
2
+
3
+ A minimal machine learning library written in TypeScript. Implements core abstractions found in PyTorch — autograd, tensors, modules, and training — from scratch.
4
+
5
+ Built for learning and experimentation. Inspired by [minitorch](https://minitorch.github.io/).
6
+
7
+ ## Install
8
+
9
+ ```bash
10
+ npm install @mni-ml/framework
11
+ ```
12
+
13
+ ## What's included
14
+
15
+ - Scalar and tensor automatic differentiation (autograd)
16
+ - N-dimensional tensors backed by `Float64Array` with broadcasting and strided storage
17
+ - Element-wise, pairwise, and reduction ops, matrix multiplication, 1D and 2D convolutions
18
+ - Parallel CPU ops via worker threads
19
+ - Module system with automatic parameter registration, `train()`/`eval()` modes
20
+ - Layers: `Linear`, `Conv1d`, `Conv2d`, `Embedding`, `ReLU`, `Sigmoid`, `Tanh`
21
+ - Loss functions: `mseLoss`, `crossEntropyLoss`
22
+ - Functional ops: `softmax`, `logsoftmax`, `dropout`, `avgpool2d`, `maxpool2d`
23
+ - SGD optimizer
24
+ - Built-in 2D classification datasets
25
+
26
+ ## Quick start
27
+
28
+ ```typescript
29
+ import {
30
+ Tensor, Linear, ReLU, SGD, mseLoss, Module, Parameter
31
+ } from "@mni-ml/framework";
32
+
33
+ class MLP extends Module {
34
+ l1: Linear;
35
+ l2: Linear;
36
+ relu: ReLU;
37
+
38
+ constructor() {
39
+ super();
40
+ this.l1 = new Linear(2, 10);
41
+ this.relu = new ReLU();
42
+ this.l2 = new Linear(10, 1);
43
+ }
44
+
45
+ forward(x: Tensor): Tensor {
46
+ return this.l2.forward(this.relu.forward(this.l1.forward(x)));
47
+ }
48
+ }
49
+
50
+ const model = new MLP();
51
+ const opt = new SGD(model.parameters(), 0.05);
52
+
53
+ for (let epoch = 0; epoch < 100; epoch++) {
54
+ const x = Tensor.tensor([[0.1, 0.9], [0.8, 0.2]]);
55
+ const target = Tensor.tensor([[1], [0]]);
56
+
57
+ const pred = model.forward(x);
58
+ const loss = mseLoss(pred, target);
59
+
60
+ opt.zeroGrad();
61
+ loss.backward();
62
+ opt.step();
63
+ }
64
+ ```
65
+
66
+ ## API
67
+
68
+ ### Tensor
69
+
70
+ ```typescript
71
+ Tensor.tensor([[1, 2], [3, 4]]) // from nested arrays
72
+ Tensor.zeros([3, 3]) // zeros
73
+ Tensor.ones([2, 4]) // ones
74
+ Tensor.rand([2, 3]) // uniform random
75
+
76
+ t.add(other) t.sub(other) t.mul(other) // arithmetic
77
+ t.neg() t.exp() t.log() // unary
78
+ t.sigmoid() t.relu() // activations
79
+ t.matmul(other) // matrix multiply
80
+ t.conv1d(weight) t.conv2d(weight) // convolutions
81
+ t.sum(dim?) t.mean(dim?) t.max(dim) // reductions
82
+ t.permute(...order) t.view(...shape) // reshaping
83
+ t.backward() // backpropagation
84
+ ```
85
+
86
+ ### Modules
87
+
88
+ | Module | Description |
89
+ |--------|-------------|
90
+ | `Linear(in, out)` | Fully connected layer |
91
+ | `Conv1d(inCh, outCh, kernelW)` | 1D convolution |
92
+ | `Conv2d(inCh, outCh, [kH, kW])` | 2D convolution |
93
+ | `Embedding(numEmb, embDim)` | Lookup table with trainable weights |
94
+ | `ReLU` | Rectified linear unit |
95
+ | `Sigmoid` | Sigmoid activation |
96
+ | `Tanh` | Hyperbolic tangent activation |
97
+
98
+ ### Loss functions
99
+
100
+ | Function | Use case |
101
+ |----------|----------|
102
+ | `mseLoss(pred, target)` | Regression |
103
+ | `crossEntropyLoss(pred, target)` | Classification |
104
+
105
+ ### Functional
106
+
107
+ ```typescript
108
+ softmax(input, dim)
109
+ logsoftmax(input, dim)
110
+ dropout(input, rate, ignore)
111
+ avgpool2d(input, [kH, kW])
112
+ maxpool2d(input, [kH, kW])
113
+ ```
114
+
115
+ ## License
116
+
117
+ MIT
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@mni-ml/framework",
3
3
  "type": "module",
4
- "version": "0.0.1",
4
+ "version": "0.0.2",
5
5
  "description": "A minimal machine learning library in TypeScript — autograd, tensors, neural network modules, and training utilities inspired by minitorch.",
6
6
  "license": "MIT",
7
7
  "repository": {
@@ -24,7 +24,8 @@
24
24
  }
25
25
  },
26
26
  "files": [
27
- "dist"
27
+ "dist",
28
+ "README.md"
28
29
  ],
29
30
  "devDependencies": {
30
31
  "@fast-check/jest": "^2.1.1",