mini-jstorch 1.2.1 → 1.2.2
This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/package.json +1 -1
- package/src/Dummy/Experiments.js +0 -256
package/package.json
CHANGED
package/src/Dummy/Experiments.js
DELETED
@@ -1,256 +0,0 @@
-// MINI JSTORCH ENGINE
-// ENGINE VER: 1.2.0
-// LICENSE: MIT (C) Rizal 2025
-
-// ---------------------- Utilities ----------------------
-function zeros(rows, cols) { return Array.from({length:rows},()=>Array(cols).fill(0)); }
-function ones(rows, cols) { return Array.from({length:rows},()=>Array(cols).fill(1)); }
-function randomMatrix(rows, cols, scale=0.1){ return Array.from({length:rows},()=>Array.from({length:cols},()=> (Math.random()*2-1)*scale)); }
-function transpose(matrix){ return matrix[0].map((_,i)=>matrix.map(row=>row[i])); }
-function addMatrices(a,b){ return a.map((row,i)=>row.map((v,j)=>v+(b[i] && b[i][j]!==undefined?b[i][j]:0))); }
-function dot(a,b){ const res=zeros(a.length,b[0].length); for(let i=0;i<a.length;i++) for(let j=0;j<b[0].length;j++) for(let k=0;k<a[0].length;k++) res[i][j]+=a[i][k]*b[k][j]; return res; }
-function softmax(x){ const m=Math.max(...x); const exps=x.map(v=>Math.exp(v-m)); const s=exps.reduce((a,b)=>a+b,0); return exps.map(v=>v/s); }
-function crossEntropy(pred,target){ const eps=1e-12; return -target.reduce((sum,t,i)=>sum+t*Math.log(pred[i]+eps),0); }
-
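For reviewers tracing the removal: softmax here is the numerically stabilized form (max subtraction before exponentiation), and crossEntropy expects an already-softmaxed probability vector plus a one-hot target. A minimal sanity check, assuming the helpers are copied out of the deleted file (they were module-internal, not exported):

    const p = softmax([2, 1, 0]);            // ~[0.665, 0.245, 0.090], sums to 1
    const loss = crossEntropy(p, [1, 0, 0]); // -log(0.665) ≈ 0.408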
-// ---------------------- Tensor ----------------------
-export class Tensor {
-  constructor(data){ this.data=data; this.grad=zeros(data.length,data[0].length); }
-  shape(){ return [this.data.length,this.data[0].length]; }
-  add(t){ return t instanceof Tensor?this.data.map((r,i)=>r.map((v,j)=>v+t.data[i][j])):this.data.map(r=>r.map(v=>v+t)); }
-  sub(t){ return t instanceof Tensor?this.data.map((r,i)=>r.map((v,j)=>v-t.data[i][j])):this.data.map(r=>r.map(v=>v-t)); }
-  mul(t){ return t instanceof Tensor?this.data.map((r,i)=>r.map((v,j)=>v*t.data[i][j])):this.data.map(r=>r.map(v=>v*t)); }
-  matmul(t){ if(t instanceof Tensor) return dot(this.data,t.data); else throw new Error("matmul requires Tensor"); }
-  transpose(){ return transpose(this.data); }
-  flatten(){ return this.data.flat(); }
-  static zeros(r,c){ return new Tensor(zeros(r,c)); }
-  static ones(r,c){ return new Tensor(ones(r,c)); }
-  static random(r,c,scale=0.1){ return new Tensor(randomMatrix(r,c,scale)); }
-}
-
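Worth noting for anyone vendoring the deleted class: add, sub, mul, matmul, and transpose all return plain nested arrays rather than new Tensor instances, so calls do not chain. A sketch of the implied usage:

    const a = Tensor.random(2, 3);
    const b = Tensor.ones(2, 3);
    const sum = a.add(b);                    // plain number[][], not a Tensor
    const doubled = new Tensor(sum).mul(2);  // re-wrap before the next op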
-// ---------------------- Layers ----------------------
-export class Linear {
-  constructor(inputDim,outputDim){
-    this.W=randomMatrix(inputDim,outputDim);
-    this.b=Array(outputDim).fill(0);
-    this.gradW=zeros(inputDim,outputDim);
-    this.gradb=Array(outputDim).fill(0);
-    this.x=null;
-  }
-
-  forward(x){
-    this.x=x;
-    const out=dot(x,this.W);
-    return out.map((row,i)=>row.map((v,j)=>v+this.b[j]));
-  }
-
-  backward(grad){
-    for(let i=0;i<this.W.length;i++) for(let j=0;j<this.W[0].length;j++)
-      this.gradW[i][j]=this.x.reduce((sum,row,k)=>sum+row[i]*grad[k][j],0);
-    for(let j=0;j<this.b.length;j++)
-      this.gradb[j]=grad.reduce((sum,row)=>sum+row[j],0);
-
-    const gradInput=zeros(this.x.length,this.W.length);
-    for(let i=0;i<this.x.length;i++)
-      for(let j=0;j<this.W.length;j++)
-        for(let k=0;k<this.W[0].length;k++)
-          gradInput[i][j]+=grad[i][k]*this.W[j][k];
-    return gradInput;
-  }
-
-  parameters(){ return [ {param:this.W,grad:this.gradW}, {param:[this.b],grad:[this.gradb]} ]; }
-}
-
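The removed layer stores weights as [inputDim][outputDim], so forward computes x·W + b row-wise; backward returns grad·Wᵀ as the input gradient while accumulating gradW = xᵀ·grad and gradb as column sums of grad. A short shape check, assuming the class is lifted verbatim:

    const fc = new Linear(3, 2);
    const y = fc.forward([[1, 2, 3]]);   // shape [1][2]
    const gIn = fc.backward([[1, 0]]);   // shape [1][3], equals grad·Wᵀ
    // fc.gradW now holds xᵀ·grad: gradW[i][j] = x[0][i] * grad[0][j]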
-// ---------------------- Conv2D ----------------------
-export class Conv2D {
-  constructor(inC,outC,kernel,stride=1,padding=0){
-    this.inC=inC; this.outC=outC; this.kernel=kernel;
-    this.stride=stride; this.padding=padding;
-    this.W=Array(outC).fill(0).map(()=>Array(inC).fill(0).map(()=>randomMatrix(kernel,kernel)));
-    this.gradW=Array(outC).fill(0).map(()=>Array(inC).fill(0).map(()=>zeros(kernel,kernel)));
-    this.x=null;
-  }
-
-  pad2D(input,pad){
-    return input.map(channel=>{
-      const rows=channel.length+2*pad;
-      const cols=channel[0].length+2*pad;
-      const out=Array.from({length:rows},()=>Array(cols).fill(0));
-      for(let i=0;i<channel.length;i++) for(let j=0;j<channel[0].length;j++) out[i+pad][j+pad]=channel[i][j];
-      return out;
-    });
-  }
-
-  conv2DSingle(input,kernel){
-    const rows=input.length-kernel.length+1;
-    const cols=input[0].length-kernel[0].length+1;
-    const out=zeros(rows,cols);
-    for(let i=0;i<rows;i++) for(let j=0;j<cols;j++)
-      for(let ki=0;ki<kernel.length;ki++) for(let kj=0;kj<kernel[0].length;kj++)
-        out[i][j]+=input[i+ki][j+kj]*kernel[ki][kj];
-    return out;
-  }
-
-  forward(batch){
-    this.x=batch;
-    return batch.map(sample=>{
-      const channelsOut=[];
-      for(let oc=0;oc<this.outC;oc++){
-        let outChan=zeros(sample[0].length,sample[0][0].length);
-        for(let ic=0;ic<this.inC;ic++){
-          let inputChan=sample[ic];
-          if(this.padding>0) inputChan=this.pad2D([inputChan],this.padding)[0];
-          const conv=this.conv2DSingle(inputChan,this.W[oc][ic]);
-          outChan=addMatrices(outChan,conv);
-        }
-        channelsOut.push(outChan);
-      }
-      return channelsOut;
-    });
-  }
-
-  backward(grad) {
-    const batchSize = this.x.length;
-    const gradInput = this.x.map(sample => sample.map(chan => zeros(chan.length, chan[0].length)));
-    const gradW = this.W.map(oc => oc.map(ic => zeros(this.kernel,this.kernel)));
-
-    for (let b = 0; b < batchSize; b++) {
-      const xPadded = this.pad2D(this.x[b], this.padding);
-      const gradInputPadded = xPadded.map(chan => zeros(chan.length, chan[0].length));
-
-      for (let oc = 0; oc < this.outC; oc++) {
-        for (let ic = 0; ic < this.inC; ic++) {
-          const outGrad = grad[b][oc];
-          const inChan = xPadded[ic];
-
-          // Compute gradW
-          for (let i = 0; i < this.kernel; i++) {
-            for (let j = 0; j < this.kernel; j++) {
-              let sum = 0;
-              for (let y = 0; y < outGrad.length; y++) {
-                for (let x = 0; x < outGrad[0].length; x++) {
-                  const inY = y * this.stride + i;
-                  const inX = x * this.stride + j;
-                  if (inY < inChan.length && inX < inChan[0].length) {
-                    sum += inChan[inY][inX] * outGrad[y][x];
-                  }
-                }
-              }
-              gradW[oc][ic][i][j] += sum;
-            }
-          }
-
-          // Compute gradInput
-          const flippedKernel = this.W[oc][ic].map(row => [...row].reverse()).reverse();
-          for (let y = 0; y < outGrad.length; y++) {
-            for (let x = 0; x < outGrad[0].length; x++) {
-              for (let i = 0; i < this.kernel; i++) {
-                for (let j = 0; j < this.kernel; j++) {
-                  const inY = y * this.stride + i;
-                  const inX = x * this.stride + j;
-                  if (inY < gradInputPadded[ic].length && inX < gradInputPadded[ic][0].length) {
-                    gradInputPadded[ic][inY][inX] += flippedKernel[i][j] * outGrad[y][x];
-                  }
-                }
-              }
-            }
-          }
-        }
-      }
-
-      // Remove padding from gradInput
-      if (this.padding > 0) {
-        for (let ic = 0; ic < this.inC; ic++) {
-          const padded = gradInputPadded[ic];
-          const cropped = padded.slice(this.padding, padded.length - this.padding)
-            .map(row => row.slice(this.padding, row.length - this.padding));
-          gradInput[b][ic] = cropped;
-        }
-      } else {
-        for (let ic = 0; ic < this.inC; ic++) gradInput[b][ic] = gradInputPadded[ic];
-      }
-    }
-
-    this.gradW = gradW;
-    return gradInput;
-  }
-
-  parameters(){ return this.W.flatMap((w,oc)=>w.map((wc,ic)=>({param:wc,grad:this.gradW[oc][ic]}))); }
-}
-
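Three behaviors of the removed Conv2D deserve a note in the record. First, forward never consults this.stride (conv2DSingle always slides by 1), while backward multiplies indices by this.stride, so the two passes only agree when stride is 1. Second, forward sizes each output channel to the input's spatial dims and leans on addMatrices' undefined guard, so with padding=0 the valid (H-k+1)×(W-k+1) result is zero-padded back out to H×W. Third, the gradInput scatter uses a 180°-flipped kernel even though forward is a cross-correlation, whose input gradient scatters with the unflipped kernel; the computed input gradient therefore only matches the true derivative for symmetric kernels. A shape-level usage sketch, reusing the file's internal randomMatrix helper for brevity:

    const conv = new Conv2D(1, 2, 3, 1, 1);   // keep stride at 1 so forward/backward agree
    const batch = [[randomMatrix(5, 5)]];     // [batch][inC][H][W]
    const out = conv.forward(batch);          // [1][2][5][5]: padding=1 keeps spatial size
    const gIn = conv.backward(out);           // [1][1][5][5], padding cropped back off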
-// ---------------------- Sequential ----------------------
-export class Sequential {
-  constructor(layers=[]){ this.layers=layers; }
-  forward(x){ return this.layers.reduce((acc,l)=>l.forward(acc), x); }
-  backward(grad){ return this.layers.reduceRight((g,l)=>l.backward(g), grad); }
-  parameters(){ return this.layers.flatMap(l=>l.parameters?l.parameters():[]); }
-}
-
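Sequential composes forward left-to-right via reduce and backward right-to-left via reduceRight, so layer order alone defines the backprop chain; parameterless layers (activations, Dropout) are simply skipped by parameters(). A sketch using classes defined further down the deleted file:

    const net = new Sequential([new Linear(4, 8), new ReLU(), new Linear(8, 2)]);
    const out = net.forward([[1, 0, -1, 2]]);  // [1][2]
    net.backward([[1, 1]]);                    // fills gradW/gradb on both Linear layers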
-// ---------------------- Activations ----------------------
-export class ReLU{ constructor(){ this.out=null; } forward(x){ this.out=x.map(r=>r.map(v=>Math.max(0,v))); return this.out; } backward(grad){ return grad.map((r,i)=>r.map((v,j)=>v*(this.out[i][j]>0?1:0))); } }
-export class Sigmoid{ constructor(){ this.out=null; } forward(x){ const fn=v=>1/(1+Math.exp(-v)); this.out=x.map(r=>r.map(fn)); return this.out; } backward(grad){ return grad.map((r,i)=>r.map((v,j)=>v*this.out[i][j]*(1-this.out[i][j]))); } }
-export class Tanh{ constructor(){ this.out=null; } forward(x){ this.out=x.map(r=>r.map(v=>Math.tanh(v))); return this.out; } backward(grad){ return grad.map((r,i)=>r.map((v,j)=>v*(1-this.out[i][j]**2))); } }
-export class LeakyReLU{ constructor(alpha=0.01){ this.alpha=alpha; this.out=null; } forward(x){ this.out=x.map(r=>r.map(v=>v>0?v:v*this.alpha)); return this.out; } backward(grad){ return grad.map((r,i)=>r.map((v,j)=>v*(this.out[i][j]>0?1:this.alpha))); } }
-export class GELU{ constructor(){ this.out=null; } forward(x){ const fn=v=>0.5*v*(1+Math.tanh(Math.sqrt(2/Math.PI)*(v+0.044715*v**3))); this.out=x.map(r=>r.map(fn)); return this.out; } backward(grad){ return grad.map((r,i)=>r.map(v=>v*1)); } }
-
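One flag for anyone reviving this code: GELU.backward multiplies the incoming gradient by 1, an identity pass-through rather than the derivative of the tanh approximation used in forward. A corrected sketch (my reconstruction, not the package's code); note forward would also have to cache its input, since the derivative is a function of v, not of the output:

    // d/dv of 0.5*v*(1+tanh(u)), where u = sqrt(2/PI)*(v + 0.044715*v^3)
    function geluGrad(v){
      const c = Math.sqrt(2/Math.PI);
      const t = Math.tanh(c*(v + 0.044715*v**3));
      return 0.5*(1+t) + 0.5*v*(1-t*t)*c*(1 + 3*0.044715*v**2);
    }
    // hypothetical backward, assuming forward saved its input as this.xin:
    // backward(grad){ return grad.map((r,i)=>r.map((g,j)=>g*geluGrad(this.xin[i][j]))); }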
-// ---------------------- Dropout ----------------------
-export class Dropout{ constructor(p=0.5){ this.p=p; } forward(x){ return x.map(r=>r.map(v=>v*Math.random()>=this.p?v:0)); } backward(grad){ return grad.map(r=>r.map(v=>v*(1-this.p))); } }
-
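Another likely bug preserved in the deleted line: v*Math.random()>=this.p parses as (v*Math.random())>=this.p, so the keep/drop decision depends on each activation's sign and magnitude rather than on a uniform draw against p; backward also scales by (1-p) instead of replaying the forward mask. A conventional inverted-dropout sketch for comparison (my reconstruction, with a hypothetical name, not the package's code):

    class InvertedDropout {
      constructor(p=0.5){ this.p=p; this.mask=null; }
      forward(x){
        // draw the mask once: keep with probability 1-p, pre-scaled by 1/(1-p)
        this.mask = x.map(r=>r.map(()=>Math.random()>=this.p ? 1/(1-this.p) : 0));
        return x.map((r,i)=>r.map((v,j)=>v*this.mask[i][j]));
      }
      backward(grad){ return grad.map((r,i)=>r.map((g,j)=>g*this.mask[i][j])); }
    }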
-// ---------------------- Losses ----------------------
-export class MSELoss{ forward(pred,target){ this.pred=pred; this.target=target; const losses=pred.map((row,i)=>row.reduce((sum,v,j)=>sum+(v-target[i][j])**2,0)/row.length); return losses.reduce((a,b)=>a+b,0)/pred.length; } backward(){ return this.pred.map((row,i)=>row.map((v,j)=>2*(v-this.target[i][j])/row.length)); } }
-export class CrossEntropyLoss{ forward(pred,target){ this.pred=pred; this.target=target; const losses=pred.map((p,i)=>crossEntropy(softmax(p),target[i])); return losses.reduce((a,b)=>a+b,0)/pred.length; } backward(){ return this.pred.map((p,i)=>{ const s=softmax(p); return s.map((v,j)=>(v-this.target[i][j])/this.pred.length); }); } }
-
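CrossEntropyLoss takes raw logits (softmax is applied internally) with one-hot targets, and its backward is the standard (softmax − target)/batchSize, so no separate softmax layer belongs in the model. MSELoss is less tidy: forward averages over both features and batch, but backward divides only by the feature count, so its gradient is batchSize times the derivative of the reported loss. A quick check of the cross-entropy pair:

    const ce = new CrossEntropyLoss();
    const loss = ce.forward([[2, 1, 0]], [[1, 0, 0]]); // ≈ 0.408
    const g = ce.backward();                           // ≈ [[-0.335, 0.245, 0.090]]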
-// ---------------------- Optimizers ----------------------
-export class Adam{
-  constructor(params,lr=0.001,b1=0.9,b2=0.999,eps=1e-8){
-    this.params=params; this.lr=lr; this.beta1=b1; this.beta2=b2; this.eps=eps;
-    this.m=params.map(p=>zeros(p.param.length,p.param[0].length||1));
-    this.v=params.map(p=>zeros(p.param.length,p.param[0].length||1));
-    this.t=0;
-  }
-  step(){
-    this.t++;
-    this.params.forEach((p,idx)=>{
-      for(let i=0;i<p.param.length;i++)
-        for(let j=0;j<(p.param[0].length||1);j++){
-          const g=p.grad[i][j];
-          this.m[idx][i][j]=this.beta1*this.m[idx][i][j]+(1-this.beta1)*g;
-          this.v[idx][i][j]=this.beta2*this.v[idx][i][j]+(1-this.beta2)*g*g;
-          const mHat=this.m[idx][i][j]/(1-Math.pow(this.beta1,this.t));
-          const vHat=this.v[idx][i][j]/(1-Math.pow(this.beta2,this.t));
-          p.param[i][j]-=this.lr*mHat/(Math.sqrt(vHat)+this.eps);
-        }
-    });
-  }
-}
-
-export class SGD{ constructor(params,lr=0.01){ this.params=params; this.lr=lr; } step(){ this.params.forEach(p=>{ for(let i=0;i<p.param.length;i++) for(let j=0;j<(p.param[0].length||1);j++) p.param[i][j]-=this.lr*p.grad[i][j]; }); } }
-
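Taken together, the deleted pieces formed a complete training loop: parameters() hands the optimizer {param, grad} pairs (which is why Linear wraps its bias as a 1×n matrix, so Adam and SGD can index it two-dimensionally), and each iteration is forward → loss → backward → step. A minimal sketch assuming only the file's own exports:

    const model = new Sequential([new Linear(2, 8), new Tanh(), new Linear(8, 1)]);
    const loss = new MSELoss();
    const opt = new Adam(model.parameters(), 0.01);
    const X = [[0, 0], [0, 1], [1, 0], [1, 1]];
    const Y = [[0], [1], [1], [0]];
    for (let epoch = 0; epoch < 500; epoch++) {
      const pred = model.forward(X);
      const l = loss.forward(pred, Y);  // scalar loss for logging
      model.backward(loss.backward());  // fills every layer's grads
      opt.step();                       // Adam update with bias correction
    }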
-// ---------------------- Model Save/Load ----------------------
-export function saveModel(model){
-  if(!(model instanceof Sequential)) throw new Error("saveModel supports only Sequential");
-  const weights=model.layers.map(layer=>({weights:layer.W||null,biases:layer.b||null}));
-  return JSON.stringify(weights);
-}
-
-export function loadModel(model,json){
-  if(!(model instanceof Sequential)) throw new Error("loadModel supports only Sequential");
-  const weights=JSON.parse(json);
-  model.layers.forEach((layer,i)=>{
-    if(layer.W && weights[i].weights) layer.W=weights[i].weights;
-    if(layer.b && weights[i].biases) layer.b=weights[i].biases;
-  });
-}
-
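saveModel/loadModel serialize by layer index with no shape validation, so the load target must be constructed with the same architecture before restoring. A round-trip sketch, continuing the training example above:

    const json = saveModel(model);  // JSON array of {weights, biases} per layer
    const clone = new Sequential([new Linear(2, 8), new Tanh(), new Linear(8, 1)]);
    loadModel(clone, json);         // copies W and b into matching indices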
-// ---------------------- Advanced Utils ----------------------
-export function flattenBatch(batch){ return batch.flat(2); }
-export function stack(tensors){ return tensors.map(t=>t.data); }
-export function eye(n){ return Array.from({length:n},(_,i)=>Array.from({length:n},(_,j)=>i===j?1:0)); }
-export function concat(a,b,axis=0){ /* concat along axis */ if(axis===0) return [...a,...b]; if(axis===1) return a.map((row,i)=>[...row,...b[i]]); }
-export function reshape(tensor, rows, cols) {
-  let flat = tensor.data.flat(); // flatten first
-  if(flat.length < rows*cols) throw new Error("reshape size mismatch");
-  const out = Array.from({length: rows}, (_, i) =>
-    flat.slice(i*cols, i*cols + cols)
-  );
-  return out;
-}
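A last quirk worth recording before the file disappears from the package: reshape only throws when the flattened data is too small, so a larger source silently truncates, and concat returns undefined for any axis other than 0 or 1. The truncation in action:

    const t = Tensor.ones(2, 3);  // 6 values
    reshape(t, 2, 2);             // [[1,1],[1,1]] — the extra 2 values are dropped, no error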