catniff 0.5.7 → 0.5.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/nn.d.ts CHANGED
@@ -57,7 +57,7 @@ declare class LayerNorm {
     constructor(normalizedShape: number | number[], eps?: number, elementwiseAffine?: boolean, bias?: boolean, device?: string);
     forward(input: Tensor | TensorValue): Tensor;
 }
-interface StateDict {
+export interface StateDict {
     [key: string]: any;
 }
 export declare const nn: {
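
With StateDict now exported, consumers can type checkpoint objects explicitly instead of falling back to any. A minimal sketch, assuming the type is imported from the compiled nn module (the exact re-export path from the package root is not shown in this diff):

```ts
// Hypothetical usage; the import path is an assumption based on the
// dist layout above, not a documented entry point.
import type { StateDict } from "catniff/dist/nn";

// StateDict is an open record ([key: string]: any), so any serialized
// parameter mapping type-checks against it.
const checkpoint: StateDict = {
    "linear.weight": [[0.1, -0.2], [0.3, 0.4]],
    "linear.bias": [0.0, 0.0],
};
```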
package/dist/optim.d.ts CHANGED
@@ -1,4 +1,9 @@
 import { Tensor } from "./core";
+declare abstract class BaseOptimizer {
+    params: Tensor[];
+    constructor(params: Tensor[]);
+    zeroGrad(): void;
+}
 export interface SGDOptions {
     lr?: number;
     momentum?: number;
@@ -6,8 +11,7 @@ export interface SGDOptions {
     weightDecay?: number;
     nesterov?: boolean;
 }
-declare class SGD {
-    params: Tensor[];
+declare class SGD extends BaseOptimizer {
     momentumBuffers: Map<Tensor, Tensor>;
     lr: number;
     momentum: number;
@@ -23,8 +27,7 @@ export interface AdamOptions {
     eps?: number;
     weightDecay?: number;
 }
-declare class Adam {
-    params: Tensor[];
+declare class Adam extends BaseOptimizer {
     momentumBuffers: Map<Tensor, Tensor>;
     velocityBuffers: Map<Tensor, Tensor>;
     stepCount: number;
@@ -36,6 +39,7 @@ declare class Adam {
     step(): void;
 }
 export declare class Optim {
+    static BaseOptimizer: typeof BaseOptimizer;
     static SGD: typeof SGD;
     static Adam: typeof Adam;
 }
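
Exposing BaseOptimizer through Optim means a user-defined optimizer can inherit the shared params bookkeeping and zeroGrad() instead of reimplementing them. A minimal sketch, assuming Tensor and Optim are re-exported from the package root:

```ts
import { Tensor, Optim } from "catniff"; // assumed root re-exports

// A do-nothing optimizer: everything except step() is inherited.
class NoopOptimizer extends Optim.BaseOptimizer {
    constructor(params: Tensor[]) {
        super(params); // BaseOptimizer stores the parameter list
    }
    step(): void {
        // a real optimizer would update this.params from their .grad here
    }
}

declare const params: Tensor[]; // obtained from a model; construction not shown
const opt = new NoopOptimizer(params);
opt.zeroGrad(); // inherited: resets every param.grad to a zeros tensor
```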
package/dist/optim.js CHANGED
@@ -2,8 +2,19 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Optim = void 0;
 const core_1 = require("./core");
-class SGD {
+class BaseOptimizer {
     params;
+    constructor(params) {
+        this.params = params;
+    }
+    zeroGrad() {
+        for (let index = 0; index < this.params.length; index++) {
+            const param = this.params[index];
+            param.grad = core_1.Tensor.zerosLike(param);
+        }
+    }
+}
+class SGD extends BaseOptimizer {
     momentumBuffers = new Map();
     lr;
     momentum;
@@ -11,7 +22,7 @@ class SGD {
     weightDecay;
     nesterov;
     constructor(params, options) {
-        this.params = params;
+        super(params);
         this.lr = options?.lr || 0.001;
         this.momentum = options?.momentum || 0;
         this.dampening = options?.dampening || 0;
@@ -55,8 +66,7 @@ class SGD {
         }
     }
 }
-class Adam {
-    params;
+class Adam extends BaseOptimizer {
     momentumBuffers = new Map(); // First moment (m_t)
     velocityBuffers = new Map(); // Second moment (v_t)
     stepCount = 0;
@@ -65,7 +75,7 @@ class Adam {
     eps;
     weightDecay;
     constructor(params, options) {
-        this.params = params;
+        super(params);
         this.lr = options?.lr || 0.001;
         this.betas = options?.betas || [0.9, 0.999];
         this.eps = options?.eps || 1e-8;
@@ -117,6 +127,7 @@ class Adam {
     }
 }
 class Optim {
+    static BaseOptimizer = BaseOptimizer;
     static SGD = SGD;
     static Adam = Adam;
 }
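
The runtime mirrors the declarations: zeroGrad() now lives in one place, and per the loop above it replaces each parameter's grad with Tensor.zerosLike(param), so SGD and Adam are guaranteed to reset gradients identically. A short sketch of the resulting training-loop contract, using only calls visible in this diff plus an assumed way of obtaining parameters:

```ts
import { Tensor, Optim } from "catniff"; // assumed root re-exports

declare const params: Tensor[]; // model parameters; construction not shown here

const sgd = new Optim.SGD(params, { lr: 0.01, momentum: 0.9 });

// ...compute a loss and backpropagate (API not shown in this diff)...

sgd.step();     // apply the SGD update (unchanged in 0.5.8)
sgd.zeroGrad(); // shared base behavior: param.grad = Tensor.zerosLike(param)
```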
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "catniff",
-  "version": "0.5.7",
+  "version": "0.5.8",
   "description": "A small Torch-like deep learning framework for Javascript",
   "main": "index.js",
   "scripts": {