catniff 0.5.7 → 0.5.9

This diff reflects the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
package/dist/backend.d.ts CHANGED
@@ -1,4 +1,5 @@
 import { Tensor } from "./core";
 export interface Backend {
+    create(tensor: Tensor): void;
     transfer(tensor: Tensor): Tensor;
 }
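
With this change a backend must implement both hooks: create(tensor) is the in-place path invoked through the new Tensor.to_ (see core.js below), while transfer(tensor) remains the out-of-place path used by Tensor.to. Below is a minimal sketch of a conforming backend, assuming Tensor is re-exported from the package entry point; the device name and the bodies of both methods are purely illustrative.

import { Tensor } from "catniff"; // assumed re-export of dist/core

// Illustrative backend object satisfying the updated Backend interface.
const demoBackend = {
    // Called (via Tensor.to_) when a tensor is created on, or moved in-place to, this device.
    create(tensor: Tensor): void {
        // Allocate or initialize device-side storage for `tensor` here (no-op in this sketch).
    },
    // Called by Tensor.to(device) to produce a tensor living on this device.
    transfer(tensor: Tensor): Tensor {
        // Return a device-resident tensor; this sketch just hands the input back.
        return tensor;
    },
};

// Registration goes through the public Tensor.backends map declared in core.d.ts.
Tensor.backends.set("demo-device", demoBackend);
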
package/dist/core.d.ts CHANGED
@@ -177,4 +177,5 @@ export declare class Tensor {
     replace(other: Tensor, allowShapeMismatch?: boolean): Tensor;
     static backends: Map<string, Backend>;
     to(device: string): Tensor;
+    to_(device: string): Tensor;
 }
package/dist/core.js CHANGED
@@ -21,13 +21,8 @@ class Tensor {
         this.gradFn = options.gradFn || (() => { });
         this.children = options.children || [];
         this.device = options.device || "cpu";
-        // Move tensor to device
-        if (this.device !== "cpu") {
-            const backend = Tensor.backends.get(this.device);
-            if (backend && backend.transfer) {
-                backend.transfer(this);
-            }
-        }
+        // Move to device in-place
+        this.to_(this.device);
     }
     // Utility to flatten an nD array to be 1D
     static flatten(tensor) {
@@ -1712,9 +1707,21 @@ class Tensor {
     static backends = new Map();
     // Op to transfer tensor to another device
     to(device) {
+        if (device === "cpu")
+            return this;
         const backend = Tensor.backends.get(device);
         if (backend && backend.transfer) {
-            backend.transfer(this);
+            return backend.transfer(this);
+        }
+        throw new Error(`No device found to transfer tensor to or a handler is not implemented for device.`);
+    }
+    // Op to transfer tensor to another device in-place
+    to_(device) {
+        if (device === "cpu")
+            return this;
+        const backend = Tensor.backends.get(this.device);
+        if (backend && backend.create) {
+            backend.create(this);
             return this;
         }
         throw new Error(`No device found to transfer tensor to or a handler is not implemented for device.`);
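
The practical upshot: to(device) is now out-of-place (it returns whatever the backend's transfer() produces, and it throws when no backend or handler is registered), while the new to_(device) is the in-place path the constructor uses, delegating to the backend's create() hook. Note that to_ resolves the backend from this.device rather than from its argument, so it is effectively exercised through the constructor, where this.device has already been set to the target device. A hedged usage sketch follows, assuming the constructor accepts a nested-array value plus an optional options object, and assuming the "demo-device" backend registered in the sketch above.

import { Tensor } from "catniff"; // assumed re-export of dist/core

const a = new Tensor([1, 2, 3]);          // device defaults to "cpu"; to_("cpu") is a no-op
const b = a.to("demo-device");            // out-of-place: returns demoBackend.transfer(a)
const c = a.to("cpu");                    // "cpu" short-circuits and returns a itself

// Constructing directly on a device runs to_ inside the constructor,
// which in turn calls demoBackend.create(d).
const d = new Tensor([1, 2, 3], { device: "demo-device" });

// a.to("unknown-device") would throw: no backend is registered under that name.
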
package/dist/nn.d.ts CHANGED
@@ -57,7 +57,7 @@ declare class LayerNorm {
     constructor(normalizedShape: number | number[], eps?: number, elementwiseAffine?: boolean, bias?: boolean, device?: string);
     forward(input: Tensor | TensorValue): Tensor;
 }
-interface StateDict {
+export interface StateDict {
     [key: string]: any;
 }
 export declare const nn: {
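
Exporting StateDict lets downstream code type serialized module state explicitly instead of falling back to any. A small sketch; the import path and the key names are illustrative only.

import type { StateDict } from "catniff/dist/nn"; // illustrative path

// StateDict is an open index signature ([key: string]: any), so any
// string-keyed mapping of parameter names to values type-checks.
const checkpoint: StateDict = {
    "linear.weight": [[0.1, 0.2], [0.3, 0.4]],
    "linear.bias": [0.0, 0.0],
};
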
package/dist/optim.d.ts CHANGED
@@ -1,4 +1,9 @@
 import { Tensor } from "./core";
+declare abstract class BaseOptimizer {
+    params: Tensor[];
+    constructor(params: Tensor[]);
+    zeroGrad(): void;
+}
 export interface SGDOptions {
     lr?: number;
     momentum?: number;
@@ -6,8 +11,7 @@ export interface SGDOptions {
     weightDecay?: number;
     nesterov?: boolean;
 }
-declare class SGD {
-    params: Tensor[];
+declare class SGD extends BaseOptimizer {
     momentumBuffers: Map<Tensor, Tensor>;
     lr: number;
     momentum: number;
@@ -23,8 +27,7 @@ export interface AdamOptions {
     eps?: number;
     weightDecay?: number;
 }
-declare class Adam {
-    params: Tensor[];
+declare class Adam extends BaseOptimizer {
     momentumBuffers: Map<Tensor, Tensor>;
     velocityBuffers: Map<Tensor, Tensor>;
     stepCount: number;
@@ -36,6 +39,7 @@ declare class Adam {
     step(): void;
 }
 export declare class Optim {
+    static BaseOptimizer: typeof BaseOptimizer;
     static SGD: typeof SGD;
     static Adam: typeof Adam;
 }
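
Exposing BaseOptimizer through Optim makes it possible to write custom optimizers that inherit the params bookkeeping and zeroGrad(). The following is a sketch under the assumption that Tensor and Optim are re-exported from the package entry point; the update rule is deliberately simplified to plain gradient descent, and the sub/mul tensor ops it uses are assumed rather than taken from this diff.

import { Tensor, Optim } from "catniff"; // assumed re-exports

// Hypothetical optimizer built on the newly exposed abstract base class.
class PlainGD extends Optim.BaseOptimizer {
    lr: number;

    constructor(params: Tensor[], lr = 0.01) {
        super(params); // BaseOptimizer stores params and provides zeroGrad()
        this.lr = lr;
    }

    step(): void {
        for (const param of this.params) {
            if (!param.grad) continue;
            // Illustrative update (assumes sub/mul exist); the built-in SGD/Adam
            // additionally handle momentum, weight decay, and so on.
            param.replace(param.sub(param.grad.mul(this.lr)));
        }
    }
}
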
package/dist/optim.js CHANGED
@@ -2,8 +2,19 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Optim = void 0;
 const core_1 = require("./core");
-class SGD {
+class BaseOptimizer {
     params;
+    constructor(params) {
+        this.params = params;
+    }
+    zeroGrad() {
+        for (let index = 0; index < this.params.length; index++) {
+            const param = this.params[index];
+            param.grad = core_1.Tensor.zerosLike(param);
+        }
+    }
+}
+class SGD extends BaseOptimizer {
     momentumBuffers = new Map();
     lr;
     momentum;
@@ -11,7 +22,7 @@ class SGD {
     weightDecay;
     nesterov;
     constructor(params, options) {
-        this.params = params;
+        super(params);
         this.lr = options?.lr || 0.001;
         this.momentum = options?.momentum || 0;
         this.dampening = options?.dampening || 0;
@@ -55,8 +66,7 @@ class SGD {
         }
     }
 }
-class Adam {
-    params;
+class Adam extends BaseOptimizer {
     momentumBuffers = new Map(); // First moment (m_t)
     velocityBuffers = new Map(); // Second moment (v_t)
     stepCount = 0;
@@ -65,7 +75,7 @@ class Adam {
     eps;
     weightDecay;
     constructor(params, options) {
-        this.params = params;
+        super(params);
         this.lr = options?.lr || 0.001;
         this.betas = options?.betas || [0.9, 0.999];
         this.eps = options?.eps || 1e-8;
@@ -117,6 +127,7 @@ class Adam {
     }
 }
 class Optim {
+    static BaseOptimizer = BaseOptimizer;
     static SGD = SGD;
     static Adam = Adam;
 }
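
With zeroGrad() hoisted into the shared base class, both built-in optimizers reset gradients identically: each param gets a fresh Tensor.zerosLike(param) as its grad. A hedged fragment showing where that fits in a training step; the forward and backward passes that would populate the gradients are elided since they are not part of this diff, and the entry-point re-exports are assumed.

import { Tensor, Optim } from "catniff"; // assumed re-exports

const w = new Tensor([[0.5, -0.25], [0.1, 0.0]]);             // hypothetical parameter
const opt = new Optim.SGD([w], { lr: 0.01, momentum: 0.9 });  // lr/momentum come from SGDOptions

opt.zeroGrad();   // inherited from BaseOptimizer: sets w.grad = Tensor.zerosLike(w)
// ...forward and backward passes would accumulate into w.grad here...
opt.step();       // SGD update driven by w.grad
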
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "catniff",
-  "version": "0.5.7",
+  "version": "0.5.9",
   "description": "A small Torch-like deep learning framework for Javascript",
   "main": "index.js",
   "scripts": {