scalar-autograd 0.1.6 → 0.1.8

@@ -16,6 +16,7 @@ export declare abstract class Optimizer {
  * Performs a parameter update step.
  */
  abstract step(): void;
+ abstract resetStateFor(trainable: Value): void;
  /**
  * Sets grads of all trainables to zero.
  */
@@ -53,6 +54,7 @@ export declare class SGD extends Optimizer {
  * Performs a parameter update using standard SGD.
  */
  step(): void;
+ resetStateFor(trainable: Value): void;
  }
  /**
  * Adam and AdamW optimizer parameters.
@@ -88,6 +90,7 @@ export declare class Adam extends Optimizer {
  * Performs a parameter update using Adam optimization.
  */
  step(): void;
+ resetStateFor(trainable: Value): void;
  }
  /**
  * AdamW optimizer, supports decoupled weight decay and gradient clipping (same options as Adam).
@@ -111,4 +114,5 @@ export declare class AdamW extends Optimizer {
  * Performs a parameter update using AdamW optimization (decoupled weight decay).
  */
  step(): void;
+ resetStateFor(trainable: Value): void;
  }
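Every Optimizer subclass must now implement the new abstract hook. For context, a minimal sketch of what a user-defined subclass might look like; the MomentumSGD name, the params/lr fields, the base-class constructor call, and the import paths are assumptions for illustration, not part of the package:

import { Value } from "scalar-autograd";                     // entry point assumed
import { Optimizer } from "scalar-autograd/dist/Optimizers"; // path assumed

class MomentumSGD extends Optimizer {
  // Per-parameter state, keyed by the trainable Value (same pattern Adam uses for m/v).
  private velocity = new Map<Value, number>();

  constructor(private params: Value[], private lr = 0.01, private momentum = 0.9) {
    super(params); // base-class constructor signature assumed
  }

  step(): void {
    for (const p of this.params) {
      const v = this.momentum * (this.velocity.get(p) ?? 0) - this.lr * p.grad;
      this.velocity.set(p, v);
      p.data += v;
    }
  }

  // The new 0.1.8 hook: forget any per-parameter state for this trainable.
  resetStateFor(trainable: Value): void {
    this.velocity.delete(trainable);
  }
}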
@@ -64,6 +64,8 @@ class SGD extends Optimizer {
  v.data -= this.learningRate * v.grad;
  }
  }
+ resetStateFor(trainable) {
+ }
  }
  exports.SGD = SGD;
  /**
@@ -119,6 +121,10 @@ class Adam extends Optimizer {
  this.v.set(v, vVal);
  }
  }
+ resetStateFor(trainable) {
+ this.m.set(trainable, 0);
+ this.v.set(trainable, 0);
+ }
  }
  exports.Adam = Adam;
  /**
@@ -173,5 +179,9 @@ class AdamW extends Optimizer {
  this.v.set(v, vVal);
  }
  }
+ resetStateFor(trainable) {
+ this.m.set(trainable, 0);
+ this.v.set(trainable, 0);
+ }
  }
  exports.AdamW = AdamW;
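In the shipped implementations, SGD keeps no per-parameter state, so its resetStateFor is a no-op, while Adam and AdamW zero the stored first- and second-moment entries (this.m / this.v) for the given Value. A hedged usage sketch of when that matters; the import paths, the Adam constructor arguments, and the re-initialization itself are illustrative:

import { Value } from "scalar-autograd";                // entry point assumed
import { Adam } from "scalar-autograd/dist/Optimizers"; // path assumed

const w = new Value(0.3, "w", true);
const opt = new Adam([w]); // constructor arguments assumed

// Suppose the weight is re-initialized mid-training (e.g. after pruning and
// regrowing it); its old Adam moments no longer describe anything real.
w.data = 0.1 * (Math.random() - 0.5);
opt.resetStateFor(w); // the stored m and v for `w` go back to 0
// The next opt.step() then treats `w` like a freshly added parameter.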
package/dist/V.d.ts CHANGED
Binary file
package/dist/V.js CHANGED
Binary file
package/dist/Value.d.ts CHANGED
@@ -200,6 +200,11 @@ export declare class Value {
  * @returns New Value which is the negation.
  */
  neg(): Value;
+ /**
+ * Returns sign(this).
+ * @returns New Value with sign.
+ */
+ sign(): Value;
  /**
  * Returns the sum of the given Values.
  * @param vals Array of Value objects
package/dist/Value.js CHANGED
@@ -289,6 +289,13 @@ class Value {
  neg() {
  return ValueArithmetic_1.ValueArithmetic.neg(this);
  }
+ /**
+ * Returns sign(this).
+ * @returns New Value with sign.
+ */
+ sign() {
+ return ValueArithmetic_1.ValueArithmetic.sign(this);
+ }
  /**
  * Returns the sum of the given Values.
  * @param vals Array of Value objects
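Value.sign() simply forwards to ValueArithmetic.sign, shown further down in this diff. A short usage sketch, assuming the Value class is the package's main export as package.json suggests:

import { Value } from "scalar-autograd"; // "main" points at dist/Value.js

const x = new Value(-3.2, "x", true); // (data, label, requiresGrad), as in the tests below
const s = x.sign();

console.log(s.data); // -1
s.backward();
console.log(x.grad); // 0 -- sign() is treated as locally constant everywhere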
@@ -230,6 +230,15 @@ describe('Value unary and binary operators: trigs, relu, abs, exp/log, min/max',
  it('numerical gradient: log', () => testUnaryGrad('log', x => x.log(), x => 1 / x, 1.5));
  it('numerical gradient: tanh', () => testUnaryGrad('tanh', x => x.tanh(), x => 1 - Math.tanh(x) ** 2, 0.9));
  it('numerical gradient: sigmoid', () => testUnaryGrad('sigmoid', x => x.sigmoid(), x => { const s = 1 / (1 + Math.exp(-x)); return s * (1 - s); }, 0.7));
+ it('numerical gradient: sign', () => testUnaryGrad('sign', x => x.sign(), x => 0, 2.0));
+ it('numerical gradient: sign negative', () => testUnaryGrad('sign', x => x.sign(), x => 0, -2.0));
+ it('gradient of sign(0) is 0', () => {
+ const x = new Value_1.Value(0.0, "x", true);
+ const y = x.sign();
+ expect(y.data).toBe(0); // sign(0) should be 0
+ y.backward();
+ expect(x.grad).toBe(0); // Analytical gradient for sign(0) is implemented as 0
+ });
  // Numerical vs analytic gradient checks for binary operators
  it('numerical gradient: add', () => testBinaryGrad('add', (a, b) => a.add(b), (a, b) => 1, (a, b) => 1, 1.3, -2.1));
  it('numerical gradient: sub', () => testBinaryGrad('sub', (a, b) => a.sub(b), (a, b) => 1, (a, b) => -1, 5.2, -1.2));
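The testUnaryGrad helper itself is not part of this diff, but the checks above follow the usual central-difference pattern, roughly:

// Sketch of a central-difference gradient check (not the package's actual helper).
function numericGrad(f: (x: number) => number, x: number, eps = 1e-6): number {
  return (f(x + eps) - f(x - eps)) / (2 * eps);
}

// Away from 0 the finite difference for sign() is exactly 0, so it agrees
// with the analytic derivative the library implements:
numericGrad(Math.sign, 2.0);  // 0
numericGrad(Math.sign, -2.0); // 0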
@@ -23,4 +23,5 @@ export declare class ValueArithmetic {
  static sum(vals: Value[]): Value;
  static mean(vals: Value[]): Value;
  static neg(a: Value): Value;
+ static sign(a: Value): Value;
  }
@@ -176,5 +176,15 @@ class ValueArithmetic {
  a.grad -= out.grad;
  }, `(-${a.label})`);
  }
+ static sign(a) {
+ const s = Math.sign(a.data);
+ return Value_1.Value.make(s, a, null, (out) => () => {
+ // The derivative of sign(x) is 0 for x != 0.
+ // At x = 0, the derivative is undefined (Dirac delta), but for practical purposes in ML,
+ // we can define it as 0.
+ if (a.requiresGrad)
+ a.grad += 0 * out.grad;
+ }, `sign(${a.label})`);
+ }
  }
  exports.ValueArithmetic = ValueArithmetic;
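One practical reason to expose sign(): for x != 0, d|x|/dx = sign(x), so it shows up in hand-written L1-style expressions. A sketch, assuming the library also provides a mul() method, which is not shown in this diff:

import { Value } from "scalar-autograd"; // entry point assumed

const w = new Value(-0.8, "w", true);
const l1 = w.mul(w.sign()); // |w| rebuilt as w * sign(w); mul() is assumed
l1.backward();
console.log(l1.data); // 0.8
console.log(w.grad);  // -1, i.e. sign(w); sign() itself contributes zero gradient via the chain rule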
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "scalar-autograd",
- "version": "0.1.6",
+ "version": "0.1.8",
  "description": "Scalar-based reverse-mode automatic differentiation in TypeScript.",
  "main": "dist/Value.js",
  "types": "dist/Value.d.ts",