scalar-autograd 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,245 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const Value_1 = require("./Value");
4
/**
 * Central-difference approximation of df/dx at x0.
 * @param {(x: number) => number} f - Scalar function to differentiate.
 * @param {number} x0 - Point at which to estimate the derivative.
 * @param {number} [eps=1e-6] - Half-width of the difference stencil.
 * @returns {number} (f(x0 + eps) - f(x0 - eps)) / (2 * eps).
 */
function numericalGrad(f, x0, eps = 1e-6) {
    const above = f(x0 + eps);
    const below = f(x0 - eps);
    return (above - below) / (2 * eps);
}
7
/**
 * Verifies the analytic gradient of a unary Value operator against a
 * central-difference numerical estimate at a single point.
 * @param {string} opName - Operator name (kept for call-site readability).
 * @param {(x: any) => any} op - Operator applied to a Value node.
 * @param {(x: number) => number} dOp - Analytic derivative (kept for call-site readability).
 * @param {number} xval - Input point at which gradients are compared.
 */
function testUnaryGrad(opName, op, dOp, xval) {
    const node = new Value_1.Value(xval, "x", true);
    op(node).backward();
    const numeric = numericalGrad((v) => {
        const detached = new Value_1.Value(v, "x", false);
        return op(detached).data;
    }, xval);
    expect(node.grad).toBeCloseTo(numeric, 4);
}
15
/**
 * Verifies both partial derivatives of a binary Value operator against
 * central-difference numerical estimates at a single point (aval, bval).
 * @param {string} opName - Operator name (kept for call-site readability).
 * @param {(a: any, b: any) => any} op - Operator applied to two Value nodes.
 * @param {(a: number, b: number) => number} dOpA - Analytic d/da (kept for call-site readability).
 * @param {(a: number, b: number) => number} dOpB - Analytic d/db (kept for call-site readability).
 * @param {number} aval - First input point.
 * @param {number} bval - Second input point.
 */
function testBinaryGrad(opName, op, dOpA, dOpB, aval, bval) {
    const left = new Value_1.Value(aval, "a", true);
    const right = new Value_1.Value(bval, "b", true);
    op(left, right).backward();
    const detached = (v, label) => new Value_1.Value(v, label, false);
    const numericA = numericalGrad((v) => op(detached(v, "a"), detached(bval, "b")).data, aval);
    const numericB = numericalGrad((v) => op(detached(aval, "a"), detached(v, "b")).data, bval);
    expect(left.grad).toBeCloseTo(numericA, 4);
    expect(right.grad).toBeCloseTo(numericB, 4);
}
27
// End-to-end tests for the Value autograd system: forward pass, backward
// pass, operator coverage, requiresGrad filtering, and withNoGrad behavior.
describe('Value autograd system', () => {
    it('runs the forward and backward pass example', () => {
        const a = new Value_1.Value(2, 'a', true);
        const b = new Value_1.Value(-3, 'b', true);
        const c = new Value_1.Value(10, 'c', true);
        const e = a.mul(b); // e = a * b
        const d = e.add(c); // d = e + c
        const f = d.tanh(); // f = tanh(d)
        f.backward();
        // Backward must not mutate leaf data.
        expect(Number(a.data)).toBe(2);
        expect(Number(b.data)).toBe(-3);
        expect(Number(c.data)).toBe(10);
        // toString embeds the composed label, e.g. tanh((...)).
        expect(f.toString()).toMatch(/Value\(data=.*?, grad=.*?, label=tanh\(\(.+\)\)\)/);
        expect(Number.isFinite(a.grad)).toBe(true);
        expect(Number.isFinite(b.grad)).toBe(true);
        expect(Number.isFinite(c.grad)).toBe(true);
    });
    describe('Value new operators: powValue, mod, cmp, softplus, floor/ceil/round, square/cube, reciprocal, clamp, sum, mean', () => {
        it('powValue matches number math and gradients', () => {
            const a = new Value_1.Value(2, 'a', true);
            const b = new Value_1.Value(3, 'b', true);
            const c = a.powValue(b);
            c.backward();
            // da = b * a^(b-1); db = log(a) * a^b
            expect(c.data).toBeCloseTo(8);
            expect(a.grad).toBeCloseTo(3 * (2 ** 2));
            expect(b.grad).toBeCloseTo(Math.log(2) * 8);
        });
        it('mod computes values modulo', () => {
            const a = new Value_1.Value(7);
            const b = new Value_1.Value(3);
            expect(a.mod(b).data).toBeCloseTo(1);
        });
        it('cmp functions eq/neq/gt/lt/gte/lte match JS', () => {
            const a = new Value_1.Value(5);
            const b = new Value_1.Value(7);
            // Comparisons yield 1/0 Values mirroring the JS comparison operators.
            expect(a.eq(b).data).toBe(0);
            expect(b.eq(b).data).toBe(1);
            expect(a.neq(b).data).toBe(1);
            expect(b.neq(b).data).toBe(0);
            expect(a.gt(b).data).toBe(0);
            expect(b.gt(a).data).toBe(1);
            expect(a.lt(b).data).toBe(1);
            expect(b.lt(a).data).toBe(0);
            expect(a.gte(b).data).toBe(0);
            expect(b.gte(a).data).toBe(1);
            expect(a.lte(b).data).toBe(1);
            expect(b.lte(a).data).toBe(0);
        });
        it('softplus and its gradient', () => {
            const x = new Value_1.Value(0.5, 'x', true);
            const y = x.softplus();
            y.backward();
            expect(y.data).toBeCloseTo(Math.log(1 + Math.exp(0.5)), 5);
            // d softplus / dx = sigmoid(x).
            expect(x.grad).toBeCloseTo(1 / (1 + Math.exp(-0.5)), 5);
        });
        it('floor, ceil and round logic', () => {
            const x = new Value_1.Value(-2.7);
            expect(x.floor().data).toBe(-3);
            expect(x.ceil().data).toBe(-2);
            expect(new Value_1.Value(1.4).round().data).toBe(1);
            expect(new Value_1.Value(1.6).round().data).toBe(2);
        });
        it('square, cube, reciprocal logic', () => {
            const x = new Value_1.Value(3, 'x', true);
            const sq = x.square();
            const cu = x.cube();
            const rec = x.reciprocal();
            sq.backward();
            expect(sq.data).toBe(9);
            expect(x.grad).toBe(6);
            x.grad = 0; // reset between backward passes so gradients don't accumulate
            cu.backward();
            expect(cu.data).toBe(27);
            expect(x.grad).toBe(27);
            x.grad = 0;
            rec.backward();
            expect(rec.data).toBeCloseTo(1 / 3);
            expect(x.grad).toBeCloseTo(-1 / 9);
        });
        it('clamp clamps value and only has gradient when in interior', () => {
            const x = new Value_1.Value(5, 'x', true);
            const c1 = x.clamp(0, 3);
            expect(c1.data).toBe(3);
            c1.backward();
            expect(x.grad).toBe(0); // clipped at the upper bound: zero gradient
            x.grad = 0;
            const c2 = x.clamp(0, 10);
            expect(c2.data).toBe(5);
            c2.backward();
            expect(x.grad).toBe(1); // interior: identity gradient
            x.grad = 0;
            const c3 = x.clamp(7, 9);
            expect(c3.data).toBe(7);
            c3.backward();
            expect(x.grad).toBe(0); // clipped at the lower bound: zero gradient
        });
        it('sum and mean logic for array inputs', () => {
            const vals = [1, 3, 5].map((n, i) => new Value_1.Value(n, 'v' + i, true));
            const s = Value_1.Value.sum(vals);
            const m = Value_1.Value.mean(vals);
            expect(s.data).toBe(9);
            expect(m.data).toBe(3);
            m.backward();
            // d mean / d v_i = 1/N for each of the three inputs.
            for (const v of vals)
                expect(v.grad).toBeCloseTo(1 / 3);
        });
    });
    it('computes gradients only for required nodes (example from user)', () => {
        const x = new Value_1.Value(2.0, "x", true);
        const y = new Value_1.Value(3.0, "y", false); // y doesn't require gradients
        const z = x.mul(y).add(x.pow(2));
        z.backward();
        // dz/dx = y + 2x = 3 + 4 = 7; y stays at 0 because it opted out.
        expect(Number(x.grad)).toBeCloseTo(7.0);
        expect(Number(y.grad)).toBeCloseTo(0.0);
        expect(x.toString()).toMatch(/Value\(data=2.0000, grad=7.0000, label=x\)/);
        expect(y.toString()).toMatch(/Value\(data=3.0000, grad=0.0000, label=y\)/);
    });
    it('computes gradients for add operation', () => {
        const a = new Value_1.Value(1.5, 'a', true);
        const b = new Value_1.Value(-0.7, 'b', true);
        const c = a.add(b);
        c.backward();
        // dc/da = 1, dc/db = 1
        expect(a.grad).toBeCloseTo(1.0);
        expect(b.grad).toBeCloseTo(1.0);
    });
    it('computes gradients for mul operation', () => {
        const a = new Value_1.Value(2, 'a', true);
        const b = new Value_1.Value(3, 'b', true);
        const c = a.mul(b);
        c.backward();
        // dc/da = b, dc/db = a
        expect(a.grad).toBeCloseTo(3.0);
        expect(b.grad).toBeCloseTo(2.0);
    });
    it('computes gradients for sub operation', () => {
        const a = new Value_1.Value(2.5, 'a', true);
        const b = new Value_1.Value(1.2, 'b', true);
        const c = a.sub(b);
        c.backward();
        // dc/da = 1, dc/db = -1
        expect(a.grad).toBeCloseTo(1.0);
        expect(b.grad).toBeCloseTo(-1.0);
    });
    it('computes gradients for div operation', () => {
        const a = new Value_1.Value(6, 'a', true);
        const b = new Value_1.Value(2, 'b', true);
        const c = a.div(b);
        c.backward();
        // dc/da = 1/b, dc/db = -a/b^2
        expect(a.grad).toBeCloseTo(0.5);
        expect(b.grad).toBeCloseTo(-1.5);
    });
    it('computes gradients for pow operation', () => {
        const a = new Value_1.Value(4, 'a', true);
        const c = a.pow(3);
        c.backward();
        // dc/da = 3*a^2 = 48
        expect(a.grad).toBeCloseTo(48.0);
    });
    it('computes gradients for tanh operation', () => {
        const a = new Value_1.Value(1, 'a', true);
        const c = a.tanh();
        c.backward();
        // dc/da = 1-tanh(a)^2
        const t = Math.tanh(1);
        expect(a.grad).toBeCloseTo(1 - t * t);
    });
    it('computes gradients for sigmoid operation', () => {
        const a = new Value_1.Value(0.7, 'a', true);
        const c = a.sigmoid();
        c.backward();
        // dc/da = sigmoid(a)*(1-sigmoid(a))
        const s = 1 / (1 + Math.exp(-0.7));
        expect(a.grad).toBeCloseTo(s * (1 - s));
    });
    it('does not track graph when using Value.withNoGrad', () => {
        const a = new Value_1.Value(5, 'a', true);
        const b = new Value_1.Value(7, 'b', true);
        let c = undefined;
        Value_1.Value.withNoGrad(() => {
            c = a.add(b);
        });
        expect(c).toBeDefined();
        // Inside withNoGrad no graph is recorded, so backward is a no-op.
        expect(c.requiresGrad).toBe(false);
        expect(c['prev'].length).toBe(0);
        c.backward();
        expect(a.grad).toBe(0);
        expect(b.grad).toBe(0);
    });
});
219
// Gradient checks: analytic backward pass vs central-difference numerical
// gradients, driven through the testUnaryGrad/testBinaryGrad helpers.
describe('Value unary and binary operators: trigs, relu, abs, exp/log, min/max', () => {
    // Numerical vs analytic gradient checks for unary operators
    it('numerical gradient: sin', () => testUnaryGrad('sin', x => x.sin(), x => Math.cos(x), 1.1));
    it('numerical gradient: cos', () => testUnaryGrad('cos', x => x.cos(), x => -Math.sin(x), 0.5));
    it('numerical gradient: tan', () => testUnaryGrad('tan', x => x.tan(), x => 1 / (Math.cos(x) ** 2), 0.8));
    it('numerical gradient: asin', () => testUnaryGrad('asin', x => x.asin(), x => 1 / Math.sqrt(1 - x * x), 0.25));
    it('numerical gradient: acos', () => testUnaryGrad('acos', x => x.acos(), x => -1 / Math.sqrt(1 - x * x), 0.25));
    it('numerical gradient: atan', () => testUnaryGrad('atan', x => x.atan(), x => 1 / (1 + x * x), 1.3));
    // relu/abs are checked away from their kink at 0, where the numerical
    // estimate would be unreliable.
    it('numerical gradient: relu', () => testUnaryGrad('relu', x => x.relu(), x => (x > 0 ? 1 : 0), 3.0));
    it('numerical gradient: abs', () => testUnaryGrad('abs', x => x.abs(), x => (x >= 0 ? 1 : -1), -3));
    it('numerical gradient: exp', () => testUnaryGrad('exp', x => x.exp(), x => Math.exp(x), 1.2));
    it('numerical gradient: log', () => testUnaryGrad('log', x => x.log(), x => 1 / x, 1.5));
    it('numerical gradient: tanh', () => testUnaryGrad('tanh', x => x.tanh(), x => 1 - Math.tanh(x) ** 2, 0.9));
    it('numerical gradient: sigmoid', () => testUnaryGrad('sigmoid', x => x.sigmoid(), x => { const s = 1 / (1 + Math.exp(-x)); return s * (1 - s); }, 0.7));
    // Numerical vs analytic gradient checks for binary operators
    it('numerical gradient: add', () => testBinaryGrad('add', (a, b) => a.add(b), (a, b) => 1, (a, b) => 1, 1.3, -2.1));
    it('numerical gradient: sub', () => testBinaryGrad('sub', (a, b) => a.sub(b), (a, b) => 1, (a, b) => -1, 5.2, -1.2));
    it('numerical gradient: mul', () => testBinaryGrad('mul', (a, b) => a.mul(b), (a, b) => b, (a, b) => a, 1.7, 2.5));
    it('numerical gradient: div', () => testBinaryGrad('div', (a, b) => a.div(b), (a, b) => 1 / b, (a, b) => -a / (b * b), 4.0, -2.2));
    it('numerical gradient: pow', () => {
        // pow takes a plain-number exponent, so it is checked as a unary op.
        const exp = 3.3;
        const grad = (a) => exp * Math.pow(a, exp - 1);
        testUnaryGrad('pow', x => x.pow(exp), grad, 2.0);
    });
    // min/max subgradient convention: the selected input gets 1, the other 0.
    it('numerical gradient: min', () => testBinaryGrad('min', (a, b) => a.min(b), (a, b) => a < b ? 1 : 0, (a, b) => b < a ? 1 : 0, -1.0, 0.8));
    it('numerical gradient: max', () => testBinaryGrad('max', (a, b) => a.max(b), (a, b) => a > b ? 1 : 0, (a, b) => b > a ? 1 : 0, 2.3, -4.5));
});
@@ -0,0 +1,34 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ValueActivation = void 0;
4
+ const Value_1 = require("./Value");
5
/**
 * Activation functions over Value nodes: relu, softplus, tanh, sigmoid.
 * Each op returns a new Value wired into the autograd graph; the backward
 * closure accumulates the local derivative into the input, guarded by
 * requiresGrad so detached nodes keep a gradient of 0.
 */
class ValueActivation {
    /** relu(x) = max(0, x); derivative is 1 for x > 0, else 0. */
    static relu(x) {
        const r = Math.max(0, x.data);
        return Value_1.Value.make(r, x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += (x.data > 0 ? 1 : 0) * out.grad;
        }, `relu(${x.label})`);
    }
    /** softplus(x) = log(1 + e^x); derivative is sigmoid(x). */
    static softplus(x) {
        const s = Math.log(1 + Math.exp(x.data));
        return Value_1.Value.make(s, x, null, (out) => () => {
            // Fix: requiresGrad guard added for consistency with relu/tanh/sigmoid;
            // previously softplus leaked gradient into nodes that opted out of autograd.
            if (x.requiresGrad)
                x.grad += 1 / (1 + Math.exp(-x.data)) * out.grad;
        }, `softplus(${x.label})`);
    }
    /** tanh(x); derivative is 1 - tanh(x)^2. */
    static tanh(x) {
        const t = Math.tanh(x.data);
        return Value_1.Value.make(t, x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += (1 - t ** 2) * out.grad;
        }, `tanh(${x.label})`);
    }
    /** sigmoid(x) = 1 / (1 + e^-x); derivative is s * (1 - s). */
    static sigmoid(x) {
        const s = 1 / (1 + Math.exp(-x.data));
        return Value_1.Value.make(s, x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += s * (1 - s) * out.grad;
        }, `sigmoid(${x.label})`);
    }
}
exports.ValueActivation = ValueActivation;
@@ -0,0 +1,180 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ValueArithmetic = void 0;
4
+ const Value_1 = require("./Value");
5
/**
 * Arithmetic operators for Value nodes. Each op returns a new Value wired
 * into the autograd graph; the backward closure accumulates d(out)/d(input)
 * into each input, guarded by requiresGrad so detached nodes keep grad 0.
 * Fix in this revision: powValue, mod, and clamp previously accumulated
 * gradient without the requiresGrad guard, inconsistently with every other
 * op and with the "computes gradients only for required nodes" contract.
 */
class ValueArithmetic {
    /** a + b; d/da = 1, d/db = 1. */
    static add(a, b) {
        return Value_1.Value.make(a.data + b.data, a, b, (out) => () => {
            if (a.requiresGrad)
                a.grad += 1 * out.grad;
            if (b.requiresGrad)
                b.grad += 1 * out.grad;
        }, `(${a.label}+${b.label})`);
    }
    /** sqrt(a); d/da = 1/(2*sqrt(a)). @throws for negative input. */
    static sqrt(a) {
        if (a.data < 0) {
            throw new Error(`Cannot take sqrt of negative number: ${a.data}`);
        }
        const root = Math.sqrt(a.data);
        return Value_1.Value.make(root, a, null, (out) => () => {
            if (a.requiresGrad)
                a.grad += 0.5 / root * out.grad;
        }, `sqrt(${a.label})`);
    }
    /** a * b; d/da = b, d/db = a. */
    static mul(a, b) {
        return Value_1.Value.make(a.data * b.data, a, b, (out) => () => {
            if (a.requiresGrad)
                a.grad += b.data * out.grad;
            if (b.requiresGrad)
                b.grad += a.data * out.grad;
        }, `(${a.label}*${b.label})`);
    }
    /** a - b; d/da = 1, d/db = -1. */
    static sub(a, b) {
        return Value_1.Value.make(a.data - b.data, a, b, (out) => () => {
            if (a.requiresGrad)
                a.grad += 1 * out.grad;
            if (b.requiresGrad)
                b.grad -= 1 * out.grad;
        }, `(${a.label}-${b.label})`);
    }
    /** a / b; d/da = 1/b, d/db = -a/b^2. @throws when |b| < eps. */
    static div(a, b, eps = 1e-12) {
        if (Math.abs(b.data) < eps) {
            throw new Error(`Division by zero or near-zero encountered in div: denominator=${b.data}`);
        }
        const safe = b.data;
        return Value_1.Value.make(a.data / safe, a, b, (out) => () => {
            if (a.requiresGrad)
                a.grad += (1 / safe) * out.grad;
            if (b.requiresGrad)
                b.grad -= (a.data / (safe ** 2)) * out.grad;
        }, `(${a.label}/${b.label})`);
    }
    /**
     * a^exp with a plain-number exponent; d/da = exp * a^(exp-1).
     * @throws for a non-finite exponent or a negative base with a
     *         non-integer exponent (result would be complex).
     */
    static pow(a, exp) {
        if (typeof exp !== "number" || Number.isNaN(exp) || !Number.isFinite(exp)) {
            throw new Error(`Exponent must be a finite number, got ${exp}`);
        }
        if (a.data < 0 && Math.abs(exp % 1) > 1e-12) {
            throw new Error(`Cannot raise negative base (${a.data}) to non-integer exponent (${exp})`);
        }
        const safeBase = a.data;
        return Value_1.Value.make(Math.pow(safeBase, exp), a, null, (out) => () => {
            if (a.requiresGrad)
                a.grad += exp * Math.pow(safeBase, exp - 1) * out.grad;
        }, `(${a.label}^${exp})`);
    }
    /**
     * a^b with a Value exponent; d/da = b * a^(b-1), d/db = ln(a) * a^b
     * (base clamped to eps inside the log to avoid -Infinity at a = 0).
     */
    static powValue(a, b, eps = 1e-12) {
        if (a.data < 0 && Math.abs(b.data % 1) > eps) {
            throw new Error(`Cannot raise negative base (${a.data}) to non-integer exponent (${b.data})`);
        }
        if (a.data === 0 && b.data <= 0) {
            throw new Error(`0 cannot be raised to zero or negative power: ${b.data}`);
        }
        const safeBase = a.data;
        return Value_1.Value.make(Math.pow(safeBase, b.data), a, b, (out) => () => {
            // Fix: requiresGrad guards added (previously leaked into detached nodes).
            if (a.requiresGrad)
                a.grad += b.data * Math.pow(safeBase, b.data - 1) * out.grad;
            if (b.requiresGrad)
                b.grad += Math.log(Math.max(eps, safeBase)) * Math.pow(safeBase, b.data) * out.grad;
        }, `(${a.label}^${b.label})`);
    }
    /** a % b; gradient flows to a only (pass-through). @throws for zero modulus. */
    static mod(a, b) {
        if (typeof b.data !== 'number' || b.data === 0) {
            throw new Error(`Modulo by zero encountered`);
        }
        return Value_1.Value.make(a.data % b.data, a, b, (out) => () => {
            // Fix: requiresGrad guard added (previously leaked into detached nodes).
            if (a.requiresGrad)
                a.grad += 1 * out.grad;
            // No grad to b (modulus not used in most diff cases)
        }, `(${a.label}%${b.label})`);
    }
    /** |a|; subgradient is +1 for a >= 0, -1 otherwise. */
    static abs(a) {
        const d = Math.abs(a.data);
        return Value_1.Value.make(d, a, null, (out) => () => {
            if (a.requiresGrad)
                a.grad += (a.data >= 0 ? 1 : -1) * out.grad;
        }, `abs(${a.label})`);
    }
    /** e^a; derivative is e^a. */
    static exp(a) {
        const e = Math.exp(a.data);
        return Value_1.Value.make(e, a, null, (out) => () => {
            if (a.requiresGrad)
                a.grad += e * out.grad;
        }, `exp(${a.label})`);
    }
    /** ln(a); d/da = 1/a. @throws for non-positive input. */
    static log(a, eps = 1e-12) {
        if (a.data <= 0) {
            throw new Error(`Logarithm undefined for non-positive value: ${a.data}`);
        }
        const safe = Math.max(a.data, eps);
        const l = Math.log(safe);
        return Value_1.Value.make(l, a, null, (out) => () => {
            if (a.requiresGrad)
                a.grad += (1 / safe) * out.grad;
        }, `log(${a.label})`);
    }
    /** min(a, b); gradient flows to the strictly smaller input (ties get 0). */
    static min(a, b) {
        const d = Math.min(a.data, b.data);
        return Value_1.Value.make(d, a, b, (out) => () => {
            if (a.requiresGrad)
                a.grad += (a.data < b.data ? 1 : 0) * out.grad;
            if (b.requiresGrad)
                b.grad += (b.data < a.data ? 1 : 0) * out.grad;
        }, `min(${a.label},${b.label})`);
    }
    /** max(a, b); gradient flows to the strictly larger input (ties get 0). */
    static max(a, b) {
        const d = Math.max(a.data, b.data);
        return Value_1.Value.make(d, a, b, (out) => () => {
            if (a.requiresGrad)
                a.grad += (a.data > b.data ? 1 : 0) * out.grad;
            if (b.requiresGrad)
                b.grad += (b.data > a.data ? 1 : 0) * out.grad;
        }, `max(${a.label},${b.label})`);
    }
    /** floor(a); piecewise-constant, so the gradient is zero everywhere. */
    static floor(a) {
        const fl = Math.floor(a.data);
        return Value_1.Value.make(fl, a, null, () => () => { }, `floor(${a.label})`);
    }
    /** ceil(a); piecewise-constant, so the gradient is zero everywhere. */
    static ceil(a) {
        const cl = Math.ceil(a.data);
        return Value_1.Value.make(cl, a, null, () => () => { }, `ceil(${a.label})`);
    }
    /** round(a); piecewise-constant, so the gradient is zero everywhere. */
    static round(a) {
        const rd = Math.round(a.data);
        return Value_1.Value.make(rd, a, null, () => () => { }, `round(${a.label})`);
    }
    /** a^2, delegating to pow so the gradient comes for free. */
    static square(a) {
        return ValueArithmetic.pow(a, 2);
    }
    /** a^3, delegating to pow so the gradient comes for free. */
    static cube(a) {
        return ValueArithmetic.pow(a, 3);
    }
    /** 1/a; d/da = -1/a^2. @throws when |a| < eps. */
    static reciprocal(a, eps = 1e-12) {
        if (Math.abs(a.data) < eps) {
            throw new Error(`Reciprocal of zero or near-zero detected`);
        }
        return Value_1.Value.make(1 / a.data, a, null, (out) => () => {
            if (a.requiresGrad)
                a.grad += -1 / (a.data * a.data) * out.grad;
        }, `reciprocal(${a.label})`);
    }
    /**
     * Clamps a into [min, max]; the gradient is 1 only when a lies strictly
     * inside the interval, 0 when clipped at either bound.
     */
    static clamp(a, min, max) {
        let val = Math.max(min, Math.min(a.data, max));
        return Value_1.Value.make(val, a, null, (out) => () => {
            // Fix: requiresGrad guard added (previously leaked into detached nodes).
            if (a.requiresGrad)
                a.grad += (a.data > min && a.data < max ? 1 : 0) * out.grad;
        }, `clamp(${a.label},${min},${max})`);
    }
    /** Sum of an array of Values; an empty array yields Value(0). */
    static sum(vals) {
        if (!vals.length)
            return new Value_1.Value(0);
        return vals.reduce((a, b) => a.add(b));
    }
    /** Arithmetic mean of an array of Values; an empty array yields Value(0). */
    static mean(vals) {
        if (!vals.length)
            return new Value_1.Value(0);
        return ValueArithmetic.sum(vals).div(vals.length);
    }
    /** -a; d/da = -1. */
    static neg(a) {
        return Value_1.Value.make(-a.data, a, null, (out) => () => {
            if (a.requiresGrad)
                a.grad -= out.grad;
        }, `(-${a.label})`);
    }
}
exports.ValueArithmetic = ValueArithmetic;
@@ -0,0 +1,47 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ValueComparison = void 0;
4
+ const Value_1 = require("./Value");
5
/**
 * Comparison and branching operators for Value nodes. Comparisons yield
 * 1/0 Values mirroring the JS operators; they are discrete, so they
 * propagate no gradient. ifThenElse routes the upstream gradient to the
 * branch that was taken at forward time.
 */
class ValueComparison {
    /** a == b as 1/0; no gradient (discrete). */
    static eq(a, b) {
        return Value_1.Value.make(a.data === b.data ? 1 : 0, a, b, (out) => () => {
            // No gradient - discrete operation
        }, `(${a.label}==${b.label})`);
    }
    /**
     * Selects thenVal when cond is truthy, otherwise elseVal; the backward
     * pass forwards out.grad unchanged to the taken branch only. Note that
     * only the taken branch is recorded as a graph child.
     */
    static ifThenElse(cond, thenVal, elseVal) {
        return Value_1.Value.make(cond.data ? thenVal.data : elseVal.data, cond, cond.data ? thenVal : elseVal, (out) => () => {
            // Fix: requiresGrad guards added for consistency with the arithmetic
            // ops; previously gradient leaked into branches that opted out of autograd.
            if (cond.data) {
                if (thenVal.requiresGrad)
                    thenVal.grad += out.grad;
            }
            else {
                if (elseVal.requiresGrad)
                    elseVal.grad += out.grad;
            }
        }, `if(${cond.label}){${thenVal.label}}else{${elseVal.label}}`);
    }
    /** a != b as 1/0; no gradient (discrete). */
    static neq(a, b) {
        return Value_1.Value.make(a.data !== b.data ? 1 : 0, a, b, (out) => () => {
            // No gradient - discrete operation
        }, `(${a.label}!=${b.label})`);
    }
    /** a > b as 1/0; no gradient (discrete). */
    static gt(a, b) {
        return Value_1.Value.make(a.data > b.data ? 1 : 0, a, b, (out) => () => {
            // No gradient - discrete operation
        }, `(${a.label}>${b.label})`);
    }
    /** a < b as 1/0; no gradient (discrete). */
    static lt(a, b) {
        return Value_1.Value.make(a.data < b.data ? 1 : 0, a, b, (out) => () => {
            // No gradient - discrete operation
        }, `(${a.label}<${b.label})`);
    }
    /** a >= b as 1/0; no gradient (discrete). */
    static gte(a, b) {
        return Value_1.Value.make(a.data >= b.data ? 1 : 0, a, b, (out) => () => {
            // No gradient - discrete operation
        }, `(${a.label}>=${b.label})`);
    }
    /** a <= b as 1/0; no gradient (discrete). */
    static lte(a, b) {
        return Value_1.Value.make(a.data <= b.data ? 1 : 0, a, b, (out) => () => {
            // No gradient - discrete operation
        }, `(${a.label}<=${b.label})`);
    }
}
exports.ValueComparison = ValueComparison;
@@ -0,0 +1,49 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ValueTrig = void 0;
4
+ const Value_1 = require("./Value");
5
/**
 * Trigonometric operators for Value nodes. Each op builds a new Value in
 * the autograd graph; the backward closure adds the analytic derivative
 * times out.grad into the input when it requires gradients.
 */
class ValueTrig {
    /** sin(x); d/dx = cos(x). */
    static sin(x) {
        return Value_1.Value.make(Math.sin(x.data), x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += Math.cos(x.data) * out.grad;
        }, `sin(${x.label})`);
    }
    /** cos(x); d/dx = -sin(x). */
    static cos(x) {
        return Value_1.Value.make(Math.cos(x.data), x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += -Math.sin(x.data) * out.grad;
        }, `cos(${x.label})`);
    }
    /** tan(x); d/dx = sec^2(x) = 1/cos^2(x). */
    static tan(x) {
        return Value_1.Value.make(Math.tan(x.data), x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += (1 / (Math.cos(x.data) ** 2)) * out.grad;
        }, `tan(${x.label})`);
    }
    /** asin(x); d/dx = 1/sqrt(1 - x^2). */
    static asin(x) {
        return Value_1.Value.make(Math.asin(x.data), x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += (1 / Math.sqrt(1 - x.data * x.data)) * out.grad;
        }, `asin(${x.label})`);
    }
    /** acos(x); d/dx = -1/sqrt(1 - x^2). */
    static acos(x) {
        return Value_1.Value.make(Math.acos(x.data), x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += (-1 / Math.sqrt(1 - x.data * x.data)) * out.grad;
        }, `acos(${x.label})`);
    }
    /** atan(x); d/dx = 1/(1 + x^2). */
    static atan(x) {
        return Value_1.Value.make(Math.atan(x.data), x, null, (out) => () => {
            if (x.requiresGrad)
                x.grad += (1 / (1 + x.data * x.data)) * out.grad;
        }, `atan(${x.label})`);
    }
}
exports.ValueTrig = ValueTrig;
package/package.json CHANGED
@@ -1,9 +1,9 @@
1
1
  {
2
2
  "name": "scalar-autograd",
3
- "version": "0.1.3",
3
+ "version": "0.1.5",
4
4
  "description": "Scalar-based reverse-mode automatic differentiation in TypeScript.",
5
- "main": "Value.js",
6
- "types": "Value.d.ts",
5
+ "main": "dist/Value.js",
6
+ "types": "dist/Value.d.ts",
7
7
  "repository": {
8
8
  "type": "git",
9
9
  "url": "https://github.com/mfagerlund/ScalarAutograd.git"
@@ -18,15 +18,7 @@
18
18
  "author": "Mattias Fagerlund <mattias.fagerlund@carretera.se>",
19
19
  "license": "MIT",
20
20
  "files": [
21
- "Value.js",
22
- "Value.d.ts",
23
- "V.ts",
24
- "Value*.js",
25
- "Value*.ts",
26
- "Losses.js",
27
- "Losses.ts",
28
- "Optimizers.js",
29
- "Optimizers.ts"
21
+ "dist/"
30
22
  ],
31
23
  "devDependencies": {
32
24
  "ts-node": "^10.0.0",