catniff 0.1.5 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/autograd.d.ts +72 -8
- package/dist/autograd.js +296 -9
- package/dist/tensor.d.ts +29 -0
- package/dist/tensor.js +109 -173
- package/package.json +1 -1
package/dist/autograd.d.ts
CHANGED
@@ -6,14 +6,46 @@ export declare enum OP {
     MUL = 3,
     POW = 4,
     DIV = 5,
-    NEG = 6,
-    EXP = 7,
-    LOG = 8,
-    RELU = 9,
-    SIGMOID = 10,
-    TANH = 11,
-    T = 12,
-    MM = 13
+    GE = 6,
+    LE = 7,
+    GT = 8,
+    LT = 9,
+    EQ = 10,
+    LOGICALAND = 11,
+    LOGICALOR = 12,
+    LOGICALXOR = 13,
+    LOGICALNOT = 14,
+    BITWISEAND = 15,
+    BITWISEOR = 16,
+    BITWISEXOR = 17,
+    BITWISENOT = 18,
+    BITWISELEFTSHIFT = 19,
+    BITWISERIGHTSHIFT = 20,
+    NEG = 21,
+    ABS = 22,
+    SIGN = 23,
+    SIN = 24,
+    COS = 25,
+    TAN = 26,
+    ASIN = 27,
+    ACOS = 28,
+    ATAN = 29,
+    SINH = 30,
+    COSH = 31,
+    ASINH = 32,
+    ACOSH = 33,
+    ATANH = 34,
+    SQRT = 35,
+    EXP = 36,
+    LOG = 37,
+    LOG2 = 38,
+    LOG10 = 39,
+    LOG1P = 40,
+    RELU = 41,
+    SIGMOID = 42,
+    TANH = 43,
+    T = 44,
+    MM = 45
 }
 export declare class Node {
     value: Tensor;
@@ -28,9 +60,41 @@ export declare class Node {
     mul(other: Node | number): Node;
     pow(other: Node | number): Node;
     div(other: Node | number): Node;
+    ge(other: Node | number): Node;
+    le(other: Node | number): Node;
+    gt(other: Node | number): Node;
+    lt(other: Node | number): Node;
+    eq(other: Node | number): Node;
+    logicalAnd(other: Node | number): Node;
+    logicalOr(other: Node | number): Node;
+    logicalXor(other: Node | number): Node;
+    logicalNot(): Node;
+    bitwiseAnd(other: Node | number): Node;
+    bitwiseOr(other: Node | number): Node;
+    bitwiseXor(other: Node | number): Node;
+    bitwiseNot(): Node;
+    bitwiseLeftShift(other: Node | number): Node;
+    bitwiseRightShift(other: Node | number): Node;
     neg(): Node;
+    abs(): Node;
+    sign(): Node;
+    sin(): Node;
+    cos(): Node;
+    tan(): Node;
+    asin(): Node;
+    acos(): Node;
+    atan(): Node;
+    sinh(): Node;
+    cosh(): Node;
+    asinh(): Node;
+    acosh(): Node;
+    atanh(): Node;
+    sqrt(): Node;
     exp(): Node;
     log(): Node;
+    log2(): Node;
+    log10(): Node;
+    log1p(): Node;
     relu(): Node;
     sigmoid(): Node;
     tanh(): Node;
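Note that the 0.1.5 tail of the enum (NEG through MM) is renumbered to make room for the new comparison, logical, bitwise, trig, and logarithm members, so raw OP values are not stable across this upgrade. The widened Node surface chains like the existing methods. A rough sketch of composing the new ops (leakySin is an illustrative name, not a package API, and the import path assumes the dist layout shown here rather than a documented entry point):

import { Node } from "./dist/autograd";

// y = sin(x) * [x > 0] + 0.01 * x * [x <= 0], built only from methods declared above
function leakySin(x: Node): Node {
    return x.sin().mul(x.gt(0)).add(x.mul(0.01).mul(x.le(0)));
}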
package/dist/autograd.js
CHANGED
@@ -2,7 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Node = exports.OP = void 0;
 const tensor_1 = require("./tensor");
-const { add, sub, mul, pow, div, neg, exp, log, relu, sigmoid, tanh, t, mm } = tensor_1.TensorMath;
+const { add, sub, mul, pow, div, gt, lt, ge, le, eq, logicalAnd, logicalOr, logicalXor, logicalNot, bitwiseAnd, bitwiseOr, bitwiseXor, bitwiseNot, bitwiseLeftShift, bitwiseRightShift, neg, abs, sign, sin, cos, tan, asin, acos, atan, sinh, cosh, asinh, acosh, atanh, sqrt, exp, log, log2, log10, log1p, relu, sigmoid, tanh, t, mm } = tensor_1.TensorMath;
 var OP;
 (function (OP) {
     OP[OP["NONE"] = 0] = "NONE";
@@ -11,14 +11,46 @@ var OP;
     OP[OP["MUL"] = 3] = "MUL";
     OP[OP["POW"] = 4] = "POW";
     OP[OP["DIV"] = 5] = "DIV";
-    OP[OP["NEG"] = 6] = "NEG";
-    OP[OP["EXP"] = 7] = "EXP";
-    OP[OP["LOG"] = 8] = "LOG";
-    OP[OP["RELU"] = 9] = "RELU";
-    OP[OP["SIGMOID"] = 10] = "SIGMOID";
-    OP[OP["TANH"] = 11] = "TANH";
-    OP[OP["T"] = 12] = "T";
-    OP[OP["MM"] = 13] = "MM";
+    OP[OP["GE"] = 6] = "GE";
+    OP[OP["LE"] = 7] = "LE";
+    OP[OP["GT"] = 8] = "GT";
+    OP[OP["LT"] = 9] = "LT";
+    OP[OP["EQ"] = 10] = "EQ";
+    OP[OP["LOGICALAND"] = 11] = "LOGICALAND";
+    OP[OP["LOGICALOR"] = 12] = "LOGICALOR";
+    OP[OP["LOGICALXOR"] = 13] = "LOGICALXOR";
+    OP[OP["LOGICALNOT"] = 14] = "LOGICALNOT";
+    OP[OP["BITWISEAND"] = 15] = "BITWISEAND";
+    OP[OP["BITWISEOR"] = 16] = "BITWISEOR";
+    OP[OP["BITWISEXOR"] = 17] = "BITWISEXOR";
+    OP[OP["BITWISENOT"] = 18] = "BITWISENOT";
+    OP[OP["BITWISELEFTSHIFT"] = 19] = "BITWISELEFTSHIFT";
+    OP[OP["BITWISERIGHTSHIFT"] = 20] = "BITWISERIGHTSHIFT";
+    OP[OP["NEG"] = 21] = "NEG";
+    OP[OP["ABS"] = 22] = "ABS";
+    OP[OP["SIGN"] = 23] = "SIGN";
+    OP[OP["SIN"] = 24] = "SIN";
+    OP[OP["COS"] = 25] = "COS";
+    OP[OP["TAN"] = 26] = "TAN";
+    OP[OP["ASIN"] = 27] = "ASIN";
+    OP[OP["ACOS"] = 28] = "ACOS";
+    OP[OP["ATAN"] = 29] = "ATAN";
+    OP[OP["SINH"] = 30] = "SINH";
+    OP[OP["COSH"] = 31] = "COSH";
+    OP[OP["ASINH"] = 32] = "ASINH";
+    OP[OP["ACOSH"] = 33] = "ACOSH";
+    OP[OP["ATANH"] = 34] = "ATANH";
+    OP[OP["SQRT"] = 35] = "SQRT";
+    OP[OP["EXP"] = 36] = "EXP";
+    OP[OP["LOG"] = 37] = "LOG";
+    OP[OP["LOG2"] = 38] = "LOG2";
+    OP[OP["LOG10"] = 39] = "LOG10";
+    OP[OP["LOG1P"] = 40] = "LOG1P";
+    OP[OP["RELU"] = 41] = "RELU";
+    OP[OP["SIGMOID"] = 42] = "SIGMOID";
+    OP[OP["TANH"] = 43] = "TANH";
+    OP[OP["T"] = 44] = "T";
+    OP[OP["MM"] = 45] = "MM";
 })(OP || (exports.OP = OP = {}));
 class Node {
     value;
@@ -96,6 +128,124 @@ class Node {
         };
         return out;
     }
+    ge(other) {
+        other = Node.forceNode(other);
+        const out = new Node(ge(this.value, other.value), [this, other], OP.GE);
+        out.feedBackward = () => {
+            // We consider the derivative of ge to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    le(other) {
+        other = Node.forceNode(other);
+        const out = new Node(le(this.value, other.value), [this, other], OP.LE);
+        out.feedBackward = () => {
+            // We consider the derivative of le to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    gt(other) {
+        other = Node.forceNode(other);
+        const out = new Node(gt(this.value, other.value), [this, other], OP.GT);
+        out.feedBackward = () => {
+            // We consider the derivative of gt to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    lt(other) {
+        other = Node.forceNode(other);
+        const out = new Node(lt(this.value, other.value), [this, other], OP.LT);
+        out.feedBackward = () => {
+            // We consider the derivative of lt to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    eq(other) {
+        other = Node.forceNode(other);
+        const out = new Node(eq(this.value, other.value), [this, other], OP.EQ);
+        out.feedBackward = () => {
+            // We consider the derivative of eq to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    logicalAnd(other) {
+        other = Node.forceNode(other);
+        const out = new Node(logicalAnd(this.value, other.value), [this, other], OP.LOGICALAND);
+        out.feedBackward = () => {
+            // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    logicalOr(other) {
+        other = Node.forceNode(other);
+        const out = new Node(logicalOr(this.value, other.value), [this, other], OP.LOGICALOR);
+        out.feedBackward = () => {
+            // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    logicalXor(other) {
+        other = Node.forceNode(other);
+        const out = new Node(logicalXor(this.value, other.value), [this, other], OP.LOGICALXOR);
+        out.feedBackward = () => {
+            // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    logicalNot() {
+        const out = new Node(logicalNot(this.value), [this], OP.LOGICALNOT);
+        out.feedBackward = () => {
+            // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    bitwiseAnd(other) {
+        other = Node.forceNode(other);
+        const out = new Node(bitwiseAnd(this.value, other.value), [this, other], OP.BITWISEAND);
+        out.feedBackward = () => {
+            // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    bitwiseOr(other) {
+        other = Node.forceNode(other);
+        const out = new Node(bitwiseOr(this.value, other.value), [this, other], OP.BITWISEOR);
+        out.feedBackward = () => {
+            // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    bitwiseXor(other) {
+        other = Node.forceNode(other);
+        const out = new Node(bitwiseXor(this.value, other.value), [this, other], OP.BITWISEXOR);
+        out.feedBackward = () => {
+            // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    bitwiseNot() {
+        const out = new Node(bitwiseNot(this.value), [this], OP.BITWISENOT);
+        out.feedBackward = () => {
+            // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    bitwiseLeftShift(other) {
+        other = Node.forceNode(other);
+        const out = new Node(bitwiseLeftShift(this.value, other.value), [this, other], OP.BITWISELEFTSHIFT);
+        out.feedBackward = () => {
+            // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    bitwiseRightShift(other) {
+        other = Node.forceNode(other);
+        const out = new Node(bitwiseRightShift(this.value, other.value), [this, other], OP.BITWISERIGHTSHIFT);
+        out.feedBackward = () => {
+            // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
     neg() {
         const out = new Node(neg(this.value), [this], OP.NEG);
         out.feedBackward = () => {
@@ -104,6 +254,119 @@ class Node {
         };
         return out;
     }
+    abs() {
+        const out = new Node(abs(this.value), [this], OP.ABS);
+        out.feedBackward = () => {
+            // |x| d/dx = sign(x)
+            Node.addGrad(this, mul(out.grad, sign(this.value)));
+        };
+        return out;
+    }
+    sign() {
+        const out = new Node(sign(this.value), [this], OP.SIGN);
+        out.feedBackward = () => {
+            // We consider the derivative of sign to be 0, which does not add to current grad, so this function is just empty
+        };
+        return out;
+    }
+    sin() {
+        const out = new Node(sin(this.value), [this], OP.SIN);
+        out.feedBackward = () => {
+            // sinx d/dx = cosx
+            Node.addGrad(this, mul(out.grad, cos(this.value)));
+        };
+        return out;
+    }
+    cos() {
+        const out = new Node(cos(this.value), [this], OP.COS);
+        out.feedBackward = () => {
+            // cosx d/dx = -sinx
+            Node.addGrad(this, mul(out.grad, neg(sin(this.value))));
+        };
+        return out;
+    }
+    tan() {
+        const tanResult = tan(this.value);
+        const out = new Node(tanResult, [this], OP.TAN);
+        out.feedBackward = () => {
+            // tanx d/dx = 1+(tanx)^2
+            Node.addGrad(this, mul(out.grad, add(1, pow(tanResult, 2))));
+        };
+        return out;
+    }
+    asin() {
+        const out = new Node(asin(this.value), [this], OP.ASIN);
+        out.feedBackward = () => {
+            // asinx d/dx = 1/sqrt(1-x^2)
+            Node.addGrad(this, div(out.grad, sqrt(sub(1, pow(this.value, 2)))));
+        };
+        return out;
+    }
+    acos() {
+        const out = new Node(acos(this.value), [this], OP.ACOS);
+        out.feedBackward = () => {
+            // acosx d/dx = -1/sqrt(1-x^2)
+            Node.addGrad(this, neg(div(out.grad, sqrt(sub(1, pow(this.value, 2))))));
+        };
+        return out;
+    }
+    atan() {
+        const out = new Node(atan(this.value), [this], OP.ATAN);
+        out.feedBackward = () => {
+            // atanx d/dx = 1/(1+x^2)
+            Node.addGrad(this, div(out.grad, add(1, pow(this.value, 2))));
+        };
+        return out;
+    }
+    sinh() {
+        const out = new Node(sinh(this.value), [this], OP.SINH);
+        out.feedBackward = () => {
+            // sinhx d/dx = coshx
+            Node.addGrad(this, mul(out.grad, cosh(this.value)));
+        };
+        return out;
+    }
+    cosh() {
+        const out = new Node(cosh(this.value), [this], OP.COSH);
+        out.feedBackward = () => {
+            // coshx d/dx = sinhx
+            Node.addGrad(this, mul(out.grad, sinh(this.value)));
+        };
+        return out;
+    }
+    asinh() {
+        const out = new Node(asinh(this.value), [this], OP.ASINH);
+        out.feedBackward = () => {
+            // asinhx d/dx = 1/sqrt(1+x^2)
+            Node.addGrad(this, div(out.grad, sqrt(add(1, pow(this.value, 2)))));
+        };
+        return out;
+    }
+    acosh() {
+        const out = new Node(acosh(this.value), [this], OP.ACOSH);
+        out.feedBackward = () => {
+            // acosx d/dx = 1/(sqrt(x-1)*sqrt(x+1))
+            Node.addGrad(this, div(out.grad, mul(sqrt(sub(this.value, 1)), sqrt(add(this.value, 1)))));
+        };
+        return out;
+    }
+    atanh() {
+        const out = new Node(atanh(this.value), [this], OP.ATANH);
+        out.feedBackward = () => {
+            // atanx d/dx = 1/(1-x^2)
+            Node.addGrad(this, div(out.grad, sub(1, pow(this.value, 2))));
+        };
+        return out;
+    }
+    sqrt() {
+        const sqrtResult = sqrt(this.value);
+        const out = new Node(sqrtResult, [this], OP.SQRT);
+        out.feedBackward = () => {
+            // sqrt(x) d/dx = 1/(2*sqrt(x))
+            Node.addGrad(this, div(out.grad, mul(2, sqrtResult)));
+        };
+        return out;
+    }
     exp() {
         const expResult = exp(this.value);
         const out = new Node(expResult, [this], OP.EXP);
@@ -121,6 +384,30 @@ class Node {
         };
         return out;
     }
+    log2() {
+        const out = new Node(log2(this.value), [this], OP.LOG2);
+        out.feedBackward = () => {
+            // log2(x) d/dx = 1/(xln2)
+            Node.addGrad(this, div(out.grad, mul(this.value, Math.log(2))));
+        };
+        return out;
+    }
+    log10() {
+        const out = new Node(log10(this.value), [this], OP.LOG10);
+        out.feedBackward = () => {
+            // log2(x) d/dx = 1/(xln10)
+            Node.addGrad(this, div(out.grad, mul(this.value, Math.log(10))));
+        };
+        return out;
+    }
+    log1p() {
+        const out = new Node(log1p(this.value), [this], OP.LOG1P);
+        out.feedBackward = () => {
+            // ln(1+x) d/dx = 1/(1+x)
+            Node.addGrad(this, div(out.grad, add(this.value, 1)));
+        };
+        return out;
+    }
     relu() {
         const out = new Node(relu(this.value), [this], OP.RELU);
         out.feedBackward = () => {
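Every differentiable method here follows the same reverse-mode pattern: construct the output Node with its parents and OP tag, then attach a feedBackward closure that multiplies the upstream gradient out.grad by the local derivative and accumulates it into each input via Node.addGrad. The comparison, logical, and bitwise ops (plus sign) attach empty closures, i.e. their derivative is taken to be zero everywhere, so gradients do not flow through them. A minimal self-contained sketch of that pattern, scalar-only and with illustrative names (not catniff's actual classes):

class ScalarNode {
    grad = 0;
    feedBackward: () => void = () => { };
    constructor(public value: number, public parents: ScalarNode[] = []) { }
    sin(): ScalarNode {
        const out = new ScalarNode(Math.sin(this.value), [this]);
        // d/dx sin x = cos x: scale the upstream grad and accumulate into the input
        out.feedBackward = () => { this.grad += out.grad * Math.cos(this.value); };
        return out;
    }
    gt(other: number): ScalarNode {
        // Comparison: derivative treated as 0, so feedBackward stays empty
        return new ScalarNode(this.value > other ? 1 : 0, [this]);
    }
}

// y = sin(x) at x = 0; seeding y.grad = 1 and firing feedBackward
// leaves x.grad === Math.cos(0) === 1, matching d/dx sin x.
const x = new ScalarNode(0);
const y = x.sin();
y.grad = 1;
y.feedBackward();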
package/dist/tensor.d.ts
CHANGED
@@ -3,6 +3,8 @@ export declare class TensorMath {
     static create(num: number, shape: number[]): Tensor;
     static getShape(tA: Tensor): number[];
     static padShape(tA: Tensor, tB: Tensor): [Tensor[], Tensor[]];
+    static elementWiseAB(tA: Tensor, tB: Tensor, op: (tA: number, tB: number) => number): Tensor;
+    static elementWiseSelf(tA: Tensor, op: (tA: number) => number): Tensor;
     static add(tA: Tensor, tB: Tensor): Tensor;
     static sub(tA: Tensor, tB: Tensor): Tensor;
     static mul(tA: Tensor, tB: Tensor): Tensor;
@@ -13,9 +15,36 @@ export declare class TensorMath {
     static ge(tA: Tensor, tB: Tensor): Tensor;
     static le(tA: Tensor, tB: Tensor): Tensor;
     static eq(tA: Tensor, tB: Tensor): Tensor;
+    static logicalAnd(tA: Tensor, tB: Tensor): Tensor;
+    static logicalOr(tA: Tensor, tB: Tensor): Tensor;
+    static logicalXor(tA: Tensor, tB: Tensor): Tensor;
+    static logicalNot(tA: Tensor): Tensor;
+    static bitwiseAnd(tA: Tensor, tB: Tensor): Tensor;
+    static bitwiseOr(tA: Tensor, tB: Tensor): Tensor;
+    static bitwiseXor(tA: Tensor, tB: Tensor): Tensor;
+    static bitwiseNot(tA: Tensor): Tensor;
+    static bitwiseLeftShift(tA: Tensor, tB: Tensor): Tensor;
+    static bitwiseRightShift(tA: Tensor, tB: Tensor): Tensor;
     static neg(tA: Tensor): Tensor;
+    static abs(tA: Tensor): Tensor;
+    static sign(tA: Tensor): Tensor;
+    static sin(tA: Tensor): Tensor;
+    static cos(tA: Tensor): Tensor;
+    static tan(tA: Tensor): Tensor;
+    static asin(tA: Tensor): Tensor;
+    static acos(tA: Tensor): Tensor;
+    static atan(tA: Tensor): Tensor;
+    static sinh(tA: Tensor): Tensor;
+    static cosh(tA: Tensor): Tensor;
+    static asinh(tA: Tensor): Tensor;
+    static acosh(tA: Tensor): Tensor;
+    static atanh(tA: Tensor): Tensor;
+    static sqrt(tA: Tensor): Tensor;
     static exp(tA: Tensor): Tensor;
     static log(tA: Tensor): Tensor;
+    static log2(tA: Tensor): Tensor;
+    static log10(tA: Tensor): Tensor;
+    static log1p(tA: Tensor): Tensor;
     static relu(tA: Tensor): Tensor;
     static sigmoid(tA: Tensor): Tensor;
     static tanh(tA: Tensor): Tensor;
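The two new declarations are the core of this release's refactor: elementWiseAB recurses over two broadcast-padded tensors with a binary scalar callback, and elementWiseSelf maps a unary callback over one tensor. Because both are exported in the typings, the same mechanism appears usable for ops the library does not ship. A sketch (atan2 and clamp01 are illustrative names, and the arguments are typed loosely here since the Tensor alias itself is outside this hunk):

import { TensorMath } from "./dist/tensor";

// Broadcast-aware binary op from a plain scalar callback
const atan2 = (a: any, b: any) =>
    TensorMath.elementWiseAB(a, b, (x: number, y: number) => Math.atan2(x, y));

// Unary op mapped over every element
const clamp01 = (t: any) =>
    TensorMath.elementWiseSelf(t, (x: number) => Math.min(1, Math.max(0, x)));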
package/dist/tensor.js
CHANGED
@@ -35,9 +35,9 @@ class TensorMath {
         }
         return [tA, tB];
     }
-    static add(tA, tB) {
+    static elementWiseAB(tA, tB, op) {
         if (typeof tA === "number" && typeof tB === "number") {
-            return tA + tB;
+            return op(tA, tB);
         }
         [tA, tB] = TensorMath.padShape(tA, tB);
         const outLen = Math.max(tA.length, tB.length);
@@ -48,210 +48,146 @@
         for (let i = 0; i < outLen; i++) {
             const subA = tA[tA.length === 1 ? 0 : i];
             const subB = tB[tB.length === 1 ? 0 : i];
-            result.push(TensorMath.add(subA, subB));
+            result.push(TensorMath.elementWiseAB(subA, subB, op));
         }
         return result;
     }
-    static sub(tA, tB) {
-        if (typeof tA === "number" && typeof tB === "number") {
-            return tA - tB;
-        }
-        [tA, tB] = TensorMath.padShape(tA, tB);
-        const outLen = Math.max(tA.length, tB.length);
-        if (tA.length !== tB.length && tA.length !== 1 && tB.length !== 1) {
-            throw new Error("Inputs are incompatible tensors");
+    static elementWiseSelf(tA, op) {
+        if (typeof tA === "number") {
+            return op(tA);
         }
-        const result = [];
-        for (let i = 0; i < outLen; i++) {
-            const subA = tA[tA.length === 1 ? 0 : i];
-            const subB = tB[tB.length === 1 ? 0 : i];
-            result.push(TensorMath.sub(subA, subB));
+        else {
+            return tA.map(subA => TensorMath.elementWiseSelf(subA, op));
         }
-        return result;
+    }
+    static add(tA, tB) {
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA + tB);
+    }
+    static sub(tA, tB) {
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA - tB);
     }
     static mul(tA, tB) {
-        if (typeof tA === "number" && typeof tB === "number") {
-            return tA * tB;
-        }
-        [tA, tB] = TensorMath.padShape(tA, tB);
-        const outLen = Math.max(tA.length, tB.length);
-        if (tA.length !== tB.length && tA.length !== 1 && tB.length !== 1) {
-            throw new Error("Inputs are incompatible tensors");
-        }
-        const result = [];
-        for (let i = 0; i < outLen; i++) {
-            const subA = tA[tA.length === 1 ? 0 : i];
-            const subB = tB[tB.length === 1 ? 0 : i];
-            result.push(TensorMath.mul(subA, subB));
-        }
-        return result;
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA * tB);
     }
     static pow(tA, tB) {
-        if (typeof tA === "number" && typeof tB === "number") {
-            return tA ** tB;
-        }
-        [tA, tB] = TensorMath.padShape(tA, tB);
-        const outLen = Math.max(tA.length, tB.length);
-        if (tA.length !== tB.length && tA.length !== 1 && tB.length !== 1) {
-            throw new Error("Inputs are incompatible tensors");
-        }
-        const result = [];
-        for (let i = 0; i < outLen; i++) {
-            const subA = tA[tA.length === 1 ? 0 : i];
-            const subB = tB[tB.length === 1 ? 0 : i];
-            result.push(TensorMath.pow(subA, subB));
-        }
-        return result;
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA ** tB);
     }
     static div(tA, tB) {
-        if (typeof tA === "number" && typeof tB === "number") {
-            return tA / tB;
-        }
-        [tA, tB] = TensorMath.padShape(tA, tB);
-        const outLen = Math.max(tA.length, tB.length);
-        if (tA.length !== tB.length && tA.length !== 1 && tB.length !== 1) {
-            throw new Error("Inputs are incompatible tensors");
-        }
-        const result = [];
-        for (let i = 0; i < outLen; i++) {
-            const subA = tA[tA.length === 1 ? 0 : i];
-            const subB = tB[tB.length === 1 ? 0 : i];
-            result.push(TensorMath.div(subA, subB));
-        }
-        return result;
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA / tB);
     }
     static gt(tA, tB) {
-        if (typeof tA === "number" && typeof tB === "number") {
-            return tA > tB ? 1 : 0;
-        }
-        [tA, tB] = TensorMath.padShape(tA, tB);
-        const outLen = Math.max(tA.length, tB.length);
-        if (tA.length !== tB.length && tA.length !== 1 && tB.length !== 1) {
-            throw new Error("Inputs are incompatible tensors");
-        }
-        const result = [];
-        for (let i = 0; i < outLen; i++) {
-            const subA = tA[tA.length === 1 ? 0 : i];
-            const subB = tB[tB.length === 1 ? 0 : i];
-            result.push(TensorMath.gt(subA, subB));
-        }
-        return result;
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA > tB ? 1 : 0);
     }
     static lt(tA, tB) {
-        if (typeof tA === "number" && typeof tB === "number") {
-            return tA < tB ? 1 : 0;
-        }
-        [tA, tB] = TensorMath.padShape(tA, tB);
-        const outLen = Math.max(tA.length, tB.length);
-        if (tA.length !== tB.length && tA.length !== 1 && tB.length !== 1) {
-            throw new Error("Inputs are incompatible tensors");
-        }
-        const result = [];
-        for (let i = 0; i < outLen; i++) {
-            const subA = tA[tA.length === 1 ? 0 : i];
-            const subB = tB[tB.length === 1 ? 0 : i];
-            result.push(TensorMath.lt(subA, subB));
-        }
-        return result;
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA < tB ? 1 : 0);
     }
     static ge(tA, tB) {
-        if (typeof tA === "number" && typeof tB === "number") {
-            return tA >= tB ? 1 : 0;
-        }
-        [tA, tB] = TensorMath.padShape(tA, tB);
-        const outLen = Math.max(tA.length, tB.length);
-        if (tA.length !== tB.length && tA.length !== 1 && tB.length !== 1) {
-            throw new Error("Inputs are incompatible tensors");
-        }
-        const result = [];
-        for (let i = 0; i < outLen; i++) {
-            const subA = tA[tA.length === 1 ? 0 : i];
-            const subB = tB[tB.length === 1 ? 0 : i];
-            result.push(TensorMath.ge(subA, subB));
-        }
-        return result;
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA >= tB ? 1 : 0);
     }
     static le(tA, tB) {
-        if (typeof tA === "number" && typeof tB === "number") {
-            return tA <= tB ? 1 : 0;
-        }
-        [tA, tB] = TensorMath.padShape(tA, tB);
-        const outLen = Math.max(tA.length, tB.length);
-        if (tA.length !== tB.length && tA.length !== 1 && tB.length !== 1) {
-            throw new Error("Inputs are incompatible tensors");
-        }
-        const result = [];
-        for (let i = 0; i < outLen; i++) {
-            const subA = tA[tA.length === 1 ? 0 : i];
-            const subB = tB[tB.length === 1 ? 0 : i];
-            result.push(TensorMath.le(subA, subB));
-        }
-        return result;
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA <= tB ? 1 : 0);
     }
     static eq(tA, tB) {
-        if (typeof tA === "number" && typeof tB === "number") {
-            return tA === tB ? 1 : 0;
-        }
-        [tA, tB] = TensorMath.padShape(tA, tB);
-        const outLen = Math.max(tA.length, tB.length);
-        if (tA.length !== tB.length && tA.length !== 1 && tB.length !== 1) {
-            throw new Error("Inputs are incompatible tensors");
-        }
-        const result = [];
-        for (let i = 0; i < outLen; i++) {
-            const subA = tA[tA.length === 1 ? 0 : i];
-            const subB = tB[tB.length === 1 ? 0 : i];
-            result.push(TensorMath.eq(subA, subB));
-        }
-        return result;
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA === tB ? 1 : 0);
+    }
+    static logicalAnd(tA, tB) {
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA === 1 && tB === 1 ? 1 : 0);
+    }
+    static logicalOr(tA, tB) {
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA === 1 || tB === 1 ? 1 : 0);
+    }
+    static logicalXor(tA, tB) {
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => (tA === 1 || tB === 1) && tA !== tB ? 1 : 0);
+    }
+    static logicalNot(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => tA === 1 ? 0 : 1);
+    }
+    static bitwiseAnd(tA, tB) {
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA & tB);
+    }
+    static bitwiseOr(tA, tB) {
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA | tB);
+    }
+    static bitwiseXor(tA, tB) {
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA ^ tB);
+    }
+    static bitwiseNot(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => ~tA);
+    }
+    static bitwiseLeftShift(tA, tB) {
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA << tB);
+    }
+    static bitwiseRightShift(tA, tB) {
+        return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA >> tB);
     }
     static neg(tA) {
-        if (typeof tA === "number") {
-            return -tA;
-        }
-        else {
-            return tA.map(subA => TensorMath.neg(subA));
-        }
+        return TensorMath.elementWiseSelf(tA, (tA) => -tA);
+    }
+    static abs(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.abs(tA));
+    }
+    static sign(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.sign(tA));
+    }
+    static sin(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.sin(tA));
+    }
+    static cos(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.cos(tA));
+    }
+    static tan(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.tan(tA));
+    }
+    static asin(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.asin(tA));
+    }
+    static acos(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.acos(tA));
+    }
+    static atan(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.atan(tA));
+    }
+    static sinh(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.sinh(tA));
+    }
+    static cosh(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.cosh(tA));
+    }
+    static asinh(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.asinh(tA));
+    }
+    static acosh(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.acosh(tA));
+    }
+    static atanh(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.atanh(tA));
+    }
+    static sqrt(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.sqrt(tA));
     }
     static exp(tA) {
-        if (typeof tA === "number") {
-            return Math.exp(tA);
-        }
-        else {
-            return tA.map(subA => TensorMath.exp(subA));
-        }
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.exp(tA));
     }
     static log(tA) {
-        if (typeof tA === "number") {
-            return Math.log(tA);
-        }
-        else {
-            return tA.map(subA => TensorMath.log(subA));
-        }
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.log(tA));
+    }
+    static log2(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.log2(tA));
+    }
+    static log10(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.log10(tA));
+    }
+    static log1p(tA) {
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.log(tA));
     }
     static relu(tA) {
-        if (typeof tA === "number") {
-            return Math.max(tA, 0);
-        }
-        else {
-            return tA.map(subA => TensorMath.relu(subA));
-        }
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.max(tA, 0));
     }
     static sigmoid(tA) {
-        if (typeof tA === "number") {
-            return 1 / (1 + Math.exp(-tA));
-        }
-        else {
-            return tA.map(subA => TensorMath.sigmoid(subA));
-        }
+        return TensorMath.elementWiseSelf(tA, (tA) => 1 / (1 + Math.exp(-tA)));
     }
     static tanh(tA) {
-        if (typeof tA === "number") {
-            return Math.tanh(tA);
-        }
-        else {
-            return tA.map(subA => TensorMath.tanh(subA));
-        }
+        return TensorMath.elementWiseSelf(tA, (tA) => Math.tanh(tA));
     }
     static squeezeAxis(tA, axis) {
         if (typeof tA === "number")