catniff 0.1.6 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/autograd.d.ts +45 -25
- package/dist/autograd.js +114 -26
- package/dist/tensor.d.ts +10 -0
- package/dist/tensor.js +30 -0
- package/package.json +1 -1
package/dist/autograd.d.ts
CHANGED
|
@@ -11,31 +11,41 @@ export declare enum OP {
|
|
|
11
11
|
GT = 8,
|
|
12
12
|
LT = 9,
|
|
13
13
|
EQ = 10,
|
|
14
|
-
NEG = 11,
|
|
15
|
-
ABS = 12,
|
|
16
|
-
SIGN = 13,
|
|
17
|
-
SIN = 14,
|
|
18
|
-
COS = 15,
|
|
19
|
-
TAN = 16,
|
|
20
|
-
ASIN = 17,
|
|
21
|
-
ACOS = 18,
|
|
22
|
-
ATAN = 19,
|
|
23
|
-
SINH = 20,
|
|
24
|
-
COSH = 21,
|
|
25
|
-
ASINH = 22,
|
|
26
|
-
ACOSH = 23,
|
|
27
|
-
ATANH = 24,
|
|
28
|
-
SQRT = 25,
|
|
29
|
-
EXP = 26,
|
|
30
|
-
LOG = 27,
|
|
31
|
-
LOG2 = 28,
|
|
32
|
-
LOG10 = 29,
|
|
33
|
-
LOG1P = 30,
|
|
34
|
-
RELU = 31,
|
|
35
|
-
SIGMOID = 32,
|
|
36
|
-
TANH = 33,
|
|
37
|
-
T = 34,
|
|
38
|
-
MM = 35
|
|
14
|
+
LOGICALAND = 11,
|
|
15
|
+
LOGICALOR = 12,
|
|
16
|
+
LOGICALXOR = 13,
|
|
17
|
+
LOGICALNOT = 14,
|
|
18
|
+
BITWISEAND = 15,
|
|
19
|
+
BITWISEOR = 16,
|
|
20
|
+
BITWISEXOR = 17,
|
|
21
|
+
BITWISENOT = 18,
|
|
22
|
+
BITWISELEFTSHIFT = 19,
|
|
23
|
+
BITWISERIGHTSHIFT = 20,
|
|
24
|
+
NEG = 21,
|
|
25
|
+
ABS = 22,
|
|
26
|
+
SIGN = 23,
|
|
27
|
+
SIN = 24,
|
|
28
|
+
COS = 25,
|
|
29
|
+
TAN = 26,
|
|
30
|
+
ASIN = 27,
|
|
31
|
+
ACOS = 28,
|
|
32
|
+
ATAN = 29,
|
|
33
|
+
SINH = 30,
|
|
34
|
+
COSH = 31,
|
|
35
|
+
ASINH = 32,
|
|
36
|
+
ACOSH = 33,
|
|
37
|
+
ATANH = 34,
|
|
38
|
+
SQRT = 35,
|
|
39
|
+
EXP = 36,
|
|
40
|
+
LOG = 37,
|
|
41
|
+
LOG2 = 38,
|
|
42
|
+
LOG10 = 39,
|
|
43
|
+
LOG1P = 40,
|
|
44
|
+
RELU = 41,
|
|
45
|
+
SIGMOID = 42,
|
|
46
|
+
TANH = 43,
|
|
47
|
+
T = 44,
|
|
48
|
+
MM = 45
|
|
39
49
|
}
|
|
40
50
|
export declare class Node {
|
|
41
51
|
value: Tensor;
|
|
@@ -55,6 +65,16 @@ export declare class Node {
|
|
|
55
65
|
gt(other: Node | number): Node;
|
|
56
66
|
lt(other: Node | number): Node;
|
|
57
67
|
eq(other: Node | number): Node;
|
|
68
|
+
logicalAnd(other: Node | number): Node;
|
|
69
|
+
logicalOr(other: Node | number): Node;
|
|
70
|
+
logicalXor(other: Node | number): Node;
|
|
71
|
+
logicalNot(): Node;
|
|
72
|
+
bitwiseAnd(other: Node | number): Node;
|
|
73
|
+
bitwiseOr(other: Node | number): Node;
|
|
74
|
+
bitwiseXor(other: Node | number): Node;
|
|
75
|
+
bitwiseNot(): Node;
|
|
76
|
+
bitwiseLeftShift(other: Node | number): Node;
|
|
77
|
+
bitwiseRightShift(other: Node | number): Node;
|
|
58
78
|
neg(): Node;
|
|
59
79
|
abs(): Node;
|
|
60
80
|
sign(): Node;
|
package/dist/autograd.js
CHANGED
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.Node = exports.OP = void 0;
|
|
4
4
|
const tensor_1 = require("./tensor");
|
|
5
|
-
const { add, sub, mul, pow, div, gt, lt, ge, le, eq, neg, abs, sign, sin, cos, tan, asin, acos, atan, sinh, cosh, asinh, acosh, atanh, sqrt, exp, log, log2, log10, log1p, relu, sigmoid, tanh, t, mm } = tensor_1.TensorMath;
|
|
5
|
+
const { add, sub, mul, pow, div, gt, lt, ge, le, eq, logicalAnd, logicalOr, logicalXor, logicalNot, bitwiseAnd, bitwiseOr, bitwiseXor, bitwiseNot, bitwiseLeftShift, bitwiseRightShift, neg, abs, sign, sin, cos, tan, asin, acos, atan, sinh, cosh, asinh, acosh, atanh, sqrt, exp, log, log2, log10, log1p, relu, sigmoid, tanh, t, mm } = tensor_1.TensorMath;
|
|
6
6
|
var OP;
|
|
7
7
|
(function (OP) {
|
|
8
8
|
OP[OP["NONE"] = 0] = "NONE";
|
|
@@ -16,31 +16,41 @@ var OP;
|
|
|
16
16
|
OP[OP["GT"] = 8] = "GT";
|
|
17
17
|
OP[OP["LT"] = 9] = "LT";
|
|
18
18
|
OP[OP["EQ"] = 10] = "EQ";
|
|
19
|
-
OP[OP["NEG"] = 11] = "NEG";
|
|
20
|
-
OP[OP["ABS"] = 12] = "ABS";
|
|
21
|
-
OP[OP["SIGN"] = 13] = "SIGN";
|
|
22
|
-
OP[OP["SIN"] = 14] = "SIN";
|
|
23
|
-
OP[OP["COS"] = 15] = "COS";
|
|
24
|
-
OP[OP["TAN"] = 16] = "TAN";
|
|
25
|
-
OP[OP["ASIN"] = 17] = "ASIN";
|
|
26
|
-
OP[OP["ACOS"] = 18] = "ACOS";
|
|
27
|
-
OP[OP["ATAN"] = 19] = "ATAN";
|
|
28
|
-
OP[OP["SINH"] = 20] = "SINH";
|
|
29
|
-
OP[OP["COSH"] = 21] = "COSH";
|
|
30
|
-
OP[OP["ASINH"] = 22] = "ASINH";
|
|
31
|
-
OP[OP["ACOSH"] = 23] = "ACOSH";
|
|
32
|
-
OP[OP["ATANH"] = 24] = "ATANH";
|
|
33
|
-
OP[OP["SQRT"] = 25] = "SQRT";
|
|
34
|
-
OP[OP["EXP"] = 26] = "EXP";
|
|
35
|
-
OP[OP["LOG"] = 27] = "LOG";
|
|
36
|
-
OP[OP["LOG2"] = 28] = "LOG2";
|
|
37
|
-
OP[OP["LOG10"] = 29] = "LOG10";
|
|
38
|
-
OP[OP["LOG1P"] = 30] = "LOG1P";
|
|
39
|
-
OP[OP["RELU"] = 31] = "RELU";
|
|
40
|
-
OP[OP["SIGMOID"] = 32] = "SIGMOID";
|
|
41
|
-
OP[OP["TANH"] = 33] = "TANH";
|
|
42
|
-
OP[OP["T"] = 34] = "T";
|
|
43
|
-
OP[OP["MM"] = 35] = "MM";
|
|
19
|
+
OP[OP["LOGICALAND"] = 11] = "LOGICALAND";
|
|
20
|
+
OP[OP["LOGICALOR"] = 12] = "LOGICALOR";
|
|
21
|
+
OP[OP["LOGICALXOR"] = 13] = "LOGICALXOR";
|
|
22
|
+
OP[OP["LOGICALNOT"] = 14] = "LOGICALNOT";
|
|
23
|
+
OP[OP["BITWISEAND"] = 15] = "BITWISEAND";
|
|
24
|
+
OP[OP["BITWISEOR"] = 16] = "BITWISEOR";
|
|
25
|
+
OP[OP["BITWISEXOR"] = 17] = "BITWISEXOR";
|
|
26
|
+
OP[OP["BITWISENOT"] = 18] = "BITWISENOT";
|
|
27
|
+
OP[OP["BITWISELEFTSHIFT"] = 19] = "BITWISELEFTSHIFT";
|
|
28
|
+
OP[OP["BITWISERIGHTSHIFT"] = 20] = "BITWISERIGHTSHIFT";
|
|
29
|
+
OP[OP["NEG"] = 21] = "NEG";
|
|
30
|
+
OP[OP["ABS"] = 22] = "ABS";
|
|
31
|
+
OP[OP["SIGN"] = 23] = "SIGN";
|
|
32
|
+
OP[OP["SIN"] = 24] = "SIN";
|
|
33
|
+
OP[OP["COS"] = 25] = "COS";
|
|
34
|
+
OP[OP["TAN"] = 26] = "TAN";
|
|
35
|
+
OP[OP["ASIN"] = 27] = "ASIN";
|
|
36
|
+
OP[OP["ACOS"] = 28] = "ACOS";
|
|
37
|
+
OP[OP["ATAN"] = 29] = "ATAN";
|
|
38
|
+
OP[OP["SINH"] = 30] = "SINH";
|
|
39
|
+
OP[OP["COSH"] = 31] = "COSH";
|
|
40
|
+
OP[OP["ASINH"] = 32] = "ASINH";
|
|
41
|
+
OP[OP["ACOSH"] = 33] = "ACOSH";
|
|
42
|
+
OP[OP["ATANH"] = 34] = "ATANH";
|
|
43
|
+
OP[OP["SQRT"] = 35] = "SQRT";
|
|
44
|
+
OP[OP["EXP"] = 36] = "EXP";
|
|
45
|
+
OP[OP["LOG"] = 37] = "LOG";
|
|
46
|
+
OP[OP["LOG2"] = 38] = "LOG2";
|
|
47
|
+
OP[OP["LOG10"] = 39] = "LOG10";
|
|
48
|
+
OP[OP["LOG1P"] = 40] = "LOG1P";
|
|
49
|
+
OP[OP["RELU"] = 41] = "RELU";
|
|
50
|
+
OP[OP["SIGMOID"] = 42] = "SIGMOID";
|
|
51
|
+
OP[OP["TANH"] = 43] = "TANH";
|
|
52
|
+
OP[OP["T"] = 44] = "T";
|
|
53
|
+
OP[OP["MM"] = 45] = "MM";
|
|
44
54
|
})(OP || (exports.OP = OP = {}));
|
|
45
55
|
class Node {
|
|
46
56
|
value;
|
|
@@ -158,6 +168,84 @@ class Node {
|
|
|
158
168
|
};
|
|
159
169
|
return out;
|
|
160
170
|
}
|
|
171
|
+
logicalAnd(other) {
|
|
172
|
+
other = Node.forceNode(other);
|
|
173
|
+
const out = new Node(logicalAnd(this.value, other.value), [this, other], OP.LOGICALAND);
|
|
174
|
+
out.feedBackward = () => {
|
|
175
|
+
// We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
|
|
176
|
+
};
|
|
177
|
+
return out;
|
|
178
|
+
}
|
|
179
|
+
logicalOr(other) {
|
|
180
|
+
other = Node.forceNode(other);
|
|
181
|
+
const out = new Node(logicalOr(this.value, other.value), [this, other], OP.LOGICALOR);
|
|
182
|
+
out.feedBackward = () => {
|
|
183
|
+
// We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
|
|
184
|
+
};
|
|
185
|
+
return out;
|
|
186
|
+
}
|
|
187
|
+
logicalXor(other) {
|
|
188
|
+
other = Node.forceNode(other);
|
|
189
|
+
const out = new Node(logicalXor(this.value, other.value), [this, other], OP.LOGICALXOR);
|
|
190
|
+
out.feedBackward = () => {
|
|
191
|
+
// We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
|
|
192
|
+
};
|
|
193
|
+
return out;
|
|
194
|
+
}
|
|
195
|
+
logicalNot() {
|
|
196
|
+
const out = new Node(logicalNot(this.value), [this], OP.LOGICALNOT);
|
|
197
|
+
out.feedBackward = () => {
|
|
198
|
+
// We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
|
|
199
|
+
};
|
|
200
|
+
return out;
|
|
201
|
+
}
|
|
202
|
+
bitwiseAnd(other) {
|
|
203
|
+
other = Node.forceNode(other);
|
|
204
|
+
const out = new Node(bitwiseAnd(this.value, other.value), [this, other], OP.BITWISEAND);
|
|
205
|
+
out.feedBackward = () => {
|
|
206
|
+
// We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
|
|
207
|
+
};
|
|
208
|
+
return out;
|
|
209
|
+
}
|
|
210
|
+
bitwiseOr(other) {
|
|
211
|
+
other = Node.forceNode(other);
|
|
212
|
+
const out = new Node(bitwiseOr(this.value, other.value), [this, other], OP.BITWISEOR);
|
|
213
|
+
out.feedBackward = () => {
|
|
214
|
+
// We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
|
|
215
|
+
};
|
|
216
|
+
return out;
|
|
217
|
+
}
|
|
218
|
+
bitwiseXor(other) {
|
|
219
|
+
other = Node.forceNode(other);
|
|
220
|
+
const out = new Node(bitwiseXor(this.value, other.value), [this, other], OP.BITWISEXOR);
|
|
221
|
+
out.feedBackward = () => {
|
|
222
|
+
// We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
|
|
223
|
+
};
|
|
224
|
+
return out;
|
|
225
|
+
}
|
|
226
|
+
bitwiseNot() {
|
|
227
|
+
const out = new Node(bitwiseNot(this.value), [this], OP.BITWISENOT);
|
|
228
|
+
out.feedBackward = () => {
|
|
229
|
+
// We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
|
|
230
|
+
};
|
|
231
|
+
return out;
|
|
232
|
+
}
|
|
233
|
+
bitwiseLeftShift(other) {
|
|
234
|
+
other = Node.forceNode(other);
|
|
235
|
+
const out = new Node(bitwiseLeftShift(this.value, other.value), [this, other], OP.BITWISELEFTSHIFT);
|
|
236
|
+
out.feedBackward = () => {
|
|
237
|
+
// We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
|
|
238
|
+
};
|
|
239
|
+
return out;
|
|
240
|
+
}
|
|
241
|
+
bitwiseRightShift(other) {
|
|
242
|
+
other = Node.forceNode(other);
|
|
243
|
+
const out = new Node(bitwiseRightShift(this.value, other.value), [this, other], OP.BITWISERIGHTSHIFT);
|
|
244
|
+
out.feedBackward = () => {
|
|
245
|
+
// We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
|
|
246
|
+
};
|
|
247
|
+
return out;
|
|
248
|
+
}
|
|
161
249
|
neg() {
|
|
162
250
|
const out = new Node(neg(this.value), [this], OP.NEG);
|
|
163
251
|
out.feedBackward = () => {
|
package/dist/tensor.d.ts
CHANGED
|
@@ -15,6 +15,16 @@ export declare class TensorMath {
|
|
|
15
15
|
static ge(tA: Tensor, tB: Tensor): Tensor;
|
|
16
16
|
static le(tA: Tensor, tB: Tensor): Tensor;
|
|
17
17
|
static eq(tA: Tensor, tB: Tensor): Tensor;
|
|
18
|
+
static logicalAnd(tA: Tensor, tB: Tensor): Tensor;
|
|
19
|
+
static logicalOr(tA: Tensor, tB: Tensor): Tensor;
|
|
20
|
+
static logicalXor(tA: Tensor, tB: Tensor): Tensor;
|
|
21
|
+
static logicalNot(tA: Tensor): Tensor;
|
|
22
|
+
static bitwiseAnd(tA: Tensor, tB: Tensor): Tensor;
|
|
23
|
+
static bitwiseOr(tA: Tensor, tB: Tensor): Tensor;
|
|
24
|
+
static bitwiseXor(tA: Tensor, tB: Tensor): Tensor;
|
|
25
|
+
static bitwiseNot(tA: Tensor): Tensor;
|
|
26
|
+
static bitwiseLeftShift(tA: Tensor, tB: Tensor): Tensor;
|
|
27
|
+
static bitwiseRightShift(tA: Tensor, tB: Tensor): Tensor;
|
|
18
28
|
static neg(tA: Tensor): Tensor;
|
|
19
29
|
static abs(tA: Tensor): Tensor;
|
|
20
30
|
static sign(tA: Tensor): Tensor;
|
package/dist/tensor.js
CHANGED
|
@@ -90,6 +90,36 @@ class TensorMath {
|
|
|
90
90
|
static eq(tA, tB) {
|
|
91
91
|
return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA === tB ? 1 : 0);
|
|
92
92
|
}
|
|
93
|
+
static logicalAnd(tA, tB) {
|
|
94
|
+
return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA === 1 && tB === 1 ? 1 : 0);
|
|
95
|
+
}
|
|
96
|
+
static logicalOr(tA, tB) {
|
|
97
|
+
return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA === 1 || tB === 1 ? 1 : 0);
|
|
98
|
+
}
|
|
99
|
+
static logicalXor(tA, tB) {
|
|
100
|
+
return TensorMath.elementWiseAB(tA, tB, (tA, tB) => (tA === 1 || tB === 1) && tA !== tB ? 1 : 0);
|
|
101
|
+
}
|
|
102
|
+
static logicalNot(tA) {
|
|
103
|
+
return TensorMath.elementWiseSelf(tA, (tA) => tA === 1 ? 0 : 1);
|
|
104
|
+
}
|
|
105
|
+
static bitwiseAnd(tA, tB) {
|
|
106
|
+
return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA & tB);
|
|
107
|
+
}
|
|
108
|
+
static bitwiseOr(tA, tB) {
|
|
109
|
+
return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA | tB);
|
|
110
|
+
}
|
|
111
|
+
static bitwiseXor(tA, tB) {
|
|
112
|
+
return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA ^ tB);
|
|
113
|
+
}
|
|
114
|
+
static bitwiseNot(tA) {
|
|
115
|
+
return TensorMath.elementWiseSelf(tA, (tA) => ~tA);
|
|
116
|
+
}
|
|
117
|
+
static bitwiseLeftShift(tA, tB) {
|
|
118
|
+
return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA << tB);
|
|
119
|
+
}
|
|
120
|
+
static bitwiseRightShift(tA, tB) {
|
|
121
|
+
return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA >> tB);
|
|
122
|
+
}
|
|
93
123
|
static neg(tA) {
|
|
94
124
|
return TensorMath.elementWiseSelf(tA, (tA) => -tA);
|
|
95
125
|
}
|