catniff 0.1.6 → 0.1.8

This diff shows the content of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -11,31 +11,42 @@ export declare enum OP {
11
11
  GT = 8,
12
12
  LT = 9,
13
13
  EQ = 10,
14
- NEG = 11,
15
- ABS = 12,
16
- SIGN = 13,
17
- SIN = 14,
18
- COS = 15,
19
- TAN = 16,
20
- ASIN = 17,
21
- ACOS = 18,
22
- ATAN = 19,
23
- SINH = 20,
24
- COSH = 21,
25
- ASINH = 22,
26
- ACOSH = 23,
27
- ATANH = 24,
28
- SQRT = 25,
29
- EXP = 26,
30
- LOG = 27,
31
- LOG2 = 28,
32
- LOG10 = 29,
33
- LOG1P = 30,
34
- RELU = 31,
35
- SIGMOID = 32,
36
- TANH = 33,
37
- T = 34,
38
- MM = 35
14
+ LOGICALAND = 11,
15
+ LOGICALOR = 12,
16
+ LOGICALXOR = 13,
17
+ LOGICALNOT = 14,
18
+ BITWISEAND = 15,
19
+ BITWISEOR = 16,
20
+ BITWISEXOR = 17,
21
+ BITWISENOT = 18,
22
+ BITWISELEFTSHIFT = 19,
23
+ BITWISERIGHTSHIFT = 20,
24
+ NEG = 21,
25
+ ABS = 22,
26
+ SIGN = 23,
27
+ SIN = 24,
28
+ COS = 25,
29
+ TAN = 26,
30
+ ASIN = 27,
31
+ ACOS = 28,
32
+ ATAN = 29,
33
+ SINH = 30,
34
+ COSH = 31,
35
+ ASINH = 32,
36
+ ACOSH = 33,
37
+ ATANH = 34,
38
+ SQRT = 35,
39
+ EXP = 36,
40
+ LOG = 37,
41
+ LOG2 = 38,
42
+ LOG10 = 39,
43
+ LOG1P = 40,
44
+ RELU = 41,
45
+ SIGMOID = 42,
46
+ TANH = 43,
47
+ T = 44,
48
+ MM = 45,
49
+ DOT = 46
39
50
  }
40
51
  export declare class Node {
41
52
  value: Tensor;
@@ -55,6 +66,16 @@ export declare class Node {
55
66
  gt(other: Node | number): Node;
56
67
  lt(other: Node | number): Node;
57
68
  eq(other: Node | number): Node;
69
+ logicalAnd(other: Node | number): Node;
70
+ logicalOr(other: Node | number): Node;
71
+ logicalXor(other: Node | number): Node;
72
+ logicalNot(): Node;
73
+ bitwiseAnd(other: Node | number): Node;
74
+ bitwiseOr(other: Node | number): Node;
75
+ bitwiseXor(other: Node | number): Node;
76
+ bitwiseNot(): Node;
77
+ bitwiseLeftShift(other: Node | number): Node;
78
+ bitwiseRightShift(other: Node | number): Node;
58
79
  neg(): Node;
59
80
  abs(): Node;
60
81
  sign(): Node;
@@ -80,6 +101,7 @@ export declare class Node {
80
101
  tanh(): Node;
81
102
  t(): Node;
82
103
  mm(other: Node | number): Node;
104
+ dot(other: Node | number): Node;
83
105
  backward(): void;
84
106
  static forceNode(value: Node | number): Node;
85
107
  static addGrad(node: Node, accumGrad: Tensor): void;
package/dist/autograd.js CHANGED
@@ -2,7 +2,7 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.Node = exports.OP = void 0;
4
4
  const tensor_1 = require("./tensor");
5
- const { add, sub, mul, pow, div, gt, lt, ge, le, eq, neg, abs, sign, sin, cos, tan, asin, acos, atan, sinh, cosh, asinh, acosh, atanh, sqrt, exp, log, log2, log10, log1p, relu, sigmoid, tanh, t, mm } = tensor_1.TensorMath;
5
+ const { add, sub, mul, pow, div, gt, lt, ge, le, eq, logicalAnd, logicalOr, logicalXor, logicalNot, bitwiseAnd, bitwiseOr, bitwiseXor, bitwiseNot, bitwiseLeftShift, bitwiseRightShift, neg, abs, sign, sin, cos, tan, asin, acos, atan, sinh, cosh, asinh, acosh, atanh, sqrt, exp, log, log2, log10, log1p, relu, sigmoid, tanh, t, mm, dot } = tensor_1.TensorMath;
6
6
  var OP;
7
7
  (function (OP) {
8
8
  OP[OP["NONE"] = 0] = "NONE";
@@ -16,31 +16,42 @@ var OP;
16
16
  OP[OP["GT"] = 8] = "GT";
17
17
  OP[OP["LT"] = 9] = "LT";
18
18
  OP[OP["EQ"] = 10] = "EQ";
19
- OP[OP["NEG"] = 11] = "NEG";
20
- OP[OP["ABS"] = 12] = "ABS";
21
- OP[OP["SIGN"] = 13] = "SIGN";
22
- OP[OP["SIN"] = 14] = "SIN";
23
- OP[OP["COS"] = 15] = "COS";
24
- OP[OP["TAN"] = 16] = "TAN";
25
- OP[OP["ASIN"] = 17] = "ASIN";
26
- OP[OP["ACOS"] = 18] = "ACOS";
27
- OP[OP["ATAN"] = 19] = "ATAN";
28
- OP[OP["SINH"] = 20] = "SINH";
29
- OP[OP["COSH"] = 21] = "COSH";
30
- OP[OP["ASINH"] = 22] = "ASINH";
31
- OP[OP["ACOSH"] = 23] = "ACOSH";
32
- OP[OP["ATANH"] = 24] = "ATANH";
33
- OP[OP["SQRT"] = 25] = "SQRT";
34
- OP[OP["EXP"] = 26] = "EXP";
35
- OP[OP["LOG"] = 27] = "LOG";
36
- OP[OP["LOG2"] = 28] = "LOG2";
37
- OP[OP["LOG10"] = 29] = "LOG10";
38
- OP[OP["LOG1P"] = 30] = "LOG1P";
39
- OP[OP["RELU"] = 31] = "RELU";
40
- OP[OP["SIGMOID"] = 32] = "SIGMOID";
41
- OP[OP["TANH"] = 33] = "TANH";
42
- OP[OP["T"] = 34] = "T";
43
- OP[OP["MM"] = 35] = "MM";
19
+ OP[OP["LOGICALAND"] = 11] = "LOGICALAND";
20
+ OP[OP["LOGICALOR"] = 12] = "LOGICALOR";
21
+ OP[OP["LOGICALXOR"] = 13] = "LOGICALXOR";
22
+ OP[OP["LOGICALNOT"] = 14] = "LOGICALNOT";
23
+ OP[OP["BITWISEAND"] = 15] = "BITWISEAND";
24
+ OP[OP["BITWISEOR"] = 16] = "BITWISEOR";
25
+ OP[OP["BITWISEXOR"] = 17] = "BITWISEXOR";
26
+ OP[OP["BITWISENOT"] = 18] = "BITWISENOT";
27
+ OP[OP["BITWISELEFTSHIFT"] = 19] = "BITWISELEFTSHIFT";
28
+ OP[OP["BITWISERIGHTSHIFT"] = 20] = "BITWISERIGHTSHIFT";
29
+ OP[OP["NEG"] = 21] = "NEG";
30
+ OP[OP["ABS"] = 22] = "ABS";
31
+ OP[OP["SIGN"] = 23] = "SIGN";
32
+ OP[OP["SIN"] = 24] = "SIN";
33
+ OP[OP["COS"] = 25] = "COS";
34
+ OP[OP["TAN"] = 26] = "TAN";
35
+ OP[OP["ASIN"] = 27] = "ASIN";
36
+ OP[OP["ACOS"] = 28] = "ACOS";
37
+ OP[OP["ATAN"] = 29] = "ATAN";
38
+ OP[OP["SINH"] = 30] = "SINH";
39
+ OP[OP["COSH"] = 31] = "COSH";
40
+ OP[OP["ASINH"] = 32] = "ASINH";
41
+ OP[OP["ACOSH"] = 33] = "ACOSH";
42
+ OP[OP["ATANH"] = 34] = "ATANH";
43
+ OP[OP["SQRT"] = 35] = "SQRT";
44
+ OP[OP["EXP"] = 36] = "EXP";
45
+ OP[OP["LOG"] = 37] = "LOG";
46
+ OP[OP["LOG2"] = 38] = "LOG2";
47
+ OP[OP["LOG10"] = 39] = "LOG10";
48
+ OP[OP["LOG1P"] = 40] = "LOG1P";
49
+ OP[OP["RELU"] = 41] = "RELU";
50
+ OP[OP["SIGMOID"] = 42] = "SIGMOID";
51
+ OP[OP["TANH"] = 43] = "TANH";
52
+ OP[OP["T"] = 44] = "T";
53
+ OP[OP["MM"] = 45] = "MM";
54
+ OP[OP["DOT"] = 46] = "DOT";
44
55
  })(OP || (exports.OP = OP = {}));
45
56
  class Node {
46
57
  value;
@@ -158,6 +169,84 @@ class Node {
158
169
  };
159
170
  return out;
160
171
  }
172
+ logicalAnd(other) {
173
+ other = Node.forceNode(other);
174
+ const out = new Node(logicalAnd(this.value, other.value), [this, other], OP.LOGICALAND);
175
+ out.feedBackward = () => {
176
+ // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
177
+ };
178
+ return out;
179
+ }
180
+ logicalOr(other) {
181
+ other = Node.forceNode(other);
182
+ const out = new Node(logicalOr(this.value, other.value), [this, other], OP.LOGICALOR);
183
+ out.feedBackward = () => {
184
+ // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
185
+ };
186
+ return out;
187
+ }
188
+ logicalXor(other) {
189
+ other = Node.forceNode(other);
190
+ const out = new Node(logicalXor(this.value, other.value), [this, other], OP.LOGICALXOR);
191
+ out.feedBackward = () => {
192
+ // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
193
+ };
194
+ return out;
195
+ }
196
+ logicalNot() {
197
+ const out = new Node(logicalNot(this.value), [this], OP.LOGICALNOT);
198
+ out.feedBackward = () => {
199
+ // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
200
+ };
201
+ return out;
202
+ }
203
+ bitwiseAnd(other) {
204
+ other = Node.forceNode(other);
205
+ const out = new Node(bitwiseAnd(this.value, other.value), [this, other], OP.BITWISEAND);
206
+ out.feedBackward = () => {
207
+ // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
208
+ };
209
+ return out;
210
+ }
211
+ bitwiseOr(other) {
212
+ other = Node.forceNode(other);
213
+ const out = new Node(bitwiseOr(this.value, other.value), [this, other], OP.BITWISEOR);
214
+ out.feedBackward = () => {
215
+ // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
216
+ };
217
+ return out;
218
+ }
219
+ bitwiseXor(other) {
220
+ other = Node.forceNode(other);
221
+ const out = new Node(bitwiseXor(this.value, other.value), [this, other], OP.BITWISEXOR);
222
+ out.feedBackward = () => {
223
+ // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
224
+ };
225
+ return out;
226
+ }
227
+ bitwiseNot() {
228
+ const out = new Node(bitwiseNot(this.value), [this], OP.BITWISENOT);
229
+ out.feedBackward = () => {
230
+ // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
231
+ };
232
+ return out;
233
+ }
234
+ bitwiseLeftShift(other) {
235
+ other = Node.forceNode(other);
236
+ const out = new Node(bitwiseLeftShift(this.value, other.value), [this, other], OP.BITWISELEFTSHIFT);
237
+ out.feedBackward = () => {
238
+ // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
239
+ };
240
+ return out;
241
+ }
242
+ bitwiseRightShift(other) {
243
+ other = Node.forceNode(other);
244
+ const out = new Node(bitwiseRightShift(this.value, other.value), [this, other], OP.BITWISERIGHTSHIFT);
245
+ out.feedBackward = () => {
246
+ // We consider the derivative of this to be 0, which does not add to current grad, so this function is just empty
247
+ };
248
+ return out;
249
+ }
161
250
  neg() {
162
251
  const out = new Node(neg(this.value), [this], OP.NEG);
163
252
  out.feedBackward = () => {
@@ -359,6 +448,15 @@ class Node {
359
448
  };
360
449
  return out;
361
450
  }
451
+ dot(other) {
452
+ other = Node.forceNode(other);
453
+ const out = new Node(dot(this.value, other.value), [this, other], OP.DOT);
454
+ out.feedBackward = () => {
455
+ Node.addGrad(this, mul(out.grad, other.value));
456
+ Node.addGrad(other, mul(out.grad, this.value));
457
+ };
458
+ return out;
459
+ }
362
460
  backward() {
363
461
  // Build topological order
364
462
  const topo = [];
package/dist/tensor.d.ts CHANGED
@@ -15,6 +15,16 @@ export declare class TensorMath {
15
15
  static ge(tA: Tensor, tB: Tensor): Tensor;
16
16
  static le(tA: Tensor, tB: Tensor): Tensor;
17
17
  static eq(tA: Tensor, tB: Tensor): Tensor;
18
+ static logicalAnd(tA: Tensor, tB: Tensor): Tensor;
19
+ static logicalOr(tA: Tensor, tB: Tensor): Tensor;
20
+ static logicalXor(tA: Tensor, tB: Tensor): Tensor;
21
+ static logicalNot(tA: Tensor): Tensor;
22
+ static bitwiseAnd(tA: Tensor, tB: Tensor): Tensor;
23
+ static bitwiseOr(tA: Tensor, tB: Tensor): Tensor;
24
+ static bitwiseXor(tA: Tensor, tB: Tensor): Tensor;
25
+ static bitwiseNot(tA: Tensor): Tensor;
26
+ static bitwiseLeftShift(tA: Tensor, tB: Tensor): Tensor;
27
+ static bitwiseRightShift(tA: Tensor, tB: Tensor): Tensor;
18
28
  static neg(tA: Tensor): Tensor;
19
29
  static abs(tA: Tensor): Tensor;
20
30
  static sign(tA: Tensor): Tensor;
@@ -44,4 +54,5 @@ export declare class TensorMath {
44
54
  static sum(tA: Tensor, dims?: number[] | number, keepDims?: boolean): Tensor;
45
55
  static t(tA: Tensor): Tensor;
46
56
  static mm(tA: Tensor, tB: Tensor): Tensor;
57
+ static dot(tA: Tensor, tB: Tensor): Tensor;
47
58
  }
package/dist/tensor.js CHANGED
@@ -90,6 +90,36 @@ class TensorMath {
90
90
  static eq(tA, tB) {
91
91
  return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA === tB ? 1 : 0);
92
92
  }
93
+ static logicalAnd(tA, tB) {
94
+ return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA === 1 && tB === 1 ? 1 : 0);
95
+ }
96
+ static logicalOr(tA, tB) {
97
+ return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA === 1 || tB === 1 ? 1 : 0);
98
+ }
99
+ static logicalXor(tA, tB) {
100
+ return TensorMath.elementWiseAB(tA, tB, (tA, tB) => (tA === 1 || tB === 1) && tA !== tB ? 1 : 0);
101
+ }
102
+ static logicalNot(tA) {
103
+ return TensorMath.elementWiseSelf(tA, (tA) => tA === 1 ? 0 : 1);
104
+ }
105
+ static bitwiseAnd(tA, tB) {
106
+ return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA & tB);
107
+ }
108
+ static bitwiseOr(tA, tB) {
109
+ return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA | tB);
110
+ }
111
+ static bitwiseXor(tA, tB) {
112
+ return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA ^ tB);
113
+ }
114
+ static bitwiseNot(tA) {
115
+ return TensorMath.elementWiseSelf(tA, (tA) => ~tA);
116
+ }
117
+ static bitwiseLeftShift(tA, tB) {
118
+ return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA << tB);
119
+ }
120
+ static bitwiseRightShift(tA, tB) {
121
+ return TensorMath.elementWiseAB(tA, tB, (tA, tB) => tA >> tB);
122
+ }
93
123
  static neg(tA) {
94
124
  return TensorMath.elementWiseSelf(tA, (tA) => -tA);
95
125
  }
@@ -259,5 +289,19 @@ class TensorMath {
259
289
  }
260
290
  return matC;
261
291
  }
292
+ static dot(tA, tB) {
293
+ const shapeA = TensorMath.getShape(tA);
294
+ const shapeB = TensorMath.getShape(tB);
295
+ if (shapeA.length !== 1 || shapeB.length !== 1 || shapeA[0] !== shapeB[0])
296
+ throw new Error("Inputs are not 1D tensors");
297
+ const vectLen = shapeA[0];
298
+ const vectA = tA;
299
+ const vectB = tB;
300
+ let sum = 0;
301
+ for (let index = 0; index < vectLen; index++) {
302
+ sum += vectA[index] * vectB[index];
303
+ }
304
+ return sum;
305
+ }
262
306
  }
263
307
  exports.TensorMath = TensorMath;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "catniff",
3
- "version": "0.1.6",
3
+ "version": "0.1.8",
4
4
  "description": "A cute autograd engine for Javascript",
5
5
  "main": "index.js",
6
6
  "scripts": {