catniff 0.2.3 → 0.2.4

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries; it is provided for informational purposes only.
package/dist/core.d.ts CHANGED
@@ -9,8 +9,8 @@ export interface TensorOptions {
 }
 export declare class Tensor {
     value: number[] | number;
-    shape: number[];
-    strides: number[];
+    readonly shape: number[];
+    readonly strides: number[];
     grad?: Tensor;
     requiresGrad: boolean;
     gradFn: Function;
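
The only change to the typings is that shape and strides are now readonly. A minimal consumer-side sketch of what that means, assuming Tensor is re-exported from the package entry point (the constructor signature is not shown in this diff):

    import { Tensor } from "catniff";

    const t = new Tensor([1, 2, 3]);      // assumption: the constructor accepts a plain array
    console.log(t.shape, t.strides);      // e.g. [3] and [1] for a length-3 1D tensor
    // t.shape = [1, 3];                  // 0.2.4: compile error, shape is a read-only property
    // t.shape.push(1);                   // still type-checks: readonly guards the property binding,
                                          // not the array's contents
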
package/dist/core.js CHANGED
@@ -20,8 +20,13 @@ class Tensor {
     }
     // Utility to flatten an nD array to be 1D
     static flatten(tensor) {
+        // Handle scalar tensors
         if (typeof tensor === "number")
             return tensor;
+        // If the value is already 1D, just return it (by reference)
+        if (typeof tensor[0] === "number")
+            return tensor;
+        // Otherwise, recursively traverse the nD array to flatten it
         const result = [];
         function traverse(arr) {
             if (typeof arr === "number") {
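
With the new fast path, an input that is already 1D is returned by reference instead of being rebuilt element by element, so callers should treat the result as read-only. An illustrative sketch, assuming Tensor is re-exported from the package entry point and that the static flatten helper shown above is reachable from the typings:

    import { Tensor } from "catniff";

    const nested = Tensor.flatten([[1, 2], [3, 4]]);   // [1, 2, 3, 4], freshly built
    const flat = [1, 2, 3];
    const out = Tensor.flatten(flat);                  // 0.2.4: the same array object comes back
    console.log(out === flat);                         // true (0.2.3 returned a fresh copy, so false)
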
@@ -148,7 +153,11 @@ class Tensor {
     static elementWiseSelf(tA, op) {
         if (typeof tA.value === "number")
             return new Tensor(op(tA.value));
-        return new Tensor(tA.value.map(el => op(el)), { shape: [...tA.shape], strides: [...tA.strides] });
+        const newValue = new Array(tA.value.length);
+        for (let index = 0; index < tA.value.length; index++) {
+            newValue[index] = op(tA.value[index]);
+        }
+        return new Tensor(newValue, { shape: tA.shape, strides: tA.strides });
     }
     // Utility to do element-wise operation and build a dag node with another tensor
     elementWiseABDAG(other, op, thisGrad = () => new Tensor(0), otherGrad = () => new Tensor(0)) {
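
The rewrite swaps Array.prototype.map for an output array that is allocated once and filled by an index loop, a common micro-optimization for hot numeric paths (it also stops copying shape and strides). A generic sketch of the pattern, using a hypothetical helper name rather than anything catniff exports:

    // Hypothetical stand-alone helper illustrating the preallocate-and-fill pattern
    function mapUnary(values: number[], op: (x: number) => number): number[] {
        const out = new Array<number>(values.length);  // single allocation up front
        for (let i = 0; i < values.length; i++) {
            out[i] = op(values[i]);                    // fill by index, no intermediate array from .map
        }
        return out;
    }

    console.log(mapUnary([1, 2, 3], x => x * x));      // [1, 4, 9]
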
@@ -319,8 +328,8 @@ class Tensor {
         const originalSize = this.shape.reduce((a, b) => a * b, 1);
         let gradShape, gradStrides, gradValue = [];
         if (this.requiresGrad) {
-            gradShape = [...this.shape];
-            gradStrides = [...this.strides];
+            gradShape = this.shape;
+            gradStrides = this.strides;
             gradValue = new Array(originalSize).fill(0);
         }
         for (let index = 0; index < originalSize; index++) {
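
Here, and in the similar hunks below, 0.2.4 passes this.shape and this.strides along by reference instead of spreading them into fresh arrays. That saves an allocation per call, but it means several tensors can end up sharing one shape array, which is presumably why the typings now mark these properties readonly. A plain TypeScript illustration of the sharing, not specific to catniff:

    const shape: number[] = [2, 3];
    const gradShape = shape;     // as in the change above: a reference, not a [...shape] copy
    shape[0] = 4;
    console.log(gradShape);      // [4, 3], both names point at the same array
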
@@ -535,7 +544,7 @@ class Tensor {
         }
         // If same dimension, return copy
         if (dim1 === dim2) {
-            return new Tensor(this.value, { shape: [...this.shape], strides: [...this.strides] });
+            return new Tensor(this.value, { shape: this.shape, strides: this.strides });
         }
         // Create new shape and strides by swapping
         const newShape = [...this.shape];
@@ -660,7 +669,7 @@ class Tensor {
             throw new Error("Input is not a 2D and 1D tensor pair");
         }
         // MM with no grad
-        const thisMat = new Tensor(this.value, { shape: [...this.shape], strides: [...this.strides] });
+        const thisMat = new Tensor(this.value, { shape: this.shape, strides: this.strides });
         const otherMat = new Tensor(other.value, { shape: [other.shape[0], 1], strides: [other.strides[0], 1] });
         const out = thisMat.mm(otherMat).squeeze(1);
         // Handle grad with original tensors
@@ -708,7 +717,7 @@ class Tensor {
     static fullLike(tensor, num, options = {}) {
         if (typeof tensor.value === "number")
             return new Tensor(num, options);
-        return new Tensor(tensor.value.map(el => num), { shape: [...tensor.shape], strides: [...tensor.strides], ...options });
+        return new Tensor(new Array(tensor.value.length).fill(num), { shape: tensor.shape, strides: tensor.strides, ...options });
     }
     // Reverse-mode autodiff call
     backward() {
@@ -718,7 +727,7 @@ class Tensor {
         function build(node) {
             if (!visited.has(node) && node.requiresGrad) {
                 visited.add(node);
-                node.grad = Tensor.fullLike(node, 0);
+                node.grad = Tensor.fullLike(node, 0); // Reset grad with 0
                 for (let child of node.children)
                     build(child);
                 topo.push(node);
@@ -731,7 +740,7 @@ class Tensor {
             topo[index].gradFn();
         }
     }
-    // Returns the number/nD array form of tensor
+    // Returns the raw number/nD array form of tensor
     val() {
         if (typeof this.value === "number")
             return this.value;
@@ -753,11 +762,11 @@ class Tensor {
         }
         return buildNested(this.value, this.shape, this.strides);
     }
-    // Returns a copy of the tensor with gradient turned on/off
+    // Returns a copy of the tensor with gradient turned on/off and detaches from autograd
     withGrad(requiresGrad) {
         return new Tensor(this.value, {
-            shape: [...this.shape],
-            strides: [...this.strides],
+            shape: this.shape,
+            strides: this.strides,
             requiresGrad
         });
     }
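
The updated comment clarifies that withGrad hands back a copy detached from the autograd graph. A hedged usage sketch built only from members visible in this diff (constructor options, requiresGrad, val, withGrad), with the entry-point export assumed:

    import { Tensor } from "catniff";

    const x = new Tensor([1, 2, 3], { requiresGrad: true });
    const detached = x.withGrad(false);   // new Tensor over the same value, outside the autograd graph
    console.log(detached.requiresGrad);   // false
    console.log(detached.val());          // [1, 2, 3]
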
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "catniff",
-  "version": "0.2.3",
+  "version": "0.2.4",
   "description": "A cute autograd engine for Javascript",
   "main": "index.js",
   "scripts": {