catniff 0.6.11 → 0.6.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -132,6 +132,7 @@ All available APIs are in [`./src/`](./src/) if you want to dig deeper.
  
  * [Shakespeare-style text generator](https://github.com/nguyenphuminh/shakespeare-lm).
  * [Simple neural net for XOR calculation](./examples/xornet.js).
+ * [N-th order derivative calculation](./examples/nthorder.js).
  * [Tensors](./examples/tensors.js).
  * [Optimizer](./examples/optim.js).
  * [Simple quadratic equation](./examples/quadratic.js).
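The new `nthorder.js` example is linked above but not included in this diff. Below is a minimal sketch of the idea it presumably demonstrates, built on the `Tensor.createGraph` flag introduced in this release; the import form, `pow`, `backward()` and the grad-reset details are assumptions (Torch-like behaviour), and the actual example may differ.

```js
const { Tensor } = require("catniff");           // import form assumed

// Sketch only: second derivative of y = x^3 at x = 2.
Tensor.createGraph = true;                       // new in 0.6.12: backward passes build a graph too

const x = new Tensor(2, { requiresGrad: true });
const y = x.pow(3);                              // y = x^3 (pow is assumed, not part of this diff)
y.backward();                                    // x.grad = 3x^2 = 12, and it stays graph-connected

const firstDeriv = x.grad;                       // dy/dx as a tensor
firstDeriv.backward();                           // differentiate the derivative itself
// x.grad now also carries the second-order term 6x = 12, accumulated on top of the
// first-order value unless gradients are zeroed between the two passes.

Tensor.createGraph = false;                      // restore the default
```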
package/dist/core.d.ts CHANGED
@@ -23,6 +23,8 @@ export declare class Tensor {
  children: Tensor[];
  device: string;
  static training: boolean;
+ static noGrad: boolean;
+ static createGraph: boolean;
  constructor(value: TensorValue, options?: TensorOptions);
  static flattenValue(tensor: TensorValue): number[] | number;
  static getShape(tensor: TensorValue): number[];
@@ -214,7 +216,6 @@ export declare class Tensor {
  zeroGrad?: boolean;
  }): void;
  val(): TensorValue;
- withGrad(requiresGrad: boolean): Tensor;
  detach(): Tensor;
  clone(): Tensor;
  replace(other: Tensor | TensorValue, allowShapeMismatch?: boolean): Tensor;
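The declaration changes add two static switches (`noGrad`, `createGraph`) and drop `withGrad(requiresGrad)`. Code that relied on `withGrad` can usually be ported with `detach()` (gradient tracking off) or by building a fresh tensor with `requiresGrad` set; this is a suggested migration only, not something documented by the package:

```js
const { Tensor } = require("catniff");   // import form assumed

const t = new Tensor([[1, 2], [3, 4]]);

// 0.6.11:  const frozen  = t.withGrad(false);
// 0.6.11:  const tracked = t.withGrad(true);

// 0.6.12 equivalents (approximate: detach() keeps a view, the second line re-materialises):
const frozen  = t.detach();
const tracked = new Tensor(t.val(), { requiresGrad: true });
```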
package/dist/core.js CHANGED
@@ -14,6 +14,8 @@ class Tensor {
  children;
  device;
  static training = false;
+ static noGrad = false;
+ static createGraph = false;
  constructor(value, options = {}) {
  // Storage
  this.value = Tensor.flattenValue(value);
@@ -210,12 +212,12 @@ class Tensor {
  out.gradFn = () => {
  // Disable gradient collecting of gradients themselves
  const outGrad = out.grad;
- const selfNoGrad = this.detach();
- const otherNoGrad = other.detach();
+ const selfWithGrad = Tensor.createGraph ? this : this.detach();
+ const otherWithGrad = Tensor.createGraph ? other : other.detach();
  if (this.requiresGrad)
- Tensor.addGrad(this, thisGrad(selfNoGrad, otherNoGrad, outGrad));
+ Tensor.addGrad(this, thisGrad(selfWithGrad, otherWithGrad, outGrad));
  if (other.requiresGrad)
- Tensor.addGrad(other, otherGrad(selfNoGrad, otherNoGrad, outGrad));
+ Tensor.addGrad(other, otherGrad(selfWithGrad, otherWithGrad, outGrad));
  };
  }
  return out;
@@ -231,9 +233,9 @@ class Tensor {
  out.gradFn = () => {
  // Disable gradient collecting of gradients themselves
  const outGrad = out.grad;
- const selfNoGrad = this.detach();
+ const selfWithGrad = Tensor.createGraph ? this : this.detach();
  if (this.requiresGrad)
- Tensor.addGrad(this, thisGrad(selfNoGrad, outGrad));
+ Tensor.addGrad(this, thisGrad(selfWithGrad, outGrad));
  };
  }
  return out;
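Both hunks above change the same pattern, and the matrix-multiplication hunks further down repeat it: in 0.6.11 the operands were always `detach()`ed inside `gradFn`, so the tensors written by `Tensor.addGrad` carried no history; in 0.6.12 the original, graph-connected operands are used whenever `Tensor.createGraph` is set, so those gradient tensors can themselves be backpropagated. Condensed for reference (simplified from the hunks above, not additional library code):

```js
out.gradFn = () => {
    const outGrad = out.grad;
    // createGraph off (default): cut the graph, identical to 0.6.11 behaviour.
    // createGraph on: keep `this`/`other` attached so the computed grads keep children/gradFn.
    const a = Tensor.createGraph ? this : this.detach();
    const b = Tensor.createGraph ? other : other.detach();
    if (this.requiresGrad)  Tensor.addGrad(this,  thisGrad(a, b, outGrad));
    if (other.requiresGrad) Tensor.addGrad(other, otherGrad(a, b, outGrad));
};
```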
@@ -1353,12 +1355,12 @@ class Tensor {
  out.gradFn = () => {
  // Disable gradient collecting of gradients themselves
  const outGrad = out.grad;
- const selfNoGrad = this.detach();
- const otherNoGrad = other.detach();
+ const selfWithGrad = Tensor.createGraph ? this : this.detach();
+ const otherWithGrad = Tensor.createGraph ? other : other.detach();
  if (this.requiresGrad)
- Tensor.addGrad(this, outGrad.mm(otherNoGrad.t()));
+ Tensor.addGrad(this, outGrad.mm(otherWithGrad.t()));
  if (other.requiresGrad)
- Tensor.addGrad(other, selfNoGrad.t().mm(outGrad));
+ Tensor.addGrad(other, selfWithGrad.t().mm(outGrad));
  };
  }
  return out;
@@ -1411,12 +1413,12 @@ class Tensor {
  out.gradFn = () => {
  // Disable gradient collecting of gradients themselves
  const outGrad = out.grad;
- const selfNoGrad = this.detach();
- const otherNoGrad = other.detach();
+ const selfWithGrad = Tensor.createGraph ? this : this.detach();
+ const otherWithGrad = Tensor.createGraph ? other : other.detach();
  if (this.requiresGrad)
- Tensor.addGrad(this, outGrad.bmm(otherNoGrad.transpose(1, 2)));
+ Tensor.addGrad(this, outGrad.bmm(otherWithGrad.transpose(1, 2)));
  if (other.requiresGrad)
- Tensor.addGrad(other, selfNoGrad.transpose(1, 2).bmm(outGrad));
+ Tensor.addGrad(other, selfWithGrad.transpose(1, 2).bmm(outGrad));
  };
  }
  return out;
@@ -1511,12 +1513,12 @@ class Tensor {
  out.gradFn = () => {
  other = other;
  const outGrad = out.grad;
- const selfNoGrad = self.detach();
- const otherNoGrad = other.detach();
+ const selfWithGrad = Tensor.createGraph ? self : self.detach();
+ const otherWithGrad = Tensor.createGraph ? other : other.detach();
  if (this.requiresGrad)
- Tensor.addGrad(this, outGrad.matmul(otherNoGrad.transpose(-2, -1)));
+ Tensor.addGrad(this, outGrad.matmul(otherWithGrad.transpose(-2, -1)));
  if (other.requiresGrad)
- Tensor.addGrad(other, selfNoGrad.transpose(-2, -1).matmul(outGrad));
+ Tensor.addGrad(other, selfWithGrad.transpose(-2, -1).matmul(outGrad));
  };
  }
  return out;
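For orientation, the `mm`, `bmm` and `matmul` hunks all apply the standard matrix-product gradient rule; only the detach-vs-keep choice changes. For C = A·B:

∂L/∂A = (∂L/∂C) · Bᵀ
∂L/∂B = Aᵀ · (∂L/∂C)

which is exactly what `outGrad.mm(otherWithGrad.t())` / `selfWithGrad.t().mm(outGrad)` compute, and what the batched variants compute with `transpose(1, 2)` / `transpose(-2, -1)` in place of `t()`.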
@@ -1801,7 +1803,7 @@ class Tensor {
  const visited = new Set();
  function build(node) {
  // Only collects unvisited node and node that requires gradient
- if (!visited.has(node) && node.requiresGrad) {
+ if (!visited.has(node) && node.requiresGrad && !Tensor.noGrad) {
  visited.add(node);
  // Reset grad to zeros if specified
  if (zeroGrad) {
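Judging from this single call site, `Tensor.noGrad` short-circuits the node collection that precedes gradient propagation: while the flag is set, `build()` visits nothing, so a backward pass accumulates no gradients. A rough usage sketch; the `try/finally` wrapper and the `model`/`batch`/`report` names are placeholders, not catniff APIs:

```js
const { Tensor } = require("catniff");   // import form assumed

Tensor.noGrad = true;                    // nothing will be collected by a backward pass
try {
    const out = model.forward(batch);    // placeholder evaluation code
    report(out.val());                   // val() is real; report() is a placeholder
} finally {
    Tensor.noGrad = false;               // restore normal autograd behaviour
}
```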
@@ -1841,17 +1843,6 @@ class Tensor {
  }
  return buildNested(this.value, this.shape, this.strides, this.offset);
  }
- // Returns a view of the tensor with gradient turned on/off and detaches from autograd
- withGrad(requiresGrad) {
- return new Tensor(this.value, {
- shape: this.shape,
- strides: this.strides,
- offset: this.offset,
- numel: this.numel,
- device: this.device,
- requiresGrad
- });
- }
  // Returns a view of the tensor with gradient turned off and detaches from autograd
  detach() {
  return new Tensor(this.value, {
@@ -1863,15 +1854,18 @@ class Tensor {
  requiresGrad: false
  });
  }
- // Returns a copy of the tensor (with new data allocation) and detaches from autograd
+ // Returns a copy of the tensor (with new data allocation) and keeps grad connection
  clone() {
- return new Tensor(typeof this.value === "number" ? this.value : [...this.value], {
- shape: this.shape,
- strides: this.strides,
- offset: this.offset,
- numel: this.numel,
- requiresGrad: this.requiresGrad
- });
+ const newValue = typeof this.value === "number" ? this.value : [...this.value];
+ const out = new Tensor(newValue);
+ if (this.requiresGrad) {
+ out.requiresGrad = true;
+ out.children.push(this);
+ out.gradFn = () => {
+ Tensor.addGrad(this, out.grad);
+ };
+ }
+ return out;
  }
  // Returns this tensor with value replaced with the value of another tensor
  replace(other, allowShapeMismatch = false) {
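`clone()` also changes meaning: it still allocates new storage, but when the source requires gradients the copy now registers the source as a child and forwards its incoming gradient through an identity `gradFn`, instead of returning an unconnected copy that merely inherited `requiresGrad`. A minimal sketch of the difference; `mul`, `sum` and `backward()` are assumed Torch-like ops that are not part of this diff:

```js
const { Tensor } = require("catniff");   // import form assumed

const a = new Tensor([1, 2, 3], { requiresGrad: true });
const b = a.clone();            // new storage, but b.children includes a and b.gradFn forwards grads

const loss = b.mul(2).sum();    // assumed ops, used only to drive a backward pass
loss.backward();

// 0.6.12: gradients flow through the clone, so a.grad is populated.
// 0.6.11: clone() produced a detached leaf, so the gradient stopped at b.
```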
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "catniff",
- "version": "0.6.11",
+ "version": "0.6.12",
  "description": "A small Torch-like deep learning framework for Javascript",
  "main": "index.js",
  "scripts": {