catniff 0.4.1 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -9,22 +9,6 @@ Install through npm:
9
9
  npm install catniff
10
10
  ```
11
11
 
12
- ## Example
13
-
14
- Here is a little demo of a quadratic function:
15
- ```js
16
- const { Tensor } = require("catniff");
17
-
18
- const x = new Tensor(2, { requiresGrad: true });
19
- const L = x.pow(2).add(x); // x^2 + x
20
-
21
- L.backward();
22
-
23
- console.log(x.grad.val()); // 5
24
- ```
25
-
26
- View all examples in [`./examples`](./examples).
27
-
28
12
  ## Tensors
29
13
 
30
14
  Tensors in Catniff can be created by passing in a number or an nD array, and there are built-in methods that can be used to perform tensor arithmetic:
@@ -92,7 +76,7 @@ optim.step();
92
76
  console.log("Updated weight:", w.data); // Should move toward 3.0
93
77
  ```
94
78
 
95
- And it can still do much more, check out the docs mentioned below for more information.
79
+ And it can still do much more, check out the docs and examples below for more information.
96
80
 
97
81
  ## Documentation
98
82
 
@@ -100,12 +84,19 @@ Full documentation is available in [`./docs/documentation.md`](./docs/documentat
100
84
 
101
85
  All available APIs are in [`./src/`](./src/) if you want to dig deeper.
102
86
 
87
+ ## Examples
88
+
89
+ * [Simple neural net for XOR calculation](./examples/xornet.js).
90
+ * [Tensors](./examples/tensors.js).
91
+ * [Optimizer](./examples/optim.js).
92
+ * [Simple quadratic equation](./examples/quadratic.js).
93
+
103
94
  ## Todos
104
95
 
105
96
  * Bug fixes.
106
97
  * More tensor ops.
107
98
  * GPU acceleration.
108
- * Some general neural net APIs.
99
+ * More general neural net APIs.
109
100
  * More detailed documentation.
110
101
  * Code refactoring.
111
102
  * Proper tests.
package/dist/core.d.ts CHANGED
@@ -46,6 +46,8 @@ export declare class Tensor {
46
46
  mean(dims?: number[] | number, keepDims?: boolean): Tensor;
47
47
  max(dims?: number[] | number, keepDims?: boolean): Tensor;
48
48
  min(dims?: number[] | number, keepDims?: boolean): Tensor;
49
+ var(dims?: number[] | number, keepDims?: boolean): Tensor;
50
+ std(dims?: number[] | number, keepDims?: boolean): Tensor;
49
51
  softmax(dims?: number[] | number): Tensor;
50
52
  add(other: TensorValue | Tensor): Tensor;
51
53
  sub(other: TensorValue | Tensor): Tensor;
package/dist/core.js CHANGED
@@ -339,14 +339,20 @@ class Tensor {
339
339
  sum(dims, keepDims = false) {
340
340
  if (typeof this.value === "number")
341
341
  return this;
342
- if (typeof dims === "number") {
343
- dims = [dims];
344
- }
345
342
  if (typeof dims === "undefined") {
346
343
  dims = Array.from({ length: this.shape.length }, (_, index) => index);
347
344
  }
345
+ if (Array.isArray(dims)) {
346
+ // Sort in descending order
347
+ const sortedDims = dims.sort((a, b) => b - a);
348
+ let reducedThis = this;
349
+ for (let i = 0; i < sortedDims.length; i++) {
350
+ reducedThis = reducedThis.sum(sortedDims[i], true);
351
+ }
352
+ return keepDims ? reducedThis : reducedThis.squeeze(dims);
353
+ }
348
354
  // Dims that are reduced now have size-1
349
- const outputShape = this.shape.map((dim, i) => dims.includes(i) ? 1 : dim);
355
+ const outputShape = this.shape.map((dim, i) => dims === i ? 1 : dim);
350
356
  const outputStrides = Tensor.getStrides(outputShape);
351
357
  const outputSize = Tensor.shapeToSize(outputShape);
352
358
  const outputValue = new Array(outputSize).fill(0);
@@ -363,7 +369,7 @@ class Tensor {
363
369
  for (let realFlatIndex = 0; realFlatIndex < originalSize; realFlatIndex++) {
364
370
  const coords = Tensor.indexToCoords(realFlatIndex, this.strides);
365
371
  // Force 0 on reduced axes to collapse into size-1 dims
366
- const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
372
+ const outCoords = coords.map((val, i) => dims === i ? 0 : val);
367
373
  // Convert output coordinates to flat index
368
374
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
369
375
  // Add into sum
@@ -392,14 +398,20 @@ class Tensor {
392
398
  prod(dims, keepDims = false) {
393
399
  if (typeof this.value === "number")
394
400
  return this;
395
- if (typeof dims === "number") {
396
- dims = [dims];
397
- }
398
401
  if (typeof dims === "undefined") {
399
402
  dims = Array.from({ length: this.shape.length }, (_, index) => index);
400
403
  }
404
+ if (Array.isArray(dims)) {
405
+ // Sort in descending order
406
+ const sortedDims = dims.sort((a, b) => b - a);
407
+ let reducedThis = this;
408
+ for (let i = 0; i < sortedDims.length; i++) {
409
+ reducedThis = reducedThis.prod(sortedDims[i], true);
410
+ }
411
+ return keepDims ? reducedThis : reducedThis.squeeze(dims);
412
+ }
401
413
  // Dims that are reduced now have size-1
402
- const outputShape = this.shape.map((dim, i) => dims.includes(i) ? 1 : dim);
414
+ const outputShape = this.shape.map((dim, i) => dims === i ? 1 : dim);
403
415
  const outputStrides = Tensor.getStrides(outputShape);
404
416
  const outputSize = Tensor.shapeToSize(outputShape);
405
417
  const outputValue = new Array(outputSize).fill(1);
@@ -408,7 +420,7 @@ class Tensor {
408
420
  for (let realFlatIndex = 0; realFlatIndex < originalSize; realFlatIndex++) {
409
421
  const coords = Tensor.indexToCoords(realFlatIndex, this.strides);
410
422
  // Force 0 on reduced axes to collapse into size-1 dims
411
- const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
423
+ const outCoords = coords.map((val, i) => dims === i ? 0 : val);
412
424
  // Convert output coordinates to flat index
413
425
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
414
426
  // Multiply into product
@@ -427,7 +439,7 @@ class Tensor {
427
439
  for (let realFlatIndex = 0; realFlatIndex < originalSize; realFlatIndex++) {
428
440
  const coords = Tensor.indexToCoords(realFlatIndex, this.strides);
429
441
  // Force 0 on reduced axes to collapse into size-1 dims
430
- const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
442
+ const outCoords = coords.map((val, i) => dims === i ? 0 : val);
431
443
  // Convert output coordinates to flat index
432
444
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
433
445
  // Grad is the product of other elements of the same axis, which is product of all els divided by the current value
@@ -443,14 +455,20 @@ class Tensor {
443
455
  mean(dims, keepDims = false) {
444
456
  if (typeof this.value === "number")
445
457
  return this;
446
- if (typeof dims === "number") {
447
- dims = [dims];
448
- }
449
458
  if (typeof dims === "undefined") {
450
459
  dims = Array.from({ length: this.shape.length }, (_, index) => index);
451
460
  }
461
+ if (Array.isArray(dims)) {
462
+ // Sort in descending order
463
+ const sortedDims = dims.sort((a, b) => b - a);
464
+ let reducedThis = this;
465
+ for (let i = 0; i < sortedDims.length; i++) {
466
+ reducedThis = reducedThis.mean(sortedDims[i], true);
467
+ }
468
+ return keepDims ? reducedThis : reducedThis.squeeze(dims);
469
+ }
452
470
  // Dims that are reduced now have size-1
453
- const outputShape = this.shape.map((dim, i) => dims.includes(i) ? 1 : dim);
471
+ const outputShape = this.shape.map((dim, i) => dims === i ? 1 : dim);
454
472
  const outputStrides = Tensor.getStrides(outputShape);
455
473
  const outputSize = Tensor.shapeToSize(outputShape);
456
474
  const outputValue = new Array(outputSize).fill(0);
@@ -460,7 +478,7 @@ class Tensor {
460
478
  for (let realFlatIndex = 0; realFlatIndex < originalSize; realFlatIndex++) {
461
479
  const coords = Tensor.indexToCoords(realFlatIndex, this.strides);
462
480
  // Force 0 on reduced axes to collapse into size-1 dims
463
- const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
481
+ const outCoords = coords.map((val, i) => dims === i ? 0 : val);
464
482
  // Convert output coordinates to flat index
465
483
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
466
484
  // Calculate sum and contributors to the sum
@@ -485,7 +503,7 @@ class Tensor {
485
503
  for (let realFlatIndex = 0; realFlatIndex < originalSize; realFlatIndex++) {
486
504
  const coords = Tensor.indexToCoords(realFlatIndex, this.strides);
487
505
  // Force 0 on reduced axes to collapse into size-1 dims
488
- const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
506
+ const outCoords = coords.map((val, i) => dims === i ? 0 : val);
489
507
  // Convert output coordinates to flat index
490
508
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
491
509
  // Mean = 1/n * (el1 + el2 + ... + eln) so grad = 1/n
@@ -501,14 +519,20 @@ class Tensor {
501
519
  max(dims, keepDims = false) {
502
520
  if (typeof this.value === "number")
503
521
  return this;
504
- if (typeof dims === "number") {
505
- dims = [dims];
506
- }
507
522
  if (typeof dims === "undefined") {
508
523
  dims = Array.from({ length: this.shape.length }, (_, index) => index);
509
524
  }
525
+ if (Array.isArray(dims)) {
526
+ // Sort in descending order
527
+ const sortedDims = dims.sort((a, b) => b - a);
528
+ let reducedThis = this;
529
+ for (let i = 0; i < sortedDims.length; i++) {
530
+ reducedThis = reducedThis.max(sortedDims[i], true);
531
+ }
532
+ return keepDims ? reducedThis : reducedThis.squeeze(dims);
533
+ }
510
534
  // Dims that are reduced now have size-1
511
- const outputShape = this.shape.map((dim, i) => dims.includes(i) ? 1 : dim);
535
+ const outputShape = this.shape.map((dim, i) => dims === i ? 1 : dim);
512
536
  const outputStrides = Tensor.getStrides(outputShape);
513
537
  const outputSize = Tensor.shapeToSize(outputShape);
514
538
  const outputValue = new Array(outputSize).fill(-Infinity);
@@ -517,7 +541,7 @@ class Tensor {
517
541
  for (let realFlatIndex = 0; realFlatIndex < originalSize; realFlatIndex++) {
518
542
  const coords = Tensor.indexToCoords(realFlatIndex, this.strides);
519
543
  // Force 0 on reduced axes to collapse into size-1 dims
520
- const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
544
+ const outCoords = coords.map((val, i) => dims === i ? 0 : val);
521
545
  // Convert output coordinates to flat index
522
546
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
523
547
  // Get max over time
@@ -535,14 +559,25 @@ class Tensor {
535
559
  out.children.push(this);
536
560
  out.gradFn = () => {
537
561
  const gradShape = this.shape, gradStrides = this.strides, gradValue = new Array(originalSize).fill(0);
562
+ const shareCounts = new Array(outputSize).fill(0);
563
+ const originalValue = this.value;
564
+ for (let realFlatIndex = 0; realFlatIndex < originalSize; realFlatIndex++) {
565
+ const coords = Tensor.indexToCoords(realFlatIndex, this.strides);
566
+ // Force 0 on reduced axes to collapse into size-1 dims
567
+ const outCoords = coords.map((val, i) => dims === i ? 0 : val);
568
+ // Convert output coordinates to flat index
569
+ const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
570
+ // We collect how many elements share the same max value first
571
+ shareCounts[outFlatIndex] += outputValue[outFlatIndex] === originalValue[realFlatIndex] ? 1 : 0;
572
+ }
538
573
  for (let realFlatIndex = 0; realFlatIndex < originalSize; realFlatIndex++) {
539
574
  const coords = Tensor.indexToCoords(realFlatIndex, this.strides);
540
575
  // Force 0 on reduced axes to collapse into size-1 dims
541
- const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
576
+ const outCoords = coords.map((val, i) => dims === i ? 0 : val);
542
577
  // Convert output coordinates to flat index
543
578
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
544
- // Calculate grad by checking if a position holds a value equal to the max value
545
- gradValue[realFlatIndex] = outputValue[outFlatIndex] === this.value[realFlatIndex] ? 1 : 0;
579
+ // Here we share the grad between the elements that share the same max value
580
+ gradValue[realFlatIndex] = outputValue[outFlatIndex] === originalValue[realFlatIndex] ? 1 / shareCounts[outFlatIndex] : 0;
546
581
  }
547
582
  const localGrad = new Tensor(gradValue, { shape: gradShape, strides: gradStrides });
548
583
  Tensor.addGrad(this, out.grad.withGrad(false).mul(localGrad));
@@ -554,14 +589,20 @@ class Tensor {
554
589
  min(dims, keepDims = false) {
555
590
  if (typeof this.value === "number")
556
591
  return this;
557
- if (typeof dims === "number") {
558
- dims = [dims];
559
- }
560
592
  if (typeof dims === "undefined") {
561
593
  dims = Array.from({ length: this.shape.length }, (_, index) => index);
562
594
  }
595
+ if (Array.isArray(dims)) {
596
+ // Sort in descending order
597
+ const sortedDims = dims.sort((a, b) => b - a);
598
+ let reducedThis = this;
599
+ for (let i = 0; i < sortedDims.length; i++) {
600
+ reducedThis = reducedThis.min(sortedDims[i], true);
601
+ }
602
+ return keepDims ? reducedThis : reducedThis.squeeze(dims);
603
+ }
563
604
  // Dims that are reduced now have size-1
564
- const outputShape = this.shape.map((dim, i) => dims.includes(i) ? 1 : dim);
605
+ const outputShape = this.shape.map((dim, i) => dims === i ? 1 : dim);
565
606
  const outputStrides = Tensor.getStrides(outputShape);
566
607
  const outputSize = Tensor.shapeToSize(outputShape);
567
608
  const outputValue = new Array(outputSize).fill(Infinity);
@@ -570,7 +611,7 @@ class Tensor {
570
611
  for (let realFlatIndex = 0; realFlatIndex < originalSize; realFlatIndex++) {
571
612
  const coords = Tensor.indexToCoords(realFlatIndex, this.strides);
572
613
  // Force 0 on reduced axes to collapse into size-1 dims
573
- const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
614
+ const outCoords = coords.map((val, i) => dims === i ? 0 : val);
574
615
  // Convert output coordinates to flat index
575
616
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
576
617
  // Get min over time
@@ -588,14 +629,25 @@ class Tensor {
588
629
  out.children.push(this);
589
630
  out.gradFn = () => {
590
631
  const gradShape = this.shape, gradStrides = this.strides, gradValue = new Array(originalSize).fill(0);
632
+ const shareCounts = new Array(outputSize).fill(0);
633
+ const originalValue = this.value;
634
+ for (let realFlatIndex = 0; realFlatIndex < originalSize; realFlatIndex++) {
635
+ const coords = Tensor.indexToCoords(realFlatIndex, this.strides);
636
+ // Force 0 on reduced axes to collapse into size-1 dims
637
+ const outCoords = coords.map((val, i) => dims === i ? 0 : val);
638
+ // Convert output coordinates to flat index
639
+ const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
640
+ // We collect how many elements share the same min value first
641
+ shareCounts[outFlatIndex] += outputValue[outFlatIndex] === originalValue[realFlatIndex] ? 1 : 0;
642
+ }
591
643
  for (let realFlatIndex = 0; realFlatIndex < originalSize; realFlatIndex++) {
592
644
  const coords = Tensor.indexToCoords(realFlatIndex, this.strides);
593
645
  // Force 0 on reduced axes to collapse into size-1 dims
594
- const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
646
+ const outCoords = coords.map((val, i) => dims === i ? 0 : val);
595
647
  // Convert output coordinates to flat index
596
648
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
597
- // Calculate grad by checking if a position holds a value equal to the min value
598
- gradValue[realFlatIndex] = outputValue[outFlatIndex] === this.value[realFlatIndex] ? 1 : 0;
649
+ // Here we share the grad between the elements that share the same min value
650
+ gradValue[realFlatIndex] = outputValue[outFlatIndex] === originalValue[realFlatIndex] ? 1 / shareCounts[outFlatIndex] : 0;
599
651
  }
600
652
  const localGrad = new Tensor(gradValue, { shape: gradShape, strides: gradStrides });
601
653
  Tensor.addGrad(this, out.grad.withGrad(false).mul(localGrad));
@@ -603,18 +655,34 @@ class Tensor {
603
655
  }
604
656
  return keepDims ? out : out.squeeze(dims);
605
657
  }
658
+ // Tensor variance reduction
659
+ var(dims, keepDims = false) {
660
+ const meanXSquared = this.square().mean(dims, keepDims);
661
+ const meanXSquaredExpanded = this.mean(dims, keepDims).square();
662
+ return meanXSquared.sub(meanXSquaredExpanded);
663
+ }
664
+ // Tensor standard deviation reduction
665
+ std(dims, keepDims = false) {
666
+ return this.var(dims, keepDims).sqrt();
667
+ }
606
668
  // Tensor softmax
607
669
  softmax(dims) {
608
670
  if (typeof this.value === "number")
609
671
  return this;
610
- if (typeof dims === "number") {
611
- dims = [dims];
612
- }
613
672
  if (typeof dims === "undefined") {
614
673
  dims = Array.from({ length: this.shape.length }, (_, index) => index);
615
674
  }
675
+ if (Array.isArray(dims)) {
676
+ // Sort in descending order
677
+ const sortedDims = dims.sort((a, b) => b - a);
678
+ let reducedThis = this;
679
+ for (let i = 0; i < sortedDims.length; i++) {
680
+ reducedThis = reducedThis.softmax(sortedDims[i]);
681
+ }
682
+ return reducedThis;
683
+ }
616
684
  // Dims that are reduced now have size-1
617
- const expSumShape = this.shape.map((dim, i) => dims.includes(i) ? 1 : dim);
685
+ const expSumShape = this.shape.map((dim, i) => dims === i ? 1 : dim);
618
686
  const expSumStrides = Tensor.getStrides(expSumShape);
619
687
  const expSumSize = Tensor.shapeToSize(expSumShape);
620
688
  const expSumValue = new Array(expSumSize).fill(0);
@@ -626,7 +694,7 @@ class Tensor {
626
694
  for (let realFlatIndex = 0; realFlatIndex < outputSize; realFlatIndex++) {
627
695
  const coords = Tensor.indexToCoords(realFlatIndex, outputStrides);
628
696
  // Force 0 on reduced axes to collapse into size-1 dims
629
- const expSumCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
697
+ const expSumCoords = coords.map((val, i) => dims === i ? 0 : val);
630
698
  // Convert exp sum coordinates to flat index
631
699
  const expSumFlatIndex = Tensor.coordsToIndex(expSumCoords, expSumStrides);
632
700
  // Add e^x to the sum cache
@@ -636,7 +704,7 @@ class Tensor {
636
704
  for (let realFlatIndex = 0; realFlatIndex < outputSize; realFlatIndex++) {
637
705
  const coords = Tensor.indexToCoords(realFlatIndex, outputStrides);
638
706
  // Force 0 on reduced axes to collapse into size-1 dims
639
- const expSumCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
707
+ const expSumCoords = coords.map((val, i) => dims === i ? 0 : val);
640
708
  // Convert exp sum coordinates to flat index
641
709
  const expSumFlatIndex = Tensor.coordsToIndex(expSumCoords, expSumStrides);
642
710
  // Calculate e^xi / sum
package/dist/nn.d.ts ADDED
@@ -0,0 +1,14 @@
1
+ import { Tensor, TensorValue } from "./core";
2
+ declare class Linear {
3
+ weight: Tensor;
4
+ bias?: Tensor;
5
+ constructor(inFeatures: number, outFeatures: number, bias?: boolean, customInit?: (shape: number[]) => Tensor);
6
+ forward(input: Tensor | TensorValue): Tensor;
7
+ }
8
+ export declare const nn: {
9
+ Linear: typeof Linear;
10
+ state: {
11
+ getParameters(model: any, visited?: WeakSet<object>): Tensor[];
12
+ };
13
+ };
14
+ export {};
package/dist/nn.js ADDED
@@ -0,0 +1,54 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.nn = void 0;
4
+ const core_1 = require("./core");
5
+ class Linear {
6
+ weight;
7
+ bias;
8
+ constructor(inFeatures, outFeatures, bias = true, customInit) {
9
+ let initFunc = (shape) => {
10
+ const bound = 1 / Math.sqrt(inFeatures);
11
+ return core_1.Tensor.uniform(shape, -bound, bound, { requiresGrad: true });
12
+ };
13
+ if (customInit) {
14
+ initFunc = customInit;
15
+ }
16
+ this.weight = initFunc([outFeatures, inFeatures]);
17
+ if (bias) {
18
+ this.bias = initFunc([outFeatures]);
19
+ }
20
+ }
21
+ forward(input) {
22
+ input = core_1.Tensor.forceTensor(input);
23
+ let output = input.matmul(this.weight.t());
24
+ if (this.bias) {
25
+ output = output.add(this.bias);
26
+ }
27
+ return output;
28
+ }
29
+ }
30
+ const state = {
31
+ getParameters(model, visited = new WeakSet()) {
32
+ if (visited.has(model)) {
33
+ return [];
34
+ }
35
+ visited.add(model);
36
+ const parameters = [];
37
+ for (const key in model) {
38
+ if (!model.hasOwnProperty(key))
39
+ continue;
40
+ const value = model[key];
41
+ if (value instanceof core_1.Tensor) {
42
+ parameters.push(value);
43
+ }
44
+ else if (typeof value === "object" && value !== null) {
45
+ parameters.push(...state.getParameters(value, visited));
46
+ }
47
+ }
48
+ return parameters;
49
+ }
50
+ };
51
+ exports.nn = {
52
+ Linear,
53
+ state
54
+ };
package/dist/optim.js CHANGED
@@ -20,9 +20,8 @@ class SGD {
20
20
  }
21
21
  step() {
22
22
  for (const param of this.params) {
23
- if (!param.grad) {
24
- throw new Error("Can not apply SGD on empty grad");
25
- }
23
+ if (!param.grad || !param.requiresGrad)
24
+ continue;
26
25
  let grad = param.grad.detach(), detachedParam = param.detach();
27
26
  // Apply weight decay (L2 regularization)
28
27
  if (this.weightDecay !== 0) {
@@ -80,9 +79,8 @@ class Adam {
80
79
  const biasCorrection1 = 1 - Math.pow(beta1, this.stepCount);
81
80
  const biasCorrection2 = 1 - Math.pow(beta2, this.stepCount);
82
81
  for (const param of this.params) {
83
- if (!param.grad) {
84
- throw new Error("Can not apply Adam on empty grad");
85
- }
82
+ if (!param.grad || !param.requiresGrad)
83
+ continue;
86
84
  let grad = param.grad.detach(), detachedParam = param.detach();
87
85
  // Apply weight decay (L2 regularization)
88
86
  if (this.weightDecay !== 0) {
package/index.d.ts CHANGED
@@ -1,2 +1,3 @@
1
1
  export * from "./dist/core";
2
2
  export * from "./dist/optim";
3
+ export * from "./dist/nn";
package/index.js CHANGED
@@ -1,4 +1,5 @@
1
1
  module.exports = {
2
2
  ...require("./dist/core"),
3
- ...require("./dist/optim")
3
+ ...require("./dist/optim"),
4
+ ...require("./dist/nn")
4
5
  };
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "catniff",
3
- "version": "0.4.1",
4
- "description": "A small Torch-like deep learning framework for Javascript with tensor and autograd support",
3
+ "version": "0.5.0",
4
+ "description": "A small Torch-like deep learning framework for Javascript",
5
5
  "main": "index.js",
6
6
  "scripts": {
7
7
  "test": "echo \"Error: no test specified\" && exit 1"