catniff 0.6.9 β†’ 0.6.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,4 +1,4 @@
-# Catniff
+# Catniff 😺🌿
 
 Catniff is a small deep learning framework for JavaScript, built to be Torch-like but more direct about tensor and autograd usage, like Tinygrad. This project is currently under development, so keep in mind that APIs can be unstable and backwards-incompatible. On a side note, the name is a play on "catnip" and "differentiation".
 
package/dist/core.d.ts CHANGED
@@ -89,6 +89,7 @@ export declare class Tensor {
     var(dims?: number[] | number, keepDims?: boolean): Tensor;
     std(dims?: number[] | number, keepDims?: boolean): Tensor;
     softmax(dim?: number): Tensor;
+    softmin(dim?: number): Tensor;
     add(other: TensorValue | Tensor): Tensor;
     sub(other: TensorValue | Tensor): Tensor;
     subtract: (other: TensorValue | Tensor) => Tensor;
@@ -158,6 +159,10 @@ export declare class Tensor {
     log10(): Tensor;
     log1p(): Tensor;
     relu(): Tensor;
+    leakyRelu(negativeSlope?: number): Tensor;
+    elu(alpha?: number): Tensor;
+    selu(): Tensor;
+    celu(alpha?: number): Tensor;
     sigmoid(): Tensor;
     tanh(): Tensor;
     softplus(): Tensor;
package/dist/core.js CHANGED
@@ -329,7 +329,7 @@ class Tensor {
         // Verify shape size
         const originalSize = this.numel;
         const outputSize = Tensor.shapeToSize(newShape);
-        if (originalSize !== outputSize) {
+        if (originalSize !== outputSize || typeof this.value === "number") {
             throw new Error("Can not create view: incompatible sizes");
         }
         // Verify compatibility (only contiguity for now)
@@ -357,7 +357,7 @@ class Tensor {
         // Verify shape size
         const originalSize = this.numel;
         const outputSize = Tensor.shapeToSize(newShape);
-        if (originalSize !== outputSize) {
+        if (originalSize !== outputSize || typeof this.value === "number") {
             throw new Error("Can not reshape: incompatible sizes");
         }
         // Create new tensor with forced compatibility (only contiguity for now)
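The new `typeof this.value === "number"` guard in both hunks rejects scalar (0-D) tensors from `view` and `reshape` outright: a bare number has no strides to reinterpret, so even a size-compatible target shape like `[1]` now throws. A minimal sketch of the new behavior; the require path and `Tensor` constructor forms are assumptions, not confirmed by this diff:

    const { Tensor } = require("catniff");  // assumed entry point

    const t = new Tensor([[1, 2], [3, 4]]); // assumed: nested arrays build a 2x2 tensor
    t.reshape([4]);                         // OK: 4 elements in, 4 elements out

    const s = new Tensor(5);                // scalar: s.value is a plain number
    s.reshape([1]);                         // sizes match (1 === 1), but the new guard
                                            // throws "Can not reshape: incompatible sizes"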
@@ -645,6 +645,10 @@ class Tensor {
         if (dim < 0) {
             dim += this.shape.length;
         }
+        // If dimension out of bound, throw error
+        if (dim >= this.shape.length || dim < 0) {
+            throw new Error("Dimension do not exist to chunk");
+        }
         const sliceOpt = new Array(this.shape.length);
         for (let index = 0; index < sliceOpt.length; index++) {
             sliceOpt[index] = [];
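`chunk` now validates the dimension after normalizing negative indices, instead of building slice options for a dimension that does not exist. A sketch, assuming a Torch-style `chunk(chunks, dim)` signature (the parameter list is not visible in this diff):

    const t = new Tensor([[1, 2, 3], [4, 5, 6]]); // assumed constructor
    t.chunk(2, 0);   // OK: dim 0 exists
    t.chunk(3, -1);  // OK: -1 normalizes to dim 1
    t.chunk(2, 5);   // now throws "Dimension do not exist to chunk"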
@@ -907,14 +911,37 @@ class Tensor {
         if (typeof this.value === "number")
             return this;
         // Handle negative indexing
-        if (dim < 0)
-            dim = this.shape.length + dim;
+        if (dim < 0) {
+            dim += this.shape.length;
+        }
+        // If dimension out of bound, throw error
+        if (dim >= this.shape.length || dim < 0) {
+            throw new Error("Dimension do not exist to apply softmax");
+        }
         const maxVals = this.max(dim, true);
         const shifted = this.sub(maxVals);
         const expVals = shifted.exp();
         const sumExp = expVals.sum(dim, true);
         return expVals.div(sumExp);
     }
+    // Tensor softmin
+    softmin(dim = -1) {
+        if (typeof this.value === "number")
+            return this;
+        // Handle negative indexing
+        if (dim < 0) {
+            dim += this.shape.length;
+        }
+        // If dimension out of bound, throw error
+        if (dim >= this.shape.length || dim < 0) {
+            throw new Error("Dimension do not exist to apply softmin");
+        }
+        const maxVals = this.max(dim, true);
+        const shifted = maxVals.sub(this);
+        const expVals = shifted.exp();
+        const sumExp = expVals.sum(dim, true);
+        return expVals.div(sumExp);
+    }
     // Tensor element-wise addition
     add(other) {
         return this.elementWiseABDAG(other, (a, b) => a + b, (self, other, outGrad) => outGrad, (self, other, outGrad) => outGrad);
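`softmin` mirrors `softmax` with the shift flipped: `softmax` exponentiates x − max(x), while `softmin` exponentiates max(x) − x. Algebraically that is exactly softmax(−x) with a built-in stability shift, since every exponent is ≤ 0 and nothing can overflow. A quick sanity check (the constructor call is an assumption):

    const x = new Tensor([1, 2, 3]);         // assumed constructor
    // softmax: exp([-2, -1, 0]) normalized ≈ [0.090, 0.245, 0.665]
    // softmin: exp([0, -1, -2]) normalized ≈ [0.665, 0.245, 0.090]
    // i.e. softmin(x) equals softmax(-x); the ordering simply reverses.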
@@ -1137,6 +1164,32 @@ class Tensor {
     relu() {
         return this.elementWiseSelfDAG((a) => Math.max(a, 0), (self, outGrad) => outGrad.mul(self.gt(0)));
     }
+    // Tensor element-wise leaky relu
+    leakyRelu(negativeSlope = 0.01) {
+        return this.elementWiseSelfDAG((a) => Math.max(a, 0) + negativeSlope * Math.min(a, 0), (self, outGrad) => {
+            return outGrad.mul(self.gt(0).add(self.le(0).mul(negativeSlope)));
+        });
+    }
+    // Tensor element-wise elu
+    elu(alpha = 1) {
+        return this.elementWiseSelfDAG((a) => a > 0 ? a : alpha * (Math.expm1(a)), (self, outGrad) => {
+            return outGrad.mul(self.gt(0).add(self.le(0).mul(self.exp().mul(alpha))));
+        });
+    }
+    // Tensor element-wise selu
+    selu() {
+        const alpha = 1.6732632423543772848170429916717;
+        const scale = 1.0507009873554804934193349852946;
+        return this.elementWiseSelfDAG((a) => scale * (a >= 0 ? a : alpha * Math.expm1(a)), (self, outGrad) => {
+            return outGrad.mul(self.gt(0).mul(scale).add(self.le(0).mul(self.exp().mul(alpha * scale))));
+        });
+    }
+    // Tensor element-wise celu
+    celu(alpha = 1) {
+        return this.elementWiseSelfDAG((a) => a >= 0 ? a : alpha * (Math.expm1(a / alpha)), (self, outGrad) => {
+            return outGrad.mul(self.gt(0).add(self.le(0).mul(self.div(alpha).exp())));
+        });
+    }
     // Tensor element-wise sigmoid
     sigmoid() {
         return this.elementWiseSelfDAG((a) => 1 / (1 + Math.exp(-a)), (self, outGrad) => {
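All four new activations follow the same `elementWiseSelfDAG(forward, backward)` pattern as `relu`: a scalar forward function plus a tensor-level backward that masks on the sign of the input. The SELU constants are the standard α ≈ 1.6733 and λ ≈ 1.0507 from Klambauer et al.'s self-normalizing networks paper. Expected forward values, with the usual caveat that the constructor call is an assumption:

    const x = new Tensor([-2, -0.5, 0, 1.5]); // assumed constructor
    x.relu();         // [0, 0, 0, 1.5]
    x.leakyRelu();    // [-0.02, -0.005, 0, 1.5]          (default slope 0.01)
    x.leakyRelu(0.2); // [-0.4, -0.1, 0, 1.5]
    x.elu();          // ≈ [-0.8647, -0.3935, 0, 1.5]     (α = 1; α·(e^a − 1) for a ≤ 0)
    x.selu();         // ≈ [-1.5202, -0.6918, 0, 1.5761]  (λ-scaled ELU curve)
    x.celu(0.5);      // ≈ [-0.4908, -0.3161, 0, 1.5]     (α·(e^(a/α) − 1) for a ≤ 0)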
package/dist/optim.d.ts CHANGED
@@ -55,10 +55,10 @@ declare class AdamW extends BaseOptimizer {
     constructor(params: Tensor[], options?: AdamWOptions);
     step(): void;
 }
-export declare class Optim {
-    static BaseOptimizer: typeof BaseOptimizer;
-    static SGD: typeof SGD;
-    static Adam: typeof Adam;
-    static AdamW: typeof AdamW;
-}
+export declare const Optim: {
+    BaseOptimizer: typeof BaseOptimizer;
+    SGD: typeof SGD;
+    Adam: typeof Adam;
+    AdamW: typeof AdamW;
+};
 export {};
package/dist/optim.js CHANGED
@@ -184,10 +184,9 @@ class AdamW extends BaseOptimizer {
         }
     }
 }
-class Optim {
-    static BaseOptimizer = BaseOptimizer;
-    static SGD = SGD;
-    static Adam = Adam;
-    static AdamW = AdamW;
-}
-exports.Optim = Optim;
+exports.Optim = {
+    BaseOptimizer,
+    SGD,
+    Adam,
+    AdamW
+};
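Replacing the `Optim` class of static members with a plain exported object is behavior-preserving for the only supported call pattern, property access: `Optim.Adam` resolves the same way on a class's statics as on an object literal, and the optimizer classes themselves are unchanged. A sketch, assuming the package's entry point re-exports `Optim` and using the `AdamW` constructor shape from optim.d.ts above:

    const { Tensor, Optim } = require("catniff"); // assumed entry point

    const w = new Tensor([0.5, -0.3]);  // assumed constructor; a trainable parameter
    const opt = new Optim.AdamW([w]);   // constructor(params: Tensor[], options?) per the typings
    opt.step();                         // same API as before the refactor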
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "catniff",
-    "version": "0.6.9",
+    "version": "0.6.11",
     "description": "A small Torch-like deep learning framework for Javascript",
     "main": "index.js",
     "scripts": {