catniff 0.6.9 → 0.6.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/core.d.ts +2 -0
- package/dist/core.js +37 -4
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
# Catniff
|
|
1
|
+
# Catniff 😺🌿
|
|
2
2
|
|
|
3
3
|
Catniff is a small deep learning framework for JavaScript, built to be Torch-like, but more direct on tensor and autograd usage, like Tinygrad. This project is currently under development, so keep in mind that APIs can be unstable and backwards-incompatible. On a side note, the name is a play on "catnip" and "differentiation".
|
|
4
4
|
|
package/dist/core.d.ts
CHANGED
|
@@ -89,6 +89,7 @@ export declare class Tensor {
|
|
|
89
89
|
var(dims?: number[] | number, keepDims?: boolean): Tensor;
|
|
90
90
|
std(dims?: number[] | number, keepDims?: boolean): Tensor;
|
|
91
91
|
softmax(dim?: number): Tensor;
|
|
92
|
+
softmin(dim?: number): Tensor;
|
|
92
93
|
add(other: TensorValue | Tensor): Tensor;
|
|
93
94
|
sub(other: TensorValue | Tensor): Tensor;
|
|
94
95
|
subtract: (other: TensorValue | Tensor) => Tensor;
|
|
@@ -158,6 +159,7 @@ export declare class Tensor {
|
|
|
158
159
|
log10(): Tensor;
|
|
159
160
|
log1p(): Tensor;
|
|
160
161
|
relu(): Tensor;
|
|
162
|
+
leakyRelu(negativeSlope?: number): Tensor;
|
|
161
163
|
sigmoid(): Tensor;
|
|
162
164
|
tanh(): Tensor;
|
|
163
165
|
softplus(): Tensor;
|
package/dist/core.js
CHANGED
|
@@ -329,7 +329,7 @@ class Tensor {
|
|
|
329
329
|
// Verify shape size
|
|
330
330
|
const originalSize = this.numel;
|
|
331
331
|
const outputSize = Tensor.shapeToSize(newShape);
|
|
332
|
-
if (originalSize !== outputSize) {
|
|
332
|
+
if (originalSize !== outputSize || typeof this.value === "number") {
|
|
333
333
|
throw new Error("Can not create view: incompatible sizes");
|
|
334
334
|
}
|
|
335
335
|
// Verify compatibility (only contiguity for now)
|
|
@@ -357,7 +357,7 @@ class Tensor {
|
|
|
357
357
|
// Verify shape size
|
|
358
358
|
const originalSize = this.numel;
|
|
359
359
|
const outputSize = Tensor.shapeToSize(newShape);
|
|
360
|
-
if (originalSize !== outputSize) {
|
|
360
|
+
if (originalSize !== outputSize || typeof this.value === "number") {
|
|
361
361
|
throw new Error("Can not reshape: incompatible sizes");
|
|
362
362
|
}
|
|
363
363
|
// Create new tensor with forced compatibility (only contiguity for now)
|
|
@@ -645,6 +645,10 @@ class Tensor {
|
|
|
645
645
|
if (dim < 0) {
|
|
646
646
|
dim += this.shape.length;
|
|
647
647
|
}
|
|
648
|
+
// If dimension out of bound, throw error
|
|
649
|
+
if (dim >= this.shape.length || dim < 0) {
|
|
650
|
+
throw new Error("Dimension do not exist to chunk");
|
|
651
|
+
}
|
|
648
652
|
const sliceOpt = new Array(this.shape.length);
|
|
649
653
|
for (let index = 0; index < sliceOpt.length; index++) {
|
|
650
654
|
sliceOpt[index] = [];
|
|
@@ -907,14 +911,37 @@ class Tensor {
|
|
|
907
911
|
if (typeof this.value === "number")
|
|
908
912
|
return this;
|
|
909
913
|
// Handle negative indexing
|
|
910
|
-
if (dim < 0)
|
|
911
|
-
dim
|
|
914
|
+
if (dim < 0) {
|
|
915
|
+
dim += this.shape.length;
|
|
916
|
+
}
|
|
917
|
+
// If dimension out of bound, throw error
|
|
918
|
+
if (dim >= this.shape.length || dim < 0) {
|
|
919
|
+
throw new Error("Dimension do not exist to apply softmax");
|
|
920
|
+
}
|
|
912
921
|
const maxVals = this.max(dim, true);
|
|
913
922
|
const shifted = this.sub(maxVals);
|
|
914
923
|
const expVals = shifted.exp();
|
|
915
924
|
const sumExp = expVals.sum(dim, true);
|
|
916
925
|
return expVals.div(sumExp);
|
|
917
926
|
}
|
|
927
|
+
// Tensor softmin
|
|
928
|
+
softmin(dim = -1) {
|
|
929
|
+
if (typeof this.value === "number")
|
|
930
|
+
return this;
|
|
931
|
+
// Handle negative indexing
|
|
932
|
+
if (dim < 0) {
|
|
933
|
+
dim += this.shape.length;
|
|
934
|
+
}
|
|
935
|
+
// If dimension out of bound, throw error
|
|
936
|
+
if (dim >= this.shape.length || dim < 0) {
|
|
937
|
+
throw new Error("Dimension do not exist to apply softmin");
|
|
938
|
+
}
|
|
939
|
+
const maxVals = this.max(dim, true);
|
|
940
|
+
const shifted = maxVals.sub(this);
|
|
941
|
+
const expVals = shifted.exp();
|
|
942
|
+
const sumExp = expVals.sum(dim, true);
|
|
943
|
+
return expVals.div(sumExp);
|
|
944
|
+
}
|
|
918
945
|
// Tensor element-wise addition
|
|
919
946
|
add(other) {
|
|
920
947
|
return this.elementWiseABDAG(other, (a, b) => a + b, (self, other, outGrad) => outGrad, (self, other, outGrad) => outGrad);
|
|
@@ -1137,6 +1164,12 @@ class Tensor {
|
|
|
1137
1164
|
relu() {
|
|
1138
1165
|
return this.elementWiseSelfDAG((a) => Math.max(a, 0), (self, outGrad) => outGrad.mul(self.gt(0)));
|
|
1139
1166
|
}
|
|
1167
|
+
// Tensor element-wise leaky relu
|
|
1168
|
+
leakyRelu(negativeSlope = 0.01) {
|
|
1169
|
+
return this.elementWiseSelfDAG((a) => Math.max(a, 0) + negativeSlope * Math.min(a, 0), (self, outGrad) => {
|
|
1170
|
+
return outGrad.mul(self.gt(0).add(self.le(0).mul(negativeSlope)));
|
|
1171
|
+
});
|
|
1172
|
+
}
|
|
1140
1173
|
// Tensor element-wise sigmoid
|
|
1141
1174
|
sigmoid() {
|
|
1142
1175
|
return this.elementWiseSelfDAG((a) => 1 / (1 + Math.exp(-a)), (self, outGrad) => {
|