catniff 0.6.10 → 0.6.11

package/dist/core.d.ts CHANGED
@@ -160,6 +160,9 @@ export declare class Tensor {
     log1p(): Tensor;
     relu(): Tensor;
     leakyRelu(negativeSlope?: number): Tensor;
+    elu(alpha?: number): Tensor;
+    selu(): Tensor;
+    celu(alpha?: number): Tensor;
     sigmoid(): Tensor;
     tanh(): Tensor;
     softplus(): Tensor;
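For reference, these declarations add the standard ELU, SELU, and CELU activations; the implementations in core.js below compute (SELU's fixed constants are taken directly from that code):

    \mathrm{elu}_\alpha(x)  = \begin{cases} x & x > 0 \\ \alpha\,(e^{x} - 1) & x \le 0 \end{cases}
    \mathrm{celu}_\alpha(x) = \begin{cases} x & x \ge 0 \\ \alpha\,(e^{x/\alpha} - 1) & x < 0 \end{cases}
    \mathrm{selu}(x) = \lambda\,\mathrm{elu}_\alpha(x), \qquad \alpha \approx 1.6732632,\ \lambda \approx 1.0507010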
package/dist/core.js CHANGED
@@ -1170,6 +1170,26 @@ class Tensor {
             return outGrad.mul(self.gt(0).add(self.le(0).mul(negativeSlope)));
         });
     }
+    // Tensor element-wise elu
+    elu(alpha = 1) {
+        return this.elementWiseSelfDAG((a) => a > 0 ? a : alpha * Math.expm1(a), (self, outGrad) => {
+            return outGrad.mul(self.gt(0).add(self.le(0).mul(self.exp().mul(alpha))));
+        });
+    }
+    // Tensor element-wise selu
+    selu() {
+        const alpha = 1.6732632423543772848170429916717;
+        const scale = 1.0507009873554804934193349852946;
+        return this.elementWiseSelfDAG((a) => scale * (a >= 0 ? a : alpha * Math.expm1(a)), (self, outGrad) => {
+            return outGrad.mul(self.gt(0).mul(scale).add(self.le(0).mul(self.exp().mul(alpha * scale))));
+        });
+    }
+    // Tensor element-wise celu
+    celu(alpha = 1) {
+        return this.elementWiseSelfDAG((a) => a >= 0 ? a : alpha * Math.expm1(a / alpha), (self, outGrad) => {
+            return outGrad.mul(self.gt(0).add(self.le(0).mul(self.div(alpha).exp())));
+        });
+    }
     // Tensor element-wise sigmoid
     sigmoid() {
         return this.elementWiseSelfDAG((a) => 1 / (1 + Math.exp(-a)), (self, outGrad) => {
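A minimal usage sketch of the new methods. The require("catniff") entry point and the array-valued Tensor constructor are assumptions based on the package's Torch-like API, not shown in this diff:

    // Sketch only: the `Tensor` export and the array constructor are
    // assumptions, not shown in this diff.
    const { Tensor } = require("catniff");

    const x = new Tensor([-2, -0.5, 0, 1.5]);

    const a = x.elu();    // alpha defaults to 1
    const b = x.elu(0.5); // negative side saturates toward -0.5
    const c = x.selu();   // fixed alpha/scale constants, no parameters
    const d = x.celu(2);  // negative side: 2 * (exp(x / 2) - 1)

All three route through elementWiseSelfDAG with a backward closure, so the outputs are differentiable in the same way as the existing relu and leakyRelu.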
package/dist/optim.d.ts CHANGED
@@ -55,10 +55,10 @@ declare class AdamW extends BaseOptimizer {
     constructor(params: Tensor[], options?: AdamWOptions);
     step(): void;
 }
-export declare class Optim {
-    static BaseOptimizer: typeof BaseOptimizer;
-    static SGD: typeof SGD;
-    static Adam: typeof Adam;
-    static AdamW: typeof AdamW;
-}
+export declare const Optim: {
+    BaseOptimizer: typeof BaseOptimizer;
+    SGD: typeof SGD;
+    Adam: typeof Adam;
+    AdamW: typeof AdamW;
+};
 export {};
package/dist/optim.js CHANGED
@@ -184,10 +184,9 @@ class AdamW extends BaseOptimizer {
         }
     }
 }
-class Optim {
-    static BaseOptimizer = BaseOptimizer;
-    static SGD = SGD;
-    static Adam = Adam;
-    static AdamW = AdamW;
-}
-exports.Optim = Optim;
+exports.Optim = {
+    BaseOptimizer,
+    SGD,
+    Adam,
+    AdamW
+};
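Functionally this is a refactor: Optim goes from a class holding static members to a plain exported object, so property access at existing call sites is unchanged. A sketch, assuming the package root re-exports Optim (the params array stands in for an existing Tensor[]):

    // Sketch: AdamWOptions fields are not shown in this diff, so the
    // optional options argument is omitted.
    const { Optim } = require("catniff");

    const opt = new Optim.AdamW(params);
    opt.step();

One observable difference: new Optim() (previously a legal, if pointless, construction) now throws, since Optim is no longer a constructor.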
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "catniff",
-  "version": "0.6.10",
+  "version": "0.6.11",
   "description": "A small Torch-like deep learning framework for Javascript",
   "main": "index.js",
   "scripts": {