catniff 0.9.0 → 0.9.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core.d.ts +1 -0
- package/dist/core.js +7 -8
- package/package.json +1 -1
package/dist/core.d.ts
CHANGED
@@ -202,6 +202,7 @@ export declare class Tensor {
     gelu(approximate?: string): Tensor;
     maximum(other: TensorValue | Tensor): Tensor;
     minimum(other: TensorValue | Tensor): Tensor;
+    copysign(other: TensorValue | Tensor): Tensor;
     round(): Tensor;
     floor(): Tensor;
     ceil(): Tensor;
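The only typings change is the new `copysign` declaration. A minimal usage sketch, assuming the package's main entry re-exports `Tensor` and that the constructor accepts a plain array (an assumption; only `new Tensor(0)` appears in this diff):

const { Tensor } = require("catniff");

// Hypothetical inputs: copysign keeps this tensor's magnitudes and takes
// the element-wise sign of `other`.
const magnitudes = new Tensor([1, -2, 3]);
const signs = new Tensor([-1, 1, -1]);
const out = magnitudes.copysign(signs); // expected values: [-1, 2, -3]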
package/dist/core.js
CHANGED
@@ -328,14 +328,9 @@ class Tensor {
         }
         const reducedGrad = accumGrad.sum(axesToReduce, true);
         const squeezedGrad = reducedGrad.squeeze(axesToSqueeze);
-        // Enforce 0-offset contiguous grads and correct dtype
         if (typeof tensor.grad === "undefined") {
-
-
-            if (grad.offset !== 0) {
-                grad = grad.clone();
-            }
-            tensor.grad = grad.contiguous().cast(tensor.dtype);
+            // Force default grad to have same shape and dtype as original tensor
+            tensor.grad = Tensor.zerosLike(tensor).add(squeezedGrad.cast(tensor.dtype));
         }
         else {
             tensor.grad = tensor.grad.add(squeezedGrad.cast(tensor.dtype));
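For context, a hedged restatement of what the new default-grad path does, written as a free function using only the calls visible in this hunk (`Tensor.zerosLike`, `add`, `cast`); the function name is illustrative, not part of the package:

function seedOrAccumulateGrad(tensor, squeezedGrad) {
    if (typeof tensor.grad === "undefined") {
        // zerosLike(tensor) is a fresh, contiguous, 0-offset tensor with
        // tensor's exact shape and dtype; add() then broadcasts the incoming
        // grad into that normalized layout, replacing the old path's manual
        // offset/clone/contiguous handling.
        tensor.grad = Tensor.zerosLike(tensor).add(squeezedGrad.cast(tensor.dtype));
    }
    else {
        tensor.grad = tensor.grad.add(squeezedGrad.cast(tensor.dtype));
    }
}

A side effect worth noting: because add() broadcasts, a freshly seeded grad now always matches the original tensor's shape, even if squeezedGrad arrives with fewer dimensions.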
@@ -1815,6 +1810,10 @@ class Tensor {
     minimum(other) {
         return this.elementWiseABDAG(other, (a, b) => Math.min(a, b), (self, other, outGrad) => outGrad.mul(self.lt(other).add(self.eq(other).mul(0.5))), (self, other, outGrad) => outGrad.mul(other.lt(self).add(other.eq(self).mul(0.5))));
     }
+    // Tensor element-wise copysign
+    copysign(other) {
+        return this.elementWiseABDAG(other, (a, b) => Math.abs(a) * (Object.is(b, -0) || b < 0 ? -1 : 1), (self, other, outGrad) => outGrad.mul(self.sign().mul(other.sign())), (self, other, outGrad) => new Tensor(0));
+    }
     // Tensor element-wise round
     round() {
         return this.elementWiseSelfDAG((a) => Math.round(a));
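A plain-number check of the forward rule used above. The `Object.is(b, -0)` test is what lets a negative-zero sign operand flip the result where a bare `b < 0` comparison would not:

const copysign = (a, b) => Math.abs(a) * (Object.is(b, -0) || b < 0 ? -1 : 1);

console.log(copysign(3, -5)); // -3
console.log(copysign(-3, 5)); //  3
console.log(copysign(2, -0)); // -2 (with only `b < 0`, this would be 2)

The two backward rules follow the same logic: the gradient w.r.t. `self` is `outGrad * sign(self) * sign(other)`, i.e. +outGrad where the sign was kept and -outGrad where it was flipped, while the gradient w.r.t. `other` is the constant `new Tensor(0)`, since the output's magnitude never depends on `other`.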
@@ -2235,7 +2234,7 @@ class Tensor {
         else {
             // Each group handles Cin/groups input channels and Cout/groups output channels.
             // chunk(groups, 2) splits the Cin*kH*kW axis into groups equal slices,
-            // each of size CinPerGroup*kH*kW
+            // each of size CinPerGroup*kH*kW - valid because reshape laid Cin outermost.
             const patchChunks = x.chunk(groups, 2); // Tensor[groups], each [N, Hout*Wout, CinPerGroup*kH*kW]
             const weightChunks = w.chunk(groups, 0); // Tensor[groups], each [Cout/groups, CinPerGroup*kH*kW]
             const groupOuts = patchChunks.map((patch, i) => patch.matmul(weightChunks[i].t()) // [N, Hout*Wout, Cout/groups]
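The amended comment carries the actual correctness argument: slicing axis 2 only respects group boundaries because the preceding reshape lays the patch axis out Cin-major, so each input channel owns a contiguous block of kH*kW columns. A small arithmetic sketch of that claim, with illustrative sizes not taken from the package:

// Patch axis layout is Cin-major: [c0k0..c0k8, c1k0..c1k8, c2k0.., c3k0..],
// so every channel occupies a contiguous run of kHkW columns.
const Cin = 4, kHkW = 9, groups = 2;
const CinPerGroup = Cin / groups;               // 2
const sliceWidth = (Cin * kHkW) / groups;       // 18
// Each chunk covers whole channels: chunk g holds channels g*CinPerGroup
// through (g+1)*CinPerGroup - 1, never a fragment of one.
console.log(sliceWidth === CinPerGroup * kHkW); // true
console.log(sliceWidth % kHkW === 0);           // true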