catniff 0.2.5 → 0.2.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/core.d.ts CHANGED
@@ -39,6 +39,8 @@ export declare class Tensor {
     squeeze(dims?: number[] | number): Tensor;
     unsqueeze(dim: number): Tensor;
     sum(dims?: number[] | number, keepDims?: boolean): Tensor;
+    prod(dims?: number[] | number, keepDims?: boolean): Tensor;
+    mean(dims?: number[] | number, keepDims?: boolean): Tensor;
     add(other: TensorValue | Tensor): Tensor;
     sub(other: TensorValue | Tensor): Tensor;
     mul(other: TensorValue | Tensor): Tensor;
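
For context (not part of the published diff), a minimal usage sketch of the two new reduction methods. It assumes Tensor is exported from the package root and that the constructor accepts a nested array; the diff below only shows the internal (value, { shape, strides }) form, so treat both as assumptions.

// Hypothetical usage sketch -- root export and nested-array construction are assumptions.
const { Tensor } = require("catniff");

const t = new Tensor([[1, 2], [3, 4]]);

const total = t.prod();           // product of every element: 1 * 2 * 3 * 4 = 24
const colMeans = t.mean(0);       // mean along dim 0: [(1 + 3) / 2, (2 + 4) / 2] = [2, 3]
const rowProds = t.prod(1, true); // per-row products with keepDims: shape [2, 1] -> [[2], [12]]
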
package/dist/core.js CHANGED
@@ -351,6 +351,7 @@ class Tensor {
             gradStrides = this.strides;
             gradValue = new Array(originalSize).fill(0);
         }
+        // Calculate new value after sum
         for (let index = 0; index < originalSize; index++) {
             const coords = Tensor.indexToCoords(index, this.shape, this.strides);
             // Force 0 on reduced axes to collapse into size-1 dims
@@ -362,7 +363,7 @@ class Tensor {
             outputValue[outFlatIndex] += this.value[realFlatIndex];
             // Mark for gradient if needed
             if (this.requiresGrad) {
-                (gradValue)[realFlatIndex] = 1;
+                gradValue[realFlatIndex] = 1;
             }
         }
         const out = new Tensor(outputValue, {
@@ -380,6 +381,136 @@ class Tensor {
         }
         return keepDims ? out : out.squeeze(dims);
     }
+    // Tensor product reduction
+    prod(dims, keepDims = false) {
+        if (typeof this.value === "number")
+            return new Tensor(this.value);
+        if (typeof dims === "number") {
+            dims = [dims];
+        }
+        if (typeof dims === "undefined") {
+            dims = Array.from({ length: this.shape.length }, (_, index) => index);
+        }
+        // Dims that are reduced now have size-1
+        const outputShape = this.shape.map((dim, i) => dims.includes(i) ? 1 : dim);
+        const outputStrides = Tensor.getStrides(outputShape);
+        const outputSize = Tensor.shapeToSize(outputShape);
+        const outputValue = new Array(outputSize).fill(1);
+        const originalSize = Tensor.shapeToSize(this.shape);
+        // Gradient data
+        let gradShape, gradStrides, gradValue = [];
+        // Allocate gradient data only when needed
+        if (this.requiresGrad) {
+            gradShape = this.shape;
+            gradStrides = this.strides;
+            gradValue = new Array(originalSize).fill(0);
+        }
+        // Calculate new value after multiplying
+        for (let index = 0; index < originalSize; index++) {
+            const coords = Tensor.indexToCoords(index, this.shape, this.strides);
+            // Force 0 on reduced axes to collapse into size-1 dims
+            const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
+            // Convert output coordinates to flat index
+            const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
+            // Accumulate, outFlatIndex should match multiple realFlatIndexes
+            const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
+            outputValue[outFlatIndex] *= this.value[realFlatIndex];
+        }
+        const out = new Tensor(outputValue, {
+            shape: outputShape,
+            strides: outputStrides
+        });
+        // Set up gradient if needed
+        if (this.requiresGrad) {
+            // Grad is the product of other elements of the same axis, which is product of all els divided by the current value
+            for (let index = 0; index < originalSize; index++) {
+                const coords = Tensor.indexToCoords(index, this.shape, this.strides);
+                // Force 0 on reduced axes to collapse into size-1 dims
+                const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
+                // Convert output coordinates to flat index
+                const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
+                // Accumulate, outFlatIndex should match multiple realFlatIndexes
+                const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
+                // Calculate gradient at position
+                gradValue[realFlatIndex] = outputValue[outFlatIndex] / this.value[realFlatIndex];
+            }
+            out.requiresGrad = true;
+            out.children.push(this);
+            out.gradFn = () => {
+                const localGrad = new Tensor(gradValue, { shape: gradShape, strides: gradStrides });
+                Tensor.addGrad(this, out.grad.withGrad(false).mul(localGrad));
+            };
+        }
+        return keepDims ? out : out.squeeze(dims);
+    }
+    // Tensor mean reduction
+    mean(dims, keepDims = false) {
+        if (typeof this.value === "number")
+            return new Tensor(this.value);
+        if (typeof dims === "number") {
+            dims = [dims];
+        }
+        if (typeof dims === "undefined") {
+            dims = Array.from({ length: this.shape.length }, (_, index) => index);
+        }
+        // Dims that are reduced now have size-1
+        const outputShape = this.shape.map((dim, i) => dims.includes(i) ? 1 : dim);
+        const outputStrides = Tensor.getStrides(outputShape);
+        const outputSize = Tensor.shapeToSize(outputShape);
+        const outputValue = new Array(outputSize).fill(0);
+        const outputFeeders = new Array(outputSize).fill(0);
+        const originalSize = Tensor.shapeToSize(this.shape);
+        // Gradient data
+        let gradShape, gradStrides, gradValue = [];
+        // Allocate gradient data only when needed
+        if (this.requiresGrad) {
+            gradShape = this.shape;
+            gradStrides = this.strides;
+            gradValue = new Array(originalSize).fill(0);
+        }
+        // Calculate sums and how many elements contribute to specific positions
+        for (let index = 0; index < originalSize; index++) {
+            const coords = Tensor.indexToCoords(index, this.shape, this.strides);
+            // Force 0 on reduced axes to collapse into size-1 dims
+            const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
+            // Convert output coordinates to flat index
+            const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
+            // Accumulate, outFlatIndex should match multiple realFlatIndexes
+            const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
+            outputValue[outFlatIndex] += this.value[realFlatIndex];
+            outputFeeders[outFlatIndex]++;
+        }
+        // Calculate mean by dividing sum by the number of contributors to the position
+        for (let index = 0; index < outputSize; index++) {
+            outputValue[index] /= outputFeeders[index];
+        }
+        const out = new Tensor(outputValue, {
+            shape: outputShape,
+            strides: outputStrides
+        });
+        // Set up gradient if needed
+        if (this.requiresGrad) {
+            // Calculate grad by assigning 1 divided by the number of contributors to the position
+            for (let index = 0; index < originalSize; index++) {
+                const coords = Tensor.indexToCoords(index, this.shape, this.strides);
+                // Force 0 on reduced axes to collapse into size-1 dims
+                const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
+                // Convert output coordinates to flat index
+                const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
+                // Accumulate, outFlatIndex should match multiple realFlatIndexes
+                const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
+                // Mean = 1/n * (el1 + el2 + ... + eln) so grad = 1/n
+                gradValue[realFlatIndex] = 1 / outputFeeders[outFlatIndex];
+            }
+            out.requiresGrad = true;
+            out.children.push(this);
+            out.gradFn = () => {
+                const localGrad = new Tensor(gradValue, { shape: gradShape, strides: gradStrides });
+                Tensor.addGrad(this, out.grad.withGrad(false).mul(localGrad));
+            };
+        }
+        return keepDims ? out : out.squeeze(dims);
+    }
     // Tensor element-wise addition
     add(other) {
         return this.elementWiseABDAG(other, (a, b) => a + b, (self, other, outGrad) => outGrad, (self, other, outGrad) => outGrad);
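
The local gradients wired up above follow directly from the reduction formulas: for a product p of a reduction group, the derivative with respect to element x is p / x, and for a mean of n elements it is 1 / n. A standalone sketch (plain JavaScript, independent of catniff's API) mirroring those formulas for one reduction group:

// Standalone check of the local-gradient formulas used by prod() and mean().
const xs = [2, 3, 4];

// prod: gradient of the group product w.r.t. each element is product / element.
const product = xs.reduce((acc, x) => acc * x, 1); // 24
const prodGrads = xs.map((x) => product / x);      // [12, 8, 6]

// mean: every element contributes 1/n to the group mean.
const meanGrads = xs.map(() => 1 / xs.length);     // [1/3, 1/3, 1/3]

console.log(prodGrads, meanGrads);

Note that the product / element formulation divides by the original value, so a group containing a zero yields an Infinity or NaN local gradient; handling that case exactly would require computing the product of the other elements instead.
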
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "catniff",
-  "version": "0.2.5",
+  "version": "0.2.6",
   "description": "A cute autograd engine for Javascript",
   "main": "index.js",
   "scripts": {