catniff 0.2.11 → 0.2.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -87,6 +87,9 @@ I'm mostly just learning and playing with this currently, so there are no concre
  * Proper documentation.
  * GPU acceleration.
  * Some general neural net APIs.
+ * Refactor code.
+ * Proper tests.
+ * Option to load more backends.
 
  ## Copyrights and License
 
package/dist/core.d.ts CHANGED
@@ -41,6 +41,8 @@ export declare class Tensor {
  sum(dims?: number[] | number, keepDims?: boolean): Tensor;
  prod(dims?: number[] | number, keepDims?: boolean): Tensor;
  mean(dims?: number[] | number, keepDims?: boolean): Tensor;
+ max(dims?: number[] | number, keepDims?: boolean): Tensor;
+ min(dims?: number[] | number, keepDims?: boolean): Tensor;
  add(other: TensorValue | Tensor): Tensor;
  sub(other: TensorValue | Tensor): Tensor;
  subtract: (other: TensorValue | Tensor) => Tensor;
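
The two new declarations mirror the existing reduction signatures (optional `dims`, optional `keepDims`). As a rough usage sketch — assuming the package root re-exports `Tensor` and accepts a plain number array as a tensor value, which this diff does not itself confirm — the calls could look like:

```ts
import { Tensor } from "catniff"; // assumed export path

// 1-D tensor built from a plain array, as elsewhere in the API
const t = new Tensor([1, 5, 3]);

// With dims omitted, every axis is reduced and the size-1 dims are squeezed away
const hi = t.max(); // tensor holding 5
const lo = t.min(); // tensor holding 1

// keepDims = true keeps the reduced axis as size 1, matching sum/prod/mean
const hiKeep = t.max(0, true); // shape [1]
```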
package/dist/core.js CHANGED
@@ -252,7 +252,7 @@ class Tensor {
  // Tensor squeeze
  squeeze(dims) {
  if (typeof this.value === "number")
- return new Tensor(this.value);
+ return this;
  if (typeof dims === "number") {
  dims = [dims];
  }
@@ -300,10 +300,9 @@ class Tensor {
  }
  // Tensor unsqueeze - adds dimension of size 1 at specified position
  unsqueeze(dim) {
- if (typeof this.value === "number")
- return new Tensor([this.value]);
- if (dim < 0 || dim > this.shape.length) {
- throw new Error(`Invalid dimension ${dim} for unsqueeze`);
+ let thisValue = this.value;
+ if (typeof thisValue === "number") {
+ thisValue = [thisValue];
  }
  // Insert size-1 dimension at specified position
  const newShape = [...this.shape];
@@ -320,7 +319,7 @@ class Tensor {
  newDimStride = this.strides[dim] * this.shape[dim];
  }
  newStrides.splice(dim, 0, newDimStride);
- const out = new Tensor(this.value, { shape: newShape, strides: newStrides });
+ const out = new Tensor(thisValue, { shape: newShape, strides: newStrides });
  // Set up gradient if needed
  if (this.requiresGrad) {
  out.requiresGrad = true;
@@ -334,7 +333,7 @@ class Tensor {
  // Tensor sum reduction
  sum(dims, keepDims = false) {
  if (typeof this.value === "number")
- return new Tensor(this.value);
+ return this;
  if (typeof dims === "number") {
  dims = [dims];
  }
@@ -364,6 +363,7 @@ class Tensor {
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
  // Accumulate, outFlatIndex should match multiple realFlatIndexes
  const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
+ // Add into sum
  outputValue[outFlatIndex] += this.value[realFlatIndex];
  // Mark for gradient if needed
  if (this.requiresGrad) {
@@ -388,7 +388,7 @@ class Tensor {
  // Tensor product reduction
  prod(dims, keepDims = false) {
  if (typeof this.value === "number")
- return new Tensor(this.value);
+ return this;
  if (typeof dims === "number") {
  dims = [dims];
  }
@@ -401,14 +401,6 @@ class Tensor {
  const outputSize = Tensor.shapeToSize(outputShape);
  const outputValue = new Array(outputSize).fill(1);
  const originalSize = Tensor.shapeToSize(this.shape);
- // Gradient data
- let gradShape, gradStrides, gradValue = [];
- // Allocate gradient data only when needed
- if (this.requiresGrad) {
- gradShape = this.shape;
- gradStrides = this.strides;
- gradValue = new Array(originalSize).fill(0);
- }
  // Calculate new value after multiplying
  for (let index = 0; index < originalSize; index++) {
  const coords = Tensor.indexToCoords(index, this.strides);
@@ -418,6 +410,7 @@ class Tensor {
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
  // Accumulate, outFlatIndex should match multiple realFlatIndexes
  const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
+ // Multiply into product
  outputValue[outFlatIndex] *= this.value[realFlatIndex];
  }
  const out = new Tensor(outputValue, {
@@ -426,7 +419,7 @@ class Tensor {
  });
  // Set up gradient if needed
  if (this.requiresGrad) {
- // Grad is the product of other elements of the same axis, which is product of all els divided by the current value
+ const gradShape = this.shape, gradStrides = this.strides, gradValue = new Array(originalSize).fill(0);
  for (let index = 0; index < originalSize; index++) {
  const coords = Tensor.indexToCoords(index, this.strides);
  // Force 0 on reduced axes to collapse into size-1 dims
@@ -435,7 +428,7 @@ class Tensor {
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
  // Accumulate, outFlatIndex should match multiple realFlatIndexes
  const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
- // Calculate gradient at position
+ // Grad is the product of other elements of the same axis, which is product of all els divided by the current value
  gradValue[realFlatIndex] = outputValue[outFlatIndex] / this.value[realFlatIndex];
  }
  out.requiresGrad = true;
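
The relocated comment in the hunk above captures the rule for `prod`: each element's local gradient is the product of the other elements along the reduced axis, computed here as the full product divided by the element itself. A quick numeric sketch of that identity (plain TypeScript, not the library's API):

```ts
// For x = [2, 3, 4]: prod = 24, and d(prod)/dx_i = prod / x_i
const x = [2, 3, 4];
const prod = x.reduce((a, b) => a * b, 1); // 24
const grad = x.map(v => prod / v);         // [12, 8, 6]
// Note: the divide-by-current-value trick assumes no element is exactly 0.
console.log(prod, grad);
```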
@@ -450,7 +443,7 @@ class Tensor {
  // Tensor mean reduction
  mean(dims, keepDims = false) {
  if (typeof this.value === "number")
- return new Tensor(this.value);
+ return this;
  if (typeof dims === "number") {
  dims = [dims];
  }
@@ -464,14 +457,6 @@ class Tensor {
  const outputValue = new Array(outputSize).fill(0);
  const outputFeeders = new Array(outputSize).fill(0);
  const originalSize = Tensor.shapeToSize(this.shape);
- // Gradient data
- let gradShape, gradStrides, gradValue = [];
- // Allocate gradient data only when needed
- if (this.requiresGrad) {
- gradShape = this.shape;
- gradStrides = this.strides;
- gradValue = new Array(originalSize).fill(0);
- }
  // Calculate sums and how many elements contribute to specific positions
  for (let index = 0; index < originalSize; index++) {
  const coords = Tensor.indexToCoords(index, this.strides);
@@ -481,6 +466,7 @@ class Tensor {
  const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
  // Accumulate, outFlatIndex should match multiple realFlatIndexes
  const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
+ // Calculate sum and contributors to the sum
  outputValue[outFlatIndex] += this.value[realFlatIndex];
  outputFeeders[outFlatIndex]++;
  }
@@ -494,7 +480,8 @@ class Tensor {
  });
  // Set up gradient if needed
  if (this.requiresGrad) {
- // Calculate grad by assiging 1 divide by the number of contributors to the position
+ const gradShape = this.shape, gradStrides = this.strides, gradValue = new Array(originalSize).fill(0);
+ // Calculate grad by assigning 1 divided by the number of contributors to the position
  for (let index = 0; index < originalSize; index++) {
  const coords = Tensor.indexToCoords(index, this.strides);
  // Force 0 on reduced axes to collapse into size-1 dims
@@ -515,6 +502,120 @@ class Tensor {
  }
  return keepDims ? out : out.squeeze(dims);
  }
+ // Tensor maximum reduction
+ max(dims, keepDims = false) {
+ if (typeof this.value === "number")
+ return this;
+ if (typeof dims === "number") {
+ dims = [dims];
+ }
+ if (typeof dims === "undefined") {
+ dims = Array.from({ length: this.shape.length }, (_, index) => index);
+ }
+ // Dims that are reduced now have size-1
+ const outputShape = this.shape.map((dim, i) => dims.includes(i) ? 1 : dim);
+ const outputStrides = Tensor.getStrides(outputShape);
+ const outputSize = Tensor.shapeToSize(outputShape);
+ const outputValue = new Array(outputSize).fill(-Infinity);
+ const originalSize = Tensor.shapeToSize(this.shape);
+ // Calculate maximum values of axes
+ for (let index = 0; index < originalSize; index++) {
+ const coords = Tensor.indexToCoords(index, this.strides);
+ // Force 0 on reduced axes to collapse into size-1 dims
+ const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
+ // Convert output coordinates to flat index
+ const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
+ // Accumulate, outFlatIndex should match multiple realFlatIndexes
+ const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
+ // Get max over time
+ if (this.value[realFlatIndex] > outputValue[outFlatIndex]) {
+ outputValue[outFlatIndex] = this.value[realFlatIndex];
+ }
+ }
+ const out = new Tensor(outputValue, {
+ shape: outputShape,
+ strides: outputStrides
+ });
+ // Set up gradient if needed
+ if (this.requiresGrad) {
+ const gradShape = this.shape, gradStrides = this.strides, gradValue = new Array(originalSize).fill(0);
+ for (let index = 0; index < originalSize; index++) {
+ const coords = Tensor.indexToCoords(index, this.strides);
+ // Force 0 on reduced axes to collapse into size-1 dims
+ const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
+ // Convert output coordinates to flat index
+ const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
+ // Accumulate, outFlatIndex should match multiple realFlatIndexes
+ const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
+ // Calculate grad by checking if a position holds a value equal to the max value
+ gradValue[realFlatIndex] = outputValue[outFlatIndex] === this.value[realFlatIndex] ? 1 : 0;
+ }
+ out.requiresGrad = true;
+ out.children.push(this);
+ out.gradFn = () => {
+ const localGrad = new Tensor(gradValue, { shape: gradShape, strides: gradStrides });
+ Tensor.addGrad(this, out.grad.withGrad(false).mul(localGrad));
+ };
+ }
+ return keepDims ? out : out.squeeze(dims);
+ }
+ // Tensor minimum reduction
+ min(dims, keepDims = false) {
+ if (typeof this.value === "number")
+ return this;
+ if (typeof dims === "number") {
+ dims = [dims];
+ }
+ if (typeof dims === "undefined") {
+ dims = Array.from({ length: this.shape.length }, (_, index) => index);
+ }
+ // Dims that are reduced now have size-1
+ const outputShape = this.shape.map((dim, i) => dims.includes(i) ? 1 : dim);
+ const outputStrides = Tensor.getStrides(outputShape);
+ const outputSize = Tensor.shapeToSize(outputShape);
+ const outputValue = new Array(outputSize).fill(Infinity);
+ const originalSize = Tensor.shapeToSize(this.shape);
+ // Calculate minimum values of axes
+ for (let index = 0; index < originalSize; index++) {
+ const coords = Tensor.indexToCoords(index, this.strides);
+ // Force 0 on reduced axes to collapse into size-1 dims
+ const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
+ // Convert output coordinates to flat index
+ const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
+ // Accumulate, outFlatIndex should match multiple realFlatIndexes
+ const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
+ // Get min over time
+ if (this.value[realFlatIndex] < outputValue[outFlatIndex]) {
+ outputValue[outFlatIndex] = this.value[realFlatIndex];
+ }
+ }
+ const out = new Tensor(outputValue, {
+ shape: outputShape,
+ strides: outputStrides
+ });
+ // Set up gradient if needed
+ if (this.requiresGrad) {
+ const gradShape = this.shape, gradStrides = this.strides, gradValue = new Array(originalSize).fill(0);
+ for (let index = 0; index < originalSize; index++) {
+ const coords = Tensor.indexToCoords(index, this.strides);
+ // Force 0 on reduced axes to collapse into size-1 dims
+ const outCoords = coords.map((val, i) => dims.includes(i) ? 0 : val);
+ // Convert output coordinates to flat index
+ const outFlatIndex = Tensor.coordsToIndex(outCoords, outputStrides);
+ // Accumulate, outFlatIndex should match multiple realFlatIndexes
+ const realFlatIndex = Tensor.coordsToIndex(coords, this.strides);
+ // Calculate grad by checking if a position holds a value equal to the min value
+ gradValue[realFlatIndex] = outputValue[outFlatIndex] === this.value[realFlatIndex] ? 1 : 0;
+ }
+ out.requiresGrad = true;
+ out.children.push(this);
+ out.gradFn = () => {
+ const localGrad = new Tensor(gradValue, { shape: gradShape, strides: gradStrides });
+ Tensor.addGrad(this, out.grad.withGrad(false).mul(localGrad));
+ };
+ }
+ return keepDims ? out : out.squeeze(dims);
+ }
  // Tensor element-wise addition
  add(other) {
  return this.elementWiseABDAG(other, (a, b) => a + b, (self, other, outGrad) => outGrad, (self, other, outGrad) => outGrad);
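
Worth noting from the hunk above: both new reductions use an indicator mask as their local gradient, so every position equal to the reduced max/min receives gradient 1 and all others 0 (ties therefore all receive gradient, rather than a single index being chosen). A standalone sketch of that masking idea, independent of catniff's own classes:

```ts
// Standalone illustration of the indicator-mask gradient used by max()/min() above.
// Not catniff's API — just the same rule applied to a flat array reduced over all elements.
function maxGradMask(values: number[]): number[] {
  const maxVal = Math.max(...values);
  // 1 where the element equals the reduced maximum, 0 elsewhere; ties all get 1
  return values.map(v => (v === maxVal ? 1 : 0));
}

console.log(maxGradMask([1, 5, 3, 5])); // [0, 1, 0, 1] — both tied maxima receive gradient
```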
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "catniff",
- "version": "0.2.11",
+ "version": "0.2.12",
  "description": "A cute autograd engine for Javascript",
  "main": "index.js",
  "scripts": {