@elaraai/east-py-datascience 0.0.2-beta.1 → 0.0.2-beta.5

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -258,6 +258,202 @@ export declare const torch_mlp_predict: import("@elaraai/east").PlatformDefiniti
258
258
  output_dim: IntegerType;
259
259
  }>;
260
260
  }>, ArrayType<ArrayType<FloatType>>], ArrayType<FloatType>>;
261
+ /**
262
+ * Train a PyTorch MLP model with multi-output support.
263
+ *
264
+ * Supports multi-output regression (predicting multiple values per sample)
265
+ * and autoencoders (where input equals target for reconstruction learning).
266
+ * Output dimension is inferred from y.shape[1] unless overridden in config.
267
+ *
268
+ * @param X - Feature matrix (n_samples x n_features)
269
+ * @param y - Target matrix (n_samples x n_outputs)
270
+ * @param mlp_config - MLP architecture configuration
271
+ * @param train_config - Training configuration
272
+ * @returns Model blob and training result
273
+ */
274
+ export declare const torch_mlp_train_multi: import("@elaraai/east").PlatformDefinition<[ArrayType<ArrayType<FloatType>>, ArrayType<ArrayType<FloatType>>, StructType<{
275
+ /** Hidden layer sizes, e.g., [64, 32] */
276
+ hidden_layers: ArrayType<IntegerType>;
277
+ /** Activation function (default relu) */
278
+ activation: OptionType<VariantType<{
279
+ /** Rectified Linear Unit */
280
+ relu: StructType<{}>;
281
+ /** Hyperbolic tangent */
282
+ tanh: StructType<{}>;
283
+ /** Sigmoid function */
284
+ sigmoid: StructType<{}>;
285
+ /** Leaky ReLU */
286
+ leaky_relu: StructType<{}>;
287
+ }>>;
288
+ /** Dropout rate (default 0.0) */
289
+ dropout: OptionType<FloatType>;
290
+ /** Output dimension (default 1) */
291
+ output_dim: OptionType<IntegerType>;
292
+ }>, StructType<{
293
+ /** Number of epochs (default 100) */
294
+ epochs: OptionType<IntegerType>;
295
+ /** Batch size (default 32) */
296
+ batch_size: OptionType<IntegerType>;
297
+ /** Learning rate (default 0.001) */
298
+ learning_rate: OptionType<FloatType>;
299
+ /** Loss function (default mse) */
300
+ loss: OptionType<VariantType<{
301
+ /** Mean Squared Error (regression) */
302
+ mse: StructType<{}>;
303
+ /** Mean Absolute Error (regression) */
304
+ mae: StructType<{}>;
305
+ /** Cross Entropy (classification) */
306
+ cross_entropy: StructType<{}>;
307
+ }>>;
308
+ /** Optimizer (default adam) */
309
+ optimizer: OptionType<VariantType<{
310
+ /** Adam optimizer */
311
+ adam: StructType<{}>;
312
+ /** Stochastic Gradient Descent */
313
+ sgd: StructType<{}>;
314
+ /** AdamW with weight decay */
315
+ adamw: StructType<{}>;
316
+ /** RMSprop optimizer */
317
+ rmsprop: StructType<{}>;
318
+ }>>;
319
+ /** Early stopping patience, 0 = disabled */
320
+ early_stopping: OptionType<IntegerType>;
321
+ /** Validation split fraction (default 0.2) */
322
+ validation_split: OptionType<FloatType>;
323
+ /** Random seed for reproducibility */
324
+ random_state: OptionType<IntegerType>;
325
+ }>], StructType<{
326
+ /** Trained model blob */
327
+ model: VariantType<{
328
+ torch_mlp: StructType<{
329
+ data: BlobType;
330
+ n_features: IntegerType;
331
+ hidden_layers: ArrayType<IntegerType>;
332
+ output_dim: IntegerType;
333
+ }>;
334
+ }>;
335
+ /** Training result with losses */
336
+ result: StructType<{
337
+ /** Training loss per epoch */
338
+ train_losses: ArrayType<FloatType>;
339
+ /** Validation loss per epoch */
340
+ val_losses: ArrayType<FloatType>;
341
+ /** Best epoch (for early stopping) */
342
+ best_epoch: IntegerType;
343
+ }>;
344
+ }>>;
345
+ /**
346
+ * Make predictions with a trained PyTorch MLP (multi-output).
347
+ *
348
+ * Returns a matrix where each row contains the predicted outputs for a sample.
349
+ *
350
+ * @param model - Trained MLP model blob
351
+ * @param X - Feature matrix (n_samples x n_features)
352
+ * @returns Predicted matrix (n_samples x n_outputs)
353
+ */
354
+ export declare const torch_mlp_predict_multi: import("@elaraai/east").PlatformDefinition<[VariantType<{
355
+ /** PyTorch MLP model */
356
+ torch_mlp: StructType<{
357
+ /** Cloudpickle serialized model */
358
+ data: BlobType;
359
+ /** Number of input features */
360
+ n_features: IntegerType;
361
+ /** Hidden layer sizes */
362
+ hidden_layers: ArrayType<IntegerType>;
363
+ /** Output dimension */
364
+ output_dim: IntegerType;
365
+ }>;
366
+ }>, ArrayType<ArrayType<FloatType>>], ArrayType<ArrayType<FloatType>>>;
367
+ /**
368
+ * Extract intermediate layer activations (embeddings) from a trained MLP.
369
+ *
370
+ * For autoencoders, this allows extracting the bottleneck representation.
371
+ * The layer_index specifies which hidden layer's output to return (0-indexed).
372
+ *
373
+ * For an autoencoder with architecture [input -> 8 -> 2 -> 8 -> output]
374
+ * (hidden_layers: [8, 2, 8]):
375
+ * - layer_index=0: output after first hidden layer (8 features)
376
+ * - layer_index=1: output after second hidden layer (2 features) <- bottleneck
377
+ * - layer_index=2: output after third hidden layer (8 features)
378
+ *
379
+ * @param model - Trained MLP model blob
380
+ * @param X - Feature matrix (n_samples x n_features)
381
+ * @param layer_index - Which hidden layer's output to return (0-indexed)
382
+ * @returns Embedding matrix (n_samples x hidden_dim at that layer)
383
+ *
384
+ * @example
385
+ * ```ts
386
+ * // Train autoencoder: 4 features -> 8 -> 2 (bottleneck) -> 8 -> 4 features
387
+ * const mlp_config = $.let({
388
+ * hidden_layers: [8n, 2n, 8n],
389
+ * activation: variant('some', variant('relu', {})),
390
+ * dropout: variant('none', null),
391
+ * output_dim: variant('none', null),
392
+ * });
393
+ * const output = $.let(Torch.mlpTrainMulti(X, X, mlp_config, train_config));
394
+ *
395
+ * // Extract bottleneck embeddings (layer_index=1 for the 2-dim bottleneck)
396
+ * const embeddings = $.let(Torch.mlpEncode(output.model, X, 1n));
397
+ * // embeddings is now (n_samples x 2)
398
+ * ```
399
+ */
400
+ export declare const torch_mlp_encode: import("@elaraai/east").PlatformDefinition<[VariantType<{
401
+ /** PyTorch MLP model */
402
+ torch_mlp: StructType<{
403
+ /** Cloudpickle serialized model */
404
+ data: BlobType;
405
+ /** Number of input features */
406
+ n_features: IntegerType;
407
+ /** Hidden layer sizes */
408
+ hidden_layers: ArrayType<IntegerType>;
409
+ /** Output dimension */
410
+ output_dim: IntegerType;
411
+ }>;
412
+ }>, ArrayType<ArrayType<FloatType>>, IntegerType], ArrayType<ArrayType<FloatType>>>;
413
+ /**
414
+ * Decode embeddings back through the decoder portion of an MLP.
415
+ *
416
+ * For autoencoders, this takes bottleneck activations and runs them through
417
+ * the decoder to reconstruct the output. This is the complement to mlpEncode.
418
+ *
419
+ * For an autoencoder with architecture [input -> 8 -> 2 -> 8 -> output]
420
+ * (hidden_layers: [8, 2, 8]):
421
+ * - layer_index=1: Start from the 2-dim bottleneck, run through layers 2+ to output
422
+ * - layer_index=0: Start from the 8-dim first layer, run through layers 1+ to output
423
+ *
424
+ * Use case: Compute weighted average of origin embeddings, then decode to
425
+ * get the reconstructed blend weight distribution.
426
+ *
427
+ * @param model - Trained MLP model blob
428
+ * @param embeddings - Embedding matrix (n_samples x hidden_dim at layer_index)
429
+ * @param layer_index - Which hidden layer the embeddings come from (0-indexed)
430
+ * @returns Decoded output matrix (n_samples x output_dim)
431
+ *
432
+ * @example
433
+ * ```ts
434
+ * // After training autoencoder and extracting embeddings...
435
+ * const origin_embeddings = $.let(Torch.mlpEncode(output.model, X_onehot, 1n));
436
+ *
437
+ * // Compute weighted blend embedding (e.g., 50% origin A + 50% origin B)
438
+ * const blend_embedding = $.let(...); // weighted average of origin embeddings
439
+ *
440
+ * // Decode back to weight distribution
441
+ * const reconstructed = $.let(Torch.mlpDecode(output.model, blend_embedding, 1n));
442
+ * ```
443
+ */
444
+ export declare const torch_mlp_decode: import("@elaraai/east").PlatformDefinition<[VariantType<{
445
+ /** PyTorch MLP model */
446
+ torch_mlp: StructType<{
447
+ /** Cloudpickle serialized model */
448
+ data: BlobType;
449
+ /** Number of input features */
450
+ n_features: IntegerType;
451
+ /** Hidden layer sizes */
452
+ hidden_layers: ArrayType<IntegerType>;
453
+ /** Output dimension */
454
+ output_dim: IntegerType;
455
+ }>;
456
+ }>, ArrayType<ArrayType<FloatType>>, IntegerType], ArrayType<ArrayType<FloatType>>>;
261
457
  /**
262
458
  * Type definitions for PyTorch functions.
263
459
  */
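The multi-output training and prediction functions declared above document their config structs in detail but, unlike `torch_mlp_encode` and `torch_mlp_decode`, carry no usage example. Below is a minimal sketch in the same style as the existing examples; it assumes the same `$`, `variant`, and `Torch` helpers, and hypothetical pre-bound matrices `X` (n_samples x n_features) and `Y` (n_samples x n_outputs).

```ts
// Sketch only: field names mirror the documented TorchMLPConfigType and
// TorchTrainConfigType structs; X and Y are assumed to be bound earlier.
const mlp_config = $.let({
    hidden_layers: [64n, 32n],
    activation: variant('some', variant('relu', {})),
    dropout: variant('some', 0.1),
    output_dim: variant('none', null),       // inferred from Y's column count
});
const train_config = $.let({
    epochs: variant('some', 200n),
    batch_size: variant('none', null),       // default 32
    learning_rate: variant('some', 0.001),
    loss: variant('some', variant('mse', {})),
    optimizer: variant('some', variant('adam', {})),
    early_stopping: variant('some', 10n),
    validation_split: variant('none', null), // default 0.2
    random_state: variant('some', 42n),
});
const output = $.let(Torch.mlpTrainMulti(X, Y, mlp_config, train_config));
// Predictions come back as a matrix (n_samples x n_outputs).
const predictions = $.let(Torch.mlpPredictMulti(output.model, X));
```

Option-typed fields use the same `variant('some', ...)` / `variant('none', null)` wrapping as the `mlpEncode` example, and integer fields take bigint literals.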
@@ -431,7 +627,7 @@ export declare const TorchTypes: {
431
627
  * ```
432
628
  */
433
629
  export declare const Torch: {
434
- /** Train MLP model */
630
+ /** Train MLP model (single output) */
435
631
  readonly mlpTrain: import("@elaraai/east").PlatformDefinition<[ArrayType<ArrayType<FloatType>>, ArrayType<FloatType>, StructType<{
436
632
  /** Hidden layer sizes, e.g., [64, 32] */
437
633
  hidden_layers: ArrayType<IntegerType>;
@@ -503,7 +699,7 @@ export declare const Torch: {
503
699
  best_epoch: IntegerType;
504
700
  }>;
505
701
  }>>;
506
- /** Make predictions with MLP */
702
+ /** Make predictions with MLP (single output) */
507
703
  readonly mlpPredict: import("@elaraai/east").PlatformDefinition<[VariantType<{
508
704
  /** PyTorch MLP model */
509
705
  torch_mlp: StructType<{
@@ -517,6 +713,120 @@ export declare const Torch: {
517
713
  output_dim: IntegerType;
518
714
  }>;
519
715
  }>, ArrayType<ArrayType<FloatType>>], ArrayType<FloatType>>;
716
+ /** Train MLP model (multi-output) */
717
+ readonly mlpTrainMulti: import("@elaraai/east").PlatformDefinition<[ArrayType<ArrayType<FloatType>>, ArrayType<ArrayType<FloatType>>, StructType<{
718
+ /** Hidden layer sizes, e.g., [64, 32] */
719
+ hidden_layers: ArrayType<IntegerType>;
720
+ /** Activation function (default relu) */
721
+ activation: OptionType<VariantType<{
722
+ /** Rectified Linear Unit */
723
+ relu: StructType<{}>;
724
+ /** Hyperbolic tangent */
725
+ tanh: StructType<{}>;
726
+ /** Sigmoid function */
727
+ sigmoid: StructType<{}>;
728
+ /** Leaky ReLU */
729
+ leaky_relu: StructType<{}>;
730
+ }>>;
731
+ /** Dropout rate (default 0.0) */
732
+ dropout: OptionType<FloatType>;
733
+ /** Output dimension (default 1) */
734
+ output_dim: OptionType<IntegerType>;
735
+ }>, StructType<{
736
+ /** Number of epochs (default 100) */
737
+ epochs: OptionType<IntegerType>;
738
+ /** Batch size (default 32) */
739
+ batch_size: OptionType<IntegerType>;
740
+ /** Learning rate (default 0.001) */
741
+ learning_rate: OptionType<FloatType>;
742
+ /** Loss function (default mse) */
743
+ loss: OptionType<VariantType<{
744
+ /** Mean Squared Error (regression) */
745
+ mse: StructType<{}>;
746
+ /** Mean Absolute Error (regression) */
747
+ mae: StructType<{}>;
748
+ /** Cross Entropy (classification) */
749
+ cross_entropy: StructType<{}>;
750
+ }>>;
751
+ /** Optimizer (default adam) */
752
+ optimizer: OptionType<VariantType<{
753
+ /** Adam optimizer */
754
+ adam: StructType<{}>;
755
+ /** Stochastic Gradient Descent */
756
+ sgd: StructType<{}>;
757
+ /** AdamW with weight decay */
758
+ adamw: StructType<{}>;
759
+ /** RMSprop optimizer */
760
+ rmsprop: StructType<{}>;
761
+ }>>;
762
+ /** Early stopping patience, 0 = disabled */
763
+ early_stopping: OptionType<IntegerType>;
764
+ /** Validation split fraction (default 0.2) */
765
+ validation_split: OptionType<FloatType>;
766
+ /** Random seed for reproducibility */
767
+ random_state: OptionType<IntegerType>;
768
+ }>], StructType<{
769
+ /** Trained model blob */
770
+ model: VariantType<{
771
+ torch_mlp: StructType<{
772
+ data: BlobType;
773
+ n_features: IntegerType;
774
+ hidden_layers: ArrayType<IntegerType>;
775
+ output_dim: IntegerType;
776
+ }>;
777
+ }>;
778
+ /** Training result with losses */
779
+ result: StructType<{
780
+ /** Training loss per epoch */
781
+ train_losses: ArrayType<FloatType>;
782
+ /** Validation loss per epoch */
783
+ val_losses: ArrayType<FloatType>;
784
+ /** Best epoch (for early stopping) */
785
+ best_epoch: IntegerType;
786
+ }>;
787
+ }>>;
788
+ /** Make predictions with MLP (multi-output) */
789
+ readonly mlpPredictMulti: import("@elaraai/east").PlatformDefinition<[VariantType<{
790
+ /** PyTorch MLP model */
791
+ torch_mlp: StructType<{
792
+ /** Cloudpickle serialized model */
793
+ data: BlobType;
794
+ /** Number of input features */
795
+ n_features: IntegerType;
796
+ /** Hidden layer sizes */
797
+ hidden_layers: ArrayType<IntegerType>;
798
+ /** Output dimension */
799
+ output_dim: IntegerType;
800
+ }>;
801
+ }>, ArrayType<ArrayType<FloatType>>], ArrayType<ArrayType<FloatType>>>;
802
+ /** Extract intermediate layer activations (embeddings) from MLP */
803
+ readonly mlpEncode: import("@elaraai/east").PlatformDefinition<[VariantType<{
804
+ /** PyTorch MLP model */
805
+ torch_mlp: StructType<{
806
+ /** Cloudpickle serialized model */
807
+ data: BlobType;
808
+ /** Number of input features */
809
+ n_features: IntegerType;
810
+ /** Hidden layer sizes */
811
+ hidden_layers: ArrayType<IntegerType>;
812
+ /** Output dimension */
813
+ output_dim: IntegerType;
814
+ }>;
815
+ }>, ArrayType<ArrayType<FloatType>>, IntegerType], ArrayType<ArrayType<FloatType>>>;
816
+ /** Decode embeddings back through decoder portion of MLP */
817
+ readonly mlpDecode: import("@elaraai/east").PlatformDefinition<[VariantType<{
818
+ /** PyTorch MLP model */
819
+ torch_mlp: StructType<{
820
+ /** Cloudpickle serialized model */
821
+ data: BlobType;
822
+ /** Number of input features */
823
+ n_features: IntegerType;
824
+ /** Hidden layer sizes */
825
+ hidden_layers: ArrayType<IntegerType>;
826
+ /** Output dimension */
827
+ output_dim: IntegerType;
828
+ }>;
829
+ }>, ArrayType<ArrayType<FloatType>>, IntegerType], ArrayType<ArrayType<FloatType>>>;
520
830
  /** Type definitions */
521
831
  readonly Types: {
522
832
  /** Vector type (array of floats) */
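When the grouped export above is used to train an autoencoder (passing `X` as both input and target), a natural follow-up is scoring how well each sample is reconstructed. The platform functions only fix the shape contract (`mlpPredictMulti` returns n_samples x n_outputs), so the sketch below is plain TypeScript over hypothetical materialised `number[][]` matrices rather than East code.

```ts
// Plain TypeScript illustration, not part of the package: X and Xhat are assumed
// to be same-shaped number[][] matrices, e.g. autoencoder inputs and the
// reconstructions produced by mlpPredictMulti.
function reconstructionErrors(X: number[][], Xhat: number[][]): number[] {
    // Mean squared error per sample (per row).
    return X.map((row, i) =>
        row.reduce((sum, value, j) => sum + (value - Xhat[i][j]) ** 2, 0) / row.length,
    );
}

// Rows with unusually large error are the samples the bottleneck captures poorly.
const errors = reconstructionErrors(
    [[1, 0, 0, 0], [0, 1, 0, 0]],
    [[0.9, 0.05, 0.02, 0.03], [0.1, 0.7, 0.1, 0.1]],
);
```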
@@ -1 +1 @@
1
- (sourcemap for torch.d.ts, sources ../../src/torch/torch.ts: previous base64-VLQ mappings omitted)
1
+ (sourcemap for torch.d.ts, sources ../../src/torch/torch.ts: regenerated base64-VLQ mappings omitted)
@@ -161,6 +161,96 @@ export const torch_mlp_train = East.platform("torch_mlp_train", [MatrixType, Vec
161
161
  * @returns Predicted values
162
162
  */
163
163
  export const torch_mlp_predict = East.platform("torch_mlp_predict", [TorchModelBlobType, MatrixType], VectorType);
164
+ /**
165
+ * Train a PyTorch MLP model with multi-output support.
166
+ *
167
+ * Supports multi-output regression (predicting multiple values per sample)
168
+ * and autoencoders (where input equals target for reconstruction learning).
169
+ * Output dimension is inferred from y.shape[1] unless overridden in config.
170
+ *
171
+ * @param X - Feature matrix (n_samples x n_features)
172
+ * @param y - Target matrix (n_samples x n_outputs)
173
+ * @param mlp_config - MLP architecture configuration
174
+ * @param train_config - Training configuration
175
+ * @returns Model blob and training result
176
+ */
177
+ export const torch_mlp_train_multi = East.platform("torch_mlp_train_multi", [MatrixType, MatrixType, TorchMLPConfigType, TorchTrainConfigType], TorchTrainOutputType);
178
+ /**
179
+ * Make predictions with a trained PyTorch MLP (multi-output).
180
+ *
181
+ * Returns a matrix where each row contains the predicted outputs for a sample.
182
+ *
183
+ * @param model - Trained MLP model blob
184
+ * @param X - Feature matrix (n_samples x n_features)
185
+ * @returns Predicted matrix (n_samples x n_outputs)
186
+ */
187
+ export const torch_mlp_predict_multi = East.platform("torch_mlp_predict_multi", [TorchModelBlobType, MatrixType], MatrixType);
188
+ /**
189
+ * Extract intermediate layer activations (embeddings) from a trained MLP.
190
+ *
191
+ * For autoencoders, this allows extracting the bottleneck representation.
192
+ * The layer_index specifies which hidden layer's output to return (0-indexed).
193
+ *
194
+ * For an autoencoder with architecture [input -> 8 -> 2 -> 8 -> output]
195
+ * (hidden_layers: [8, 2, 8]):
196
+ * - layer_index=0: output after first hidden layer (8 features)
197
+ * - layer_index=1: output after second hidden layer (2 features) <- bottleneck
198
+ * - layer_index=2: output after third hidden layer (8 features)
199
+ *
200
+ * @param model - Trained MLP model blob
201
+ * @param X - Feature matrix (n_samples x n_features)
202
+ * @param layer_index - Which hidden layer's output to return (0-indexed)
203
+ * @returns Embedding matrix (n_samples x hidden_dim at that layer)
204
+ *
205
+ * @example
206
+ * ```ts
207
+ * // Train autoencoder: 4 features -> 8 -> 2 (bottleneck) -> 8 -> 4 features
208
+ * const mlp_config = $.let({
209
+ * hidden_layers: [8n, 2n, 8n],
210
+ * activation: variant('some', variant('relu', {})),
211
+ * dropout: variant('none', null),
212
+ * output_dim: variant('none', null),
213
+ * });
214
+ * const output = $.let(Torch.mlpTrainMulti(X, X, mlp_config, train_config));
215
+ *
216
+ * // Extract bottleneck embeddings (layer_index=1 for the 2-dim bottleneck)
217
+ * const embeddings = $.let(Torch.mlpEncode(output.model, X, 1n));
218
+ * // embeddings is now (n_samples x 2)
219
+ * ```
220
+ */
221
+ export const torch_mlp_encode = East.platform("torch_mlp_encode", [TorchModelBlobType, MatrixType, IntegerType], MatrixType);
222
+ /**
223
+ * Decode embeddings back through the decoder portion of an MLP.
224
+ *
225
+ * For autoencoders, this takes bottleneck activations and runs them through
226
+ * the decoder to reconstruct the output. This is the complement to mlpEncode.
227
+ *
228
+ * For an autoencoder with architecture [input -> 8 -> 2 -> 8 -> output]
229
+ * (hidden_layers: [8, 2, 8]):
230
+ * - layer_index=1: Start from the 2-dim bottleneck, run through layers 2+ to output
231
+ * - layer_index=0: Start from the 8-dim first layer, run through layers 1+ to output
232
+ *
233
+ * Use case: Compute weighted average of origin embeddings, then decode to
234
+ * get the reconstructed blend weight distribution.
235
+ *
236
+ * @param model - Trained MLP model blob
237
+ * @param embeddings - Embedding matrix (n_samples x hidden_dim at layer_index)
238
+ * @param layer_index - Which hidden layer the embeddings come from (0-indexed)
239
+ * @returns Decoded output matrix (n_samples x output_dim)
240
+ *
241
+ * @example
242
+ * ```ts
243
+ * // After training autoencoder and extracting embeddings...
244
+ * const origin_embeddings = $.let(Torch.mlpEncode(output.model, X_onehot, 1n));
245
+ *
246
+ * // Compute weighted blend embedding (e.g., 50% origin A + 50% origin B)
247
+ * const blend_embedding = $.let(...); // weighted average of origin embeddings
248
+ *
249
+ * // Decode back to weight distribution
250
+ * const reconstructed = $.let(Torch.mlpDecode(output.model, blend_embedding, 1n));
251
+ * ```
252
+ */
253
+ export const torch_mlp_decode = East.platform("torch_mlp_decode", [TorchModelBlobType, MatrixType, IntegerType], MatrixType);
164
254
  // ============================================================================
165
255
  // Grouped Export
166
256
  // ============================================================================
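The `mlpDecode` documentation above deliberately leaves the blend step elided (`$.let(...)`); the arithmetic it alludes to is a convex combination of embedding rows obtained from `mlpEncode`. A hypothetical plain-TypeScript helper (not part of the package) illustrating that step before the result is handed back to `mlpDecode`:

```ts
// Hypothetical helper, plain TypeScript rather than East code: blends two
// bottleneck embeddings, e.g. 50% origin A + 50% origin B.
function blendEmbeddings(a: number[], b: number[], weightA: number): number[] {
    const weightB = 1 - weightA;
    return a.map((value, i) => value * weightA + b[i] * weightB);
}

// Two 2-dim bottleneck embeddings (layer_index = 1 in the [8, 2, 8] architecture).
const blended = blendEmbeddings([0.8, -0.2], [0.1, 0.6], 0.5); // [0.45, 0.2]
```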
@@ -223,10 +313,18 @@ export const TorchTypes = {
223
313
  * ```
224
314
  */
225
315
  export const Torch = {
226
- /** Train MLP model */
316
+ /** Train MLP model (single output) */
227
317
  mlpTrain: torch_mlp_train,
228
- /** Make predictions with MLP */
318
+ /** Make predictions with MLP (single output) */
229
319
  mlpPredict: torch_mlp_predict,
320
+ /** Train MLP model (multi-output) */
321
+ mlpTrainMulti: torch_mlp_train_multi,
322
+ /** Make predictions with MLP (multi-output) */
323
+ mlpPredictMulti: torch_mlp_predict_multi,
324
+ /** Extract intermediate layer activations (embeddings) from MLP */
325
+ mlpEncode: torch_mlp_encode,
326
+ /** Decode embeddings back through decoder portion of MLP */
327
+ mlpDecode: torch_mlp_decode,
230
328
  /** Type definitions */
231
329
  Types: TorchTypes,
232
330
  };
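None of the examples exercise the training result struct (`train_losses`, `val_losses`, `best_epoch`) that `mlpTrain` and `mlpTrainMulti` return alongside the model. A short sketch in the style of the existing examples; it assumes the `output` binding from the training sketch earlier and that struct fields are read with the same dotted access the examples use for `output.model`.

```ts
// Sketch only: `output` is assumed to be the value returned by Torch.mlpTrainMulti.
const train_losses = $.let(output.result.train_losses); // training loss per epoch
const val_losses = $.let(output.result.val_losses);     // validation loss per epoch
const best_epoch = $.let(output.result.best_epoch);     // best epoch under early stopping
```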
@@ -1 +1 @@
1
- (sourcemap for torch.js, sources ../../src/torch/torch.ts: previous base64-VLQ mappings omitted)
1
+ (sourcemap for torch.js, sources ../../src/torch/torch.ts: regenerated base64-VLQ mappings omitted)