@elaraai/east-py-datascience 0.0.2-beta.5 → 0.0.2-beta.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/gp/gp.d.ts +49 -49
- package/dist/gp/gp.d.ts.map +1 -1
- package/dist/gp/gp.js +7 -7
- package/dist/gp/gp.js.map +1 -1
- package/dist/mads/mads.d.ts +33 -33
- package/dist/mads/mads.d.ts.map +1 -1
- package/dist/mads/mads.js +5 -5
- package/dist/mads/mads.js.map +1 -1
- package/dist/ngboost/ngboost.d.ts +35 -35
- package/dist/ngboost/ngboost.d.ts.map +1 -1
- package/dist/ngboost/ngboost.js +3 -3
- package/dist/ngboost/ngboost.js.map +1 -1
- package/dist/shap/shap.d.ts +11 -11
- package/dist/shap/shap.d.ts.map +1 -1
- package/dist/shap/shap.js +3 -3
- package/dist/shap/shap.js.map +1 -1
- package/dist/sklearn/sklearn.d.ts +64 -64
- package/dist/torch/torch.d.ts +234 -121
- package/dist/torch/torch.d.ts.map +1 -1
- package/dist/torch/torch.js +33 -15
- package/dist/torch/torch.js.map +1 -1
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +1 -1
package/dist/torch/torch.d.ts
CHANGED
@@ -10,44 +10,58 @@
  *
  * @packageDocumentation
  */
- import { StructType, VariantType, OptionType, IntegerType, FloatType, BlobType, ArrayType } from "@elaraai/east";
+ import { StructType, VariantType, OptionType, IntegerType, FloatType, BlobType, ArrayType, NullType } from "@elaraai/east";
  export { VectorType, MatrixType } from "../types.js";
  /**
- * Activation function type for
+ * Activation function type for hidden layers.
  */
  export declare const TorchActivationType: VariantType<{
  /** Rectified Linear Unit */
- relu:
+ relu: NullType;
  /** Hyperbolic tangent */
- tanh:
+ tanh: NullType;
  /** Sigmoid function */
- sigmoid:
+ sigmoid: NullType;
  /** Leaky ReLU */
- leaky_relu:
+ leaky_relu: NullType;
  }>;
  /**
  * Loss function type for training.
  */
  export declare const TorchLossType: VariantType<{
  /** Mean Squared Error (regression) */
- mse:
+ mse: NullType;
  /** Mean Absolute Error (regression) */
- mae:
+ mae: NullType;
  /** Cross Entropy (classification) */
- cross_entropy:
+ cross_entropy: NullType;
+ /** KL Divergence (distribution matching, use with softmax output) */
+ kl_div: NullType;
  }>;
  /**
  * Optimizer type for training.
  */
  export declare const TorchOptimizerType: VariantType<{
  /** Adam optimizer */
- adam:
+ adam: NullType;
  /** Stochastic Gradient Descent */
- sgd:
+ sgd: NullType;
  /** AdamW with weight decay */
- adamw:
+ adamw: NullType;
  /** RMSprop optimizer */
- rmsprop:
+ rmsprop: NullType;
+ }>;
+ /**
+ * Output activation function type for the final layer.
+ * Applied only to the output layer, not hidden layers.
+ */
+ export declare const TorchOutputActivationType: VariantType<{
+ /** No activation (linear output) - default */
+ none: NullType;
+ /** Softmax (outputs sum to 1, for probability distributions) */
+ softmax: NullType;
+ /** Sigmoid (each output independently in [0,1]) */
+ sigmoid: NullType;
  }>;
  /**
  * Configuration for MLP architecture.
@@ -55,16 +69,25 @@ export declare const TorchOptimizerType: VariantType<{
  export declare const TorchMLPConfigType: StructType<{
  /** Hidden layer sizes, e.g., [64, 32] */
  hidden_layers: ArrayType<IntegerType>;
- /** Activation function (default relu) */
+ /** Activation function for hidden layers (default relu) */
  activation: OptionType<VariantType<{
  /** Rectified Linear Unit */
- relu:
+ relu: NullType;
  /** Hyperbolic tangent */
- tanh:
+ tanh: NullType;
  /** Sigmoid function */
- sigmoid:
+ sigmoid: NullType;
  /** Leaky ReLU */
- leaky_relu:
+ leaky_relu: NullType;
+ }>>;
+ /** Output activation function (default none/linear) */
+ output_activation: OptionType<VariantType<{
+ /** No activation (linear output) - default */
+ none: NullType;
+ /** Softmax (outputs sum to 1, for probability distributions) */
+ softmax: NullType;
+ /** Sigmoid (each output independently in [0,1]) */
+ sigmoid: NullType;
  }>>;
  /** Dropout rate (default 0.0) */
  dropout: OptionType<FloatType>;
@@ -84,22 +107,24 @@ export declare const TorchTrainConfigType: StructType<{
  /** Loss function (default mse) */
  loss: OptionType<VariantType<{
  /** Mean Squared Error (regression) */
- mse:
+ mse: NullType;
  /** Mean Absolute Error (regression) */
- mae:
+ mae: NullType;
  /** Cross Entropy (classification) */
- cross_entropy:
+ cross_entropy: NullType;
+ /** KL Divergence (distribution matching, use with softmax output) */
+ kl_div: NullType;
  }>>;
  /** Optimizer (default adam) */
  optimizer: OptionType<VariantType<{
  /** Adam optimizer */
- adam:
+ adam: NullType;
  /** Stochastic Gradient Descent */
- sgd:
+ sgd: NullType;
  /** AdamW with weight decay */
- adamw:
+ adamw: NullType;
  /** RMSprop optimizer */
- rmsprop:
+ rmsprop: NullType;
  }>>;
  /** Early stopping patience, 0 = disabled */
  early_stopping: OptionType<IntegerType>;
@@ -170,16 +195,25 @@ export declare const TorchModelBlobType: VariantType<{
  export declare const torch_mlp_train: import("@elaraai/east").PlatformDefinition<[ArrayType<ArrayType<FloatType>>, ArrayType<FloatType>, StructType<{
  /** Hidden layer sizes, e.g., [64, 32] */
  hidden_layers: ArrayType<IntegerType>;
- /** Activation function (default relu) */
+ /** Activation function for hidden layers (default relu) */
  activation: OptionType<VariantType<{
  /** Rectified Linear Unit */
- relu:
+ relu: NullType;
  /** Hyperbolic tangent */
- tanh:
+ tanh: NullType;
  /** Sigmoid function */
- sigmoid:
+ sigmoid: NullType;
  /** Leaky ReLU */
- leaky_relu:
+ leaky_relu: NullType;
+ }>>;
+ /** Output activation function (default none/linear) */
+ output_activation: OptionType<VariantType<{
+ /** No activation (linear output) - default */
+ none: NullType;
+ /** Softmax (outputs sum to 1, for probability distributions) */
+ softmax: NullType;
+ /** Sigmoid (each output independently in [0,1]) */
+ sigmoid: NullType;
  }>>;
  /** Dropout rate (default 0.0) */
  dropout: OptionType<FloatType>;
@@ -195,22 +229,24 @@ export declare const torch_mlp_train: import("@elaraai/east").PlatformDefinition
  /** Loss function (default mse) */
  loss: OptionType<VariantType<{
  /** Mean Squared Error (regression) */
- mse:
+ mse: NullType;
  /** Mean Absolute Error (regression) */
- mae:
+ mae: NullType;
  /** Cross Entropy (classification) */
- cross_entropy:
+ cross_entropy: NullType;
+ /** KL Divergence (distribution matching, use with softmax output) */
+ kl_div: NullType;
  }>>;
  /** Optimizer (default adam) */
  optimizer: OptionType<VariantType<{
  /** Adam optimizer */
- adam:
+ adam: NullType;
  /** Stochastic Gradient Descent */
- sgd:
+ sgd: NullType;
  /** AdamW with weight decay */
- adamw:
+ adamw: NullType;
  /** RMSprop optimizer */
- rmsprop:
+ rmsprop: NullType;
  }>>;
  /** Early stopping patience, 0 = disabled */
  early_stopping: OptionType<IntegerType>;
@@ -274,16 +310,25 @@ export declare const torch_mlp_predict: import("@elaraai/east").PlatformDefiniti
  export declare const torch_mlp_train_multi: import("@elaraai/east").PlatformDefinition<[ArrayType<ArrayType<FloatType>>, ArrayType<ArrayType<FloatType>>, StructType<{
  /** Hidden layer sizes, e.g., [64, 32] */
  hidden_layers: ArrayType<IntegerType>;
- /** Activation function (default relu) */
+ /** Activation function for hidden layers (default relu) */
  activation: OptionType<VariantType<{
  /** Rectified Linear Unit */
- relu:
+ relu: NullType;
  /** Hyperbolic tangent */
- tanh:
+ tanh: NullType;
  /** Sigmoid function */
- sigmoid:
+ sigmoid: NullType;
  /** Leaky ReLU */
- leaky_relu:
+ leaky_relu: NullType;
+ }>>;
+ /** Output activation function (default none/linear) */
+ output_activation: OptionType<VariantType<{
+ /** No activation (linear output) - default */
+ none: NullType;
+ /** Softmax (outputs sum to 1, for probability distributions) */
+ softmax: NullType;
+ /** Sigmoid (each output independently in [0,1]) */
+ sigmoid: NullType;
  }>>;
  /** Dropout rate (default 0.0) */
  dropout: OptionType<FloatType>;
@@ -299,22 +344,24 @@ export declare const torch_mlp_train_multi: import("@elaraai/east").PlatformDefi
  /** Loss function (default mse) */
  loss: OptionType<VariantType<{
  /** Mean Squared Error (regression) */
- mse:
+ mse: NullType;
  /** Mean Absolute Error (regression) */
- mae:
+ mae: NullType;
  /** Cross Entropy (classification) */
- cross_entropy:
+ cross_entropy: NullType;
+ /** KL Divergence (distribution matching, use with softmax output) */
+ kl_div: NullType;
  }>>;
  /** Optimizer (default adam) */
  optimizer: OptionType<VariantType<{
  /** Adam optimizer */
- adam:
+ adam: NullType;
  /** Stochastic Gradient Descent */
- sgd:
+ sgd: NullType;
  /** AdamW with weight decay */
- adamw:
+ adamw: NullType;
  /** RMSprop optimizer */
- rmsprop:
+ rmsprop: NullType;
  }>>;
  /** Early stopping patience, 0 = disabled */
  early_stopping: OptionType<IntegerType>;
@@ -462,51 +509,71 @@ export declare const TorchTypes: {
  readonly VectorType: ArrayType<FloatType>;
  /** Matrix type (2D array of floats) */
  readonly MatrixType: ArrayType<ArrayType<FloatType>>;
- /** Activation function type */
+ /** Activation function type for hidden layers */
  readonly TorchActivationType: VariantType<{
  /** Rectified Linear Unit */
- relu:
+ relu: NullType;
  /** Hyperbolic tangent */
- tanh:
+ tanh: NullType;
  /** Sigmoid function */
- sigmoid:
+ sigmoid: NullType;
  /** Leaky ReLU */
- leaky_relu:
+ leaky_relu: NullType;
+ }>;
+ /** Output activation function type */
+ readonly TorchOutputActivationType: VariantType<{
+ /** No activation (linear output) - default */
+ none: NullType;
+ /** Softmax (outputs sum to 1, for probability distributions) */
+ softmax: NullType;
+ /** Sigmoid (each output independently in [0,1]) */
+ sigmoid: NullType;
  }>;
  /** Loss function type */
  readonly TorchLossType: VariantType<{
  /** Mean Squared Error (regression) */
- mse:
+ mse: NullType;
  /** Mean Absolute Error (regression) */
- mae:
+ mae: NullType;
  /** Cross Entropy (classification) */
- cross_entropy:
+ cross_entropy: NullType;
+ /** KL Divergence (distribution matching, use with softmax output) */
+ kl_div: NullType;
  }>;
  /** Optimizer type */
  readonly TorchOptimizerType: VariantType<{
  /** Adam optimizer */
- adam:
+ adam: NullType;
  /** Stochastic Gradient Descent */
- sgd:
+ sgd: NullType;
  /** AdamW with weight decay */
- adamw:
+ adamw: NullType;
  /** RMSprop optimizer */
- rmsprop:
+ rmsprop: NullType;
  }>;
  /** MLP configuration type */
  readonly TorchMLPConfigType: StructType<{
  /** Hidden layer sizes, e.g., [64, 32] */
  hidden_layers: ArrayType<IntegerType>;
- /** Activation function (default relu) */
+ /** Activation function for hidden layers (default relu) */
  activation: OptionType<VariantType<{
  /** Rectified Linear Unit */
- relu:
+ relu: NullType;
  /** Hyperbolic tangent */
- tanh:
+ tanh: NullType;
  /** Sigmoid function */
- sigmoid:
+ sigmoid: NullType;
  /** Leaky ReLU */
- leaky_relu:
+ leaky_relu: NullType;
+ }>>;
+ /** Output activation function (default none/linear) */
+ output_activation: OptionType<VariantType<{
+ /** No activation (linear output) - default */
+ none: NullType;
+ /** Softmax (outputs sum to 1, for probability distributions) */
+ softmax: NullType;
+ /** Sigmoid (each output independently in [0,1]) */
+ sigmoid: NullType;
  }>>;
  /** Dropout rate (default 0.0) */
  dropout: OptionType<FloatType>;
@@ -524,22 +591,24 @@ export declare const TorchTypes: {
  /** Loss function (default mse) */
  loss: OptionType<VariantType<{
  /** Mean Squared Error (regression) */
- mse:
+ mse: NullType;
  /** Mean Absolute Error (regression) */
- mae:
+ mae: NullType;
  /** Cross Entropy (classification) */
- cross_entropy:
+ cross_entropy: NullType;
+ /** KL Divergence (distribution matching, use with softmax output) */
+ kl_div: NullType;
  }>>;
  /** Optimizer (default adam) */
  optimizer: OptionType<VariantType<{
  /** Adam optimizer */
- adam:
+ adam: NullType;
  /** Stochastic Gradient Descent */
- sgd:
+ sgd: NullType;
  /** AdamW with weight decay */
- adamw:
+ adamw: NullType;
  /** RMSprop optimizer */
- rmsprop:
+ rmsprop: NullType;
  }>>;
  /** Early stopping patience, 0 = disabled */
  early_stopping: OptionType<IntegerType>;
@@ -631,16 +700,25 @@ export declare const Torch: {
  readonly mlpTrain: import("@elaraai/east").PlatformDefinition<[ArrayType<ArrayType<FloatType>>, ArrayType<FloatType>, StructType<{
  /** Hidden layer sizes, e.g., [64, 32] */
  hidden_layers: ArrayType<IntegerType>;
- /** Activation function (default relu) */
+ /** Activation function for hidden layers (default relu) */
  activation: OptionType<VariantType<{
  /** Rectified Linear Unit */
- relu:
+ relu: NullType;
  /** Hyperbolic tangent */
- tanh:
+ tanh: NullType;
  /** Sigmoid function */
- sigmoid:
+ sigmoid: NullType;
  /** Leaky ReLU */
- leaky_relu:
+ leaky_relu: NullType;
+ }>>;
+ /** Output activation function (default none/linear) */
+ output_activation: OptionType<VariantType<{
+ /** No activation (linear output) - default */
+ none: NullType;
+ /** Softmax (outputs sum to 1, for probability distributions) */
+ softmax: NullType;
+ /** Sigmoid (each output independently in [0,1]) */
+ sigmoid: NullType;
  }>>;
  /** Dropout rate (default 0.0) */
  dropout: OptionType<FloatType>;
@@ -656,22 +734,24 @@ export declare const Torch: {
  /** Loss function (default mse) */
  loss: OptionType<VariantType<{
  /** Mean Squared Error (regression) */
- mse:
+ mse: NullType;
  /** Mean Absolute Error (regression) */
- mae:
+ mae: NullType;
  /** Cross Entropy (classification) */
- cross_entropy:
+ cross_entropy: NullType;
+ /** KL Divergence (distribution matching, use with softmax output) */
+ kl_div: NullType;
  }>>;
  /** Optimizer (default adam) */
  optimizer: OptionType<VariantType<{
  /** Adam optimizer */
- adam:
+ adam: NullType;
  /** Stochastic Gradient Descent */
- sgd:
+ sgd: NullType;
  /** AdamW with weight decay */
- adamw:
+ adamw: NullType;
  /** RMSprop optimizer */
- rmsprop:
+ rmsprop: NullType;
  }>>;
  /** Early stopping patience, 0 = disabled */
  early_stopping: OptionType<IntegerType>;
@@ -717,16 +797,25 @@ export declare const Torch: {
  readonly mlpTrainMulti: import("@elaraai/east").PlatformDefinition<[ArrayType<ArrayType<FloatType>>, ArrayType<ArrayType<FloatType>>, StructType<{
  /** Hidden layer sizes, e.g., [64, 32] */
  hidden_layers: ArrayType<IntegerType>;
- /** Activation function (default relu) */
+ /** Activation function for hidden layers (default relu) */
  activation: OptionType<VariantType<{
  /** Rectified Linear Unit */
- relu:
+ relu: NullType;
  /** Hyperbolic tangent */
- tanh:
+ tanh: NullType;
  /** Sigmoid function */
- sigmoid:
+ sigmoid: NullType;
  /** Leaky ReLU */
- leaky_relu:
+ leaky_relu: NullType;
+ }>>;
+ /** Output activation function (default none/linear) */
+ output_activation: OptionType<VariantType<{
+ /** No activation (linear output) - default */
+ none: NullType;
+ /** Softmax (outputs sum to 1, for probability distributions) */
+ softmax: NullType;
+ /** Sigmoid (each output independently in [0,1]) */
+ sigmoid: NullType;
  }>>;
  /** Dropout rate (default 0.0) */
  dropout: OptionType<FloatType>;
@@ -742,22 +831,24 @@ export declare const Torch: {
  /** Loss function (default mse) */
  loss: OptionType<VariantType<{
  /** Mean Squared Error (regression) */
- mse:
+ mse: NullType;
  /** Mean Absolute Error (regression) */
- mae:
+ mae: NullType;
  /** Cross Entropy (classification) */
- cross_entropy:
+ cross_entropy: NullType;
+ /** KL Divergence (distribution matching, use with softmax output) */
+ kl_div: NullType;
  }>>;
  /** Optimizer (default adam) */
  optimizer: OptionType<VariantType<{
  /** Adam optimizer */
- adam:
+ adam: NullType;
  /** Stochastic Gradient Descent */
- sgd:
+ sgd: NullType;
  /** AdamW with weight decay */
- adamw:
+ adamw: NullType;
  /** RMSprop optimizer */
- rmsprop:
+ rmsprop: NullType;
  }>>;
  /** Early stopping patience, 0 = disabled */
  early_stopping: OptionType<IntegerType>;
@@ -833,51 +924,71 @@ export declare const Torch: {
  readonly VectorType: ArrayType<FloatType>;
  /** Matrix type (2D array of floats) */
  readonly MatrixType: ArrayType<ArrayType<FloatType>>;
- /** Activation function type */
+ /** Activation function type for hidden layers */
  readonly TorchActivationType: VariantType<{
  /** Rectified Linear Unit */
- relu:
+ relu: NullType;
  /** Hyperbolic tangent */
- tanh:
+ tanh: NullType;
  /** Sigmoid function */
- sigmoid:
+ sigmoid: NullType;
  /** Leaky ReLU */
- leaky_relu:
+ leaky_relu: NullType;
+ }>;
+ /** Output activation function type */
+ readonly TorchOutputActivationType: VariantType<{
+ /** No activation (linear output) - default */
+ none: NullType;
+ /** Softmax (outputs sum to 1, for probability distributions) */
+ softmax: NullType;
+ /** Sigmoid (each output independently in [0,1]) */
+ sigmoid: NullType;
  }>;
  /** Loss function type */
  readonly TorchLossType: VariantType<{
  /** Mean Squared Error (regression) */
- mse:
+ mse: NullType;
  /** Mean Absolute Error (regression) */
- mae:
+ mae: NullType;
  /** Cross Entropy (classification) */
- cross_entropy:
+ cross_entropy: NullType;
+ /** KL Divergence (distribution matching, use with softmax output) */
+ kl_div: NullType;
  }>;
  /** Optimizer type */
  readonly TorchOptimizerType: VariantType<{
  /** Adam optimizer */
- adam:
+ adam: NullType;
  /** Stochastic Gradient Descent */
- sgd:
+ sgd: NullType;
  /** AdamW with weight decay */
- adamw:
+ adamw: NullType;
  /** RMSprop optimizer */
- rmsprop:
+ rmsprop: NullType;
  }>;
  /** MLP configuration type */
  readonly TorchMLPConfigType: StructType<{
  /** Hidden layer sizes, e.g., [64, 32] */
  hidden_layers: ArrayType<IntegerType>;
- /** Activation function (default relu) */
+ /** Activation function for hidden layers (default relu) */
  activation: OptionType<VariantType<{
  /** Rectified Linear Unit */
- relu:
+ relu: NullType;
  /** Hyperbolic tangent */
- tanh:
+ tanh: NullType;
  /** Sigmoid function */
- sigmoid:
+ sigmoid: NullType;
  /** Leaky ReLU */
- leaky_relu:
+ leaky_relu: NullType;
+ }>>;
+ /** Output activation function (default none/linear) */
+ output_activation: OptionType<VariantType<{
+ /** No activation (linear output) - default */
+ none: NullType;
+ /** Softmax (outputs sum to 1, for probability distributions) */
+ softmax: NullType;
+ /** Sigmoid (each output independently in [0,1]) */
+ sigmoid: NullType;
  }>>;
  /** Dropout rate (default 0.0) */
  dropout: OptionType<FloatType>;
@@ -895,22 +1006,24 @@ export declare const Torch: {
  /** Loss function (default mse) */
  loss: OptionType<VariantType<{
  /** Mean Squared Error (regression) */
- mse:
+ mse: NullType;
  /** Mean Absolute Error (regression) */
- mae:
+ mae: NullType;
  /** Cross Entropy (classification) */
- cross_entropy:
+ cross_entropy: NullType;
+ /** KL Divergence (distribution matching, use with softmax output) */
+ kl_div: NullType;
  }>>;
  /** Optimizer (default adam) */
  optimizer: OptionType<VariantType<{
  /** Adam optimizer */
- adam:
+ adam: NullType;
  /** Stochastic Gradient Descent */
- sgd:
+ sgd: NullType;
  /** AdamW with weight decay */
- adamw:
+ adamw: NullType;
  /** RMSprop optimizer */
- rmsprop:
+ rmsprop: NullType;
  }>>;
  /** Early stopping patience, 0 = disabled */
  early_stopping: OptionType<IntegerType>;
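Taken together, the hunks above give every variant case an explicit NullType value, add a kl_div case to the loss variant, and introduce an output_activation field alongside the new TorchOutputActivationType declaration. A minimal sketch of the resulting configuration surface, written as plain TypeScript unions for illustration only (the real fields are East OptionType/VariantType values exactly as declared above; the names and defaults come from the doc comments in the diff):

// Plain-TypeScript sketch of the MLP configuration declared in torch.d.ts.
// Illustration only: the package models these as East OptionType<VariantType<...>>
// values, not raw string unions or optional numbers.
type Activation = "relu" | "tanh" | "sigmoid" | "leaky_relu";   // hidden layers
type OutputActivation = "none" | "softmax" | "sigmoid";         // new in 0.0.2-beta.7
type Loss = "mse" | "mae" | "cross_entropy" | "kl_div";         // kl_div is new
type Optimizer = "adam" | "sgd" | "adamw" | "rmsprop";

interface MLPConfigSketch {
    hidden_layers: number[];              // e.g. [64, 32]
    activation?: Activation;              // default relu; applies to hidden layers only
    output_activation?: OutputActivation; // default none (linear); final layer only
    dropout?: number;                     // default 0.0
}

interface TrainConfigSketch {
    loss?: Loss;             // default mse; kl_div is meant to pair with a softmax output
    optimizer?: Optimizer;   // default adam
    early_stopping?: number; // patience, 0 = disabled
}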
package/dist/torch/torch.d.ts.map
CHANGED

@@ -1 +1 @@
- {"version":3,"file":"torch.d.ts","sourceRoot":"","sources":["../../src/torch/torch.ts"],"names":[],"mappings":"…"}
+ {"version":3,"file":"torch.d.ts","sourceRoot":"","sources":["../../src/torch/torch.ts"],"names":[],"mappings":"…"}
(regenerated source map; mappings elided)
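The same additions surface on the exported TorchTypes and Torch objects shown in the hunks for torch.d.ts. A minimal usage sketch, assuming the torch module is re-exported from the package root (only dist/torch/torch.d.ts appears in this diff, so the import path is an assumption):

// Hypothetical root import; only dist/torch/torch.d.ts is shown in this diff.
import { Torch, TorchTypes } from "@elaraai/east-py-datascience";

// New in beta.7: a separate variant type for the final-layer activation.
const outputActivation = TorchTypes.TorchOutputActivationType; // cases: none, softmax, sigmoid
const loss = TorchTypes.TorchLossType;                         // variant now also carries kl_div

// The training entry points keep their shapes; only the config struct gained fields.
const train = Torch.mlpTrain;           // (features matrix, targets vector, config)
const trainMulti = Torch.mlpTrainMulti; // multi-output targets (matrix of floats)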