cdk-comprehend-s3olap 2.0.198 → 2.0.200

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/.jsii +3 -3
  2. package/lib/cdk-comprehend-s3olap.js +2 -2
  3. package/lib/comprehend-lambdas.js +2 -2
  4. package/lib/iam-roles.js +4 -4
  5. package/node_modules/aws-sdk/CHANGELOG.md +17 -1
  6. package/node_modules/aws-sdk/README.md +1 -1
  7. package/node_modules/aws-sdk/apis/amp-2020-08-01.min.json +149 -149
  8. package/node_modules/aws-sdk/apis/appstream-2016-12-01.min.json +523 -212
  9. package/node_modules/aws-sdk/apis/appstream-2016-12-01.paginators.json +10 -0
  10. package/node_modules/aws-sdk/apis/chime-2018-05-01.min.json +344 -84
  11. package/node_modules/aws-sdk/apis/cleanrooms-2022-02-17.min.json +56 -49
  12. package/node_modules/aws-sdk/apis/dynamodb-2012-08-10.min.json +265 -256
  13. package/node_modules/aws-sdk/apis/ecs-2014-11-13.min.json +3 -0
  14. package/node_modules/aws-sdk/apis/glue-2017-03-31.min.json +624 -606
  15. package/node_modules/aws-sdk/apis/ivs-2020-07-14.min.json +2 -2
  16. package/node_modules/aws-sdk/apis/sagemaker-2017-07-24.min.json +934 -857
  17. package/node_modules/aws-sdk/apis/transfer-2018-11-05.min.json +68 -67
  18. package/node_modules/aws-sdk/clients/amp.d.ts +132 -132
  19. package/node_modules/aws-sdk/clients/appstream.d.ts +426 -8
  20. package/node_modules/aws-sdk/clients/chime.d.ts +268 -268
  21. package/node_modules/aws-sdk/clients/cleanrooms.d.ts +33 -22
  22. package/node_modules/aws-sdk/clients/dynamodb.d.ts +28 -0
  23. package/node_modules/aws-sdk/clients/ecs.d.ts +4 -0
  24. package/node_modules/aws-sdk/clients/gamelift.d.ts +6 -6
  25. package/node_modules/aws-sdk/clients/glue.d.ts +23 -0
  26. package/node_modules/aws-sdk/clients/mediaconvert.d.ts +7 -7
  27. package/node_modules/aws-sdk/clients/sagemaker.d.ts +113 -12
  28. package/node_modules/aws-sdk/clients/transfer.d.ts +11 -6
  29. package/node_modules/aws-sdk/dist/aws-sdk-core-react-native.js +1 -1
  30. package/node_modules/aws-sdk/dist/aws-sdk-react-native.js +12 -12
  31. package/node_modules/aws-sdk/dist/aws-sdk.js +420 -408
  32. package/node_modules/aws-sdk/dist/aws-sdk.min.js +74 -74
  33. package/node_modules/aws-sdk/lib/core.js +1 -1
  34. package/node_modules/aws-sdk/lib/dynamodb/document_client.d.ts +28 -0
  35. package/node_modules/aws-sdk/package.json +1 -1
  36. package/package.json +5 -5
@@ -2666,6 +2666,8 @@ declare namespace SageMaker {
  AgentCount: Long;
  }
  export type AgentVersions = AgentVersion[];
+ export type AggregationTransformationValue = "sum"|"avg"|"first"|"min"|"max"|string;
+ export type AggregationTransformations = {[key: string]: AggregationTransformationValue};
  export interface Alarm {
  /**
  * The name of a CloudWatch alarm in your account.
@@ -3207,11 +3209,11 @@ declare namespace SageMaker {
  }
  export interface AutoMLJobChannel {
  /**
- * The type of channel. Defines whether the data are used for training or validation. The default value is training. Channels for training and validation must share the same ContentType
+ * The type of channel. Defines whether the data are used for training or validation. The default value is training. Channels for training and validation must share the same ContentType The type of channel defaults to training for the time-series forecasting problem type.
  */
  ChannelType?: AutoMLChannelType;
  /**
- * The content type of the data from the input source. The following are the allowed content types for different problems: For Tabular problem types: text/csv;header=present or x-application/vnd.amazon+parquet. The default value is text/csv;header=present. For ImageClassification: image/png, image/jpeg, or image/*. The default value is image/*. For TextClassification: text/csv;header=present or x-application/vnd.amazon+parquet. The default value is text/csv;header=present.
+ * The content type of the data from the input source. The following are the allowed content types for different problems: For tabular problem types: text/csv;header=present or x-application/vnd.amazon+parquet. The default value is text/csv;header=present. For image classification: image/png, image/jpeg, or image/*. The default value is image/*. For text classification: text/csv;header=present or x-application/vnd.amazon+parquet. The default value is text/csv;header=present. For time-series forecasting: text/csv;header=present or x-application/vnd.amazon+parquet. The default value is text/csv;header=present.
  */
  ContentType?: ContentType;
  /**
@@ -3225,7 +3227,7 @@ declare namespace SageMaker {
  }
  export interface AutoMLJobCompletionCriteria {
  /**
- * The maximum number of times a training job is allowed to run. For job V2s (jobs created by calling CreateAutoMLJobV2), the supported value is 1.
+ * The maximum number of times a training job is allowed to run. For text and image classification, as well as time-series forecasting problem types, the supported value is 1. For tabular problem types, the maximum value is 750.
  */
  MaxCandidates?: MaxCandidates;
  /**
@@ -3263,12 +3265,12 @@ declare namespace SageMaker {
  export type AutoMLJobName = string;
  export interface AutoMLJobObjective {
  /**
- * The name of the objective metric used to measure the predictive quality of a machine learning system. During training, the model's parameters are updated iteratively to optimize its performance based on the feedback provided by the objective metric when evaluating the model on the validation dataset. For the list of all available metrics supported by Autopilot, see Autopilot metrics. If you do not specify a metric explicitly, the default behavior is to automatically use: For tabular problem types: Regression: MSE. Binary classification: F1. Multiclass classification: Accuracy. For image or text classification problem types: Accuracy
+ * The name of the objective metric used to measure the predictive quality of a machine learning system. During training, the model's parameters are updated iteratively to optimize its performance based on the feedback provided by the objective metric when evaluating the model on the validation dataset. For the list of all available metrics supported by Autopilot, see Autopilot metrics. If you do not specify a metric explicitly, the default behavior is to automatically use: For tabular problem types: Regression: MSE. Binary classification: F1. Multiclass classification: Accuracy. For image or text classification problem types: Accuracy For time-series forecasting problem types: AverageWeightedQuantileLoss
  */
  MetricName: AutoMLMetricEnum;
  }
  export type AutoMLJobObjectiveType = "Maximize"|"Minimize"|string;
- export type AutoMLJobSecondaryStatus = "Starting"|"AnalyzingData"|"FeatureEngineering"|"ModelTuning"|"MaxCandidatesReached"|"Failed"|"Stopped"|"MaxAutoMLJobRuntimeReached"|"Stopping"|"CandidateDefinitionsGenerated"|"GeneratingExplainabilityReport"|"Completed"|"ExplainabilityError"|"DeployingModel"|"ModelDeploymentError"|"GeneratingModelInsightsReport"|"ModelInsightsError"|"TrainingModels"|string;
+ export type AutoMLJobSecondaryStatus = "Starting"|"AnalyzingData"|"FeatureEngineering"|"ModelTuning"|"MaxCandidatesReached"|"Failed"|"Stopped"|"MaxAutoMLJobRuntimeReached"|"Stopping"|"CandidateDefinitionsGenerated"|"GeneratingExplainabilityReport"|"Completed"|"ExplainabilityError"|"DeployingModel"|"ModelDeploymentError"|"GeneratingModelInsightsReport"|"ModelInsightsError"|"TrainingModels"|"PreTraining"|string;
  export type AutoMLJobStatus = "Completed"|"InProgress"|"Failed"|"Stopped"|"Stopping"|string;
  export interface AutoMLJobStepMetadata {
  /**
@@ -3316,8 +3318,8 @@ declare namespace SageMaker {
  PartialFailureReasons?: AutoMLPartialFailureReasons;
  }
  export type AutoMLMaxResults = number;
- export type AutoMLMetricEnum = "Accuracy"|"MSE"|"F1"|"F1macro"|"AUC"|"RMSE"|"MAE"|"R2"|"BalancedAccuracy"|"Precision"|"PrecisionMacro"|"Recall"|"RecallMacro"|string;
- export type AutoMLMetricExtendedEnum = "Accuracy"|"MSE"|"F1"|"F1macro"|"AUC"|"RMSE"|"MAE"|"R2"|"BalancedAccuracy"|"Precision"|"PrecisionMacro"|"Recall"|"RecallMacro"|"LogLoss"|"InferenceLatency"|string;
+ export type AutoMLMetricEnum = "Accuracy"|"MSE"|"F1"|"F1macro"|"AUC"|"RMSE"|"MAE"|"R2"|"BalancedAccuracy"|"Precision"|"PrecisionMacro"|"Recall"|"RecallMacro"|"MAPE"|"MASE"|"WAPE"|"AverageWeightedQuantileLoss"|string;
+ export type AutoMLMetricExtendedEnum = "Accuracy"|"MSE"|"F1"|"F1macro"|"AUC"|"RMSE"|"MAE"|"R2"|"BalancedAccuracy"|"Precision"|"PrecisionMacro"|"Recall"|"RecallMacro"|"LogLoss"|"InferenceLatency"|"MAPE"|"MASE"|"WAPE"|"AverageWeightedQuantileLoss"|string;
  export type AutoMLMode = "AUTO"|"ENSEMBLING"|"HYPERPARAMETER_TUNING"|string;
  export type AutoMLNameContains = string;
  export interface AutoMLOutputDataConfig {
@@ -3350,8 +3352,12 @@ declare namespace SageMaker {
  * Settings used to configure an AutoML job V2 for a tabular problem type (regression, classification).
  */
  TabularJobConfig?: TabularJobConfig;
+ /**
+ * Settings used to configure an AutoML job V2 for a time-series forecasting problem type.
+ */
+ TimeSeriesForecastingJobConfig?: TimeSeriesForecastingJobConfig;
  }
- export type AutoMLProblemTypeConfigName = "ImageClassification"|"TextClassification"|"Tabular"|string;
+ export type AutoMLProblemTypeConfigName = "ImageClassification"|"TextClassification"|"Tabular"|"TimeSeriesForecasting"|string;
  export interface AutoMLProblemTypeResolvedAttributes {
  /**
  * Defines the resolved attributes for the TABULAR problem type.
@@ -3419,6 +3425,7 @@ declare namespace SageMaker {
  }
  export type AutotuneMode = "Enabled"|string;
  export type AwsManagedHumanLoopRequestSource = "AWS/Rekognition/DetectModerationLabels/Image/V3"|"AWS/Textract/AnalyzeDocument/Forms/V1"|string;
+ export type BacktestResultsLocation = string;
  export interface BatchDataCaptureConfig {
  /**
  * The Amazon S3 location being used to capture the data.
@@ -3608,6 +3615,10 @@ declare namespace SageMaker {
  * The Amazon S3 prefix to the model insight artifacts generated for the AutoML candidate.
  */
  ModelInsights?: ModelInsightsLocation;
+ /**
+ * The Amazon S3 prefix to the accuracy metrics and the inference results observed over the testing window. Available only for the time-series forecasting problem type.
+ */
+ BacktestResults?: BacktestResultsLocation;
  }
  export type CandidateDefinitionNotebookLocation = string;
  export interface CandidateGenerationConfig {
@@ -4422,7 +4433,7 @@ declare namespace SageMaker {
  */
  AutoMLJobName: AutoMLJobName;
  /**
- * An array of channel objects describing the input data and their location. Each channel is a named input source. Similar to the InputDataConfig attribute in the CreateAutoMLJob input parameters. The supported formats depend on the problem type: For Tabular problem types: S3Prefix, ManifestFile. For ImageClassification: S3Prefix, ManifestFile, AugmentedManifestFile. For TextClassification: S3Prefix.
+ * An array of channel objects describing the input data and their location. Each channel is a named input source. Similar to the InputDataConfig attribute in the CreateAutoMLJob input parameters. The supported formats depend on the problem type: For tabular problem types: S3Prefix, ManifestFile. For image classification: S3Prefix, ManifestFile, AugmentedManifestFile. For text classification: S3Prefix. For time-series forecasting: S3Prefix.
  */
  AutoMLJobInputDataConfig: AutoMLJobInputDataConfig;
  /**
@@ -4454,7 +4465,7 @@ declare namespace SageMaker {
  */
  ModelDeployConfig?: ModelDeployConfig;
  /**
- * This structure specifies how to split the data into train and validation datasets. The validation and training datasets must contain the same headers. For jobs created by calling CreateAutoMLJob, the validation dataset must be less than 2 GB in size.
+ * This structure specifies how to split the data into train and validation datasets. The validation and training datasets must contain the same headers. For jobs created by calling CreateAutoMLJob, the validation dataset must be less than 2 GB in size. This attribute must not be set for the time-series forecasting problem type, as Autopilot automatically splits the input dataset into training and validation sets.
  */
  DataSplitConfig?: AutoMLDataSplitConfig;
  }
@@ -6849,11 +6860,15 @@ declare namespace SageMaker {
  /**
  * Update policy for a blue/green deployment. If this update policy is specified, SageMaker creates a new fleet during the deployment while maintaining the old fleet. SageMaker flips traffic to the new fleet according to the specified traffic routing configuration. Only one update policy should be used in the deployment configuration. If no update policy is specified, SageMaker uses a blue/green deployment strategy with all at once traffic shifting by default.
  */
- BlueGreenUpdatePolicy: BlueGreenUpdatePolicy;
+ BlueGreenUpdatePolicy?: BlueGreenUpdatePolicy;
  /**
  * Automatic rollback configuration for handling endpoint deployment failures and recovery.
  */
  AutoRollbackConfiguration?: AutoRollbackConfig;
+ /**
+ * Specifies a rolling deployment strategy for updating a SageMaker endpoint.
+ */
+ RollingUpdatePolicy?: RollingUpdatePolicy;
  }
  export interface DeploymentRecommendation {
  /**
@@ -10925,7 +10940,7 @@ declare namespace SageMaker {
  }
  export type EndpointPerformances = EndpointPerformance[];
  export type EndpointSortKey = "Name"|"CreationTime"|"Status"|string;
- export type EndpointStatus = "OutOfService"|"Creating"|"Updating"|"SystemUpdating"|"RollingBack"|"InService"|"Deleting"|"Failed"|string;
+ export type EndpointStatus = "OutOfService"|"Creating"|"Updating"|"SystemUpdating"|"RollingBack"|"InService"|"Deleting"|"Failed"|"UpdateRollbackFailed"|string;
  export interface EndpointSummary {
  /**
  * The name of the endpoint.
@@ -11295,6 +11310,10 @@ declare namespace SageMaker {
  }
  export type FileSystemId = string;
  export type FileSystemType = "EFS"|"FSxLustre"|string;
+ export type FillingTransformationMap = {[key: string]: FillingTransformationValue};
+ export type FillingTransformationValue = string;
+ export type FillingTransformations = {[key: string]: FillingTransformationMap};
+ export type FillingType = "frontfill"|"middlefill"|"backfill"|"futurefill"|"frontfill_value"|"middlefill_value"|"backfill_value"|"futurefill_value"|string;
  export interface Filter {
  /**
  * A resource property name. For example, TrainingJobName. For valid property names, see SearchRecord. You must specify a valid property for the resource.
@@ -11388,6 +11407,10 @@ declare namespace SageMaker {
  export type FlowDefinitionTaskKeywords = FlowDefinitionTaskKeyword[];
  export type FlowDefinitionTaskTimeLimitInSeconds = number;
  export type FlowDefinitionTaskTitle = string;
+ export type ForecastFrequency = string;
+ export type ForecastHorizon = number;
+ export type ForecastQuantile = string;
+ export type ForecastQuantiles = ForecastQuantile[];
  export type Framework = "TENSORFLOW"|"KERAS"|"MXNET"|"ONNX"|"PYTORCH"|"XGBOOST"|"TFLITE"|"DARKNET"|"SKLEARN"|string;
  export type FrameworkVersion = string;
  export type GenerateCandidateDefinitionsOnly = boolean;
@@ -11505,6 +11528,8 @@ declare namespace SageMaker {
  }
  export type GitConfigUrl = string;
  export type Group = string;
+ export type GroupingAttributeName = string;
+ export type GroupingAttributeNames = GroupingAttributeName[];
  export type Groups = Group[];
  export type HookParameters = {[key: string]: ConfigValue};
  export type Horovod = boolean;
@@ -12618,6 +12643,7 @@ declare namespace SageMaker {
  export type InvocationsMaxRetries = number;
  export type InvocationsTimeoutInSeconds = number;
  export type IotRoleAlias = string;
+ export type ItemIdentifierAttributeName = string;
  export type JobDurationInSeconds = number;
  export type JobReferenceCode = string;
  export type JobReferenceCodeContains = string;
@@ -19591,6 +19617,24 @@ declare namespace SageMaker {
  MaximumRetryAttempts: MaximumRetryAttempts;
  }
  export type RoleArn = string;
+ export interface RollingUpdatePolicy {
+ /**
+ * Batch size for each rolling step to provision capacity and turn on traffic on the new endpoint fleet, and terminate capacity on the old endpoint fleet. Value must be between 5% to 50% of the variant's total instance count.
+ */
+ MaximumBatchSize: CapacitySize;
+ /**
+ * The length of the baking period, during which SageMaker monitors alarms for each batch on the new fleet.
+ */
+ WaitIntervalInSeconds: WaitIntervalInSeconds;
+ /**
+ * The time limit for the total deployment. Exceeding this limit causes a timeout.
+ */
+ MaximumExecutionTimeoutInSeconds?: MaximumExecutionTimeoutInSeconds;
+ /**
+ * Batch size for rollback to the old endpoint fleet. Each rolling step to provision capacity and turn on traffic on the old endpoint fleet, and terminate capacity on the new endpoint fleet. If this field is absent, the default value will be set to 100% of total capacity which means to bring up the whole capacity of the old fleet at once during rollback.
+ */
+ RollbackMaximumBatchSize?: CapacitySize;
+ }
  export type RootAccess = "Enabled"|"Disabled"|string;
  export type RuleConfigurationName = string;
  export type RuleEvaluationStatus = "InProgress"|"NoIssuesFound"|"IssuesFound"|"Error"|"Stopping"|"Stopped"|string;
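
For orientation, a minimal sketch (not part of the diff) of how the RollingUpdatePolicy added above could be passed to UpdateEndpoint through the bundled aws-sdk v2 client; the endpoint, endpoint config, and alarm names are placeholders.

// Sketch only: rolling update of an existing SageMaker endpoint. All resource names are hypothetical.
import { SageMaker } from 'aws-sdk';

const sagemaker = new SageMaker({ region: 'us-east-1' });

async function rollOutNewEndpointConfig(): Promise<void> {
  await sagemaker.updateEndpoint({
    EndpointName: 'my-endpoint',                 // hypothetical existing endpoint
    EndpointConfigName: 'my-endpoint-config-v2', // hypothetical new endpoint config
    DeploymentConfig: {
      // BlueGreenUpdatePolicy is now optional, so a rolling policy can be supplied on its own.
      RollingUpdatePolicy: {
        MaximumBatchSize: { Type: 'CAPACITY_PERCENT', Value: 20 },         // 5% to 50% of the variant's instances per step
        WaitIntervalInSeconds: 300,                                        // baking period between batches
        RollbackMaximumBatchSize: { Type: 'CAPACITY_PERCENT', Value: 100 } // bring the old fleet back all at once
      },
      AutoRollbackConfiguration: {
        Alarms: [{ AlarmName: 'my-endpoint-5xx-alarm' }]                   // hypothetical CloudWatch alarm
      }
    }
  }).promise();
}

If an alarm fires during the baking period, SageMaker rolls back according to RollbackMaximumBatchSize; the UpdateRollbackFailed value added to EndpointStatus earlier in this diff presumably covers the case where that rollback itself does not succeed.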
@@ -20402,6 +20446,51 @@ declare namespace SageMaker {
  TargetLabelColumn?: TargetLabelColumn;
  }
  export type ThingName = string;
+ export interface TimeSeriesConfig {
+ /**
+ * The name of the column representing the target variable that you want to predict for each item in your dataset. The data type of the target variable must be numerical.
+ */
+ TargetAttributeName: TargetAttributeName;
+ /**
+ * The name of the column indicating a point in time at which the target value of a given item is recorded.
+ */
+ TimestampAttributeName: TimestampAttributeName;
+ /**
+ * The name of the column that represents the set of item identifiers for which you want to predict the target value.
+ */
+ ItemIdentifierAttributeName: ItemIdentifierAttributeName;
+ /**
+ * A set of columns names that can be grouped with the item identifier column to create a composite key for which a target value is predicted.
+ */
+ GroupingAttributeNames?: GroupingAttributeNames;
+ }
+ export interface TimeSeriesForecastingJobConfig {
+ /**
+ * A URL to the Amazon S3 data source containing additional selected features that complement the target, itemID, timestamp, and grouped columns set in TimeSeriesConfig. When not provided, the AutoML job V2 includes all the columns from the original dataset that are not already declared in TimeSeriesConfig. If provided, the AutoML job V2 only considers these additional columns as a complement to the ones declared in TimeSeriesConfig. You can input FeatureAttributeNames (optional) in JSON format as shown below: { "FeatureAttributeNames":["col1", "col2", ...] }. You can also specify the data type of the feature (optional) in the format shown below: { "FeatureDataTypes":{"col1":"numeric", "col2":"categorical" ... } } Autopilot supports the following data types: numeric, categorical, text, and datetime. These column keys must not include any column set in TimeSeriesConfig. When not provided, the AutoML job V2 includes all the columns from the original dataset that are not already declared in TimeSeriesConfig. If provided, the AutoML job V2 only considers these additional columns as a complement to the ones declared in TimeSeriesConfig. Autopilot supports the following data types: numeric, categorical, text, and datetime.
+ */
+ FeatureSpecificationS3Uri?: S3Uri;
+ CompletionCriteria?: AutoMLJobCompletionCriteria;
+ /**
+ * The frequency of predictions in a forecast. Valid intervals are an integer followed by Y (Year), M (Month), W (Week), D (Day), H (Hour), and min (Minute). For example, 1D indicates every day and 15min indicates every 15 minutes. The value of a frequency must not overlap with the next larger frequency. For example, you must use a frequency of 1H instead of 60min. The valid values for each frequency are the following: Minute - 1-59 Hour - 1-23 Day - 1-6 Week - 1-4 Month - 1-11 Year - 1
+ */
+ ForecastFrequency: ForecastFrequency;
+ /**
+ * The number of time-steps that the model predicts. The forecast horizon is also called the prediction length. The maximum forecast horizon is the lesser of 500 time-steps or 1/4 of the time-steps in the dataset.
+ */
+ ForecastHorizon: ForecastHorizon;
+ /**
+ * The quantiles used to train the model for forecasts at a specified quantile. You can specify quantiles from 0.01 (p1) to 0.99 (p99), by increments of 0.01 or higher. Up to five forecast quantiles can be specified. When ForecastQuantiles is not provided, the AutoML job uses the quantiles p10, p50, and p90 as default.
+ */
+ ForecastQuantiles?: ForecastQuantiles;
+ /**
+ * The transformations modifying specific attributes of the time-series, such as filling strategies for missing values.
+ */
+ Transformations?: TimeSeriesTransformations;
+ /**
+ * The collection of components that defines the time-series.
+ */
+ TimeSeriesConfig: TimeSeriesConfig;
+ }
  export interface TimeSeriesForecastingSettings {
  /**
  * Describes whether time series forecasting is enabled or disabled in the Canvas application.
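
As a companion to the new TimeSeriesForecastingJobConfig above, a minimal sketch (not part of the diff) of a CreateAutoMLJobV2 call for the time-series forecasting problem type; the job name, bucket, role ARN, and column names are placeholders.

// Sketch only: starting an AutoML V2 time-series forecasting job. All names below are hypothetical.
import { SageMaker } from 'aws-sdk';

const sagemaker = new SageMaker({ region: 'us-east-1' });

async function startForecastingJob(): Promise<void> {
  await sagemaker.createAutoMLJobV2({
    AutoMLJobName: 'demand-forecast-v2',                              // hypothetical job name
    RoleArn: 'arn:aws:iam::123456789012:role/AutopilotExecutionRole', // hypothetical execution role
    AutoMLJobInputDataConfig: [{
      ChannelType: 'training',                 // channels default to training for time-series forecasting
      ContentType: 'text/csv;header=present',
      DataSource: { S3DataSource: { S3DataType: 'S3Prefix', S3Uri: 's3://my-bucket/demand/train/' } }
    }],
    OutputDataConfig: { S3OutputPath: 's3://my-bucket/demand/output/' },
    AutoMLProblemTypeConfig: {
      TimeSeriesForecastingJobConfig: {
        ForecastFrequency: '1D',                  // one prediction per day
        ForecastHorizon: 14,                      // predict 14 time-steps ahead
        ForecastQuantiles: ['p10', 'p50', 'p90'], // the documented defaults, listed explicitly
        TimeSeriesConfig: {
          TargetAttributeName: 'demand',
          TimestampAttributeName: 'timestamp',
          ItemIdentifierAttributeName: 'item_id',
          GroupingAttributeNames: ['store_id']
        }
      }
    },
    AutoMLJobObjective: { MetricName: 'AverageWeightedQuantileLoss' } // default objective for this problem type per the doc update above
  }).promise();
}

DataSplitConfig is deliberately omitted here, matching the note earlier in this diff that Autopilot splits time-series input data into training and validation sets itself.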
@@ -20412,7 +20501,18 @@ declare namespace SageMaker {
  */
  AmazonForecastRoleArn?: RoleArn;
  }
+ export interface TimeSeriesTransformations {
+ /**
+ * A key value pair defining the filling method for a column, where the key is the column name and the value is an object which defines the filling logic. You can specify multiple filling methods for a single column. The supported filling methods and their corresponding options are: frontfill: none (Supported only for target column) middlefill: zero, value, median, mean, min, max backfill: zero, value, median, mean, min, max futurefill: zero, value, median, mean, min, max To set a filling method to a specific value, set the fill parameter to the chosen filling method value (for example "backfill" : "value"), and define the filling value in an additional parameter prefixed with "_value". For example, to set backfill to a value of 2, you must include two parameters: "backfill": "value" and "backfill_value":"2".
+ */
+ Filling?: FillingTransformations;
+ /**
+ * A key value pair defining the aggregation method for a column, where the key is the column name and the value is the aggregation method. The supported aggregation methods are sum (default), avg, first, min, max. Aggregation is only supported for the target column.
+ */
+ Aggregation?: AggregationTransformations;
+ }
  export type Timestamp = Date;
+ export type TimestampAttributeName = string;
  export type TrafficDurationInSeconds = number;
  export interface TrafficPattern {
  /**
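
To make the Filling encoding described above concrete, a small hypothetical Transformations value typed against these declarations; the column names demand and price are placeholders, not part of the diff.

// Sketch only: a Transformations block for the job config sketched earlier. Column names are hypothetical.
import { SageMaker } from 'aws-sdk';

const transformations: SageMaker.TimeSeriesTransformations = {
  Filling: {
    demand: {
      middlefill: 'zero',   // fill gaps inside the series with zeros
      backfill: 'value',    // backfill with an explicit value...
      backfill_value: '2'   // ...supplied via the companion "_value" parameter, per the description above
    },
    price: { futurefill: 'median' } // a related column filling its future values with the median
  },
  Aggregation: {
    demand: 'sum'           // aggregation applies to the target column only; sum is the default
  }
};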
@@ -20944,6 +21044,7 @@ declare namespace SageMaker {
  */
  S3Uri: S3Uri;
  }
+ export type TransformationAttributeName = string;
  export interface Trial {
  /**
  * The name of the trial.
@@ -532,7 +532,12 @@ declare namespace Transfer {
  * Used for outbound requests (from an Transfer Family server to a partner AS2 server) to determine whether the partner response for transfers is synchronous or asynchronous. Specify either of the following values: SYNC: The system expects a synchronous MDN response, confirming that the file was transferred successfully (or not). NONE: Specifies that no MDN response is required.
  */
  MdnResponse?: MdnResponse;
+ /**
+ * Provides Basic authentication support to the AS2 Connectors API. To use Basic authentication, you must provide the name or Amazon Resource Name (ARN) of a secret in Secrets Manager. The default value for this parameter is null, which indicates that Basic authentication is not enabled for the connector. If the connector should use Basic authentication, the secret needs to be in the following format: { "Username": "user-name", "Password": "user-password" } Replace user-name and user-password with the credentials for the actual user that is being authenticated. Note the following: You are storing these credentials in Secrets Manager, not passing them directly into this API. If you are using the API, SDKs, or CloudFormation to configure your connector, then you must create the secret before you can enable Basic authentication. However, if you are using the Amazon Web Services management console, you can have the system create the secret for you. If you have previously enabled Basic authentication for a connector, you can disable it by using the UpdateConnector API call. For example, if you are using the CLI, you can run the following command to remove Basic authentication: update-connector --connector-id my-connector-id --as2-config 'BasicAuthSecretId=""'
+ */
+ BasicAuthSecretId?: As2ConnectorSecretId;
  }
+ export type As2ConnectorSecretId = string;
  export type As2Id = string;
  export type As2Transport = "HTTP"|string;
  export type As2Transports = As2Transport[];
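
For the new BasicAuthSecretId field above, a hypothetical sketch (not part of the diff) of enabling and then disabling Basic authentication on an existing AS2 connector; the connector ID and secret ARN are placeholders.

// Sketch only: toggling Basic authentication on an AS2 connector. IDs and ARNs are hypothetical.
import { Transfer } from 'aws-sdk';

const transfer = new Transfer({ region: 'us-east-1' });

async function enableBasicAuth(): Promise<void> {
  await transfer.updateConnector({
    ConnectorId: 'c-1234567890abcdef0', // hypothetical connector
    As2Config: {
      // The referenced secret must hold { "Username": "...", "Password": "..." } in Secrets Manager,
      // and the connector's AccessRole needs secretsmanager:GetSecretValue on it (see the AccessRole descriptions that follow).
      BasicAuthSecretId: 'arn:aws:secretsmanager:us-east-1:123456789012:secret:as2-basic-auth-AbCdEf'
    }
  }).promise();
}

async function disableBasicAuth(): Promise<void> {
  // Mirrors the CLI example in the description above: pass an empty string to remove Basic authentication.
  await transfer.updateConnector({
    ConnectorId: 'c-1234567890abcdef0',
    As2Config: { BasicAuthSecretId: '' }
  }).promise();
}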
@@ -630,7 +635,7 @@ declare namespace Transfer {
  */
  BaseDirectory: HomeDirectory;
  /**
- * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer.
+ * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer. If you are using Basic authentication for your AS2 connector, the access role requires the secretsmanager:GetSecretValue permission for the secret. If the secret is encrypted using a customer-managed key instead of the Amazon Web Services managed key in Secrets Manager, then the role also needs the kms:Decrypt permission for that key.
  */
  AccessRole: Role;
  /**
@@ -658,7 +663,7 @@ declare namespace Transfer {
  */
  As2Config: As2ConnectorConfig;
  /**
- * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer.
+ * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer. If you are using Basic authentication for your AS2 connector, the access role requires the secretsmanager:GetSecretValue permission for the secret. If the secret is encrypted using a customer-managed key instead of the Amazon Web Services managed key in Secrets Manager, then the role also needs the kms:Decrypt permission for that key.
  */
  AccessRole: Role;
  /**
@@ -1211,7 +1216,7 @@ declare namespace Transfer {
  */
  BaseDirectory?: HomeDirectory;
  /**
- * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer.
+ * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer. If you are using Basic authentication for your AS2 connector, the access role requires the secretsmanager:GetSecretValue permission for the secret. If the secret is encrypted using a customer-managed key instead of the Amazon Web Services managed key in Secrets Manager, then the role also needs the kms:Decrypt permission for that key.
  */
  AccessRole?: Role;
  /**
@@ -1295,7 +1300,7 @@ declare namespace Transfer {
  */
  As2Config?: As2ConnectorConfig;
  /**
- * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer.
+ * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer. If you are using Basic authentication for your AS2 connector, the access role requires the secretsmanager:GetSecretValue permission for the secret. If the secret is encrypted using a customer-managed key instead of the Amazon Web Services managed key in Secrets Manager, then the role also needs the kms:Decrypt permission for that key.
  */
  AccessRole?: Role;
  /**
@@ -2706,7 +2711,7 @@ declare namespace Transfer {
  */
  BaseDirectory?: HomeDirectory;
  /**
- * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer.
+ * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer. If you are using Basic authentication for your AS2 connector, the access role requires the secretsmanager:GetSecretValue permission for the secret. If the secret is encrypted using a customer-managed key instead of the Amazon Web Services managed key in Secrets Manager, then the role also needs the kms:Decrypt permission for that key.
  */
  AccessRole?: Role;
  }
@@ -2754,7 +2759,7 @@ declare namespace Transfer {
  */
  As2Config?: As2ConnectorConfig;
  /**
- * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer.
+ * With AS2, you can send files by calling StartFileTransfer and specifying the file paths in the request parameter, SendFilePaths. We use the file’s parent directory (for example, for --send-file-paths /bucket/dir/file.txt, parent directory is /bucket/dir/) to temporarily store a processed AS2 message file, store the MDN when we receive them from the partner, and write a final JSON file containing relevant metadata of the transmission. So, the AccessRole needs to provide read and write access to the parent directory of the file location used in the StartFileTransfer request. Additionally, you need to provide read and write access to the parent directory of the files that you intend to send with StartFileTransfer. If you are using Basic authentication for your AS2 connector, the access role requires the secretsmanager:GetSecretValue permission for the secret. If the secret is encrypted using a customer-managed key instead of the Amazon Web Services managed key in Secrets Manager, then the role also needs the kms:Decrypt permission for that key.
  */
  AccessRole?: Role;
  /**
@@ -83,7 +83,7 @@ return /******/ (function(modules) { // webpackBootstrap
  /**
  * @constant
  */
- VERSION: '2.1407.0',
+ VERSION: '2.1409.0',
 
  /**
  * @api private