@aws-sdk/client-sagemaker 3.131.0 → 3.132.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. package/CHANGELOG.md +11 -0
  2. package/dist-cjs/SageMaker.js +135 -0
  3. package/dist-cjs/commands/CreateEdgeDeploymentPlanCommand.js +36 -0
  4. package/dist-cjs/commands/CreateEdgeDeploymentStageCommand.js +36 -0
  5. package/dist-cjs/commands/CreateHyperParameterTuningJobCommand.js +3 -3
  6. package/dist-cjs/commands/DeleteEdgeDeploymentPlanCommand.js +36 -0
  7. package/dist-cjs/commands/DeleteEdgeDeploymentStageCommand.js +36 -0
  8. package/dist-cjs/commands/DescribeEdgeDeploymentPlanCommand.js +36 -0
  9. package/dist-cjs/commands/DescribeHyperParameterTuningJobCommand.js +3 -3
  10. package/dist-cjs/commands/DescribeImageCommand.js +3 -3
  11. package/dist-cjs/commands/DescribeImageVersionCommand.js +3 -3
  12. package/dist-cjs/commands/DescribeInferenceRecommendationsJobCommand.js +1 -2
  13. package/dist-cjs/commands/ListEdgeDeploymentPlansCommand.js +36 -0
  14. package/dist-cjs/commands/ListPipelineExecutionStepsCommand.js +3 -3
  15. package/dist-cjs/commands/ListPipelineParametersForExecutionCommand.js +3 -3
  16. package/dist-cjs/commands/ListPipelinesCommand.js +3 -3
  17. package/dist-cjs/commands/ListProcessingJobsCommand.js +3 -3
  18. package/dist-cjs/commands/ListStageDevicesCommand.js +36 -0
  19. package/dist-cjs/commands/StartEdgeDeploymentStageCommand.js +36 -0
  20. package/dist-cjs/commands/StopEdgeDeploymentStageCommand.js +36 -0
  21. package/dist-cjs/commands/index.js +9 -0
  22. package/dist-cjs/models/models_0.js +54 -54
  23. package/dist-cjs/models/models_1.js +106 -112
  24. package/dist-cjs/models/models_2.js +152 -140
  25. package/dist-cjs/models/models_3.js +162 -3
  26. package/dist-cjs/pagination/ListEdgeDeploymentPlansPaginator.js +36 -0
  27. package/dist-cjs/pagination/ListStageDevicesPaginator.js +36 -0
  28. package/dist-cjs/pagination/index.js +2 -0
  29. package/dist-cjs/protocols/Aws_json1_1.js +710 -11
  30. package/dist-es/SageMaker.js +135 -0
  31. package/dist-es/commands/CreateEdgeDeploymentPlanCommand.js +39 -0
  32. package/dist-es/commands/CreateEdgeDeploymentStageCommand.js +39 -0
  33. package/dist-es/commands/CreateHyperParameterTuningJobCommand.js +1 -1
  34. package/dist-es/commands/DeleteEdgeDeploymentPlanCommand.js +39 -0
  35. package/dist-es/commands/DeleteEdgeDeploymentStageCommand.js +39 -0
  36. package/dist-es/commands/DescribeEdgeDeploymentPlanCommand.js +39 -0
  37. package/dist-es/commands/DescribeHyperParameterTuningJobCommand.js +1 -1
  38. package/dist-es/commands/DescribeImageCommand.js +1 -1
  39. package/dist-es/commands/DescribeImageVersionCommand.js +1 -1
  40. package/dist-es/commands/DescribeInferenceRecommendationsJobCommand.js +1 -2
  41. package/dist-es/commands/ListEdgeDeploymentPlansCommand.js +39 -0
  42. package/dist-es/commands/ListPipelineExecutionStepsCommand.js +1 -1
  43. package/dist-es/commands/ListPipelineParametersForExecutionCommand.js +1 -1
  44. package/dist-es/commands/ListPipelinesCommand.js +1 -1
  45. package/dist-es/commands/ListProcessingJobsCommand.js +1 -1
  46. package/dist-es/commands/ListStageDevicesCommand.js +39 -0
  47. package/dist-es/commands/StartEdgeDeploymentStageCommand.js +39 -0
  48. package/dist-es/commands/StopEdgeDeploymentStageCommand.js +39 -0
  49. package/dist-es/commands/index.js +9 -0
  50. package/dist-es/models/models_0.js +39 -37
  51. package/dist-es/models/models_1.js +72 -82
  52. package/dist-es/models/models_2.js +114 -94
  53. package/dist-es/models/models_3.js +110 -0
  54. package/dist-es/pagination/ListEdgeDeploymentPlansPaginator.js +75 -0
  55. package/dist-es/pagination/ListStageDevicesPaginator.js +75 -0
  56. package/dist-es/pagination/index.js +2 -0
  57. package/dist-es/protocols/Aws_json1_1.js +798 -12
  58. package/dist-types/SageMaker.d.ts +63 -0
  59. package/dist-types/SageMakerClient.d.ts +11 -2
  60. package/dist-types/commands/CreateEdgeDeploymentPlanCommand.d.ts +35 -0
  61. package/dist-types/commands/CreateEdgeDeploymentStageCommand.d.ts +35 -0
  62. package/dist-types/commands/CreateHyperParameterTuningJobCommand.d.ts +1 -1
  63. package/dist-types/commands/DeleteEdgeDeploymentPlanCommand.d.ts +35 -0
  64. package/dist-types/commands/DeleteEdgeDeploymentStageCommand.d.ts +35 -0
  65. package/dist-types/commands/DescribeEdgeDeploymentPlanCommand.d.ts +35 -0
  66. package/dist-types/commands/DescribeHyperParameterTuningJobCommand.d.ts +1 -1
  67. package/dist-types/commands/DescribeImageCommand.d.ts +1 -1
  68. package/dist-types/commands/DescribeImageVersionCommand.d.ts +1 -1
  69. package/dist-types/commands/DescribeInferenceRecommendationsJobCommand.d.ts +1 -2
  70. package/dist-types/commands/ListEdgeDeploymentPlansCommand.d.ts +35 -0
  71. package/dist-types/commands/ListPipelineExecutionStepsCommand.d.ts +1 -1
  72. package/dist-types/commands/ListPipelineParametersForExecutionCommand.d.ts +1 -1
  73. package/dist-types/commands/ListPipelinesCommand.d.ts +1 -1
  74. package/dist-types/commands/ListProcessingJobsCommand.d.ts +1 -1
  75. package/dist-types/commands/ListStageDevicesCommand.d.ts +35 -0
  76. package/dist-types/commands/StartEdgeDeploymentStageCommand.d.ts +35 -0
  77. package/dist-types/commands/StopEdgeDeploymentStageCommand.d.ts +35 -0
  78. package/dist-types/commands/index.d.ts +9 -0
  79. package/dist-types/models/models_0.d.ts +150 -451
  80. package/dist-types/models/models_1.d.ts +645 -504
  81. package/dist-types/models/models_2.d.ts +5430 -5423
  82. package/dist-types/models/models_3.d.ts +730 -3
  83. package/dist-types/pagination/ListEdgeDeploymentPlansPaginator.d.ts +4 -0
  84. package/dist-types/pagination/ListStageDevicesPaginator.d.ts +4 -0
  85. package/dist-types/pagination/index.d.ts +2 -0
  86. package/dist-types/protocols/Aws_json1_1.d.ts +27 -0
  87. package/dist-types/ts3.4/SageMaker.d.ts +45 -0
  88. package/dist-types/ts3.4/SageMakerClient.d.ts +11 -2
  89. package/dist-types/ts3.4/commands/CreateEdgeDeploymentPlanCommand.d.ts +17 -0
  90. package/dist-types/ts3.4/commands/CreateEdgeDeploymentStageCommand.d.ts +17 -0
  91. package/dist-types/ts3.4/commands/CreateHyperParameterTuningJobCommand.d.ts +1 -1
  92. package/dist-types/ts3.4/commands/DeleteEdgeDeploymentPlanCommand.d.ts +17 -0
  93. package/dist-types/ts3.4/commands/DeleteEdgeDeploymentStageCommand.d.ts +17 -0
  94. package/dist-types/ts3.4/commands/DescribeEdgeDeploymentPlanCommand.d.ts +17 -0
  95. package/dist-types/ts3.4/commands/DescribeHyperParameterTuningJobCommand.d.ts +1 -1
  96. package/dist-types/ts3.4/commands/DescribeImageCommand.d.ts +1 -1
  97. package/dist-types/ts3.4/commands/DescribeImageVersionCommand.d.ts +1 -1
  98. package/dist-types/ts3.4/commands/DescribeInferenceRecommendationsJobCommand.d.ts +1 -2
  99. package/dist-types/ts3.4/commands/ListEdgeDeploymentPlansCommand.d.ts +17 -0
  100. package/dist-types/ts3.4/commands/ListPipelineExecutionStepsCommand.d.ts +1 -1
  101. package/dist-types/ts3.4/commands/ListPipelineParametersForExecutionCommand.d.ts +1 -1
  102. package/dist-types/ts3.4/commands/ListPipelinesCommand.d.ts +1 -1
  103. package/dist-types/ts3.4/commands/ListProcessingJobsCommand.d.ts +1 -1
  104. package/dist-types/ts3.4/commands/ListStageDevicesCommand.d.ts +17 -0
  105. package/dist-types/ts3.4/commands/StartEdgeDeploymentStageCommand.d.ts +17 -0
  106. package/dist-types/ts3.4/commands/StopEdgeDeploymentStageCommand.d.ts +17 -0
  107. package/dist-types/ts3.4/commands/index.d.ts +9 -0
  108. package/dist-types/ts3.4/models/models_0.d.ts +91 -132
  109. package/dist-types/ts3.4/models/models_1.d.ts +235 -238
  110. package/dist-types/ts3.4/models/models_2.d.ts +341 -324
  111. package/dist-types/ts3.4/models/models_3.d.ts +371 -3
  112. package/dist-types/ts3.4/pagination/ListEdgeDeploymentPlansPaginator.d.ts +4 -0
  113. package/dist-types/ts3.4/pagination/ListStageDevicesPaginator.d.ts +4 -0
  114. package/dist-types/ts3.4/pagination/index.d.ts +2 -0
  115. package/dist-types/ts3.4/protocols/Aws_json1_1.d.ts +27 -0
  116. package/package.json +1 -1
@@ -8300,6 +8300,156 @@ export declare namespace CreateDomainResponse {
8300
8300
  */
8301
8301
  const filterSensitiveLog: (obj: CreateDomainResponse) => any;
8302
8302
  }
8303
+ /**
8304
+ * <p>Contains information about the configuration of a model in a deployment.</p>
8305
+ */
8306
+ export interface EdgeDeploymentModelConfig {
8307
+ /**
8308
+ * <p>The name the device application uses to reference this model.</p>
8309
+ */
8310
+ ModelHandle: string | undefined;
8311
+ /**
8312
+ * <p>The edge packaging job associated with this deployment.</p>
8313
+ */
8314
+ EdgePackagingJobName: string | undefined;
8315
+ }
8316
+ export declare namespace EdgeDeploymentModelConfig {
8317
+ /**
8318
+ * @internal
8319
+ */
8320
+ const filterSensitiveLog: (obj: EdgeDeploymentModelConfig) => any;
8321
+ }
8322
+ export declare enum FailureHandlingPolicy {
8323
+ DoNothing = "DO_NOTHING",
8324
+ RollbackOnFailure = "ROLLBACK_ON_FAILURE"
8325
+ }
8326
+ /**
8327
+ * <p>Contains information about the configuration of a deployment.</p>
8328
+ */
8329
+ export interface EdgeDeploymentConfig {
8330
+ /**
8331
+ * <p>Toggle that determines whether to rollback to previous configuration if the current deployment fails.
8332
+ * By default this is turned on. You may turn this off if you want to investigate the errors yourself.</p>
8333
+ */
8334
+ FailureHandlingPolicy: FailureHandlingPolicy | string | undefined;
8335
+ }
8336
+ export declare namespace EdgeDeploymentConfig {
8337
+ /**
8338
+ * @internal
8339
+ */
8340
+ const filterSensitiveLog: (obj: EdgeDeploymentConfig) => any;
8341
+ }
8342
+ export declare enum DeviceSubsetType {
8343
+ NameContains = "NAMECONTAINS",
8344
+ Percentage = "PERCENTAGE",
8345
+ Selection = "SELECTION"
8346
+ }
8347
+ /**
8348
+ * <p>Contains information about the configurations of selected devices.</p>
8349
+ */
8350
+ export interface DeviceSelectionConfig {
8351
+ /**
8352
+ * <p>Type of device subsets to deploy to the current stage.</p>
8353
+ */
8354
+ DeviceSubsetType: DeviceSubsetType | string | undefined;
8355
+ /**
8356
+ * <p>Percentage of devices in the fleet to deploy to the current stage.</p>
8357
+ */
8358
+ Percentage?: number;
8359
+ /**
8360
+ * <p>List of devices chosen to deploy.</p>
8361
+ */
8362
+ DeviceNames?: string[];
8363
+ /**
8364
+ * <p>A filter to select devices with names containing this name.</p>
8365
+ */
8366
+ DeviceNameContains?: string;
8367
+ }
8368
+ export declare namespace DeviceSelectionConfig {
8369
+ /**
8370
+ * @internal
8371
+ */
8372
+ const filterSensitiveLog: (obj: DeviceSelectionConfig) => any;
8373
+ }
8374
+ /**
8375
+ * <p>Contains information about a stage in an edge deployment plan.</p>
8376
+ */
8377
+ export interface DeploymentStage {
8378
+ /**
8379
+ * <p>The name of the stage.</p>
8380
+ */
8381
+ StageName: string | undefined;
8382
+ /**
8383
+ * <p>Configuration of the devices in the stage.</p>
8384
+ */
8385
+ DeviceSelectionConfig: DeviceSelectionConfig | undefined;
8386
+ /**
8387
+ * <p>Configuration of the deployment details.</p>
8388
+ */
8389
+ DeploymentConfig?: EdgeDeploymentConfig;
8390
+ }
8391
+ export declare namespace DeploymentStage {
8392
+ /**
8393
+ * @internal
8394
+ */
8395
+ const filterSensitiveLog: (obj: DeploymentStage) => any;
8396
+ }
8397
+ export interface CreateEdgeDeploymentPlanRequest {
8398
+ /**
8399
+ * <p>The name of the edge deployment plan.</p>
8400
+ */
8401
+ EdgeDeploymentPlanName: string | undefined;
8402
+ /**
8403
+ * <p>List of models associated with the edge deployment plan.</p>
8404
+ */
8405
+ ModelConfigs: EdgeDeploymentModelConfig[] | undefined;
8406
+ /**
8407
+ * <p>The device fleet used for this edge deployment plan.</p>
8408
+ */
8409
+ DeviceFleetName: string | undefined;
8410
+ /**
8411
+ * <p>List of stages of the edge deployment plan. The number of stages is limited to 10 per deployment.</p>
8412
+ */
8413
+ Stages?: DeploymentStage[];
8414
+ /**
8415
+ * <p>List of tags with which to tag the edge deployment plan.</p>
8416
+ */
8417
+ Tags?: Tag[];
8418
+ }
8419
+ export declare namespace CreateEdgeDeploymentPlanRequest {
8420
+ /**
8421
+ * @internal
8422
+ */
8423
+ const filterSensitiveLog: (obj: CreateEdgeDeploymentPlanRequest) => any;
8424
+ }
8425
+ export interface CreateEdgeDeploymentPlanResponse {
8426
+ /**
8427
+ * <p>The ARN of the edge deployment plan.</p>
8428
+ */
8429
+ EdgeDeploymentPlanArn: string | undefined;
8430
+ }
8431
+ export declare namespace CreateEdgeDeploymentPlanResponse {
8432
+ /**
8433
+ * @internal
8434
+ */
8435
+ const filterSensitiveLog: (obj: CreateEdgeDeploymentPlanResponse) => any;
8436
+ }
8437
+ export interface CreateEdgeDeploymentStageRequest {
8438
+ /**
8439
+ * <p>The name of the edge deployment plan.</p>
8440
+ */
8441
+ EdgeDeploymentPlanName: string | undefined;
8442
+ /**
8443
+ * <p>List of stages to be added to the edge deployment plan.</p>
8444
+ */
8445
+ Stages: DeploymentStage[] | undefined;
8446
+ }
8447
+ export declare namespace CreateEdgeDeploymentStageRequest {
8448
+ /**
8449
+ * @internal
8450
+ */
8451
+ const filterSensitiveLog: (obj: CreateEdgeDeploymentStageRequest) => any;
8452
+ }
8303
8453
  export interface CreateEdgePackagingJobRequest {
8304
8454
  /**
8305
8455
  * <p>The name of the edge packaging job.</p>
@@ -10115,454 +10265,3 @@ export declare namespace TuningJobCompletionCriteria {
10115
10265
  */
10116
10266
  const filterSensitiveLog: (obj: TuningJobCompletionCriteria) => any;
10117
10267
  }
10118
- /**
10119
- * <p>Configures a hyperparameter tuning job.</p>
10120
- */
10121
- export interface HyperParameterTuningJobConfig {
10122
- /**
10123
- * <p>Specifies how hyperparameter tuning chooses the combinations of hyperparameter values
10124
- * to use for the training job it launches. To use the Bayesian search strategy, set this
10125
- * to <code>Bayesian</code>. To randomly search, set it to <code>Random</code>. For
10126
- * information about search strategies, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-how-it-works.html">How
10127
- * Hyperparameter Tuning Works</a>.</p>
10128
- */
10129
- Strategy: HyperParameterTuningJobStrategyType | string | undefined;
10130
- /**
10131
- * <p>The <a>HyperParameterTuningJobObjective</a> object that specifies the
10132
- * objective
10133
- * metric for this tuning job.</p>
10134
- */
10135
- HyperParameterTuningJobObjective?: HyperParameterTuningJobObjective;
10136
- /**
10137
- * <p>The <a>ResourceLimits</a> object that specifies the
10138
- * maximum
10139
- * number of training jobs and parallel training jobs for this tuning
10140
- * job.</p>
10141
- */
10142
- ResourceLimits: ResourceLimits | undefined;
10143
- /**
10144
- * <p>The <a>ParameterRanges</a> object that specifies the ranges of
10145
- * hyperparameters
10146
- * that this tuning job searches.</p>
10147
- */
10148
- ParameterRanges?: ParameterRanges;
10149
- /**
10150
- * <p>Specifies whether to use early stopping for training jobs launched by the
10151
- * hyperparameter tuning job. This can be one of the following values (the default value is
10152
- * <code>OFF</code>):</p>
10153
- * <dl>
10154
- * <dt>OFF</dt>
10155
- * <dd>
10156
- * <p>Training jobs launched by the hyperparameter tuning job do not use early
10157
- * stopping.</p>
10158
- * </dd>
10159
- * <dt>AUTO</dt>
10160
- * <dd>
10161
- * <p>SageMaker stops training jobs launched by the hyperparameter tuning job when
10162
- * they are unlikely to perform better than previously completed training jobs.
10163
- * For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-early-stopping.html">Stop Training Jobs Early</a>.</p>
10164
- * </dd>
10165
- * </dl>
10166
- */
10167
- TrainingJobEarlyStoppingType?: TrainingJobEarlyStoppingType | string;
10168
- /**
10169
- * <p>The tuning job's completion criteria.</p>
10170
- */
10171
- TuningJobCompletionCriteria?: TuningJobCompletionCriteria;
10172
- }
10173
- export declare namespace HyperParameterTuningJobConfig {
10174
- /**
10175
- * @internal
10176
- */
10177
- const filterSensitiveLog: (obj: HyperParameterTuningJobConfig) => any;
10178
- }
10179
- /**
10180
- * <p>Specifies
10181
- * which
10182
- * training algorithm to use for training jobs that a hyperparameter
10183
- * tuning job launches and the metrics to monitor.</p>
10184
- */
10185
- export interface HyperParameterAlgorithmSpecification {
10186
- /**
10187
- * <p> The registry path of the Docker image that contains the training algorithm. For
10188
- * information about Docker registry paths for built-in algorithms, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-algo-docker-registry-paths.html">Algorithms
10189
- * Provided by Amazon SageMaker: Common Parameters</a>. SageMaker supports both
10190
- * <code>registry/repository[:tag]</code> and <code>registry/repository[@digest]</code>
10191
- * image path formats. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms.html">Using Your Own Algorithms with Amazon
10192
- * SageMaker</a>.</p>
10193
- */
10194
- TrainingImage?: string;
10195
- /**
10196
- * <p>The training input mode that the algorithm supports. For more information about input modes, see
10197
- * <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/algos.html">Algorithms</a>.</p>
10198
- *
10199
- * <p>
10200
- * <b>Pipe mode</b>
10201
- * </p>
10202
- * <p>If an algorithm supports <code>Pipe</code> mode, Amazon SageMaker streams data directly
10203
- * from Amazon S3 to the container.</p>
10204
- *
10205
- * <p>
10206
- * <b>File mode</b>
10207
- * </p>
10208
- * <p>If an algorithm supports <code>File</code> mode, SageMaker
10209
- * downloads the training data from S3 to the provisioned ML storage volume, and mounts the
10210
- * directory to the Docker volume for the training container.</p>
10211
- * <p>You must provision the ML storage volume with sufficient capacity
10212
- * to accommodate the data downloaded from S3. In addition to the training data, the ML
10213
- * storage volume also stores the output model. The algorithm container uses the ML storage
10214
- * volume to also store intermediate information, if any.</p>
10215
- * <p>For distributed algorithms, training data is distributed uniformly.
10216
- * Your training duration is predictable if the input data objects sizes are
10217
- * approximately the same. SageMaker does not split the files any further for model training.
10218
- * If the object sizes are skewed, training won't be optimal as the data distribution is also
10219
- * skewed when one host in a training cluster is overloaded, thus becoming a bottleneck in
10220
- * training.</p>
10221
- *
10222
- * <p>
10223
- * <b>FastFile mode</b>
10224
- * </p>
10225
- * <p>If an algorithm supports <code>FastFile</code> mode, SageMaker streams data directly
10226
- * from S3 to the container with no code changes, and provides file system access to
10227
- * the data. Users can author their training script to interact with these files as if
10228
- * they were stored on disk.</p>
10229
- * <p>
10230
- * <code>FastFile</code> mode works best when the data is read sequentially.
10231
- * Augmented manifest files aren't supported.
10232
- * The startup time is lower when there are fewer files in the S3 bucket provided.</p>
10233
- */
10234
- TrainingInputMode: TrainingInputMode | string | undefined;
10235
- /**
10236
- * <p>The name of the resource algorithm to use for the hyperparameter tuning job. If you
10237
- * specify a value for this parameter, do not specify a value for
10238
- * <code>TrainingImage</code>.</p>
10239
- */
10240
- AlgorithmName?: string;
10241
- /**
10242
- * <p>An array of <a>MetricDefinition</a> objects that specify the
10243
- * metrics
10244
- * that the algorithm emits.</p>
10245
- */
10246
- MetricDefinitions?: MetricDefinition[];
10247
- }
10248
- export declare namespace HyperParameterAlgorithmSpecification {
10249
- /**
10250
- * @internal
10251
- */
10252
- const filterSensitiveLog: (obj: HyperParameterAlgorithmSpecification) => any;
10253
- }
10254
- /**
10255
- * <p>The retry strategy to use when a training job fails due to an
10256
- * <code>InternalServerError</code>. <code>RetryStrategy</code> is specified as part of
10257
- * the <code>CreateTrainingJob</code> and <code>CreateHyperParameterTuningJob</code>
10258
- * requests. You can add the <code>StoppingCondition</code> parameter to the request to
10259
- * limit the training time for the complete job.</p>
10260
- */
10261
- export interface RetryStrategy {
10262
- /**
10263
- * <p>The number of times to retry the job. When the job is retried, it's
10264
- * <code>SecondaryStatus</code> is changed to <code>STARTING</code>.</p>
10265
- */
10266
- MaximumRetryAttempts: number | undefined;
10267
- }
10268
- export declare namespace RetryStrategy {
10269
- /**
10270
- * @internal
10271
- */
10272
- const filterSensitiveLog: (obj: RetryStrategy) => any;
10273
- }
10274
- /**
10275
- * <p>Defines
10276
- * the training jobs launched by a hyperparameter tuning job.</p>
10277
- */
10278
- export interface HyperParameterTrainingJobDefinition {
10279
- /**
10280
- * <p>The job definition name.</p>
10281
- */
10282
- DefinitionName?: string;
10283
- /**
10284
- * <p>Defines the objective metric for a hyperparameter tuning job.
10285
- * Hyperparameter
10286
- * tuning uses the value of this metric to evaluate the training jobs it launches, and
10287
- * returns the training job that results in either the highest or lowest value for this
10288
- * metric, depending on the value you specify for the <code>Type</code>
10289
- * parameter.</p>
10290
- */
10291
- TuningObjective?: HyperParameterTuningJobObjective;
10292
- /**
10293
- * <p>Specifies ranges of integer, continuous, and categorical hyperparameters that a
10294
- * hyperparameter tuning job searches. The hyperparameter tuning job launches training jobs
10295
- * with hyperparameter values within these ranges to find the combination of values that
10296
- * result in the training job with the best performance as measured by the objective metric
10297
- * of the hyperparameter tuning job.</p>
10298
- * <note>
10299
- * <p>The maximum number of items specified for <code>Array Members</code> refers to
10300
- * the maximum number of hyperparameters for each range and also the maximum for the
10301
- * hyperparameter tuning job itself. That is, the sum of the number of hyperparameters
10302
- * for all the ranges can't exceed the maximum number specified.</p>
10303
- * </note>
10304
- */
10305
- HyperParameterRanges?: ParameterRanges;
10306
- /**
10307
- * <p>Specifies the values of hyperparameters
10308
- * that
10309
- * do not change for the tuning job.</p>
10310
- */
10311
- StaticHyperParameters?: Record<string, string>;
10312
- /**
10313
- * <p>The <a>HyperParameterAlgorithmSpecification</a> object that
10314
- * specifies
10315
- * the resource algorithm to use for the training jobs that the tuning
10316
- * job launches.</p>
10317
- */
10318
- AlgorithmSpecification: HyperParameterAlgorithmSpecification | undefined;
10319
- /**
10320
- * <p>The Amazon Resource Name (ARN) of the
10321
- * IAM
10322
- * role associated with the training jobs that the tuning job
10323
- * launches.</p>
10324
- */
10325
- RoleArn: string | undefined;
10326
- /**
10327
- * <p>An array of <a>Channel</a> objects that specify
10328
- * the
10329
- * input for the training jobs that the tuning job launches.</p>
10330
- */
10331
- InputDataConfig?: Channel[];
10332
- /**
10333
- * <p>The <a>VpcConfig</a> object that
10334
- * specifies
10335
- * the VPC that you want the training jobs that this hyperparameter
10336
- * tuning job launches to connect to. Control access to and from your
10337
- * training
10338
- * container by configuring the VPC. For more information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/train-vpc.html">Protect Training Jobs
10339
- * by Using an Amazon Virtual Private Cloud</a>.</p>
10340
- */
10341
- VpcConfig?: VpcConfig;
10342
- /**
10343
- * <p>Specifies the path to the Amazon S3 bucket where you
10344
- * store
10345
- * model artifacts from the training jobs that the tuning job
10346
- * launches.</p>
10347
- */
10348
- OutputDataConfig: OutputDataConfig | undefined;
10349
- /**
10350
- * <p>The resources,
10351
- * including
10352
- * the compute instances and storage volumes, to use for the training
10353
- * jobs that the tuning job launches.</p>
10354
- * <p>Storage volumes store model artifacts and
10355
- * incremental
10356
- * states. Training algorithms might also use storage volumes for
10357
- * scratch
10358
- * space. If you want SageMaker to use the storage volume to store the
10359
- * training data, choose <code>File</code> as the <code>TrainingInputMode</code> in the
10360
- * algorithm specification. For distributed training algorithms, specify an instance count
10361
- * greater than 1.</p>
10362
- */
10363
- ResourceConfig: ResourceConfig | undefined;
10364
- /**
10365
- * <p>Specifies a limit to how long a model hyperparameter training job can run. It also
10366
- * specifies how long a managed spot training job has to complete. When the job reaches the
10367
- * time limit, SageMaker ends the training job. Use this API to cap model training costs.</p>
10368
- */
10369
- StoppingCondition: StoppingCondition | undefined;
10370
- /**
10371
- * <p>Isolates the training container. No inbound or outbound network calls can be made,
10372
- * except for calls between peers within a training cluster for distributed training. If
10373
- * network isolation is used for training jobs that are configured to use a VPC, SageMaker
10374
- * downloads and uploads customer data and model artifacts through the specified VPC, but
10375
- * the training container does not have network access.</p>
10376
- */
10377
- EnableNetworkIsolation?: boolean;
10378
- /**
10379
- * <p>To encrypt all communications between ML compute instances in distributed training,
10380
- * choose <code>True</code>. Encryption provides greater security for distributed training,
10381
- * but training might take longer. How long it takes depends on the amount of communication
10382
- * between compute instances, especially if you use a deep learning algorithm in
10383
- * distributed training.</p>
10384
- */
10385
- EnableInterContainerTrafficEncryption?: boolean;
10386
- /**
10387
- * <p>A Boolean indicating whether managed spot training is enabled (<code>True</code>) or
10388
- * not (<code>False</code>).</p>
10389
- */
10390
- EnableManagedSpotTraining?: boolean;
10391
- /**
10392
- * <p>Contains information about the output location for managed spot training checkpoint
10393
- * data. </p>
10394
- */
10395
- CheckpointConfig?: CheckpointConfig;
10396
- /**
10397
- * <p>The number of times to retry the job when the job fails due to an
10398
- * <code>InternalServerError</code>.</p>
10399
- */
10400
- RetryStrategy?: RetryStrategy;
10401
- }
10402
- export declare namespace HyperParameterTrainingJobDefinition {
10403
- /**
10404
- * @internal
10405
- */
10406
- const filterSensitiveLog: (obj: HyperParameterTrainingJobDefinition) => any;
10407
- }
10408
- /**
10409
- * <p>A previously completed or stopped hyperparameter tuning job to be used as a starting
10410
- * point for a new hyperparameter tuning job.</p>
10411
- */
10412
- export interface ParentHyperParameterTuningJob {
10413
- /**
10414
- * <p>The name of the hyperparameter tuning job to be used as a starting point for a new
10415
- * hyperparameter tuning job.</p>
10416
- */
10417
- HyperParameterTuningJobName?: string;
10418
- }
10419
- export declare namespace ParentHyperParameterTuningJob {
10420
- /**
10421
- * @internal
10422
- */
10423
- const filterSensitiveLog: (obj: ParentHyperParameterTuningJob) => any;
10424
- }
10425
- export declare enum HyperParameterTuningJobWarmStartType {
10426
- IDENTICAL_DATA_AND_ALGORITHM = "IdenticalDataAndAlgorithm",
10427
- TRANSFER_LEARNING = "TransferLearning"
10428
- }
10429
- /**
10430
- * <p>Specifies the configuration for a hyperparameter tuning job that uses one or more
10431
- * previous hyperparameter tuning jobs as a starting point. The results of previous tuning
10432
- * jobs are used to inform which combinations of hyperparameters to search over in the new
10433
- * tuning job.</p>
10434
- * <p>All training jobs launched by the new hyperparameter tuning job are evaluated by using
10435
- * the objective metric, and the training job that performs the best is compared to the
10436
- * best training jobs from the parent tuning jobs. From these, the training job that
10437
- * performs the best as measured by the objective metric is returned as the overall best
10438
- * training job.</p>
10439
- * <note>
10440
- * <p>All training jobs launched by parent hyperparameter tuning jobs and the new
10441
- * hyperparameter tuning jobs count against the limit of training jobs for the tuning
10442
- * job.</p>
10443
- * </note>
10444
- */
10445
- export interface HyperParameterTuningJobWarmStartConfig {
10446
- /**
10447
- * <p>An array of hyperparameter tuning jobs that are used as the starting point for the new
10448
- * hyperparameter tuning job. For more information about warm starting a hyperparameter
10449
- * tuning job, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-warm-start.html">Using a Previous
10450
- * Hyperparameter Tuning Job as a Starting Point</a>.</p>
10451
- * <p>Hyperparameter tuning jobs created before October 1, 2018 cannot be used as parent
10452
- * jobs for warm start tuning jobs.</p>
10453
- */
10454
- ParentHyperParameterTuningJobs: ParentHyperParameterTuningJob[] | undefined;
10455
- /**
10456
- * <p>Specifies one of the following:</p>
10457
- * <dl>
10458
- * <dt>IDENTICAL_DATA_AND_ALGORITHM</dt>
10459
- * <dd>
10460
- * <p>The new hyperparameter tuning job uses the same input data and training
10461
- * image as the parent tuning jobs. You can change the hyperparameter ranges to
10462
- * search and the maximum number of training jobs that the hyperparameter
10463
- * tuning job launches. You cannot use a new version of the training algorithm,
10464
- * unless the changes in the new version do not affect the algorithm itself.
10465
- * For example, changes that improve logging or adding support for a different
10466
- * data format are allowed. You can also change hyperparameters from tunable to
10467
- * static, and from static to tunable, but the total number of static plus
10468
- * tunable hyperparameters must remain the same as it is in all parent jobs.
10469
- * The objective metric for the new tuning job must be the same as for all
10470
- * parent jobs.</p>
10471
- * </dd>
10472
- * <dt>TRANSFER_LEARNING</dt>
10473
- * <dd>
10474
- * <p>The new hyperparameter tuning job can include input data, hyperparameter
10475
- * ranges, maximum number of concurrent training jobs, and maximum number of
10476
- * training jobs that are different than those of its parent hyperparameter
10477
- * tuning jobs. The training image can also be a different version from the
10478
- * version used in the parent hyperparameter tuning job. You can also change
10479
- * hyperparameters from tunable to static, and from static to tunable, but the
10480
- * total number of static plus tunable hyperparameters must remain the same as
10481
- * it is in all parent jobs. The objective metric for the new tuning job must
10482
- * be the same as for all parent jobs.</p>
10483
- * </dd>
10484
- * </dl>
10485
- */
10486
- WarmStartType: HyperParameterTuningJobWarmStartType | string | undefined;
10487
- }
10488
- export declare namespace HyperParameterTuningJobWarmStartConfig {
10489
- /**
10490
- * @internal
10491
- */
10492
- const filterSensitiveLog: (obj: HyperParameterTuningJobWarmStartConfig) => any;
10493
- }
10494
- export interface CreateHyperParameterTuningJobRequest {
10495
- /**
10496
- * <p>The name of the tuning job. This name is the prefix for the names of all training jobs
10497
- * that this tuning job launches. The name must be unique within the same Amazon Web Services account and
10498
- * Amazon Web Services Region. The name must have 1 to 32 characters. Valid characters are a-z, A-Z, 0-9,
10499
- * and : + = @ _ % - (hyphen). The name is not case sensitive.</p>
10500
- */
10501
- HyperParameterTuningJobName: string | undefined;
10502
- /**
10503
- * <p>The <a>HyperParameterTuningJobConfig</a> object that describes the tuning
10504
- * job, including the search strategy, the objective metric used to evaluate training jobs,
10505
- * ranges of parameters to search, and resource limits for the tuning job. For more
10506
- * information, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-how-it-works.html">How
10507
- * Hyperparameter Tuning Works</a>.</p>
10508
- */
10509
- HyperParameterTuningJobConfig: HyperParameterTuningJobConfig | undefined;
10510
- /**
10511
- * <p>The <a>HyperParameterTrainingJobDefinition</a> object that describes the
10512
- * training jobs that this tuning job launches, including static hyperparameters, input
10513
- * data configuration, output data configuration, resource configuration, and stopping
10514
- * condition.</p>
10515
- */
10516
- TrainingJobDefinition?: HyperParameterTrainingJobDefinition;
10517
- /**
10518
- * <p>A list of the <a>HyperParameterTrainingJobDefinition</a> objects launched
10519
- * for this tuning job.</p>
10520
- */
10521
- TrainingJobDefinitions?: HyperParameterTrainingJobDefinition[];
10522
- /**
10523
- * <p>Specifies the configuration for starting the hyperparameter tuning job using one or
10524
- * more previous tuning jobs as a starting point. The results of previous tuning jobs are
10525
- * used to inform which combinations of hyperparameters to search over in the new tuning
10526
- * job.</p>
10527
- * <p>All training jobs launched by the new hyperparameter tuning job are evaluated by using
10528
- * the objective metric. If you specify <code>IDENTICAL_DATA_AND_ALGORITHM</code> as the
10529
- * <code>WarmStartType</code> value for the warm start configuration, the training job
10530
- * that performs the best in the new tuning job is compared to the best training jobs from
10531
- * the parent tuning jobs. From these, the training job that performs the best as measured
10532
- * by the objective metric is returned as the overall best training job.</p>
10533
- * <note>
10534
- * <p>All training jobs launched by parent hyperparameter tuning jobs and the new
10535
- * hyperparameter tuning jobs count against the limit of training jobs for the tuning
10536
- * job.</p>
10537
- * </note>
10538
- */
10539
- WarmStartConfig?: HyperParameterTuningJobWarmStartConfig;
10540
- /**
10541
- * <p>An array of key-value pairs. You can use tags to categorize your Amazon Web Services resources in
10542
- * different ways, for example, by purpose, owner, or environment. For more information,
10543
- * see <a href="https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html">Tagging Amazon Web Services
10544
- * Resources</a>.</p>
10545
- * <p>Tags that you specify for the tuning job are also added to all training jobs that the
10546
- * tuning job launches.</p>
10547
- */
10548
- Tags?: Tag[];
10549
- }
10550
- export declare namespace CreateHyperParameterTuningJobRequest {
10551
- /**
10552
- * @internal
10553
- */
10554
- const filterSensitiveLog: (obj: CreateHyperParameterTuningJobRequest) => any;
10555
- }
10556
- export interface CreateHyperParameterTuningJobResponse {
10557
- /**
10558
- * <p>The Amazon Resource Name (ARN) of the tuning job. SageMaker assigns an ARN to a
10559
- * hyperparameter tuning job when you create it.</p>
10560
- */
10561
- HyperParameterTuningJobArn: string | undefined;
10562
- }
10563
- export declare namespace CreateHyperParameterTuningJobResponse {
10564
- /**
10565
- * @internal
10566
- */
10567
- const filterSensitiveLog: (obj: CreateHyperParameterTuningJobResponse) => any;
10568
- }