@aws-sdk/client-bedrock 3.830.0 → 3.831.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -27,7 +27,7 @@ declare const CreateCustomModelCommand_base: {
  getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
  };
  /**
- * <p>Creates a new custom model in Amazon Bedrock from an existing SageMaker AI-trained Amazon Nova model stored in an Amazon-managed Amazon S3 bucket. After the model is active, you can use it for inference.</p> <p>To use the model for inference, you must purchase Provisioned Throughput for it. You can't use On-demand inference with these custom models. For more information about Provisioned Throughput, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prov-throughput.html">Provisioned Throughput</a>.</p> <p>The model appears in <code>ListCustomModels</code> with a <code>customizationType</code> of <code>imported</code>. To track the status of the new model, you use the <code>GetCustomModel</code> API operation. The model can be in the following states:</p> <ul> <li> <p> <code>Creating</code> - Initial state during validation and registration</p> </li> <li> <p> <code>Active</code> - Model is ready for use in inference</p> </li> <li> <p> <code>Failed</code> - Creation process encountered an error</p> </li> </ul> <p>For more information about creating custom models, including specific model requirements, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/create-custom-model-from-existing.html">Import a SageMaker AI-trained Amazon Nova model</a> in the Amazon Bedrock User Guide. </p> <note> <p>You use the <code>CreateCustomModel</code> API to import only SageMaker AI-trained Amazon Nova models. To import open-source models, you use the <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_CreateModelImportJob.html">CreateModelImportJob</a>. </p> </note> <p> <b>Related APIs</b> </p> <ul> <li> <p> <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_GetCustomModel.html">GetCustomModel</a> </p> </li> <li> <p> <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_ListCustomModels.html">ListCustomModels</a> </p> </li> <li> <p> <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_DeleteCustomModel.html">DeleteCustomModel</a> </p> </li> </ul>
+ * <p>Creates a new custom model in Amazon Bedrock. After the model is active, you can use it for inference.</p> <p>To use the model for inference, you must purchase Provisioned Throughput for it. You can't use On-demand inference with these custom models. For more information about Provisioned Throughput, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prov-throughput.html">Provisioned Throughput</a>.</p> <p>The model appears in <code>ListCustomModels</code> with a <code>customizationType</code> of <code>imported</code>. To track the status of the new model, you use the <code>GetCustomModel</code> API operation. The model can be in the following states:</p> <ul> <li> <p> <code>Creating</code> - Initial state during validation and registration</p> </li> <li> <p> <code>Active</code> - Model is ready for use in inference</p> </li> <li> <p> <code>Failed</code> - Creation process encountered an error</p> </li> </ul> <p> <b>Related APIs</b> </p> <ul> <li> <p> <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_GetCustomModel.html">GetCustomModel</a> </p> </li> <li> <p> <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_ListCustomModels.html">ListCustomModels</a> </p> </li> <li> <p> <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_DeleteCustomModel.html">DeleteCustomModel</a> </p> </li> </ul>
  * @example
  * Use a bare-bones client and the command you need to make an API call.
  * ```javascript
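
The revised doc comment above still describes the same calling pattern: send a CreateCustomModelCommand from a bare-bones BedrockClient, then track the new model with GetCustomModel until it reaches Active (or Failed). A minimal sketch of that flow follows; the region, model name, and S3 URI are placeholder assumptions, not values taken from this package, and additional request fields may be required in your account.

```ts
import {
  BedrockClient,
  CreateCustomModelCommand,
  GetCustomModelCommand,
} from "@aws-sdk/client-bedrock";

// Bare-bones client; the region and all identifiers below are placeholder assumptions.
const client = new BedrockClient({ region: "us-east-1" });

async function main() {
  const created = await client.send(
    new CreateCustomModelCommand({
      modelName: "my-custom-model", // hypothetical model name
      modelSourceConfig: {
        // Amazon S3 URI of the bucket holding the model artifacts (hypothetical URI).
        s3DataSource: { s3Uri: "s3://amzn-s3-demo-bucket/model-artifacts/" },
      },
      // Depending on your setup, further fields (for example an IAM role that lets
      // Amazon Bedrock read the bucket) may also be needed.
    })
  );

  // Track the new model with GetCustomModel; the response reports whether it is
  // still Creating, has become Active, or has Failed.
  const model = await client.send(
    new GetCustomModelCommand({ modelIdentifier: created.modelArn })
  );
  console.log(model);
}

main().catch(console.error);
```

As the doc comment notes, even an Active custom model requires Provisioned Throughput before it can serve inference; On-demand inference is not available for these models.
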
@@ -475,7 +475,7 @@ export interface UpdateMarketplaceModelEndpointResponse {
  marketplaceModelEndpoint: MarketplaceModelEndpoint | undefined;
  }
  /**
- * <p>The Amazon S3 data source of the model to import. For the <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_CreateCustomModel.html">CreateCustomModel</a> API operation, you must specify the Amazon S3 URI for the Amazon-managed Amazon S3 bucket containing your model artifacts. SageMaker AI creates this bucket when you run your first SageMaker AI training job.</p>
+ * <p>The Amazon S3 data source of the model to import. </p>
  * @public
  */
  export interface S3DataSource {
@@ -525,7 +525,7 @@ export interface CreateCustomModelRequest {
  */
  modelName: string | undefined;
  /**
- * <p>The data source for the model. The Amazon S3 URI in the model source must be for the Amazon-managed Amazon S3 bucket containing your model artifacts. SageMaker AI creates this bucket when you run your first SageMaker AI training job.</p>
+ * <p>The data source for the model. The Amazon S3 URI in the model source must be for the Amazon-managed Amazon S3 bucket containing your model artifacts.</p>
  * @public
  */
  modelSourceConfig: ModelDataSource | undefined;
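
The two preceding hunks touch the same plumbing: CreateCustomModelRequest.modelSourceConfig is a ModelDataSource union whose s3DataSource member carries the S3 URI of the bucket with the model artifacts. A small typed sketch of how that value can be built against the exported types, with a placeholder URI:

```ts
import type { ModelDataSource, S3DataSource } from "@aws-sdk/client-bedrock";

// Hypothetical bucket/prefix; in practice this is the Amazon-managed bucket
// that holds your model artifacts, as described in the doc comments above.
const s3DataSource: S3DataSource = {
  s3Uri: "s3://amzn-s3-demo-bucket/model-artifacts/",
};

// ModelDataSource is a tagged union; the S3 variant is selected by setting s3DataSource.
const modelSourceConfig: ModelDataSource = { s3DataSource };
```

Because ModelDataSource is a union, exactly one data-source member is set; here that is the s3DataSource variant documented by the S3DataSource interface above.
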
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@aws-sdk/client-bedrock",
  "description": "AWS SDK for JavaScript Bedrock Client for Node.js, Browser and React Native",
- "version": "3.830.0",
+ "version": "3.831.0",
  "scripts": {
  "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'",
  "build:cjs": "node ../../scripts/compilation/inline client-bedrock",