@aws-sdk/client-bedrock 3.679.0 → 3.686.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -0
- package/dist-cjs/index.js +133 -3
- package/dist-es/Bedrock.js +4 -0
- package/dist-es/commands/CreateInferenceProfileCommand.js +23 -0
- package/dist-es/commands/DeleteInferenceProfileCommand.js +22 -0
- package/dist-es/commands/GetInferenceProfileCommand.js +2 -1
- package/dist-es/commands/ListInferenceProfilesCommand.js +2 -1
- package/dist-es/commands/index.js +2 -0
- package/dist-es/models/models_0.js +28 -0
- package/dist-es/protocols/Aws_restJson1.js +54 -0
- package/dist-types/Bedrock.d.ts +14 -0
- package/dist-types/BedrockClient.d.ts +4 -2
- package/dist-types/commands/CreateInferenceProfileCommand.d.ts +108 -0
- package/dist-types/commands/DeleteInferenceProfileCommand.d.ts +87 -0
- package/dist-types/commands/GetInferenceProfileCommand.d.ts +6 -6
- package/dist-types/commands/ListInferenceProfilesCommand.d.ts +8 -7
- package/dist-types/commands/ListTagsForResourceCommand.d.ts +1 -1
- package/dist-types/commands/TagResourceCommand.d.ts +1 -1
- package/dist-types/commands/UntagResourceCommand.d.ts +1 -1
- package/dist-types/commands/index.d.ts +2 -0
- package/dist-types/models/models_0.d.ts +164 -21
- package/dist-types/protocols/Aws_restJson1.d.ts +18 -0
- package/dist-types/ts3.4/Bedrock.d.ts +34 -0
- package/dist-types/ts3.4/BedrockClient.d.ts +12 -0
- package/dist-types/ts3.4/commands/CreateInferenceProfileCommand.d.ts +51 -0
- package/dist-types/ts3.4/commands/DeleteInferenceProfileCommand.d.ts +51 -0
- package/dist-types/ts3.4/commands/index.d.ts +2 -0
- package/dist-types/ts3.4/models/models_0.d.ts +56 -6
- package/dist-types/ts3.4/protocols/Aws_restJson1.d.ts +24 -0
- package/package.json +35 -35

package/dist-types/BedrockClient.d.ts

@@ -11,6 +11,7 @@ import { BatchDeleteEvaluationJobCommandInput, BatchDeleteEvaluationJobCommandOu
 import { CreateEvaluationJobCommandInput, CreateEvaluationJobCommandOutput } from "./commands/CreateEvaluationJobCommand";
 import { CreateGuardrailCommandInput, CreateGuardrailCommandOutput } from "./commands/CreateGuardrailCommand";
 import { CreateGuardrailVersionCommandInput, CreateGuardrailVersionCommandOutput } from "./commands/CreateGuardrailVersionCommand";
+import { CreateInferenceProfileCommandInput, CreateInferenceProfileCommandOutput } from "./commands/CreateInferenceProfileCommand";
 import { CreateModelCopyJobCommandInput, CreateModelCopyJobCommandOutput } from "./commands/CreateModelCopyJobCommand";
 import { CreateModelCustomizationJobCommandInput, CreateModelCustomizationJobCommandOutput } from "./commands/CreateModelCustomizationJobCommand";
 import { CreateModelImportJobCommandInput, CreateModelImportJobCommandOutput } from "./commands/CreateModelImportJobCommand";

@@ -19,6 +20,7 @@ import { CreateProvisionedModelThroughputCommandInput, CreateProvisionedModelThr
 import { DeleteCustomModelCommandInput, DeleteCustomModelCommandOutput } from "./commands/DeleteCustomModelCommand";
 import { DeleteGuardrailCommandInput, DeleteGuardrailCommandOutput } from "./commands/DeleteGuardrailCommand";
 import { DeleteImportedModelCommandInput, DeleteImportedModelCommandOutput } from "./commands/DeleteImportedModelCommand";
+import { DeleteInferenceProfileCommandInput, DeleteInferenceProfileCommandOutput } from "./commands/DeleteInferenceProfileCommand";
 import { DeleteModelInvocationLoggingConfigurationCommandInput, DeleteModelInvocationLoggingConfigurationCommandOutput } from "./commands/DeleteModelInvocationLoggingConfigurationCommand";
 import { DeleteProvisionedModelThroughputCommandInput, DeleteProvisionedModelThroughputCommandOutput } from "./commands/DeleteProvisionedModelThroughputCommand";
 import { GetCustomModelCommandInput, GetCustomModelCommandOutput } from "./commands/GetCustomModelCommand";

@@ -59,11 +61,11 @@ export { __Client };
 /**
  * @public
  */
-export type ServiceInputTypes = BatchDeleteEvaluationJobCommandInput | CreateEvaluationJobCommandInput | CreateGuardrailCommandInput | CreateGuardrailVersionCommandInput | CreateModelCopyJobCommandInput | CreateModelCustomizationJobCommandInput | CreateModelImportJobCommandInput | CreateModelInvocationJobCommandInput | CreateProvisionedModelThroughputCommandInput | DeleteCustomModelCommandInput | DeleteGuardrailCommandInput | DeleteImportedModelCommandInput | DeleteModelInvocationLoggingConfigurationCommandInput | DeleteProvisionedModelThroughputCommandInput | GetCustomModelCommandInput | GetEvaluationJobCommandInput | GetFoundationModelCommandInput | GetGuardrailCommandInput | GetImportedModelCommandInput | GetInferenceProfileCommandInput | GetModelCopyJobCommandInput | GetModelCustomizationJobCommandInput | GetModelImportJobCommandInput | GetModelInvocationJobCommandInput | GetModelInvocationLoggingConfigurationCommandInput | GetProvisionedModelThroughputCommandInput | ListCustomModelsCommandInput | ListEvaluationJobsCommandInput | ListFoundationModelsCommandInput | ListGuardrailsCommandInput | ListImportedModelsCommandInput | ListInferenceProfilesCommandInput | ListModelCopyJobsCommandInput | ListModelCustomizationJobsCommandInput | ListModelImportJobsCommandInput | ListModelInvocationJobsCommandInput | ListProvisionedModelThroughputsCommandInput | ListTagsForResourceCommandInput | PutModelInvocationLoggingConfigurationCommandInput | StopEvaluationJobCommandInput | StopModelCustomizationJobCommandInput | StopModelInvocationJobCommandInput | TagResourceCommandInput | UntagResourceCommandInput | UpdateGuardrailCommandInput | UpdateProvisionedModelThroughputCommandInput;
+export type ServiceInputTypes = BatchDeleteEvaluationJobCommandInput | CreateEvaluationJobCommandInput | CreateGuardrailCommandInput | CreateGuardrailVersionCommandInput | CreateInferenceProfileCommandInput | CreateModelCopyJobCommandInput | CreateModelCustomizationJobCommandInput | CreateModelImportJobCommandInput | CreateModelInvocationJobCommandInput | CreateProvisionedModelThroughputCommandInput | DeleteCustomModelCommandInput | DeleteGuardrailCommandInput | DeleteImportedModelCommandInput | DeleteInferenceProfileCommandInput | DeleteModelInvocationLoggingConfigurationCommandInput | DeleteProvisionedModelThroughputCommandInput | GetCustomModelCommandInput | GetEvaluationJobCommandInput | GetFoundationModelCommandInput | GetGuardrailCommandInput | GetImportedModelCommandInput | GetInferenceProfileCommandInput | GetModelCopyJobCommandInput | GetModelCustomizationJobCommandInput | GetModelImportJobCommandInput | GetModelInvocationJobCommandInput | GetModelInvocationLoggingConfigurationCommandInput | GetProvisionedModelThroughputCommandInput | ListCustomModelsCommandInput | ListEvaluationJobsCommandInput | ListFoundationModelsCommandInput | ListGuardrailsCommandInput | ListImportedModelsCommandInput | ListInferenceProfilesCommandInput | ListModelCopyJobsCommandInput | ListModelCustomizationJobsCommandInput | ListModelImportJobsCommandInput | ListModelInvocationJobsCommandInput | ListProvisionedModelThroughputsCommandInput | ListTagsForResourceCommandInput | PutModelInvocationLoggingConfigurationCommandInput | StopEvaluationJobCommandInput | StopModelCustomizationJobCommandInput | StopModelInvocationJobCommandInput | TagResourceCommandInput | UntagResourceCommandInput | UpdateGuardrailCommandInput | UpdateProvisionedModelThroughputCommandInput;
 /**
  * @public
  */
-export type ServiceOutputTypes = BatchDeleteEvaluationJobCommandOutput | CreateEvaluationJobCommandOutput | CreateGuardrailCommandOutput | CreateGuardrailVersionCommandOutput | CreateModelCopyJobCommandOutput | CreateModelCustomizationJobCommandOutput | CreateModelImportJobCommandOutput | CreateModelInvocationJobCommandOutput | CreateProvisionedModelThroughputCommandOutput | DeleteCustomModelCommandOutput | DeleteGuardrailCommandOutput | DeleteImportedModelCommandOutput | DeleteModelInvocationLoggingConfigurationCommandOutput | DeleteProvisionedModelThroughputCommandOutput | GetCustomModelCommandOutput | GetEvaluationJobCommandOutput | GetFoundationModelCommandOutput | GetGuardrailCommandOutput | GetImportedModelCommandOutput | GetInferenceProfileCommandOutput | GetModelCopyJobCommandOutput | GetModelCustomizationJobCommandOutput | GetModelImportJobCommandOutput | GetModelInvocationJobCommandOutput | GetModelInvocationLoggingConfigurationCommandOutput | GetProvisionedModelThroughputCommandOutput | ListCustomModelsCommandOutput | ListEvaluationJobsCommandOutput | ListFoundationModelsCommandOutput | ListGuardrailsCommandOutput | ListImportedModelsCommandOutput | ListInferenceProfilesCommandOutput | ListModelCopyJobsCommandOutput | ListModelCustomizationJobsCommandOutput | ListModelImportJobsCommandOutput | ListModelInvocationJobsCommandOutput | ListProvisionedModelThroughputsCommandOutput | ListTagsForResourceCommandOutput | PutModelInvocationLoggingConfigurationCommandOutput | StopEvaluationJobCommandOutput | StopModelCustomizationJobCommandOutput | StopModelInvocationJobCommandOutput | TagResourceCommandOutput | UntagResourceCommandOutput | UpdateGuardrailCommandOutput | UpdateProvisionedModelThroughputCommandOutput;
+export type ServiceOutputTypes = BatchDeleteEvaluationJobCommandOutput | CreateEvaluationJobCommandOutput | CreateGuardrailCommandOutput | CreateGuardrailVersionCommandOutput | CreateInferenceProfileCommandOutput | CreateModelCopyJobCommandOutput | CreateModelCustomizationJobCommandOutput | CreateModelImportJobCommandOutput | CreateModelInvocationJobCommandOutput | CreateProvisionedModelThroughputCommandOutput | DeleteCustomModelCommandOutput | DeleteGuardrailCommandOutput | DeleteImportedModelCommandOutput | DeleteInferenceProfileCommandOutput | DeleteModelInvocationLoggingConfigurationCommandOutput | DeleteProvisionedModelThroughputCommandOutput | GetCustomModelCommandOutput | GetEvaluationJobCommandOutput | GetFoundationModelCommandOutput | GetGuardrailCommandOutput | GetImportedModelCommandOutput | GetInferenceProfileCommandOutput | GetModelCopyJobCommandOutput | GetModelCustomizationJobCommandOutput | GetModelImportJobCommandOutput | GetModelInvocationJobCommandOutput | GetModelInvocationLoggingConfigurationCommandOutput | GetProvisionedModelThroughputCommandOutput | ListCustomModelsCommandOutput | ListEvaluationJobsCommandOutput | ListFoundationModelsCommandOutput | ListGuardrailsCommandOutput | ListImportedModelsCommandOutput | ListInferenceProfilesCommandOutput | ListModelCopyJobsCommandOutput | ListModelCustomizationJobsCommandOutput | ListModelImportJobsCommandOutput | ListModelInvocationJobsCommandOutput | ListProvisionedModelThroughputsCommandOutput | ListTagsForResourceCommandOutput | PutModelInvocationLoggingConfigurationCommandOutput | StopEvaluationJobCommandOutput | StopModelCustomizationJobCommandOutput | StopModelInvocationJobCommandOutput | TagResourceCommandOutput | UntagResourceCommandOutput | UpdateGuardrailCommandOutput | UpdateProvisionedModelThroughputCommandOutput;
 /**
  * @public
  */

package/dist-types/commands/CreateInferenceProfileCommand.d.ts

@@ -0,0 +1,108 @@
+import { Command as $Command } from "@smithy/smithy-client";
+import { MetadataBearer as __MetadataBearer } from "@smithy/types";
+import { BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../BedrockClient";
+import { CreateInferenceProfileRequest, CreateInferenceProfileResponse } from "../models/models_0";
+/**
+ * @public
+ */
+export type { __MetadataBearer };
+export { $Command };
+/**
+ * @public
+ *
+ * The input for {@link CreateInferenceProfileCommand}.
+ */
+export interface CreateInferenceProfileCommandInput extends CreateInferenceProfileRequest {
+}
+/**
+ * @public
+ *
+ * The output of {@link CreateInferenceProfileCommand}.
+ */
+export interface CreateInferenceProfileCommandOutput extends CreateInferenceProfileResponse, __MetadataBearer {
+}
+declare const CreateInferenceProfileCommand_base: {
+    new (input: CreateInferenceProfileCommandInput): import("@smithy/smithy-client").CommandImpl<CreateInferenceProfileCommandInput, CreateInferenceProfileCommandOutput, BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
+    new (__0_0: CreateInferenceProfileCommandInput): import("@smithy/smithy-client").CommandImpl<CreateInferenceProfileCommandInput, CreateInferenceProfileCommandOutput, BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
+    getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
+};
+/**
+ * <p>Creates an application inference profile to track metrics and costs when invoking a model. To create an application inference profile for a foundation model in one region, specify the ARN of the model in that region. To create an application inference profile for a foundation model across multiple regions, specify the ARN of the system-defined inference profile that contains the regions that you want to route requests to. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference.html">Increase throughput and resilience with cross-region inference in Amazon Bedrock</a>. in the Amazon Bedrock User Guide.</p>
+ * @example
+ * Use a bare-bones client and the command you need to make an API call.
+ * ```javascript
+ * import { BedrockClient, CreateInferenceProfileCommand } from "@aws-sdk/client-bedrock"; // ES Modules import
+ * // const { BedrockClient, CreateInferenceProfileCommand } = require("@aws-sdk/client-bedrock"); // CommonJS import
+ * const client = new BedrockClient(config);
+ * const input = { // CreateInferenceProfileRequest
+ *   inferenceProfileName: "STRING_VALUE", // required
+ *   description: "STRING_VALUE",
+ *   clientRequestToken: "STRING_VALUE",
+ *   modelSource: { // InferenceProfileModelSource Union: only one key present
+ *     copyFrom: "STRING_VALUE",
+ *   },
+ *   tags: [ // TagList
+ *     { // Tag
+ *       key: "STRING_VALUE", // required
+ *       value: "STRING_VALUE", // required
+ *     },
+ *   ],
+ * };
+ * const command = new CreateInferenceProfileCommand(input);
+ * const response = await client.send(command);
+ * // { // CreateInferenceProfileResponse
+ * //   inferenceProfileArn: "STRING_VALUE", // required
+ * //   status: "ACTIVE",
+ * // };
+ *
+ * ```
+ *
+ * @param CreateInferenceProfileCommandInput - {@link CreateInferenceProfileCommandInput}
+ * @returns {@link CreateInferenceProfileCommandOutput}
+ * @see {@link CreateInferenceProfileCommandInput} for command's `input` shape.
+ * @see {@link CreateInferenceProfileCommandOutput} for command's `response` shape.
+ * @see {@link BedrockClientResolvedConfig | config} for BedrockClient's `config` shape.
+ *
+ * @throws {@link AccessDeniedException} (client fault)
+ * <p>The request is denied because of missing access permissions.</p>
+ *
+ * @throws {@link ConflictException} (client fault)
+ * <p>Error occurred because of a conflict while performing an operation.</p>
+ *
+ * @throws {@link InternalServerException} (server fault)
+ * <p>An internal server error occurred. Retry your request.</p>
+ *
+ * @throws {@link ResourceNotFoundException} (client fault)
+ * <p>The specified resource Amazon Resource Name (ARN) was not found. Check the Amazon Resource Name (ARN) and try your request again.</p>
+ *
+ * @throws {@link ServiceQuotaExceededException} (client fault)
+ * <p>The number of requests exceeds the service quota. Resubmit your request later.</p>
+ *
+ * @throws {@link ThrottlingException} (client fault)
+ * <p>The number of requests exceeds the limit. Resubmit your request later.</p>
+ *
+ * @throws {@link TooManyTagsException} (client fault)
+ * <p>The request contains more tags than can be associated with a resource (50 tags per resource).
+ * The maximum number of tags includes both existing tags and those included in your current request. </p>
+ *
+ * @throws {@link ValidationException} (client fault)
+ * <p>Input validation failed. Check your request parameters and retry the request.</p>
+ *
+ * @throws {@link BedrockServiceException}
+ * <p>Base exception class for all service exceptions from Bedrock service.</p>
+ *
+ * @public
+ */
+export declare class CreateInferenceProfileCommand extends CreateInferenceProfileCommand_base {
+    /** @internal type navigation helper, not in runtime. */
+    protected static __types: {
+        api: {
+            input: CreateInferenceProfileRequest;
+            output: CreateInferenceProfileResponse;
+        };
+        sdk: {
+            input: CreateInferenceProfileCommandInput;
+            output: CreateInferenceProfileCommandOutput;
+        };
+    };
+}
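
The new `CreateInferenceProfileCommand` ships its own JSDoc example above. As a quick orientation, here is a minimal TypeScript sketch adapted from that example; the Region, profile name, tag, and `copyFrom` ARN are placeholder values, not part of the diff.

```ts
// Sketch only: create an application inference profile that tracks metrics and
// costs for a model. All literal values below are placeholders.
import { BedrockClient, CreateInferenceProfileCommand } from "@aws-sdk/client-bedrock";

const client = new BedrockClient({ region: "us-east-1" }); // placeholder Region

const response = await client.send(
  new CreateInferenceProfileCommand({
    inferenceProfileName: "my-app-profile", // required
    description: "Tracks usage for one application",
    modelSource: {
      // ARN of a foundation model (single Region) or of a system-defined
      // inference profile (cross-Region routing); placeholder ARN here.
      copyFrom: "arn:aws:bedrock:us-east-1::foundation-model/EXAMPLE_MODEL_ID",
    },
    tags: [{ key: "project", value: "demo" }],
  })
);

console.log(response.inferenceProfileArn, response.status); // status is "ACTIVE" once usable
```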

package/dist-types/commands/DeleteInferenceProfileCommand.d.ts

@@ -0,0 +1,87 @@
+import { Command as $Command } from "@smithy/smithy-client";
+import { MetadataBearer as __MetadataBearer } from "@smithy/types";
+import { BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../BedrockClient";
+import { DeleteInferenceProfileRequest, DeleteInferenceProfileResponse } from "../models/models_0";
+/**
+ * @public
+ */
+export type { __MetadataBearer };
+export { $Command };
+/**
+ * @public
+ *
+ * The input for {@link DeleteInferenceProfileCommand}.
+ */
+export interface DeleteInferenceProfileCommandInput extends DeleteInferenceProfileRequest {
+}
+/**
+ * @public
+ *
+ * The output of {@link DeleteInferenceProfileCommand}.
+ */
+export interface DeleteInferenceProfileCommandOutput extends DeleteInferenceProfileResponse, __MetadataBearer {
+}
+declare const DeleteInferenceProfileCommand_base: {
+    new (input: DeleteInferenceProfileCommandInput): import("@smithy/smithy-client").CommandImpl<DeleteInferenceProfileCommandInput, DeleteInferenceProfileCommandOutput, BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
+    new (__0_0: DeleteInferenceProfileCommandInput): import("@smithy/smithy-client").CommandImpl<DeleteInferenceProfileCommandInput, DeleteInferenceProfileCommandOutput, BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
+    getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
+};
+/**
+ * <p>Deletes an application inference profile. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference.html">Increase throughput and resilience with cross-region inference in Amazon Bedrock</a>. in the Amazon Bedrock User Guide.</p>
+ * @example
+ * Use a bare-bones client and the command you need to make an API call.
+ * ```javascript
+ * import { BedrockClient, DeleteInferenceProfileCommand } from "@aws-sdk/client-bedrock"; // ES Modules import
+ * // const { BedrockClient, DeleteInferenceProfileCommand } = require("@aws-sdk/client-bedrock"); // CommonJS import
+ * const client = new BedrockClient(config);
+ * const input = { // DeleteInferenceProfileRequest
+ *   inferenceProfileIdentifier: "STRING_VALUE", // required
+ * };
+ * const command = new DeleteInferenceProfileCommand(input);
+ * const response = await client.send(command);
+ * // {};
+ *
+ * ```
+ *
+ * @param DeleteInferenceProfileCommandInput - {@link DeleteInferenceProfileCommandInput}
+ * @returns {@link DeleteInferenceProfileCommandOutput}
+ * @see {@link DeleteInferenceProfileCommandInput} for command's `input` shape.
+ * @see {@link DeleteInferenceProfileCommandOutput} for command's `response` shape.
+ * @see {@link BedrockClientResolvedConfig | config} for BedrockClient's `config` shape.
+ *
+ * @throws {@link AccessDeniedException} (client fault)
+ * <p>The request is denied because of missing access permissions.</p>
+ *
+ * @throws {@link ConflictException} (client fault)
+ * <p>Error occurred because of a conflict while performing an operation.</p>
+ *
+ * @throws {@link InternalServerException} (server fault)
+ * <p>An internal server error occurred. Retry your request.</p>
+ *
+ * @throws {@link ResourceNotFoundException} (client fault)
+ * <p>The specified resource Amazon Resource Name (ARN) was not found. Check the Amazon Resource Name (ARN) and try your request again.</p>
+ *
+ * @throws {@link ThrottlingException} (client fault)
+ * <p>The number of requests exceeds the limit. Resubmit your request later.</p>
+ *
+ * @throws {@link ValidationException} (client fault)
+ * <p>Input validation failed. Check your request parameters and retry the request.</p>
+ *
+ * @throws {@link BedrockServiceException}
+ * <p>Base exception class for all service exceptions from Bedrock service.</p>
+ *
+ * @public
+ */
+export declare class DeleteInferenceProfileCommand extends DeleteInferenceProfileCommand_base {
+    /** @internal type navigation helper, not in runtime. */
+    protected static __types: {
+        api: {
+            input: DeleteInferenceProfileRequest;
+            output: {};
+        };
+        sdk: {
+            input: DeleteInferenceProfileCommandInput;
+            output: DeleteInferenceProfileCommandOutput;
+        };
+    };
+}
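
`DeleteInferenceProfileCommand` takes only the profile's ARN or ID and returns an empty response, as the example above shows. A minimal hedged sketch, with a placeholder identifier:

```ts
// Sketch only: delete an application inference profile by ARN or ID (placeholder value).
import { BedrockClient, DeleteInferenceProfileCommand } from "@aws-sdk/client-bedrock";

const client = new BedrockClient({ region: "us-east-1" }); // placeholder Region
await client.send(
  new DeleteInferenceProfileCommand({
    inferenceProfileIdentifier: "EXAMPLE_PROFILE_ARN_OR_ID", // placeholder
  })
);
// The response body is empty ({}); an unknown identifier surfaces as ResourceNotFoundException.
```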

package/dist-types/commands/GetInferenceProfileCommand.d.ts

@@ -27,7 +27,7 @@ declare const GetInferenceProfileCommand_base: {
     getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
 };
 /**
- * <p>Gets information about an inference profile. For more information, see the Amazon Bedrock User Guide.</p>
+ * <p>Gets information about an inference profile. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference.html">Increase throughput and resilience with cross-region inference in Amazon Bedrock</a>. in the Amazon Bedrock User Guide.</p>
  * @example
  * Use a bare-bones client and the command you need to make an API call.
  * ```javascript

@@ -41,18 +41,18 @@ declare const GetInferenceProfileCommand_base: {
  * const response = await client.send(command);
  * // { // GetInferenceProfileResponse
  * //   inferenceProfileName: "STRING_VALUE", // required
+ * //   description: "STRING_VALUE",
+ * //   createdAt: new Date("TIMESTAMP"),
+ * //   updatedAt: new Date("TIMESTAMP"),
+ * //   inferenceProfileArn: "STRING_VALUE", // required
  * //   models: [ // InferenceProfileModels // required
  * //     { // InferenceProfileModel
  * //       modelArn: "STRING_VALUE",
  * //     },
  * //   ],
- * //   description: "STRING_VALUE",
- * //   createdAt: new Date("TIMESTAMP"),
- * //   updatedAt: new Date("TIMESTAMP"),
- * //   inferenceProfileArn: "STRING_VALUE", // required
  * //   inferenceProfileId: "STRING_VALUE", // required
  * //   status: "ACTIVE", // required
- * //   type: "SYSTEM_DEFINED", // required
+ * //   type: "SYSTEM_DEFINED" || "APPLICATION", // required
  * // };
  *
  * ```
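
The reordered `GetInferenceProfileResponse` example above also reflects the new `type` values. A small sketch of reading them, with a placeholder identifier:

```ts
// Sketch only: fetch one inference profile and inspect the fields shown in the diff.
import { BedrockClient, GetInferenceProfileCommand } from "@aws-sdk/client-bedrock";

const client = new BedrockClient({ region: "us-east-1" }); // placeholder Region
const profile = await client.send(
  new GetInferenceProfileCommand({ inferenceProfileIdentifier: "EXAMPLE_PROFILE_ID" })
);
console.log(profile.inferenceProfileArn, profile.status);
console.log(profile.type); // "SYSTEM_DEFINED" or, new in this release, "APPLICATION"
```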

package/dist-types/commands/ListInferenceProfilesCommand.d.ts

@@ -24,10 +24,10 @@ export interface ListInferenceProfilesCommandOutput extends ListInferenceProfile
 declare const ListInferenceProfilesCommand_base: {
     new (input: ListInferenceProfilesCommandInput): import("@smithy/smithy-client").CommandImpl<ListInferenceProfilesCommandInput, ListInferenceProfilesCommandOutput, BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
     new (...[input]: [] | [ListInferenceProfilesCommandInput]): import("@smithy/smithy-client").CommandImpl<ListInferenceProfilesCommandInput, ListInferenceProfilesCommandOutput, BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
-    getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
+    getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
 };
 /**
- * <p>Returns a list of inference profiles that you can use.</p>
+ * <p>Returns a list of inference profiles that you can use. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference.html">Increase throughput and resilience with cross-region inference in Amazon Bedrock</a>. in the Amazon Bedrock User Guide.</p>
  * @example
  * Use a bare-bones client and the command you need to make an API call.
  * ```javascript

@@ -37,6 +37,7 @@ declare const ListInferenceProfilesCommand_base: {
  * const input = { // ListInferenceProfilesRequest
  *   maxResults: Number("int"),
  *   nextToken: "STRING_VALUE",
+ *   typeEquals: "SYSTEM_DEFINED" || "APPLICATION",
  * };
  * const command = new ListInferenceProfilesCommand(input);
  * const response = await client.send(command);

@@ -44,18 +45,18 @@ declare const ListInferenceProfilesCommand_base: {
  * //   inferenceProfileSummaries: [ // InferenceProfileSummaries
  * //     { // InferenceProfileSummary
  * //       inferenceProfileName: "STRING_VALUE", // required
+ * //       description: "STRING_VALUE",
+ * //       createdAt: new Date("TIMESTAMP"),
+ * //       updatedAt: new Date("TIMESTAMP"),
+ * //       inferenceProfileArn: "STRING_VALUE", // required
  * //       models: [ // InferenceProfileModels // required
  * //         { // InferenceProfileModel
  * //           modelArn: "STRING_VALUE",
  * //         },
  * //       ],
- * //       description: "STRING_VALUE",
- * //       createdAt: new Date("TIMESTAMP"),
- * //       updatedAt: new Date("TIMESTAMP"),
- * //       inferenceProfileArn: "STRING_VALUE", // required
  * //       inferenceProfileId: "STRING_VALUE", // required
  * //       status: "ACTIVE", // required
- * //       type: "SYSTEM_DEFINED", // required
+ * //       type: "SYSTEM_DEFINED" || "APPLICATION", // required
  * //     },
  * //   ],
  * //   nextToken: "STRING_VALUE",
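
`ListInferenceProfilesCommand` gains the optional `typeEquals` filter shown above and keeps the `nextToken` cursor. A paginated sketch; the page size and Region are placeholders:

```ts
// Sketch only: page through application-type inference profiles with the new typeEquals filter.
import { BedrockClient, ListInferenceProfilesCommand } from "@aws-sdk/client-bedrock";

const client = new BedrockClient({ region: "us-east-1" }); // placeholder Region

let nextToken: string | undefined;
do {
  const page = await client.send(
    new ListInferenceProfilesCommand({ typeEquals: "APPLICATION", maxResults: 25, nextToken })
  );
  for (const summary of page.inferenceProfileSummaries ?? []) {
    console.log(summary.inferenceProfileId, summary.type, summary.status);
  }
  nextToken = page.nextToken;
} while (nextToken);
```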

package/dist-types/commands/ListTagsForResourceCommand.d.ts

@@ -28,7 +28,7 @@ declare const ListTagsForResourceCommand_base: {
 };
 /**
  * <p>List the tags associated with the specified resource.</p>
- * <p>For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/
+ * <p>For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-service.html">Tagging resources</a> in the <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-service.html">Amazon Bedrock User Guide</a>.</p>
  * @example
  * Use a bare-bones client and the command you need to make an API call.
  * ```javascript

package/dist-types/commands/TagResourceCommand.d.ts

@@ -27,7 +27,7 @@ declare const TagResourceCommand_base: {
     getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
 };
 /**
- * <p>Associate tags with a resource. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/
+ * <p>Associate tags with a resource. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-service.html">Tagging resources</a> in the <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-service.html">Amazon Bedrock User Guide</a>.</p>
  * @example
  * Use a bare-bones client and the command you need to make an API call.
  * ```javascript

package/dist-types/commands/UntagResourceCommand.d.ts

@@ -27,7 +27,7 @@ declare const UntagResourceCommand_base: {
     getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
 };
 /**
- * <p>Remove one or more tags from a resource. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/
+ * <p>Remove one or more tags from a resource. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-service.html">Tagging resources</a> in the <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-service.html">Amazon Bedrock User Guide</a>.</p>
  * @example
  * Use a bare-bones client and the command you need to make an API call.
  * ```javascript

package/dist-types/commands/index.d.ts

@@ -2,6 +2,7 @@ export * from "./BatchDeleteEvaluationJobCommand";
 export * from "./CreateEvaluationJobCommand";
 export * from "./CreateGuardrailCommand";
 export * from "./CreateGuardrailVersionCommand";
+export * from "./CreateInferenceProfileCommand";
 export * from "./CreateModelCopyJobCommand";
 export * from "./CreateModelCustomizationJobCommand";
 export * from "./CreateModelImportJobCommand";

@@ -10,6 +11,7 @@ export * from "./CreateProvisionedModelThroughputCommand";
 export * from "./DeleteCustomModelCommand";
 export * from "./DeleteGuardrailCommand";
 export * from "./DeleteImportedModelCommand";
+export * from "./DeleteInferenceProfileCommand";
 export * from "./DeleteModelInvocationLoggingConfigurationCommand";
 export * from "./DeleteProvisionedModelThroughputCommand";
 export * from "./GetCustomModelCommand";

package/dist-types/models/models_0.d.ts

@@ -2150,25 +2150,65 @@ export interface UpdateGuardrailResponse {
     updatedAt: Date | undefined;
 }
 /**
+ * <p>Contains information about the model or system-defined inference profile that is the source for an inference profile..</p>
  * @public
  */
-export
+export type InferenceProfileModelSource = InferenceProfileModelSource.CopyFromMember | InferenceProfileModelSource.$UnknownMember;
+/**
+ * @public
+ */
+export declare namespace InferenceProfileModelSource {
     /**
-     * <p>The
+     * <p>The ARN of the model or system-defined inference profile that is the source for the inference profile.</p>
      * @public
      */
-
+    interface CopyFromMember {
+        copyFrom: string;
+        $unknown?: never;
+    }
+    /**
+     * @public
+     */
+    interface $UnknownMember {
+        copyFrom?: never;
+        $unknown: [string, any];
+    }
+    interface Visitor<T> {
+        copyFrom: (value: string) => T;
+        _: (name: string, value: any) => T;
+    }
+    const visit: <T>(value: InferenceProfileModelSource, visitor: Visitor<T>) => T;
 }
 /**
- * <p>Contains information about a model.</p>
  * @public
  */
-export interface
+export interface CreateInferenceProfileRequest {
     /**
-     * <p>
+     * <p>A name for the inference profile.</p>
      * @public
      */
-
+    inferenceProfileName: string | undefined;
+    /**
+     * <p>A description for the inference profile.</p>
+     * @public
+     */
+    description?: string;
+    /**
+     * <p>A unique, case-sensitive identifier to ensure that the API request completes no more than one time. If this token matches a previous request,
+     * Amazon Bedrock ignores the request, but does not return an error. For more information, see <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring idempotency</a>.</p>
+     * @public
+     */
+    clientRequestToken?: string;
+    /**
+     * <p>The foundation model or system-defined inference profile that the inference profile will track metrics and costs for.</p>
+     * @public
+     */
+    modelSource: InferenceProfileModelSource | undefined;
+    /**
+     * <p>An array of objects, each of which contains a tag and its value. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-service.html">Tagging resources</a> in the <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-service.html">Amazon Bedrock User Guide</a>.</p>
+     * @public
+     */
+    tags?: Tag[];
 }
 /**
  * @public
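
`InferenceProfileModelSource` is generated as a Smithy tagged union with a `visit` helper, as declared above. Assuming the namespace is re-exported from the package root like the other Bedrock model shapes in this diff, usage would look roughly like this:

```ts
// Sketch only: resolve the source ARN from an InferenceProfileModelSource value
// using the generated visitor; the fallback handles members added in later versions.
import { InferenceProfileModelSource } from "@aws-sdk/client-bedrock";

const describeSource = (source: InferenceProfileModelSource): string =>
  InferenceProfileModelSource.visit(source, {
    copyFrom: (arn) => `copies from ${arn}`,
    _: (name, value) => `unknown member ${name}: ${JSON.stringify(value)}`,
  });
```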

@@ -2181,11 +2221,63 @@ export declare const InferenceProfileStatus: {
  * @public
  */
 export type InferenceProfileStatus = (typeof InferenceProfileStatus)[keyof typeof InferenceProfileStatus];
+/**
+ * @public
+ */
+export interface CreateInferenceProfileResponse {
+    /**
+     * <p>The ARN of the inference profile that you created.</p>
+     * @public
+     */
+    inferenceProfileArn: string | undefined;
+    /**
+     * <p>The status of the inference profile. <code>ACTIVE</code> means that the inference profile is ready to be used.</p>
+     * @public
+     */
+    status?: InferenceProfileStatus;
+}
+/**
+ * @public
+ */
+export interface DeleteInferenceProfileRequest {
+    /**
+     * <p>The Amazon Resource Name (ARN) or ID of the application inference profile to delete.</p>
+     * @public
+     */
+    inferenceProfileIdentifier: string | undefined;
+}
+/**
+ * @public
+ */
+export interface DeleteInferenceProfileResponse {
+}
+/**
+ * @public
+ */
+export interface GetInferenceProfileRequest {
+    /**
+     * <p>The ID or Amazon Resource Name (ARN) of the inference profile.</p>
+     * @public
+     */
+    inferenceProfileIdentifier: string | undefined;
+}
+/**
+ * <p>Contains information about a model.</p>
+ * @public
+ */
+export interface InferenceProfileModel {
+    /**
+     * <p>The Amazon Resource Name (ARN) of the model.</p>
+     * @public
+     */
+    modelArn?: string;
+}
 /**
  * @public
  * @enum
  */
 export declare const InferenceProfileType: {
+    readonly APPLICATION: "APPLICATION";
     readonly SYSTEM_DEFINED: "SYSTEM_DEFINED";
 };
 /**

@@ -2201,11 +2293,6 @@ export interface GetInferenceProfileResponse {
      * @public
      */
     inferenceProfileName: string | undefined;
-    /**
-     * <p>A list of information about each model in the inference profile.</p>
-     * @public
-     */
-    models: InferenceProfileModel[] | undefined;
     /**
      * <p>The description of the inference profile.</p>
      * @public

@@ -2226,18 +2313,33 @@ export interface GetInferenceProfileResponse {
      * @public
      */
     inferenceProfileArn: string | undefined;
+    /**
+     * <p>A list of information about each model in the inference profile.</p>
+     * @public
+     */
+    models: InferenceProfileModel[] | undefined;
     /**
      * <p>The unique identifier of the inference profile.</p>
      * @public
      */
     inferenceProfileId: string | undefined;
     /**
-     * <p>The status of the inference profile. <code>ACTIVE</code> means that the inference profile is
+     * <p>The status of the inference profile. <code>ACTIVE</code> means that the inference profile is ready to be used.</p>
      * @public
      */
     status: InferenceProfileStatus | undefined;
     /**
-     * <p>The type of the inference profile.
+     * <p>The type of the inference profile. The following types are possible:</p>
+     * <ul>
+     * <li>
+     * <p>
+     * <code>SYSTEM_DEFINED</code> – The inference profile is defined by Amazon Bedrock. You can route inference requests across regions with these inference profiles.</p>
+     * </li>
+     * <li>
+     * <p>
+     * <code>APPLICATION</code> – The inference profile was created by a user. This type of inference profile can track metrics and costs when invoking the model in it. The inference profile may route requests to one or multiple regions.</p>
+     * </li>
+     * </ul>
      * @public
      */
     type: InferenceProfileType | undefined;

@@ -2256,6 +2358,21 @@ export interface ListInferenceProfilesRequest {
      * @public
      */
     nextToken?: string;
+    /**
+     * <p>Filters for inference profiles that match the type you specify.</p>
+     * <ul>
+     * <li>
+     * <p>
+     * <code>SYSTEM_DEFINED</code> – The inference profile is defined by Amazon Bedrock. You can route inference requests across regions with these inference profiles.</p>
+     * </li>
+     * <li>
+     * <p>
+     * <code>APPLICATION</code> – The inference profile was created by a user. This type of inference profile can track metrics and costs when invoking the model in it. The inference profile may route requests to one or multiple regions.</p>
+     * </li>
+     * </ul>
+     * @public
+     */
+    typeEquals?: InferenceProfileType;
 }
 /**
  * <p>Contains information about an inference profile.</p>

@@ -2267,11 +2384,6 @@ export interface InferenceProfileSummary {
      * @public
      */
     inferenceProfileName: string | undefined;
-    /**
-     * <p>A list of information about each model in the inference profile.</p>
-     * @public
-     */
-    models: InferenceProfileModel[] | undefined;
     /**
      * <p>The description of the inference profile.</p>
      * @public

@@ -2292,18 +2404,33 @@ export interface InferenceProfileSummary {
      * @public
      */
     inferenceProfileArn: string | undefined;
+    /**
+     * <p>A list of information about each model in the inference profile.</p>
+     * @public
+     */
+    models: InferenceProfileModel[] | undefined;
     /**
      * <p>The unique identifier of the inference profile.</p>
      * @public
      */
     inferenceProfileId: string | undefined;
     /**
-     * <p>The status of the inference profile. <code>ACTIVE</code> means that the inference profile is
+     * <p>The status of the inference profile. <code>ACTIVE</code> means that the inference profile is ready to be used.</p>
      * @public
      */
     status: InferenceProfileStatus | undefined;
     /**
-     * <p>The type of the inference profile.
+     * <p>The type of the inference profile. The following types are possible:</p>
+     * <ul>
+     * <li>
+     * <p>
+     * <code>SYSTEM_DEFINED</code> – The inference profile is defined by Amazon Bedrock. You can route inference requests across regions with these inference profiles.</p>
+     * </li>
+     * <li>
+     * <p>
+     * <code>APPLICATION</code> – The inference profile was created by a user. This type of inference profile can track metrics and costs when invoking the model in it. The inference profile may route requests to one or multiple regions.</p>
+     * </li>
+     * </ul>
      * @public
      */
     type: InferenceProfileType | undefined;

@@ -5048,6 +5175,22 @@ export declare const ListGuardrailsResponseFilterSensitiveLog: (obj: ListGuardra
  * @internal
  */
 export declare const UpdateGuardrailRequestFilterSensitiveLog: (obj: UpdateGuardrailRequest) => any;
+/**
+ * @internal
+ */
+export declare const CreateInferenceProfileRequestFilterSensitiveLog: (obj: CreateInferenceProfileRequest) => any;
+/**
+ * @internal
+ */
+export declare const GetInferenceProfileResponseFilterSensitiveLog: (obj: GetInferenceProfileResponse) => any;
+/**
+ * @internal
+ */
+export declare const InferenceProfileSummaryFilterSensitiveLog: (obj: InferenceProfileSummary) => any;
+/**
+ * @internal
+ */
+export declare const ListInferenceProfilesResponseFilterSensitiveLog: (obj: ListInferenceProfilesResponse) => any;
 /**
  * @internal
  */