@aws-sdk/client-bedrock 3.637.0 → 3.640.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -0
- package/dist-cjs/index.js +131 -0
- package/dist-es/Bedrock.js +4 -0
- package/dist-es/commands/GetInferenceProfileCommand.js +24 -0
- package/dist-es/commands/ListInferenceProfilesCommand.js +24 -0
- package/dist-es/commands/index.js +2 -0
- package/dist-es/models/models_0.js +6 -0
- package/dist-es/pagination/ListInferenceProfilesPaginator.js +4 -0
- package/dist-es/pagination/index.js +1 -0
- package/dist-es/protocols/Aws_restJson1.js +79 -0
- package/dist-types/Bedrock.d.ts +15 -0
- package/dist-types/BedrockClient.d.ts +4 -2
- package/dist-types/commands/CreateModelInvocationJobCommand.d.ts +2 -2
- package/dist-types/commands/GetInferenceProfileCommand.d.ts +87 -0
- package/dist-types/commands/ListInferenceProfilesCommand.d.ts +90 -0
- package/dist-types/commands/index.d.ts +2 -0
- package/dist-types/models/models_0.d.ts +178 -4
- package/dist-types/pagination/ListInferenceProfilesPaginator.d.ts +7 -0
- package/dist-types/pagination/index.d.ts +1 -0
- package/dist-types/protocols/Aws_restJson1.d.ts +18 -0
- package/dist-types/ts3.4/Bedrock.d.ts +35 -0
- package/dist-types/ts3.4/BedrockClient.d.ts +12 -0
- package/dist-types/ts3.4/commands/GetInferenceProfileCommand.d.ts +40 -0
- package/dist-types/ts3.4/commands/ListInferenceProfilesCommand.d.ts +40 -0
- package/dist-types/ts3.4/commands/index.d.ts +2 -0
- package/dist-types/ts3.4/models/models_0.d.ts +46 -0
- package/dist-types/ts3.4/pagination/ListInferenceProfilesPaginator.d.ts +11 -0
- package/dist-types/ts3.4/pagination/index.d.ts +1 -0
- package/dist-types/ts3.4/protocols/Aws_restJson1.d.ts +24 -0
- package/package.json +1 -1

package/dist-types/commands/GetInferenceProfileCommand.d.ts

@@ -0,0 +1,87 @@
+import { Command as $Command } from "@smithy/smithy-client";
+import { MetadataBearer as __MetadataBearer } from "@smithy/types";
+import { BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../BedrockClient";
+import { GetInferenceProfileRequest, GetInferenceProfileResponse } from "../models/models_0";
+/**
+ * @public
+ */
+export type { __MetadataBearer };
+export { $Command };
+/**
+ * @public
+ *
+ * The input for {@link GetInferenceProfileCommand}.
+ */
+export interface GetInferenceProfileCommandInput extends GetInferenceProfileRequest {
+}
+/**
+ * @public
+ *
+ * The output of {@link GetInferenceProfileCommand}.
+ */
+export interface GetInferenceProfileCommandOutput extends GetInferenceProfileResponse, __MetadataBearer {
+}
+declare const GetInferenceProfileCommand_base: {
+    new (input: GetInferenceProfileCommandInput): import("@smithy/smithy-client").CommandImpl<GetInferenceProfileCommandInput, GetInferenceProfileCommandOutput, BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
+    new (__0_0: GetInferenceProfileCommandInput): import("@smithy/smithy-client").CommandImpl<GetInferenceProfileCommandInput, GetInferenceProfileCommandOutput, BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
+    getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
+};
+/**
+ * <p>Gets information about an inference profile. For more information, see the Amazon Bedrock User Guide.</p>
+ * @example
+ * Use a bare-bones client and the command you need to make an API call.
+ * ```javascript
+ * import { BedrockClient, GetInferenceProfileCommand } from "@aws-sdk/client-bedrock"; // ES Modules import
+ * // const { BedrockClient, GetInferenceProfileCommand } = require("@aws-sdk/client-bedrock"); // CommonJS import
+ * const client = new BedrockClient(config);
+ * const input = { // GetInferenceProfileRequest
+ *   inferenceProfileIdentifier: "STRING_VALUE", // required
+ * };
+ * const command = new GetInferenceProfileCommand(input);
+ * const response = await client.send(command);
+ * // { // GetInferenceProfileResponse
+ * //   inferenceProfileName: "STRING_VALUE", // required
+ * //   models: [ // InferenceProfileModels // required
+ * //     { // InferenceProfileModel
+ * //       modelArn: "STRING_VALUE",
+ * //     },
+ * //   ],
+ * //   description: "STRING_VALUE",
+ * //   createdAt: new Date("TIMESTAMP"),
+ * //   updatedAt: new Date("TIMESTAMP"),
+ * //   inferenceProfileArn: "STRING_VALUE", // required
+ * //   inferenceProfileId: "STRING_VALUE", // required
+ * //   status: "ACTIVE", // required
+ * //   type: "SYSTEM_DEFINED", // required
+ * // };
+ *
+ * ```
+ *
+ * @param GetInferenceProfileCommandInput - {@link GetInferenceProfileCommandInput}
+ * @returns {@link GetInferenceProfileCommandOutput}
+ * @see {@link GetInferenceProfileCommandInput} for command's `input` shape.
+ * @see {@link GetInferenceProfileCommandOutput} for command's `response` shape.
+ * @see {@link BedrockClientResolvedConfig | config} for BedrockClient's `config` shape.
+ *
+ * @throws {@link AccessDeniedException} (client fault)
+ *  <p>The request is denied because of missing access permissions.</p>
+ *
+ * @throws {@link InternalServerException} (server fault)
+ *  <p>An internal server error occurred. Retry your request.</p>
+ *
+ * @throws {@link ResourceNotFoundException} (client fault)
+ *  <p>The specified resource Amazon Resource Name (ARN) was not found. Check the Amazon Resource Name (ARN) and try your request again.</p>
+ *
+ * @throws {@link ThrottlingException} (client fault)
+ *  <p>The number of requests exceeds the limit. Resubmit your request later.</p>
+ *
+ * @throws {@link ValidationException} (client fault)
+ *  <p>Input validation failed. Check your request parameters and retry the request.</p>
+ *
+ * @throws {@link BedrockServiceException}
+ * <p>Base exception class for all service exceptions from Bedrock service.</p>
+ *
+ * @public
+ */
+export declare class GetInferenceProfileCommand extends GetInferenceProfileCommand_base {
+}
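The `@throws` list above corresponds to modeled exception classes that the package exports alongside the command, so callers can branch on failures with `instanceof`. A minimal sketch of that pattern, assuming an ES module where top-level `await` is available; the region and profile identifier are placeholders, not values from this diff:

```ts
import {
  BedrockClient,
  GetInferenceProfileCommand,
  ResourceNotFoundException,
  ThrottlingException,
} from "@aws-sdk/client-bedrock";

const client = new BedrockClient({ region: "us-east-1" }); // placeholder region

try {
  const profile = await client.send(
    new GetInferenceProfileCommand({ inferenceProfileIdentifier: "my-profile-id" }) // placeholder identifier
  );
  console.log(profile.inferenceProfileArn, profile.status);
} catch (err) {
  if (err instanceof ResourceNotFoundException) {
    // Documented client fault: the identifier did not resolve to a profile.
    console.error("Inference profile not found:", err.message);
  } else if (err instanceof ThrottlingException) {
    // Documented client fault: back off and retry later.
    console.error("Request throttled:", err.message);
  } else {
    throw err;
  }
}
```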

package/dist-types/commands/ListInferenceProfilesCommand.d.ts

@@ -0,0 +1,90 @@
+import { Command as $Command } from "@smithy/smithy-client";
+import { MetadataBearer as __MetadataBearer } from "@smithy/types";
+import { BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../BedrockClient";
+import { ListInferenceProfilesRequest, ListInferenceProfilesResponse } from "../models/models_0";
+/**
+ * @public
+ */
+export type { __MetadataBearer };
+export { $Command };
+/**
+ * @public
+ *
+ * The input for {@link ListInferenceProfilesCommand}.
+ */
+export interface ListInferenceProfilesCommandInput extends ListInferenceProfilesRequest {
+}
+/**
+ * @public
+ *
+ * The output of {@link ListInferenceProfilesCommand}.
+ */
+export interface ListInferenceProfilesCommandOutput extends ListInferenceProfilesResponse, __MetadataBearer {
+}
+declare const ListInferenceProfilesCommand_base: {
+    new (input: ListInferenceProfilesCommandInput): import("@smithy/smithy-client").CommandImpl<ListInferenceProfilesCommandInput, ListInferenceProfilesCommandOutput, BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
+    new (...[input]: [] | [ListInferenceProfilesCommandInput]): import("@smithy/smithy-client").CommandImpl<ListInferenceProfilesCommandInput, ListInferenceProfilesCommandOutput, BedrockClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes>;
+    getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
+};
+/**
+ * <p>Returns a list of inference profiles that you can use.</p>
+ * @example
+ * Use a bare-bones client and the command you need to make an API call.
+ * ```javascript
+ * import { BedrockClient, ListInferenceProfilesCommand } from "@aws-sdk/client-bedrock"; // ES Modules import
+ * // const { BedrockClient, ListInferenceProfilesCommand } = require("@aws-sdk/client-bedrock"); // CommonJS import
+ * const client = new BedrockClient(config);
+ * const input = { // ListInferenceProfilesRequest
+ *   maxResults: Number("int"),
+ *   nextToken: "STRING_VALUE",
+ * };
+ * const command = new ListInferenceProfilesCommand(input);
+ * const response = await client.send(command);
+ * // { // ListInferenceProfilesResponse
+ * //   inferenceProfileSummaries: [ // InferenceProfileSummaries
+ * //     { // InferenceProfileSummary
+ * //       inferenceProfileName: "STRING_VALUE", // required
+ * //       models: [ // InferenceProfileModels // required
+ * //         { // InferenceProfileModel
+ * //           modelArn: "STRING_VALUE",
+ * //         },
+ * //       ],
+ * //       description: "STRING_VALUE",
+ * //       createdAt: new Date("TIMESTAMP"),
+ * //       updatedAt: new Date("TIMESTAMP"),
+ * //       inferenceProfileArn: "STRING_VALUE", // required
+ * //       inferenceProfileId: "STRING_VALUE", // required
+ * //       status: "ACTIVE", // required
+ * //       type: "SYSTEM_DEFINED", // required
+ * //     },
+ * //   ],
+ * //   nextToken: "STRING_VALUE",
+ * // };
+ *
+ * ```
+ *
+ * @param ListInferenceProfilesCommandInput - {@link ListInferenceProfilesCommandInput}
+ * @returns {@link ListInferenceProfilesCommandOutput}
+ * @see {@link ListInferenceProfilesCommandInput} for command's `input` shape.
+ * @see {@link ListInferenceProfilesCommandOutput} for command's `response` shape.
+ * @see {@link BedrockClientResolvedConfig | config} for BedrockClient's `config` shape.
+ *
+ * @throws {@link AccessDeniedException} (client fault)
+ *  <p>The request is denied because of missing access permissions.</p>
+ *
+ * @throws {@link InternalServerException} (server fault)
+ *  <p>An internal server error occurred. Retry your request.</p>
+ *
+ * @throws {@link ThrottlingException} (client fault)
+ *  <p>The number of requests exceeds the limit. Resubmit your request later.</p>
+ *
+ * @throws {@link ValidationException} (client fault)
+ *  <p>Input validation failed. Check your request parameters and retry the request.</p>
+ *
+ * @throws {@link BedrockServiceException}
+ * <p>Base exception class for all service exceptions from Bedrock service.</p>
+ *
+ * @public
+ */
+export declare class ListInferenceProfilesCommand extends ListInferenceProfilesCommand_base {
+}

package/dist-types/commands/index.d.ts

@@ -17,6 +17,7 @@ export * from "./GetEvaluationJobCommand";
 export * from "./GetFoundationModelCommand";
 export * from "./GetGuardrailCommand";
 export * from "./GetImportedModelCommand";
+export * from "./GetInferenceProfileCommand";
 export * from "./GetModelCopyJobCommand";
 export * from "./GetModelCustomizationJobCommand";
 export * from "./GetModelImportJobCommand";
@@ -28,6 +29,7 @@ export * from "./ListEvaluationJobsCommand";
 export * from "./ListFoundationModelsCommand";
 export * from "./ListGuardrailsCommand";
 export * from "./ListImportedModelsCommand";
+export * from "./ListInferenceProfilesCommand";
 export * from "./ListModelCopyJobsCommand";
 export * from "./ListModelCustomizationJobsCommand";
 export * from "./ListModelImportJobsCommand";

package/dist-types/models/models_0.d.ts

@@ -2149,6 +2149,180 @@ export interface UpdateGuardrailResponse {
      */
     updatedAt: Date | undefined;
 }
+/**
+ * @public
+ */
+export interface GetInferenceProfileRequest {
+    /**
+     * <p>The unique identifier of the inference profile.</p>
+     * @public
+     */
+    inferenceProfileIdentifier: string | undefined;
+}
+/**
+ * <p>Contains information about a model.</p>
+ * @public
+ */
+export interface InferenceProfileModel {
+    /**
+     * <p>The Amazon Resource Name (ARN) of the model.</p>
+     * @public
+     */
+    modelArn?: string;
+}
+/**
+ * @public
+ * @enum
+ */
+export declare const InferenceProfileStatus: {
+    readonly ACTIVE: "ACTIVE";
+};
+/**
+ * @public
+ */
+export type InferenceProfileStatus = (typeof InferenceProfileStatus)[keyof typeof InferenceProfileStatus];
+/**
+ * @public
+ * @enum
+ */
+export declare const InferenceProfileType: {
+    readonly SYSTEM_DEFINED: "SYSTEM_DEFINED";
+};
+/**
+ * @public
+ */
+export type InferenceProfileType = (typeof InferenceProfileType)[keyof typeof InferenceProfileType];
+/**
+ * @public
+ */
+export interface GetInferenceProfileResponse {
+    /**
+     * <p>The name of the inference profile.</p>
+     * @public
+     */
+    inferenceProfileName: string | undefined;
+    /**
+     * <p>A list of information about each model in the inference profile.</p>
+     * @public
+     */
+    models: InferenceProfileModel[] | undefined;
+    /**
+     * <p>The description of the inference profile.</p>
+     * @public
+     */
+    description?: string;
+    /**
+     * <p>The time at which the inference profile was created.</p>
+     * @public
+     */
+    createdAt?: Date;
+    /**
+     * <p>The time at which the inference profile was last updated.</p>
+     * @public
+     */
+    updatedAt?: Date;
+    /**
+     * <p>The Amazon Resource Name (ARN) of the inference profile.</p>
+     * @public
+     */
+    inferenceProfileArn: string | undefined;
+    /**
+     * <p>The unique identifier of the inference profile.</p>
+     * @public
+     */
+    inferenceProfileId: string | undefined;
+    /**
+     * <p>The status of the inference profile. <code>ACTIVE</code> means that the inference profile is available to use.</p>
+     * @public
+     */
+    status: InferenceProfileStatus | undefined;
+    /**
+     * <p>The type of the inference profile. <code>SYSTEM_DEFINED</code> means that the inference profile is defined by Amazon Bedrock.</p>
+     * @public
+     */
+    type: InferenceProfileType | undefined;
+}
+/**
+ * @public
+ */
+export interface ListInferenceProfilesRequest {
+    /**
+     * <p>The maximum number of results to return in the response. If the total number of results is greater than this value, use the token returned in the response in the <code>nextToken</code> field when making another request to return the next batch of results.</p>
+     * @public
+     */
+    maxResults?: number;
+    /**
+     * <p>If the total number of results is greater than the <code>maxResults</code> value provided in the request, enter the token returned in the <code>nextToken</code> field in the response in this field to return the next batch of results.</p>
+     * @public
+     */
+    nextToken?: string;
+}
+/**
+ * <p>Contains information about an inference profile.</p>
+ * @public
+ */
+export interface InferenceProfileSummary {
+    /**
+     * <p>The name of the inference profile.</p>
+     * @public
+     */
+    inferenceProfileName: string | undefined;
+    /**
+     * <p>A list of information about each model in the inference profile.</p>
+     * @public
+     */
+    models: InferenceProfileModel[] | undefined;
+    /**
+     * <p>The description of the inference profile.</p>
+     * @public
+     */
+    description?: string;
+    /**
+     * <p>The time at which the inference profile was created.</p>
+     * @public
+     */
+    createdAt?: Date;
+    /**
+     * <p>The time at which the inference profile was last updated.</p>
+     * @public
+     */
+    updatedAt?: Date;
+    /**
+     * <p>The Amazon Resource Name (ARN) of the inference profile.</p>
+     * @public
+     */
+    inferenceProfileArn: string | undefined;
+    /**
+     * <p>The unique identifier of the inference profile.</p>
+     * @public
+     */
+    inferenceProfileId: string | undefined;
+    /**
+     * <p>The status of the inference profile. <code>ACTIVE</code> means that the inference profile is available to use.</p>
+     * @public
+     */
+    status: InferenceProfileStatus | undefined;
+    /**
+     * <p>The type of the inference profile. <code>SYSTEM_DEFINED</code> means that the inference profile is defined by Amazon Bedrock.</p>
+     * @public
+     */
+    type: InferenceProfileType | undefined;
+}
+/**
+ * @public
+ */
+export interface ListInferenceProfilesResponse {
+    /**
+     * <p>A list of information about each inference profile that you can use.</p>
+     * @public
+     */
+    inferenceProfileSummaries?: InferenceProfileSummary[];
+    /**
+     * <p>If the total number of results is greater than the <code>maxResults</code> value provided in the request, use this token when making another request in the <code>nextToken</code> field to return the next batch of results.</p>
+     * @public
+     */
+    nextToken?: string;
+}
 /**
  * @public
  */
@@ -2283,7 +2457,7 @@ export interface CreateModelCopyJobRequest {
     targetModelTags?: Tag[];
     /**
      * <p>A unique, case-sensitive identifier to ensure that the API request completes no more than one time. If this token matches a previous request,
-     *
+     *          Amazon Bedrock ignores the request, but does not return an error. For more information, see <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring idempotency</a>.</p>
      * @public
      */
     clientRequestToken?: string;
@@ -3116,7 +3290,7 @@ export interface CreateModelInvocationJobRequest {
     roleArn: string | undefined;
     /**
      * <p>A unique, case-sensitive identifier to ensure that the API request completes no more than one time. If this token matches a previous request,
-     *
+     *          Amazon Bedrock ignores the request, but does not return an error. For more information, see <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring idempotency</a>.</p>
      * @public
      */
     clientRequestToken?: string;
@@ -3207,7 +3381,7 @@ export interface GetModelInvocationJobResponse {
     modelId: string | undefined;
     /**
      * <p>A unique, case-sensitive identifier to ensure that the API request completes no more than one time. If this token matches a previous request,
-     *
+     *          Amazon Bedrock ignores the request, but does not return an error. For more information, see <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring idempotency</a>.</p>
      * @public
      */
     clientRequestToken?: string;
@@ -3333,7 +3507,7 @@ export interface ModelInvocationJobSummary {
     modelId: string | undefined;
     /**
      * <p>A unique, case-sensitive identifier to ensure that the API request completes no more than one time. If this token matches a previous request,
-     *
+     *          Amazon Bedrock ignores the request, but does not return an error. For more information, see <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring idempotency</a>.</p>
      * @public
      */
     clientRequestToken?: string;
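The `maxResults`/`nextToken` members documented in `ListInferenceProfilesRequest` and `ListInferenceProfilesResponse` above follow the usual AWS pagination contract: resend the returned `nextToken` until the response no longer includes one. A hand-rolled sketch of that loop (the region is a placeholder):

```ts
import {
  BedrockClient,
  ListInferenceProfilesCommand,
  InferenceProfileSummary,
} from "@aws-sdk/client-bedrock";

const client = new BedrockClient({ region: "us-east-1" }); // placeholder region

// Accumulate every summary by following nextToken until the service omits it.
const profiles: InferenceProfileSummary[] = [];
let nextToken: string | undefined;
do {
  const page = await client.send(
    new ListInferenceProfilesCommand({ maxResults: 10, nextToken })
  );
  profiles.push(...(page.inferenceProfileSummaries ?? []));
  nextToken = page.nextToken;
} while (nextToken);

console.log(`Collected ${profiles.length} inference profile summaries`);
```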

package/dist-types/pagination/ListInferenceProfilesPaginator.d.ts

@@ -0,0 +1,7 @@
+import { Paginator } from "@smithy/types";
+import { ListInferenceProfilesCommandInput, ListInferenceProfilesCommandOutput } from "../commands/ListInferenceProfilesCommand";
+import { BedrockPaginationConfiguration } from "./Interfaces";
+/**
+ * @public
+ */
+export declare const paginateListInferenceProfiles: (config: BedrockPaginationConfiguration, input: ListInferenceProfilesCommandInput, ...rest: any[]) => Paginator<ListInferenceProfilesCommandOutput>;
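`paginateListInferenceProfiles` wraps the same `nextToken` handling in an async iterator, so the manual loop above is usually unnecessary. A sketch of typical usage (region and page size are placeholders):

```ts
import { BedrockClient, paginateListInferenceProfiles } from "@aws-sdk/client-bedrock";

const client = new BedrockClient({ region: "us-east-1" }); // placeholder region

// Each iteration yields one ListInferenceProfilesResponse page; the paginator
// forwards nextToken between requests internally.
for await (const page of paginateListInferenceProfiles({ client, pageSize: 10 }, {})) {
  for (const summary of page.inferenceProfileSummaries ?? []) {
    console.log(summary.inferenceProfileId, summary.status);
  }
}
```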

package/dist-types/pagination/index.d.ts

@@ -3,6 +3,7 @@ export * from "./ListCustomModelsPaginator";
 export * from "./ListEvaluationJobsPaginator";
 export * from "./ListGuardrailsPaginator";
 export * from "./ListImportedModelsPaginator";
+export * from "./ListInferenceProfilesPaginator";
 export * from "./ListModelCopyJobsPaginator";
 export * from "./ListModelCustomizationJobsPaginator";
 export * from "./ListModelImportJobsPaginator";

package/dist-types/protocols/Aws_restJson1.d.ts

@@ -19,6 +19,7 @@ import { GetEvaluationJobCommandInput, GetEvaluationJobCommandOutput } from "../
 import { GetFoundationModelCommandInput, GetFoundationModelCommandOutput } from "../commands/GetFoundationModelCommand";
 import { GetGuardrailCommandInput, GetGuardrailCommandOutput } from "../commands/GetGuardrailCommand";
 import { GetImportedModelCommandInput, GetImportedModelCommandOutput } from "../commands/GetImportedModelCommand";
+import { GetInferenceProfileCommandInput, GetInferenceProfileCommandOutput } from "../commands/GetInferenceProfileCommand";
 import { GetModelCopyJobCommandInput, GetModelCopyJobCommandOutput } from "../commands/GetModelCopyJobCommand";
 import { GetModelCustomizationJobCommandInput, GetModelCustomizationJobCommandOutput } from "../commands/GetModelCustomizationJobCommand";
 import { GetModelImportJobCommandInput, GetModelImportJobCommandOutput } from "../commands/GetModelImportJobCommand";
@@ -30,6 +31,7 @@ import { ListEvaluationJobsCommandInput, ListEvaluationJobsCommandOutput } from
 import { ListFoundationModelsCommandInput, ListFoundationModelsCommandOutput } from "../commands/ListFoundationModelsCommand";
 import { ListGuardrailsCommandInput, ListGuardrailsCommandOutput } from "../commands/ListGuardrailsCommand";
 import { ListImportedModelsCommandInput, ListImportedModelsCommandOutput } from "../commands/ListImportedModelsCommand";
+import { ListInferenceProfilesCommandInput, ListInferenceProfilesCommandOutput } from "../commands/ListInferenceProfilesCommand";
 import { ListModelCopyJobsCommandInput, ListModelCopyJobsCommandOutput } from "../commands/ListModelCopyJobsCommand";
 import { ListModelCustomizationJobsCommandInput, ListModelCustomizationJobsCommandOutput } from "../commands/ListModelCustomizationJobsCommand";
 import { ListModelImportJobsCommandInput, ListModelImportJobsCommandOutput } from "../commands/ListModelImportJobsCommand";
@@ -120,6 +122,10 @@ export declare const se_GetGuardrailCommand: (input: GetGuardrailCommandInput, c
 * serializeAws_restJson1GetImportedModelCommand
 */
 export declare const se_GetImportedModelCommand: (input: GetImportedModelCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restJson1GetInferenceProfileCommand
+ */
+export declare const se_GetInferenceProfileCommand: (input: GetInferenceProfileCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
 /**
 * serializeAws_restJson1GetModelCopyJobCommand
 */
@@ -164,6 +170,10 @@ export declare const se_ListGuardrailsCommand: (input: ListGuardrailsCommandInpu
 * serializeAws_restJson1ListImportedModelsCommand
 */
 export declare const se_ListImportedModelsCommand: (input: ListImportedModelsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
+/**
+ * serializeAws_restJson1ListInferenceProfilesCommand
+ */
+export declare const se_ListInferenceProfilesCommand: (input: ListInferenceProfilesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>;
 /**
 * serializeAws_restJson1ListModelCopyJobsCommand
 */
@@ -296,6 +306,10 @@ export declare const de_GetGuardrailCommand: (output: __HttpResponse, context: _
 * deserializeAws_restJson1GetImportedModelCommand
 */
 export declare const de_GetImportedModelCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetImportedModelCommandOutput>;
+/**
+ * deserializeAws_restJson1GetInferenceProfileCommand
+ */
+export declare const de_GetInferenceProfileCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<GetInferenceProfileCommandOutput>;
 /**
 * deserializeAws_restJson1GetModelCopyJobCommand
 */
@@ -340,6 +354,10 @@ export declare const de_ListGuardrailsCommand: (output: __HttpResponse, context:
 * deserializeAws_restJson1ListImportedModelsCommand
 */
 export declare const de_ListImportedModelsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<ListImportedModelsCommandOutput>;
+/**
+ * deserializeAws_restJson1ListInferenceProfilesCommand
+ */
+export declare const de_ListInferenceProfilesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise<ListInferenceProfilesCommandOutput>;
 /**
 * deserializeAws_restJson1ListModelCopyJobsCommand
 */

package/dist-types/ts3.4/Bedrock.d.ts

@@ -76,6 +76,10 @@ import {
   GetImportedModelCommandInput,
   GetImportedModelCommandOutput,
 } from "./commands/GetImportedModelCommand";
+import {
+  GetInferenceProfileCommandInput,
+  GetInferenceProfileCommandOutput,
+} from "./commands/GetInferenceProfileCommand";
 import {
   GetModelCopyJobCommandInput,
   GetModelCopyJobCommandOutput,
@@ -120,6 +124,10 @@ import {
   ListImportedModelsCommandInput,
   ListImportedModelsCommandOutput,
 } from "./commands/ListImportedModelsCommand";
+import {
+  ListInferenceProfilesCommandInput,
+  ListInferenceProfilesCommandOutput,
+} from "./commands/ListInferenceProfilesCommand";
 import {
   ListModelCopyJobsCommandInput,
   ListModelCopyJobsCommandOutput,
@@ -431,6 +439,19 @@ export interface Bedrock {
     options: __HttpHandlerOptions,
     cb: (err: any, data?: GetImportedModelCommandOutput) => void
   ): void;
+  getInferenceProfile(
+    args: GetInferenceProfileCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<GetInferenceProfileCommandOutput>;
+  getInferenceProfile(
+    args: GetInferenceProfileCommandInput,
+    cb: (err: any, data?: GetInferenceProfileCommandOutput) => void
+  ): void;
+  getInferenceProfile(
+    args: GetInferenceProfileCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: GetInferenceProfileCommandOutput) => void
+  ): void;
   getModelCopyJob(
     args: GetModelCopyJobCommandInput,
     options?: __HttpHandlerOptions
@@ -586,6 +607,20 @@ export interface Bedrock {
     options: __HttpHandlerOptions,
     cb: (err: any, data?: ListImportedModelsCommandOutput) => void
   ): void;
+  listInferenceProfiles(): Promise<ListInferenceProfilesCommandOutput>;
+  listInferenceProfiles(
+    args: ListInferenceProfilesCommandInput,
+    options?: __HttpHandlerOptions
+  ): Promise<ListInferenceProfilesCommandOutput>;
+  listInferenceProfiles(
+    args: ListInferenceProfilesCommandInput,
+    cb: (err: any, data?: ListInferenceProfilesCommandOutput) => void
+  ): void;
+  listInferenceProfiles(
+    args: ListInferenceProfilesCommandInput,
+    options: __HttpHandlerOptions,
+    cb: (err: any, data?: ListInferenceProfilesCommandOutput) => void
+  ): void;
   listModelCopyJobs(): Promise<ListModelCopyJobsCommandOutput>;
   listModelCopyJobs(
     args: ListModelCopyJobsCommandInput,
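These overloads extend the aggregated `Bedrock` client, which exposes each operation as a method in promise or callback style instead of requiring an explicit `send(new ...Command(...))`. A sketch of both forms (region and identifier are placeholders):

```ts
import { Bedrock } from "@aws-sdk/client-bedrock";

const bedrock = new Bedrock({ region: "us-east-1" }); // placeholder region

// Promise form, equivalent to sending ListInferenceProfilesCommand on a BedrockClient.
const { inferenceProfileSummaries } = await bedrock.listInferenceProfiles({ maxResults: 5 });
console.log(inferenceProfileSummaries?.length ?? 0, "profiles visible");

// Callback form, matching the (args, cb) overload added above.
bedrock.getInferenceProfile(
  { inferenceProfileIdentifier: "my-profile-id" }, // placeholder identifier
  (err, data) => {
    if (err) console.error(err);
    else console.log(data?.inferenceProfileName);
  }
);
```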

package/dist-types/ts3.4/BedrockClient.d.ts

@@ -121,6 +121,10 @@ import {
   GetImportedModelCommandInput,
   GetImportedModelCommandOutput,
 } from "./commands/GetImportedModelCommand";
+import {
+  GetInferenceProfileCommandInput,
+  GetInferenceProfileCommandOutput,
+} from "./commands/GetInferenceProfileCommand";
 import {
   GetModelCopyJobCommandInput,
   GetModelCopyJobCommandOutput,
@@ -165,6 +169,10 @@ import {
   ListImportedModelsCommandInput,
   ListImportedModelsCommandOutput,
 } from "./commands/ListImportedModelsCommand";
+import {
+  ListInferenceProfilesCommandInput,
+  ListInferenceProfilesCommandOutput,
+} from "./commands/ListInferenceProfilesCommand";
 import {
   ListModelCopyJobsCommandInput,
   ListModelCopyJobsCommandOutput,
@@ -248,6 +256,7 @@ export type ServiceInputTypes =
   | GetFoundationModelCommandInput
   | GetGuardrailCommandInput
   | GetImportedModelCommandInput
+  | GetInferenceProfileCommandInput
   | GetModelCopyJobCommandInput
   | GetModelCustomizationJobCommandInput
   | GetModelImportJobCommandInput
@@ -259,6 +268,7 @@ export type ServiceInputTypes =
   | ListFoundationModelsCommandInput
   | ListGuardrailsCommandInput
   | ListImportedModelsCommandInput
+  | ListInferenceProfilesCommandInput
   | ListModelCopyJobsCommandInput
   | ListModelCustomizationJobsCommandInput
   | ListModelImportJobsCommandInput
@@ -293,6 +303,7 @@ export type ServiceOutputTypes =
   | GetFoundationModelCommandOutput
   | GetGuardrailCommandOutput
   | GetImportedModelCommandOutput
+  | GetInferenceProfileCommandOutput
   | GetModelCopyJobCommandOutput
   | GetModelCustomizationJobCommandOutput
   | GetModelImportJobCommandOutput
@@ -304,6 +315,7 @@ export type ServiceOutputTypes =
   | ListFoundationModelsCommandOutput
   | ListGuardrailsCommandOutput
   | ListImportedModelsCommandOutput
+  | ListInferenceProfilesCommandOutput
   | ListModelCopyJobsCommandOutput
   | ListModelCustomizationJobsCommandOutput
   | ListModelImportJobsCommandOutput

package/dist-types/ts3.4/commands/GetInferenceProfileCommand.d.ts

@@ -0,0 +1,40 @@
+import { Command as $Command } from "@smithy/smithy-client";
+import { MetadataBearer as __MetadataBearer } from "@smithy/types";
+import {
+  BedrockClientResolvedConfig,
+  ServiceInputTypes,
+  ServiceOutputTypes,
+} from "../BedrockClient";
+import {
+  GetInferenceProfileRequest,
+  GetInferenceProfileResponse,
+} from "../models/models_0";
+export { __MetadataBearer };
+export { $Command };
+export interface GetInferenceProfileCommandInput
+  extends GetInferenceProfileRequest {}
+export interface GetInferenceProfileCommandOutput
+  extends GetInferenceProfileResponse,
+    __MetadataBearer {}
+declare const GetInferenceProfileCommand_base: {
+  new (
+    input: GetInferenceProfileCommandInput
+  ): import("@smithy/smithy-client").CommandImpl<
+    GetInferenceProfileCommandInput,
+    GetInferenceProfileCommandOutput,
+    BedrockClientResolvedConfig,
+    ServiceInputTypes,
+    ServiceOutputTypes
+  >;
+  new (
+    __0_0: GetInferenceProfileCommandInput
+  ): import("@smithy/smithy-client").CommandImpl<
+    GetInferenceProfileCommandInput,
+    GetInferenceProfileCommandOutput,
+    BedrockClientResolvedConfig,
+    ServiceInputTypes,
+    ServiceOutputTypes
+  >;
+  getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions;
+};
+export declare class GetInferenceProfileCommand extends GetInferenceProfileCommand_base {}