@aws-sdk/client-omics 3.331.0 → 3.335.0
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/README.md +56 -8
- package/dist-cjs/Omics.js +12 -0
- package/dist-cjs/commands/AbortMultipartReadSetUploadCommand.js +45 -0
- package/dist-cjs/commands/CompleteMultipartReadSetUploadCommand.js +45 -0
- package/dist-cjs/commands/CreateMultipartReadSetUploadCommand.js +45 -0
- package/dist-cjs/commands/ListMultipartReadSetUploadsCommand.js +45 -0
- package/dist-cjs/commands/ListReadSetUploadPartsCommand.js +45 -0
- package/dist-cjs/commands/UploadReadSetPartCommand.js +46 -0
- package/dist-cjs/commands/index.js +6 -0
- package/dist-cjs/models/models_0.js +83 -55
- package/dist-cjs/pagination/ListMultipartReadSetUploadsPaginator.js +29 -0
- package/dist-cjs/pagination/ListReadSetUploadPartsPaginator.js +29 -0
- package/dist-cjs/pagination/index.js +2 -0
- package/dist-cjs/protocols/Aws_restJson1.js +626 -4
- package/dist-es/Omics.js +12 -0
- package/dist-es/commands/AbortMultipartReadSetUploadCommand.js +41 -0
- package/dist-es/commands/CompleteMultipartReadSetUploadCommand.js +41 -0
- package/dist-es/commands/CreateMultipartReadSetUploadCommand.js +41 -0
- package/dist-es/commands/ListMultipartReadSetUploadsCommand.js +41 -0
- package/dist-es/commands/ListReadSetUploadPartsCommand.js +41 -0
- package/dist-es/commands/UploadReadSetPartCommand.js +42 -0
- package/dist-es/commands/index.js +6 -0
- package/dist-es/models/models_0.js +78 -52
- package/dist-es/pagination/ListMultipartReadSetUploadsPaginator.js +25 -0
- package/dist-es/pagination/ListReadSetUploadPartsPaginator.js +25 -0
- package/dist-es/pagination/index.js +2 -0
- package/dist-es/protocols/Aws_restJson1.js +612 -2
- package/dist-types/Omics.d.ts +43 -1
- package/dist-types/OmicsClient.d.ts +12 -5
- package/dist-types/commands/AbortMultipartReadSetUploadCommand.d.ts +96 -0
- package/dist-types/commands/CompleteMultipartReadSetUploadCommand.d.ts +105 -0
- package/dist-types/commands/CreateMultipartReadSetUploadCommand.d.ts +120 -0
- package/dist-types/commands/CreateRunGroupCommand.d.ts +1 -0
- package/dist-types/commands/CreateSequenceStoreCommand.d.ts +2 -0
- package/dist-types/commands/CreateWorkflowCommand.d.ts +1 -0
- package/dist-types/commands/GetAnnotationImportJobCommand.d.ts +3 -0
- package/dist-types/commands/GetReadSetMetadataCommand.d.ts +1 -0
- package/dist-types/commands/GetRunCommand.d.ts +1 -0
- package/dist-types/commands/GetRunGroupCommand.d.ts +1 -0
- package/dist-types/commands/GetRunTaskCommand.d.ts +1 -0
- package/dist-types/commands/GetSequenceStoreCommand.d.ts +1 -0
- package/dist-types/commands/GetVariantImportJobCommand.d.ts +3 -0
- package/dist-types/commands/GetWorkflowCommand.d.ts +4 -0
- package/dist-types/commands/ListAnnotationImportJobsCommand.d.ts +3 -0
- package/dist-types/commands/ListMultipartReadSetUploadsCommand.d.ts +116 -0
- package/dist-types/commands/ListReadSetUploadPartsCommand.d.ts +115 -0
- package/dist-types/commands/ListReadSetsCommand.d.ts +4 -0
- package/dist-types/commands/ListRunGroupsCommand.d.ts +1 -0
- package/dist-types/commands/ListRunTasksCommand.d.ts +1 -0
- package/dist-types/commands/ListRunsCommand.d.ts +1 -0
- package/dist-types/commands/ListSequenceStoresCommand.d.ts +1 -0
- package/dist-types/commands/ListVariantImportJobsCommand.d.ts +3 -0
- package/dist-types/commands/ListWorkflowsCommand.d.ts +3 -0
- package/dist-types/commands/StartAnnotationImportJobCommand.d.ts +3 -0
- package/dist-types/commands/StartVariantImportJobCommand.d.ts +3 -0
- package/dist-types/commands/UpdateRunGroupCommand.d.ts +1 -0
- package/dist-types/commands/UploadReadSetPartCommand.d.ts +111 -0
- package/dist-types/commands/index.d.ts +6 -0
- package/dist-types/endpoint/EndpointParameters.d.ts +2 -1
- package/dist-types/models/models_0.d.ts +825 -119
- package/dist-types/pagination/ListMultipartReadSetUploadsPaginator.d.ts +7 -0
- package/dist-types/pagination/ListReadSetUploadPartsPaginator.d.ts +7 -0
- package/dist-types/pagination/index.d.ts +2 -0
- package/dist-types/protocols/Aws_restJson1.d.ts +57 -2
- package/dist-types/runtimeConfig.browser.d.ts +12 -12
- package/dist-types/runtimeConfig.d.ts +9 -9
- package/dist-types/runtimeConfig.native.d.ts +13 -13
- package/dist-types/runtimeConfig.shared.d.ts +4 -4
- package/dist-types/ts3.4/Omics.d.ts +102 -0
- package/dist-types/ts3.4/OmicsClient.d.ts +45 -7
- package/dist-types/ts3.4/commands/AbortMultipartReadSetUploadCommand.d.ts +41 -0
- package/dist-types/ts3.4/commands/CompleteMultipartReadSetUploadCommand.d.ts +41 -0
- package/dist-types/ts3.4/commands/CreateMultipartReadSetUploadCommand.d.ts +41 -0
- package/dist-types/ts3.4/commands/ListMultipartReadSetUploadsCommand.d.ts +41 -0
- package/dist-types/ts3.4/commands/ListReadSetUploadPartsCommand.d.ts +41 -0
- package/dist-types/ts3.4/commands/UploadReadSetPartCommand.d.ts +44 -0
- package/dist-types/ts3.4/commands/index.d.ts +6 -0
- package/dist-types/ts3.4/endpoint/EndpointParameters.d.ts +1 -2
- package/dist-types/ts3.4/models/models_0.d.ts +202 -52
- package/dist-types/ts3.4/pagination/ListMultipartReadSetUploadsPaginator.d.ts +11 -0
- package/dist-types/ts3.4/pagination/ListReadSetUploadPartsPaginator.d.ts +11 -0
- package/dist-types/ts3.4/pagination/index.d.ts +2 -0
- package/dist-types/ts3.4/protocols/Aws_restJson1.d.ts +74 -4
- package/dist-types/ts3.4/runtimeConfig.browser.d.ts +18 -15
- package/dist-types/ts3.4/runtimeConfig.d.ts +12 -12
- package/dist-types/ts3.4/runtimeConfig.native.d.ts +20 -15
- package/dist-types/ts3.4/runtimeConfig.shared.d.ts +4 -4
- package/package.json +7 -6
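
For orientation, the substantive change in 3.335.0 is a new multipart read set upload API (CreateMultipartReadSetUpload, UploadReadSetPart, CompleteMultipartReadSetUpload, AbortMultipartReadSetUpload, plus ListMultipartReadSetUploads and ListReadSetUploadParts with paginators), along with GPU-related fields (accelerators, maxGpus, gpus), a sequence store fallbackLocation, an annotationFields option on import jobs, and a NotSupportedOperationException error type. The hunks reproduced below appear to come from package/dist-es/protocols/Aws_restJson1.js. The sketch that follows is a hypothetical end-to-end use of the new upload commands inferred from this diff, not an example shipped with the package; the identifiers, reference ARN, the FASTQ/SOURCE1 enum values, and the { partNumber, partSource, checksum } part-entry shape are assumptions.

```ts
// Hypothetical sketch of the multipart read set upload flow introduced in 3.335.0.
// The sequence store ID, reference ARN, payload bytes, and single-part split are
// placeholders; command and field names mirror the serializers in this diff.
import {
  OmicsClient,
  CreateMultipartReadSetUploadCommand,
  UploadReadSetPartCommand,
  CompleteMultipartReadSetUploadCommand,
} from "@aws-sdk/client-omics";

const client = new OmicsClient({ region: "us-west-2" });
const sequenceStoreId = "1234567890"; // assumed existing sequence store

// 1. Start the multipart upload.
const { uploadId } = await client.send(
  new CreateMultipartReadSetUploadCommand({
    sequenceStoreId,
    sourceFileType: "FASTQ",
    subjectId: "subject-1",
    sampleId: "sample-1",
    name: "example-read-set",
    // Placeholder ARN of an existing reference in a reference store.
    referenceArn: "arn:aws:omics:us-west-2:111122223333:referenceStore/store-id/reference/ref-id",
  })
);

// 2. Upload each part; the returned checksum is echoed back on completion.
const { checksum } = await client.send(
  new UploadReadSetPartCommand({
    sequenceStoreId,
    uploadId,
    partSource: "SOURCE1", // assumed enum value for the first source file
    partNumber: 1,
    payload: new Uint8Array([/* part bytes */]),
  })
);

// 3. Complete the upload by listing every part that was sent.
const { readSetId } = await client.send(
  new CompleteMultipartReadSetUploadCommand({
    sequenceStoreId,
    uploadId,
    parts: [{ partNumber: 1, partSource: "SOURCE1", checksum }],
  })
);
console.log("created read set", readSetId);
```

Note from the serializers below that CreateMultipartReadSetUpload, ListMultipartReadSetUploads, and ListReadSetUploadParts are routed through a "control-storage-" host prefix, while UploadReadSetPart and CompleteMultipartReadSetUpload use "storage-", and UploadReadSetPart sends an unsigned payload ("x-amz-content-sha256": "UNSIGNED-PAYLOAD").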
@@ -1,8 +1,33 @@
-import { HttpRequest as __HttpRequest, isValidHostname as __isValidHostname, } from "@aws-sdk/protocol-http";
 import { _json, decorateServiceException as __decorateServiceException, expectBoolean as __expectBoolean, expectInt32 as __expectInt32, expectLong as __expectLong, expectNonNull as __expectNonNull, expectObject as __expectObject, expectString as __expectString, expectUnion as __expectUnion, map, parseRfc3339DateTimeWithOffset as __parseRfc3339DateTimeWithOffset, resolvedPath as __resolvedPath, take, withBaseException, } from "@aws-sdk/smithy-client";
+import { HttpRequest as __HttpRequest, isValidHostname as __isValidHostname, } from "@smithy/protocol-http";
 import { v4 as generateIdempotencyToken } from "uuid";
-import { AccessDeniedException, ConflictException, InternalServerException, RangeNotSatisfiableException, RequestTimeoutException, ResourceNotFoundException, ServiceQuotaExceededException, ThrottlingException, ValidationException, } from "../models/models_0";
+import { AccessDeniedException, ConflictException, InternalServerException, NotSupportedOperationException, RangeNotSatisfiableException, RequestTimeoutException, ResourceNotFoundException, ServiceQuotaExceededException, ThrottlingException, ValidationException, } from "../models/models_0";
 import { OmicsServiceException as __BaseException } from "../models/OmicsServiceException";
+export const se_AbortMultipartReadSetUploadCommand = async (input, context) => {
+    const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
+    const headers = {};
+    let resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` +
+        "/sequencestore/{sequenceStoreId}/upload/{uploadId}/abort";
+    resolvedPath = __resolvedPath(resolvedPath, input, "sequenceStoreId", () => input.sequenceStoreId, "{sequenceStoreId}", false);
+    resolvedPath = __resolvedPath(resolvedPath, input, "uploadId", () => input.uploadId, "{uploadId}", false);
+    let body;
+    let { hostname: resolvedHostname } = await context.endpoint();
+    if (context.disableHostPrefix !== true) {
+        resolvedHostname = "control-storage-" + resolvedHostname;
+        if (!__isValidHostname(resolvedHostname)) {
+            throw new Error("ValidationError: prefixed hostname must be hostname compatible.");
+        }
+    }
+    return new __HttpRequest({
+        protocol,
+        hostname: resolvedHostname,
+        port,
+        method: "DELETE",
+        headers,
+        path: resolvedPath,
+        body,
+    });
+};
 export const se_BatchDeleteReadSetCommand = async (input, context) => {
     const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
     const headers = {
@@ -101,6 +126,36 @@ export const se_CancelVariantImportJobCommand = async (input, context) => {
         body,
     });
 };
+export const se_CompleteMultipartReadSetUploadCommand = async (input, context) => {
+    const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
+    const headers = {
+        "content-type": "application/json",
+    };
+    let resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` +
+        "/sequencestore/{sequenceStoreId}/upload/{uploadId}/complete";
+    resolvedPath = __resolvedPath(resolvedPath, input, "sequenceStoreId", () => input.sequenceStoreId, "{sequenceStoreId}", false);
+    resolvedPath = __resolvedPath(resolvedPath, input, "uploadId", () => input.uploadId, "{uploadId}", false);
+    let body;
+    body = JSON.stringify(take(input, {
+        parts: (_) => _json(_),
+    }));
+    let { hostname: resolvedHostname } = await context.endpoint();
+    if (context.disableHostPrefix !== true) {
+        resolvedHostname = "storage-" + resolvedHostname;
+        if (!__isValidHostname(resolvedHostname)) {
+            throw new Error("ValidationError: prefixed hostname must be hostname compatible.");
+        }
+    }
+    return new __HttpRequest({
+        protocol,
+        hostname: resolvedHostname,
+        port,
+        method: "POST",
+        headers,
+        path: resolvedPath,
+        body,
+    });
+};
 export const se_CreateAnnotationStoreCommand = async (input, context) => {
     const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
     const headers = {
@@ -134,6 +189,42 @@ export const se_CreateAnnotationStoreCommand = async (input, context) => {
         body,
     });
 };
+export const se_CreateMultipartReadSetUploadCommand = async (input, context) => {
+    const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
+    const headers = {
+        "content-type": "application/json",
+    };
+    let resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + "/sequencestore/{sequenceStoreId}/upload";
+    resolvedPath = __resolvedPath(resolvedPath, input, "sequenceStoreId", () => input.sequenceStoreId, "{sequenceStoreId}", false);
+    let body;
+    body = JSON.stringify(take(input, {
+        clientToken: [],
+        description: [],
+        generatedFrom: [],
+        name: [],
+        referenceArn: [],
+        sampleId: [],
+        sourceFileType: [],
+        subjectId: [],
+        tags: (_) => _json(_),
+    }));
+    let { hostname: resolvedHostname } = await context.endpoint();
+    if (context.disableHostPrefix !== true) {
+        resolvedHostname = "control-storage-" + resolvedHostname;
+        if (!__isValidHostname(resolvedHostname)) {
+            throw new Error("ValidationError: prefixed hostname must be hostname compatible.");
+        }
+    }
+    return new __HttpRequest({
+        protocol,
+        hostname: resolvedHostname,
+        port,
+        method: "POST",
+        headers,
+        path: resolvedPath,
+        body,
+    });
+};
 export const se_CreateReferenceStoreCommand = async (input, context) => {
     const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
     const headers = {
@@ -175,6 +266,7 @@ export const se_CreateRunGroupCommand = async (input, context) => {
     body = JSON.stringify(take(input, {
         maxCpus: [],
         maxDuration: [],
+        maxGpus: [],
         maxRuns: [],
         name: [],
         requestId: [true, (_) => _ ?? generateIdempotencyToken()],
@@ -207,6 +299,7 @@ export const se_CreateSequenceStoreCommand = async (input, context) => {
     body = JSON.stringify(take(input, {
         clientToken: [],
         description: [],
+        fallbackLocation: [],
         name: [],
         sseConfig: (_) => _json(_),
         tags: (_) => _json(_),
@@ -267,6 +360,7 @@ export const se_CreateWorkflowCommand = async (input, context) => {
     const resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + "/workflow";
     let body;
     body = JSON.stringify(take(input, {
+        accelerators: [],
         definitionUri: [],
         definitionZip: (_) => context.base64Encoder(_),
         description: [],
@@ -1007,6 +1101,34 @@ export const se_ListAnnotationStoresCommand = async (input, context) => {
         body,
     });
 };
+export const se_ListMultipartReadSetUploadsCommand = async (input, context) => {
+    const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
+    const headers = {};
+    let resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + "/sequencestore/{sequenceStoreId}/uploads";
+    resolvedPath = __resolvedPath(resolvedPath, input, "sequenceStoreId", () => input.sequenceStoreId, "{sequenceStoreId}", false);
+    const query = map({
+        maxResults: [() => input.maxResults !== void 0, () => input.maxResults.toString()],
+        nextToken: [, input.nextToken],
+    });
+    let body;
+    let { hostname: resolvedHostname } = await context.endpoint();
+    if (context.disableHostPrefix !== true) {
+        resolvedHostname = "control-storage-" + resolvedHostname;
+        if (!__isValidHostname(resolvedHostname)) {
+            throw new Error("ValidationError: prefixed hostname must be hostname compatible.");
+        }
+    }
+    return new __HttpRequest({
+        protocol,
+        hostname: resolvedHostname,
+        port,
+        method: "POST",
+        headers,
+        path: resolvedPath,
+        query,
+        body,
+    });
+};
 export const se_ListReadSetActivationJobsCommand = async (input, context) => {
     const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
     const headers = {
@@ -1142,6 +1264,42 @@ export const se_ListReadSetsCommand = async (input, context) => {
         body,
     });
 };
+export const se_ListReadSetUploadPartsCommand = async (input, context) => {
+    const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
+    const headers = {
+        "content-type": "application/json",
+    };
+    let resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` +
+        "/sequencestore/{sequenceStoreId}/upload/{uploadId}/parts";
+    resolvedPath = __resolvedPath(resolvedPath, input, "sequenceStoreId", () => input.sequenceStoreId, "{sequenceStoreId}", false);
+    resolvedPath = __resolvedPath(resolvedPath, input, "uploadId", () => input.uploadId, "{uploadId}", false);
+    const query = map({
+        maxResults: [() => input.maxResults !== void 0, () => input.maxResults.toString()],
+        nextToken: [, input.nextToken],
+    });
+    let body;
+    body = JSON.stringify(take(input, {
+        filter: (_) => se_ReadSetUploadPartListFilter(_, context),
+        partSource: [],
+    }));
+    let { hostname: resolvedHostname } = await context.endpoint();
+    if (context.disableHostPrefix !== true) {
+        resolvedHostname = "control-storage-" + resolvedHostname;
+        if (!__isValidHostname(resolvedHostname)) {
+            throw new Error("ValidationError: prefixed hostname must be hostname compatible.");
+        }
+    }
+    return new __HttpRequest({
+        protocol,
+        hostname: resolvedHostname,
+        port,
+        method: "POST",
+        headers,
+        path: resolvedPath,
+        query,
+        body,
+    });
+};
 export const se_ListReferenceImportJobsCommand = async (input, context) => {
     const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
     const headers = {
@@ -1279,6 +1437,7 @@ export const se_ListRunsCommand = async (input, context) => {
         runGroupId: [, input.runGroupId],
         startingToken: [, input.startingToken],
         maxResults: [() => input.maxResults !== void 0, () => input.maxResults.toString()],
+        status: [, input.status],
     });
     let body;
     let { hostname: resolvedHostname } = await context.endpoint();
@@ -1486,6 +1645,7 @@ export const se_StartAnnotationImportJobCommand = async (input, context) => {
     const resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + "/import/annotation";
     let body;
     body = JSON.stringify(take(input, {
+        annotationFields: (_) => _json(_),
         destinationName: [],
         formatOptions: (_) => _json(_),
         items: (_) => _json(_),
@@ -1680,6 +1840,7 @@ export const se_StartVariantImportJobCommand = async (input, context) => {
     const resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + "/import/variant";
     let body;
     body = JSON.stringify(take(input, {
+        annotationFields: (_) => _json(_),
         destinationName: [],
         items: (_) => _json(_),
         roleArn: [],
@@ -1799,6 +1960,7 @@ export const se_UpdateRunGroupCommand = async (input, context) => {
     body = JSON.stringify(take(input, {
         maxCpus: [],
         maxDuration: [],
+        maxGpus: [],
         maxRuns: [],
         name: [],
     }));
@@ -1876,6 +2038,92 @@ export const se_UpdateWorkflowCommand = async (input, context) => {
         body,
     });
 };
+export const se_UploadReadSetPartCommand = async (input, context) => {
+    const { hostname, protocol = "https", port, path: basePath } = await context.endpoint();
+    const headers = {
+        "x-amz-content-sha256": "UNSIGNED-PAYLOAD",
+        "content-type": "application/octet-stream",
+    };
+    let resolvedPath = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` +
+        "/sequencestore/{sequenceStoreId}/upload/{uploadId}/part";
+    resolvedPath = __resolvedPath(resolvedPath, input, "sequenceStoreId", () => input.sequenceStoreId, "{sequenceStoreId}", false);
+    resolvedPath = __resolvedPath(resolvedPath, input, "uploadId", () => input.uploadId, "{uploadId}", false);
+    const query = map({
+        partSource: [, __expectNonNull(input.partSource, `partSource`)],
+        partNumber: [__expectNonNull(input.partNumber, `partNumber`) != null, () => input.partNumber.toString()],
+    });
+    let body;
+    if (input.payload !== undefined) {
+        body = input.payload;
+    }
+    let { hostname: resolvedHostname } = await context.endpoint();
+    if (context.disableHostPrefix !== true) {
+        resolvedHostname = "storage-" + resolvedHostname;
+        if (!__isValidHostname(resolvedHostname)) {
+            throw new Error("ValidationError: prefixed hostname must be hostname compatible.");
+        }
+    }
+    return new __HttpRequest({
+        protocol,
+        hostname: resolvedHostname,
+        port,
+        method: "PUT",
+        headers,
+        path: resolvedPath,
+        query,
+        body,
+    });
+};
+export const de_AbortMultipartReadSetUploadCommand = async (output, context) => {
+    if (output.statusCode !== 200 && output.statusCode >= 300) {
+        return de_AbortMultipartReadSetUploadCommandError(output, context);
+    }
+    const contents = map({
+        $metadata: deserializeMetadata(output),
+    });
+    await collectBody(output.body, context);
+    return contents;
+};
+const de_AbortMultipartReadSetUploadCommandError = async (output, context) => {
+    const parsedOutput = {
+        ...output,
+        body: await parseErrorBody(output.body, context),
+    };
+    const errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
+    switch (errorCode) {
+        case "AccessDeniedException":
+        case "com.amazonaws.omics#AccessDeniedException":
+            throw await de_AccessDeniedExceptionRes(parsedOutput, context);
+        case "InternalServerException":
+        case "com.amazonaws.omics#InternalServerException":
+            throw await de_InternalServerExceptionRes(parsedOutput, context);
+        case "NotSupportedOperationException":
+        case "com.amazonaws.omics#NotSupportedOperationException":
+            throw await de_NotSupportedOperationExceptionRes(parsedOutput, context);
+        case "RequestTimeoutException":
+        case "com.amazonaws.omics#RequestTimeoutException":
+            throw await de_RequestTimeoutExceptionRes(parsedOutput, context);
+        case "ResourceNotFoundException":
+        case "com.amazonaws.omics#ResourceNotFoundException":
+            throw await de_ResourceNotFoundExceptionRes(parsedOutput, context);
+        case "ServiceQuotaExceededException":
+        case "com.amazonaws.omics#ServiceQuotaExceededException":
+            throw await de_ServiceQuotaExceededExceptionRes(parsedOutput, context);
+        case "ThrottlingException":
+        case "com.amazonaws.omics#ThrottlingException":
+            throw await de_ThrottlingExceptionRes(parsedOutput, context);
+        case "ValidationException":
+        case "com.amazonaws.omics#ValidationException":
+            throw await de_ValidationExceptionRes(parsedOutput, context);
+        default:
+            const parsedBody = parsedOutput.body;
+            return throwDefaultError({
+                output,
+                parsedBody,
+                errorCode,
+            });
+    }
+};
 export const de_BatchDeleteReadSetCommand = async (output, context) => {
     if (output.statusCode !== 200 && output.statusCode >= 300) {
         return de_BatchDeleteReadSetCommandError(output, context);
@@ -2056,6 +2304,60 @@ const de_CancelVariantImportJobCommandError = async (output, context) => {
             });
     }
 };
+export const de_CompleteMultipartReadSetUploadCommand = async (output, context) => {
+    if (output.statusCode !== 200 && output.statusCode >= 300) {
+        return de_CompleteMultipartReadSetUploadCommandError(output, context);
+    }
+    const contents = map({
+        $metadata: deserializeMetadata(output),
+    });
+    const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body");
+    const doc = take(data, {
+        readSetId: __expectString,
+    });
+    Object.assign(contents, doc);
+    return contents;
+};
+const de_CompleteMultipartReadSetUploadCommandError = async (output, context) => {
+    const parsedOutput = {
+        ...output,
+        body: await parseErrorBody(output.body, context),
+    };
+    const errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
+    switch (errorCode) {
+        case "AccessDeniedException":
+        case "com.amazonaws.omics#AccessDeniedException":
+            throw await de_AccessDeniedExceptionRes(parsedOutput, context);
+        case "InternalServerException":
+        case "com.amazonaws.omics#InternalServerException":
+            throw await de_InternalServerExceptionRes(parsedOutput, context);
+        case "NotSupportedOperationException":
+        case "com.amazonaws.omics#NotSupportedOperationException":
+            throw await de_NotSupportedOperationExceptionRes(parsedOutput, context);
+        case "RequestTimeoutException":
+        case "com.amazonaws.omics#RequestTimeoutException":
+            throw await de_RequestTimeoutExceptionRes(parsedOutput, context);
+        case "ResourceNotFoundException":
+        case "com.amazonaws.omics#ResourceNotFoundException":
+            throw await de_ResourceNotFoundExceptionRes(parsedOutput, context);
+        case "ServiceQuotaExceededException":
+        case "com.amazonaws.omics#ServiceQuotaExceededException":
+            throw await de_ServiceQuotaExceededExceptionRes(parsedOutput, context);
+        case "ThrottlingException":
+        case "com.amazonaws.omics#ThrottlingException":
+            throw await de_ThrottlingExceptionRes(parsedOutput, context);
+        case "ValidationException":
+        case "com.amazonaws.omics#ValidationException":
+            throw await de_ValidationExceptionRes(parsedOutput, context);
+        default:
+            const parsedBody = parsedOutput.body;
+            return throwDefaultError({
+                output,
+                parsedBody,
+                errorCode,
+            });
+    }
+};
 export const de_CreateAnnotationStoreCommand = async (output, context) => {
     if (output.statusCode !== 200 && output.statusCode >= 300) {
         return de_CreateAnnotationStoreCommandError(output, context);
@@ -2113,6 +2415,70 @@ const de_CreateAnnotationStoreCommandError = async (output, context) => {
             });
     }
 };
+export const de_CreateMultipartReadSetUploadCommand = async (output, context) => {
+    if (output.statusCode !== 200 && output.statusCode >= 300) {
+        return de_CreateMultipartReadSetUploadCommandError(output, context);
+    }
+    const contents = map({
+        $metadata: deserializeMetadata(output),
+    });
+    const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body");
+    const doc = take(data, {
+        creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
+        description: __expectString,
+        generatedFrom: __expectString,
+        name: __expectString,
+        referenceArn: __expectString,
+        sampleId: __expectString,
+        sequenceStoreId: __expectString,
+        sourceFileType: __expectString,
+        subjectId: __expectString,
+        tags: _json,
+        uploadId: __expectString,
+    });
+    Object.assign(contents, doc);
+    return contents;
+};
+const de_CreateMultipartReadSetUploadCommandError = async (output, context) => {
+    const parsedOutput = {
+        ...output,
+        body: await parseErrorBody(output.body, context),
+    };
+    const errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
+    switch (errorCode) {
+        case "AccessDeniedException":
+        case "com.amazonaws.omics#AccessDeniedException":
+            throw await de_AccessDeniedExceptionRes(parsedOutput, context);
+        case "InternalServerException":
+        case "com.amazonaws.omics#InternalServerException":
+            throw await de_InternalServerExceptionRes(parsedOutput, context);
+        case "NotSupportedOperationException":
+        case "com.amazonaws.omics#NotSupportedOperationException":
+            throw await de_NotSupportedOperationExceptionRes(parsedOutput, context);
+        case "RequestTimeoutException":
+        case "com.amazonaws.omics#RequestTimeoutException":
+            throw await de_RequestTimeoutExceptionRes(parsedOutput, context);
+        case "ResourceNotFoundException":
+        case "com.amazonaws.omics#ResourceNotFoundException":
+            throw await de_ResourceNotFoundExceptionRes(parsedOutput, context);
+        case "ServiceQuotaExceededException":
+        case "com.amazonaws.omics#ServiceQuotaExceededException":
+            throw await de_ServiceQuotaExceededExceptionRes(parsedOutput, context);
+        case "ThrottlingException":
+        case "com.amazonaws.omics#ThrottlingException":
+            throw await de_ThrottlingExceptionRes(parsedOutput, context);
+        case "ValidationException":
+        case "com.amazonaws.omics#ValidationException":
+            throw await de_ValidationExceptionRes(parsedOutput, context);
+        default:
+            const parsedBody = parsedOutput.body;
+            return throwDefaultError({
+                output,
+                parsedBody,
+                errorCode,
+            });
+    }
+};
 export const de_CreateReferenceStoreCommand = async (output, context) => {
     if (output.statusCode !== 200 && output.statusCode >= 300) {
         return de_CreateReferenceStoreCommandError(output, context);
@@ -2234,6 +2600,7 @@ export const de_CreateSequenceStoreCommand = async (output, context) => {
         arn: __expectString,
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         description: __expectString,
+        fallbackLocation: __expectString,
         id: __expectString,
         name: __expectString,
         sseConfig: _json,
@@ -2783,6 +3150,7 @@ export const de_GetAnnotationImportJobCommand = async (output, context) => {
     });
     const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body");
     const doc = take(data, {
+        annotationFields: _json,
         completionTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         destinationName: __expectString,
@@ -3124,6 +3492,7 @@ export const de_GetReadSetMetadataCommand = async (output, context) => {
         sequenceInformation: _json,
         sequenceStoreId: __expectString,
         status: __expectString,
+        statusMessage: __expectString,
         subjectId: __expectString,
     });
     Object.assign(contents, doc);
@@ -3386,6 +3755,7 @@ export const de_GetRunCommand = async (output, context) => {
     });
     const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body");
     const doc = take(data, {
+        accelerators: __expectString,
         arn: __expectString,
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         definition: __expectString,
@@ -3467,6 +3837,7 @@ export const de_GetRunGroupCommand = async (output, context) => {
         id: __expectString,
         maxCpus: __expectInt32,
         maxDuration: __expectInt32,
+        maxGpus: __expectInt32,
         maxRuns: __expectInt32,
         name: __expectString,
         tags: _json,
@@ -3525,6 +3896,7 @@ export const de_GetRunTaskCommand = async (output, context) => {
     const doc = take(data, {
         cpus: __expectInt32,
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
+        gpus: __expectInt32,
         logStream: __expectString,
         memory: __expectInt32,
         name: __expectString,
@@ -3589,6 +3961,7 @@ export const de_GetSequenceStoreCommand = async (output, context) => {
         arn: __expectString,
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         description: __expectString,
+        fallbackLocation: __expectString,
         id: __expectString,
         name: __expectString,
         sseConfig: _json,
@@ -3639,6 +4012,7 @@ export const de_GetVariantImportJobCommand = async (output, context) => {
     });
     const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body");
     const doc = take(data, {
+        annotationFields: _json,
        completionTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         destinationName: __expectString,
@@ -3749,6 +4123,7 @@ export const de_GetWorkflowCommand = async (output, context) => {
     });
     const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body");
     const doc = take(data, {
+        accelerators: __expectString,
         arn: __expectString,
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         definition: __expectString,
@@ -3757,6 +4132,7 @@ export const de_GetWorkflowCommand = async (output, context) => {
         engine: __expectString,
         id: __expectString,
         main: __expectString,
+        metadata: _json,
         name: __expectString,
         parameterTemplate: _json,
         status: __expectString,
@@ -3900,6 +4276,61 @@ const de_ListAnnotationStoresCommandError = async (output, context) => {
             });
     }
 };
+export const de_ListMultipartReadSetUploadsCommand = async (output, context) => {
+    if (output.statusCode !== 200 && output.statusCode >= 300) {
+        return de_ListMultipartReadSetUploadsCommandError(output, context);
+    }
+    const contents = map({
+        $metadata: deserializeMetadata(output),
+    });
+    const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body");
+    const doc = take(data, {
+        nextToken: __expectString,
+        uploads: (_) => de_MultipartReadSetUploadList(_, context),
+    });
+    Object.assign(contents, doc);
+    return contents;
+};
+const de_ListMultipartReadSetUploadsCommandError = async (output, context) => {
+    const parsedOutput = {
+        ...output,
+        body: await parseErrorBody(output.body, context),
+    };
+    const errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
+    switch (errorCode) {
+        case "AccessDeniedException":
+        case "com.amazonaws.omics#AccessDeniedException":
+            throw await de_AccessDeniedExceptionRes(parsedOutput, context);
+        case "InternalServerException":
+        case "com.amazonaws.omics#InternalServerException":
+            throw await de_InternalServerExceptionRes(parsedOutput, context);
+        case "NotSupportedOperationException":
+        case "com.amazonaws.omics#NotSupportedOperationException":
+            throw await de_NotSupportedOperationExceptionRes(parsedOutput, context);
+        case "RequestTimeoutException":
+        case "com.amazonaws.omics#RequestTimeoutException":
+            throw await de_RequestTimeoutExceptionRes(parsedOutput, context);
+        case "ResourceNotFoundException":
+        case "com.amazonaws.omics#ResourceNotFoundException":
+            throw await de_ResourceNotFoundExceptionRes(parsedOutput, context);
+        case "ServiceQuotaExceededException":
+        case "com.amazonaws.omics#ServiceQuotaExceededException":
+            throw await de_ServiceQuotaExceededExceptionRes(parsedOutput, context);
+        case "ThrottlingException":
+        case "com.amazonaws.omics#ThrottlingException":
+            throw await de_ThrottlingExceptionRes(parsedOutput, context);
+        case "ValidationException":
+        case "com.amazonaws.omics#ValidationException":
+            throw await de_ValidationExceptionRes(parsedOutput, context);
+        default:
+            const parsedBody = parsedOutput.body;
+            return throwDefaultError({
+                output,
+                parsedBody,
+                errorCode,
+            });
+    }
+};
 export const de_ListReadSetActivationJobsCommand = async (output, context) => {
     if (output.statusCode !== 200 && output.statusCode >= 300) {
         return de_ListReadSetActivationJobsCommandError(output, context);
@@ -4096,6 +4527,61 @@ const de_ListReadSetsCommandError = async (output, context) => {
             });
     }
 };
+export const de_ListReadSetUploadPartsCommand = async (output, context) => {
+    if (output.statusCode !== 200 && output.statusCode >= 300) {
+        return de_ListReadSetUploadPartsCommandError(output, context);
+    }
+    const contents = map({
+        $metadata: deserializeMetadata(output),
+    });
+    const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body");
+    const doc = take(data, {
+        nextToken: __expectString,
+        parts: (_) => de_ReadSetUploadPartList(_, context),
+    });
+    Object.assign(contents, doc);
+    return contents;
+};
+const de_ListReadSetUploadPartsCommandError = async (output, context) => {
+    const parsedOutput = {
+        ...output,
+        body: await parseErrorBody(output.body, context),
+    };
+    const errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
+    switch (errorCode) {
+        case "AccessDeniedException":
+        case "com.amazonaws.omics#AccessDeniedException":
+            throw await de_AccessDeniedExceptionRes(parsedOutput, context);
+        case "InternalServerException":
+        case "com.amazonaws.omics#InternalServerException":
+            throw await de_InternalServerExceptionRes(parsedOutput, context);
+        case "NotSupportedOperationException":
+        case "com.amazonaws.omics#NotSupportedOperationException":
+            throw await de_NotSupportedOperationExceptionRes(parsedOutput, context);
+        case "RequestTimeoutException":
+        case "com.amazonaws.omics#RequestTimeoutException":
+            throw await de_RequestTimeoutExceptionRes(parsedOutput, context);
+        case "ResourceNotFoundException":
+        case "com.amazonaws.omics#ResourceNotFoundException":
+            throw await de_ResourceNotFoundExceptionRes(parsedOutput, context);
+        case "ServiceQuotaExceededException":
+        case "com.amazonaws.omics#ServiceQuotaExceededException":
+            throw await de_ServiceQuotaExceededExceptionRes(parsedOutput, context);
+        case "ThrottlingException":
+        case "com.amazonaws.omics#ThrottlingException":
+            throw await de_ThrottlingExceptionRes(parsedOutput, context);
+        case "ValidationException":
+        case "com.amazonaws.omics#ValidationException":
+            throw await de_ValidationExceptionRes(parsedOutput, context);
+        default:
+            const parsedBody = parsedOutput.body;
+            return throwDefaultError({
+                output,
+                parsedBody,
+                errorCode,
+            });
+    }
+};
 export const de_ListReferenceImportJobsCommand = async (output, context) => {
     if (output.statusCode !== 200 && output.statusCode >= 300) {
         return de_ListReferenceImportJobsCommandError(output, context);
@@ -5328,6 +5814,60 @@ const de_UpdateWorkflowCommandError = async (output, context) => {
             });
     }
 };
+export const de_UploadReadSetPartCommand = async (output, context) => {
+    if (output.statusCode !== 200 && output.statusCode >= 300) {
+        return de_UploadReadSetPartCommandError(output, context);
+    }
+    const contents = map({
+        $metadata: deserializeMetadata(output),
+    });
+    const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body");
+    const doc = take(data, {
+        checksum: __expectString,
+    });
+    Object.assign(contents, doc);
+    return contents;
+};
+const de_UploadReadSetPartCommandError = async (output, context) => {
+    const parsedOutput = {
+        ...output,
+        body: await parseErrorBody(output.body, context),
+    };
+    const errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
+    switch (errorCode) {
+        case "AccessDeniedException":
+        case "com.amazonaws.omics#AccessDeniedException":
+            throw await de_AccessDeniedExceptionRes(parsedOutput, context);
+        case "InternalServerException":
+        case "com.amazonaws.omics#InternalServerException":
+            throw await de_InternalServerExceptionRes(parsedOutput, context);
+        case "NotSupportedOperationException":
+        case "com.amazonaws.omics#NotSupportedOperationException":
+            throw await de_NotSupportedOperationExceptionRes(parsedOutput, context);
+        case "RequestTimeoutException":
+        case "com.amazonaws.omics#RequestTimeoutException":
+            throw await de_RequestTimeoutExceptionRes(parsedOutput, context);
+        case "ResourceNotFoundException":
+        case "com.amazonaws.omics#ResourceNotFoundException":
+            throw await de_ResourceNotFoundExceptionRes(parsedOutput, context);
+        case "ServiceQuotaExceededException":
+        case "com.amazonaws.omics#ServiceQuotaExceededException":
+            throw await de_ServiceQuotaExceededExceptionRes(parsedOutput, context);
+        case "ThrottlingException":
+        case "com.amazonaws.omics#ThrottlingException":
+            throw await de_ThrottlingExceptionRes(parsedOutput, context);
+        case "ValidationException":
+        case "com.amazonaws.omics#ValidationException":
+            throw await de_ValidationExceptionRes(parsedOutput, context);
+        default:
+            const parsedBody = parsedOutput.body;
+            return throwDefaultError({
+                output,
+                parsedBody,
+                errorCode,
+            });
+    }
+};
 const throwDefaultError = withBaseException(__BaseException);
 const de_AccessDeniedExceptionRes = async (parsedOutput, context) => {
     const contents = map({});
@@ -5368,6 +5908,19 @@ const de_InternalServerExceptionRes = async (parsedOutput, context) => {
     });
     return __decorateServiceException(exception, parsedOutput.body);
 };
+const de_NotSupportedOperationExceptionRes = async (parsedOutput, context) => {
+    const contents = map({});
+    const data = parsedOutput.body;
+    const doc = take(data, {
+        message: __expectString,
+    });
+    Object.assign(contents, doc);
+    const exception = new NotSupportedOperationException({
+        $metadata: deserializeMetadata(parsedOutput),
+        ...contents,
+    });
+    return __decorateServiceException(exception, parsedOutput.body);
+};
 const de_RangeNotSatisfiableExceptionRes = async (parsedOutput, context) => {
     const contents = map({});
     const data = parsedOutput.body;
@@ -5478,9 +6031,18 @@ const se_ReadSetFilter = (input, context) => {
     return take(input, {
         createdAfter: (_) => _.toISOString().split(".")[0] + "Z",
         createdBefore: (_) => _.toISOString().split(".")[0] + "Z",
+        generatedFrom: [],
         name: [],
         referenceArn: [],
+        sampleId: [],
         status: [],
+        subjectId: [],
+    });
+};
+const se_ReadSetUploadPartListFilter = (input, context) => {
+    return take(input, {
+        createdAfter: (_) => _.toISOString().split(".")[0] + "Z",
+        createdBefore: (_) => _.toISOString().split(".")[0] + "Z",
     });
 };
 const se_ReferenceFilter = (input, context) => {
@@ -5527,6 +6089,7 @@ const de_ActivateReadSetJobList = (output, context) => {
 };
 const de_AnnotationImportJobItem = (output, context) => {
     return take(output, {
+        annotationFields: _json,
         completionTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         destinationName: __expectString,
@@ -5623,6 +6186,29 @@ const de_ImportReferenceJobList = (output, context) => {
     });
     return retVal;
 };
+const de_MultipartReadSetUploadList = (output, context) => {
+    const retVal = (output || [])
+        .filter((e) => e != null)
+        .map((entry) => {
+        return de_MultipartReadSetUploadListItem(entry, context);
+    });
+    return retVal;
+};
+const de_MultipartReadSetUploadListItem = (output, context) => {
+    return take(output, {
+        creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
+        description: __expectString,
+        generatedFrom: __expectString,
+        name: __expectString,
+        referenceArn: __expectString,
+        sampleId: __expectString,
+        sequenceStoreId: __expectString,
+        sourceFileType: __expectString,
+        subjectId: __expectString,
+        tags: _json,
+        uploadId: __expectString,
+    });
+};
 const de_ReadSetList = (output, context) => {
     const retVal = (output || [])
         .filter((e) => e != null)
@@ -5644,9 +6230,28 @@ const de_ReadSetListItem = (output, context) => {
         sequenceInformation: _json,
         sequenceStoreId: __expectString,
         status: __expectString,
+        statusMessage: __expectString,
         subjectId: __expectString,
     });
 };
+const de_ReadSetUploadPartList = (output, context) => {
+    const retVal = (output || [])
+        .filter((e) => e != null)
+        .map((entry) => {
+        return de_ReadSetUploadPartListItem(entry, context);
+    });
+    return retVal;
+};
+const de_ReadSetUploadPartListItem = (output, context) => {
+    return take(output, {
+        checksum: __expectString,
+        creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
+        lastUpdatedTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
+        partNumber: __expectInt32,
+        partSize: __expectLong,
+        partSource: __expectString,
+    });
+};
 const de_ReferenceList = (output, context) => {
     const retVal = (output || [])
         .filter((e) => e != null)
@@ -5701,6 +6306,7 @@ const de_RunGroupListItem = (output, context) => {
         id: __expectString,
         maxCpus: __expectInt32,
         maxDuration: __expectInt32,
+        maxGpus: __expectInt32,
         maxRuns: __expectInt32,
         name: __expectString,
     });
@@ -5735,6 +6341,7 @@ const de_SequenceStoreDetail = (output, context) => {
         arn: __expectString,
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         description: __expectString,
+        fallbackLocation: __expectString,
         id: __expectString,
         name: __expectString,
         sseConfig: _json,
@@ -5760,6 +6367,7 @@ const de_TaskListItem = (output, context) => {
     return take(output, {
         cpus: __expectInt32,
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
+        gpus: __expectInt32,
         memory: __expectInt32,
         name: __expectString,
         startTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
@@ -5770,6 +6378,7 @@ const de_TaskListItem = (output, context) => {
 };
 const de_VariantImportJobItem = (output, context) => {
     return take(output, {
+        annotationFields: _json,
         completionTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         destinationName: __expectString,
@@ -5825,6 +6434,7 @@ const de_WorkflowListItem = (output, context) => {
         creationTime: (_) => __expectNonNull(__parseRfc3339DateTimeWithOffset(_)),
         digest: __expectString,
         id: __expectString,
+        metadata: _json,
         name: __expectString,
         status: __expectString,
         type: __expectString,