@aws-sdk/client-timestream-write 3.278.0 → 3.280.0
This diff compares the contents of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
- package/README.md +19 -13
- package/dist-cjs/TimestreamWrite.js +60 -0
- package/dist-cjs/commands/CreateBatchLoadTaskCommand.js +48 -0
- package/dist-cjs/commands/DescribeBatchLoadTaskCommand.js +48 -0
- package/dist-cjs/commands/ListBatchLoadTasksCommand.js +48 -0
- package/dist-cjs/commands/ResumeBatchLoadTaskCommand.js +48 -0
- package/dist-cjs/commands/index.js +4 -0
- package/dist-cjs/endpoint/ruleset.js +3 -3
- package/dist-cjs/models/models_0.js +153 -37
- package/dist-cjs/pagination/ListBatchLoadTasksPaginator.js +36 -0
- package/dist-cjs/pagination/index.js +1 -0
- package/dist-cjs/protocols/Aws_json1_0.js +626 -1
- package/dist-es/TimestreamWrite.js +60 -0
- package/dist-es/commands/CreateBatchLoadTaskCommand.js +44 -0
- package/dist-es/commands/DescribeBatchLoadTaskCommand.js +44 -0
- package/dist-es/commands/ListBatchLoadTasksCommand.js +44 -0
- package/dist-es/commands/ResumeBatchLoadTaskCommand.js +44 -0
- package/dist-es/commands/index.js +4 -0
- package/dist-es/endpoint/ruleset.js +3 -3
- package/dist-es/models/models_0.js +127 -34
- package/dist-es/pagination/ListBatchLoadTasksPaginator.js +32 -0
- package/dist-es/pagination/index.js +1 -0
- package/dist-es/protocols/Aws_json1_0.js +617 -0
- package/dist-types/TimestreamWrite.d.ts +154 -131
- package/dist-types/TimestreamWriteClient.d.ts +18 -8
- package/dist-types/commands/CreateBatchLoadTaskCommand.d.ts +45 -0
- package/dist-types/commands/CreateDatabaseCommand.d.ts +3 -6
- package/dist-types/commands/CreateTableCommand.d.ts +6 -9
- package/dist-types/commands/DeleteDatabaseCommand.d.ts +9 -11
- package/dist-types/commands/DeleteTableCommand.d.ts +7 -8
- package/dist-types/commands/DescribeBatchLoadTaskCommand.d.ts +40 -0
- package/dist-types/commands/DescribeDatabaseCommand.d.ts +4 -4
- package/dist-types/commands/DescribeEndpointsCommand.d.ts +10 -8
- package/dist-types/commands/DescribeTableCommand.d.ts +4 -5
- package/dist-types/commands/ListBatchLoadTasksCommand.d.ts +39 -0
- package/dist-types/commands/ListDatabasesCommand.d.ts +3 -4
- package/dist-types/commands/ListTablesCommand.d.ts +3 -4
- package/dist-types/commands/ListTagsForResourceCommand.d.ts +1 -3
- package/dist-types/commands/ResumeBatchLoadTaskCommand.d.ts +38 -0
- package/dist-types/commands/TagResourceCommand.d.ts +3 -5
- package/dist-types/commands/UntagResourceCommand.d.ts +1 -3
- package/dist-types/commands/UpdateDatabaseCommand.d.ts +5 -7
- package/dist-types/commands/UpdateTableCommand.d.ts +6 -8
- package/dist-types/commands/WriteRecordsCommand.d.ts +36 -41
- package/dist-types/commands/index.d.ts +4 -0
- package/dist-types/models/models_0.d.ts +786 -274
- package/dist-types/pagination/ListBatchLoadTasksPaginator.d.ts +4 -0
- package/dist-types/pagination/index.d.ts +1 -0
- package/dist-types/protocols/Aws_json1_0.d.ts +12 -0
- package/dist-types/ts3.4/TimestreamWrite.d.ts +68 -0
- package/dist-types/ts3.4/TimestreamWriteClient.d.ts +24 -0
- package/dist-types/ts3.4/commands/CreateBatchLoadTaskCommand.d.ts +38 -0
- package/dist-types/ts3.4/commands/DescribeBatchLoadTaskCommand.d.ts +41 -0
- package/dist-types/ts3.4/commands/ListBatchLoadTasksCommand.d.ts +38 -0
- package/dist-types/ts3.4/commands/ResumeBatchLoadTaskCommand.d.ts +38 -0
- package/dist-types/ts3.4/commands/index.d.ts +4 -0
- package/dist-types/ts3.4/models/models_0.d.ts +253 -43
- package/dist-types/ts3.4/pagination/ListBatchLoadTasksPaginator.d.ts +11 -0
- package/dist-types/ts3.4/pagination/index.d.ts +1 -0
- package/dist-types/ts3.4/protocols/Aws_json1_0.d.ts +48 -0
- package/package.json +9 -7
package/dist-es/TimestreamWrite.js
@@ -1,13 +1,17 @@
+import { CreateBatchLoadTaskCommand, } from "./commands/CreateBatchLoadTaskCommand";
 import { CreateDatabaseCommand, } from "./commands/CreateDatabaseCommand";
 import { CreateTableCommand } from "./commands/CreateTableCommand";
 import { DeleteDatabaseCommand, } from "./commands/DeleteDatabaseCommand";
 import { DeleteTableCommand } from "./commands/DeleteTableCommand";
+import { DescribeBatchLoadTaskCommand, } from "./commands/DescribeBatchLoadTaskCommand";
 import { DescribeDatabaseCommand, } from "./commands/DescribeDatabaseCommand";
 import { DescribeEndpointsCommand, } from "./commands/DescribeEndpointsCommand";
 import { DescribeTableCommand, } from "./commands/DescribeTableCommand";
+import { ListBatchLoadTasksCommand, } from "./commands/ListBatchLoadTasksCommand";
 import { ListDatabasesCommand, } from "./commands/ListDatabasesCommand";
 import { ListTablesCommand } from "./commands/ListTablesCommand";
 import { ListTagsForResourceCommand, } from "./commands/ListTagsForResourceCommand";
+import { ResumeBatchLoadTaskCommand, } from "./commands/ResumeBatchLoadTaskCommand";
 import { TagResourceCommand } from "./commands/TagResourceCommand";
 import { UntagResourceCommand, } from "./commands/UntagResourceCommand";
 import { UpdateDatabaseCommand, } from "./commands/UpdateDatabaseCommand";
@@ -15,6 +19,20 @@ import { UpdateTableCommand } from "./commands/UpdateTableCommand";
 import { WriteRecordsCommand, } from "./commands/WriteRecordsCommand";
 import { TimestreamWriteClient } from "./TimestreamWriteClient";
 export class TimestreamWrite extends TimestreamWriteClient {
+    createBatchLoadTask(args, optionsOrCb, cb) {
+        const command = new CreateBatchLoadTaskCommand(args);
+        if (typeof optionsOrCb === "function") {
+            this.send(command, optionsOrCb);
+        }
+        else if (typeof cb === "function") {
+            if (typeof optionsOrCb !== "object")
+                throw new Error(`Expect http options but get ${typeof optionsOrCb}`);
+            this.send(command, optionsOrCb || {}, cb);
+        }
+        else {
+            return this.send(command, optionsOrCb);
+        }
+    }
     createDatabase(args, optionsOrCb, cb) {
         const command = new CreateDatabaseCommand(args);
         if (typeof optionsOrCb === "function") {
@@ -71,6 +89,20 @@ export class TimestreamWrite extends TimestreamWriteClient {
             return this.send(command, optionsOrCb);
         }
     }
+    describeBatchLoadTask(args, optionsOrCb, cb) {
+        const command = new DescribeBatchLoadTaskCommand(args);
+        if (typeof optionsOrCb === "function") {
+            this.send(command, optionsOrCb);
+        }
+        else if (typeof cb === "function") {
+            if (typeof optionsOrCb !== "object")
+                throw new Error(`Expect http options but get ${typeof optionsOrCb}`);
+            this.send(command, optionsOrCb || {}, cb);
+        }
+        else {
+            return this.send(command, optionsOrCb);
+        }
+    }
     describeDatabase(args, optionsOrCb, cb) {
         const command = new DescribeDatabaseCommand(args);
         if (typeof optionsOrCb === "function") {
@@ -113,6 +145,20 @@ export class TimestreamWrite extends TimestreamWriteClient {
             return this.send(command, optionsOrCb);
         }
     }
+    listBatchLoadTasks(args, optionsOrCb, cb) {
+        const command = new ListBatchLoadTasksCommand(args);
+        if (typeof optionsOrCb === "function") {
+            this.send(command, optionsOrCb);
+        }
+        else if (typeof cb === "function") {
+            if (typeof optionsOrCb !== "object")
+                throw new Error(`Expect http options but get ${typeof optionsOrCb}`);
+            this.send(command, optionsOrCb || {}, cb);
+        }
+        else {
+            return this.send(command, optionsOrCb);
+        }
+    }
     listDatabases(args, optionsOrCb, cb) {
         const command = new ListDatabasesCommand(args);
         if (typeof optionsOrCb === "function") {
@@ -155,6 +201,20 @@ export class TimestreamWrite extends TimestreamWriteClient {
             return this.send(command, optionsOrCb);
         }
     }
+    resumeBatchLoadTask(args, optionsOrCb, cb) {
+        const command = new ResumeBatchLoadTaskCommand(args);
+        if (typeof optionsOrCb === "function") {
+            this.send(command, optionsOrCb);
+        }
+        else if (typeof cb === "function") {
+            if (typeof optionsOrCb !== "object")
+                throw new Error(`Expect http options but get ${typeof optionsOrCb}`);
+            this.send(command, optionsOrCb || {}, cb);
+        }
+        else {
+            return this.send(command, optionsOrCb);
+        }
+    }
     tagResource(args, optionsOrCb, cb) {
         const command = new TagResourceCommand(args);
         if (typeof optionsOrCb === "function") {
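The four new aggregated-client methods (`createBatchLoadTask`, `describeBatchLoadTask`, `listBatchLoadTasks`, `resumeBatchLoadTask`) reuse the existing promise-or-callback dispatch: with no callback they return the promise from `send()`, and with a callback they invoke it instead. A minimal usage sketch against `describeBatchLoadTask`; the region, task ID, and the response fields referenced here are placeholders based on the Timestream Write DescribeBatchLoadTask API, not part of this diff:

```js
import { TimestreamWrite } from "@aws-sdk/client-timestream-write";

const client = new TimestreamWrite({ region: "us-east-1" }); // placeholder region

// Promise style: no callback, so the method returns this.send(command, options).
const { BatchLoadTaskDescription } = await client.describeBatchLoadTask({
  TaskId: "EXAMPLE-TASK-ID", // placeholder task ID
});
console.log(BatchLoadTaskDescription?.TaskStatus);

// Callback style: the second argument is a function, so send() is invoked with it
// and nothing is returned.
client.describeBatchLoadTask({ TaskId: "EXAMPLE-TASK-ID" }, (err, data) => {
  if (err) console.error(err);
  else console.log(data?.BatchLoadTaskDescription?.TaskStatus);
});
```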
package/dist-es/commands/CreateBatchLoadTaskCommand.js
@@ -0,0 +1,44 @@
+import { getEndpointPlugin } from "@aws-sdk/middleware-endpoint";
+import { getEndpointDiscoveryPlugin } from "@aws-sdk/middleware-endpoint-discovery";
+import { getSerdePlugin } from "@aws-sdk/middleware-serde";
+import { Command as $Command } from "@aws-sdk/smithy-client";
+import { CreateBatchLoadTaskRequestFilterSensitiveLog, CreateBatchLoadTaskResponseFilterSensitiveLog, } from "../models/models_0";
+import { deserializeAws_json1_0CreateBatchLoadTaskCommand, serializeAws_json1_0CreateBatchLoadTaskCommand, } from "../protocols/Aws_json1_0";
+export class CreateBatchLoadTaskCommand extends $Command {
+    constructor(input) {
+        super();
+        this.input = input;
+    }
+    static getEndpointParameterInstructions() {
+        return {
+            UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" },
+            Endpoint: { type: "builtInParams", name: "endpoint" },
+            Region: { type: "builtInParams", name: "region" },
+            UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" },
+        };
+    }
+    resolveMiddleware(clientStack, configuration, options) {
+        this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
+        this.middlewareStack.use(getEndpointPlugin(configuration, CreateBatchLoadTaskCommand.getEndpointParameterInstructions()));
+        this.middlewareStack.use(getEndpointDiscoveryPlugin(configuration, { clientStack, options, isDiscoveredEndpointRequired: true }));
+        const stack = clientStack.concat(this.middlewareStack);
+        const { logger } = configuration;
+        const clientName = "TimestreamWriteClient";
+        const commandName = "CreateBatchLoadTaskCommand";
+        const handlerExecutionContext = {
+            logger,
+            clientName,
+            commandName,
+            inputFilterSensitiveLog: CreateBatchLoadTaskRequestFilterSensitiveLog,
+            outputFilterSensitiveLog: CreateBatchLoadTaskResponseFilterSensitiveLog,
+        };
+        const { requestHandler } = configuration;
+        return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext);
+    }
+    serialize(input, context) {
+        return serializeAws_json1_0CreateBatchLoadTaskCommand(input, context);
+    }
+    deserialize(output, context) {
+        return deserializeAws_json1_0CreateBatchLoadTaskCommand(output, context);
+    }
+}
package/dist-es/commands/DescribeBatchLoadTaskCommand.js
@@ -0,0 +1,44 @@
+import { getEndpointPlugin } from "@aws-sdk/middleware-endpoint";
+import { getEndpointDiscoveryPlugin } from "@aws-sdk/middleware-endpoint-discovery";
+import { getSerdePlugin } from "@aws-sdk/middleware-serde";
+import { Command as $Command } from "@aws-sdk/smithy-client";
+import { DescribeBatchLoadTaskRequestFilterSensitiveLog, DescribeBatchLoadTaskResponseFilterSensitiveLog, } from "../models/models_0";
+import { deserializeAws_json1_0DescribeBatchLoadTaskCommand, serializeAws_json1_0DescribeBatchLoadTaskCommand, } from "../protocols/Aws_json1_0";
+export class DescribeBatchLoadTaskCommand extends $Command {
+    constructor(input) {
+        super();
+        this.input = input;
+    }
+    static getEndpointParameterInstructions() {
+        return {
+            UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" },
+            Endpoint: { type: "builtInParams", name: "endpoint" },
+            Region: { type: "builtInParams", name: "region" },
+            UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" },
+        };
+    }
+    resolveMiddleware(clientStack, configuration, options) {
+        this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
+        this.middlewareStack.use(getEndpointPlugin(configuration, DescribeBatchLoadTaskCommand.getEndpointParameterInstructions()));
+        this.middlewareStack.use(getEndpointDiscoveryPlugin(configuration, { clientStack, options, isDiscoveredEndpointRequired: true }));
+        const stack = clientStack.concat(this.middlewareStack);
+        const { logger } = configuration;
+        const clientName = "TimestreamWriteClient";
+        const commandName = "DescribeBatchLoadTaskCommand";
+        const handlerExecutionContext = {
+            logger,
+            clientName,
+            commandName,
+            inputFilterSensitiveLog: DescribeBatchLoadTaskRequestFilterSensitiveLog,
+            outputFilterSensitiveLog: DescribeBatchLoadTaskResponseFilterSensitiveLog,
+        };
+        const { requestHandler } = configuration;
+        return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext);
+    }
+    serialize(input, context) {
+        return serializeAws_json1_0DescribeBatchLoadTaskCommand(input, context);
+    }
+    deserialize(output, context) {
+        return deserializeAws_json1_0DescribeBatchLoadTaskCommand(output, context);
+    }
+}
package/dist-es/commands/ListBatchLoadTasksCommand.js
@@ -0,0 +1,44 @@
+import { getEndpointPlugin } from "@aws-sdk/middleware-endpoint";
+import { getEndpointDiscoveryPlugin } from "@aws-sdk/middleware-endpoint-discovery";
+import { getSerdePlugin } from "@aws-sdk/middleware-serde";
+import { Command as $Command } from "@aws-sdk/smithy-client";
+import { ListBatchLoadTasksRequestFilterSensitiveLog, ListBatchLoadTasksResponseFilterSensitiveLog, } from "../models/models_0";
+import { deserializeAws_json1_0ListBatchLoadTasksCommand, serializeAws_json1_0ListBatchLoadTasksCommand, } from "../protocols/Aws_json1_0";
+export class ListBatchLoadTasksCommand extends $Command {
+    constructor(input) {
+        super();
+        this.input = input;
+    }
+    static getEndpointParameterInstructions() {
+        return {
+            UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" },
+            Endpoint: { type: "builtInParams", name: "endpoint" },
+            Region: { type: "builtInParams", name: "region" },
+            UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" },
+        };
+    }
+    resolveMiddleware(clientStack, configuration, options) {
+        this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
+        this.middlewareStack.use(getEndpointPlugin(configuration, ListBatchLoadTasksCommand.getEndpointParameterInstructions()));
+        this.middlewareStack.use(getEndpointDiscoveryPlugin(configuration, { clientStack, options, isDiscoveredEndpointRequired: true }));
+        const stack = clientStack.concat(this.middlewareStack);
+        const { logger } = configuration;
+        const clientName = "TimestreamWriteClient";
+        const commandName = "ListBatchLoadTasksCommand";
+        const handlerExecutionContext = {
+            logger,
+            clientName,
+            commandName,
+            inputFilterSensitiveLog: ListBatchLoadTasksRequestFilterSensitiveLog,
+            outputFilterSensitiveLog: ListBatchLoadTasksResponseFilterSensitiveLog,
+        };
+        const { requestHandler } = configuration;
+        return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext);
+    }
+    serialize(input, context) {
+        return serializeAws_json1_0ListBatchLoadTasksCommand(input, context);
+    }
+    deserialize(output, context) {
+        return deserializeAws_json1_0ListBatchLoadTasksCommand(output, context);
+    }
+}
package/dist-es/commands/ResumeBatchLoadTaskCommand.js
@@ -0,0 +1,44 @@
+import { getEndpointPlugin } from "@aws-sdk/middleware-endpoint";
+import { getEndpointDiscoveryPlugin } from "@aws-sdk/middleware-endpoint-discovery";
+import { getSerdePlugin } from "@aws-sdk/middleware-serde";
+import { Command as $Command } from "@aws-sdk/smithy-client";
+import { ResumeBatchLoadTaskRequestFilterSensitiveLog, ResumeBatchLoadTaskResponseFilterSensitiveLog, } from "../models/models_0";
+import { deserializeAws_json1_0ResumeBatchLoadTaskCommand, serializeAws_json1_0ResumeBatchLoadTaskCommand, } from "../protocols/Aws_json1_0";
+export class ResumeBatchLoadTaskCommand extends $Command {
+    constructor(input) {
+        super();
+        this.input = input;
+    }
+    static getEndpointParameterInstructions() {
+        return {
+            UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" },
+            Endpoint: { type: "builtInParams", name: "endpoint" },
+            Region: { type: "builtInParams", name: "region" },
+            UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" },
+        };
+    }
+    resolveMiddleware(clientStack, configuration, options) {
+        this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
+        this.middlewareStack.use(getEndpointPlugin(configuration, ResumeBatchLoadTaskCommand.getEndpointParameterInstructions()));
+        this.middlewareStack.use(getEndpointDiscoveryPlugin(configuration, { clientStack, options, isDiscoveredEndpointRequired: true }));
+        const stack = clientStack.concat(this.middlewareStack);
+        const { logger } = configuration;
+        const clientName = "TimestreamWriteClient";
+        const commandName = "ResumeBatchLoadTaskCommand";
+        const handlerExecutionContext = {
+            logger,
+            clientName,
+            commandName,
+            inputFilterSensitiveLog: ResumeBatchLoadTaskRequestFilterSensitiveLog,
+            outputFilterSensitiveLog: ResumeBatchLoadTaskResponseFilterSensitiveLog,
+        };
+        const { requestHandler } = configuration;
+        return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext);
+    }
+    serialize(input, context) {
+        return serializeAws_json1_0ResumeBatchLoadTaskCommand(input, context);
+    }
+    deserialize(output, context) {
+        return deserializeAws_json1_0ResumeBatchLoadTaskCommand(output, context);
+    }
+}
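The four command classes above are the modular counterparts of the new client methods: serde, endpoint resolution, and endpoint discovery (`isDiscoveredEndpointRequired: true`) are attached in `resolveMiddleware`, and `TimestreamWriteClient.send()` drives the resolved stack. A sketch of starting a batch load with the bare client; all names are placeholders, and the full `CreateBatchLoadTaskRequest` shape is defined in `dist-types/models/models_0.d.ts`:

```js
import {
  TimestreamWriteClient,
  CreateBatchLoadTaskCommand,
} from "@aws-sdk/client-timestream-write";

const client = new TimestreamWriteClient({ region: "us-east-1" }); // placeholder region

// Placeholder database, table, and bucket names. "CSV" is the only value in the
// new BatchLoadDataFormat enum (see models_0.js below).
const { TaskId } = await client.send(
  new CreateBatchLoadTaskCommand({
    TargetDatabaseName: "example_database",
    TargetTableName: "example_table",
    DataSourceConfiguration: {
      DataFormat: "CSV",
      DataSourceS3Configuration: { BucketName: "example-source-bucket" },
    },
    ReportConfiguration: {
      ReportS3Configuration: { BucketName: "example-report-bucket" },
    },
  })
);
console.log("Created batch load task", TaskId);
```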
package/dist-es/commands/index.js
@@ -1,13 +1,17 @@
+export * from "./CreateBatchLoadTaskCommand";
 export * from "./CreateDatabaseCommand";
 export * from "./CreateTableCommand";
 export * from "./DeleteDatabaseCommand";
 export * from "./DeleteTableCommand";
+export * from "./DescribeBatchLoadTaskCommand";
 export * from "./DescribeDatabaseCommand";
 export * from "./DescribeEndpointsCommand";
 export * from "./DescribeTableCommand";
+export * from "./ListBatchLoadTasksCommand";
 export * from "./ListDatabasesCommand";
 export * from "./ListTablesCommand";
 export * from "./ListTagsForResourceCommand";
+export * from "./ResumeBatchLoadTaskCommand";
 export * from "./TagResourceCommand";
 export * from "./UntagResourceCommand";
 export * from "./UpdateDatabaseCommand";
package/dist-es/endpoint/ruleset.js
@@ -1,4 +1,4 @@
-const
-const a = "
-const _data = { version: "1.0", parameters: { Region:
+const q = "required", r = "fn", s = "argv", t = "ref";
+const a = "isSet", b = "tree", c = "error", d = "endpoint", e = "PartitionResult", f = { [q]: false, "type": "String" }, g = { [q]: true, "default": false, "type": "Boolean" }, h = { [t]: "Endpoint" }, i = { [r]: "booleanEquals", [s]: [{ [t]: "UseFIPS" }, true] }, j = { [r]: "booleanEquals", [s]: [{ [t]: "UseDualStack" }, true] }, k = {}, l = { [r]: "booleanEquals", [s]: [true, { [r]: "getAttr", [s]: [{ [t]: e }, "supportsFIPS"] }] }, m = { [r]: "booleanEquals", [s]: [true, { [r]: "getAttr", [s]: [{ [t]: e }, "supportsDualStack"] }] }, n = [i], o = [j], p = [{ [t]: "Region" }];
+const _data = { version: "1.0", parameters: { Region: f, UseDualStack: g, UseFIPS: g, Endpoint: f }, rules: [{ conditions: [{ [r]: a, [s]: [h] }], type: b, rules: [{ conditions: n, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: c }, { type: b, rules: [{ conditions: o, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: c }, { endpoint: { url: h, properties: k, headers: k }, type: d }] }] }, { type: b, rules: [{ conditions: [{ [r]: a, [s]: p }], type: b, rules: [{ conditions: [{ [r]: "aws.partition", [s]: p, assign: e }], type: b, rules: [{ conditions: [i, j], type: b, rules: [{ conditions: [l, m], type: b, rules: [{ type: b, rules: [{ endpoint: { url: "https://ingest.timestream-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: k, headers: k }, type: d }] }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: c }] }, { conditions: n, type: b, rules: [{ conditions: [l], type: b, rules: [{ type: b, rules: [{ endpoint: { url: "https://ingest.timestream-fips.{Region}.{PartitionResult#dnsSuffix}", properties: k, headers: k }, type: d }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", type: c }] }, { conditions: o, type: b, rules: [{ conditions: [m], type: b, rules: [{ type: b, rules: [{ endpoint: { url: "https://ingest.timestream.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: k, headers: k }, type: d }] }] }, { error: "DualStack is enabled but this partition does not support DualStack", type: c }] }, { type: b, rules: [{ endpoint: { url: "https://ingest.timestream.{Region}.{PartitionResult#dnsSuffix}", properties: k, headers: k }, type: d }] }] }] }, { error: "Invalid Configuration: Missing Region", type: c }] }] };
 export const ruleSet = _data;
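The ruleset is emitted in minified form: `q`, `r`, `s`, and `t` stand for `"required"`, `"fn"`, `"argv"`, and `"ref"`, and the remaining single letters bind repeated sub-objects. Decoded by hand for readability (the expansion below is not part of the package), the first rule in `_data`, which handles a custom endpoint, is roughly:

```js
// First rule of _data with q/r/s/t and h/i/j/k substituted back in.
const customEndpointRule = {
  conditions: [{ fn: "isSet", argv: [{ ref: "Endpoint" }] }],
  type: "tree",
  rules: [
    {
      conditions: [{ fn: "booleanEquals", argv: [{ ref: "UseFIPS" }, true] }],
      error: "Invalid Configuration: FIPS and custom endpoint are not supported",
      type: "error",
    },
    {
      type: "tree",
      rules: [
        {
          conditions: [{ fn: "booleanEquals", argv: [{ ref: "UseDualStack" }, true] }],
          error: "Invalid Configuration: Dualstack and custom endpoint are not supported",
          type: "error",
        },
        { endpoint: { url: { ref: "Endpoint" }, properties: {}, headers: {} }, type: "endpoint" },
      ],
    },
  ],
};
```

The remaining rules pick between `https://ingest.timestream[-fips].{Region}.{dnsSuffix|dualStackDnsSuffix}` according to the `UseFIPS` and `UseDualStack` flags and the partition's support for each.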
package/dist-es/models/models_0.js
@@ -1,3 +1,4 @@
+import { SENSITIVE_STRING } from "@aws-sdk/smithy-client";
 import { TimestreamWriteServiceException as __BaseException } from "./TimestreamWriteServiceException";
 export class AccessDeniedException extends __BaseException {
     constructor(opts) {
@@ -12,6 +13,48 @@ export class AccessDeniedException extends __BaseException {
         this.Message = opts.Message;
     }
 }
+export var BatchLoadDataFormat;
+(function (BatchLoadDataFormat) {
+    BatchLoadDataFormat["CSV"] = "CSV";
+})(BatchLoadDataFormat || (BatchLoadDataFormat = {}));
+export var BatchLoadStatus;
+(function (BatchLoadStatus) {
+    BatchLoadStatus["CREATED"] = "CREATED";
+    BatchLoadStatus["FAILED"] = "FAILED";
+    BatchLoadStatus["IN_PROGRESS"] = "IN_PROGRESS";
+    BatchLoadStatus["PENDING_RESUME"] = "PENDING_RESUME";
+    BatchLoadStatus["PROGRESS_STOPPED"] = "PROGRESS_STOPPED";
+    BatchLoadStatus["SUCCEEDED"] = "SUCCEEDED";
+})(BatchLoadStatus || (BatchLoadStatus = {}));
+export var MeasureValueType;
+(function (MeasureValueType) {
+    MeasureValueType["BIGINT"] = "BIGINT";
+    MeasureValueType["BOOLEAN"] = "BOOLEAN";
+    MeasureValueType["DOUBLE"] = "DOUBLE";
+    MeasureValueType["MULTI"] = "MULTI";
+    MeasureValueType["TIMESTAMP"] = "TIMESTAMP";
+    MeasureValueType["VARCHAR"] = "VARCHAR";
+})(MeasureValueType || (MeasureValueType = {}));
+export var ScalarMeasureValueType;
+(function (ScalarMeasureValueType) {
+    ScalarMeasureValueType["BIGINT"] = "BIGINT";
+    ScalarMeasureValueType["BOOLEAN"] = "BOOLEAN";
+    ScalarMeasureValueType["DOUBLE"] = "DOUBLE";
+    ScalarMeasureValueType["TIMESTAMP"] = "TIMESTAMP";
+    ScalarMeasureValueType["VARCHAR"] = "VARCHAR";
+})(ScalarMeasureValueType || (ScalarMeasureValueType = {}));
+export var TimeUnit;
+(function (TimeUnit) {
+    TimeUnit["MICROSECONDS"] = "MICROSECONDS";
+    TimeUnit["MILLISECONDS"] = "MILLISECONDS";
+    TimeUnit["NANOSECONDS"] = "NANOSECONDS";
+    TimeUnit["SECONDS"] = "SECONDS";
+})(TimeUnit || (TimeUnit = {}));
+export var S3EncryptionOption;
+(function (S3EncryptionOption) {
+    S3EncryptionOption["SSE_KMS"] = "SSE_KMS";
+    S3EncryptionOption["SSE_S3"] = "SSE_S3";
+})(S3EncryptionOption || (S3EncryptionOption = {}));
 export class ConflictException extends __BaseException {
     constructor(opts) {
         super({
@@ -51,6 +94,19 @@ export class InvalidEndpointException extends __BaseException {
         this.Message = opts.Message;
     }
 }
+export class ResourceNotFoundException extends __BaseException {
+    constructor(opts) {
+        super({
+            name: "ResourceNotFoundException",
+            $fault: "client",
+            ...opts,
+        });
+        this.name = "ResourceNotFoundException";
+        this.$fault = "client";
+        Object.setPrototypeOf(this, ResourceNotFoundException.prototype);
+        this.Message = opts.Message;
+    }
+}
 export class ServiceQuotaExceededException extends __BaseException {
     constructor(opts) {
         super({
@@ -90,49 +146,16 @@ export class ValidationException extends __BaseException {
         this.Message = opts.Message;
     }
 }
-export var S3EncryptionOption;
-(function (S3EncryptionOption) {
-    S3EncryptionOption["SSE_KMS"] = "SSE_KMS";
-    S3EncryptionOption["SSE_S3"] = "SSE_S3";
-})(S3EncryptionOption || (S3EncryptionOption = {}));
 export var TableStatus;
 (function (TableStatus) {
     TableStatus["ACTIVE"] = "ACTIVE";
     TableStatus["DELETING"] = "DELETING";
+    TableStatus["RESTORING"] = "RESTORING";
 })(TableStatus || (TableStatus = {}));
-export class ResourceNotFoundException extends __BaseException {
-    constructor(opts) {
-        super({
-            name: "ResourceNotFoundException",
-            $fault: "client",
-            ...opts,
-        });
-        this.name = "ResourceNotFoundException";
-        this.$fault = "client";
-        Object.setPrototypeOf(this, ResourceNotFoundException.prototype);
-        this.Message = opts.Message;
-    }
-}
 export var DimensionValueType;
 (function (DimensionValueType) {
     DimensionValueType["VARCHAR"] = "VARCHAR";
 })(DimensionValueType || (DimensionValueType = {}));
-export var MeasureValueType;
-(function (MeasureValueType) {
-    MeasureValueType["BIGINT"] = "BIGINT";
-    MeasureValueType["BOOLEAN"] = "BOOLEAN";
-    MeasureValueType["DOUBLE"] = "DOUBLE";
-    MeasureValueType["MULTI"] = "MULTI";
-    MeasureValueType["TIMESTAMP"] = "TIMESTAMP";
-    MeasureValueType["VARCHAR"] = "VARCHAR";
-})(MeasureValueType || (MeasureValueType = {}));
-export var TimeUnit;
-(function (TimeUnit) {
-    TimeUnit["MICROSECONDS"] = "MICROSECONDS";
-    TimeUnit["MILLISECONDS"] = "MILLISECONDS";
-    TimeUnit["NANOSECONDS"] = "NANOSECONDS";
-    TimeUnit["SECONDS"] = "SECONDS";
-})(TimeUnit || (TimeUnit = {}));
 export class RejectedRecordsException extends __BaseException {
     constructor(opts) {
         super({
@@ -147,6 +170,58 @@ export class RejectedRecordsException extends __BaseException {
         this.RejectedRecords = opts.RejectedRecords;
     }
 }
+export const BatchLoadProgressReportFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const BatchLoadTaskFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const DimensionMappingFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const MultiMeasureAttributeMappingFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const MixedMeasureMappingFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const MultiMeasureMappingsFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const DataModelFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const DataModelS3ConfigurationFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const DataModelConfigurationFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const CsvConfigurationFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const DataSourceS3ConfigurationFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const DataSourceConfigurationFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const ReportS3ConfigurationFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const ReportConfigurationFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const BatchLoadTaskDescriptionFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const CreateBatchLoadTaskRequestFilterSensitiveLog = (obj) => ({
+    ...obj,
+    ...(obj.ClientToken && { ClientToken: SENSITIVE_STRING }),
+});
+export const CreateBatchLoadTaskResponseFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
 export const TagFilterSensitiveLog = (obj) => ({
     ...obj,
 });
@@ -186,6 +261,12 @@ export const DeleteDatabaseRequestFilterSensitiveLog = (obj) => ({
 export const DeleteTableRequestFilterSensitiveLog = (obj) => ({
     ...obj,
 });
+export const DescribeBatchLoadTaskRequestFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const DescribeBatchLoadTaskResponseFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
 export const DescribeDatabaseRequestFilterSensitiveLog = (obj) => ({
     ...obj,
 });
@@ -210,6 +291,12 @@ export const DescribeTableResponseFilterSensitiveLog = (obj) => ({
 export const DimensionFilterSensitiveLog = (obj) => ({
     ...obj,
 });
+export const ListBatchLoadTasksRequestFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const ListBatchLoadTasksResponseFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
 export const ListDatabasesRequestFilterSensitiveLog = (obj) => ({
     ...obj,
 });
@@ -240,6 +327,12 @@ export const RecordsIngestedFilterSensitiveLog = (obj) => ({
 export const RejectedRecordFilterSensitiveLog = (obj) => ({
     ...obj,
 });
+export const ResumeBatchLoadTaskRequestFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
+export const ResumeBatchLoadTaskResponseFilterSensitiveLog = (obj) => ({
+    ...obj,
+});
 export const TagResourceRequestFilterSensitiveLog = (obj) => ({
     ...obj,
 });
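Most of the new `*FilterSensitiveLog` helpers are pass-throughs; the only one that redacts anything is `CreateBatchLoadTaskRequestFilterSensitiveLog`, which replaces `ClientToken` with the `SENSITIVE_STRING` marker before the SDK logs the request. A small sketch, assuming the helper is re-exported from the package index as the other model helpers are in this version (values are placeholders):

```js
import { SENSITIVE_STRING } from "@aws-sdk/smithy-client";
// Assumption: the filter helpers are re-exported from the package root at 3.280.0.
import { CreateBatchLoadTaskRequestFilterSensitiveLog } from "@aws-sdk/client-timestream-write";

const logged = CreateBatchLoadTaskRequestFilterSensitiveLog({
  TargetDatabaseName: "example_database", // placeholder
  ClientToken: "example-client-token",    // placeholder; gets redacted
});

console.log(logged.ClientToken === SENSITIVE_STRING); // true
console.log(logged); // ClientToken now reads as the SENSITIVE_STRING placeholder
```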
package/dist-es/pagination/ListBatchLoadTasksPaginator.js
@@ -0,0 +1,32 @@
+import { ListBatchLoadTasksCommand, } from "../commands/ListBatchLoadTasksCommand";
+import { TimestreamWrite } from "../TimestreamWrite";
+import { TimestreamWriteClient } from "../TimestreamWriteClient";
+const makePagedClientRequest = async (client, input, ...args) => {
+    return await client.send(new ListBatchLoadTasksCommand(input), ...args);
+};
+const makePagedRequest = async (client, input, ...args) => {
+    return await client.listBatchLoadTasks(input, ...args);
+};
+export async function* paginateListBatchLoadTasks(config, input, ...additionalArguments) {
+    let token = config.startingToken || undefined;
+    let hasNext = true;
+    let page;
+    while (hasNext) {
+        input.NextToken = token;
+        input["MaxResults"] = config.pageSize;
+        if (config.client instanceof TimestreamWrite) {
+            page = await makePagedRequest(config.client, input, ...additionalArguments);
+        }
+        else if (config.client instanceof TimestreamWriteClient) {
+            page = await makePagedClientRequest(config.client, input, ...additionalArguments);
+        }
+        else {
+            throw new Error("Invalid client, expected TimestreamWrite | TimestreamWriteClient");
+        }
+        yield page;
+        const prevToken = token;
+        token = page.NextToken;
+        hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken));
+    }
+    return undefined;
+}
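`paginateListBatchLoadTasks` follows the standard SDK v3 async-generator pattern: `config.pageSize` is written to `MaxResults`, `NextToken` is threaded between calls, and iteration stops once the service stops returning a token (or, with `stopOnSameToken`, when the token repeats). A usage sketch; the region and page size are placeholders, and `BatchLoadTasks`, `TaskId`, and `TaskStatus` are response fields per the ListBatchLoadTasks API rather than part of this diff:

```js
import {
  TimestreamWriteClient,
  paginateListBatchLoadTasks,
} from "@aws-sdk/client-timestream-write";

const client = new TimestreamWriteClient({ region: "us-east-1" }); // placeholder region

// Each yielded page is a ListBatchLoadTasksResponse; pageSize becomes MaxResults.
for await (const page of paginateListBatchLoadTasks({ client, pageSize: 25 }, {})) {
  for (const task of page.BatchLoadTasks ?? []) {
    console.log(task.TaskId, task.TaskStatus);
  }
}
```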