@aws-sdk/client-timestream-write 3.278.0 → 3.280.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/README.md +19 -13
  2. package/dist-cjs/TimestreamWrite.js +60 -0
  3. package/dist-cjs/commands/CreateBatchLoadTaskCommand.js +48 -0
  4. package/dist-cjs/commands/DescribeBatchLoadTaskCommand.js +48 -0
  5. package/dist-cjs/commands/ListBatchLoadTasksCommand.js +48 -0
  6. package/dist-cjs/commands/ResumeBatchLoadTaskCommand.js +48 -0
  7. package/dist-cjs/commands/index.js +4 -0
  8. package/dist-cjs/endpoint/ruleset.js +3 -3
  9. package/dist-cjs/models/models_0.js +153 -37
  10. package/dist-cjs/pagination/ListBatchLoadTasksPaginator.js +36 -0
  11. package/dist-cjs/pagination/index.js +1 -0
  12. package/dist-cjs/protocols/Aws_json1_0.js +626 -1
  13. package/dist-es/TimestreamWrite.js +60 -0
  14. package/dist-es/commands/CreateBatchLoadTaskCommand.js +44 -0
  15. package/dist-es/commands/DescribeBatchLoadTaskCommand.js +44 -0
  16. package/dist-es/commands/ListBatchLoadTasksCommand.js +44 -0
  17. package/dist-es/commands/ResumeBatchLoadTaskCommand.js +44 -0
  18. package/dist-es/commands/index.js +4 -0
  19. package/dist-es/endpoint/ruleset.js +3 -3
  20. package/dist-es/models/models_0.js +127 -34
  21. package/dist-es/pagination/ListBatchLoadTasksPaginator.js +32 -0
  22. package/dist-es/pagination/index.js +1 -0
  23. package/dist-es/protocols/Aws_json1_0.js +617 -0
  24. package/dist-types/TimestreamWrite.d.ts +154 -131
  25. package/dist-types/TimestreamWriteClient.d.ts +18 -8
  26. package/dist-types/commands/CreateBatchLoadTaskCommand.d.ts +45 -0
  27. package/dist-types/commands/CreateDatabaseCommand.d.ts +3 -6
  28. package/dist-types/commands/CreateTableCommand.d.ts +6 -9
  29. package/dist-types/commands/DeleteDatabaseCommand.d.ts +9 -11
  30. package/dist-types/commands/DeleteTableCommand.d.ts +7 -8
  31. package/dist-types/commands/DescribeBatchLoadTaskCommand.d.ts +40 -0
  32. package/dist-types/commands/DescribeDatabaseCommand.d.ts +4 -4
  33. package/dist-types/commands/DescribeEndpointsCommand.d.ts +10 -8
  34. package/dist-types/commands/DescribeTableCommand.d.ts +4 -5
  35. package/dist-types/commands/ListBatchLoadTasksCommand.d.ts +39 -0
  36. package/dist-types/commands/ListDatabasesCommand.d.ts +3 -4
  37. package/dist-types/commands/ListTablesCommand.d.ts +3 -4
  38. package/dist-types/commands/ListTagsForResourceCommand.d.ts +1 -3
  39. package/dist-types/commands/ResumeBatchLoadTaskCommand.d.ts +38 -0
  40. package/dist-types/commands/TagResourceCommand.d.ts +3 -5
  41. package/dist-types/commands/UntagResourceCommand.d.ts +1 -3
  42. package/dist-types/commands/UpdateDatabaseCommand.d.ts +5 -7
  43. package/dist-types/commands/UpdateTableCommand.d.ts +6 -8
  44. package/dist-types/commands/WriteRecordsCommand.d.ts +36 -41
  45. package/dist-types/commands/index.d.ts +4 -0
  46. package/dist-types/models/models_0.d.ts +786 -274
  47. package/dist-types/pagination/ListBatchLoadTasksPaginator.d.ts +4 -0
  48. package/dist-types/pagination/index.d.ts +1 -0
  49. package/dist-types/protocols/Aws_json1_0.d.ts +12 -0
  50. package/dist-types/ts3.4/TimestreamWrite.d.ts +68 -0
  51. package/dist-types/ts3.4/TimestreamWriteClient.d.ts +24 -0
  52. package/dist-types/ts3.4/commands/CreateBatchLoadTaskCommand.d.ts +38 -0
  53. package/dist-types/ts3.4/commands/DescribeBatchLoadTaskCommand.d.ts +41 -0
  54. package/dist-types/ts3.4/commands/ListBatchLoadTasksCommand.d.ts +38 -0
  55. package/dist-types/ts3.4/commands/ResumeBatchLoadTaskCommand.d.ts +38 -0
  56. package/dist-types/ts3.4/commands/index.d.ts +4 -0
  57. package/dist-types/ts3.4/models/models_0.d.ts +253 -43
  58. package/dist-types/ts3.4/pagination/ListBatchLoadTasksPaginator.d.ts +11 -0
  59. package/dist-types/ts3.4/pagination/index.d.ts +1 -0
  60. package/dist-types/ts3.4/protocols/Aws_json1_0.d.ts +48 -0
  61. package/package.json +9 -7
package/README.md CHANGED
@@ -11,12 +11,18 @@ AWS SDK for JavaScript TimestreamWrite Client for Node.js, Browser and React Nat
11
11
 
12
12
  <fullname>Amazon Timestream Write</fullname>
13
13
 
14
- <p>Amazon Timestream is a fast, scalable, fully managed time series database service that makes it easy to store and analyze trillions of time series data points per day.
15
- With Timestream, you can easily store and analyze IoT sensor data to derive insights from your IoT applications.
16
- You can analyze industrial telemetry to streamline equipment management and maintenance.
17
- You can also store and analyze log data and metrics to improve the performance and availability of your applications.
18
- Timestream is built from the ground up to effectively ingest, process,
19
- and store time series data. It organizes data to optimize query processing. It automatically scales based on the volume of data ingested and on the query volume to ensure you receive optimal performance while inserting and querying data. As your data grows over time, Timestream’s adaptive query processing engine spans across storage tiers to provide fast analysis while reducing costs.</p>
14
+ <p>Amazon Timestream is a fast, scalable, fully managed time-series database service
15
+ that makes it easy to store and analyze trillions of time-series data points per day. With
16
+ Timestream, you can easily store and analyze IoT sensor data to derive insights
17
+ from your IoT applications. You can analyze industrial telemetry to streamline equipment
18
+ management and maintenance. You can also store and analyze log data and metrics to improve
19
+ the performance and availability of your applications. </p>
20
+ <p>Timestream is built from the ground up to effectively ingest, process, and
21
+ store time-series data. It organizes data to optimize query processing. It automatically
22
+ scales based on the volume of data ingested and on the query volume to ensure you receive
23
+ optimal performance while inserting and querying data. As your data grows over time,
24
+ Timestream’s adaptive query processing engine spans across storage tiers to
25
+ provide fast analysis while reducing costs.</p>
20
26
 
21
27
  ## Installing
22
28
 
@@ -33,16 +39,16 @@ using your favorite package manager:
33
39
 
34
40
  The AWS SDK is modulized by clients and commands.
35
41
  To send a request, you only need to import the `TimestreamWriteClient` and
36
- the commands you need, for example `CreateDatabaseCommand`:
42
+ the commands you need, for example `CreateBatchLoadTaskCommand`:
37
43
 
38
44
  ```js
39
45
  // ES5 example
40
- const { TimestreamWriteClient, CreateDatabaseCommand } = require("@aws-sdk/client-timestream-write");
46
+ const { TimestreamWriteClient, CreateBatchLoadTaskCommand } = require("@aws-sdk/client-timestream-write");
41
47
  ```
42
48
 
43
49
  ```ts
44
50
  // ES6+ example
45
- import { TimestreamWriteClient, CreateDatabaseCommand } from "@aws-sdk/client-timestream-write";
51
+ import { TimestreamWriteClient, CreateBatchLoadTaskCommand } from "@aws-sdk/client-timestream-write";
46
52
  ```
47
53
 
48
54
  ### Usage
@@ -61,7 +67,7 @@ const client = new TimestreamWriteClient({ region: "REGION" });
61
67
  const params = {
62
68
  /** input parameters */
63
69
  };
64
- const command = new CreateDatabaseCommand(params);
70
+ const command = new CreateBatchLoadTaskCommand(params);
65
71
  ```
66
72
 
67
73
  #### Async/await
@@ -140,7 +146,7 @@ const client = new AWS.TimestreamWrite({ region: "REGION" });
140
146
 
141
147
  // async/await.
142
148
  try {
143
- const data = await client.createDatabase(params);
149
+ const data = await client.createBatchLoadTask(params);
144
150
  // process data.
145
151
  } catch (error) {
146
152
  // error handling.
@@ -148,7 +154,7 @@ try {
148
154
 
149
155
  // Promises.
150
156
  client
151
- .createDatabase(params)
157
+ .createBatchLoadTask(params)
152
158
  .then((data) => {
153
159
  // process data.
154
160
  })
@@ -157,7 +163,7 @@ client
157
163
  });
158
164
 
159
165
  // callbacks.
160
- client.createDatabase(params, (err, data) => {
166
+ client.createBatchLoadTask(params, (err, data) => {
161
167
  // process err and data.
162
168
  });
163
169
  ```
@@ -1,16 +1,20 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.TimestreamWrite = void 0;
4
+ const CreateBatchLoadTaskCommand_1 = require("./commands/CreateBatchLoadTaskCommand");
4
5
  const CreateDatabaseCommand_1 = require("./commands/CreateDatabaseCommand");
5
6
  const CreateTableCommand_1 = require("./commands/CreateTableCommand");
6
7
  const DeleteDatabaseCommand_1 = require("./commands/DeleteDatabaseCommand");
7
8
  const DeleteTableCommand_1 = require("./commands/DeleteTableCommand");
9
+ const DescribeBatchLoadTaskCommand_1 = require("./commands/DescribeBatchLoadTaskCommand");
8
10
  const DescribeDatabaseCommand_1 = require("./commands/DescribeDatabaseCommand");
9
11
  const DescribeEndpointsCommand_1 = require("./commands/DescribeEndpointsCommand");
10
12
  const DescribeTableCommand_1 = require("./commands/DescribeTableCommand");
13
+ const ListBatchLoadTasksCommand_1 = require("./commands/ListBatchLoadTasksCommand");
11
14
  const ListDatabasesCommand_1 = require("./commands/ListDatabasesCommand");
12
15
  const ListTablesCommand_1 = require("./commands/ListTablesCommand");
13
16
  const ListTagsForResourceCommand_1 = require("./commands/ListTagsForResourceCommand");
17
+ const ResumeBatchLoadTaskCommand_1 = require("./commands/ResumeBatchLoadTaskCommand");
14
18
  const TagResourceCommand_1 = require("./commands/TagResourceCommand");
15
19
  const UntagResourceCommand_1 = require("./commands/UntagResourceCommand");
16
20
  const UpdateDatabaseCommand_1 = require("./commands/UpdateDatabaseCommand");
@@ -18,6 +22,20 @@ const UpdateTableCommand_1 = require("./commands/UpdateTableCommand");
18
22
  const WriteRecordsCommand_1 = require("./commands/WriteRecordsCommand");
19
23
  const TimestreamWriteClient_1 = require("./TimestreamWriteClient");
20
24
  class TimestreamWrite extends TimestreamWriteClient_1.TimestreamWriteClient {
25
+ createBatchLoadTask(args, optionsOrCb, cb) {
26
+ const command = new CreateBatchLoadTaskCommand_1.CreateBatchLoadTaskCommand(args);
27
+ if (typeof optionsOrCb === "function") {
28
+ this.send(command, optionsOrCb);
29
+ }
30
+ else if (typeof cb === "function") {
31
+ if (typeof optionsOrCb !== "object")
32
+ throw new Error(`Expect http options but get ${typeof optionsOrCb}`);
33
+ this.send(command, optionsOrCb || {}, cb);
34
+ }
35
+ else {
36
+ return this.send(command, optionsOrCb);
37
+ }
38
+ }
21
39
  createDatabase(args, optionsOrCb, cb) {
22
40
  const command = new CreateDatabaseCommand_1.CreateDatabaseCommand(args);
23
41
  if (typeof optionsOrCb === "function") {
@@ -74,6 +92,20 @@ class TimestreamWrite extends TimestreamWriteClient_1.TimestreamWriteClient {
74
92
  return this.send(command, optionsOrCb);
75
93
  }
76
94
  }
95
+ describeBatchLoadTask(args, optionsOrCb, cb) {
96
+ const command = new DescribeBatchLoadTaskCommand_1.DescribeBatchLoadTaskCommand(args);
97
+ if (typeof optionsOrCb === "function") {
98
+ this.send(command, optionsOrCb);
99
+ }
100
+ else if (typeof cb === "function") {
101
+ if (typeof optionsOrCb !== "object")
102
+ throw new Error(`Expect http options but get ${typeof optionsOrCb}`);
103
+ this.send(command, optionsOrCb || {}, cb);
104
+ }
105
+ else {
106
+ return this.send(command, optionsOrCb);
107
+ }
108
+ }
77
109
  describeDatabase(args, optionsOrCb, cb) {
78
110
  const command = new DescribeDatabaseCommand_1.DescribeDatabaseCommand(args);
79
111
  if (typeof optionsOrCb === "function") {
@@ -116,6 +148,20 @@ class TimestreamWrite extends TimestreamWriteClient_1.TimestreamWriteClient {
116
148
  return this.send(command, optionsOrCb);
117
149
  }
118
150
  }
151
+ listBatchLoadTasks(args, optionsOrCb, cb) {
152
+ const command = new ListBatchLoadTasksCommand_1.ListBatchLoadTasksCommand(args);
153
+ if (typeof optionsOrCb === "function") {
154
+ this.send(command, optionsOrCb);
155
+ }
156
+ else if (typeof cb === "function") {
157
+ if (typeof optionsOrCb !== "object")
158
+ throw new Error(`Expect http options but get ${typeof optionsOrCb}`);
159
+ this.send(command, optionsOrCb || {}, cb);
160
+ }
161
+ else {
162
+ return this.send(command, optionsOrCb);
163
+ }
164
+ }
119
165
  listDatabases(args, optionsOrCb, cb) {
120
166
  const command = new ListDatabasesCommand_1.ListDatabasesCommand(args);
121
167
  if (typeof optionsOrCb === "function") {
@@ -158,6 +204,20 @@ class TimestreamWrite extends TimestreamWriteClient_1.TimestreamWriteClient {
158
204
  return this.send(command, optionsOrCb);
159
205
  }
160
206
  }
207
+ resumeBatchLoadTask(args, optionsOrCb, cb) {
208
+ const command = new ResumeBatchLoadTaskCommand_1.ResumeBatchLoadTaskCommand(args);
209
+ if (typeof optionsOrCb === "function") {
210
+ this.send(command, optionsOrCb);
211
+ }
212
+ else if (typeof cb === "function") {
213
+ if (typeof optionsOrCb !== "object")
214
+ throw new Error(`Expect http options but get ${typeof optionsOrCb}`);
215
+ this.send(command, optionsOrCb || {}, cb);
216
+ }
217
+ else {
218
+ return this.send(command, optionsOrCb);
219
+ }
220
+ }
161
221
  tagResource(args, optionsOrCb, cb) {
162
222
  const command = new TagResourceCommand_1.TagResourceCommand(args);
163
223
  if (typeof optionsOrCb === "function") {
@@ -0,0 +1,48 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.CreateBatchLoadTaskCommand = void 0;
4
+ const middleware_endpoint_1 = require("@aws-sdk/middleware-endpoint");
5
+ const middleware_endpoint_discovery_1 = require("@aws-sdk/middleware-endpoint-discovery");
6
+ const middleware_serde_1 = require("@aws-sdk/middleware-serde");
7
+ const smithy_client_1 = require("@aws-sdk/smithy-client");
8
+ const models_0_1 = require("../models/models_0");
9
+ const Aws_json1_0_1 = require("../protocols/Aws_json1_0");
10
+ class CreateBatchLoadTaskCommand extends smithy_client_1.Command {
11
+ constructor(input) {
12
+ super();
13
+ this.input = input;
14
+ }
15
+ static getEndpointParameterInstructions() {
16
+ return {
17
+ UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" },
18
+ Endpoint: { type: "builtInParams", name: "endpoint" },
19
+ Region: { type: "builtInParams", name: "region" },
20
+ UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" },
21
+ };
22
+ }
23
+ resolveMiddleware(clientStack, configuration, options) {
24
+ this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize));
25
+ this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, CreateBatchLoadTaskCommand.getEndpointParameterInstructions()));
26
+ this.middlewareStack.use((0, middleware_endpoint_discovery_1.getEndpointDiscoveryPlugin)(configuration, { clientStack, options, isDiscoveredEndpointRequired: true }));
27
+ const stack = clientStack.concat(this.middlewareStack);
28
+ const { logger } = configuration;
29
+ const clientName = "TimestreamWriteClient";
30
+ const commandName = "CreateBatchLoadTaskCommand";
31
+ const handlerExecutionContext = {
32
+ logger,
33
+ clientName,
34
+ commandName,
35
+ inputFilterSensitiveLog: models_0_1.CreateBatchLoadTaskRequestFilterSensitiveLog,
36
+ outputFilterSensitiveLog: models_0_1.CreateBatchLoadTaskResponseFilterSensitiveLog,
37
+ };
38
+ const { requestHandler } = configuration;
39
+ return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext);
40
+ }
41
+ serialize(input, context) {
42
+ return (0, Aws_json1_0_1.serializeAws_json1_0CreateBatchLoadTaskCommand)(input, context);
43
+ }
44
+ deserialize(output, context) {
45
+ return (0, Aws_json1_0_1.deserializeAws_json1_0CreateBatchLoadTaskCommand)(output, context);
46
+ }
47
+ }
48
+ exports.CreateBatchLoadTaskCommand = CreateBatchLoadTaskCommand;
@@ -0,0 +1,48 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.DescribeBatchLoadTaskCommand = void 0;
4
+ const middleware_endpoint_1 = require("@aws-sdk/middleware-endpoint");
5
+ const middleware_endpoint_discovery_1 = require("@aws-sdk/middleware-endpoint-discovery");
6
+ const middleware_serde_1 = require("@aws-sdk/middleware-serde");
7
+ const smithy_client_1 = require("@aws-sdk/smithy-client");
8
+ const models_0_1 = require("../models/models_0");
9
+ const Aws_json1_0_1 = require("../protocols/Aws_json1_0");
10
+ class DescribeBatchLoadTaskCommand extends smithy_client_1.Command {
11
+ constructor(input) {
12
+ super();
13
+ this.input = input;
14
+ }
15
+ static getEndpointParameterInstructions() {
16
+ return {
17
+ UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" },
18
+ Endpoint: { type: "builtInParams", name: "endpoint" },
19
+ Region: { type: "builtInParams", name: "region" },
20
+ UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" },
21
+ };
22
+ }
23
+ resolveMiddleware(clientStack, configuration, options) {
24
+ this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize));
25
+ this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, DescribeBatchLoadTaskCommand.getEndpointParameterInstructions()));
26
+ this.middlewareStack.use((0, middleware_endpoint_discovery_1.getEndpointDiscoveryPlugin)(configuration, { clientStack, options, isDiscoveredEndpointRequired: true }));
27
+ const stack = clientStack.concat(this.middlewareStack);
28
+ const { logger } = configuration;
29
+ const clientName = "TimestreamWriteClient";
30
+ const commandName = "DescribeBatchLoadTaskCommand";
31
+ const handlerExecutionContext = {
32
+ logger,
33
+ clientName,
34
+ commandName,
35
+ inputFilterSensitiveLog: models_0_1.DescribeBatchLoadTaskRequestFilterSensitiveLog,
36
+ outputFilterSensitiveLog: models_0_1.DescribeBatchLoadTaskResponseFilterSensitiveLog,
37
+ };
38
+ const { requestHandler } = configuration;
39
+ return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext);
40
+ }
41
+ serialize(input, context) {
42
+ return (0, Aws_json1_0_1.serializeAws_json1_0DescribeBatchLoadTaskCommand)(input, context);
43
+ }
44
+ deserialize(output, context) {
45
+ return (0, Aws_json1_0_1.deserializeAws_json1_0DescribeBatchLoadTaskCommand)(output, context);
46
+ }
47
+ }
48
+ exports.DescribeBatchLoadTaskCommand = DescribeBatchLoadTaskCommand;
@@ -0,0 +1,48 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ListBatchLoadTasksCommand = void 0;
4
+ const middleware_endpoint_1 = require("@aws-sdk/middleware-endpoint");
5
+ const middleware_endpoint_discovery_1 = require("@aws-sdk/middleware-endpoint-discovery");
6
+ const middleware_serde_1 = require("@aws-sdk/middleware-serde");
7
+ const smithy_client_1 = require("@aws-sdk/smithy-client");
8
+ const models_0_1 = require("../models/models_0");
9
+ const Aws_json1_0_1 = require("../protocols/Aws_json1_0");
10
+ class ListBatchLoadTasksCommand extends smithy_client_1.Command {
11
+ constructor(input) {
12
+ super();
13
+ this.input = input;
14
+ }
15
+ static getEndpointParameterInstructions() {
16
+ return {
17
+ UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" },
18
+ Endpoint: { type: "builtInParams", name: "endpoint" },
19
+ Region: { type: "builtInParams", name: "region" },
20
+ UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" },
21
+ };
22
+ }
23
+ resolveMiddleware(clientStack, configuration, options) {
24
+ this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize));
25
+ this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, ListBatchLoadTasksCommand.getEndpointParameterInstructions()));
26
+ this.middlewareStack.use((0, middleware_endpoint_discovery_1.getEndpointDiscoveryPlugin)(configuration, { clientStack, options, isDiscoveredEndpointRequired: true }));
27
+ const stack = clientStack.concat(this.middlewareStack);
28
+ const { logger } = configuration;
29
+ const clientName = "TimestreamWriteClient";
30
+ const commandName = "ListBatchLoadTasksCommand";
31
+ const handlerExecutionContext = {
32
+ logger,
33
+ clientName,
34
+ commandName,
35
+ inputFilterSensitiveLog: models_0_1.ListBatchLoadTasksRequestFilterSensitiveLog,
36
+ outputFilterSensitiveLog: models_0_1.ListBatchLoadTasksResponseFilterSensitiveLog,
37
+ };
38
+ const { requestHandler } = configuration;
39
+ return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext);
40
+ }
41
+ serialize(input, context) {
42
+ return (0, Aws_json1_0_1.serializeAws_json1_0ListBatchLoadTasksCommand)(input, context);
43
+ }
44
+ deserialize(output, context) {
45
+ return (0, Aws_json1_0_1.deserializeAws_json1_0ListBatchLoadTasksCommand)(output, context);
46
+ }
47
+ }
48
+ exports.ListBatchLoadTasksCommand = ListBatchLoadTasksCommand;
@@ -0,0 +1,48 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ResumeBatchLoadTaskCommand = void 0;
4
+ const middleware_endpoint_1 = require("@aws-sdk/middleware-endpoint");
5
+ const middleware_endpoint_discovery_1 = require("@aws-sdk/middleware-endpoint-discovery");
6
+ const middleware_serde_1 = require("@aws-sdk/middleware-serde");
7
+ const smithy_client_1 = require("@aws-sdk/smithy-client");
8
+ const models_0_1 = require("../models/models_0");
9
+ const Aws_json1_0_1 = require("../protocols/Aws_json1_0");
10
+ class ResumeBatchLoadTaskCommand extends smithy_client_1.Command {
11
+ constructor(input) {
12
+ super();
13
+ this.input = input;
14
+ }
15
+ static getEndpointParameterInstructions() {
16
+ return {
17
+ UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" },
18
+ Endpoint: { type: "builtInParams", name: "endpoint" },
19
+ Region: { type: "builtInParams", name: "region" },
20
+ UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" },
21
+ };
22
+ }
23
+ resolveMiddleware(clientStack, configuration, options) {
24
+ this.middlewareStack.use((0, middleware_serde_1.getSerdePlugin)(configuration, this.serialize, this.deserialize));
25
+ this.middlewareStack.use((0, middleware_endpoint_1.getEndpointPlugin)(configuration, ResumeBatchLoadTaskCommand.getEndpointParameterInstructions()));
26
+ this.middlewareStack.use((0, middleware_endpoint_discovery_1.getEndpointDiscoveryPlugin)(configuration, { clientStack, options, isDiscoveredEndpointRequired: true }));
27
+ const stack = clientStack.concat(this.middlewareStack);
28
+ const { logger } = configuration;
29
+ const clientName = "TimestreamWriteClient";
30
+ const commandName = "ResumeBatchLoadTaskCommand";
31
+ const handlerExecutionContext = {
32
+ logger,
33
+ clientName,
34
+ commandName,
35
+ inputFilterSensitiveLog: models_0_1.ResumeBatchLoadTaskRequestFilterSensitiveLog,
36
+ outputFilterSensitiveLog: models_0_1.ResumeBatchLoadTaskResponseFilterSensitiveLog,
37
+ };
38
+ const { requestHandler } = configuration;
39
+ return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext);
40
+ }
41
+ serialize(input, context) {
42
+ return (0, Aws_json1_0_1.serializeAws_json1_0ResumeBatchLoadTaskCommand)(input, context);
43
+ }
44
+ deserialize(output, context) {
45
+ return (0, Aws_json1_0_1.deserializeAws_json1_0ResumeBatchLoadTaskCommand)(output, context);
46
+ }
47
+ }
48
+ exports.ResumeBatchLoadTaskCommand = ResumeBatchLoadTaskCommand;
@@ -1,16 +1,20 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  const tslib_1 = require("tslib");
4
+ tslib_1.__exportStar(require("./CreateBatchLoadTaskCommand"), exports);
4
5
  tslib_1.__exportStar(require("./CreateDatabaseCommand"), exports);
5
6
  tslib_1.__exportStar(require("./CreateTableCommand"), exports);
6
7
  tslib_1.__exportStar(require("./DeleteDatabaseCommand"), exports);
7
8
  tslib_1.__exportStar(require("./DeleteTableCommand"), exports);
9
+ tslib_1.__exportStar(require("./DescribeBatchLoadTaskCommand"), exports);
8
10
  tslib_1.__exportStar(require("./DescribeDatabaseCommand"), exports);
9
11
  tslib_1.__exportStar(require("./DescribeEndpointsCommand"), exports);
10
12
  tslib_1.__exportStar(require("./DescribeTableCommand"), exports);
13
+ tslib_1.__exportStar(require("./ListBatchLoadTasksCommand"), exports);
11
14
  tslib_1.__exportStar(require("./ListDatabasesCommand"), exports);
12
15
  tslib_1.__exportStar(require("./ListTablesCommand"), exports);
13
16
  tslib_1.__exportStar(require("./ListTagsForResourceCommand"), exports);
17
+ tslib_1.__exportStar(require("./ResumeBatchLoadTaskCommand"), exports);
14
18
  tslib_1.__exportStar(require("./TagResourceCommand"), exports);
15
19
  tslib_1.__exportStar(require("./UntagResourceCommand"), exports);
16
20
  tslib_1.__exportStar(require("./UpdateDatabaseCommand"), exports);
@@ -1,7 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.ruleSet = void 0;
4
- const p = "required", q = "fn", r = "argv", s = "ref";
5
- const a = "PartitionResult", b = "tree", c = "error", d = "endpoint", e = { [p]: false, "type": "String" }, f = { [p]: true, "default": false, "type": "Boolean" }, g = { [s]: "Endpoint" }, h = { [q]: "booleanEquals", [r]: [{ [s]: "UseFIPS" }, true] }, i = { [q]: "booleanEquals", [r]: [{ [s]: "UseDualStack" }, true] }, j = {}, k = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: a }, "supportsFIPS"] }] }, l = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: a }, "supportsDualStack"] }] }, m = [g], n = [h], o = [i];
6
- const _data = { version: "1.0", parameters: { Region: e, UseDualStack: f, UseFIPS: f, Endpoint: e }, rules: [{ conditions: [{ [q]: "aws.partition", [r]: [{ [s]: "Region" }], assign: a }], type: b, rules: [{ conditions: [{ [q]: "isSet", [r]: m }, { [q]: "parseURL", [r]: m, assign: "url" }], type: b, rules: [{ conditions: n, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: c }, { type: b, rules: [{ conditions: o, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: c }, { endpoint: { url: g, properties: j, headers: j }, type: d }] }] }, { conditions: [h, i], type: b, rules: [{ conditions: [k, l], type: b, rules: [{ endpoint: { url: "https://ingest.timestream-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: j, headers: j }, type: d }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: c }] }, { conditions: n, type: b, rules: [{ conditions: [k], type: b, rules: [{ endpoint: { url: "https://ingest.timestream-fips.{Region}.{PartitionResult#dnsSuffix}", properties: j, headers: j }, type: d }] }, { error: "FIPS is enabled but this partition does not support FIPS", type: c }] }, { conditions: o, type: b, rules: [{ conditions: [l], type: b, rules: [{ endpoint: { url: "https://ingest.timestream.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: j, headers: j }, type: d }] }, { error: "DualStack is enabled but this partition does not support DualStack", type: c }] }, { endpoint: { url: "https://ingest.timestream.{Region}.{PartitionResult#dnsSuffix}", properties: j, headers: j }, type: d }] }] };
4
+ const q = "required", r = "fn", s = "argv", t = "ref";
5
+ const a = "isSet", b = "tree", c = "error", d = "endpoint", e = "PartitionResult", f = { [q]: false, "type": "String" }, g = { [q]: true, "default": false, "type": "Boolean" }, h = { [t]: "Endpoint" }, i = { [r]: "booleanEquals", [s]: [{ [t]: "UseFIPS" }, true] }, j = { [r]: "booleanEquals", [s]: [{ [t]: "UseDualStack" }, true] }, k = {}, l = { [r]: "booleanEquals", [s]: [true, { [r]: "getAttr", [s]: [{ [t]: e }, "supportsFIPS"] }] }, m = { [r]: "booleanEquals", [s]: [true, { [r]: "getAttr", [s]: [{ [t]: e }, "supportsDualStack"] }] }, n = [i], o = [j], p = [{ [t]: "Region" }];
6
+ const _data = { version: "1.0", parameters: { Region: f, UseDualStack: g, UseFIPS: g, Endpoint: f }, rules: [{ conditions: [{ [r]: a, [s]: [h] }], type: b, rules: [{ conditions: n, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: c }, { type: b, rules: [{ conditions: o, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: c }, { endpoint: { url: h, properties: k, headers: k }, type: d }] }] }, { type: b, rules: [{ conditions: [{ [r]: a, [s]: p }], type: b, rules: [{ conditions: [{ [r]: "aws.partition", [s]: p, assign: e }], type: b, rules: [{ conditions: [i, j], type: b, rules: [{ conditions: [l, m], type: b, rules: [{ type: b, rules: [{ endpoint: { url: "https://ingest.timestream-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: k, headers: k }, type: d }] }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: c }] }, { conditions: n, type: b, rules: [{ conditions: [l], type: b, rules: [{ type: b, rules: [{ endpoint: { url: "https://ingest.timestream-fips.{Region}.{PartitionResult#dnsSuffix}", properties: k, headers: k }, type: d }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", type: c }] }, { conditions: o, type: b, rules: [{ conditions: [m], type: b, rules: [{ type: b, rules: [{ endpoint: { url: "https://ingest.timestream.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: k, headers: k }, type: d }] }] }, { error: "DualStack is enabled but this partition does not support DualStack", type: c }] }, { type: b, rules: [{ endpoint: { url: "https://ingest.timestream.{Region}.{PartitionResult#dnsSuffix}", properties: k, headers: k }, type: d }] }] }] }, { error: "Invalid Configuration: Missing Region", type: c }] }] };
7
7
  exports.ruleSet = _data;