aws-sdk-supplychain 1.15.0 → 1.16.0
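This release bumps GEM_VERSION to 1.16.0 and adds RBS signatures for the new data lake dataset, data integration flow, and tagging operations. A minimal Gemfile entry for picking up the release, assuming Bundler; the pessimistic version constraint is illustrative, not taken from this diff:

  # Gemfile
  gem 'aws-sdk-supplychain', '~> 1.16'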

@@ -54,7 +54,7 @@ module Aws::SupplyChain
  autoload :EndpointProvider, 'aws-sdk-supplychain/endpoint_provider'
  autoload :Endpoints, 'aws-sdk-supplychain/endpoints'
 
- GEM_VERSION = '1.15.0'
+ GEM_VERSION = '1.16.0'
 
  end
 
data/sig/client.rbs CHANGED
@@ -87,6 +87,112 @@ module Aws
  ) -> _CreateBillOfMaterialsImportJobResponseSuccess
  | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _CreateBillOfMaterialsImportJobResponseSuccess
 
+ interface _CreateDataIntegrationFlowResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::CreateDataIntegrationFlowResponse]
+ def instance_id: () -> ::String
+ def name: () -> ::String
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#create_data_integration_flow-instance_method
+ def create_data_integration_flow: (
+ instance_id: ::String,
+ name: ::String,
+ sources: Array[
+ {
+ source_type: ("S3" | "DATASET"),
+ source_name: ::String,
+ s3_source: {
+ bucket_name: ::String,
+ prefix: ::String,
+ options: {
+ file_type: ("CSV" | "PARQUET" | "JSON")?
+ }?
+ }?,
+ dataset_source: {
+ dataset_identifier: ::String,
+ options: {
+ load_type: ("INCREMENTAL" | "REPLACE")?,
+ dedupe_records: bool?
+ }?
+ }?
+ },
+ ],
+ transformation: {
+ transformation_type: ("SQL" | "NONE"),
+ sql_transformation: {
+ query: ::String
+ }?
+ },
+ target: {
+ target_type: ("S3" | "DATASET"),
+ s3_target: {
+ bucket_name: ::String,
+ prefix: ::String,
+ options: {
+ file_type: ("CSV" | "PARQUET" | "JSON")?
+ }?
+ }?,
+ dataset_target: {
+ dataset_identifier: ::String,
+ options: {
+ load_type: ("INCREMENTAL" | "REPLACE")?,
+ dedupe_records: bool?
+ }?
+ }?
+ },
+ ?tags: Hash[::String, ::String]
+ ) -> _CreateDataIntegrationFlowResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _CreateDataIntegrationFlowResponseSuccess
+
+ interface _CreateDataLakeDatasetResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::CreateDataLakeDatasetResponse]
+ def dataset: () -> Types::DataLakeDataset
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#create_data_lake_dataset-instance_method
+ def create_data_lake_dataset: (
+ instance_id: ::String,
+ namespace: ::String,
+ name: ::String,
+ ?schema: {
+ name: ::String,
+ fields: Array[
+ {
+ name: ::String,
+ type: ("INT" | "DOUBLE" | "STRING" | "TIMESTAMP"),
+ is_required: bool
+ },
+ ]
+ },
+ ?description: ::String,
+ ?tags: Hash[::String, ::String]
+ ) -> _CreateDataLakeDatasetResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _CreateDataLakeDatasetResponseSuccess
+
+ interface _DeleteDataIntegrationFlowResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::DeleteDataIntegrationFlowResponse]
+ def instance_id: () -> ::String
+ def name: () -> ::String
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#delete_data_integration_flow-instance_method
+ def delete_data_integration_flow: (
+ instance_id: ::String,
+ name: ::String
+ ) -> _DeleteDataIntegrationFlowResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _DeleteDataIntegrationFlowResponseSuccess
+
+ interface _DeleteDataLakeDatasetResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::DeleteDataLakeDatasetResponse]
+ def instance_id: () -> ::String
+ def namespace: () -> ::String
+ def name: () -> ::String
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#delete_data_lake_dataset-instance_method
+ def delete_data_lake_dataset: (
+ instance_id: ::String,
+ namespace: ::String,
+ name: ::String
+ ) -> _DeleteDataLakeDatasetResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _DeleteDataLakeDatasetResponseSuccess
+
  interface _GetBillOfMaterialsImportJobResponseSuccess
  include ::Seahorse::Client::_ResponseSuccess[Types::GetBillOfMaterialsImportJobResponse]
  def job: () -> Types::BillOfMaterialsImportJob
@@ -98,6 +204,66 @@ module Aws
  ) -> _GetBillOfMaterialsImportJobResponseSuccess
  | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _GetBillOfMaterialsImportJobResponseSuccess
 
+ interface _GetDataIntegrationFlowResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::GetDataIntegrationFlowResponse]
+ def flow: () -> Types::DataIntegrationFlow
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#get_data_integration_flow-instance_method
+ def get_data_integration_flow: (
+ instance_id: ::String,
+ name: ::String
+ ) -> _GetDataIntegrationFlowResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _GetDataIntegrationFlowResponseSuccess
+
+ interface _GetDataLakeDatasetResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::GetDataLakeDatasetResponse]
+ def dataset: () -> Types::DataLakeDataset
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#get_data_lake_dataset-instance_method
+ def get_data_lake_dataset: (
+ instance_id: ::String,
+ namespace: ::String,
+ name: ::String
+ ) -> _GetDataLakeDatasetResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _GetDataLakeDatasetResponseSuccess
+
+ interface _ListDataIntegrationFlowsResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::ListDataIntegrationFlowsResponse]
+ def flows: () -> ::Array[Types::DataIntegrationFlow]
+ def next_token: () -> ::String
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#list_data_integration_flows-instance_method
+ def list_data_integration_flows: (
+ instance_id: ::String,
+ ?next_token: ::String,
+ ?max_results: ::Integer
+ ) -> _ListDataIntegrationFlowsResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _ListDataIntegrationFlowsResponseSuccess
+
+ interface _ListDataLakeDatasetsResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::ListDataLakeDatasetsResponse]
+ def datasets: () -> ::Array[Types::DataLakeDataset]
+ def next_token: () -> ::String
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#list_data_lake_datasets-instance_method
+ def list_data_lake_datasets: (
+ instance_id: ::String,
+ namespace: ::String,
+ ?next_token: ::String,
+ ?max_results: ::Integer
+ ) -> _ListDataLakeDatasetsResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _ListDataLakeDatasetsResponseSuccess
+
+ interface _ListTagsForResourceResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::ListTagsForResourceResponse]
+ def tags: () -> ::Hash[::String, ::String]
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#list_tags_for_resource-instance_method
+ def list_tags_for_resource: (
+ resource_arn: ::String
+ ) -> _ListTagsForResourceResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _ListTagsForResourceResponseSuccess
+
  interface _SendDataIntegrationEventResponseSuccess
  include ::Seahorse::Client::_ResponseSuccess[Types::SendDataIntegrationEventResponse]
  def event_id: () -> ::String
@@ -112,6 +278,93 @@ module Aws
  ?client_token: ::String
  ) -> _SendDataIntegrationEventResponseSuccess
  | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _SendDataIntegrationEventResponseSuccess
+
+ interface _TagResourceResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::TagResourceResponse]
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#tag_resource-instance_method
+ def tag_resource: (
+ resource_arn: ::String,
+ tags: Hash[::String, ::String]
+ ) -> _TagResourceResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _TagResourceResponseSuccess
+
+ interface _UntagResourceResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::UntagResourceResponse]
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#untag_resource-instance_method
+ def untag_resource: (
+ resource_arn: ::String,
+ tag_keys: Array[::String]
+ ) -> _UntagResourceResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _UntagResourceResponseSuccess
+
+ interface _UpdateDataIntegrationFlowResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::UpdateDataIntegrationFlowResponse]
+ def flow: () -> Types::DataIntegrationFlow
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#update_data_integration_flow-instance_method
+ def update_data_integration_flow: (
+ instance_id: ::String,
+ name: ::String,
+ ?sources: Array[
+ {
+ source_type: ("S3" | "DATASET"),
+ source_name: ::String,
+ s3_source: {
+ bucket_name: ::String,
+ prefix: ::String,
+ options: {
+ file_type: ("CSV" | "PARQUET" | "JSON")?
+ }?
+ }?,
+ dataset_source: {
+ dataset_identifier: ::String,
+ options: {
+ load_type: ("INCREMENTAL" | "REPLACE")?,
+ dedupe_records: bool?
+ }?
+ }?
+ },
+ ],
+ ?transformation: {
+ transformation_type: ("SQL" | "NONE"),
+ sql_transformation: {
+ query: ::String
+ }?
+ },
+ ?target: {
+ target_type: ("S3" | "DATASET"),
+ s3_target: {
+ bucket_name: ::String,
+ prefix: ::String,
+ options: {
+ file_type: ("CSV" | "PARQUET" | "JSON")?
+ }?
+ }?,
+ dataset_target: {
+ dataset_identifier: ::String,
+ options: {
+ load_type: ("INCREMENTAL" | "REPLACE")?,
+ dedupe_records: bool?
+ }?
+ }?
+ }
+ ) -> _UpdateDataIntegrationFlowResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _UpdateDataIntegrationFlowResponseSuccess
+
+ interface _UpdateDataLakeDatasetResponseSuccess
+ include ::Seahorse::Client::_ResponseSuccess[Types::UpdateDataLakeDatasetResponse]
+ def dataset: () -> Types::DataLakeDataset
+ end
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/SupplyChain/Client.html#update_data_lake_dataset-instance_method
+ def update_data_lake_dataset: (
+ instance_id: ::String,
+ namespace: ::String,
+ name: ::String,
+ ?description: ::String
+ ) -> _UpdateDataLakeDatasetResponseSuccess
+ | (Hash[Symbol, untyped] params, ?Hash[Symbol, untyped] options) -> _UpdateDataLakeDatasetResponseSuccess
  end
  end
  end
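The new operations above can be exercised roughly as follows. This is a sketch based only on the RBS shapes in this diff: the region, instance_id, namespace, bucket, dataset, and flow names are placeholder assumptions, and dataset_identifier is assumed to accept the dataset ARN returned by create_data_lake_dataset.

  require 'aws-sdk-supplychain'

  client = Aws::SupplyChain::Client.new(region: 'us-east-1') # region is an assumption

  # Create a custom data lake dataset with an explicit schema.
  dataset_resp = client.create_data_lake_dataset(
    instance_id: 'my-instance-id',   # placeholder
    namespace: 'default',            # placeholder namespace
    name: 'my_orders',
    schema: {
      name: 'MyOrders',
      fields: [
        { name: 'order_id', type: 'STRING', is_required: true },
        { name: 'quantity', type: 'INT', is_required: false }
      ]
    },
    tags: { 'team' => 'supply-chain' }
  )

  # Create a flow that loads CSV objects from S3 into that dataset via a SQL transformation.
  flow_resp = client.create_data_integration_flow(
    instance_id: 'my-instance-id',
    name: 'orders-from-s3',
    sources: [
      {
        source_type: 'S3',
        source_name: 'orders_source',
        s3_source: { bucket_name: 'my-bucket', prefix: 'orders/', options: { file_type: 'CSV' } }
      }
    ],
    transformation: {
      transformation_type: 'SQL',
      sql_transformation: { query: 'SELECT * FROM orders_source' }
    },
    target: {
      target_type: 'DATASET',
      dataset_target: { dataset_identifier: dataset_resp.dataset.arn } # assumed to take the dataset ARN
    }
  )
  puts flow_resp.name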
data/sig/types.rbs CHANGED
@@ -39,6 +39,162 @@ module Aws::SupplyChain
  SENSITIVE: []
  end
 
+ class CreateDataIntegrationFlowRequest
+ attr_accessor instance_id: ::String
+ attr_accessor name: ::String
+ attr_accessor sources: ::Array[Types::DataIntegrationFlowSource]
+ attr_accessor transformation: Types::DataIntegrationFlowTransformation
+ attr_accessor target: Types::DataIntegrationFlowTarget
+ attr_accessor tags: ::Hash[::String, ::String]
+ SENSITIVE: []
+ end
+
+ class CreateDataIntegrationFlowResponse
+ attr_accessor instance_id: ::String
+ attr_accessor name: ::String
+ SENSITIVE: []
+ end
+
+ class CreateDataLakeDatasetRequest
+ attr_accessor instance_id: ::String
+ attr_accessor namespace: ::String
+ attr_accessor name: ::String
+ attr_accessor schema: Types::DataLakeDatasetSchema
+ attr_accessor description: ::String
+ attr_accessor tags: ::Hash[::String, ::String]
+ SENSITIVE: []
+ end
+
+ class CreateDataLakeDatasetResponse
+ attr_accessor dataset: Types::DataLakeDataset
+ SENSITIVE: []
+ end
+
+ class DataIntegrationFlow
+ attr_accessor instance_id: ::String
+ attr_accessor name: ::String
+ attr_accessor sources: ::Array[Types::DataIntegrationFlowSource]
+ attr_accessor transformation: Types::DataIntegrationFlowTransformation
+ attr_accessor target: Types::DataIntegrationFlowTarget
+ attr_accessor created_time: ::Time
+ attr_accessor last_modified_time: ::Time
+ SENSITIVE: []
+ end
+
+ class DataIntegrationFlowDatasetOptions
+ attr_accessor load_type: ("INCREMENTAL" | "REPLACE")
+ attr_accessor dedupe_records: bool
+ SENSITIVE: []
+ end
+
+ class DataIntegrationFlowDatasetSourceConfiguration
+ attr_accessor dataset_identifier: ::String
+ attr_accessor options: Types::DataIntegrationFlowDatasetOptions
+ SENSITIVE: []
+ end
+
+ class DataIntegrationFlowDatasetTargetConfiguration
+ attr_accessor dataset_identifier: ::String
+ attr_accessor options: Types::DataIntegrationFlowDatasetOptions
+ SENSITIVE: []
+ end
+
+ class DataIntegrationFlowS3Options
+ attr_accessor file_type: ("CSV" | "PARQUET" | "JSON")
+ SENSITIVE: []
+ end
+
+ class DataIntegrationFlowS3SourceConfiguration
+ attr_accessor bucket_name: ::String
+ attr_accessor prefix: ::String
+ attr_accessor options: Types::DataIntegrationFlowS3Options
+ SENSITIVE: []
+ end
+
+ class DataIntegrationFlowS3TargetConfiguration
+ attr_accessor bucket_name: ::String
+ attr_accessor prefix: ::String
+ attr_accessor options: Types::DataIntegrationFlowS3Options
+ SENSITIVE: []
+ end
+
+ class DataIntegrationFlowSQLTransformationConfiguration
+ attr_accessor query: ::String
+ SENSITIVE: []
+ end
+
+ class DataIntegrationFlowSource
+ attr_accessor source_type: ("S3" | "DATASET")
+ attr_accessor source_name: ::String
+ attr_accessor s3_source: Types::DataIntegrationFlowS3SourceConfiguration
+ attr_accessor dataset_source: Types::DataIntegrationFlowDatasetSourceConfiguration
+ SENSITIVE: []
+ end
+
+ class DataIntegrationFlowTarget
+ attr_accessor target_type: ("S3" | "DATASET")
+ attr_accessor s3_target: Types::DataIntegrationFlowS3TargetConfiguration
+ attr_accessor dataset_target: Types::DataIntegrationFlowDatasetTargetConfiguration
+ SENSITIVE: []
+ end
+
+ class DataIntegrationFlowTransformation
+ attr_accessor transformation_type: ("SQL" | "NONE")
+ attr_accessor sql_transformation: Types::DataIntegrationFlowSQLTransformationConfiguration
+ SENSITIVE: []
+ end
+
+ class DataLakeDataset
+ attr_accessor instance_id: ::String
+ attr_accessor namespace: ::String
+ attr_accessor name: ::String
+ attr_accessor arn: ::String
+ attr_accessor schema: Types::DataLakeDatasetSchema
+ attr_accessor description: ::String
+ attr_accessor created_time: ::Time
+ attr_accessor last_modified_time: ::Time
+ SENSITIVE: []
+ end
+
+ class DataLakeDatasetSchema
+ attr_accessor name: ::String
+ attr_accessor fields: ::Array[Types::DataLakeDatasetSchemaField]
+ SENSITIVE: []
+ end
+
+ class DataLakeDatasetSchemaField
+ attr_accessor name: ::String
+ attr_accessor type: ("INT" | "DOUBLE" | "STRING" | "TIMESTAMP")
+ attr_accessor is_required: bool
+ SENSITIVE: []
+ end
+
+ class DeleteDataIntegrationFlowRequest
+ attr_accessor instance_id: ::String
+ attr_accessor name: ::String
+ SENSITIVE: []
+ end
+
+ class DeleteDataIntegrationFlowResponse
+ attr_accessor instance_id: ::String
+ attr_accessor name: ::String
+ SENSITIVE: []
+ end
+
+ class DeleteDataLakeDatasetRequest
+ attr_accessor instance_id: ::String
+ attr_accessor namespace: ::String
+ attr_accessor name: ::String
+ SENSITIVE: []
+ end
+
+ class DeleteDataLakeDatasetResponse
+ attr_accessor instance_id: ::String
+ attr_accessor namespace: ::String
+ attr_accessor name: ::String
+ SENSITIVE: []
+ end
+
  class GetBillOfMaterialsImportJobRequest
  attr_accessor instance_id: ::String
  attr_accessor job_id: ::String
@@ -50,11 +206,71 @@ module Aws::SupplyChain
  SENSITIVE: []
  end
 
+ class GetDataIntegrationFlowRequest
+ attr_accessor instance_id: ::String
+ attr_accessor name: ::String
+ SENSITIVE: []
+ end
+
+ class GetDataIntegrationFlowResponse
+ attr_accessor flow: Types::DataIntegrationFlow
+ SENSITIVE: []
+ end
+
+ class GetDataLakeDatasetRequest
+ attr_accessor instance_id: ::String
+ attr_accessor namespace: ::String
+ attr_accessor name: ::String
+ SENSITIVE: []
+ end
+
+ class GetDataLakeDatasetResponse
+ attr_accessor dataset: Types::DataLakeDataset
+ SENSITIVE: []
+ end
+
  class InternalServerException
  attr_accessor message: ::String
  SENSITIVE: []
  end
 
+ class ListDataIntegrationFlowsRequest
+ attr_accessor instance_id: ::String
+ attr_accessor next_token: ::String
+ attr_accessor max_results: ::Integer
+ SENSITIVE: []
+ end
+
+ class ListDataIntegrationFlowsResponse
+ attr_accessor flows: ::Array[Types::DataIntegrationFlow]
+ attr_accessor next_token: ::String
+ SENSITIVE: []
+ end
+
+ class ListDataLakeDatasetsRequest
+ attr_accessor instance_id: ::String
+ attr_accessor namespace: ::String
+ attr_accessor next_token: ::String
+ attr_accessor max_results: ::Integer
+ SENSITIVE: []
+ end
+
+ class ListDataLakeDatasetsResponse
+ attr_accessor datasets: ::Array[Types::DataLakeDataset]
+ attr_accessor next_token: ::String
+ SENSITIVE: []
+ end
+
+ class ListTagsForResourceRequest
+ attr_accessor resource_arn: ::String
+ SENSITIVE: []
+ end
+
+ class ListTagsForResourceResponse
+ attr_accessor tags: ::Hash[::String, ::String]
+ SENSITIVE: []
+ end
+
  class ResourceNotFoundException
  attr_accessor message: ::String
  SENSITIVE: []
@@ -80,11 +296,56 @@ module Aws::SupplyChain
  SENSITIVE: []
  end
 
+ class TagResourceRequest
+ attr_accessor resource_arn: ::String
+ attr_accessor tags: ::Hash[::String, ::String]
+ SENSITIVE: []
+ end
+
+ class TagResourceResponse < Aws::EmptyStructure
+ end
+
  class ThrottlingException
  attr_accessor message: ::String
  SENSITIVE: []
  end
 
+ class UntagResourceRequest
+ attr_accessor resource_arn: ::String
+ attr_accessor tag_keys: ::Array[::String]
+ SENSITIVE: []
+ end
+
+ class UntagResourceResponse < Aws::EmptyStructure
+ end
+
+ class UpdateDataIntegrationFlowRequest
+ attr_accessor instance_id: ::String
+ attr_accessor name: ::String
+ attr_accessor sources: ::Array[Types::DataIntegrationFlowSource]
+ attr_accessor transformation: Types::DataIntegrationFlowTransformation
+ attr_accessor target: Types::DataIntegrationFlowTarget
+ SENSITIVE: []
+ end
+
+ class UpdateDataIntegrationFlowResponse
+ attr_accessor flow: Types::DataIntegrationFlow
+ SENSITIVE: []
+ end
+
+ class UpdateDataLakeDatasetRequest
+ attr_accessor instance_id: ::String
+ attr_accessor namespace: ::String
+ attr_accessor name: ::String
+ attr_accessor description: ::String
+ SENSITIVE: []
+ end
+
+ class UpdateDataLakeDatasetResponse
+ attr_accessor dataset: Types::DataLakeDataset
+ SENSITIVE: []
+ end
+
  class ValidationException
  attr_accessor message: ::String
  SENSITIVE: []
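The ListDataIntegrationFlowsRequest/Response shapes above expose next_token and max_results, so results can be paged through manually. A minimal sketch, with a placeholder instance_id and assumed region:

  require 'aws-sdk-supplychain'

  client = Aws::SupplyChain::Client.new(region: 'us-east-1') # region is an assumption

  flows = []
  next_token = nil
  loop do
    params = { instance_id: 'my-instance-id', max_results: 20 } # placeholder instance id
    params[:next_token] = next_token if next_token
    resp = client.list_data_integration_flows(params)
    flows.concat(resp.flows)
    next_token = resp.next_token
    break if next_token.nil? || next_token.empty?
  end

  flows.each { |f| puts "#{f.name} last modified #{f.last_modified_time}" }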
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: aws-sdk-supplychain
  version: !ruby/object:Gem::Version
- version: 1.15.0
+ version: 1.16.0
  platform: ruby
  authors:
  - Amazon Web Services
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-09-24 00:00:00.000000000 Z
+ date: 2024-09-30 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aws-sdk-core