aws-sdk-kafkaconnect 1.7.0 → 1.9.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 2e92ba6eea653f37be83f376039b0a23ba6d9fe55f5f0098c6bf6127b864ec4a
- data.tar.gz: fe87eb96820c6a53fb6f56b079c9a7999ab7862b33b68760d2456b8a623007fb
+ metadata.gz: 32e20776ecb6f2c68a1f24cc97ea77c22ac7ac70a5368e15a6a4fceca0cdae5b
+ data.tar.gz: a6f04b6bfa92d7b852154953bce77a7bf0c234167c3934efe752cf3e8dac3c49
  SHA512:
- metadata.gz: 2c08c0012822ed07ecfde59a549ab12d4c6deb4609c0b886276fef2abb047923fae3af2467d4be908668ff6f343e913b7d40b09ae15a75521befa01c15412c67
- data.tar.gz: eaffceaf63f6cf1aa62a6d336a862eb4962e77167c8533a28a1c3784d30b4a91dbc6a387a9445b218453d977e516675e8a59f179085ab45dddf18a7474c14bd0
+ metadata.gz: 17529eeed739e1fcea4ff838e34a97e98015e8aa0cd40cda4a1d2362d489e889af93741002e8c07f61b88a6415b6add62f5c78b99687355f632b5154d2e7cf0b
+ data.tar.gz: e47663d7782668e52540e564e0806041132c433545e1a485a49e8ced2c0106ecb9f42f237aa418f1660866b6886ee1ee26440f88523f3dd77cf6b538d4ef2296
data/CHANGELOG.md CHANGED
@@ -1,6 +1,18 @@
  Unreleased Changes
  ------------------

+ 1.9.0 (2023-01-18)
+ ------------------
+
+ * Feature - Code Generated Changes, see `./build_tools` or `aws-sdk-core`'s CHANGELOG.md for details.
+
+ * Issue - Replace runtime endpoint resolution approach with generated ruby code.
+
+ 1.8.0 (2022-10-25)
+ ------------------
+
+ * Feature - Code Generated Changes, see `./build_tools` or `aws-sdk-core`'s CHANGELOG.md for details.
+
  1.7.0 (2022-03-01)
  ------------------
 
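The 1.9.0 entry above replaces runtime endpoint resolution with generated Ruby code; the new endpoint_parameters.rb, endpoint_provider.rb, endpoints.rb and plugins/endpoints.rb files appear further down in this diff. A minimal sketch of exercising the generated resolver directly, assuming the standard aws partition (the resulting URL is illustrative, not taken from this diff):

    require 'aws-sdk-kafkaconnect'

    # Build the parameters the generated resolver expects and resolve an
    # endpoint locally, without making any API call.
    params = Aws::KafkaConnect::EndpointParameters.new(
      region: 'us-east-1',
      use_dual_stack: false,
      use_fips: false
    )
    endpoint = Aws::KafkaConnect::EndpointProvider.new.resolve_endpoint(params)
    endpoint.url # expected to be "https://kafkaconnect.us-east-1.amazonaws.com"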
data/VERSION CHANGED
@@ -1 +1 @@
- 1.7.0
+ 1.9.0
data/lib/aws-sdk-kafkaconnect/client.rb CHANGED
@@ -30,7 +30,7 @@ require 'aws-sdk-core/plugins/http_checksum.rb'
  require 'aws-sdk-core/plugins/checksum_algorithm.rb'
  require 'aws-sdk-core/plugins/defaults_mode.rb'
  require 'aws-sdk-core/plugins/recursion_detection.rb'
- require 'aws-sdk-core/plugins/signature_v4.rb'
+ require 'aws-sdk-core/plugins/sign.rb'
  require 'aws-sdk-core/plugins/protocols/rest_json.rb'

  Aws::Plugins::GlobalConfiguration.add_identifier(:kafkaconnect)
@@ -79,8 +79,9 @@ module Aws::KafkaConnect
  add_plugin(Aws::Plugins::ChecksumAlgorithm)
  add_plugin(Aws::Plugins::DefaultsMode)
  add_plugin(Aws::Plugins::RecursionDetection)
- add_plugin(Aws::Plugins::SignatureV4)
+ add_plugin(Aws::Plugins::Sign)
  add_plugin(Aws::Plugins::Protocols::RestJson)
+ add_plugin(Aws::KafkaConnect::Plugins::Endpoints)

  # @overload initialize(options)
  # @param [Hash] options
@@ -287,6 +288,19 @@ module Aws::KafkaConnect
  # ** Please note ** When response stubbing is enabled, no HTTP
  # requests are made, and retries are disabled.
  #
+ # @option options [Aws::TokenProvider] :token_provider
+ # A Bearer Token Provider. This can be an instance of any one of the
+ # following classes:
+ #
+ # * `Aws::StaticTokenProvider` - Used for configuring static, non-refreshing
+ # tokens.
+ #
+ # * `Aws::SSOTokenProvider` - Used for loading tokens from AWS SSO using an
+ # access token generated from `aws login`.
+ #
+ # When `:token_provider` is not configured directly, the `Aws::TokenProviderChain`
+ # will be used to search for tokens configured for your profile in shared configuration files.
+ #
  # @option options [Boolean] :use_dualstack_endpoint
  # When set to `true`, dualstack enabled endpoints (with `.aws` TLD)
  # will be used if available.
@@ -300,6 +314,9 @@ module Aws::KafkaConnect
  # When `true`, request parameters are validated before
  # sending the request.
  #
+ # @option options [Aws::KafkaConnect::EndpointProvider] :endpoint_provider
+ # The endpoint provider used to resolve endpoints. Any object that responds to `#resolve_endpoint(parameters)` where `parameters` is a Struct similar to `Aws::KafkaConnect::EndpointParameters`
+ #
  # @option options [URI::HTTP,String] :http_proxy A proxy to send
  # requests through. Formatted like 'http://proxy.com:123'.
  #
@@ -1071,7 +1088,7 @@ module Aws::KafkaConnect
  params: params,
  config: config)
  context[:gem_name] = 'aws-sdk-kafkaconnect'
- context[:gem_version] = '1.7.0'
+ context[:gem_version] = '1.9.0'
  Seahorse::Client::Request.new(handlers, context)
  end
 
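The client.rb hunks above document the new :endpoint_provider option. A hedged sketch of supplying a custom provider; FixedEndpointProvider and its URL are hypothetical, only the #resolve_endpoint(parameters) contract comes from the documentation above:

    require 'aws-sdk-kafkaconnect'

    # Hypothetical provider that routes every request to one fixed URL while
    # still satisfying the documented #resolve_endpoint(parameters) interface.
    class FixedEndpointProvider
      def resolve_endpoint(_parameters)
        Aws::Endpoints::Endpoint.new(
          url: 'https://vpce-0123-example.kafkaconnect.us-east-1.vpce.amazonaws.com',
          headers: {},
          properties: {}
        )
      end
    end

    client = Aws::KafkaConnect::Client.new(
      region: 'us-east-1',
      endpoint_provider: FixedEndpointProvider.new
    )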
data/lib/aws-sdk-kafkaconnect/endpoint_parameters.rb ADDED
@@ -0,0 +1,66 @@
+ # frozen_string_literal: true
+
+ # WARNING ABOUT GENERATED CODE
+ #
+ # This file is generated. See the contributing guide for more information:
+ # https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
+ #
+ # WARNING ABOUT GENERATED CODE
+
+ module Aws::KafkaConnect
+ # Endpoint parameters used to influence endpoints per request.
+ #
+ # @!attribute region
+ # The AWS region used to dispatch the request.
+ #
+ # @return [String]
+ #
+ # @!attribute use_dual_stack
+ # When true, use the dual-stack endpoint. If the configured endpoint does not support dual-stack, dispatching the request MAY return an error.
+ #
+ # @return [Boolean]
+ #
+ # @!attribute use_fips
+ # When true, send this request to the FIPS-compliant regional endpoint. If the configured endpoint does not have a FIPS compliant endpoint, dispatching the request will return an error.
+ #
+ # @return [Boolean]
+ #
+ # @!attribute endpoint
+ # Override the endpoint used to send this request
+ #
+ # @return [String]
+ #
+ EndpointParameters = Struct.new(
+ :region,
+ :use_dual_stack,
+ :use_fips,
+ :endpoint,
+ ) do
+ include Aws::Structure
+
+ # @api private
+ class << self
+ PARAM_MAP = {
+ 'Region' => :region,
+ 'UseDualStack' => :use_dual_stack,
+ 'UseFIPS' => :use_fips,
+ 'Endpoint' => :endpoint,
+ }.freeze
+ end
+
+ def initialize(options = {})
+ self[:region] = options[:region]
+ self[:use_dual_stack] = options[:use_dual_stack]
+ self[:use_dual_stack] = false if self[:use_dual_stack].nil?
+ if self[:use_dual_stack].nil?
+ raise ArgumentError, "Missing required EndpointParameter: :use_dual_stack"
+ end
+ self[:use_fips] = options[:use_fips]
+ self[:use_fips] = false if self[:use_fips].nil?
+ if self[:use_fips].nil?
+ raise ArgumentError, "Missing required EndpointParameter: :use_fips"
+ end
+ self[:endpoint] = options[:endpoint]
+ end
+ end
+ end
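A quick sketch of how this generated struct behaves: booleans left unspecified are defaulted to false by the initializer above, and the PARAM_MAP keys mirror the rule-set parameter names. The region value below is only an example:

    params = Aws::KafkaConnect::EndpointParameters.new(region: 'eu-west-1')
    params.use_dual_stack # => false (defaulted in the generated initializer)
    params.use_fips       # => false
    params.endpoint       # => nil unless an endpoint override is supplied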
data/lib/aws-sdk-kafkaconnect/endpoint_provider.rb ADDED
@@ -0,0 +1,51 @@
+ # frozen_string_literal: true
+
+ # WARNING ABOUT GENERATED CODE
+ #
+ # This file is generated. See the contributing guide for more information:
+ # https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
+ #
+ # WARNING ABOUT GENERATED CODE
+
+ module Aws::KafkaConnect
+ class EndpointProvider
+ def resolve_endpoint(parameters)
+ region = parameters.region
+ use_dual_stack = parameters.use_dual_stack
+ use_fips = parameters.use_fips
+ endpoint = parameters.endpoint
+ if (partition_result = Aws::Endpoints::Matchers.aws_partition(region))
+ if Aws::Endpoints::Matchers.set?(endpoint) && (url = Aws::Endpoints::Matchers.parse_url(endpoint))
+ if Aws::Endpoints::Matchers.boolean_equals?(use_fips, true)
+ raise ArgumentError, "Invalid Configuration: FIPS and custom endpoint are not supported"
+ end
+ if Aws::Endpoints::Matchers.boolean_equals?(use_dual_stack, true)
+ raise ArgumentError, "Invalid Configuration: Dualstack and custom endpoint are not supported"
+ end
+ return Aws::Endpoints::Endpoint.new(url: endpoint, headers: {}, properties: {})
+ end
+ if Aws::Endpoints::Matchers.boolean_equals?(use_fips, true) && Aws::Endpoints::Matchers.boolean_equals?(use_dual_stack, true)
+ if Aws::Endpoints::Matchers.boolean_equals?(true, Aws::Endpoints::Matchers.attr(partition_result, "supportsFIPS")) && Aws::Endpoints::Matchers.boolean_equals?(true, Aws::Endpoints::Matchers.attr(partition_result, "supportsDualStack"))
+ return Aws::Endpoints::Endpoint.new(url: "https://kafkaconnect-fips.#{region}.#{partition_result['dualStackDnsSuffix']}", headers: {}, properties: {})
+ end
+ raise ArgumentError, "FIPS and DualStack are enabled, but this partition does not support one or both"
+ end
+ if Aws::Endpoints::Matchers.boolean_equals?(use_fips, true)
+ if Aws::Endpoints::Matchers.boolean_equals?(true, Aws::Endpoints::Matchers.attr(partition_result, "supportsFIPS"))
+ return Aws::Endpoints::Endpoint.new(url: "https://kafkaconnect-fips.#{region}.#{partition_result['dnsSuffix']}", headers: {}, properties: {})
+ end
+ raise ArgumentError, "FIPS is enabled but this partition does not support FIPS"
+ end
+ if Aws::Endpoints::Matchers.boolean_equals?(use_dual_stack, true)
+ if Aws::Endpoints::Matchers.boolean_equals?(true, Aws::Endpoints::Matchers.attr(partition_result, "supportsDualStack"))
+ return Aws::Endpoints::Endpoint.new(url: "https://kafkaconnect.#{region}.#{partition_result['dualStackDnsSuffix']}", headers: {}, properties: {})
+ end
+ raise ArgumentError, "DualStack is enabled but this partition does not support DualStack"
+ end
+ return Aws::Endpoints::Endpoint.new(url: "https://kafkaconnect.#{region}.#{partition_result['dnsSuffix']}", headers: {}, properties: {})
+ end
+ raise ArgumentError, 'No endpoint could be resolved'
+
+ end
+ end
+ end
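Two behaviours of the generated rules above, shown as a hedged sketch (the URL is the expected shape for the standard aws partition; partition metadata actually comes from aws-sdk-core at runtime):

    provider = Aws::KafkaConnect::EndpointProvider.new

    fips = provider.resolve_endpoint(
      Aws::KafkaConnect::EndpointParameters.new(region: 'us-west-2', use_fips: true)
    )
    fips.url # expected: "https://kafkaconnect-fips.us-west-2.amazonaws.com"

    # A custom endpoint cannot be combined with FIPS (or dual-stack):
    provider.resolve_endpoint(
      Aws::KafkaConnect::EndpointParameters.new(
        region: 'us-west-2', use_fips: true, endpoint: 'https://example.com'
      )
    ) # raises ArgumentError, "Invalid Configuration: FIPS and custom endpoint are not supported"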
data/lib/aws-sdk-kafkaconnect/endpoints.rb ADDED
@@ -0,0 +1,183 @@
+ # frozen_string_literal: true
+
+ # WARNING ABOUT GENERATED CODE
+ #
+ # This file is generated. See the contributing guide for more information:
+ # https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
+ #
+ # WARNING ABOUT GENERATED CODE
+
+
+ module Aws::KafkaConnect
+ module Endpoints
+
+ class CreateConnector
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ class CreateCustomPlugin
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ class CreateWorkerConfiguration
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ class DeleteConnector
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ class DeleteCustomPlugin
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ class DescribeConnector
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ class DescribeCustomPlugin
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ class DescribeWorkerConfiguration
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ class ListConnectors
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ class ListCustomPlugins
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ class ListWorkerConfigurations
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ class UpdateConnector
+ def self.build(context)
+ unless context.config.regional_endpoint
+ endpoint = context.config.endpoint.to_s
+ end
+ Aws::KafkaConnect::EndpointParameters.new(
+ region: context.config.region,
+ use_dual_stack: context.config.use_dualstack_endpoint,
+ use_fips: context.config.use_fips_endpoint,
+ endpoint: endpoint,
+ )
+ end
+ end
+
+ end
+ end
data/lib/aws-sdk-kafkaconnect/plugins/endpoints.rb ADDED
@@ -0,0 +1,92 @@
+ # frozen_string_literal: true
+
+ # WARNING ABOUT GENERATED CODE
+ #
+ # This file is generated. See the contributing guide for more information:
+ # https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
+ #
+ # WARNING ABOUT GENERATED CODE
+
+
+ module Aws::KafkaConnect
+ module Plugins
+ class Endpoints < Seahorse::Client::Plugin
+ option(
+ :endpoint_provider,
+ doc_type: 'Aws::KafkaConnect::EndpointProvider',
+ docstring: 'The endpoint provider used to resolve endpoints. Any '\
+ 'object that responds to `#resolve_endpoint(parameters)` '\
+ 'where `parameters` is a Struct similar to '\
+ '`Aws::KafkaConnect::EndpointParameters`'
+ ) do |cfg|
+ Aws::KafkaConnect::EndpointProvider.new
+ end
+
+ # @api private
+ class Handler < Seahorse::Client::Handler
+ def call(context)
+ # If endpoint was discovered, do not resolve or apply the endpoint.
+ unless context[:discovered_endpoint]
+ params = parameters_for_operation(context)
+ endpoint = context.config.endpoint_provider.resolve_endpoint(params)
+
+ context.http_request.endpoint = endpoint.url
+ apply_endpoint_headers(context, endpoint.headers)
+ end
+
+ context[:endpoint_params] = params
+ context[:auth_scheme] =
+ Aws::Endpoints.resolve_auth_scheme(context, endpoint)
+
+ @handler.call(context)
+ end
+
+ private
+
+ def apply_endpoint_headers(context, headers)
+ headers.each do |key, values|
+ value = values
+ .compact
+ .map { |s| Seahorse::Util.escape_header_list_string(s.to_s) }
+ .join(',')
+
+ context.http_request.headers[key] = value
+ end
+ end
+
+ def parameters_for_operation(context)
+ case context.operation_name
+ when :create_connector
+ Aws::KafkaConnect::Endpoints::CreateConnector.build(context)
+ when :create_custom_plugin
+ Aws::KafkaConnect::Endpoints::CreateCustomPlugin.build(context)
+ when :create_worker_configuration
+ Aws::KafkaConnect::Endpoints::CreateWorkerConfiguration.build(context)
+ when :delete_connector
+ Aws::KafkaConnect::Endpoints::DeleteConnector.build(context)
+ when :delete_custom_plugin
+ Aws::KafkaConnect::Endpoints::DeleteCustomPlugin.build(context)
+ when :describe_connector
+ Aws::KafkaConnect::Endpoints::DescribeConnector.build(context)
+ when :describe_custom_plugin
+ Aws::KafkaConnect::Endpoints::DescribeCustomPlugin.build(context)
+ when :describe_worker_configuration
+ Aws::KafkaConnect::Endpoints::DescribeWorkerConfiguration.build(context)
+ when :list_connectors
+ Aws::KafkaConnect::Endpoints::ListConnectors.build(context)
+ when :list_custom_plugins
+ Aws::KafkaConnect::Endpoints::ListCustomPlugins.build(context)
+ when :list_worker_configurations
+ Aws::KafkaConnect::Endpoints::ListWorkerConfigurations.build(context)
+ when :update_connector
+ Aws::KafkaConnect::Endpoints::UpdateConnector.build(context)
+ end
+ end
+ end
+
+ def add_handlers(handlers, _config)
+ handlers.add(Handler, step: :build, priority: 75)
+ end
+ end
+ end
+ end
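The plugin above registers its Handler at the :build step (priority 75), so endpoint resolution happens before the request is signed. A small sketch, assuming stubbed responses so no HTTP call is made:

    require 'aws-sdk-kafkaconnect'

    # Registered via add_plugin in client.rb (see the hunk earlier in this diff).
    Aws::KafkaConnect::Client.plugins.include?(Aws::KafkaConnect::Plugins::Endpoints) # => true

    client = Aws::KafkaConnect::Client.new(region: 'us-east-1', stub_responses: true)
    resp = client.list_connectors
    resp.context[:endpoint_params] # => the EndpointParameters the handler resolved against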
data/lib/aws-sdk-kafkaconnect/types.rb CHANGED
@@ -13,17 +13,6 @@ module Aws::KafkaConnect
  # The details of the Apache Kafka cluster to which the connector is
  # connected.
  #
- # @note When making an API call, you may pass ApacheKafkaCluster
- # data as a hash:
- #
- # {
- # bootstrap_servers: "__string", # required
- # vpc: { # required
- # security_groups: ["__string"],
- # subnets: ["__string"], # required
- # },
- # }
- #
  # @!attribute [rw] bootstrap_servers
  # The bootstrap servers of the cluster.
  # @return [String]
@@ -65,21 +54,6 @@ module Aws::KafkaConnect

  # Specifies how the connector scales.
  #
- # @note When making an API call, you may pass AutoScaling
- # data as a hash:
- #
- # {
- # max_worker_count: 1, # required
- # mcu_count: 1, # required
- # min_worker_count: 1, # required
- # scale_in_policy: {
- # cpu_utilization_percentage: 1, # required
- # },
- # scale_out_policy: {
- # cpu_utilization_percentage: 1, # required
- # },
- # }
- #
  # @!attribute [rw] max_worker_count
  # The maximum number of workers allocated to the connector.
  # @return [Integer]
@@ -150,21 +124,6 @@ module Aws::KafkaConnect

  # The updates to the auto scaling parameters for the connector.
  #
- # @note When making an API call, you may pass AutoScalingUpdate
- # data as a hash:
- #
- # {
- # max_worker_count: 1, # required
- # mcu_count: 1, # required
- # min_worker_count: 1, # required
- # scale_in_policy: { # required
- # cpu_utilization_percentage: 1, # required
- # },
- # scale_out_policy: { # required
- # cpu_utilization_percentage: 1, # required
- # },
- # }
- #
  # @!attribute [rw] max_worker_count
  # The target maximum number of workers allocated to the connector.
  # @return [Integer]
@@ -215,27 +174,6 @@ module Aws::KafkaConnect
  # Information about the capacity of the connector, whether it is auto
  # scaled or provisioned.
  #
- # @note When making an API call, you may pass Capacity
- # data as a hash:
- #
- # {
- # auto_scaling: {
- # max_worker_count: 1, # required
- # mcu_count: 1, # required
- # min_worker_count: 1, # required
- # scale_in_policy: {
- # cpu_utilization_percentage: 1, # required
- # },
- # scale_out_policy: {
- # cpu_utilization_percentage: 1, # required
- # },
- # },
- # provisioned_capacity: {
- # mcu_count: 1, # required
- # worker_count: 1, # required
- # },
- # }
- #
  # @!attribute [rw] auto_scaling
  # Information about the auto scaling parameters for the connector.
  # @return [Types::AutoScaling]
@@ -275,27 +213,6 @@ module Aws::KafkaConnect
  # The target capacity for the connector. The capacity can be auto scaled
  # or provisioned.
  #
- # @note When making an API call, you may pass CapacityUpdate
- # data as a hash:
- #
- # {
- # auto_scaling: {
- # max_worker_count: 1, # required
- # mcu_count: 1, # required
- # min_worker_count: 1, # required
- # scale_in_policy: { # required
- # cpu_utilization_percentage: 1, # required
- # },
- # scale_out_policy: { # required
- # cpu_utilization_percentage: 1, # required
- # },
- # },
- # provisioned_capacity: {
- # mcu_count: 1, # required
- # worker_count: 1, # required
- # },
- # }
- #
  # @!attribute [rw] auto_scaling
  # The target auto scaling setting.
  # @return [Types::AutoScalingUpdate]
@@ -315,14 +232,6 @@ module Aws::KafkaConnect

  # The settings for delivering connector logs to Amazon CloudWatch Logs.
  #
- # @note When making an API call, you may pass CloudWatchLogsLogDelivery
- # data as a hash:
- #
- # {
- # enabled: false, # required
- # log_group: "__string",
- # }
- #
  # @!attribute [rw] enabled
  # Whether log delivery to Amazon CloudWatch Logs is enabled.
  # @return [Boolean]
@@ -465,80 +374,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass CreateConnectorRequest
- # data as a hash:
- #
- # {
- # capacity: { # required
- # auto_scaling: {
- # max_worker_count: 1, # required
- # mcu_count: 1, # required
- # min_worker_count: 1, # required
- # scale_in_policy: {
- # cpu_utilization_percentage: 1, # required
- # },
- # scale_out_policy: {
- # cpu_utilization_percentage: 1, # required
- # },
- # },
- # provisioned_capacity: {
- # mcu_count: 1, # required
- # worker_count: 1, # required
- # },
- # },
- # connector_configuration: { # required
- # "__string" => "__string",
- # },
- # connector_description: "__stringMax1024",
- # connector_name: "__stringMin1Max128", # required
- # kafka_cluster: { # required
- # apache_kafka_cluster: { # required
- # bootstrap_servers: "__string", # required
- # vpc: { # required
- # security_groups: ["__string"],
- # subnets: ["__string"], # required
- # },
- # },
- # },
- # kafka_cluster_client_authentication: { # required
- # authentication_type: "NONE", # required, accepts NONE, IAM
- # },
- # kafka_cluster_encryption_in_transit: { # required
- # encryption_type: "PLAINTEXT", # required, accepts PLAINTEXT, TLS
- # },
- # kafka_connect_version: "__string", # required
- # log_delivery: {
- # worker_log_delivery: { # required
- # cloud_watch_logs: {
- # enabled: false, # required
- # log_group: "__string",
- # },
- # firehose: {
- # delivery_stream: "__string",
- # enabled: false, # required
- # },
- # s3: {
- # bucket: "__string",
- # enabled: false, # required
- # prefix: "__string",
- # },
- # },
- # },
- # plugins: [ # required
- # {
- # custom_plugin: { # required
- # custom_plugin_arn: "__string", # required
- # revision: 1, # required
- # },
- # },
- # ],
- # service_execution_role_arn: "__string", # required
- # worker_configuration: {
- # revision: 1, # required
- # worker_configuration_arn: "__string", # required
- # },
- # }
- #
  # @!attribute [rw] capacity
  # Information about the capacity allocated to the connector. Exactly
  # one of the two properties must be specified.
@@ -637,22 +472,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass CreateCustomPluginRequest
- # data as a hash:
- #
- # {
- # content_type: "JAR", # required, accepts JAR, ZIP
- # description: "__stringMax1024",
- # location: { # required
- # s3_location: { # required
- # bucket_arn: "__string", # required
- # file_key: "__string", # required
- # object_version: "__string",
- # },
- # },
- # name: "__stringMin1Max128", # required
- # }
- #
  # @!attribute [rw] content_type
  # The type of the plugin file.
  # @return [String]
@@ -708,15 +527,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass CreateWorkerConfigurationRequest
- # data as a hash:
- #
- # {
- # description: "__stringMax1024",
- # name: "__stringMin1Max128", # required
- # properties_file_content: "SyntheticCreateWorkerConfigurationRequest__string", # required
- # }
- #
  # @!attribute [rw] description
  # A summary description of the worker configuration.
  # @return [String]
@@ -770,14 +580,6 @@ module Aws::KafkaConnect
  # A plugin is an AWS resource that contains the code that defines a
  # connector's logic.
  #
- # @note When making an API call, you may pass CustomPlugin
- # data as a hash:
- #
- # {
- # custom_plugin_arn: "__string", # required
- # revision: 1, # required
- # }
- #
  # @!attribute [rw] custom_plugin_arn
  # The Amazon Resource Name (ARN) of the custom plugin.
  # @return [String]
@@ -837,17 +639,6 @@ module Aws::KafkaConnect

  # Information about the location of a custom plugin.
  #
- # @note When making an API call, you may pass CustomPluginLocation
- # data as a hash:
- #
- # {
- # s3_location: { # required
- # bucket_arn: "__string", # required
- # file_key: "__string", # required
- # object_version: "__string",
- # },
- # }
- #
  # @!attribute [rw] s3_location
  # The S3 bucket Amazon Resource Name (ARN), file key, and object
  # version of the plugin file stored in Amazon S3.
@@ -954,14 +745,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass DeleteConnectorRequest
- # data as a hash:
- #
- # {
- # connector_arn: "__string", # required
- # current_version: "__string",
- # }
- #
  # @!attribute [rw] connector_arn
  # The Amazon Resource Name (ARN) of the connector that you want to
  # delete.
@@ -998,13 +781,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass DeleteCustomPluginRequest
- # data as a hash:
- #
- # {
- # custom_plugin_arn: "__string", # required
- # }
- #
  # @!attribute [rw] custom_plugin_arn
  # The Amazon Resource Name (ARN) of the custom plugin that you want to
  # delete.
@@ -1036,13 +812,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass DescribeConnectorRequest
- # data as a hash:
- #
- # {
- # connector_arn: "__string", # required
- # }
- #
  # @!attribute [rw] connector_arn
  # The Amazon Resource Name (ARN) of the connector that you want to
  # describe.
@@ -1154,13 +923,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass DescribeCustomPluginRequest
- # data as a hash:
- #
- # {
- # custom_plugin_arn: "__string", # required
- # }
- #
  # @!attribute [rw] custom_plugin_arn
  # Returns information about a custom plugin.
  # @return [String]
@@ -1217,13 +979,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass DescribeWorkerConfigurationRequest
- # data as a hash:
- #
- # {
- # worker_configuration_arn: "__string", # required
- # }
- #
  # @!attribute [rw] worker_configuration_arn
  # The Amazon Resource Name (ARN) of the worker configuration that you
  # want to get information about.
@@ -1271,14 +1026,6 @@ module Aws::KafkaConnect

  # The settings for delivering logs to Amazon Kinesis Data Firehose.
  #
- # @note When making an API call, you may pass FirehoseLogDelivery
- # data as a hash:
- #
- # {
- # delivery_stream: "__string",
- # enabled: false, # required
- # }
- #
  # @!attribute [rw] delivery_stream
  # The name of the Kinesis Data Firehose delivery stream that is the
  # destination for log delivery.
@@ -1351,19 +1098,6 @@ module Aws::KafkaConnect
  # The details of the Apache Kafka cluster to which the connector is
  # connected.
  #
- # @note When making an API call, you may pass KafkaCluster
- # data as a hash:
- #
- # {
- # apache_kafka_cluster: { # required
- # bootstrap_servers: "__string", # required
- # vpc: { # required
- # security_groups: ["__string"],
- # subnets: ["__string"], # required
- # },
- # },
- # }
- #
  # @!attribute [rw] apache_kafka_cluster
  # The Apache Kafka cluster to which the connector is connected.
  # @return [Types::ApacheKafkaCluster]
@@ -1379,13 +1113,6 @@ module Aws::KafkaConnect
  # The client authentication information used in order to authenticate
  # with the Apache Kafka cluster.
  #
- # @note When making an API call, you may pass KafkaClusterClientAuthentication
- # data as a hash:
- #
- # {
- # authentication_type: "NONE", # required, accepts NONE, IAM
- # }
- #
  # @!attribute [rw] authentication_type
  # The type of client authentication used to connect to the Apache
  # Kafka cluster. Value NONE means that no client authentication is
@@ -1433,13 +1160,6 @@ module Aws::KafkaConnect

  # Details of encryption in transit to the Apache Kafka cluster.
  #
- # @note When making an API call, you may pass KafkaClusterEncryptionInTransit
- # data as a hash:
- #
- # {
- # encryption_type: "PLAINTEXT", # required, accepts PLAINTEXT, TLS
- # }
- #
  # @!attribute [rw] encryption_type
  # The type of encryption in transit to the Apache Kafka cluster.
  # @return [String]
@@ -1467,15 +1187,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass ListConnectorsRequest
- # data as a hash:
- #
- # {
- # connector_name_prefix: "__string",
- # max_results: 1,
- # next_token: "__string",
- # }
- #
  # @!attribute [rw] connector_name_prefix
  # The name prefix that you want to use to search for and list
  # connectors.
@@ -1520,14 +1231,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass ListCustomPluginsRequest
- # data as a hash:
- #
- # {
- # max_results: 1,
- # next_token: "__string",
- # }
- #
  # @!attribute [rw] max_results
  # The maximum number of custom plugins to list in one response.
  # @return [Integer]
@@ -1568,14 +1271,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass ListWorkerConfigurationsRequest
- # data as a hash:
- #
- # {
- # max_results: 1,
- # next_token: "__string",
- # }
- #
  # @!attribute [rw] max_results
  # The maximum number of worker configurations to list in one response.
  # @return [Integer]
@@ -1618,27 +1313,6 @@ module Aws::KafkaConnect

  # Details about log delivery.
  #
- # @note When making an API call, you may pass LogDelivery
- # data as a hash:
- #
- # {
- # worker_log_delivery: { # required
- # cloud_watch_logs: {
- # enabled: false, # required
- # log_group: "__string",
- # },
- # firehose: {
- # delivery_stream: "__string",
- # enabled: false, # required
- # },
- # s3: {
- # bucket: "__string",
- # enabled: false, # required
- # prefix: "__string",
- # },
- # },
- # }
- #
  # @!attribute [rw] worker_log_delivery
  # The workers can send worker logs to different destination types.
  # This configuration specifies the details of these destinations.
@@ -1684,16 +1358,6 @@ module Aws::KafkaConnect
  # A plugin is an AWS resource that contains the code that defines your
  # connector logic.
  #
- # @note When making an API call, you may pass Plugin
- # data as a hash:
- #
- # {
- # custom_plugin: { # required
- # custom_plugin_arn: "__string", # required
- # revision: 1, # required
- # },
- # }
- #
  # @!attribute [rw] custom_plugin
  # Details about a custom plugin.
  # @return [Types::CustomPlugin]
@@ -1722,14 +1386,6 @@ module Aws::KafkaConnect

  # Details about a connector's provisioned capacity.
  #
- # @note When making an API call, you may pass ProvisionedCapacity
- # data as a hash:
- #
- # {
- # mcu_count: 1, # required
- # worker_count: 1, # required
- # }
- #
  # @!attribute [rw] mcu_count
  # The number of microcontroller units (MCUs) allocated to each
  # connector worker. The valid values are 1,2,4,8.
@@ -1770,14 +1426,6 @@ module Aws::KafkaConnect

  # An update to a connector's fixed capacity.
  #
- # @note When making an API call, you may pass ProvisionedCapacityUpdate
- # data as a hash:
- #
- # {
- # mcu_count: 1, # required
- # worker_count: 1, # required
- # }
- #
  # @!attribute [rw] mcu_count
  # The number of microcontroller units (MCUs) allocated to each
  # connector worker. The valid values are 1,2,4,8.
@@ -1798,15 +1446,6 @@ module Aws::KafkaConnect

  # The location of an object in Amazon S3.
  #
- # @note When making an API call, you may pass S3Location
- # data as a hash:
- #
- # {
- # bucket_arn: "__string", # required
- # file_key: "__string", # required
- # object_version: "__string",
- # }
- #
  # @!attribute [rw] bucket_arn
  # The Amazon Resource Name (ARN) of an S3 bucket.
  # @return [String]
@@ -1855,15 +1494,6 @@ module Aws::KafkaConnect

  # Details about delivering logs to Amazon S3.
  #
- # @note When making an API call, you may pass S3LogDelivery
- # data as a hash:
- #
- # {
- # bucket: "__string",
- # enabled: false, # required
- # prefix: "__string",
- # }
- #
  # @!attribute [rw] bucket
  # The name of the S3 bucket that is the destination for log delivery.
  # @return [String]
@@ -1914,13 +1544,6 @@ module Aws::KafkaConnect

  # The scale-in policy for the connector.
  #
- # @note When making an API call, you may pass ScaleInPolicy
- # data as a hash:
- #
- # {
- # cpu_utilization_percentage: 1, # required
- # }
- #
  # @!attribute [rw] cpu_utilization_percentage
  # Specifies the CPU utilization percentage threshold at which you want
  # connector scale in to be triggered.
@@ -1951,13 +1574,6 @@ module Aws::KafkaConnect

  # An update to the connector's scale-in policy.
  #
- # @note When making an API call, you may pass ScaleInPolicyUpdate
- # data as a hash:
- #
- # {
- # cpu_utilization_percentage: 1, # required
- # }
- #
  # @!attribute [rw] cpu_utilization_percentage
  # The target CPU utilization percentage threshold at which you want
  # connector scale in to be triggered.
@@ -1973,13 +1589,6 @@ module Aws::KafkaConnect

  # The scale-out policy for the connector.
  #
- # @note When making an API call, you may pass ScaleOutPolicy
- # data as a hash:
- #
- # {
- # cpu_utilization_percentage: 1, # required
- # }
- #
  # @!attribute [rw] cpu_utilization_percentage
  # The CPU utilization percentage threshold at which you want connector
  # scale out to be triggered.
@@ -2010,13 +1619,6 @@ module Aws::KafkaConnect

  # An update to the connector's scale-out policy.
  #
- # @note When making an API call, you may pass ScaleOutPolicyUpdate
- # data as a hash:
- #
- # {
- # cpu_utilization_percentage: 1, # required
- # }
- #
  # @!attribute [rw] cpu_utilization_percentage
  # The target CPU utilization percentage threshold at which you want
  # connector scale out to be triggered.
@@ -2090,31 +1692,6 @@ module Aws::KafkaConnect
  include Aws::Structure
  end

- # @note When making an API call, you may pass UpdateConnectorRequest
- # data as a hash:
- #
- # {
- # capacity: { # required
- # auto_scaling: {
- # max_worker_count: 1, # required
- # mcu_count: 1, # required
- # min_worker_count: 1, # required
- # scale_in_policy: { # required
- # cpu_utilization_percentage: 1, # required
- # },
- # scale_out_policy: { # required
- # cpu_utilization_percentage: 1, # required
- # },
- # },
- # provisioned_capacity: {
- # mcu_count: 1, # required
- # worker_count: 1, # required
- # },
- # },
- # connector_arn: "__string", # required
- # current_version: "__string", # required
- # }
- #
  # @!attribute [rw] capacity
  # The target capacity.
  # @return [Types::CapacityUpdate]
@@ -2157,14 +1734,6 @@ module Aws::KafkaConnect

  # Information about the VPC in which the connector resides.
  #
- # @note When making an API call, you may pass Vpc
- # data as a hash:
- #
- # {
- # security_groups: ["__string"],
- # subnets: ["__string"], # required
- # }
- #
  # @!attribute [rw] security_groups
  # The security groups for the connector.
  # @return [Array<String>]
@@ -2204,14 +1773,6 @@ module Aws::KafkaConnect
  # The configuration of the workers, which are the processes that run the
  # connector logic.
  #
- # @note When making an API call, you may pass WorkerConfiguration
- # data as a hash:
- #
- # {
- # revision: 1, # required
- # worker_configuration_arn: "__string", # required
- # }
- #
  # @!attribute [rw] revision
  # The revision of the worker configuration.
  # @return [Integer]
@@ -2338,25 +1899,6 @@ module Aws::KafkaConnect
  # Workers can send worker logs to different destination types. This
  # configuration specifies the details of these destinations.
  #
- # @note When making an API call, you may pass WorkerLogDelivery
- # data as a hash:
- #
- # {
- # cloud_watch_logs: {
- # enabled: false, # required
- # log_group: "__string",
- # },
- # firehose: {
- # delivery_stream: "__string",
- # enabled: false, # required
- # },
- # s3: {
- # bucket: "__string",
- # enabled: false, # required
- # prefix: "__string",
- # },
- # }
- #
  # @!attribute [rw] cloud_watch_logs
  # Details about delivering logs to Amazon CloudWatch Logs.
  # @return [Types::CloudWatchLogsLogDelivery]
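Every removed block in this types.rb diff is a "@note ... data as a hash:" YARD example; the code-generated changes in 1.8.0/1.9.0 drop these doc-only shape listings, while request parameters are still passed as plain hashes. A hedged sketch (the ARN value is a placeholder):

    client = Aws::KafkaConnect::Client.new(region: 'us-east-1')
    # Same hash-based call style as before; only the generated documentation changed.
    client.delete_custom_plugin(
      custom_plugin_arn: 'arn:aws:kafkaconnect:us-east-1:123456789012:custom-plugin/example'
    )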
data/lib/aws-sdk-kafkaconnect.rb CHANGED
@@ -13,9 +13,13 @@ require 'aws-sigv4'

  require_relative 'aws-sdk-kafkaconnect/types'
  require_relative 'aws-sdk-kafkaconnect/client_api'
+ require_relative 'aws-sdk-kafkaconnect/plugins/endpoints.rb'
  require_relative 'aws-sdk-kafkaconnect/client'
  require_relative 'aws-sdk-kafkaconnect/errors'
  require_relative 'aws-sdk-kafkaconnect/resource'
+ require_relative 'aws-sdk-kafkaconnect/endpoint_parameters'
+ require_relative 'aws-sdk-kafkaconnect/endpoint_provider'
+ require_relative 'aws-sdk-kafkaconnect/endpoints'
  require_relative 'aws-sdk-kafkaconnect/customizations'

  # This module provides support for Managed Streaming for Kafka Connect. This module is available in the
@@ -48,6 +52,6 @@ require_relative 'aws-sdk-kafkaconnect/customizations'
  # @!group service
  module Aws::KafkaConnect

- GEM_VERSION = '1.7.0'
+ GEM_VERSION = '1.9.0'

  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: aws-sdk-kafkaconnect
  version: !ruby/object:Gem::Version
- version: 1.7.0
+ version: 1.9.0
  platform: ruby
  authors:
  - Amazon Web Services
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-03-01 00:00:00.000000000 Z
+ date: 2023-01-18 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aws-sdk-core
@@ -19,7 +19,7 @@ dependencies:
  version: '3'
  - - ">="
  - !ruby/object:Gem::Version
- version: 3.127.0
+ version: 3.165.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -29,7 +29,7 @@ dependencies:
  version: '3'
  - - ">="
  - !ruby/object:Gem::Version
- version: 3.127.0
+ version: 3.165.0
  - !ruby/object:Gem::Dependency
  name: aws-sigv4
  requirement: !ruby/object:Gem::Requirement
@@ -59,7 +59,11 @@ files:
  - lib/aws-sdk-kafkaconnect/client.rb
  - lib/aws-sdk-kafkaconnect/client_api.rb
  - lib/aws-sdk-kafkaconnect/customizations.rb
+ - lib/aws-sdk-kafkaconnect/endpoint_parameters.rb
+ - lib/aws-sdk-kafkaconnect/endpoint_provider.rb
+ - lib/aws-sdk-kafkaconnect/endpoints.rb
  - lib/aws-sdk-kafkaconnect/errors.rb
+ - lib/aws-sdk-kafkaconnect/plugins/endpoints.rb
  - lib/aws-sdk-kafkaconnect/resource.rb
  - lib/aws-sdk-kafkaconnect/types.rb
  homepage: https://github.com/aws/aws-sdk-ruby