ruby-kafka 0.7.6 → 1.0.0

Files changed (41)
  1. checksums.yaml +4 -4
  2. data/.circleci/config.yml +102 -3
  3. data/.github/workflows/stale.yml +19 -0
  4. data/CHANGELOG.md +24 -0
  5. data/README.md +18 -0
  6. data/lib/kafka/async_producer.rb +3 -0
  7. data/lib/kafka/broker.rb +12 -0
  8. data/lib/kafka/client.rb +35 -3
  9. data/lib/kafka/cluster.rb +52 -0
  10. data/lib/kafka/compression.rb +13 -11
  11. data/lib/kafka/compressor.rb +1 -0
  12. data/lib/kafka/connection.rb +3 -0
  13. data/lib/kafka/consumer_group.rb +4 -1
  14. data/lib/kafka/datadog.rb +2 -10
  15. data/lib/kafka/fetched_batch.rb +5 -1
  16. data/lib/kafka/fetched_batch_generator.rb +4 -1
  17. data/lib/kafka/fetched_message.rb +1 -0
  18. data/lib/kafka/fetcher.rb +4 -1
  19. data/lib/kafka/gzip_codec.rb +4 -0
  20. data/lib/kafka/lz4_codec.rb +4 -0
  21. data/lib/kafka/producer.rb +20 -1
  22. data/lib/kafka/prometheus.rb +316 -0
  23. data/lib/kafka/protocol.rb +8 -0
  24. data/lib/kafka/protocol/add_offsets_to_txn_request.rb +29 -0
  25. data/lib/kafka/protocol/add_offsets_to_txn_response.rb +19 -0
  26. data/lib/kafka/protocol/join_group_request.rb +8 -2
  27. data/lib/kafka/protocol/offset_fetch_request.rb +3 -1
  28. data/lib/kafka/protocol/produce_request.rb +3 -1
  29. data/lib/kafka/protocol/record_batch.rb +5 -4
  30. data/lib/kafka/protocol/txn_offset_commit_request.rb +46 -0
  31. data/lib/kafka/protocol/txn_offset_commit_response.rb +18 -0
  32. data/lib/kafka/sasl/scram.rb +15 -12
  33. data/lib/kafka/snappy_codec.rb +4 -0
  34. data/lib/kafka/ssl_context.rb +4 -1
  35. data/lib/kafka/ssl_socket_with_timeout.rb +1 -0
  36. data/lib/kafka/tagged_logger.rb +25 -20
  37. data/lib/kafka/transaction_manager.rb +25 -0
  38. data/lib/kafka/version.rb +1 -1
  39. data/lib/kafka/zstd_codec.rb +27 -0
  40. data/ruby-kafka.gemspec +4 -2
  41. metadata +47 -6
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: '08731d84b1c82d5bf46625993d552e80face5fa5dbf71af68ff60cb454f740bf'
-  data.tar.gz: 64aaa4a1b1dcd4200e058bfa1157d07749044752362ff296b5eadd6108c365da
+  metadata.gz: 8ebe10cb2c93b4387a14d069c4291751808abed924cbee6f4ff2f00770aaa1c2
+  data.tar.gz: 1e392b4f2b5137cd83bd191b5c629fd98b41697d389fb5c68c3100d03c3ff218
 SHA512:
-  metadata.gz: d154e9ecbbea7014d33ef93b6e21b1897df7628abb51a04094c2209518b43097414b54f03175fa0d59f89368ca2821559b9739c054503046ea74ef6cd875a4d6
-  data.tar.gz: 23680e5dadc11ae5a810708a8d2d7de8e5dbeb0f964164b2d29288737bcd5fa4c9885d10b9660a538a6b9faa27d1accf83af44dabce4ef6d2c56f6b105bb9c91
+  metadata.gz: 037a8fe1a7495f5e3d723e047392f68a1373e91f8a00337beb7ae6e053d031d6ab1b8597767a1b8b43399850e23493295180a24250e0c2f09b4f626316cd6915
+  data.tar.gz: 108d7160593e452f27c1e828438f4cd8c2c6c009f081a0ff949ca330f9487b948aad73cf23ba39e12adb0ed9f773c667cfeebaad1625b33770057b7b69b3dcb2
data/.circleci/config.yml CHANGED
@@ -145,21 +145,117 @@ jobs:
         environment:
           LOG_LEVEL: DEBUG
       - image: wurstmeister/zookeeper
-      - image: wurstmeister/kafka:2.12-2.1.0
+      - image: wurstmeister/kafka:2.12-2.1.1
         environment:
           KAFKA_ADVERTISED_HOST_NAME: localhost
           KAFKA_ADVERTISED_PORT: 9092
           KAFKA_PORT: 9092
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
           KAFKA_DELETE_TOPIC_ENABLE: true
-      - image: wurstmeister/kafka:2.12-2.1.0
+      - image: wurstmeister/kafka:2.12-2.1.1
         environment:
           KAFKA_ADVERTISED_HOST_NAME: localhost
           KAFKA_ADVERTISED_PORT: 9093
           KAFKA_PORT: 9093
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
           KAFKA_DELETE_TOPIC_ENABLE: true
-      - image: wurstmeister/kafka:2.12-2.1.0
+      - image: wurstmeister/kafka:2.12-2.1.1
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9094
+          KAFKA_PORT: 9094
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+    steps:
+      - checkout
+      - run: bundle install --path vendor/bundle
+      - run: bundle exec rspec --profile --tag functional spec/functional
+
+  kafka-2.2:
+    docker:
+      - image: circleci/ruby:2.5.1-node
+        environment:
+          LOG_LEVEL: DEBUG
+      - image: wurstmeister/zookeeper
+      - image: wurstmeister/kafka:2.12-2.2.1
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9092
+          KAFKA_PORT: 9092
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+      - image: wurstmeister/kafka:2.12-2.2.1
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9093
+          KAFKA_PORT: 9093
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+      - image: wurstmeister/kafka:2.12-2.2.1
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9094
+          KAFKA_PORT: 9094
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+    steps:
+      - checkout
+      - run: bundle install --path vendor/bundle
+      - run: bundle exec rspec --profile --tag functional spec/functional
+
+  kafka-2.3:
+    docker:
+      - image: circleci/ruby:2.5.1-node
+        environment:
+          LOG_LEVEL: DEBUG
+      - image: wurstmeister/zookeeper
+      - image: wurstmeister/kafka:2.12-2.3.1
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9092
+          KAFKA_PORT: 9092
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+      - image: wurstmeister/kafka:2.12-2.3.1
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9093
+          KAFKA_PORT: 9093
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+      - image: wurstmeister/kafka:2.12-2.3.1
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9094
+          KAFKA_PORT: 9094
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+    steps:
+      - checkout
+      - run: bundle install --path vendor/bundle
+      - run: bundle exec rspec --profile --tag functional spec/functional
+
+  kafka-2.4:
+    docker:
+      - image: circleci/ruby:2.5.1-node
+        environment:
+          LOG_LEVEL: DEBUG
+      - image: wurstmeister/zookeeper
+      - image: wurstmeister/kafka:2.12-2.4.0
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9092
+          KAFKA_PORT: 9092
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+      - image: wurstmeister/kafka:2.12-2.4.0
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9093
+          KAFKA_PORT: 9093
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+      - image: wurstmeister/kafka:2.12-2.4.0
         environment:
           KAFKA_ADVERTISED_HOST_NAME: localhost
           KAFKA_ADVERTISED_PORT: 9094
@@ -181,3 +277,6 @@ workflows:
       - kafka-1.1
       - kafka-2.0
       - kafka-2.1
+      - kafka-2.2
+      - kafka-2.3
+      - kafka-2.4
data/.github/workflows/stale.yml CHANGED
@@ -0,0 +1,19 @@
+name: Mark stale issues and pull requests
+
+on:
+  schedule:
+  - cron: "0 0 * * *"
+
+jobs:
+  stale:
+
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/stale@v1
+      with:
+        repo-token: ${{ secrets.GITHUB_TOKEN }}
+        stale-issue-message: 'Issue has been marked as stale due to a lack of activity.'
+        stale-pr-message: 'Pull request has been marked as stale due to a lack of activity.'
+        stale-issue-label: 'no-issue-activity'
+        stale-pr-label: 'no-pr-activity'
data/CHANGELOG.md CHANGED
@@ -4,6 +4,30 @@ Changes and additions to the library will be listed here.
 
 ## Unreleased
 
+## 1.0.0
+
+- Add client methods to manage configs (#759)
+- Support Kafka 2.3 and 2.4.
+
+## 0.7.10
+
+- Fix logger again (#762)
+
+## 0.7.9
+
+- Fix SSL authentication for ruby < 2.4.0 (#742)
+- Add metrics for prometheus/client (#739)
+- Do not add nil message entries when ignoring old messages (#746)
+- Scram authentication thread save (#743)
+
+## 0.7.8
+- Optionally verify hostname on SSL certs (#733)
+
+## 0.7.7
+- Producer send offsets in transaction (#723)
+- Support zstd compression (#724)
+- Verify SSL Certificates (#730)
+
 ## 0.7.6
 - Introduce regex matching in `Consumer#subscribe` (#700)
 - Only rejoin group on error if we're not in shutdown mode (#711)
data/README.md CHANGED
@@ -108,6 +108,20 @@ Or install it yourself as:
     <td>Limited support</td>
     <td>Limited support</td>
   </tr>
+  <tr>
+    <th>Kafka 2.2</th>
+    <td>Limited support</td>
+    <td>Limited support</td>
+  </tr>
+  <tr>
+    <th>Kafka 2.3</th>
+    <td>Limited support</td>
+    <td>Limited support</td>
+  </tr>
+    <th>Kafka 2.4</th>
+    <td>Limited support</td>
+    <td>Limited support</td>
+  </tr>
 </table>
 
 This library is targeting Kafka 0.9 with the v0.4.x series and Kafka 0.10 with the v0.5.x series. There's limited support for Kafka 0.8, and things should work with Kafka 0.11, although there may be performance issues due to changes in the protocol.
@@ -119,6 +133,9 @@ This library is targeting Kafka 0.9 with the v0.4.x series and Kafka 0.10 with t
 - **Kafka 1.0:** Everything that works with Kafka 0.11 should still work, but so far no features specific to Kafka 1.0 have been added.
 - **Kafka 2.0:** Everything that works with Kafka 1.0 should still work, but so far no features specific to Kafka 2.0 have been added.
 - **Kafka 2.1:** Everything that works with Kafka 2.0 should still work, but so far no features specific to Kafka 2.1 have been added.
+- **Kafka 2.2:** Everything that works with Kafka 2.1 should still work, but so far no features specific to Kafka 2.2 have been added.
+- **Kafka 2.3:** Everything that works with Kafka 2.2 should still work, but so far no features specific to Kafka 2.3 have been added.
+- **Kafka 2.4:** Everything that works with Kafka 2.3 should still work, but so far no features specific to Kafka 2.4 have been added.
 
 This library requires Ruby 2.1 or higher.
 
@@ -424,6 +441,7 @@ Compression is enabled by passing the `compression_codec` parameter to `#produce
 * `:snappy` for [Snappy](http://google.github.io/snappy/) compression.
 * `:gzip` for [gzip](https://en.wikipedia.org/wiki/Gzip) compression.
 * `:lz4` for [LZ4](https://en.wikipedia.org/wiki/LZ4_(compression_algorithm)) compression.
+* `:zstd` for [zstd](https://facebook.github.io/zstd/) compression.
 
 By default, all message sets will be compressed if you specify a compression codec. To increase the compression threshold, set `compression_threshold` to an integer value higher than one.
 
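The new `:zstd` codec is used like the existing ones. A minimal sketch, assuming the `zstd-ruby` gem is installed (ruby-kafka loads it lazily) and the brokers run Kafka 2.1 or newer, which is when zstd support landed in the protocol; the broker address and topic are placeholders:

```ruby
require "kafka"

kafka = Kafka.new(["kafka1:9092"], client_id: "my-app")

# Only compress once a message set holds at least 10 messages.
producer = kafka.producer(compression_codec: :zstd, compression_threshold: 10)

producer.produce("hello", topic: "greetings")
producer.deliver_messages
```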
data/lib/kafka/async_producer.rb CHANGED
@@ -103,6 +103,9 @@ module Kafka
     # @raise [BufferOverflow] if the message queue is full.
     # @return [nil]
     def produce(value, topic:, **options)
+      # We want to fail fast if `topic` isn't a String
+      topic = topic.to_str
+
       ensure_threads_running!
 
       if @queue.size >= @max_queue_size
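`to_str` is Ruby's implicit String-conversion hook, so only String-like objects pass; anything else now raises in the caller's thread instead of failing later in the background delivery thread. A quick sketch of the resulting behavior:

```ruby
producer = kafka.async_producer

producer.produce("hello", topic: "greetings") # fine: topic is a String

# Symbols don't define #to_str, so this raises NoMethodError immediately
# instead of enqueueing a message that would fail during delivery.
producer.produce("hello", topic: :greetings)
```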
data/lib/kafka/broker.rb CHANGED
@@ -182,6 +182,18 @@ module Kafka
       send_request(request)
     end
 
+    def add_offsets_to_txn(**options)
+      request = Protocol::AddOffsetsToTxnRequest.new(**options)
+
+      send_request(request)
+    end
+
+    def txn_offset_commit(**options)
+      request = Protocol::TxnOffsetCommitRequest.new(**options)
+
+      send_request(request)
+    end
+
     private
 
     def send_request(request)
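These wrappers carry the new `AddOffsetsToTxn` and `TxnOffsetCommit` protocol messages that back "Producer send offsets in transaction" (#723; see the producer.rb and transaction_manager.rb entries in the file list). A hedged consume-transform-produce sketch: `transform` is a hypothetical helper, and the exact producer-side signature should be checked against the 0.7.7+ documentation:

```ruby
producer = kafka.producer(transactional: true, transactional_id: "my-txn-id")

consumer.each_batch do |batch|
  producer.transaction do
    batch.messages.each do |message|
      producer.produce(transform(message.value), topic: "output-topic")
    end

    # Commit the consumed offsets atomically with the produced messages.
    producer.send_offsets_to_transaction(batch: batch, group_id: "my-group")
  end
end
```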
data/lib/kafka/client.rb CHANGED
@@ -71,7 +71,7 @@ module Kafka
                    ssl_client_cert_key_password: nil, ssl_client_cert_chain: nil, sasl_gssapi_principal: nil,
                    sasl_gssapi_keytab: nil, sasl_plain_authzid: '', sasl_plain_username: nil, sasl_plain_password: nil,
                    sasl_scram_username: nil, sasl_scram_password: nil, sasl_scram_mechanism: nil,
-                   sasl_over_ssl: true, ssl_ca_certs_from_system: false, sasl_oauth_token_provider: nil)
+                   sasl_over_ssl: true, ssl_ca_certs_from_system: false, sasl_oauth_token_provider: nil, ssl_verify_hostname: true)
       @logger = TaggedLogger.new(logger)
       @instrumenter = Instrumenter.new(client_id: client_id)
       @seed_brokers = normalize_seed_brokers(seed_brokers)
@@ -84,6 +84,7 @@ module Kafka
         client_cert_key_password: ssl_client_cert_key_password,
         client_cert_chain: ssl_client_cert_chain,
         ca_certs_from_system: ssl_ca_certs_from_system,
+        verify_hostname: ssl_verify_hostname
       )
 
       sasl_authenticator = SaslAuthenticator.new(
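With this change, hostname verification happens by default whenever an SSL context is built; the new flag exists to opt out, for example with self-signed development certificates. A sketch with assumed broker address and file name:

```ruby
kafka = Kafka.new(
  ["kafka1:9092"],
  client_id: "my-app",
  ssl_ca_cert: File.read("my_ca_cert.pem"),
  # Accept certificates whose CN/SAN doesn't match the broker hostname.
  ssl_verify_hostname: false
)
```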
@@ -137,6 +138,9 @@ module Kafka
     def deliver_message(value, key: nil, headers: {}, topic:, partition: nil, partition_key: nil, retries: 1)
       create_time = Time.now
 
+      # We want to fail fast if `topic` isn't a String
+      topic = topic.to_str
+
       message = PendingMessage.new(
         value: value,
         key: key,
@@ -233,8 +237,8 @@ module Kafka
     #   result in {BufferOverflow} being raised.
     #
     # @param compression_codec [Symbol, nil] the name of the compression codec to
-    #   use, or nil if no compression should be performed. Valid codecs: `:snappy`
-    #   and `:gzip`.
+    #   use, or nil if no compression should be performed. Valid codecs: `:snappy`,
+    #   `:gzip`, `:lz4`, `:zstd`
     #
     # @param compression_threshold [Integer] the number of messages that needs to
     #   be in a message set before it should be compressed. Note that message sets
@@ -336,6 +340,7 @@ module Kafka
     def consumer(
         group_id:,
         session_timeout: 30,
+        rebalance_timeout: 60,
        offset_commit_interval: 10,
        offset_commit_threshold: 0,
        heartbeat_interval: 10,
@@ -356,6 +361,7 @@ module Kafka
        logger: @logger,
        group_id: group_id,
        session_timeout: session_timeout,
+        rebalance_timeout: rebalance_timeout,
        retention_time: retention_time,
        instrumenter: instrumenter,
      )
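The new `rebalance_timeout` (in seconds, default 60) is forwarded to the consumer group and, per the join_group_request.rb change in the file list, sent to the broker in the JoinGroup request, bounding how long members get to rejoin during a rebalance. A sketch with assumed values:

```ruby
consumer = kafka.consumer(
  group_id: "my-group",
  session_timeout: 30,    # evict a member after 30s without heartbeats
  rebalance_timeout: 120  # give slow members up to 120s to rejoin on rebalance
)
```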
@@ -529,6 +535,24 @@ module Kafka
       end
     end
 
+    # Describe broker configs
+    #
+    # @param broker_id [int] the id of the broker
+    # @param configs [Array] array of config keys.
+    # @return [Array<Kafka::Protocol::DescribeConfigsResponse::ConfigEntry>]
+    def describe_configs(broker_id, configs = [])
+      @cluster.describe_configs(broker_id, configs)
+    end
+
+    # Alter broker configs
+    #
+    # @param broker_id [int] the id of the broker
+    # @param configs [Array] array of config strings.
+    # @return [nil]
+    def alter_configs(broker_id, configs = [])
+      @cluster.alter_configs(broker_id, configs)
+    end
+
     # Creates a topic in the cluster.
     #
     # @example Creating a topic with log compaction
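A hedged sketch of the new config-management calls from #759. The broker id and config names are placeholders, and the element shape passed to `alter_configs` (name/value pairs) is inferred from the underlying AlterConfigs protocol, so treat it as an assumption:

```ruby
# Read selected configs from broker 1.
entries = kafka.describe_configs(1, ["log.retention.hours", "message.max.bytes"])

entries.each do |entry|
  puts "#{entry.name} = #{entry.value}"
end

# Change a broker config; returns nil on success and raises on protocol errors.
kafka.alter_configs(1, [["log.retention.hours", "48"]])
```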
@@ -614,6 +638,14 @@ module Kafka
       @cluster.describe_group(group_id)
     end
 
+    # Fetch all committed offsets for a consumer group
+    #
+    # @param group_id [String] the id of the consumer group
+    # @return [Hash<String, Hash<Integer, Kafka::Protocol::OffsetFetchResponse::PartitionOffsetInfo>>]
+    def fetch_group_offsets(group_id)
+      @cluster.fetch_group_offsets(group_id)
+    end
+
     # Create partitions for a topic.
     #
     # @param name [String] the name of the topic.
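Per the `@return` tag, the result is nested topic → partition → offset info. A sketch of dumping a group's committed offsets, with the group name assumed:

```ruby
offsets = kafka.fetch_group_offsets("my-group")

offsets.each do |topic, partitions|
  partitions.each do |partition, info|
    puts "#{topic}/#{partition}: committed offset #{info.offset}"
  end
end
```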
data/lib/kafka/cluster.rb CHANGED
@@ -45,6 +45,10 @@ module Kafka
       new_topics = topics - @target_topics
 
       unless new_topics.empty?
+        if new_topics.any? { |topic| topic.nil? or topic.empty? }
+          raise ArgumentError, "Topic must not be nil or empty"
+        end
+
         @logger.info "New topics added to target list: #{new_topics.to_a.join(', ')}"
 
         @target_topics.merge(new_topics)
@@ -139,6 +143,40 @@ module Kafka
       end
     end
 
+    def describe_configs(broker_id, configs = [])
+      options = {
+        resources: [[Kafka::Protocol::RESOURCE_TYPE_CLUSTER, broker_id.to_s, configs]]
+      }
+
+      info = cluster_info.brokers.find {|broker| broker.node_id == broker_id }
+      broker = @broker_pool.connect(info.host, info.port, node_id: info.node_id)
+
+      response = broker.describe_configs(**options)
+
+      response.resources.each do |resource|
+        Protocol.handle_error(resource.error_code, resource.error_message)
+      end
+
+      response.resources.first.configs
+    end
+
+    def alter_configs(broker_id, configs = [])
+      options = {
+        resources: [[Kafka::Protocol::RESOURCE_TYPE_CLUSTER, broker_id.to_s, configs]]
+      }
+
+      info = cluster_info.brokers.find {|broker| broker.node_id == broker_id }
+      broker = @broker_pool.connect(info.host, info.port, node_id: info.node_id)
+
+      response = broker.alter_configs(**options)
+
+      response.resources.each do |resource|
+        Protocol.handle_error(resource.error_code, resource.error_message)
+      end
+
+      nil
+    end
+
     def partitions_for(topic)
       add_target_topics([topic])
       refresh_metadata_if_necessary!
@@ -252,6 +290,20 @@ module Kafka
       group
     end
 
+    def fetch_group_offsets(group_id)
+      topics = get_group_coordinator(group_id: group_id)
+        .fetch_offsets(group_id: group_id, topics: nil)
+        .topics
+
+      topics.each do |_, partitions|
+        partitions.each do |_, response|
+          Protocol.handle_error(response.error_code)
+        end
+      end
+
+      topics
+    end
+
     def create_partitions_for(name, num_partitions:, timeout:)
       options = {
         topics: [[name, num_partitions, nil]],
data/lib/kafka/compression.rb CHANGED
@@ -3,27 +3,27 @@
 require "kafka/snappy_codec"
 require "kafka/gzip_codec"
 require "kafka/lz4_codec"
+require "kafka/zstd_codec"
 
 module Kafka
   module Compression
-    CODEC_NAMES = {
-      1 => :gzip,
-      2 => :snappy,
-      3 => :lz4,
-    }.freeze
-
-    CODECS = {
+    CODECS_BY_NAME = {
       :gzip => GzipCodec.new,
       :snappy => SnappyCodec.new,
       :lz4 => LZ4Codec.new,
+      :zstd => ZstdCodec.new,
     }.freeze
 
+    CODECS_BY_ID = CODECS_BY_NAME.each_with_object({}) do |(_, codec), hash|
+      hash[codec.codec_id] = codec
+    end.freeze
+
     def self.codecs
-      CODECS.keys
+      CODECS_BY_NAME.keys
     end
 
     def self.find_codec(name)
-      codec = CODECS.fetch(name) do
+      codec = CODECS_BY_NAME.fetch(name) do
         raise "Unknown compression codec #{name}"
       end
 
@@ -33,11 +33,13 @@ module Kafka
     end
 
     def self.find_codec_by_id(codec_id)
-      codec_name = CODEC_NAMES.fetch(codec_id) do
+      codec = CODECS_BY_ID.fetch(codec_id) do
         raise "Unknown codec id #{codec_id}"
       end
 
-      find_codec(codec_name)
+      codec.load
+
+      codec
     end
   end
 end
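After the refactor, both lookups share one codec instance per algorithm, and `find_codec_by_id` loads the codec's backing library on demand just as `find_codec` always has. A quick sketch (codec ids follow the Kafka protocol: 1 gzip, 2 snappy, 3 lz4, 4 zstd):

```ruby
require "kafka"

Kafka::Compression.codecs
# => [:gzip, :snappy, :lz4, :zstd]

codec = Kafka::Compression.find_codec(:zstd) # raises unless zstd-ruby is installed
codec.codec_id
# => 4

# The decode path resolves by protocol id and returns the same instance.
Kafka::Compression.find_codec_by_id(4).equal?(codec)
# => true
```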