rdkafka 0.11.1 → 0.13.1

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (54)
  1. checksums.yaml +4 -4
  2. data/.semaphore/semaphore.yml +7 -3
  3. data/CHANGELOG.md +23 -2
  4. data/Gemfile +2 -0
  5. data/README.md +26 -0
  6. data/Rakefile +2 -0
  7. data/dist/librdkafka_2.0.2.tar.gz +0 -0
  8. data/ext/Rakefile +56 -27
  9. data/lib/rdkafka/abstract_handle.rb +2 -0
  10. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  11. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  12. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  13. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  14. data/lib/rdkafka/admin.rb +50 -33
  15. data/lib/rdkafka/bindings.rb +59 -39
  16. data/lib/rdkafka/callbacks.rb +7 -1
  17. data/lib/rdkafka/config.rb +15 -12
  18. data/lib/rdkafka/consumer/headers.rb +24 -7
  19. data/lib/rdkafka/consumer/message.rb +3 -1
  20. data/lib/rdkafka/consumer/partition.rb +2 -0
  21. data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
  22. data/lib/rdkafka/consumer.rb +86 -44
  23. data/lib/rdkafka/error.rb +15 -0
  24. data/lib/rdkafka/metadata.rb +4 -2
  25. data/lib/rdkafka/native_kafka.rb +115 -0
  26. data/lib/rdkafka/producer/delivery_handle.rb +5 -2
  27. data/lib/rdkafka/producer/delivery_report.rb +9 -2
  28. data/lib/rdkafka/producer.rb +56 -38
  29. data/lib/rdkafka/version.rb +5 -3
  30. data/lib/rdkafka.rb +3 -0
  31. data/rdkafka.gemspec +2 -0
  32. data/spec/rdkafka/abstract_handle_spec.rb +2 -0
  33. data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
  34. data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
  35. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
  36. data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
  37. data/spec/rdkafka/admin_spec.rb +4 -3
  38. data/spec/rdkafka/bindings_spec.rb +9 -0
  39. data/spec/rdkafka/callbacks_spec.rb +2 -0
  40. data/spec/rdkafka/config_spec.rb +17 -2
  41. data/spec/rdkafka/consumer/headers_spec.rb +62 -0
  42. data/spec/rdkafka/consumer/message_spec.rb +2 -0
  43. data/spec/rdkafka/consumer/partition_spec.rb +2 -0
  44. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
  45. data/spec/rdkafka/consumer_spec.rb +123 -27
  46. data/spec/rdkafka/error_spec.rb +2 -0
  47. data/spec/rdkafka/metadata_spec.rb +2 -0
  48. data/spec/rdkafka/native_kafka_spec.rb +124 -0
  49. data/spec/rdkafka/producer/delivery_handle_spec.rb +5 -0
  50. data/spec/rdkafka/producer/delivery_report_spec.rb +8 -2
  51. data/spec/rdkafka/producer_spec.rb +103 -24
  52. data/spec/spec_helper.rb +17 -1
  53. metadata +13 -9
  54. data/bin/console +0 -11
data/spec/rdkafka/error_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::RdkafkaError do
data/spec/rdkafka/metadata_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "securerandom"
 
data/spec/rdkafka/native_kafka_spec.rb ADDED
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::NativeKafka do
+  let(:config) { rdkafka_producer_config }
+  let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
+  let(:closing) { false }
+  let(:thread) { double(Thread) }
+
+  subject(:client) { described_class.new(native, run_polling_thread: true) }
+
+  before do
+    allow(Thread).to receive(:new).and_return(thread)
+
+    allow(thread).to receive(:[]=).with(:closing, anything)
+    allow(thread).to receive(:join)
+    allow(thread).to receive(:abort_on_exception=).with(anything)
+  end
+
+  after { client.close }
+
+  context "defaults" do
+    it "sets the thread to abort on exception" do
+      expect(thread).to receive(:abort_on_exception=).with(true)
+
+      client
+    end
+
+    it "sets the thread `closing` flag to false" do
+      expect(thread).to receive(:[]=).with(:closing, false)
+
+      client
+    end
+  end
+
+  context "the polling thread" do
+    it "is created" do
+      expect(Thread).to receive(:new)
+
+      client
+    end
+  end
+
+  it "exposes the inner client" do
+    client.with_inner do |inner|
+      expect(inner).to eq(native)
+    end
+  end
+
+  context "when client was not yet closed (`nil`)" do
+    it "is not closed" do
+      expect(client.closed?).to eq(false)
+    end
+
+    context "and attempt to close" do
+      it "calls the `destroy` binding" do
+        expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native).and_call_original
+
+        client.close
+      end
+
+      it "indicates to the polling thread that it is closing" do
+        expect(thread).to receive(:[]=).with(:closing, true)
+
+        client.close
+      end
+
+      it "joins the polling thread" do
+        expect(thread).to receive(:join)
+
+        client.close
+      end
+
+      it "closes and unassign the native client" do
+        client.close
+
+        expect(client.closed?).to eq(true)
+      end
+    end
+  end
+
+  context "when client was already closed" do
+    before { client.close }
+
+    it "is closed" do
+      expect(client.closed?).to eq(true)
+    end
+
+    context "and attempt to close again" do
+      it "does not call the `destroy` binding" do
+        expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy_flags)
+
+        client.close
+      end
+
+      it "does not indicate to the polling thread that it is closing" do
+        expect(thread).not_to receive(:[]=).with(:closing, true)
+
+        client.close
+      end
+
+      it "does not join the polling thread" do
+        expect(thread).not_to receive(:join)
+
+        client.close
+      end
+
+      it "does not close and unassign the native client again" do
+        client.close
+
+        expect(client.closed?).to eq(true)
+      end
+    end
+  end
+
+  it "provides a finalizer that closes the native kafka client" do
+    expect(client.closed?).to eq(false)
+
+    client.finalizer.call("some-ignored-object-id")
+
+    expect(client.closed?).to eq(true)
+  end
+end
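Note: NativeKafka is the new internal wrapper around the librdkafka handle; per the specs above it owns an optional polling thread, closes idempotently, and installs a finalizer. A minimal sketch of the lifecycle as seen through the public API (assuming a broker at localhost:9092):

    require "rdkafka"

    producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer
    handle = producer.produce(topic: "produce_test_topic", payload: "payload", key: "key")
    handle.wait(max_wait_timeout: 15)
    producer.close # stops the polling thread and destroys the native client
    producer.close # safe: a second close is a no-op, as the specs above verify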
data/spec/rdkafka/producer/delivery_handle_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryHandle do
@@ -9,6 +11,7 @@ describe Rdkafka::Producer::DeliveryHandle do
       handle[:response] = response
       handle[:partition] = 2
       handle[:offset] = 100
+      handle[:topic_name] = FFI::MemoryPointer.from_string("produce_test_topic")
     end
   end
 
@@ -29,6 +32,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
      expect(report.partition).to eq(2)
      expect(report.offset).to eq(100)
+      expect(report.topic_name).to eq("produce_test_topic")
    end
 
    it "should wait without a timeout" do
@@ -36,6 +40,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
      expect(report.partition).to eq(2)
      expect(report.offset).to eq(100)
+      expect(report.topic_name).to eq("produce_test_topic")
    end
  end
 end
data/spec/rdkafka/producer/delivery_report_spec.rb CHANGED
@@ -1,7 +1,9 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryReport do
-  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }
+  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }
 
   it "should get the partition" do
     expect(subject.partition).to eq 2
@@ -11,7 +13,11 @@ describe Rdkafka::Producer::DeliveryReport do
     expect(subject.offset).to eq 100
   end
 
+  it "should get the topic_name" do
+    expect(subject.topic_name).to eq "topic"
+  end
+
   it "should get the error" do
-    expect(subject.error).to eq "error"
+    expect(subject.error).to eq -1
   end
 end
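For reference, these specs reflect the DeliveryReport constructor changing from (partition, offset, error) to (partition, offset, topic_name, error), with the error now a raw librdkafka code rather than a string:

    report = Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1)
    report.partition  # => 2
    report.offset     # => 100
    report.topic_name # => "topic"
    report.error      # => -1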
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "zlib"
 
@@ -7,7 +9,8 @@ describe Rdkafka::Producer do
 
   after do
     # Registry should always end up being empty
-    expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
+    registry = Rdkafka::Producer::DeliveryHandle::REGISTRY
+    expect(registry).to be_empty, registry.inspect
     producer.close
     consumer.close
   end
@@ -30,6 +33,7 @@ describe Rdkafka::Producer do
       expect(report).not_to be_nil
       expect(report.partition).to eq 1
       expect(report.offset).to be >= 0
+      expect(report.topic_name).to eq "produce_test_topic"
       @callback_called = true
     end
 
@@ -49,6 +53,27 @@ describe Rdkafka::Producer do
      # Callback should have been called
      expect(@callback_called).to be true
    end
+
+    it "should provide handle" do
+      @callback_handle = nil
+
+      producer.delivery_callback = lambda { |_, handle| @callback_handle = handle }
+
+      # Produce a message
+      handle = producer.produce(
+        topic: "produce_test_topic",
+        payload: "payload",
+        key: "key"
+      )
+
+      # Wait for it to be delivered
+      handle.wait(max_wait_timeout: 15)
+
+      # Join the producer thread.
+      producer.close
+
+      expect(handle).to be @callback_handle
+    end
   end
 
   context "with a callable object" do
@@ -92,6 +117,37 @@ describe Rdkafka::Producer do
      expect(called_report.first).not_to be_nil
      expect(called_report.first.partition).to eq 1
      expect(called_report.first.offset).to be >= 0
+      expect(called_report.first.topic_name).to eq "produce_test_topic"
+    end
+
+    it "should provide handle" do
+      callback_handles = []
+      callback = Class.new do
+        def initialize(callback_handles)
+          @callback_handles = callback_handles
+        end
+
+        def call(_, handle)
+          @callback_handles << handle
+        end
+      end
+      producer.delivery_callback = callback.new(callback_handles)
+
+      # Produce a message
+      handle = producer.produce(
+        topic: "produce_test_topic",
+        payload: "payload",
+        key: "key"
+      )
+
+      # Wait for it to be delivered
+      handle.wait(max_wait_timeout: 15)
+
+      # Join the producer thread.
+      producer.close
+
+      # Callback should have been called
+      expect(handle).to be callback_handles.first
    end
  end
 
@@ -129,10 +185,11 @@ describe Rdkafka::Producer do
    expect(report.partition).to eq 1
    expect(report.offset).to be >= 0
 
-    # Close producer
+    # Flush and close producer
+    producer.flush
    producer.close
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
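Producer#flush is new in this release; the pattern used throughout the updated specs is to drain queued deliveries before tearing the client down:

    producer.flush # wait for outstanding deliveries
    producer.close # then release the native client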
@@ -156,7 +213,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -200,6 +257,28 @@ describe Rdkafka::Producer do
    expect(messages[2].key).to eq key
  end
 
+  it "should produce a message with empty string without crashing" do
+    messages = [{key: 'a', partition_key: ''}]
+
+    messages = messages.map do |m|
+      handle = producer.produce(
+        topic: "partitioner_test_topic",
+        payload: "payload partition",
+        key: m[:key],
+        partition_key: m[:partition_key]
+      )
+      report = handle.wait(max_wait_timeout: 5)
+
+      wait_for_message(
+        topic: "partitioner_test_topic",
+        delivery_report: report,
+      )
+    end
+
+    expect(messages[0].partition).to eq 0
+    expect(messages[0].key).to eq 'a'
+  end
+
  it "should produce a message with utf-8 encoding" do
    handle = producer.produce(
      topic: "produce_test_topic",
@@ -208,7 +287,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -241,7 +320,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -262,7 +341,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -282,7 +361,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -300,7 +379,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -320,7 +399,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -329,9 +408,9 @@ describe Rdkafka::Producer do
 
    expect(message.payload).to eq "payload headers"
    expect(message.key).to eq "key headers"
-    expect(message.headers[:foo]).to eq "bar"
-    expect(message.headers[:baz]).to eq "foobar"
-    expect(message.headers[:foobar]).to be_nil
+    expect(message.headers["foo"]).to eq "bar"
+    expect(message.headers["baz"]).to eq "foobar"
+    expect(message.headers["foobar"]).to be_nil
  end
 
  it "should produce a message with empty headers" do
@@ -343,7 +422,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -376,19 +455,15 @@ describe Rdkafka::Producer do
    end
  end
 
-  it "should produce a message in a forked process" do
+  it "should produce a message in a forked process", skip: defined?(JRUBY_VERSION) && "Kernel#fork is not available" do
    # Fork, produce a message, send the report over a pipe and
    # wait for and check the message in the main process.
-
-    # Kernel#fork is not available in JRuby
-    skip if defined?(JRUBY_VERSION)
-
    reader, writer = IO.pipe
 
-    fork do
+    pid = fork do
      reader.close
 
-      # Avoids sharing the socket between processes.
+      # Avoid sharing the client between processes.
      producer = rdkafka_producer_config.producer
 
      handle = producer.produce(
@@ -401,24 +476,28 @@ describe Rdkafka::Producer do
 
      report_json = JSON.generate(
        "partition" => report.partition,
-        "offset" => report.offset
+        "offset" => report.offset,
+        "topic_name" => report.topic_name
      )
 
      writer.write(report_json)
      writer.close
+      producer.flush
      producer.close
    end
+    Process.wait(pid)
 
    writer.close
    report_hash = JSON.parse(reader.read)
    report = Rdkafka::Producer::DeliveryReport.new(
      report_hash["partition"],
-      report_hash["offset"]
+      report_hash["offset"],
+      report_hash["topic_name"]
    )
 
    reader.close
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
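The reworked fork test also doubles as the fork-safety pattern this release recommends: never share a client across processes, build a fresh producer in the child, and reap the child with Process.wait:

    pid = fork do
      producer = rdkafka_producer_config.producer # one client per process
      # ... produce and write the report to a pipe ...
      producer.flush
      producer.close
    end
    Process.wait(pid) # avoid leaving a zombie child behind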
data/spec/spec_helper.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 unless ENV["CI"] == "true"
   require "simplecov"
   SimpleCov.start do
@@ -71,7 +73,7 @@ def new_native_topic(topic_name="topic_name", native_client: )
 end
 
 def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
-  new_consumer = !!consumer
+  new_consumer = consumer.nil?
   consumer ||= rdkafka_consumer_config.consumer
   consumer.subscribe(topic)
   timeout = Time.now.to_i + timeout_in_seconds
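The new_consumer flag was inverted before this fix: it is meant to be true only when the helper created the consumer itself, so that caller-supplied consumers are left open. A sketch of the intended logic (the cleanup line is assumed, not shown in this hunk):

    new_consumer = consumer.nil?                  # true when no consumer was passed in
    consumer ||= rdkafka_consumer_config.consumer
    # ... poll for the message ...
    consumer.close if new_consumer                # only close helper-owned consumers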
@@ -104,6 +106,20 @@ def wait_for_unassignment(consumer)
   end
 end
 
+def notify_listener(listener, &block)
+  # 1. subscribe and poll
+  consumer.subscribe("consume_test_topic")
+  wait_for_assignment(consumer)
+  consumer.poll(100)
+
+  block.call if block
+
+  # 2. unsubscribe
+  consumer.unsubscribe
+  wait_for_unassignment(consumer)
+  consumer.close
+end
+
 RSpec.configure do |config|
   config.filter_run focus: true
   config.run_all_when_everything_filtered = true
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.11.1
+  version: 0.13.1
 platform: ruby
 authors:
 - Thijs Cadier
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-11-23 00:00:00.000000000 Z
+date: 2024-07-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -139,8 +139,7 @@ dependencies:
 description: Modern Kafka client library for Ruby based on librdkafka
 email:
 - thijs@appsignal.com
-executables:
-- console
+executables: []
 extensions:
 - ext/Rakefile
 extra_rdoc_files: []
@@ -155,7 +154,7 @@ files:
 - LICENSE
 - README.md
 - Rakefile
-- bin/console
+- dist/librdkafka_2.0.2.tar.gz
 - docker-compose.yml
 - ext/README.md
 - ext/Rakefile
@@ -176,6 +175,7 @@ files:
 - lib/rdkafka/consumer/topic_partition_list.rb
 - lib/rdkafka/error.rb
 - lib/rdkafka/metadata.rb
+- lib/rdkafka/native_kafka.rb
 - lib/rdkafka/producer.rb
 - lib/rdkafka/producer/delivery_handle.rb
 - lib/rdkafka/producer/delivery_report.rb
@@ -190,12 +190,14 @@ files:
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/headers_spec.rb
 - spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer/partition_spec.rb
 - spec/rdkafka/consumer/topic_partition_list_spec.rb
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
+- spec/rdkafka/native_kafka_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
@@ -204,7 +206,7 @@ homepage: https://github.com/thijsc/rdkafka-ruby
 licenses:
 - MIT
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -219,8 +221,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.1.4
-signing_key:
+rubygems_version: 3.5.14
+signing_key:
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
   It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
@@ -235,12 +237,14 @@ test_files:
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/headers_spec.rb
 - spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer/partition_spec.rb
 - spec/rdkafka/consumer/topic_partition_list_spec.rb
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
+- spec/rdkafka/native_kafka_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
data/bin/console DELETED
@@ -1,11 +0,0 @@
-#!/usr/bin/env ruby
-
-# frozen_string_literal: true
-
-ENV["IRBRC"] = File.join(File.dirname(__FILE__), ".irbrc")
-
-require "bundler/setup"
-require "rdkafka"
-
-require "irb"
-IRB.start(__FILE__)