rdkafka 0.12.0 → 0.13.0.beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. checksums.yaml +4 -4
  2. data/.semaphore/semaphore.yml +6 -2
  3. data/CHANGELOG.md +12 -0
  4. data/Gemfile +2 -0
  5. data/Rakefile +2 -0
  6. data/ext/Rakefile +2 -0
  7. data/lib/rdkafka/abstract_handle.rb +2 -0
  8. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  9. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  10. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  11. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  12. data/lib/rdkafka/admin.rb +35 -35
  13. data/lib/rdkafka/bindings.rb +19 -5
  14. data/lib/rdkafka/callbacks.rb +7 -1
  15. data/lib/rdkafka/config.rb +7 -5
  16. data/lib/rdkafka/consumer/headers.rb +24 -7
  17. data/lib/rdkafka/consumer/message.rb +3 -1
  18. data/lib/rdkafka/consumer/partition.rb +2 -0
  19. data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
  20. data/lib/rdkafka/consumer.rb +19 -10
  21. data/lib/rdkafka/error.rb +9 -0
  22. data/lib/rdkafka/metadata.rb +2 -0
  23. data/lib/rdkafka/native_kafka.rb +52 -0
  24. data/lib/rdkafka/producer/delivery_handle.rb +5 -2
  25. data/lib/rdkafka/producer/delivery_report.rb +9 -2
  26. data/lib/rdkafka/producer.rb +11 -10
  27. data/lib/rdkafka/version.rb +5 -3
  28. data/lib/rdkafka.rb +3 -1
  29. data/rdkafka.gemspec +2 -0
  30. data/spec/rdkafka/abstract_handle_spec.rb +2 -0
  31. data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
  32. data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
  33. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
  34. data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
  35. data/spec/rdkafka/admin_spec.rb +4 -3
  36. data/spec/rdkafka/bindings_spec.rb +2 -0
  37. data/spec/rdkafka/callbacks_spec.rb +2 -0
  38. data/spec/rdkafka/config_spec.rb +14 -0
  39. data/spec/rdkafka/consumer/headers_spec.rb +62 -0
  40. data/spec/rdkafka/consumer/message_spec.rb +2 -0
  41. data/spec/rdkafka/consumer/partition_spec.rb +2 -0
  42. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
  43. data/spec/rdkafka/consumer_spec.rb +84 -15
  44. data/spec/rdkafka/error_spec.rb +2 -0
  45. data/spec/rdkafka/metadata_spec.rb +2 -0
  46. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +8 -6
  47. data/spec/rdkafka/producer/delivery_handle_spec.rb +5 -0
  48. data/spec/rdkafka/producer/delivery_report_spec.rb +8 -2
  49. data/spec/rdkafka/producer_spec.rb +45 -16
  50. data/spec/spec_helper.rb +16 -0
  51. metadata +13 -11
  52. data/lib/rdkafka/producer/client.rb +0 -47
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require "spec_helper"
  require "zlib"
 
@@ -7,7 +9,8 @@ describe Rdkafka::Producer do
 
  after do
  # Registry should always end up being empty
- expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
+ registry = Rdkafka::Producer::DeliveryHandle::REGISTRY
+ expect(registry).to be_empty, registry.inspect
  producer.close
  consumer.close
  end
@@ -30,6 +33,7 @@ describe Rdkafka::Producer do
  expect(report).not_to be_nil
  expect(report.partition).to eq 1
  expect(report.offset).to be >= 0
+ expect(report.topic_name).to eq "produce_test_topic"
  @callback_called = true
  end
 
@@ -113,6 +117,7 @@ describe Rdkafka::Producer do
  expect(called_report.first).not_to be_nil
  expect(called_report.first.partition).to eq 1
  expect(called_report.first.offset).to be >= 0
+ expect(called_report.first.topic_name).to eq "produce_test_topic"
  end
 
  it "should provide handle" do
@@ -183,7 +188,7 @@ describe Rdkafka::Producer do
  # Close producer
  producer.close
 
- # Consume message and verify it's content
+ # Consume message and verify its content
  message = wait_for_message(
  topic: "produce_test_topic",
  delivery_report: report,
@@ -207,7 +212,7 @@ describe Rdkafka::Producer do
  )
  report = handle.wait(max_wait_timeout: 5)
 
- # Consume message and verify it's content
+ # Consume message and verify its content
  message = wait_for_message(
  topic: "produce_test_topic",
  delivery_report: report,
@@ -251,6 +256,28 @@ describe Rdkafka::Producer do
  expect(messages[2].key).to eq key
  end
 
+ it "should produce a message with empty string without crashing" do
+ messages = [{key: 'a', partition_key: ''}]
+
+ messages = messages.map do |m|
+ handle = producer.produce(
+ topic: "partitioner_test_topic",
+ payload: "payload partition",
+ key: m[:key],
+ partition_key: m[:partition_key]
+ )
+ report = handle.wait(max_wait_timeout: 5)
+
+ wait_for_message(
+ topic: "partitioner_test_topic",
+ delivery_report: report,
+ )
+ end
+
+ expect(messages[0].partition).to eq 0
+ expect(messages[0].key).to eq 'a'
+ end
+
  it "should produce a message with utf-8 encoding" do
  handle = producer.produce(
  topic: "produce_test_topic",
@@ -259,7 +286,7 @@ describe Rdkafka::Producer do
  )
  report = handle.wait(max_wait_timeout: 5)
 
- # Consume message and verify it's content
+ # Consume message and verify its content
  message = wait_for_message(
  topic: "produce_test_topic",
  delivery_report: report,
@@ -292,7 +319,7 @@ describe Rdkafka::Producer do
  )
  report = handle.wait(max_wait_timeout: 5)
 
- # Consume message and verify it's content
+ # Consume message and verify its content
  message = wait_for_message(
  topic: "produce_test_topic",
  delivery_report: report,
@@ -313,7 +340,7 @@ describe Rdkafka::Producer do
  )
  report = handle.wait(max_wait_timeout: 5)
 
- # Consume message and verify it's content
+ # Consume message and verify its content
  message = wait_for_message(
  topic: "produce_test_topic",
  delivery_report: report,
@@ -333,7 +360,7 @@ describe Rdkafka::Producer do
  )
  report = handle.wait(max_wait_timeout: 5)
 
- # Consume message and verify it's content
+ # Consume message and verify its content
  message = wait_for_message(
  topic: "produce_test_topic",
  delivery_report: report,
@@ -351,7 +378,7 @@ describe Rdkafka::Producer do
  )
  report = handle.wait(max_wait_timeout: 5)
 
- # Consume message and verify it's content
+ # Consume message and verify its content
  message = wait_for_message(
  topic: "produce_test_topic",
  delivery_report: report,
@@ -371,7 +398,7 @@ describe Rdkafka::Producer do
  )
  report = handle.wait(max_wait_timeout: 5)
 
- # Consume message and verify it's content
+ # Consume message and verify its content
  message = wait_for_message(
  topic: "produce_test_topic",
  delivery_report: report,
@@ -380,9 +407,9 @@ describe Rdkafka::Producer do
 
  expect(message.payload).to eq "payload headers"
  expect(message.key).to eq "key headers"
- expect(message.headers[:foo]).to eq "bar"
- expect(message.headers[:baz]).to eq "foobar"
- expect(message.headers[:foobar]).to be_nil
+ expect(message.headers["foo"]).to eq "bar"
+ expect(message.headers["baz"]).to eq "foobar"
+ expect(message.headers["foobar"]).to be_nil
  end
 
  it "should produce a message with empty headers" do
@@ -394,7 +421,7 @@ describe Rdkafka::Producer do
  )
  report = handle.wait(max_wait_timeout: 5)
 
- # Consume message and verify it's content
+ # Consume message and verify its content
  message = wait_for_message(
  topic: "produce_test_topic",
  delivery_report: report,
@@ -448,7 +475,8 @@ describe Rdkafka::Producer do
 
  report_json = JSON.generate(
  "partition" => report.partition,
- "offset" => report.offset
+ "offset" => report.offset,
+ "topic_name" => report.topic_name
  )
 
  writer.write(report_json)
@@ -460,12 +488,13 @@ describe Rdkafka::Producer do
  report_hash = JSON.parse(reader.read)
  report = Rdkafka::Producer::DeliveryReport.new(
  report_hash["partition"],
- report_hash["offset"]
+ report_hash["offset"],
+ report_hash["topic_name"]
  )
 
  reader.close
 
- # Consume message and verify it's content
+ # Consume message and verify its content
  message = wait_for_message(
  topic: "produce_test_topic",
  delivery_report: report,
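
The producer spec changes above exercise two caller-visible changes in 0.13.0: delivery reports now expose the delivery topic via topic_name (in the delivery callback, on the handle's report, and as a third positional argument to DeliveryReport.new), and consumed message headers are keyed by strings rather than symbols. A minimal usage sketch based only on the API exercised in this diff; the broker address and topic are assumptions for illustration:

  require "rdkafka"

  config = Rdkafka::Config.new(
    "bootstrap.servers" => "localhost:9092" # assumed local broker
  )

  producer = config.producer
  # The report passed to the delivery callback now also carries the topic name.
  producer.delivery_callback = lambda do |report|
    puts "Delivered to #{report.topic_name}/#{report.partition}@#{report.offset}"
  end

  handle = producer.produce(
    topic: "produce_test_topic",
    payload: "payload",
    key: "key",
    headers: { "foo" => "bar" }
  )
  report = handle.wait(max_wait_timeout: 5)
  puts report.topic_name # => "produce_test_topic"

  # On the consuming side, headers are now string-keyed:
  #   message.headers["foo"] # => "bar"  (0.12.x returned it under :foo)

  producer.close
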
data/spec/spec_helper.rb CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  unless ENV["CI"] == "true"
  require "simplecov"
  SimpleCov.start do
@@ -104,6 +106,20 @@ def wait_for_unassignment(consumer)
  end
  end
 
+ def notify_listener(listener, &block)
+ # 1. subscribe and poll
+ consumer.subscribe("consume_test_topic")
+ wait_for_assignment(consumer)
+ consumer.poll(100)
+
+ block.call if block
+
+ # 2. unsubscribe
+ consumer.unsubscribe
+ wait_for_unassignment(consumer)
+ consumer.close
+ end
+
  RSpec.configure do |config|
  config.filter_run focus: true
  config.run_all_when_everything_filtered = true
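
The new notify_listener helper drives a full subscribe/poll/unsubscribe cycle against "consume_test_topic" so that a rebalance listener attached to the consumer sees both an assignment and a revocation. It calls a bare consumer method, so it assumes the including example group defines one. A hedged sketch of how a spec might use it; the let blocks, listener double, and broker address are illustrative assumptions, not code from this release:

  describe "rebalance listener" do
    let(:listener) do
      double("listener", on_partitions_assigned: nil, on_partitions_revoked: nil)
    end

    let(:consumer) do
      config = Rdkafka::Config.new(
        "bootstrap.servers" => "localhost:9092", # assumed local broker
        "group.id"          => "listener_test_group"
      )
      config.consumer_rebalance_listener = listener
      config.consumer
    end

    it "fires assignment and revocation callbacks" do
      notify_listener(listener) do
        # runs while "consume_test_topic" is assigned to the consumer
      end
    end
  end
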
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: rdkafka
  version: !ruby/object:Gem::Version
- version: 0.12.0
+ version: 0.13.0.beta.2
  platform: ruby
  authors:
  - Thijs Cadier
- autorequire:
+ autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-06-17 00:00:00.000000000 Z
+ date: 2022-10-12 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: ffi
@@ -176,8 +176,8 @@ files:
  - lib/rdkafka/consumer/topic_partition_list.rb
  - lib/rdkafka/error.rb
  - lib/rdkafka/metadata.rb
+ - lib/rdkafka/native_kafka.rb
  - lib/rdkafka/producer.rb
- - lib/rdkafka/producer/client.rb
  - lib/rdkafka/producer/delivery_handle.rb
  - lib/rdkafka/producer/delivery_report.rb
  - lib/rdkafka/version.rb
@@ -191,13 +191,14 @@ files:
  - spec/rdkafka/bindings_spec.rb
  - spec/rdkafka/callbacks_spec.rb
  - spec/rdkafka/config_spec.rb
+ - spec/rdkafka/consumer/headers_spec.rb
  - spec/rdkafka/consumer/message_spec.rb
  - spec/rdkafka/consumer/partition_spec.rb
  - spec/rdkafka/consumer/topic_partition_list_spec.rb
  - spec/rdkafka/consumer_spec.rb
  - spec/rdkafka/error_spec.rb
  - spec/rdkafka/metadata_spec.rb
- - spec/rdkafka/producer/client_spec.rb
+ - spec/rdkafka/native_kafka_spec.rb
  - spec/rdkafka/producer/delivery_handle_spec.rb
  - spec/rdkafka/producer/delivery_report_spec.rb
  - spec/rdkafka/producer_spec.rb
@@ -206,7 +207,7 @@ homepage: https://github.com/thijsc/rdkafka-ruby
  licenses:
  - MIT
  metadata: {}
- post_install_message:
+ post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -217,12 +218,12 @@ required_ruby_version: !ruby/object:Gem::Requirement
  version: '2.6'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - ">"
  - !ruby/object:Gem::Version
- version: '0'
+ version: 1.3.1
  requirements: []
- rubygems_version: 3.0.3
- signing_key:
+ rubygems_version: 3.3.13
+ signing_key:
  specification_version: 4
  summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
  It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
@@ -237,13 +238,14 @@ test_files:
  - spec/rdkafka/bindings_spec.rb
  - spec/rdkafka/callbacks_spec.rb
  - spec/rdkafka/config_spec.rb
+ - spec/rdkafka/consumer/headers_spec.rb
  - spec/rdkafka/consumer/message_spec.rb
  - spec/rdkafka/consumer/partition_spec.rb
  - spec/rdkafka/consumer/topic_partition_list_spec.rb
  - spec/rdkafka/consumer_spec.rb
  - spec/rdkafka/error_spec.rb
  - spec/rdkafka/metadata_spec.rb
- - spec/rdkafka/producer/client_spec.rb
+ - spec/rdkafka/native_kafka_spec.rb
  - spec/rdkafka/producer/delivery_handle_spec.rb
  - spec/rdkafka/producer/delivery_report_spec.rb
  - spec/rdkafka/producer_spec.rb
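
The required_rubygems_version change from ">= 0" to "> 1.3.1" is the constraint RubyGems records automatically for prerelease versions, so Bundler will not resolve to 0.13.0.beta.2 unless the prerelease is requested explicitly. A hedged Gemfile sketch:

  # Gemfile — prerelease versions must be pinned explicitly
  source "https://rubygems.org"

  gem "rdkafka", "0.13.0.beta.2"
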
data/lib/rdkafka/producer/client.rb DELETED
@@ -1,47 +0,0 @@
- module Rdkafka
- class Producer
- class Client
- def initialize(native)
- @native = native
-
- # Start thread to poll client for delivery callbacks
- @polling_thread = Thread.new do
- loop do
- Rdkafka::Bindings.rd_kafka_poll(native, 250)
- # Exit thread if closing and the poll queue is empty
- if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(native) == 0
- break
- end
- end
- end
- @polling_thread.abort_on_exception = true
- @polling_thread[:closing] = false
- end
-
- def native
- @native
- end
-
- def finalizer
- ->(_) { close }
- end
-
- def closed?
- @native.nil?
- end
-
- def close(object_id=nil)
- return unless @native
-
- # Indicate to polling thread that we're closing
- @polling_thread[:closing] = true
- # Wait for the polling thread to finish up
- @polling_thread.join
-
- Rdkafka::Bindings.rd_kafka_destroy(@native)
-
- @native = nil
- end
- end
- end
- end
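
Per the file list, the deleted Producer::Client is superseded by the new Rdkafka::NativeKafka wrapper (data/lib/rdkafka/native_kafka.rb, +52 lines; producer/client_spec.rb is renamed to native_kafka_spec.rb). The new file itself is not shown in this diff excerpt, so the sketch below only restates the polling/close pattern of the deleted class under the new name — an assumption about its shape, not the actual 0.13.0 implementation:

  module Rdkafka
    # Assumed shape only: a thin wrapper around a native librdkafka handle that
    # keeps a background thread polling for callbacks until the handle is closed.
    class NativeKafka
      def initialize(inner)
        @inner = inner

        # Poll the native handle for callbacks every 250 ms
        @polling_thread = Thread.new do
          loop do
            Rdkafka::Bindings.rd_kafka_poll(inner, 250)
            # Stop once close has been requested and the outbound queue is drained
            break if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(inner) == 0
          end
        end
        @polling_thread.abort_on_exception = true
        @polling_thread[:closing] = false
      end

      attr_reader :inner

      def finalizer
        ->(_) { close }
      end

      def closed?
        @inner.nil?
      end

      def close(object_id = nil)
        return if closed?

        # Signal the polling thread, wait for it to drain, then destroy the handle
        @polling_thread[:closing] = true
        @polling_thread.join

        Rdkafka::Bindings.rd_kafka_destroy(@inner)
        @inner = nil
      end
    end
  end
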