rdkafka 0.12.0 → 0.13.0.beta.7
- checksums.yaml +4 -4
- data/.semaphore/semaphore.yml +7 -3
- data/CHANGELOG.md +18 -0
- data/Gemfile +2 -0
- data/README.md +26 -0
- data/Rakefile +2 -0
- data/ext/Rakefile +2 -0
- data/lib/rdkafka/abstract_handle.rb +2 -0
- data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
- data/lib/rdkafka/admin/create_topic_report.rb +2 -0
- data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
- data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
- data/lib/rdkafka/admin.rb +48 -31
- data/lib/rdkafka/bindings.rb +50 -37
- data/lib/rdkafka/callbacks.rb +7 -1
- data/lib/rdkafka/config.rb +13 -10
- data/lib/rdkafka/consumer/headers.rb +24 -7
- data/lib/rdkafka/consumer/message.rb +3 -1
- data/lib/rdkafka/consumer/partition.rb +2 -0
- data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
- data/lib/rdkafka/consumer.rb +84 -44
- data/lib/rdkafka/error.rb +9 -0
- data/lib/rdkafka/metadata.rb +4 -2
- data/lib/rdkafka/native_kafka.rb +83 -0
- data/lib/rdkafka/producer/delivery_handle.rb +5 -2
- data/lib/rdkafka/producer/delivery_report.rb +9 -2
- data/lib/rdkafka/producer.rb +35 -13
- data/lib/rdkafka/version.rb +5 -3
- data/lib/rdkafka.rb +3 -1
- data/rdkafka.gemspec +2 -0
- data/spec/rdkafka/abstract_handle_spec.rb +2 -0
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
- data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
- data/spec/rdkafka/admin_spec.rb +4 -3
- data/spec/rdkafka/bindings_spec.rb +2 -0
- data/spec/rdkafka/callbacks_spec.rb +2 -0
- data/spec/rdkafka/config_spec.rb +17 -2
- data/spec/rdkafka/consumer/headers_spec.rb +62 -0
- data/spec/rdkafka/consumer/message_spec.rb +2 -0
- data/spec/rdkafka/consumer/partition_spec.rb +2 -0
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
- data/spec/rdkafka/consumer_spec.rb +97 -22
- data/spec/rdkafka/error_spec.rb +2 -0
- data/spec/rdkafka/metadata_spec.rb +2 -0
- data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -34
- data/spec/rdkafka/producer/delivery_handle_spec.rb +5 -0
- data/spec/rdkafka/producer/delivery_report_spec.rb +8 -2
- data/spec/rdkafka/producer_spec.rb +51 -19
- data/spec/spec_helper.rb +17 -1
- metadata +14 -14
- data/bin/console +0 -11
- data/lib/rdkafka/producer/client.rb +0 -47
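Two changes in this list surface directly in the producer API: delivery reports gain a topic_name field (data/lib/rdkafka/producer/delivery_report.rb) and the producer gains an explicit flush, both exercised by the spec diffs that follow. A minimal sketch of that flow, assuming a broker reachable at localhost:9092 and an existing produce_test_topic (neither is part of this diff):

require "rdkafka"

config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
producer = config.producer

# Produce one message and wait for its delivery report
handle = producer.produce(
  topic:   "produce_test_topic",
  payload: "payload",
  key:     "key"
)
report = handle.wait(max_wait_timeout: 5)

report.partition  # partition the message landed on
report.offset     # offset within that partition
report.topic_name # new in 0.13: the report carries its topic

# Flush outstanding deliveries, then shut the producer down
producer.flush
producer.close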
data/spec/rdkafka/producer_spec.rb
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "zlib"
 
@@ -7,7 +9,8 @@ describe Rdkafka::Producer do
 
   after do
     # Registry should always end up being empty
-    expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
+    registry = Rdkafka::Producer::DeliveryHandle::REGISTRY
+    expect(registry).to be_empty, registry.inspect
     producer.close
     consumer.close
   end
@@ -30,6 +33,7 @@ describe Rdkafka::Producer do
       expect(report).not_to be_nil
       expect(report.partition).to eq 1
       expect(report.offset).to be >= 0
+      expect(report.topic_name).to eq "produce_test_topic"
       @callback_called = true
     end
 
@@ -113,6 +117,7 @@ describe Rdkafka::Producer do
     expect(called_report.first).not_to be_nil
     expect(called_report.first.partition).to eq 1
     expect(called_report.first.offset).to be >= 0
+    expect(called_report.first.topic_name).to eq "produce_test_topic"
   end
 
   it "should provide handle" do
@@ -180,10 +185,11 @@ describe Rdkafka::Producer do
     expect(report.partition).to eq 1
     expect(report.offset).to be >= 0
 
-    #
+    # Flush and close producer
+    producer.flush
     producer.close
 
-    # Consume message and verify
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -207,7 +213,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -251,6 +257,28 @@ describe Rdkafka::Producer do
     expect(messages[2].key).to eq key
   end
 
+  it "should produce a message with empty string without crashing" do
+    messages = [{key: 'a', partition_key: ''}]
+
+    messages = messages.map do |m|
+      handle = producer.produce(
+        topic: "partitioner_test_topic",
+        payload: "payload partition",
+        key: m[:key],
+        partition_key: m[:partition_key]
+      )
+      report = handle.wait(max_wait_timeout: 5)
+
+      wait_for_message(
+        topic: "partitioner_test_topic",
+        delivery_report: report,
+      )
+    end
+
+    expect(messages[0].partition).to eq 0
+    expect(messages[0].key).to eq 'a'
+  end
+
   it "should produce a message with utf-8 encoding" do
     handle = producer.produce(
       topic: "produce_test_topic",
@@ -259,7 +287,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -292,7 +320,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -313,7 +341,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -333,7 +361,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify
+    # Consume message and verify its content
    message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -351,7 +379,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -371,7 +399,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -380,9 +408,9 @@ describe Rdkafka::Producer do
 
     expect(message.payload).to eq "payload headers"
     expect(message.key).to eq "key headers"
-    expect(message.headers[:foo]).to eq "bar"
-    expect(message.headers[:baz]).to eq "foobar"
-    expect(message.headers[:foobar]).to be_nil
+    expect(message.headers["foo"]).to eq "bar"
+    expect(message.headers["baz"]).to eq "foobar"
+    expect(message.headers["foobar"]).to be_nil
   end
 
   it "should produce a message with empty headers" do
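The rewritten assertions above read consumed headers by string key ("foo", "baz"), and a header that was never set comes back as nil. A short sketch of round-tripping headers under that convention, assuming the same local broker and produce_test_topic used throughout these specs:

require "rdkafka"

config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092",
  "group.id"          => "headers-example"
)

producer = config.producer
handle = producer.produce(
  topic:   "produce_test_topic",
  payload: "payload headers",
  key:     "key headers",
  headers: { "foo" => "bar", "baz" => "foobar" }
)
handle.wait(max_wait_timeout: 5)
producer.flush
producer.close

consumer = config.consumer
consumer.subscribe("produce_test_topic")
message = consumer.each { |m| break m if m.key == "key headers" }

message.headers["foo"]    # => "bar"
message.headers["baz"]    # => "foobar"
message.headers["foobar"] # => nil for a header that was never set
consumer.close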
@@ -394,7 +422,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -432,10 +460,10 @@ describe Rdkafka::Producer do
     # wait for and check the message in the main process.
     reader, writer = IO.pipe
 
-    fork do
+    pid = fork do
       reader.close
 
-      #
+      # Avoid sharing the client between processes.
      producer = rdkafka_producer_config.producer
 
       handle = producer.produce(
@@ -448,24 +476,28 @@ describe Rdkafka::Producer do
 
       report_json = JSON.generate(
         "partition" => report.partition,
-        "offset" => report.offset
+        "offset" => report.offset,
+        "topic_name" => report.topic_name
       )
 
       writer.write(report_json)
       writer.close
+      producer.flush
       producer.close
     end
+    Process.wait(pid)
 
     writer.close
     report_hash = JSON.parse(reader.read)
     report = Rdkafka::Producer::DeliveryReport.new(
       report_hash["partition"],
-      report_hash["offset"]
+      report_hash["offset"],
+      report_hash["topic_name"]
     )
 
     reader.close
 
-    # Consume message and verify
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
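The fork-based spec above now keeps the child pid and calls Process.wait(pid), so the parent only reads the pipe once the child has flushed, closed its producer, and exited. Stripped of Kafka, the pipe/fork/wait pattern it relies on looks like this sketch (the partition, offset, and topic values are placeholders):

require "json"

reader, writer = IO.pipe

pid = fork do
  reader.close
  # Child: do the work, then ship a JSON result through the pipe
  writer.write(JSON.generate("partition" => 1, "offset" => 0, "topic_name" => "produce_test_topic"))
  writer.close
end
Process.wait(pid) # reap the child; its output is complete once it has exited

writer.close # drop the parent's write end so reads can reach EOF
result = JSON.parse(reader.read)
reader.close

result.fetch("topic_name") # => "produce_test_topic"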
data/spec/spec_helper.rb
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 unless ENV["CI"] == "true"
   require "simplecov"
   SimpleCov.start do
@@ -71,7 +73,7 @@ def new_native_topic(topic_name="topic_name", native_client: )
 end
 
 def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
-  new_consumer =
+  new_consumer = consumer.nil?
   consumer ||= rdkafka_consumer_config.consumer
   consumer.subscribe(topic)
   timeout = Time.now.to_i + timeout_in_seconds
@@ -104,6 +106,20 @@ def wait_for_unassignment(consumer)
   end
 end
 
+def notify_listener(listener, &block)
+  # 1. subscribe and poll
+  consumer.subscribe("consume_test_topic")
+  wait_for_assignment(consumer)
+  consumer.poll(100)
+
+  block.call if block
+
+  # 2. unsubscribe
+  consumer.unsubscribe
+  wait_for_unassignment(consumer)
+  consumer.close
+end
+
 RSpec.configure do |config|
   config.filter_run focus: true
   config.run_all_when_everything_filtered = true
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.12.0
+  version: 0.13.0.beta.7
 platform: ruby
 authors:
 - Thijs Cadier
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2023-06-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -139,8 +139,7 @@ dependencies:
 description: Modern Kafka client library for Ruby based on librdkafka
 email:
 - thijs@appsignal.com
-executables:
-- console
+executables: []
 extensions:
 - ext/Rakefile
 extra_rdoc_files: []
@@ -155,7 +154,6 @@ files:
 - LICENSE
 - README.md
 - Rakefile
-- bin/console
 - docker-compose.yml
 - ext/README.md
 - ext/Rakefile
@@ -176,8 +174,8 @@ files:
 - lib/rdkafka/consumer/topic_partition_list.rb
 - lib/rdkafka/error.rb
 - lib/rdkafka/metadata.rb
+- lib/rdkafka/native_kafka.rb
 - lib/rdkafka/producer.rb
-- lib/rdkafka/producer/client.rb
 - lib/rdkafka/producer/delivery_handle.rb
 - lib/rdkafka/producer/delivery_report.rb
 - lib/rdkafka/version.rb
@@ -191,13 +189,14 @@ files:
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/headers_spec.rb
 - spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer/partition_spec.rb
 - spec/rdkafka/consumer/topic_partition_list_spec.rb
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
-- spec/rdkafka/producer/client_spec.rb
+- spec/rdkafka/native_kafka_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
@@ -206,7 +205,7 @@ homepage: https://github.com/thijsc/rdkafka-ruby
 licenses:
 - MIT
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -217,12 +216,12 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '2.6'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - ">"
     - !ruby/object:Gem::Version
-      version: '0'
+      version: 1.3.1
 requirements: []
-rubygems_version: 3.
-signing_key:
+rubygems_version: 3.4.1
+signing_key:
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
   It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
@@ -237,13 +236,14 @@ test_files:
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/headers_spec.rb
 - spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer/partition_spec.rb
 - spec/rdkafka/consumer/topic_partition_list_spec.rb
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
-- spec/rdkafka/producer/client_spec.rb
+- spec/rdkafka/native_kafka_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
data/bin/console
DELETED

data/lib/rdkafka/producer/client.rb
DELETED
@@ -1,47 +0,0 @@
-module Rdkafka
-  class Producer
-    class Client
-      def initialize(native)
-        @native = native
-
-        # Start thread to poll client for delivery callbacks
-        @polling_thread = Thread.new do
-          loop do
-            Rdkafka::Bindings.rd_kafka_poll(native, 250)
-            # Exit thread if closing and the poll queue is empty
-            if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(native) == 0
-              break
-            end
-          end
-        end
-        @polling_thread.abort_on_exception = true
-        @polling_thread[:closing] = false
-      end
-
-      def native
-        @native
-      end
-
-      def finalizer
-        ->(_) { close }
-      end
-
-      def closed?
-        @native.nil?
-      end
-
-      def close(object_id=nil)
-        return unless @native
-
-        # Indicate to polling thread that we're closing
-        @polling_thread[:closing] = true
-        # Wait for the polling thread to finish up
-        @polling_thread.join
-
-        Rdkafka::Bindings.rd_kafka_destroy(@native)
-
-        @native = nil
-      end
-    end
-  end
-end