rdkafka 0.22.0.beta1-x86_64-linux-gnu

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (102)
  1. checksums.yaml +7 -0
  2. data/.github/CODEOWNERS +3 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/workflows/ci_linux_x86_64_gnu.yml +249 -0
  5. data/.github/workflows/ci_linux_x86_64_musl.yml +205 -0
  6. data/.github/workflows/ci_macos_arm64.yml +306 -0
  7. data/.github/workflows/push_linux_x86_64_gnu.yml +64 -0
  8. data/.github/workflows/push_linux_x86_64_musl.yml +77 -0
  9. data/.github/workflows/push_macos_arm64.yml +54 -0
  10. data/.github/workflows/push_ruby.yml +37 -0
  11. data/.github/workflows/verify-action-pins.yml +16 -0
  12. data/.gitignore +14 -0
  13. data/.rspec +2 -0
  14. data/.ruby-gemset +1 -0
  15. data/.ruby-version +1 -0
  16. data/.yardopts +2 -0
  17. data/CHANGELOG.md +247 -0
  18. data/Gemfile +5 -0
  19. data/MIT-LICENSE +22 -0
  20. data/README.md +178 -0
  21. data/Rakefile +96 -0
  22. data/docker-compose.yml +25 -0
  23. data/ext/README.md +19 -0
  24. data/ext/Rakefile +131 -0
  25. data/ext/build_common.sh +361 -0
  26. data/ext/build_linux_x86_64_gnu.sh +306 -0
  27. data/ext/build_linux_x86_64_musl.sh +763 -0
  28. data/ext/build_macos_arm64.sh +550 -0
  29. data/ext/librdkafka.so +0 -0
  30. data/lib/rdkafka/abstract_handle.rb +116 -0
  31. data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
  32. data/lib/rdkafka/admin/config_binding_result.rb +30 -0
  33. data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
  34. data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
  35. data/lib/rdkafka/admin/create_acl_report.rb +24 -0
  36. data/lib/rdkafka/admin/create_partitions_handle.rb +27 -0
  37. data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
  38. data/lib/rdkafka/admin/create_topic_handle.rb +29 -0
  39. data/lib/rdkafka/admin/create_topic_report.rb +24 -0
  40. data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
  41. data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
  42. data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
  43. data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
  44. data/lib/rdkafka/admin/delete_topic_handle.rb +29 -0
  45. data/lib/rdkafka/admin/delete_topic_report.rb +24 -0
  46. data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
  47. data/lib/rdkafka/admin/describe_acl_report.rb +24 -0
  48. data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
  49. data/lib/rdkafka/admin/describe_configs_report.rb +54 -0
  50. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
  51. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +54 -0
  52. data/lib/rdkafka/admin.rb +833 -0
  53. data/lib/rdkafka/bindings.rb +566 -0
  54. data/lib/rdkafka/callbacks.rb +415 -0
  55. data/lib/rdkafka/config.rb +398 -0
  56. data/lib/rdkafka/consumer/headers.rb +79 -0
  57. data/lib/rdkafka/consumer/message.rb +86 -0
  58. data/lib/rdkafka/consumer/partition.rb +51 -0
  59. data/lib/rdkafka/consumer/topic_partition_list.rb +169 -0
  60. data/lib/rdkafka/consumer.rb +653 -0
  61. data/lib/rdkafka/error.rb +101 -0
  62. data/lib/rdkafka/helpers/oauth.rb +58 -0
  63. data/lib/rdkafka/helpers/time.rb +14 -0
  64. data/lib/rdkafka/metadata.rb +115 -0
  65. data/lib/rdkafka/native_kafka.rb +139 -0
  66. data/lib/rdkafka/producer/delivery_handle.rb +40 -0
  67. data/lib/rdkafka/producer/delivery_report.rb +46 -0
  68. data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
  69. data/lib/rdkafka/producer.rb +430 -0
  70. data/lib/rdkafka/version.rb +7 -0
  71. data/lib/rdkafka.rb +54 -0
  72. data/rdkafka.gemspec +65 -0
  73. data/renovate.json +92 -0
  74. data/spec/rdkafka/abstract_handle_spec.rb +117 -0
  75. data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
  76. data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
  77. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  78. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  79. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
  80. data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
  81. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  82. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  83. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
  84. data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
  85. data/spec/rdkafka/admin_spec.rb +770 -0
  86. data/spec/rdkafka/bindings_spec.rb +223 -0
  87. data/spec/rdkafka/callbacks_spec.rb +20 -0
  88. data/spec/rdkafka/config_spec.rb +258 -0
  89. data/spec/rdkafka/consumer/headers_spec.rb +73 -0
  90. data/spec/rdkafka/consumer/message_spec.rb +139 -0
  91. data/spec/rdkafka/consumer/partition_spec.rb +57 -0
  92. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +248 -0
  93. data/spec/rdkafka/consumer_spec.rb +1274 -0
  94. data/spec/rdkafka/error_spec.rb +89 -0
  95. data/spec/rdkafka/metadata_spec.rb +79 -0
  96. data/spec/rdkafka/native_kafka_spec.rb +130 -0
  97. data/spec/rdkafka/producer/delivery_handle_spec.rb +45 -0
  98. data/spec/rdkafka/producer/delivery_report_spec.rb +25 -0
  99. data/spec/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
  100. data/spec/rdkafka/producer_spec.rb +1052 -0
  101. data/spec/spec_helper.rb +195 -0
  102. metadata +276 -0
@@ -0,0 +1,1052 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "zlib"
4
+
5
+ describe Rdkafka::Producer do
6
+ let(:producer) { rdkafka_producer_config.producer }
7
+ let(:consumer) { rdkafka_consumer_config.consumer }
8
+
9
+ after do
10
+ # Registry should always end up being empty
11
+ registry = Rdkafka::Producer::DeliveryHandle::REGISTRY
12
+ expect(registry).to be_empty, registry.inspect
13
+ producer.close
14
+ consumer.close
15
+ end
16
+
17
+ describe 'producer without auto-start' do
18
+ let(:producer) { rdkafka_producer_config.producer(native_kafka_auto_start: false) }
19
+
20
+ it 'expect to be able to start it later and close' do
21
+ producer.start
22
+ producer.close
23
+ end
24
+
25
+ it 'expect to be able to close it without starting' do
26
+ producer.close
27
+ end
28
+ end
29
+
30
+ describe '#name' do
31
+ it { expect(producer.name).to include('rdkafka#producer-') }
32
+ end
33
+
34
+ describe '#produce with topic config alterations' do
35
+ context 'when config is not valid' do
36
+ it 'expect to raise error' do
37
+ expect do
38
+ producer.produce(topic: 'test', payload: '', topic_config: { 'invalid': 'invalid' })
39
+ end.to raise_error(Rdkafka::Config::ConfigError)
40
+ end
41
+ end
42
+
43
+ context 'when config is valid' do
44
+ it 'expect to raise error' do
45
+ expect do
46
+ producer.produce(topic: 'test', payload: '', topic_config: { 'acks': 1 }).wait
47
+ end.not_to raise_error
48
+ end
49
+
50
+ context 'when alteration should change behavior' do
51
+ # This is set incorrectly for a reason
52
+ # If alteration would not work, this will hang the spec suite
53
+ let(:producer) do
54
+ rdkafka_producer_config(
55
+ 'message.timeout.ms': 1_000_000,
56
+ :"bootstrap.servers" => "127.0.0.1:9094",
57
+ ).producer
58
+ end
59
+
60
+ it 'expect to give up on delivery fast based on alteration config' do
61
+ expect do
62
+ producer.produce(
63
+ topic: 'produce_config_test',
64
+ payload: 'test',
65
+ topic_config: {
66
+ 'compression.type': 'gzip',
67
+ 'message.timeout.ms': 1
68
+ }
69
+ ).wait
70
+ end.to raise_error(Rdkafka::RdkafkaError, /msg_timed_out/)
71
+ end
72
+ end
73
+ end
74
+ end
75
+
76
+ context "delivery callback" do
77
+ context "with a proc/lambda" do
78
+ it "should set the callback" do
79
+ expect {
80
+ producer.delivery_callback = lambda do |delivery_handle|
81
+ puts delivery_handle
82
+ end
83
+ }.not_to raise_error
84
+ expect(producer.delivery_callback).to respond_to :call
85
+ end
86
+
87
+ it "should call the callback when a message is delivered" do
88
+ @callback_called = false
89
+
90
+ producer.delivery_callback = lambda do |report|
91
+ expect(report).not_to be_nil
92
+ expect(report.label).to eq "label"
93
+ expect(report.partition).to eq 1
94
+ expect(report.offset).to be >= 0
95
+ expect(report.topic_name).to eq "produce_test_topic"
96
+ @callback_called = true
97
+ end
98
+
99
+ # Produce a message
100
+ handle = producer.produce(
101
+ topic: "produce_test_topic",
102
+ payload: "payload",
103
+ key: "key",
104
+ label: "label"
105
+ )
106
+
107
+ expect(handle.label).to eq "label"
108
+
109
+ # Wait for it to be delivered
110
+ handle.wait(max_wait_timeout: 15)
111
+
112
+ # Join the producer thread.
113
+ producer.close
114
+
115
+ # Callback should have been called
116
+ expect(@callback_called).to be true
117
+ end
118
+
119
+ it "should provide handle" do
120
+ @callback_handle = nil
121
+
122
+ producer.delivery_callback = lambda { |_, handle| @callback_handle = handle }
123
+
124
+ # Produce a message
125
+ handle = producer.produce(
126
+ topic: "produce_test_topic",
127
+ payload: "payload",
128
+ key: "key"
129
+ )
130
+
131
+ # Wait for it to be delivered
132
+ handle.wait(max_wait_timeout: 15)
133
+
134
+ # Join the producer thread.
135
+ producer.close
136
+
137
+ expect(handle).to be @callback_handle
138
+ end
139
+ end
140
+
141
+ context "with a callable object" do
142
+ it "should set the callback" do
143
+ callback = Class.new do
144
+ def call(stats); end
145
+ end
146
+ expect {
147
+ producer.delivery_callback = callback.new
148
+ }.not_to raise_error
149
+ expect(producer.delivery_callback).to respond_to :call
150
+ end
151
+
152
+ it "should call the callback when a message is delivered" do
153
+ called_report = []
154
+ callback = Class.new do
155
+ def initialize(called_report)
156
+ @called_report = called_report
157
+ end
158
+
159
+ def call(report)
160
+ @called_report << report
161
+ end
162
+ end
163
+ producer.delivery_callback = callback.new(called_report)
164
+
165
+ # Produce a message
166
+ handle = producer.produce(
167
+ topic: "produce_test_topic",
168
+ payload: "payload",
169
+ key: "key"
170
+ )
171
+
172
+ # Wait for it to be delivered
173
+ handle.wait(max_wait_timeout: 15)
174
+
175
+ # Join the producer thread.
176
+ producer.close
177
+
178
+ # Callback should have been called
179
+ expect(called_report.first).not_to be_nil
180
+ expect(called_report.first.partition).to eq 1
181
+ expect(called_report.first.offset).to be >= 0
182
+ expect(called_report.first.topic_name).to eq "produce_test_topic"
183
+ end
184
+
185
+ it "should provide handle" do
186
+ callback_handles = []
187
+ callback = Class.new do
188
+ def initialize(callback_handles)
189
+ @callback_handles = callback_handles
190
+ end
191
+
192
+ def call(_, handle)
193
+ @callback_handles << handle
194
+ end
195
+ end
196
+ producer.delivery_callback = callback.new(callback_handles)
197
+
198
+ # Produce a message
199
+ handle = producer.produce(
200
+ topic: "produce_test_topic",
201
+ payload: "payload",
202
+ key: "key"
203
+ )
204
+
205
+ # Wait for it to be delivered
206
+ handle.wait(max_wait_timeout: 15)
207
+
208
+ # Join the producer thread.
209
+ producer.close
210
+
211
+ # Callback should have been called
212
+ expect(handle).to be callback_handles.first
213
+ end
214
+ end
215
+
216
+ it "should not accept a callback that's not callable" do
217
+ expect {
218
+ producer.delivery_callback = 'a string'
219
+ }.to raise_error(TypeError)
220
+ end
221
+ end
222
+
223
+ it "should require a topic" do
224
+ expect {
225
+ producer.produce(
226
+ payload: "payload",
227
+ key: "key"
228
+ )
229
+ }.to raise_error ArgumentError, /missing keyword: [\:]?topic/
230
+ end
231
+
232
+ it "should produce a message" do
233
+ # Produce a message
234
+ handle = producer.produce(
235
+ topic: "produce_test_topic",
236
+ payload: "payload",
237
+ key: "key",
238
+ label: "label"
239
+ )
240
+
241
+ # Should be pending at first
242
+ expect(handle.pending?).to be true
243
+ expect(handle.label).to eq "label"
244
+
245
+ # Check delivery handle and report
246
+ report = handle.wait(max_wait_timeout: 5)
247
+ expect(handle.pending?).to be false
248
+ expect(report).not_to be_nil
249
+ expect(report.partition).to eq 1
250
+ expect(report.offset).to be >= 0
251
+ expect(report.label).to eq "label"
252
+
253
+ # Flush and close producer
254
+ producer.flush
255
+ producer.close
256
+
257
+ # Consume message and verify its content
258
+ message = wait_for_message(
259
+ topic: "produce_test_topic",
260
+ delivery_report: report,
261
+ consumer: consumer
262
+ )
263
+ expect(message.partition).to eq 1
264
+ expect(message.payload).to eq "payload"
265
+ expect(message.key).to eq "key"
266
+ # Since api.version.request is on by default we will get
267
+ # the message creation timestamp if it's not set.
268
+ expect(message.timestamp).to be_within(10).of(Time.now)
269
+ end
270
+
271
+ it "should produce a message with a specified partition" do
272
+ # Produce a message
273
+ handle = producer.produce(
274
+ topic: "produce_test_topic",
275
+ payload: "payload partition",
276
+ key: "key partition",
277
+ partition: 1
278
+ )
279
+ report = handle.wait(max_wait_timeout: 5)
280
+
281
+ # Consume message and verify its content
282
+ message = wait_for_message(
283
+ topic: "produce_test_topic",
284
+ delivery_report: report,
285
+ consumer: consumer
286
+ )
287
+ expect(message.partition).to eq 1
288
+ expect(message.key).to eq "key partition"
289
+ end
290
+
291
+ it "should produce a message to the same partition with a similar partition key" do
292
+ # Avoid partitioner collisions.
293
+ while true
294
+ key = ('a'..'z').to_a.shuffle.take(10).join('')
295
+ partition_key = ('a'..'z').to_a.shuffle.take(10).join('')
296
+ partition_count = producer.partition_count('partitioner_test_topic')
297
+ break if (Zlib.crc32(key) % partition_count) != (Zlib.crc32(partition_key) % partition_count)
298
+ end
299
+
300
+ # Produce a message with key, partition_key and key + partition_key
301
+ messages = [{key: key}, {partition_key: partition_key}, {key: key, partition_key: partition_key}]
302
+
303
+ messages = messages.map do |m|
304
+ handle = producer.produce(
305
+ topic: "partitioner_test_topic",
306
+ payload: "payload partition",
307
+ key: m[:key],
308
+ partition_key: m[:partition_key]
309
+ )
310
+ report = handle.wait(max_wait_timeout: 5)
311
+
312
+ wait_for_message(
313
+ topic: "partitioner_test_topic",
314
+ delivery_report: report,
315
+ )
316
+ end
317
+
318
+ expect(messages[0].partition).not_to eq(messages[2].partition)
319
+ expect(messages[1].partition).to eq(messages[2].partition)
320
+ expect(messages[0].key).to eq key
321
+ expect(messages[1].key).to be_nil
322
+ expect(messages[2].key).to eq key
323
+ end
324
+
325
+ it "should produce a message with empty string without crashing" do
326
+ messages = [{key: 'a', partition_key: ''}]
327
+
328
+ messages = messages.map do |m|
329
+ handle = producer.produce(
330
+ topic: "partitioner_test_topic",
331
+ payload: "payload partition",
332
+ key: m[:key],
333
+ partition_key: m[:partition_key]
334
+ )
335
+ report = handle.wait(max_wait_timeout: 5)
336
+
337
+ wait_for_message(
338
+ topic: "partitioner_test_topic",
339
+ delivery_report: report,
340
+ )
341
+ end
342
+
343
+ expect(messages[0].partition).to eq 0
344
+ expect(messages[0].key).to eq 'a'
345
+ end
346
+
347
+ it "should produce a message with utf-8 encoding" do
348
+ handle = producer.produce(
349
+ topic: "produce_test_topic",
350
+ payload: "Τη γλώσσα μου έδωσαν ελληνική",
351
+ key: "key utf8"
352
+ )
353
+ report = handle.wait(max_wait_timeout: 5)
354
+
355
+ # Consume message and verify its content
356
+ message = wait_for_message(
357
+ topic: "produce_test_topic",
358
+ delivery_report: report,
359
+ consumer: consumer
360
+ )
361
+
362
+ expect(message.partition).to eq 1
363
+ expect(message.payload.force_encoding("utf-8")).to eq "Τη γλώσσα μου έδωσαν ελληνική"
364
+ expect(message.key).to eq "key utf8"
365
+ end
366
+
367
+ it "should produce a message to a non-existing topic with key and partition key" do
368
+ new_topic = "it-#{SecureRandom.uuid}"
369
+
370
+ handle = producer.produce(
371
+ # Needs to be a new topic each time
372
+ topic: new_topic,
373
+ payload: "payload",
374
+ key: "key",
375
+ partition_key: "partition_key",
376
+ label: "label"
377
+ )
378
+
379
+ # Should be pending at first
380
+ expect(handle.pending?).to be true
381
+ expect(handle.label).to eq "label"
382
+
383
+ # Check delivery handle and report
384
+ report = handle.wait(max_wait_timeout: 5)
385
+ expect(handle.pending?).to be false
386
+ expect(report).not_to be_nil
387
+ expect(report.partition).to eq 0
388
+ expect(report.offset).to be >= 0
389
+ expect(report.label).to eq "label"
390
+
391
+ # Flush and close producer
392
+ producer.flush
393
+ producer.close
394
+
395
+ # Consume message and verify its content
396
+ message = wait_for_message(
397
+ topic: new_topic,
398
+ delivery_report: report,
399
+ consumer: consumer
400
+ )
401
+ expect(message.partition).to eq 0
402
+ expect(message.payload).to eq "payload"
403
+ expect(message.key).to eq "key"
404
+ # Since api.version.request is on by default we will get
405
+ # the message creation timestamp if it's not set.
406
+ expect(message.timestamp).to be_within(10).of(Time.now)
407
+ end
408
+
409
+ context "timestamp" do
410
+ it "should raise a type error if not nil, integer or time" do
411
+ expect {
412
+ producer.produce(
413
+ topic: "produce_test_topic",
414
+ payload: "payload timestamp",
415
+ key: "key timestamp",
416
+ timestamp: "10101010"
417
+ )
418
+ }.to raise_error TypeError
419
+ end
420
+
421
+ it "should produce a message with an integer timestamp" do
422
+ handle = producer.produce(
423
+ topic: "produce_test_topic",
424
+ payload: "payload timestamp",
425
+ key: "key timestamp",
426
+ timestamp: 1505069646252
427
+ )
428
+ report = handle.wait(max_wait_timeout: 5)
429
+
430
+ # Consume message and verify its content
431
+ message = wait_for_message(
432
+ topic: "produce_test_topic",
433
+ delivery_report: report,
434
+ consumer: consumer
435
+ )
436
+
437
+ expect(message.partition).to eq 2
438
+ expect(message.key).to eq "key timestamp"
439
+ expect(message.timestamp).to eq Time.at(1505069646, 252_000)
440
+ end
441
+
442
+ it "should produce a message with a time timestamp" do
443
+ handle = producer.produce(
444
+ topic: "produce_test_topic",
445
+ payload: "payload timestamp",
446
+ key: "key timestamp",
447
+ timestamp: Time.at(1505069646, 353_000)
448
+ )
449
+ report = handle.wait(max_wait_timeout: 5)
450
+
451
+ # Consume message and verify its content
452
+ message = wait_for_message(
453
+ topic: "produce_test_topic",
454
+ delivery_report: report,
455
+ consumer: consumer
456
+ )
457
+
458
+ expect(message.partition).to eq 2
459
+ expect(message.key).to eq "key timestamp"
460
+ expect(message.timestamp).to eq Time.at(1505069646, 353_000)
461
+ end
462
+ end
463
+
464
+ it "should produce a message with nil key" do
465
+ handle = producer.produce(
466
+ topic: "produce_test_topic",
467
+ payload: "payload no key"
468
+ )
469
+ report = handle.wait(max_wait_timeout: 5)
470
+
471
+ # Consume message and verify its content
472
+ message = wait_for_message(
473
+ topic: "produce_test_topic",
474
+ delivery_report: report,
475
+ consumer: consumer
476
+ )
477
+
478
+ expect(message.key).to be_nil
479
+ expect(message.payload).to eq "payload no key"
480
+ end
481
+
482
+ it "should produce a message with nil payload" do
483
+ handle = producer.produce(
484
+ topic: "produce_test_topic",
485
+ key: "key no payload"
486
+ )
487
+ report = handle.wait(max_wait_timeout: 5)
488
+
489
+ # Consume message and verify its content
490
+ message = wait_for_message(
491
+ topic: "produce_test_topic",
492
+ delivery_report: report,
493
+ consumer: consumer
494
+ )
495
+
496
+ expect(message.key).to eq "key no payload"
497
+ expect(message.payload).to be_nil
498
+ end
499
+
500
+ it "should produce a message with headers" do
501
+ handle = producer.produce(
502
+ topic: "produce_test_topic",
503
+ payload: "payload headers",
504
+ key: "key headers",
505
+ headers: { foo: :bar, baz: :foobar }
506
+ )
507
+ report = handle.wait(max_wait_timeout: 5)
508
+
509
+ # Consume message and verify its content
510
+ message = wait_for_message(
511
+ topic: "produce_test_topic",
512
+ delivery_report: report,
513
+ consumer: consumer
514
+ )
515
+
516
+ expect(message.payload).to eq "payload headers"
517
+ expect(message.key).to eq "key headers"
518
+ expect(message.headers["foo"]).to eq "bar"
519
+ expect(message.headers["baz"]).to eq "foobar"
520
+ expect(message.headers["foobar"]).to be_nil
521
+ end
522
+
523
+ it "should produce a message with empty headers" do
524
+ handle = producer.produce(
525
+ topic: "produce_test_topic",
526
+ payload: "payload headers",
527
+ key: "key headers",
528
+ headers: {}
529
+ )
530
+ report = handle.wait(max_wait_timeout: 5)
531
+
532
+ # Consume message and verify its content
533
+ message = wait_for_message(
534
+ topic: "produce_test_topic",
535
+ delivery_report: report,
536
+ consumer: consumer
537
+ )
538
+
539
+ expect(message.payload).to eq "payload headers"
540
+ expect(message.key).to eq "key headers"
541
+ expect(message.headers).to be_empty
542
+ end
543
+
544
+ it "should produce message that aren't waited for and not crash" do
545
+ 5.times do
546
+ 200.times do
547
+ producer.produce(
548
+ topic: "produce_test_topic",
549
+ payload: "payload not waiting",
550
+ key: "key not waiting"
551
+ )
552
+ end
553
+
554
+ # Allow some time for a GC run
555
+ sleep 1
556
+ end
557
+
558
+ # Wait for the delivery notifications
559
+ 10.times do
560
+ break if Rdkafka::Producer::DeliveryHandle::REGISTRY.empty?
561
+ sleep 1
562
+ end
563
+ end
564
+
565
+ it "should produce a message in a forked process", skip: defined?(JRUBY_VERSION) && "Kernel#fork is not available" do
566
+ # Fork, produce a message, send the report over a pipe and
567
+ # wait for and check the message in the main process.
568
+ reader, writer = IO.pipe
569
+
570
+ pid = fork do
571
+ reader.close
572
+
573
+ # Avoid sharing the client between processes.
574
+ producer = rdkafka_producer_config.producer
575
+
576
+ handle = producer.produce(
577
+ topic: "produce_test_topic",
578
+ payload: "payload-forked",
579
+ key: "key-forked"
580
+ )
581
+
582
+ report = handle.wait(max_wait_timeout: 5)
583
+
584
+ report_json = JSON.generate(
585
+ "partition" => report.partition,
586
+ "offset" => report.offset,
587
+ "topic_name" => report.topic_name
588
+ )
589
+
590
+ writer.write(report_json)
591
+ writer.close
592
+ producer.flush
593
+ producer.close
594
+ end
595
+ Process.wait(pid)
596
+
597
+ writer.close
598
+ report_hash = JSON.parse(reader.read)
599
+ report = Rdkafka::Producer::DeliveryReport.new(
600
+ report_hash["partition"],
601
+ report_hash["offset"],
602
+ report_hash["topic_name"]
603
+ )
604
+
605
+ reader.close
606
+
607
+ # Consume message and verify its content
608
+ message = wait_for_message(
609
+ topic: "produce_test_topic",
610
+ delivery_report: report,
611
+ consumer: consumer
612
+ )
613
+ expect(message.partition).to eq 0
614
+ expect(message.payload).to eq "payload-forked"
615
+ expect(message.key).to eq "key-forked"
616
+ end
617
+
618
+ it "should raise an error when producing fails" do
619
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_producev).and_return(20)
620
+
621
+ expect {
622
+ producer.produce(
623
+ topic: "produce_test_topic",
624
+ key: "key error"
625
+ )
626
+ }.to raise_error Rdkafka::RdkafkaError
627
+ end
628
+
629
+ it "should raise a timeout error when waiting too long" do
630
+ handle = producer.produce(
631
+ topic: "produce_test_topic",
632
+ payload: "payload timeout",
633
+ key: "key timeout"
634
+ )
635
+ expect {
636
+ handle.wait(max_wait_timeout: 0)
637
+ }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
638
+
639
+ # Waiting a second time should work
640
+ handle.wait(max_wait_timeout: 5)
641
+ end
642
+
643
+ context "methods that should not be called after a producer has been closed" do
644
+ before do
645
+ producer.close
646
+ end
647
+
648
+ # Affected methods and a non-invalid set of parameters for the method
649
+ {
650
+ :produce => { topic: nil },
651
+ :partition_count => nil,
652
+ }.each do |method, args|
653
+ it "raises an exception if #{method} is called" do
654
+ expect {
655
+ if args.is_a?(Hash)
656
+ producer.public_send(method, **args)
657
+ else
658
+ producer.public_send(method, args)
659
+ end
660
+ }.to raise_exception(Rdkafka::ClosedProducerError, /#{method.to_s}/)
661
+ end
662
+ end
663
+ end
664
+
665
+ context "when not being able to deliver the message" do
666
+ let(:producer) do
667
+ rdkafka_producer_config(
668
+ "bootstrap.servers": "127.0.0.1:9093",
669
+ "message.timeout.ms": 100
670
+ ).producer
671
+ end
672
+
673
+ it "should contain the error in the response when not deliverable" do
674
+ handler = producer.produce(topic: 'produce_test_topic', payload: nil, label: 'na')
675
+ # Wait for the async callbacks and delivery registry to update
676
+ sleep(2)
677
+ expect(handler.create_result.error).to be_a(Rdkafka::RdkafkaError)
678
+ expect(handler.create_result.label).to eq('na')
679
+ end
680
+ end
681
+
682
+ context "when topic does not exist and allow.auto.create.topics is false" do
683
+ let(:producer) do
684
+ rdkafka_producer_config(
685
+ "bootstrap.servers": "127.0.0.1:9092",
686
+ "message.timeout.ms": 100,
687
+ "allow.auto.create.topics": false
688
+ ).producer
689
+ end
690
+
691
+ it "should contain the error in the response when not deliverable" do
692
+ handler = producer.produce(topic: "it-#{SecureRandom.uuid}", payload: nil, label: 'na')
693
+ # Wait for the async callbacks and delivery registry to update
694
+ sleep(2)
695
+ expect(handler.create_result.error).to be_a(Rdkafka::RdkafkaError)
696
+ expect(handler.create_result.error.code).to eq(:msg_timed_out)
697
+ expect(handler.create_result.label).to eq('na')
698
+ end
699
+ end
700
+
701
+ describe '#partition_count' do
702
+ it { expect(producer.partition_count('consume_test_topic')).to eq(3) }
703
+
704
+ context 'when the partition count value is already cached' do
705
+ before do
706
+ producer.partition_count('consume_test_topic')
707
+ allow(::Rdkafka::Metadata).to receive(:new).and_call_original
708
+ end
709
+
710
+ it 'expect not to query it again' do
711
+ producer.partition_count('consume_test_topic')
712
+ expect(::Rdkafka::Metadata).not_to have_received(:new)
713
+ end
714
+ end
715
+
716
+ context 'when the partition count value was cached but time expired' do
717
+ before do
718
+ ::Rdkafka::Producer.partitions_count_cache = Rdkafka::Producer::PartitionsCountCache.new
719
+ allow(::Rdkafka::Metadata).to receive(:new).and_call_original
720
+ end
721
+
722
+ it 'expect to query it again' do
723
+ producer.partition_count('consume_test_topic')
724
+ expect(::Rdkafka::Metadata).to have_received(:new)
725
+ end
726
+ end
727
+
728
+ context 'when the partition count value was cached and time did not expire' do
729
+ before do
730
+ allow(::Process).to receive(:clock_gettime).and_return(0, 29.001)
731
+ producer.partition_count('consume_test_topic')
732
+ allow(::Rdkafka::Metadata).to receive(:new).and_call_original
733
+ end
734
+
735
+ it 'expect not to query it again' do
736
+ producer.partition_count('consume_test_topic')
737
+ expect(::Rdkafka::Metadata).not_to have_received(:new)
738
+ end
739
+ end
740
+ end
741
+
742
+ describe '#flush' do
743
+ it "should return flush when it can flush all outstanding messages or when no messages" do
744
+ producer.produce(
745
+ topic: "produce_test_topic",
746
+ payload: "payload headers",
747
+ key: "key headers",
748
+ headers: {}
749
+ )
750
+
751
+ expect(producer.flush(5_000)).to eq(true)
752
+ end
753
+
754
+ context 'when it cannot flush due to a timeout' do
755
+ let(:producer) do
756
+ rdkafka_producer_config(
757
+ "bootstrap.servers": "127.0.0.1:9093",
758
+ "message.timeout.ms": 2_000
759
+ ).producer
760
+ end
761
+
762
+ after do
763
+ # Allow rdkafka to evict message preventing memory-leak
764
+ sleep(2)
765
+ end
766
+
767
+ it "should return false on flush when cannot deliver and beyond timeout" do
768
+ producer.produce(
769
+ topic: "produce_test_topic",
770
+ payload: "payload headers",
771
+ key: "key headers",
772
+ headers: {}
773
+ )
774
+
775
+ expect(producer.flush(1_000)).to eq(false)
776
+ end
777
+ end
778
+
779
+ context 'when there is a different error' do
780
+ before { allow(Rdkafka::Bindings).to receive(:rd_kafka_flush).and_return(-199) }
781
+
782
+ it 'should raise it' do
783
+ expect { producer.flush }.to raise_error(Rdkafka::RdkafkaError)
784
+ end
785
+ end
786
+ end
787
+
788
+ describe '#purge' do
789
+ context 'when no outgoing messages' do
790
+ it { expect(producer.purge).to eq(true) }
791
+ end
792
+
793
+ context 'when librdkafka purge returns an error' do
794
+ before { expect(Rdkafka::Bindings).to receive(:rd_kafka_purge).and_return(-153) }
795
+
796
+ it 'expect to raise an error' do
797
+ expect { producer.purge }.to raise_error(Rdkafka::RdkafkaError, /retry/)
798
+ end
799
+ end
800
+
801
+ context 'when there are outgoing things in the queue' do
802
+ let(:producer) do
803
+ rdkafka_producer_config(
804
+ "bootstrap.servers": "127.0.0.1:9093",
805
+ "message.timeout.ms": 2_000
806
+ ).producer
807
+ end
808
+
809
+ it "should should purge and move forward" do
810
+ producer.produce(
811
+ topic: "produce_test_topic",
812
+ payload: "payload headers"
813
+ )
814
+
815
+ expect(producer.purge).to eq(true)
816
+ expect(producer.flush(1_000)).to eq(true)
817
+ end
818
+
819
+ it "should materialize the delivery handles" do
820
+ handle = producer.produce(
821
+ topic: "produce_test_topic",
822
+ payload: "payload headers"
823
+ )
824
+
825
+ expect(producer.purge).to eq(true)
826
+
827
+ expect { handle.wait }.to raise_error(Rdkafka::RdkafkaError, /purge_queue/)
828
+ end
829
+
830
+ context "when using delivery_callback" do
831
+ let(:delivery_reports) { [] }
832
+
833
+ let(:delivery_callback) do
834
+ ->(delivery_report) { delivery_reports << delivery_report }
835
+ end
836
+
837
+ before { producer.delivery_callback = delivery_callback }
838
+
839
+ it "should run the callback" do
840
+ handle = producer.produce(
841
+ topic: "produce_test_topic",
842
+ payload: "payload headers"
843
+ )
844
+
845
+ expect(producer.purge).to eq(true)
846
+ # queue purge
847
+ expect(delivery_reports[0].error).to eq(-152)
848
+ end
849
+ end
850
+ end
851
+ end
852
+
853
+ describe '#oauthbearer_set_token' do
854
+ context 'when sasl not configured' do
855
+ it 'should return RD_KAFKA_RESP_ERR__STATE' do
856
+ response = producer.oauthbearer_set_token(
857
+ token: "foo",
858
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
859
+ principal_name: "kafka-cluster"
860
+ )
861
+ expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
862
+ end
863
+ end
864
+
865
+ context 'when sasl configured' do
866
+ it 'should succeed' do
867
+ producer_sasl = rdkafka_producer_config(
868
+ {
869
+ "security.protocol": "sasl_ssl",
870
+ "sasl.mechanisms": 'OAUTHBEARER'
871
+ }
872
+ ).producer
873
+ response = producer_sasl.oauthbearer_set_token(
874
+ token: "foo",
875
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
876
+ principal_name: "kafka-cluster"
877
+ )
878
+ expect(response).to eq(0)
879
+ end
880
+ end
881
+ end
882
+
883
+ # Round-trips messages with headers through Kafka and asserts the
+ # header values survive produce/consume intact.
+ describe "#produce with headers" do
884
+ it "should produce a message with array headers" do
885
+ # A header value may itself be an array of strings.
+ headers = {
886
+ "version" => ["2.1.3", "2.1.4"],
887
+ "type" => "String"
888
+ }
889
+
890
+ report = producer.produce(
891
+ topic: "consume_test_topic",
892
+ key: "key headers",
893
+ headers: headers
894
+ ).wait
895
+
896
+ message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
897
+ expect(message).to be
898
+ expect(message.key).to eq('key headers')
899
+ expect(message.headers['type']).to eq('String')
900
+ # Array-valued headers come back as arrays, not joined strings.
+ expect(message.headers['version']).to eq(["2.1.3", "2.1.4"])
901
+ end
902
+
903
+ it "should produce a message with single value headers" do
904
+ headers = {
905
+ "version" => "2.1.3",
906
+ "type" => "String"
907
+ }
908
+
909
+ report = producer.produce(
910
+ topic: "consume_test_topic",
911
+ key: "key headers",
912
+ headers: headers
913
+ ).wait
914
+
915
+ message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
916
+ expect(message).to be
917
+ expect(message.key).to eq('key headers')
918
+ expect(message.headers['type']).to eq('String')
919
+ expect(message.headers['version']).to eq('2.1.3')
920
+ end
921
+ end
922
+
923
+
924
+ # Verifies that, when a statistics callback is registered, incoming
+ # statistics refresh the TTL entry of the partitions-count cache.
+ describe 'with active statistics callback' do
925
+ let(:producer) do
926
+ # Short statistics interval so the sleep(1.5) below spans at least
+ # one statistics emission.
+ rdkafka_producer_config('statistics.interval.ms': 1_000).producer
927
+ end
928
+
929
+ # NOTE(review): all three lets are memoized by RSpec. pre/post only
+ # differ because pre_statistics_ttl is first read before the sleep and
+ # post_statistics_ttl after it, and count_cache_hash is memoized on its
+ # first access — so the comparison relies on #to_h exposing state that
+ # is mutated in place by the cache. Confirm against
+ # partitions_count_cache's implementation.
+ let(:count_cache_hash) { described_class.partitions_count_cache.to_h }
930
+ let(:pre_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
931
+ let(:post_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
932
+
933
+ context "when using partition key" do
934
+ before do
935
+ Rdkafka::Config.statistics_callback = ->(*) {}
936
+
937
+ # This call will make a blocking request to the metadata cache
+ producer.produce(
938
+ topic: "produce_test_topic",
939
+ payload: "payload headers",
940
+ partition_key: "test"
941
+ ).wait
942
+
943
+ # Capture the TTL before statistics have had a chance to fire.
+ pre_statistics_ttl
944
+
945
+ # We wait to make sure that statistics are triggered and that there is a refresh
+ sleep(1.5)
946
+
947
+ post_statistics_ttl
948
+ end
949
+
950
+ it 'expect to update ttl on the partitions count cache via statistics' do
951
+ expect(pre_statistics_ttl).to be < post_statistics_ttl
952
+ end
953
+ end
954
+
955
+ context "when not using partition key" do
956
+ before do
957
+ Rdkafka::Config.statistics_callback = ->(*) {}
958
+
959
+ # This call will make a blocking request to the metadata cache
+ producer.produce(
960
+ topic: "produce_test_topic",
961
+ payload: "payload headers"
962
+ ).wait
963
+
964
+ pre_statistics_ttl
965
+
966
+ # We wait to make sure that statistics are triggered and that there is a refresh
+ sleep(1.5)
967
+
968
+ # This will anyhow be populated from statistic
+ post_statistics_ttl
969
+ end
970
+
971
+ it 'expect not to update ttl on the partitions count cache via blocking but via use stats' do
972
+ # Without a partition key no blocking metadata lookup seeds the
+ # cache, so only the statistics emission populates it.
+ expect(pre_statistics_ttl).to be_nil
973
+ expect(post_statistics_ttl).not_to be_nil
974
+ end
975
+ end
976
+ end
982
+
983
+ # Counterpart of the "with active statistics callback" examples: when no
+ # statistics callback is registered, statistics must not refresh the
+ # partitions-count cache TTL.
+ describe 'without active statistics callback' do
984
+ let(:producer) do
985
+ rdkafka_producer_config('statistics.interval.ms': 1_000).producer
986
+ end
987
+
988
+ # NOTE(review): these lets are memoized; pre/post only differ via when
+ # they are first accessed (before vs. after the sleep). See the sibling
+ # "with active statistics callback" group for the same caveat.
+ let(:count_cache_hash) { described_class.partitions_count_cache.to_h }
989
+ let(:pre_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
990
+ let(:post_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
991
+
992
+ context "when using partition key" do
993
+ before do
994
+ # This call will make a blocking request to the metadata cache
+ producer.produce(
995
+ topic: "produce_test_topic",
996
+ payload: "payload headers",
997
+ partition_key: "test"
998
+ ).wait
999
+
1000
+ pre_statistics_ttl
1001
+
1002
+ # We wait to make sure that statistics are triggered and that there is a refresh
+ sleep(1.5)
1003
+
1004
+ post_statistics_ttl
1005
+ end
1006
+
1007
+ it 'expect not to update ttl on the partitions count cache via statistics' do
1008
+ # Cache was seeded by the blocking lookup but no statistics callback
+ # is active, so the TTL must be unchanged across the sleep.
+ expect(pre_statistics_ttl).to eq post_statistics_ttl
1009
+ end
1010
+ end
1011
+
1012
+ context "when not using partition key" do
1013
+ before do
1014
+ # This call will make a blocking request to the metadata cache
+ producer.produce(
1015
+ topic: "produce_test_topic",
1016
+ payload: "payload headers"
1017
+ ).wait
1018
+
1019
+ pre_statistics_ttl
1020
+
1021
+ # We wait to make sure that statistics are triggered and that there is a refresh
+ sleep(1.5)
1022
+
1023
+ # This should not be populated because stats are not in use
+ post_statistics_ttl
1024
+ end
1025
+
1026
+ it 'expect not to update ttl on the partitions count cache via anything' do
1027
+ expect(pre_statistics_ttl).to be_nil
1028
+ expect(post_statistics_ttl).to be_nil
1029
+ end
1030
+ end
1031
+ end
1037
+
1038
+ # Regression test: closing producers from inside short-lived fibers
+ # (with GC pressure) must not crash the Ruby VM. The example passes by
+ # simply not segfaulting.
+ describe 'with other fiber closing' do
1039
+ context 'when we create many fibers and close producer in some of them' do
1040
+ it 'expect not to crash ruby' do
1041
+ # NOTE(review): block param `i` is unused; `10.times do` would do.
+ 10.times do |i|
1042
+ producer = rdkafka_producer_config.producer
1043
+
1044
+ Fiber.new do
1045
+ # Force GC inside the fiber before closing to stress native
+ # resource finalization paths.
+ GC.start
1046
+ producer.close
1047
+ end.resume
1048
+ end
1049
+ end
1050
+ end
1051
+ end
1052
+ end