rdkafka 0.4.2 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -71,15 +71,15 @@ describe Rdkafka::RdkafkaError do
  end

  it "should not equal another error with a different error code" do
- expect(subject).to eq Rdkafka::RdkafkaError.new(20, "Error explanation")
+ expect(subject).not_to eq Rdkafka::RdkafkaError.new(20, "Error explanation")
  end

  it "should not equal another error with a different message" do
- expect(subject).to eq Rdkafka::RdkafkaError.new(10, "Different error explanation")
+ expect(subject).not_to eq Rdkafka::RdkafkaError.new(10, "Different error explanation")
  end

  it "should not equal another error with no message" do
- expect(subject).to eq Rdkafka::RdkafkaError.new(10)
+ expect(subject).not_to eq Rdkafka::RdkafkaError.new(10)
  end
  end
  end
@@ -46,7 +46,7 @@ describe Rdkafka::Producer::DeliveryHandle do

  it "should wait until the timeout and then raise an error" do
  expect {
- subject.wait(0.1)
+ subject.wait(max_wait_timeout: 0.1)
  }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
  end

@@ -61,7 +61,7 @@ describe Rdkafka::Producer::DeliveryHandle do
  end

  it "should wait without a timeout" do
- report = subject.wait(nil)
+ report = subject.wait(max_wait_timeout: nil)

  expect(report.partition).to eq(2)
  expect(report.offset).to eq(100)
@@ -1,7 +1,7 @@
  require "spec_helper"

  describe Rdkafka::Producer::DeliveryReport do
- subject { Rdkafka::Producer::DeliveryReport.new(2, 100) }
+ subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }

  it "should get the partition" do
  expect(subject.partition).to eq 2
@@ -10,4 +10,8 @@ describe Rdkafka::Producer::DeliveryReport do
  it "should get the offset" do
  expect(subject.offset).to eq 100
  end
+
+ it "should get the error" do
+ expect(subject.error).to eq "error"
+ end
  end
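The spec changes above reflect a breaking API change in 0.8.0: DeliveryHandle#wait now takes its timeout as the max_wait_timeout: keyword argument instead of a positional value, and DeliveryReport carries an error field in addition to partition and offset. A minimal caller-side sketch (not part of the package); the broker address and topic name are placeholders:

    require "rdkafka"

    producer = Rdkafka::Config.new({ :"bootstrap.servers" => "localhost:9092" }).producer

    handle = producer.produce(topic: "example_topic", payload: "hello", key: "key")

    # 0.4.2: handle.wait(5) or handle.wait(nil)
    # 0.8.0: the timeout is a keyword argument
    report = handle.wait(max_wait_timeout: 5)

    report.partition # => partition the message landed on, as before
    report.offset    # => offset of the message, as before
    report.error     # => new in 0.8.0, populated when delivery failed

    producer.close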
@@ -2,10 +2,13 @@ require "spec_helper"

  describe Rdkafka::Producer do
  let(:producer) { rdkafka_config.producer }
+ let(:consumer) { rdkafka_config.consumer }

  after do
  # Registry should always end up being empty
  expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
+ producer.close
+ consumer.close
  end

  context "delivery callback" do
@@ -27,6 +30,7 @@ describe Rdkafka::Producer do
  it "should call the callback when a message is delivered" do
  @callback_called = false

+
  producer.delivery_callback = lambda do |report|
  expect(report).not_to be_nil
  expect(report.partition).to eq 1
@@ -42,7 +46,10 @@ describe Rdkafka::Producer do
  )

  # Wait for it to be delivered
- handle.wait(5)
+ handle.wait(max_wait_timeout: 15)
+
+ # Join the producer thread.
+ producer.close

  # Callback should have been called
  expect(@callback_called).to be true
@@ -55,7 +62,7 @@ describe Rdkafka::Producer do
  payload: "payload",
  key: "key"
  )
- }.to raise_error ArgumentError, "missing keyword: topic"
+ }.to raise_error ArgumentError, /missing keyword: [\:]?topic/
  end

  it "should produce a message" do
@@ -70,7 +77,7 @@ describe Rdkafka::Producer do
  expect(handle.pending?).to be true

  # Check delivery handle and report
- report = handle.wait(5)
+ report = handle.wait(max_wait_timeout: 5)
  expect(handle.pending?).to be false
  expect(report).not_to be_nil
  expect(report.partition).to eq 1
@@ -82,14 +89,15 @@ describe Rdkafka::Producer do
  # Consume message and verify it's content
  message = wait_for_message(
  topic: "produce_test_topic",
- delivery_report: report
+ delivery_report: report,
+ consumer: consumer
  )
  expect(message.partition).to eq 1
  expect(message.payload).to eq "payload"
  expect(message.key).to eq "key"
  # Since api.version.request is on by default we will get
  # the message creation timestamp if it's not set.
- expect(message.timestamp).to be_within(5).of(Time.now)
+ expect(message.timestamp).to be_within(10).of(Time.now)
  end

  it "should produce a message with a specified partition" do
@@ -100,29 +108,65 @@ describe Rdkafka::Producer do
  key: "key partition",
  partition: 1
  )
- report = handle.wait(5)
+ report = handle.wait(max_wait_timeout: 5)

  # Consume message and verify it's content
  message = wait_for_message(
  topic: "produce_test_topic",
- delivery_report: report
+ delivery_report: report,
+ consumer: consumer
  )
  expect(message.partition).to eq 1
  expect(message.key).to eq "key partition"
  end

+ it "should produce a message to the same partition with a similar partition key" do
+ # Avoid partitioner collisions.
+ while true
+ key = ('a'..'z').to_a.shuffle.take(10).join('')
+ partition_key = ('a'..'z').to_a.shuffle.take(10).join('')
+ partition_count = producer.partition_count('partitioner_test_topic')
+ break if (Zlib.crc32(key) % partition_count) != (Zlib.crc32(partition_key) % partition_count)
+ end
+
+ # Produce a message with key, partition_key and key + partition_key
+ messages = [{key: key}, {partition_key: partition_key}, {key: key, partition_key: partition_key}]
+
+ messages = messages.map do |m|
+ handle = producer.produce(
+ topic: "partitioner_test_topic",
+ payload: "payload partition",
+ key: m[:key],
+ partition_key: m[:partition_key]
+ )
+ report = handle.wait(max_wait_timeout: 5)
+
+ wait_for_message(
+ topic: "partitioner_test_topic",
+ delivery_report: report,
+ )
+ end
+
+ expect(messages[0].partition).not_to eq(messages[2].partition)
+ expect(messages[1].partition).to eq(messages[2].partition)
+ expect(messages[0].key).to eq key
+ expect(messages[1].key).to be_nil
+ expect(messages[2].key).to eq key
+ end
+
  it "should produce a message with utf-8 encoding" do
  handle = producer.produce(
  topic: "produce_test_topic",
  payload: "Τη γλώσσα μου έδωσαν ελληνική",
  key: "key utf8"
  )
- report = handle.wait(5)
+ report = handle.wait(max_wait_timeout: 5)

  # Consume message and verify it's content
  message = wait_for_message(
  topic: "produce_test_topic",
- delivery_report: report
+ delivery_report: report,
+ consumer: consumer
  )

  expect(message.partition).to eq 1
@@ -149,12 +193,13 @@ describe Rdkafka::Producer do
  key: "key timestamp",
  timestamp: 1505069646252
  )
- report = handle.wait(5)
+ report = handle.wait(max_wait_timeout: 5)

  # Consume message and verify it's content
  message = wait_for_message(
  topic: "produce_test_topic",
- delivery_report: report
+ delivery_report: report,
+ consumer: consumer
  )

  expect(message.partition).to eq 2
@@ -169,12 +214,13 @@ describe Rdkafka::Producer do
  key: "key timestamp",
  timestamp: Time.at(1505069646, 353_000)
  )
- report = handle.wait(5)
+ report = handle.wait(max_wait_timeout: 5)

  # Consume message and verify it's content
  message = wait_for_message(
  topic: "produce_test_topic",
- delivery_report: report
+ delivery_report: report,
+ consumer: consumer
  )

  expect(message.partition).to eq 2
@@ -188,12 +234,13 @@ describe Rdkafka::Producer do
  topic: "produce_test_topic",
  payload: "payload no key"
  )
- report = handle.wait(5)
+ report = handle.wait(max_wait_timeout: 5)

  # Consume message and verify it's content
  message = wait_for_message(
  topic: "produce_test_topic",
- delivery_report: report
+ delivery_report: report,
+ consumer: consumer
  )

  expect(message.key).to be_nil
@@ -205,18 +252,63 @@ describe Rdkafka::Producer do
  topic: "produce_test_topic",
  key: "key no payload"
  )
- report = handle.wait(5)
+ report = handle.wait(max_wait_timeout: 5)

  # Consume message and verify it's content
  message = wait_for_message(
  topic: "produce_test_topic",
- delivery_report: report
+ delivery_report: report,
+ consumer: consumer
  )

  expect(message.key).to eq "key no payload"
  expect(message.payload).to be_nil
  end

+ it "should produce a message with headers" do
+ handle = producer.produce(
+ topic: "produce_test_topic",
+ payload: "payload headers",
+ key: "key headers",
+ headers: { foo: :bar, baz: :foobar }
+ )
+ report = handle.wait(max_wait_timeout: 5)
+
+ # Consume message and verify it's content
+ message = wait_for_message(
+ topic: "produce_test_topic",
+ delivery_report: report,
+ consumer: consumer
+ )
+
+ expect(message.payload).to eq "payload headers"
+ expect(message.key).to eq "key headers"
+ expect(message.headers[:foo]).to eq "bar"
+ expect(message.headers[:baz]).to eq "foobar"
+ expect(message.headers[:foobar]).to be_nil
+ end
+
+ it "should produce a message with empty headers" do
+ handle = producer.produce(
+ topic: "produce_test_topic",
+ payload: "payload headers",
+ key: "key headers",
+ headers: {}
+ )
+ report = handle.wait(max_wait_timeout: 5)
+
+ # Consume message and verify it's content
+ message = wait_for_message(
+ topic: "produce_test_topic",
+ delivery_report: report,
+ consumer: consumer
+ )
+
+ expect(message.payload).to eq "payload headers"
+ expect(message.key).to eq "key headers"
+ expect(message.headers).to be_empty
+ end
+
  it "should produce message that aren't waited for and not crash" do
  5.times do
  200.times do
@@ -238,55 +330,58 @@ describe Rdkafka::Producer do
  end
  end

- # TODO this spec crashes if you create and use the producer before
- # forking like so:
- #
- # @producer = producer
- #
- # This will be added as part of https://github.com/appsignal/rdkafka-ruby/issues/19
- #it "should produce a message in a forked process" do
- # # Fork, produce a message, send the report of a pipe and
- # # wait for it in the main process.
-
- # reader, writer = IO.pipe
-
- # fork do
- # reader.close
-
- # handle = producer.produce(
- # topic: "produce_test_topic",
- # payload: "payload",
- # key: "key"
- # )
-
- # report = handle.wait(5)
- # producer.close
-
- # report_json = JSON.generate(
- # "partition" => report.partition,
- # "offset" => report.offset
- # )
-
- # writer.write(report_json)
- # end
-
- # writer.close
-
- # report_hash = JSON.parse(reader.read)
- # report = Rdkafka::Producer::DeliveryReport.new(
- # report_hash["partition"],
- # report_hash["offset"]
- # )
-
- # # Consume message and verify it's content
- # message = wait_for_message(
- # topic: "produce_test_topic",
- # delivery_report: report
- # )
- # expect(message.partition).to eq 1
- # expect(message.payload).to eq "payload"
- # expect(message.key).to eq "key"
- #end
+ it "should produce a message in a forked process" do
+ # Fork, produce a message, send the report over a pipe and
+ # wait for and check the message in the main process.
+
+ # Kernel#fork is not available in JRuby
+ skip if defined?(JRUBY_VERSION)
+
+ reader, writer = IO.pipe
+
+ fork do
+ reader.close
+
+ # Avoids sharing the socket between processes.
+ producer = rdkafka_config.producer
+
+ handle = producer.produce(
+ topic: "produce_test_topic",
+ payload: "payload-forked",
+ key: "key-forked"
+ )
+
+ report = handle.wait(max_wait_timeout: 5)
+
+ report_json = JSON.generate(
+ "partition" => report.partition,
+ "offset" => report.offset
+ )
+
+ writer.write(report_json)
+ writer.close
+ producer.close
+ end
+
+ writer.close
+ report_hash = JSON.parse(reader.read)
+ report = Rdkafka::Producer::DeliveryReport.new(
+ report_hash["partition"],
+ report_hash["offset"]
+ )
+
+ reader.close
+
+ # Consume message and verify it's content
+ message = wait_for_message(
+ topic: "produce_test_topic",
+ delivery_report: report,
+ consumer: consumer
+ )
+ expect(message.partition).to eq 0
+ expect(message.payload).to eq "payload-forked"
+ expect(message.key).to eq "key-forked"
+ end

  it "should raise an error when producing fails" do
  expect(Rdkafka::Bindings).to receive(:rd_kafka_producev).and_return(20)
@@ -306,10 +401,10 @@ describe Rdkafka::Producer do
  key: "key timeout"
  )
  expect {
- handle.wait(0)
+ handle.wait(max_wait_timeout: 0)
  }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError

  # Waiting a second time should work
- handle.wait(5)
+ handle.wait(max_wait_timeout: 5)
  end
  end
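Beyond the keyword timeout, the producer spec changes above exercise two new Producer#produce options in 0.8.0: partition_key: (hashed to pick a partition when no explicit partition: is given) and headers: (per-message headers, read back on the consumer side as message.headers with symbol keys and string values). A short sketch under the same assumptions as before (placeholder broker and topic):

    require "rdkafka"

    producer = Rdkafka::Config.new({ :"bootstrap.servers" => "localhost:9092" }).producer

    handle = producer.produce(
      topic: "example_topic",
      payload: "payload",
      key: "key",
      partition_key: "customer-42",    # new in 0.8.0: partition chosen by hashing this value
      headers: { source: "billing" }   # new in 0.8.0: per-message headers
    )

    report = handle.wait(max_wait_timeout: 5)
    puts "delivered to partition #{report.partition} at offset #{report.offset}"

    # On the consumer side the specs read this back as message.headers[:source],
    # which returns the string "billing".

    producer.close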
@@ -7,6 +7,14 @@ require "pry"
  require "rspec"
  require "rdkafka"

+ `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic consume_test_topic`
+ `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic empty_test_topic`
+ `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic load_test_topic`
+ `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic produce_test_topic`
+ `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic rake_test_topic`
+ `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic watermarks_test_topic`
+ `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 25 --if-not-exists --topic partitioner_test_topic`
+
  def rdkafka_config(config_overrides={})
  config = {
  :"api.version.request" => false,
@@ -25,12 +33,12 @@ def rdkafka_config(config_overrides={})
  Rdkafka::Config.new(config)
  end

- def native_client
+ def new_native_client
  config = rdkafka_config
  config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer)
  end

- def new_native_topic(topic_name="topic_name")
+ def new_native_topic(topic_name="topic_name", native_client: )
  Rdkafka::Bindings.rd_kafka_topic_new(
  native_client,
  topic_name,
@@ -39,7 +47,8 @@ def new_native_topic(topic_name="topic_name")
  end

  def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
- consumer = rdkafka_config.consumer if consumer.nil?
+ new_consumer = !!consumer
+ consumer ||= rdkafka_config.consumer
  consumer.subscribe(topic)
  timeout = Time.now.to_i + timeout_in_seconds
  loop do
@@ -53,6 +62,8 @@ def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer:
  return message
  end
  end
+ ensure
+ consumer.close if new_consumer
  end

  def wait_for_assignment(consumer)
@@ -61,3 +72,10 @@ def wait_for_assignment(consumer)
  sleep 1
  end
  end
+
+ def wait_for_unassignment(consumer)
+ 10.times do
+ break if consumer.assignment.empty?
+ sleep 1
+ end
+ end