rdkafka 0.3.5 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.travis.yml +6 -6
- data/CHANGELOG.md +12 -0
- data/README.md +42 -6
- data/Rakefile +13 -17
- data/docker-compose.yml +18 -0
- data/ext/Rakefile +17 -1
- data/lib/rdkafka/bindings.rb +19 -1
- data/lib/rdkafka/config.rb +24 -0
- data/lib/rdkafka/consumer.rb +55 -11
- data/lib/rdkafka/consumer/message.rb +16 -2
- data/lib/rdkafka/consumer/partition.rb +6 -2
- data/lib/rdkafka/consumer/topic_partition_list.rb +100 -44
- data/lib/rdkafka/producer.rb +15 -6
- data/lib/rdkafka/version.rb +2 -2
- data/spec/rdkafka/bindings_spec.rb +23 -0
- data/spec/rdkafka/config_spec.rb +16 -0
- data/spec/rdkafka/consumer/message_spec.rb +20 -6
- data/spec/rdkafka/consumer/partition_spec.rb +10 -1
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +111 -32
- data/spec/rdkafka/consumer_spec.rb +111 -4
- data/spec/rdkafka/producer_spec.rb +100 -17
- metadata +4 -3
data/spec/rdkafka/consumer_spec.rb
CHANGED
@@ -47,7 +47,64 @@ describe Rdkafka::Consumer do
     end
   end

-  describe "#close" do
+  describe "#assign and #assignment" do
+    it "should return an empty assignment if nothing is assigned" do
+      expect(consumer.assignment).to be_empty
+    end
+
+    it "should only accept a topic partition list in assign" do
+      expect {
+        consumer.assign("list")
+      }.to raise_error TypeError
+    end
+
+    it "should raise an error when assigning fails" do
+      expect(Rdkafka::Bindings).to receive(:rd_kafka_assign).and_return(20)
+      expect {
+        consumer.assign(Rdkafka::Consumer::TopicPartitionList.new)
+      }.to raise_error Rdkafka::RdkafkaError
+    end
+
+    it "should assign specific topic/partitions and return that assignment" do
+      tpl = Rdkafka::Consumer::TopicPartitionList.new
+      tpl.add_topic("consume_test_topic", (0..2))
+      consumer.assign(tpl)
+
+      assignment = consumer.assignment
+      expect(assignment).not_to be_empty
+      expect(assignment.to_h["consume_test_topic"].length).to eq 3
+    end
+
+    it "should return the assignment when subscribed" do
+      # Make sure there's a message
+      report = producer.produce(
+        topic: "consume_test_topic",
+        payload: "payload 1",
+        key: "key 1",
+        partition: 0
+      ).wait
+
+      # Subscribe and poll until partitions are assigned
+      consumer.subscribe("consume_test_topic")
+      100.times do
+        consumer.poll(100)
+        break unless consumer.assignment.empty?
+      end
+
+      assignment = consumer.assignment
+      expect(assignment).not_to be_empty
+      expect(assignment.to_h["consume_test_topic"].length).to eq 3
+    end
+
+    it "should raise an error when getting assignment fails" do
+      expect(Rdkafka::Bindings).to receive(:rd_kafka_assignment).and_return(20)
+      expect {
+        consumer.assignment
+      }.to raise_error Rdkafka::RdkafkaError
+    end
+  end
+
+  describe "#close" do
     it "should close a consumer" do
       consumer.subscribe("consume_test_topic")
       consumer.close
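The hunk above adds specs for the new `Consumer#assign` and `Consumer#assignment` API. A rough usage sketch based only on the calls exercised in these specs; the broker address, group id, and topic name below are placeholders, not part of the diff:

```ruby
require "rdkafka"

# Placeholder configuration; any reachable broker and group id will do.
config = Rdkafka::Config.new(
  :"bootstrap.servers" => "localhost:9092",
  :"group.id"          => "assignment-example"
)
consumer = config.consumer

# Build a topic partition list for partitions 0..2 and assign it directly,
# instead of subscribing and waiting for a rebalance.
tpl = Rdkafka::Consumer::TopicPartitionList.new
tpl.add_topic("consume_test_topic", (0..2))
consumer.assign(tpl)

# #assignment returns the currently assigned topic partition list.
puts consumer.assignment.to_h.inspect
```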
@@ -73,12 +130,46 @@ describe Rdkafka::Consumer do
       )
     end

-    it "should only accept a topic partition list" do
+    it "should only accept a topic partition list in committed" do
       expect {
         consumer.committed("list")
       }.to raise_error TypeError
     end

+    it "should commit in sync mode" do
+      expect {
+        consumer.commit(nil, true)
+      }.not_to raise_error
+    end
+
+    it "should only accept a topic partition list in commit if not nil" do
+      expect {
+        consumer.commit("list")
+      }.to raise_error TypeError
+    end
+
+    it "should commit a specific topic partion list" do
+      # Make sure there are some message
+      3.times do |i|
+        producer.produce(
+          topic: "consume_test_topic",
+          payload: "payload 1",
+          key: "key 1",
+          partition: i
+        ).wait
+      end
+
+      list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+        list.add_topic_and_partitions_with_offsets("consume_test_topic", {0 => 1, 1 => 1, 2 => 1})
+      end
+      consumer.commit(list)
+
+      partitions = consumer.committed(list).to_h["consume_test_topic"]
+      expect(partitions[0].offset).to eq 1
+      expect(partitions[1].offset).to eq 1
+      expect(partitions[2].offset).to eq 1
+    end
+
     it "should raise an error when committing fails" do
       expect(Rdkafka::Bindings).to receive(:rd_kafka_commit).and_return(20)

@@ -87,14 +178,30 @@ describe Rdkafka::Consumer do
       }.to raise_error(Rdkafka::RdkafkaError)
     end

+    it "should fetch the committed offsets for the current assignment" do
+      consumer.subscribe("consume_test_topic")
+      # Wait for the assignment to be made
+      10.times do
+        break if !consumer.assignment.empty?
+        sleep 1
+      end
+
+      partitions = consumer.committed.to_h["consume_test_topic"]
+      expect(partitions).not_to be_nil
+      expect(partitions[0].offset).to be > 0
+      expect(partitions[1].offset).to be nil
+      expect(partitions[2].offset).to be nil
+    end
+
     it "should fetch the committed offsets for a specified topic partition list" do
       list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
         list.add_topic("consume_test_topic", [0, 1, 2])
       end
       partitions = consumer.committed(list).to_h["consume_test_topic"]
+      expect(partitions).not_to be_nil
       expect(partitions[0].offset).to be > 0
-      expect(partitions[1].offset).to
-      expect(partitions[2].offset).to
+      expect(partitions[1].offset).to be nil
+      expect(partitions[2].offset).to be nil
     end

     it "should raise an error when getting committed fails" do
data/spec/rdkafka/producer_spec.rb
CHANGED
@@ -48,7 +48,7 @@ describe Rdkafka::Producer do
     expect(message.key).to eq "key"
     # Since api.version.request is on by default we will get
     # the message creation timestamp if it's not set.
-    expect(message.timestamp).to
+    expect(message.timestamp).to be_within(5).of(Time.now)
   end

   it "should produce a message with a specified partition" do
@@ -89,24 +89,57 @@ describe Rdkafka::Producer do
     expect(message.key).to eq "key utf8"
   end

-
-
-
-
-
-
-
-
+  context "timestamp" do
+    it "should raise a type error if not nil, integer or time" do
+      expect {
+        producer.produce(
+          topic: "produce_test_topic",
+          payload: "payload timestamp",
+          key: "key timestamp",
+          timestamp: "10101010"
+        )
+      }.to raise_error TypeError
+    end

-
-
-
-
-
+    it "should produce a message with an integer timestamp" do
+      handle = producer.produce(
+        topic: "produce_test_topic",
+        payload: "payload timestamp",
+        key: "key timestamp",
+        timestamp: 1505069646252
+      )
+      report = handle.wait(5)
+
+      # Consume message and verify it's content
+      message = wait_for_message(
+        topic: "produce_test_topic",
+        delivery_report: report
+      )
+
+      expect(message.partition).to eq 2
+      expect(message.key).to eq "key timestamp"
+      expect(message.timestamp).to eq Time.at(1505069646, 252_000)
+    end
+
+    it "should produce a message with a time timestamp" do
+      handle = producer.produce(
+        topic: "produce_test_topic",
+        payload: "payload timestamp",
+        key: "key timestamp",
+        timestamp: Time.at(1505069646, 353_000)
+      )
+      report = handle.wait(5)
+
+      # Consume message and verify it's content
+      message = wait_for_message(
+        topic: "produce_test_topic",
+        delivery_report: report
+      )

-
-
-
+      expect(message.partition).to eq 2
+      expect(message.key).to eq "key timestamp"
+      expect(message.timestamp).to eq Time.at(1505069646, 353_000)
+    end
   end

   it "should produce a message with nil key" do
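The specs above introduce the producer's `timestamp` argument, which accepts nil, an Integer (milliseconds since the epoch, as the `1505069646252` example shows), or a `Time`; other types raise a `TypeError`. A minimal sketch, assuming a local broker and an existing topic as in the earlier sketches:

```ruby
# Placeholder producer; reuses the same broker assumption as above.
producer = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").producer

handle = producer.produce(
  topic: "produce_test_topic",
  payload: "payload with timestamp",
  key: "key",
  timestamp: Time.now            # or an integer such as 1505069646252
)

# Wait up to 5 seconds for the delivery report, as the specs do.
report = handle.wait(5)
puts "delivered to partition #{report.partition}, offset #{report.offset}"
```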
@@ -164,6 +197,56 @@ describe Rdkafka::Producer do
     end
   end

+  # TODO this spec crashes if you create and use the producer before
+  # forking like so:
+  #
+  # @producer = producer
+  #
+  # This will be added as part of https://github.com/appsignal/rdkafka-ruby/issues/19
+  #it "should produce a message in a forked process" do
+  #  # Fork, produce a message, send the report of a pipe and
+  #  # wait for it in the main process.
+
+  #  reader, writer = IO.pipe
+
+  #  fork do
+  #    reader.close
+
+  #    handle = producer.produce(
+  #      topic: "produce_test_topic",
+  #      payload: "payload",
+  #      key: "key"
+  #    )
+
+  #    report = handle.wait(5)
+  #    producer.close
+
+  #    report_json = JSON.generate(
+  #      "partition" => report.partition,
+  #      "offset" => report.offset
+  #    )
+
+  #    writer.write(report_json)
+  #  end
+
+  #  writer.close
+
+  #  report_hash = JSON.parse(reader.read)
+  #  report = Rdkafka::Producer::DeliveryReport.new(
+  #    report_hash["partition"],
+  #    report_hash["offset"]
+  #  )
+
+  #  # Consume message and verify it's content
+  #  message = wait_for_message(
+  #    topic: "produce_test_topic",
+  #    delivery_report: report
+  #  )
+  #  expect(message.partition).to eq 1
+  #  expect(message.payload).to eq "payload"
+  #  expect(message.key).to eq "key"
+  #end
+
   it "should raise an error when producing fails" do
     expect(Rdkafka::Bindings).to receive(:rd_kafka_producev).and_return(20)

metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.3.5
+  version: 0.4.0
 platform: ruby
 authors:
 - Thijs Cadier
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-
+date: 2018-09-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -110,6 +110,7 @@ files:
 - LICENSE
 - README.md
 - Rakefile
+- docker-compose.yml
 - ext/Rakefile
 - lib/rdkafka.rb
 - lib/rdkafka/bindings.rb
@@ -155,7 +156,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.7.
+rubygems_version: 2.7.6
 signing_key:
 specification_version: 4
 summary: Kafka client library wrapping librdkafka using the ffi gem and futures from
|