rdkafka 0.11.1 → 0.12.0.beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 9b7042f241be5aad91c403f61aabb8e8ba87afa46674db8ba58c487fe01f88c9
-   data.tar.gz: 5cb00dc3dc8b4069a8e62cda9271e290d1371434332767270426488dedcff641
+   metadata.gz: a58b7e8b03dec32027bf3d9ec513a87e6bb0a8abf2f6598cd34a965bebce2dbb
+   data.tar.gz: 603c985bd19a2213671b600680a748279e21d7b4aeb3f23a7f1efa16a5e1229c
  SHA512:
-   metadata.gz: 2b287a0d81aca7909702969778fb64585b6808860971d50e1c5540eb0c43cf73617f9e701605b816e8f52a81e39d9909512e1d113f6a550555e6ca503dc5ff58
-   data.tar.gz: 7924ab06f52646168b52d0478b605484e4aa1a3674ace4f97af5b56588aea6dec8e614d3258b0670f92f18a905509ae6982d2c8999fa719f8ea7451e515ecb3d
+   metadata.gz: 9e5a5bd4c911f93572d0f8be2e5f576b08ee0ff86ab773e9e7d9db1182002e62c1c57b03cfe6e7b686c9890c35f6fca811fe10daad3a0d4d57bd67a5340c7fa1
+   data.tar.gz: 5fd3c57d9797f3b1f7c636f8190cee0b8cfc56f14daa03a2be21da6ff11fff1b7e00e233ab10a37b280b06ac2a197c155ebf1fdd6dfc8a97af9ce8bd38a341e1
data/CHANGELOG.md CHANGED
@@ -1,6 +1,3 @@
- # 0.11.1
- * Use mini_portile2 2.6, otherwise you can't run nokogiri and rdkafka at the same time.
-
  # 0.11.0
  * Upgrade librdkafka to 1.8.2
  * Bump supported minimum Ruby version to 2.6
data/lib/rdkafka/config.rb CHANGED
@@ -179,7 +179,7 @@ module Rdkafka
        # Set callback to receive delivery reports on config
        Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
        # Return producer with Kafka client
-       Rdkafka::Producer.new(native_kafka(config, :rd_kafka_producer)).tap do |producer|
+       Rdkafka::Producer.new(Rdkafka::Producer::Client.new(native_kafka(config, :rd_kafka_producer))).tap do |producer|
          opaque.producer = producer
        end
      end
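
Note: `Config#producer` now wraps the native handle in the new `Rdkafka::Producer::Client` before constructing the `Rdkafka::Producer`; the public API is unchanged. A rough usage sketch (the broker address and topic name are placeholders):

    require "rdkafka"

    # "localhost:9092" is a placeholder broker address.
    config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")

    # Config#producer builds the native handle, wraps it in
    # Rdkafka::Producer::Client and returns an Rdkafka::Producer.
    producer = config.producer

    handle = producer.produce(topic: "events", payload: "hello")
    handle.wait # block until the delivery report arrives

    producer.close
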
data/lib/rdkafka/producer/client.rb ADDED
@@ -0,0 +1,47 @@
+ module Rdkafka
+   class Producer
+     class Client
+       def initialize(native)
+         @native = native
+
+         # Start thread to poll client for delivery callbacks
+         @polling_thread = Thread.new do
+           loop do
+             Rdkafka::Bindings.rd_kafka_poll(native, 250)
+             # Exit thread if closing and the poll queue is empty
+             if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(native) == 0
+               break
+             end
+           end
+         end
+         @polling_thread.abort_on_exception = true
+         @polling_thread[:closing] = false
+       end
+
+       def native
+         @native
+       end
+
+       def finalizer
+         ->(_) { close }
+       end
+
+       def closed?
+         @native.nil?
+       end
+
+       def close(object_id=nil)
+         return unless @native
+
+         # Indicate to polling thread that we're closing
+         @polling_thread[:closing] = true
+         # Wait for the polling thread to finish up
+         @polling_thread.join
+
+         Rdkafka::Bindings.rd_kafka_destroy(@native)
+
+         @native = nil
+       end
+     end
+   end
+ end
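
Note: moving the finalizer onto the client matters for garbage collection. A finalizer proc must not capture the object it is attached to, otherwise that object is kept alive and the finalizer never runs; `Client#finalizer` returns a lambda that only closes over the client, so the producer stays collectable. A minimal sketch of the pattern (the `Wrapper`/`resource` names are illustrative, not part of the gem):

    class Wrapper
      def initialize(resource)
        @resource = resource
        # The finalizer proc comes from the resource and does not reference
        # the Wrapper instance, so the Wrapper can still be garbage collected
        # and the resource is released when that happens.
        ObjectSpace.define_finalizer(self, resource.finalizer)
      end
    end
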
data/lib/rdkafka/producer.rb CHANGED
@@ -1,4 +1,4 @@
- require "securerandom"
+ require "objspace"

  module Rdkafka
    # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
@@ -10,25 +10,11 @@ module Rdkafka
      attr_reader :delivery_callback

      # @private
-     def initialize(native_kafka)
-       @id = SecureRandom.uuid
-       @closing = false
-       @native_kafka = native_kafka
+     def initialize(client)
+       @client = client

        # Makes sure, that the producer gets closed before it gets GCed by Ruby
-       ObjectSpace.define_finalizer(@id, proc { close })
-
-       # Start thread to poll client for delivery callbacks
-       @polling_thread = Thread.new do
-         loop do
-           Rdkafka::Bindings.rd_kafka_poll(@native_kafka, 250)
-           # Exit thread if closing and the poll queue is empty
-           if @closing && Rdkafka::Bindings.rd_kafka_outq_len(@native_kafka) == 0
-             break
-           end
-         end
-       end
-       @polling_thread.abort_on_exception = true
+       ObjectSpace.define_finalizer(self, client.finalizer)
      end

      # Set a callback that will be called every time a message is successfully produced.
@@ -44,16 +30,9 @@ module Rdkafka

      # Close this producer and wait for the internal poll queue to empty.
      def close
-       ObjectSpace.undefine_finalizer(@id)
-
-       return unless @native_kafka
+       ObjectSpace.undefine_finalizer(self)

-       # Indicate to polling thread that we're closing
-       @closing = true
-       # Wait for the polling thread to finish up
-       @polling_thread.join
-       Rdkafka::Bindings.rd_kafka_destroy(@native_kafka)
-       @native_kafka = nil
+       @client.close
      end

      # Partition count for a given topic.
@@ -65,7 +44,7 @@ module Rdkafka
      #
      def partition_count(topic)
        closed_producer_check(__method__)
-       Rdkafka::Metadata.new(@native_kafka, topic).topics&.first[:partition_count]
+       Rdkafka::Metadata.new(@client.native, topic).topics&.first[:partition_count]
      end

      # Produces a message to a Kafka topic. The message is added to rdkafka's queue, call {DeliveryHandle#wait wait} on the returned delivery handle to make sure it is delivered.
@@ -157,7 +136,7 @@ module Rdkafka

        # Produce the message
        response = Rdkafka::Bindings.rd_kafka_producev(
-         @native_kafka,
+         @client.native,
          *args
        )

@@ -176,7 +155,7 @@ module Rdkafka
      end

      def closed_producer_check(method)
-       raise Rdkafka::ClosedProducerError.new(method) if @native_kafka.nil?
+       raise Rdkafka::ClosedProducerError.new(method) if @client.closed?
      end
    end
  end
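
Note: `Producer#close` now delegates to `Client#close`, which is idempotent (it returns early once the native handle has been destroyed), and `closed_producer_check` raises `Rdkafka::ClosedProducerError` once `@client.closed?` is true. A rough sketch of the resulting behaviour (broker address and topic are placeholders):

    producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

    producer.close
    producer.close # safe: Client#close returns early once the native handle is nil

    begin
      # partition_count starts with closed_producer_check, so this raises now
      producer.partition_count("events")
    rescue Rdkafka::ClosedProducerError => e
      puts e.message
    end
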
data/lib/rdkafka/version.rb CHANGED
@@ -1,5 +1,5 @@
  module Rdkafka
-   VERSION = "0.11.1"
+   VERSION = "0.12.0.beta.0"
    LIBRDKAFKA_VERSION = "1.8.2"
    LIBRDKAFKA_SOURCE_SHA256 = "6a747d293a7a4613bd2897e28e8791476fbe1ae7361f2530a876e0fd483482a6"
  end
data/lib/rdkafka.rb CHANGED
@@ -17,5 +17,6 @@ require "rdkafka/consumer/topic_partition_list"
  require "rdkafka/error"
  require "rdkafka/metadata"
  require "rdkafka/producer"
+ require "rdkafka/producer/client"
  require "rdkafka/producer/delivery_handle"
  require "rdkafka/producer/delivery_report"
data/spec/rdkafka/producer/client_spec.rb ADDED
@@ -0,0 +1,144 @@
+ require "spec_helper"
+
+ describe Rdkafka::Producer::Client do
+   let(:native) { double }
+   let(:closing) { false }
+   let(:thread) { double(Thread) }
+
+   subject(:client) { described_class.new(native) }
+
+   before do
+     allow(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(native, 250)
+     allow(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native).and_return(0)
+     allow(Rdkafka::Bindings).to receive(:rd_kafka_destroy)
+     allow(Thread).to receive(:new).and_return(thread)
+
+     allow(thread).to receive(:[]=).with(:closing, anything)
+     allow(thread).to receive(:join)
+     allow(thread).to receive(:abort_on_exception=).with(anything)
+   end
+
+   context "defaults" do
+     it "sets the thread to abort on exception" do
+       expect(thread).to receive(:abort_on_exception=).with(true)
+
+       client
+     end
+
+     it "sets the thread `closing` flag to false" do
+       expect(thread).to receive(:[]=).with(:closing, false)
+
+       client
+     end
+   end
+
+   context "the polling thread" do
+     it "is created" do
+       expect(Thread).to receive(:new)
+
+       client
+     end
+
+     it "polls the native with default 250ms timeout" do
+       polling_loop_expects do
+         expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(native, 250)
+       end
+     end
+
+     it "check the out queue of native client" do
+       polling_loop_expects do
+         expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native)
+       end
+     end
+   end
+
+   def polling_loop_expects(&block)
+     Thread.current[:closing] = true # this forces the loop break with line #12
+
+     allow(Thread).to receive(:new).and_yield do |_|
+       block.call
+     end.and_return(thread)
+
+     client
+   end
+
+   it "exposes `native` client" do
+     expect(client.native).to eq(native)
+   end
+
+   context "when client was not yet closed (`nil`)" do
+     it "is not closed" do
+       expect(client.closed?).to eq(false)
+     end
+
+     context "and attempt to close" do
+       it "calls the `destroy` binding" do
+         expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native)
+
+         client.close
+       end
+
+       it "indicates to the polling thread that it is closing" do
+         expect(thread).to receive(:[]=).with(:closing, true)
+
+         client.close
+       end
+
+       it "joins the polling thread" do
+         expect(thread).to receive(:join)
+
+         client.close
+       end
+
+       it "closes and unassign the native client" do
+         client.close
+
+         expect(client.native).to eq(nil)
+         expect(client.closed?).to eq(true)
+       end
+     end
+   end
+
+   context "when client was already closed" do
+     before { client.close }
+
+     it "is closed" do
+       expect(client.closed?).to eq(true)
+     end
+
+     context "and attempt to close again" do
+       it "does not call the `destroy` binding" do
+         expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy)
+
+         client.close
+       end
+
+       it "does not indicate to the polling thread that it is closing" do
+         expect(thread).not_to receive(:[]=).with(:closing, true)
+
+         client.close
+       end
+
+       it "does not join the polling thread" do
+         expect(thread).not_to receive(:join)
+
+         client.close
+       end
+
+       it "does not close and unassign the native client again" do
+         client.close
+
+         expect(client.native).to eq(nil)
+         expect(client.closed?).to eq(true)
+       end
+     end
+   end
+
+   it "provide a finalizer Proc that closes the `native` client" do
+     expect(client.closed?).to eq(false)
+
+     client.finalizer.call("some-ignored-object-id")
+
+     expect(client.closed?).to eq(true)
+   end
+ end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: rdkafka
  version: !ruby/object:Gem::Version
-   version: 0.11.1
+   version: 0.12.0.beta.0
  platform: ruby
  authors:
  - Thijs Cadier
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-11-23 00:00:00.000000000 Z
+ date: 2022-03-03 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: ffi
@@ -177,6 +177,7 @@ files:
  - lib/rdkafka/error.rb
  - lib/rdkafka/metadata.rb
  - lib/rdkafka/producer.rb
+ - lib/rdkafka/producer/client.rb
  - lib/rdkafka/producer/delivery_handle.rb
  - lib/rdkafka/producer/delivery_report.rb
  - lib/rdkafka/version.rb
@@ -196,6 +197,7 @@ files:
  - spec/rdkafka/consumer_spec.rb
  - spec/rdkafka/error_spec.rb
  - spec/rdkafka/metadata_spec.rb
+ - spec/rdkafka/producer/client_spec.rb
  - spec/rdkafka/producer/delivery_handle_spec.rb
  - spec/rdkafka/producer/delivery_report_spec.rb
  - spec/rdkafka/producer_spec.rb
@@ -215,9 +217,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
        version: '2.6'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
-   - - ">="
+   - - ">"
      - !ruby/object:Gem::Version
-       version: '0'
+       version: 1.3.1
  requirements: []
  rubygems_version: 3.1.4
  signing_key:
@@ -241,6 +243,7 @@ test_files:
  - spec/rdkafka/consumer_spec.rb
  - spec/rdkafka/error_spec.rb
  - spec/rdkafka/metadata_spec.rb
+ - spec/rdkafka/producer/client_spec.rb
  - spec/rdkafka/producer/delivery_handle_spec.rb
  - spec/rdkafka/producer/delivery_report_spec.rb
  - spec/rdkafka/producer_spec.rb