karafka-rdkafka 0.20.0-arm64-darwin

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (102)
  1. checksums.yaml +7 -0
  2. data/.github/CODEOWNERS +3 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/workflows/ci_linux_x86_64_gnu.yml +249 -0
  5. data/.github/workflows/ci_linux_x86_64_musl.yml +205 -0
  6. data/.github/workflows/ci_macos_arm64.yml +306 -0
  7. data/.github/workflows/push_linux_x86_64_gnu.yml +64 -0
  8. data/.github/workflows/push_linux_x86_64_musl.yml +77 -0
  9. data/.github/workflows/push_macos_arm64.yml +54 -0
  10. data/.github/workflows/push_ruby.yml +37 -0
  11. data/.github/workflows/verify-action-pins.yml +16 -0
  12. data/.gitignore +15 -0
  13. data/.rspec +2 -0
  14. data/.ruby-gemset +1 -0
  15. data/.ruby-version +1 -0
  16. data/.yardopts +2 -0
  17. data/CHANGELOG.md +330 -0
  18. data/Gemfile +5 -0
  19. data/MIT-LICENSE +22 -0
  20. data/README.md +177 -0
  21. data/Rakefile +96 -0
  22. data/docker-compose.yml +25 -0
  23. data/ext/README.md +19 -0
  24. data/ext/Rakefile +131 -0
  25. data/ext/build_common.sh +361 -0
  26. data/ext/build_linux_x86_64_gnu.sh +306 -0
  27. data/ext/build_linux_x86_64_musl.sh +763 -0
  28. data/ext/build_macos_arm64.sh +550 -0
  29. data/ext/librdkafka.dylib +0 -0
  30. data/karafka-rdkafka.gemspec +88 -0
  31. data/lib/rdkafka/abstract_handle.rb +116 -0
  32. data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
  33. data/lib/rdkafka/admin/config_binding_result.rb +30 -0
  34. data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
  35. data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
  36. data/lib/rdkafka/admin/create_acl_report.rb +24 -0
  37. data/lib/rdkafka/admin/create_partitions_handle.rb +30 -0
  38. data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
  39. data/lib/rdkafka/admin/create_topic_handle.rb +32 -0
  40. data/lib/rdkafka/admin/create_topic_report.rb +24 -0
  41. data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
  42. data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
  43. data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
  44. data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
  45. data/lib/rdkafka/admin/delete_topic_handle.rb +32 -0
  46. data/lib/rdkafka/admin/delete_topic_report.rb +24 -0
  47. data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
  48. data/lib/rdkafka/admin/describe_acl_report.rb +24 -0
  49. data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
  50. data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
  51. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
  52. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
  53. data/lib/rdkafka/admin.rb +832 -0
  54. data/lib/rdkafka/bindings.rb +584 -0
  55. data/lib/rdkafka/callbacks.rb +415 -0
  56. data/lib/rdkafka/config.rb +398 -0
  57. data/lib/rdkafka/consumer/headers.rb +79 -0
  58. data/lib/rdkafka/consumer/message.rb +86 -0
  59. data/lib/rdkafka/consumer/partition.rb +57 -0
  60. data/lib/rdkafka/consumer/topic_partition_list.rb +190 -0
  61. data/lib/rdkafka/consumer.rb +663 -0
  62. data/lib/rdkafka/error.rb +201 -0
  63. data/lib/rdkafka/helpers/oauth.rb +58 -0
  64. data/lib/rdkafka/helpers/time.rb +14 -0
  65. data/lib/rdkafka/metadata.rb +115 -0
  66. data/lib/rdkafka/native_kafka.rb +139 -0
  67. data/lib/rdkafka/producer/delivery_handle.rb +48 -0
  68. data/lib/rdkafka/producer/delivery_report.rb +45 -0
  69. data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
  70. data/lib/rdkafka/producer.rb +497 -0
  71. data/lib/rdkafka/version.rb +7 -0
  72. data/lib/rdkafka.rb +54 -0
  73. data/renovate.json +92 -0
  74. data/spec/rdkafka/abstract_handle_spec.rb +117 -0
  75. data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
  76. data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
  77. data/spec/rdkafka/admin/create_topic_handle_spec.rb +54 -0
  78. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  79. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
  80. data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
  81. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +54 -0
  82. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  83. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
  84. data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
  85. data/spec/rdkafka/admin_spec.rb +970 -0
  86. data/spec/rdkafka/bindings_spec.rb +198 -0
  87. data/spec/rdkafka/callbacks_spec.rb +20 -0
  88. data/spec/rdkafka/config_spec.rb +258 -0
  89. data/spec/rdkafka/consumer/headers_spec.rb +73 -0
  90. data/spec/rdkafka/consumer/message_spec.rb +139 -0
  91. data/spec/rdkafka/consumer/partition_spec.rb +57 -0
  92. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +248 -0
  93. data/spec/rdkafka/consumer_spec.rb +1296 -0
  94. data/spec/rdkafka/error_spec.rb +95 -0
  95. data/spec/rdkafka/metadata_spec.rb +79 -0
  96. data/spec/rdkafka/native_kafka_spec.rb +130 -0
  97. data/spec/rdkafka/producer/delivery_handle_spec.rb +60 -0
  98. data/spec/rdkafka/producer/delivery_report_spec.rb +25 -0
  99. data/spec/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
  100. data/spec/rdkafka/producer_spec.rb +1528 -0
  101. data/spec/spec_helper.rb +195 -0
  102. metadata +275 -0

data/spec/rdkafka/bindings_spec.rb
@@ -0,0 +1,198 @@
+ # frozen_string_literal: true
+
+ require 'zlib'
+
+ describe Rdkafka::Bindings do
+   it "should load librdkafka" do
+     expect(Rdkafka::Bindings.ffi_libraries.map(&:name).first).to include "librdkafka"
+   end
+
+   describe ".lib_extension" do
+     it "should know the lib extension for darwin" do
+       stub_const('RbConfig::CONFIG', 'host_os' => 'darwin')
+       expect(Rdkafka::Bindings.lib_extension).to eq "dylib"
+     end
+
+     it "should know the lib extension for linux" do
+       stub_const('RbConfig::CONFIG', 'host_os' => 'linux')
+       expect(Rdkafka::Bindings.lib_extension).to eq "so"
+     end
+   end
+
+   it "should successfully call librdkafka" do
+     expect {
+       Rdkafka::Bindings.rd_kafka_conf_new
+     }.not_to raise_error
+   end
+
+   describe "log callback" do
+     let(:log_queue) { Rdkafka::Config.log_queue }
+     before do
+       allow(log_queue).to receive(:<<)
+     end
+
+     it "should log fatal messages" do
+       Rdkafka::Bindings::LogCallback.call(nil, 0, nil, "log line")
+       expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+     end
+
+     it "should log fatal messages" do
+       Rdkafka::Bindings::LogCallback.call(nil, 1, nil, "log line")
+       expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+     end
+
+     it "should log fatal messages" do
+       Rdkafka::Bindings::LogCallback.call(nil, 2, nil, "log line")
+       expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+     end
+
+     it "should log error messages" do
+       Rdkafka::Bindings::LogCallback.call(nil, 3, nil, "log line")
+       expect(log_queue).to have_received(:<<).with([Logger::ERROR, "rdkafka: log line"])
+     end
+
+     it "should log warning messages" do
+       Rdkafka::Bindings::LogCallback.call(nil, 4, nil, "log line")
+       expect(log_queue).to have_received(:<<).with([Logger::WARN, "rdkafka: log line"])
+     end
+
+     it "should log info messages" do
+       Rdkafka::Bindings::LogCallback.call(nil, 5, nil, "log line")
+       expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
+     end
+
+     it "should log info messages" do
+       Rdkafka::Bindings::LogCallback.call(nil, 6, nil, "log line")
+       expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
+     end
+
+     it "should log debug messages" do
+       Rdkafka::Bindings::LogCallback.call(nil, 7, nil, "log line")
+       expect(log_queue).to have_received(:<<).with([Logger::DEBUG, "rdkafka: log line"])
+     end
+
+     it "should log unknown messages" do
+       Rdkafka::Bindings::LogCallback.call(nil, 100, nil, "log line")
+       expect(log_queue).to have_received(:<<).with([Logger::UNKNOWN, "rdkafka: log line"])
+     end
+   end
+
+   describe "stats callback" do
+     context "without a stats callback" do
+       it "should do nothing" do
+         expect {
+           Rdkafka::Bindings::StatsCallback.call(nil, "{}", 2, nil)
+         }.not_to raise_error
+       end
+     end
+
+     context "with a stats callback" do
+       before do
+         Rdkafka::Config.statistics_callback = lambda do |stats|
+           $received_stats = stats
+         end
+       end
+
+       it "should call the stats callback with a stats hash" do
+         Rdkafka::Bindings::StatsCallback.call(nil, "{\"received\":1}", 13, nil)
+         expect($received_stats).to eq({'received' => 1})
+       end
+     end
+   end
+
+   describe "error callback" do
+     context "without an error callback" do
+       it "should do nothing" do
+         expect {
+           Rdkafka::Bindings::ErrorCallback.call(nil, 1, "error", nil)
+         }.not_to raise_error
+       end
+     end
+
+     context "with an error callback" do
+       before do
+         Rdkafka::Config.error_callback = lambda do |error|
+           $received_error = error
+         end
+       end
+
+       it "should call the error callback with an Rdkafka::Error" do
+         Rdkafka::Bindings::ErrorCallback.call(nil, 8, "Broker not available", nil)
+         expect($received_error.code).to eq(:broker_not_available)
+         expect($received_error.broker_message).to eq("Broker not available")
+       end
+     end
+   end
+
+   describe "oauthbearer set token" do
+     context "with args" do
+       before do
+         DEFAULT_TOKEN_EXPIRY_SECONDS = 900
+         $token_value = "token"
+         $md_lifetime_ms = Time.now.to_i * 1000 + DEFAULT_TOKEN_EXPIRY_SECONDS * 1000
+         $md_principal_name = "kafka-cluster"
+         $extensions = nil
+         $extension_size = 0
+         $error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+       end
+
+       it "should set token or capture failure" do
+         RdKafkaTestConsumer.with do |consumer_ptr|
+           response = Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(consumer_ptr, $token_value, $md_lifetime_ms, $md_principal_name, $extensions, $extension_size, $error_buffer, 256)
+           expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+           expect($error_buffer.read_string).to eq("SASL/OAUTHBEARER is not the configured authentication mechanism")
+         end
+       end
+     end
+   end
+
+   describe "oauthbearer set token failure" do
+
+     context "without args" do
+
+       it "should fail" do
+         expect {
+           Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure
+         }.to raise_error(ArgumentError)
+       end
+     end
+
+     context "with args" do
+       it "should succeed" do
+         expect {
+           errstr = "error"
+           RdKafkaTestConsumer.with do |consumer_ptr|
+             Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(consumer_ptr, errstr)
+           end
+         }.to_not raise_error
+       end
+     end
+   end
+
+   describe "oauthbearer callback" do
+     context "without an oauthbearer callback" do
+       it "should do nothing" do
+         expect {
+           Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(nil, "", nil)
+         }.not_to raise_error
+       end
+     end
+
+     context "with an oauthbearer callback" do
+       before do
+         Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+           $received_config = config
+           $received_client_name = client_name
+         end
+       end
+
+       it "should call the oauth bearer callback and receive config and client name" do
+         RdKafkaTestConsumer.with do |consumer_ptr|
+           Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(consumer_ptr, "{}", nil)
+           expect($received_config).to eq("{}")
+           expect($received_client_name).to match(/consumer/)
+         end
+       end
+     end
+   end
+ end
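
The "log callback" specs above pin down how librdkafka's syslog-style severity levels (0-7, plus anything unexpected) are expected to map onto Ruby Logger severities. A minimal standalone sketch of that mapping as the specs assert it — not the gem's actual implementation — looks like this:

require 'logger'

# Severity mapping asserted by the log callback specs above:
# syslog 0-2 -> FATAL, 3 -> ERROR, 4 -> WARN, 5-6 -> INFO,
# 7 -> DEBUG, anything else -> UNKNOWN.
def logger_severity_for(syslog_level)
  case syslog_level
  when 0..2 then Logger::FATAL
  when 3    then Logger::ERROR
  when 4    then Logger::WARN
  when 5..6 then Logger::INFO
  when 7    then Logger::DEBUG
  else           Logger::UNKNOWN
  end
end

logger_severity_for(6)   # => Logger::INFO
logger_severity_for(100) # => Logger::UNKNOWN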

data/spec/rdkafka/callbacks_spec.rb
@@ -0,0 +1,20 @@
+ # frozen_string_literal: true
+
+ describe Rdkafka::Callbacks do
+
+   # The code in the callback functions is 100% covered by other specs. Due to
+   # the large number of collaborators, and the fact that FFI does not play
+   # nicely with doubles, it was very difficult to construct tests that were
+   # not over-mocked.
+
+   # For debugging purposes, if you suspect that you are running into trouble in
+   # one of the callback functions, it may be helpful to surround the inner body
+   # of the method with something like:
+   #
+   #   begin
+   #     <method body>
+   #   rescue => ex; puts ex.inspect; puts ex.backtrace; end
+   #
+   # This will output to STDOUT any exceptions that are being raised in the callback.
+
+ end
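
Applied to a hypothetical callback method (the method name and body below are placeholders, not part of the gem), the begin/rescue pattern described in the comments above works out to:

# Placeholder callback showing the suggested debugging wrapper. Exceptions
# raised inside FFI callbacks can otherwise disappear without a trace, so
# printing them to STDOUT is often the quickest way to locate the problem.
def on_some_event(arg)
  begin
    raise "simulated failure in callback body" # stands in for <method body>
  rescue => ex
    puts ex.inspect
    puts ex.backtrace
  end
end

on_some_event(:event) # prints the exception instead of losing it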

data/spec/rdkafka/config_spec.rb
@@ -0,0 +1,258 @@
+ # frozen_string_literal: true
+
+ describe Rdkafka::Config do
+   context "logger" do
+     it "should have a default logger" do
+       expect(Rdkafka::Config.logger).to be_a Logger
+     end
+
+     it "should set the logger" do
+       logger = Logger.new(STDOUT)
+       expect(Rdkafka::Config.logger).not_to eq logger
+       Rdkafka::Config.logger = logger
+       expect(Rdkafka::Config.logger).to eq logger
+     end
+
+     it "should not accept a nil logger" do
+       expect {
+         Rdkafka::Config.logger = nil
+       }.to raise_error(Rdkafka::Config::NoLoggerError)
+     end
+
+     it "supports logging queue" do
+       log = StringIO.new
+       Rdkafka::Config.logger = Logger.new(log)
+       Rdkafka::Config.ensure_log_thread
+
+       Rdkafka::Config.log_queue << [Logger::FATAL, "I love testing"]
+       20.times do
+         break if log.string != ""
+         sleep 0.05
+       end
+
+       expect(log.string).to include "FATAL -- : I love testing"
+     end
+
+     unless RUBY_PLATFORM == 'java'
+       it "expect to start new logger thread after fork and work" do
+         reader, writer = IO.pipe
+
+         pid = fork do
+           $stdout.reopen(writer)
+           Rdkafka::Config.logger = Logger.new($stdout)
+           reader.close
+           producer = rdkafka_producer_config(debug: 'all').producer
+           producer.close
+           writer.close
+           sleep(1)
+         end
+
+         writer.close
+         Process.wait(pid)
+         output = reader.read
+         expect(output.split("\n").size).to be >= 20
+       end
+     end
+   end
+
+   context "statistics callback" do
+     context "with a proc/lambda" do
+       it "should set the callback" do
+         expect {
+           Rdkafka::Config.statistics_callback = lambda do |stats|
+             puts stats
+           end
+         }.not_to raise_error
+         expect(Rdkafka::Config.statistics_callback).to respond_to :call
+       end
+     end
+
+     context "with a callable object" do
+       it "should set the callback" do
+         callback = Class.new do
+           def call(stats); end
+         end
+         expect {
+           Rdkafka::Config.statistics_callback = callback.new
+         }.not_to raise_error
+         expect(Rdkafka::Config.statistics_callback).to respond_to :call
+       end
+     end
+
+     it "should not accept a callback that's not callable" do
+       expect {
+         Rdkafka::Config.statistics_callback = 'a string'
+       }.to raise_error(TypeError)
+     end
+   end
+
+   context "error callback" do
+     context "with a proc/lambda" do
+       it "should set the callback" do
+         expect {
+           Rdkafka::Config.error_callback = lambda do |error|
+             puts error
+           end
+         }.not_to raise_error
+         expect(Rdkafka::Config.error_callback).to respond_to :call
+       end
+     end
+
+     context "with a callable object" do
+       it "should set the callback" do
+         callback = Class.new do
+           def call(stats); end
+         end
+         expect {
+           Rdkafka::Config.error_callback = callback.new
+         }.not_to raise_error
+         expect(Rdkafka::Config.error_callback).to respond_to :call
+       end
+     end
+
+     it "should not accept a callback that's not callable" do
+       expect {
+         Rdkafka::Config.error_callback = 'a string'
+       }.to raise_error(TypeError)
+     end
+   end
+
+   context "oauthbearer callback" do
+     context "with a proc/lambda" do
+       it "should set the callback" do
+         expect {
+           Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+             puts config
+             puts client_name
+           end
+         }.not_to raise_error
+         expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+       end
+     end
+
+     context "with a callable object" do
+       it "should set the callback" do
+         callback = Class.new do
+           def call(config, client_name); end
+         end
+
+         expect {
+           Rdkafka::Config.oauthbearer_token_refresh_callback = callback.new
+         }.not_to raise_error
+         expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+       end
+     end
+
+     it "should not accept a callback that's not callable" do
+       expect {
+         Rdkafka::Config.oauthbearer_token_refresh_callback = 'not a callback'
+       }.to raise_error(TypeError)
+     end
+   end
+
+   context "configuration" do
+     it "should store configuration" do
+       config = Rdkafka::Config.new
+       config[:"key"] = 'value'
+       expect(config[:"key"]).to eq 'value'
+     end
+
+     it "should use default configuration" do
+       config = Rdkafka::Config.new
+       expect(config[:"api.version.request"]).to eq true
+     end
+
+     it "should create a consumer with valid config" do
+       consumer = rdkafka_consumer_config.consumer
+       expect(consumer).to be_a Rdkafka::Consumer
+       consumer.close
+     end
+
+     it "should create a consumer with consumer_poll_set set to false" do
+       config = rdkafka_consumer_config
+       config.consumer_poll_set = false
+       consumer = config.consumer
+       expect(consumer).to be_a Rdkafka::Consumer
+       consumer.close
+     end
+
+     it "should raise an error when creating a consumer with invalid config" do
+       config = Rdkafka::Config.new('invalid.key' => 'value')
+       expect {
+         config.consumer
+       }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
+     end
+
+     it "should raise an error when creating a consumer with a nil key in the config" do
+       config = Rdkafka::Config.new(nil => 'value')
+       expect {
+         config.consumer
+       }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"\"")
+     end
+
+     it "should treat a nil value as blank" do
+       config = Rdkafka::Config.new('security.protocol' => nil)
+       expect {
+         config.consumer
+         config.producer
+       }.to raise_error(Rdkafka::Config::ConfigError, "Configuration property \"security.protocol\" cannot be set to empty value")
+     end
+
+     it "should create a producer with valid config" do
+       producer = rdkafka_consumer_config.producer
+       expect(producer).to be_a Rdkafka::Producer
+       producer.close
+     end
+
+     it "should raise an error when creating a producer with invalid config" do
+       config = Rdkafka::Config.new('invalid.key' => 'value')
+       expect {
+         config.producer
+       }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
+     end
+
+     it "allows string partitioner key" do
+       expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2").and_call_original
+       config = Rdkafka::Config.new("partitioner" => "murmur2")
+       config.producer.close
+     end
+
+     it "allows symbol partitioner key" do
+       expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2").and_call_original
+       config = Rdkafka::Config.new(:partitioner => "murmur2")
+       config.producer.close
+     end
+
+     it "should allow configuring zstd compression" do
+       config = Rdkafka::Config.new('compression.codec' => 'zstd')
+       begin
+         producer = config.producer
+         expect(producer).to be_a Rdkafka::Producer
+         producer.close
+       rescue Rdkafka::Config::ConfigError => ex
+         pending "Zstd compression not supported on this machine"
+         raise ex
+       end
+     end
+
+     it "should raise an error when client creation fails for a consumer" do
+       config = Rdkafka::Config.new(
+         "security.protocol" => "SSL",
+         "ssl.ca.location" => "/nonsense"
+       )
+       expect {
+         config.consumer
+       }.to raise_error(Rdkafka::Config::ClientCreationError, /ssl.ca.location failed(.*)/)
+     end
+
+     it "should raise an error when client creation fails for a producer" do
+       config = Rdkafka::Config.new(
+         "security.protocol" => "SSL",
+         "ssl.ca.location" => "/nonsense"
+       )
+       expect {
+         config.producer
+       }.to raise_error(Rdkafka::Config::ClientCreationError, /ssl.ca.location failed(.*)/)
+     end
+   end
+ end
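
Taken together, these specs document the Rdkafka::Config surface: hash-style property access, class-level callbacks that accept anything responding to #call, and client construction via #producer and #consumer. A minimal usage sketch, assuming a broker at a placeholder address:

require 'rdkafka'

# Class-level callback; a lambda, proc, or any object responding to #call works.
Rdkafka::Config.statistics_callback = ->(stats) { puts stats }

# Properties are passed straight through to librdkafka; unknown keys raise
# Rdkafka::Config::ConfigError when the client is built.
config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
config[:"compression.codec"] = "zstd" # raises ConfigError at build time if zstd is unsupported

producer = config.producer
producer.close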

data/spec/rdkafka/consumer/headers_spec.rb
@@ -0,0 +1,73 @@
+ # frozen_string_literal: true
+
+ describe Rdkafka::Consumer::Headers do
+   let(:headers) do
+     { # Note String keys!
+       "version" => ["2.1.3", "2.1.4"],
+       "type" => "String"
+     }
+   end
+   let(:native_message) { double('native message') }
+   let(:headers_ptr) { double('headers pointer') }
+
+   describe '.from_native' do
+     before do
+       expect(Rdkafka::Bindings).to receive(:rd_kafka_message_headers).with(native_message, anything) do |_, headers_ptrptr|
+         expect(headers_ptrptr).to receive(:read_pointer).and_return(headers_ptr)
+         Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+       end
+
+       # First version header
+       expect(Rdkafka::Bindings).to \
+         receive(:rd_kafka_header_get_all)
+           .with(headers_ptr, 0, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+             expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 0", read_string_to_null: "version"))
+             expect(size_ptr).to receive(:[]).with(:value).and_return(headers["version"][0].size)
+             expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 0", read_string: headers["version"][0]))
+             Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+           end
+
+       # Second version header
+       expect(Rdkafka::Bindings).to \
+         receive(:rd_kafka_header_get_all)
+           .with(headers_ptr, 1, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+             expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 1", read_string_to_null: "version"))
+             expect(size_ptr).to receive(:[]).with(:value).and_return(headers["version"][1].size)
+             expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 1", read_string: headers["version"][1]))
+             Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+           end
+
+       # Single type header
+       expect(Rdkafka::Bindings).to \
+         receive(:rd_kafka_header_get_all)
+           .with(headers_ptr, 2, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+             expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 2", read_string_to_null: "type"))
+             expect(size_ptr).to receive(:[]).with(:value).and_return(headers["type"].size)
+             expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 2", read_string: headers["type"]))
+             Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+           end
+
+       expect(Rdkafka::Bindings).to \
+         receive(:rd_kafka_header_get_all)
+           .with(headers_ptr, 3, anything, anything, anything)
+           .and_return(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT)
+     end
+
+     subject { described_class.from_native(native_message) }
+
+     it { is_expected.to eq(headers) }
+     it { is_expected.to be_frozen }
+
+     it 'returns array for duplicate headers' do
+       expect(subject['version']).to eq(["2.1.3", "2.1.4"])
+     end
+
+     it 'returns string for single headers' do
+       expect(subject['type']).to eq("String")
+     end
+
+     it 'does not support symbol mappings' do
+       expect(subject.key?(:version)).to eq(false)
+     end
+   end
+ end
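
In short, the header parsing exercised here yields a frozen, string-keyed hash in which repeated header names collapse into arrays. The behavior the specs assert, shown with a plain hash standing in for the result of Headers.from_native (the values are hypothetical):

# Stand-in for the hash returned by Rdkafka::Consumer::Headers.from_native.
headers = { "version" => ["2.1.3", "2.1.4"], "type" => "String" }.freeze

headers["version"] # => ["2.1.3", "2.1.4"] - duplicate header names become arrays
headers["type"]    # => "String"           - single headers stay plain strings
headers[:version]  # => nil                - symbol keys are not supported
headers.frozen?    # => true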

data/spec/rdkafka/consumer/message_spec.rb
@@ -0,0 +1,139 @@
+ # frozen_string_literal: true
+
+ describe Rdkafka::Consumer::Message do
+   let(:native_client) { new_native_client }
+   let(:native_topic) { new_native_topic(native_client: native_client) }
+   let(:payload) { nil }
+   let(:key) { nil }
+   let(:native_message) do
+     Rdkafka::Bindings::Message.new.tap do |message|
+       message[:rkt] = native_topic
+       message[:partition] = 3
+       message[:offset] = 100
+       if payload
+         ptr = FFI::MemoryPointer.new(:char, payload.bytesize)
+         ptr.put_bytes(0, payload)
+         message[:payload] = ptr
+         message[:len] = payload.bytesize
+       end
+       if key
+         ptr = FFI::MemoryPointer.new(:char, key.bytesize)
+         ptr.put_bytes(0, key)
+         message[:key] = ptr
+         message[:key_len] = key.bytesize
+       end
+     end
+   end
+
+   after(:each) do
+     Rdkafka::Bindings.rd_kafka_destroy(native_client)
+   end
+
+   subject { Rdkafka::Consumer::Message.new(native_message) }
+
+   before do
+     # Mock headers, because setting or reading headers on a message that is
+     # created from scratch produces a 'segmentation fault'.
+     #
+     # Code dump example:
+     #
+     # ```
+     # frame #7: 0x000000010dacf5ab librdkafka.dylib`rd_list_destroy + 11
+     # frame #8: 0x000000010dae5a7e librdkafka.dylib`rd_kafka_headers_destroy + 14
+     # frame #9: 0x000000010da9ab40 librdkafka.dylib`rd_kafka_message_set_headers + 32
+     # ```
+     expect(Rdkafka::Bindings).to receive(:rd_kafka_message_headers).with(any_args) do
+       Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT
+     end
+   end
+
+   it "should have a topic" do
+     expect(subject.topic).to eq "topic_name"
+   end
+
+   it "should have a partition" do
+     expect(subject.partition).to eq 3
+   end
+
+   context "payload" do
+     it "should have a nil payload when none is present" do
+       expect(subject.payload).to be_nil
+     end
+
+     context "present payload" do
+       let(:payload) { "payload content" }
+
+       it "should have a payload" do
+         expect(subject.payload).to eq "payload content"
+       end
+     end
+   end
+
+   context "key" do
+     it "should have a nil key when none is present" do
+       expect(subject.key).to be_nil
+     end
+
+     context "present key" do
+       let(:key) { "key content" }
+
+       it "should have a key" do
+         expect(subject.key).to eq "key content"
+       end
+     end
+   end
+
+   it "should have an offset" do
+     expect(subject.offset).to eq 100
+   end
+
+   describe "#timestamp" do
+     context "without a timestamp" do
+       before do
+         allow(Rdkafka::Bindings).to receive(:rd_kafka_message_timestamp).and_return(-1)
+       end
+
+       it "should have a nil timestamp if not present" do
+         expect(subject.timestamp).to be_nil
+       end
+     end
+
+     context "with a timestamp" do
+       before do
+         allow(Rdkafka::Bindings).to receive(:rd_kafka_message_timestamp).and_return(1505069646250)
+       end
+
+       it "should have timestamp if present" do
+         expect(subject.timestamp).to eq Time.at(1505069646, 250_000)
+       end
+     end
+   end
+
+   describe "#to_s" do
+     before do
+       allow(subject).to receive(:timestamp).and_return(1000)
+     end
+
+     it "should have a human readable representation" do
+       expect(subject.to_s).to eq "<Message in 'topic_name' with key '', payload '', partition 3, offset 100, timestamp 1000>"
+     end
+
+     context "with key and payload" do
+       let(:key) { "key" }
+       let(:payload) { "payload" }
+
+       it "should have a human readable representation" do
+         expect(subject.to_s).to eq "<Message in 'topic_name' with key 'key', payload 'payload', partition 3, offset 100, timestamp 1000>"
+       end
+     end
+
+     context "with a very long key and payload" do
+       let(:key) { "k" * 100_000 }
+       let(:payload) { "p" * 100_000 }
+
+       it "should have a human readable representation" do
+         expect(subject.to_s).to eq "<Message in 'topic_name' with key 'kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk...', payload 'pppppppppppppppppppppppppppppppppppppppp...', partition 3, offset 100, timestamp 1000>"
+       end
+     end
+   end
+ end