karafka-rdkafka 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +2 -0
  3. data/.gitignore +8 -0
  4. data/.rspec +1 -0
  5. data/.semaphore/semaphore.yml +23 -0
  6. data/.yardopts +2 -0
  7. data/CHANGELOG.md +104 -0
  8. data/Gemfile +3 -0
  9. data/Guardfile +19 -0
  10. data/LICENSE +21 -0
  11. data/README.md +114 -0
  12. data/Rakefile +96 -0
  13. data/bin/console +11 -0
  14. data/docker-compose.yml +24 -0
  15. data/ext/README.md +18 -0
  16. data/ext/Rakefile +62 -0
  17. data/lib/rdkafka/abstract_handle.rb +82 -0
  18. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  19. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  20. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  21. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  22. data/lib/rdkafka/admin.rb +155 -0
  23. data/lib/rdkafka/bindings.rb +312 -0
  24. data/lib/rdkafka/callbacks.rb +106 -0
  25. data/lib/rdkafka/config.rb +299 -0
  26. data/lib/rdkafka/consumer/headers.rb +63 -0
  27. data/lib/rdkafka/consumer/message.rb +84 -0
  28. data/lib/rdkafka/consumer/partition.rb +49 -0
  29. data/lib/rdkafka/consumer/topic_partition_list.rb +164 -0
  30. data/lib/rdkafka/consumer.rb +565 -0
  31. data/lib/rdkafka/error.rb +86 -0
  32. data/lib/rdkafka/metadata.rb +92 -0
  33. data/lib/rdkafka/producer/client.rb +47 -0
  34. data/lib/rdkafka/producer/delivery_handle.rb +22 -0
  35. data/lib/rdkafka/producer/delivery_report.rb +26 -0
  36. data/lib/rdkafka/producer.rb +178 -0
  37. data/lib/rdkafka/version.rb +5 -0
  38. data/lib/rdkafka.rb +22 -0
  39. data/rdkafka.gemspec +36 -0
  40. data/spec/rdkafka/abstract_handle_spec.rb +113 -0
  41. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  42. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  43. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  44. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  45. data/spec/rdkafka/admin_spec.rb +203 -0
  46. data/spec/rdkafka/bindings_spec.rb +134 -0
  47. data/spec/rdkafka/callbacks_spec.rb +20 -0
  48. data/spec/rdkafka/config_spec.rb +182 -0
  49. data/spec/rdkafka/consumer/message_spec.rb +139 -0
  50. data/spec/rdkafka/consumer/partition_spec.rb +57 -0
  51. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +223 -0
  52. data/spec/rdkafka/consumer_spec.rb +1008 -0
  53. data/spec/rdkafka/error_spec.rb +89 -0
  54. data/spec/rdkafka/metadata_spec.rb +78 -0
  55. data/spec/rdkafka/producer/client_spec.rb +145 -0
  56. data/spec/rdkafka/producer/delivery_handle_spec.rb +42 -0
  57. data/spec/rdkafka/producer/delivery_report_spec.rb +17 -0
  58. data/spec/rdkafka/producer_spec.rb +525 -0
  59. data/spec/spec_helper.rb +139 -0
  60. data.tar.gz.sig +0 -0
  61. metadata +277 -0
  62. metadata.gz.sig +0 -0
data/lib/rdkafka/admin/delete_topic_handle.rb
@@ -0,0 +1,27 @@
+module Rdkafka
+  class Admin
+    class DeleteTopicHandle < AbstractHandle
+      layout :pending, :bool,
+             :response, :int,
+             :error_string, :pointer,
+             :result_name, :pointer
+
+      # @return [String] the name of the operation
+      def operation_name
+        "delete topic"
+      end
+
+      # @return [DeleteTopicReport] the report for this delete topic operation
+      def create_result
+        DeleteTopicReport.new(self[:error_string], self[:result_name])
+      end
+
+      def raise_error
+        raise RdkafkaError.new(
+          self[:response],
+          broker_message: DeleteTopicReport.new(self[:error_string], self[:result_name]).error_string
+        )
+      end
+    end
+  end
+end
data/lib/rdkafka/admin/delete_topic_report.rb
@@ -0,0 +1,22 @@
+module Rdkafka
+  class Admin
+    class DeleteTopicReport
+      # Any error message generated from the DeleteTopic
+      # @return [String]
+      attr_reader :error_string
+
+      # The name of the topic deleted
+      # @return [String]
+      attr_reader :result_name
+
+      def initialize(error_string, result_name)
+        if error_string != FFI::Pointer::NULL
+          @error_string = error_string.read_string
+        end
+        if result_name != FFI::Pointer::NULL
+          @result_name = result_name.read_string
+        end
+      end
+    end
+  end
+end
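The handle and the report above work as a pair: the FFI struct carries librdkafka's raw response, while the report exposes it as plain Ruby attributes. A minimal usage sketch, assuming `Admin#delete_topic` (shown in the next file) and the blocking `wait` helper inherited from `AbstractHandle` (its keyword arguments live in data/lib/rdkafka/abstract_handle.rb and may differ slightly from what is shown here):

    # Hypothetical usage of the delete-topic handle and report
    handle = admin.delete_topic("obsolete_topic")
    report = handle.wait(max_wait_timeout: 30)  # raises RdkafkaError if the broker reports a failure
    report.result_name   # => "obsolete_topic"
    report.error_string  # => nil on success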
data/lib/rdkafka/admin.rb
@@ -0,0 +1,155 @@
+module Rdkafka
+  class Admin
+    # @private
+    def initialize(native_kafka)
+      @native_kafka = native_kafka
+      @closing = false
+
+      # Start thread to poll client for callbacks
+      @polling_thread = Thread.new do
+        loop do
+          Rdkafka::Bindings.rd_kafka_poll(@native_kafka, 250)
+          # Exit thread if closing and the poll queue is empty
+          if @closing && Rdkafka::Bindings.rd_kafka_outq_len(@native_kafka) == 0
+            break
+          end
+        end
+      end
+      @polling_thread.abort_on_exception = true
+    end
+
+    # Close this admin instance
+    def close
+      return unless @native_kafka
+
+      # Indicate to polling thread that we're closing
+      @closing = true
+      # Wait for the polling thread to finish up
+      @polling_thread.join
+      Rdkafka::Bindings.rd_kafka_destroy(@native_kafka)
+      @native_kafka = nil
+    end
+
+    # Create a topic with the given partition count and replication factor
+    #
+    # @raise [ConfigError] When the partition count or replication factor are out of valid range
+    # @raise [RdkafkaError] When the topic name is invalid or the topic already exists
+    # @raise [RdkafkaError] When the topic configuration is invalid
+    #
+    # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of creating the topic
+    def create_topic(topic_name, partition_count, replication_factor, topic_config = {})
+
+      # Create a rd_kafka_NewTopic_t representing the new topic
+      error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+      new_topic_ptr = Rdkafka::Bindings.rd_kafka_NewTopic_new(
+        FFI::MemoryPointer.from_string(topic_name),
+        partition_count,
+        replication_factor,
+        error_buffer,
+        256
+      )
+      if new_topic_ptr.null?
+        raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+      end
+
+      unless topic_config.nil?
+        topic_config.each do |key, value|
+          Rdkafka::Bindings.rd_kafka_NewTopic_set_config(
+            new_topic_ptr,
+            key.to_s,
+            value.to_s
+          )
+        end
+      end
+
+      # Note that rd_kafka_CreateTopics can create more than one topic at a time
+      pointer_array = [new_topic_ptr]
+      topics_array_ptr = FFI::MemoryPointer.new(:pointer)
+      topics_array_ptr.write_array_of_pointer(pointer_array)
+
+      # Get a pointer to the queue that our request will be enqueued on
+      queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka)
+      if queue_ptr.null?
+        Rdkafka::Bindings.rd_kafka_NewTopic_destroy(new_topic_ptr)
+        raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
+      end
+
+      # Create and register the handle we will return to the caller
+      create_topic_handle = CreateTopicHandle.new
+      create_topic_handle[:pending] = true
+      create_topic_handle[:response] = -1
+      CreateTopicHandle.register(create_topic_handle)
+      admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATETOPICS)
+      Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, create_topic_handle.to_ptr)
+
+      begin
+        Rdkafka::Bindings.rd_kafka_CreateTopics(
+          @native_kafka,
+          topics_array_ptr,
+          1,
+          admin_options_ptr,
+          queue_ptr
+        )
+      rescue Exception
+        CreateTopicHandle.remove(create_topic_handle.to_ptr.address)
+        raise
+      ensure
+        Rdkafka::Bindings.rd_kafka_AdminOptions_destroy(admin_options_ptr)
+        Rdkafka::Bindings.rd_kafka_queue_destroy(queue_ptr)
+        Rdkafka::Bindings.rd_kafka_NewTopic_destroy(new_topic_ptr)
+      end
+
+      create_topic_handle
+    end
+
+    # Delete the named topic
+    #
+    # @raise [RdkafkaError] When the topic name is invalid or the topic does not exist
+    #
+    # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of deleting the topic
+    def delete_topic(topic_name)
+
+      # Create a rd_kafka_DeleteTopic_t representing the topic to be deleted
+      delete_topic_ptr = Rdkafka::Bindings.rd_kafka_DeleteTopic_new(FFI::MemoryPointer.from_string(topic_name))
+
+      # Note that rd_kafka_DeleteTopics can delete more than one topic at a time
+      pointer_array = [delete_topic_ptr]
+      topics_array_ptr = FFI::MemoryPointer.new(:pointer)
+      topics_array_ptr.write_array_of_pointer(pointer_array)
+
+      # Get a pointer to the queue that our request will be enqueued on
+      queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka)
+      if queue_ptr.null?
+        Rdkafka::Bindings.rd_kafka_DeleteTopic_destroy(delete_topic_ptr)
+        raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
+      end
+
+      # Create and register the handle we will return to the caller
+      delete_topic_handle = DeleteTopicHandle.new
+      delete_topic_handle[:pending] = true
+      delete_topic_handle[:response] = -1
+      DeleteTopicHandle.register(delete_topic_handle)
+      admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_DELETETOPICS)
+      Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, delete_topic_handle.to_ptr)
+
+      begin
+        Rdkafka::Bindings.rd_kafka_DeleteTopics(
+          @native_kafka,
+          topics_array_ptr,
+          1,
+          admin_options_ptr,
+          queue_ptr
+        )
+      rescue Exception
+        DeleteTopicHandle.remove(delete_topic_handle.to_ptr.address)
+        raise
+      ensure
+        Rdkafka::Bindings.rd_kafka_AdminOptions_destroy(admin_options_ptr)
+        Rdkafka::Bindings.rd_kafka_queue_destroy(queue_ptr)
+        Rdkafka::Bindings.rd_kafka_DeleteTopic_destroy(delete_topic_ptr)
+      end
+
+      delete_topic_handle
+    end
+  end
+end
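For orientation, a rough sketch of how this Admin class is typically obtained and driven. It assumes `Rdkafka::Config#admin` as the entry point (the config class ships in this release as data/lib/rdkafka/config.rb) and the blocking `wait` from `AbstractHandle`; treat it as illustrative rather than canonical:

    require "rdkafka"

    config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
    admin  = config.admin

    # create_topic enqueues the request on the background queue polled in #initialize
    # and returns a CreateTopicHandle; wait blocks until the callback resolves it
    admin.create_topic("example_topic", 3, 1, "cleanup.policy" => "compact").wait

    admin.delete_topic("example_topic").wait
    admin.close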
data/lib/rdkafka/bindings.rb
@@ -0,0 +1,312 @@
+require "ffi"
+require "json"
+require "logger"
+
+module Rdkafka
+  # @private
+  module Bindings
+    extend FFI::Library
+
+    def self.lib_extension
+      if RbConfig::CONFIG['host_os'] =~ /darwin/
+        'dylib'
+      else
+        'so'
+      end
+    end
+
+    ffi_lib File.join(File.dirname(__FILE__), "../../ext/librdkafka.#{lib_extension}")
+
+    RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS = -175
+    RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS = -174
+    RD_KAFKA_RESP_ERR__NOENT = -156
+    RD_KAFKA_RESP_ERR_NO_ERROR = 0
+
+    RD_KAFKA_OFFSET_END = -1
+    RD_KAFKA_OFFSET_BEGINNING = -2
+    RD_KAFKA_OFFSET_STORED = -1000
+    RD_KAFKA_OFFSET_INVALID = -1001
+
+    class SizePtr < FFI::Struct
+      layout :value, :size_t
+    end
+
+    # Polling
+
+    attach_function :rd_kafka_poll, [:pointer, :int], :void, blocking: true
+    attach_function :rd_kafka_outq_len, [:pointer], :int, blocking: true
+
+    # Metadata
+
+    attach_function :rd_kafka_memberid, [:pointer], :string
+    attach_function :rd_kafka_clusterid, [:pointer], :string
+    attach_function :rd_kafka_metadata, [:pointer, :int, :pointer, :pointer, :int], :int
+    attach_function :rd_kafka_metadata_destroy, [:pointer], :void
+
+    # Message struct
+
+    class Message < FFI::Struct
+      layout :err, :int,
+             :rkt, :pointer,
+             :partition, :int32,
+             :payload, :pointer,
+             :len, :size_t,
+             :key, :pointer,
+             :key_len, :size_t,
+             :offset, :int64,
+             :_private, :pointer
+    end
+
+    attach_function :rd_kafka_message_destroy, [:pointer], :void
+    attach_function :rd_kafka_message_timestamp, [:pointer, :pointer], :int64
+    attach_function :rd_kafka_topic_new, [:pointer, :string, :pointer], :pointer
+    attach_function :rd_kafka_topic_destroy, [:pointer], :pointer
+    attach_function :rd_kafka_topic_name, [:pointer], :string
+
+    # TopicPartition and TopicPartitionList structs
+
+    class TopicPartition < FFI::Struct
+      layout :topic, :string,
+             :partition, :int32,
+             :offset, :int64,
+             :metadata, :pointer,
+             :metadata_size, :size_t,
+             :opaque, :pointer,
+             :err, :int,
+             :_private, :pointer
+    end
+
+    class TopicPartitionList < FFI::Struct
+      layout :cnt, :int,
+             :size, :int,
+             :elems, :pointer
+    end
+
+    attach_function :rd_kafka_topic_partition_list_new, [:int32], :pointer
+    attach_function :rd_kafka_topic_partition_list_add, [:pointer, :string, :int32], :void
+    attach_function :rd_kafka_topic_partition_list_set_offset, [:pointer, :string, :int32, :int64], :void
+    attach_function :rd_kafka_topic_partition_list_destroy, [:pointer], :void
+    attach_function :rd_kafka_topic_partition_list_copy, [:pointer], :pointer
+
+    # Errors
+
+    attach_function :rd_kafka_err2name, [:int], :string
+    attach_function :rd_kafka_err2str, [:int], :string
+
+    # Configuration
+
+    enum :kafka_config_response, [
+      :config_unknown, -2,
+      :config_invalid, -1,
+      :config_ok, 0
+    ]
+
+    attach_function :rd_kafka_conf_new, [], :pointer
+    attach_function :rd_kafka_conf_set, [:pointer, :string, :string, :pointer, :int], :kafka_config_response
+    callback :log_cb, [:pointer, :int, :string, :string], :void
+    attach_function :rd_kafka_conf_set_log_cb, [:pointer, :log_cb], :void
+    attach_function :rd_kafka_conf_set_opaque, [:pointer, :pointer], :void
+    callback :stats_cb, [:pointer, :string, :int, :pointer], :int
+    attach_function :rd_kafka_conf_set_stats_cb, [:pointer, :stats_cb], :void
+    callback :error_cb, [:pointer, :int, :string, :pointer], :void
+    attach_function :rd_kafka_conf_set_error_cb, [:pointer, :error_cb], :void
+
+    # Log queue
+    attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
+    attach_function :rd_kafka_queue_get_main, [:pointer], :pointer
+
+    LogCallback = FFI::Function.new(
+      :void, [:pointer, :int, :string, :string]
+    ) do |_client_ptr, level, _level_string, line|
+      severity = case level
+                 when 0, 1, 2
+                   Logger::FATAL
+                 when 3
+                   Logger::ERROR
+                 when 4
+                   Logger::WARN
+                 when 5, 6
+                   Logger::INFO
+                 when 7
+                   Logger::DEBUG
+                 else
+                   Logger::UNKNOWN
+                 end
+      Rdkafka::Config.log_queue << [severity, "rdkafka: #{line}"]
+    end
+
+    StatsCallback = FFI::Function.new(
+      :int, [:pointer, :string, :int, :pointer]
+    ) do |_client_ptr, json, _json_len, _opaque|
+      # Pass the stats hash to callback in config
+      if Rdkafka::Config.statistics_callback
+        stats = JSON.parse(json)
+        Rdkafka::Config.statistics_callback.call(stats)
+      end
+
+      # Return 0 so librdkafka frees the json string
+      0
+    end
+
+    ErrorCallback = FFI::Function.new(
+      :void, [:pointer, :int, :string, :pointer]
+    ) do |_client_ptr, err_code, reason, _opaque|
+      if Rdkafka::Config.error_callback
+        error = Rdkafka::RdkafkaError.new(err_code, broker_message: reason)
+        Rdkafka::Config.error_callback.call(error)
+      end
+    end
+
+    # Handle
+
+    enum :kafka_type, [
+      :rd_kafka_producer,
+      :rd_kafka_consumer
+    ]
+
+    attach_function :rd_kafka_new, [:kafka_type, :pointer, :pointer, :int], :pointer
+    attach_function :rd_kafka_destroy, [:pointer], :void
+
+    # Consumer
+
+    attach_function :rd_kafka_subscribe, [:pointer, :pointer], :int
+    attach_function :rd_kafka_unsubscribe, [:pointer], :int
+    attach_function :rd_kafka_subscription, [:pointer, :pointer], :int
+    attach_function :rd_kafka_assign, [:pointer, :pointer], :int
+    attach_function :rd_kafka_assignment, [:pointer, :pointer], :int
+    attach_function :rd_kafka_committed, [:pointer, :pointer, :int], :int
+    attach_function :rd_kafka_commit, [:pointer, :pointer, :bool], :int, blocking: true
+    attach_function :rd_kafka_poll_set_consumer, [:pointer], :void
+    attach_function :rd_kafka_consumer_poll, [:pointer, :int], :pointer, blocking: true
+    attach_function :rd_kafka_consumer_close, [:pointer], :void, blocking: true
+    attach_function :rd_kafka_offset_store, [:pointer, :int32, :int64], :int
+    attach_function :rd_kafka_pause_partitions, [:pointer, :pointer], :int
+    attach_function :rd_kafka_resume_partitions, [:pointer, :pointer], :int
+    attach_function :rd_kafka_seek, [:pointer, :int32, :int64, :int], :int
+
+    # Headers
+    attach_function :rd_kafka_header_get_all, [:pointer, :size_t, :pointer, :pointer, SizePtr], :int
+    attach_function :rd_kafka_message_headers, [:pointer, :pointer], :int
+
+    # Rebalance
+
+    callback :rebalance_cb_function, [:pointer, :int, :pointer, :pointer], :void
+    attach_function :rd_kafka_conf_set_rebalance_cb, [:pointer, :rebalance_cb_function], :void
+
+    RebalanceCallback = FFI::Function.new(
+      :void, [:pointer, :int, :pointer, :pointer]
+    ) do |client_ptr, code, partitions_ptr, opaque_ptr|
+      case code
+      when RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS
+        Rdkafka::Bindings.rd_kafka_assign(client_ptr, partitions_ptr)
+      else # RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS or errors
+        Rdkafka::Bindings.rd_kafka_assign(client_ptr, FFI::Pointer::NULL)
+      end
+
+      opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
+      return unless opaque
+
+      tpl = Rdkafka::Consumer::TopicPartitionList.from_native_tpl(partitions_ptr).freeze
+      consumer = Rdkafka::Consumer.new(client_ptr)
+
+      begin
+        case code
+        when RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS
+          opaque.call_on_partitions_assigned(consumer, tpl)
+        when RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS
+          opaque.call_on_partitions_revoked(consumer, tpl)
+        end
+      rescue Exception => err
+        Rdkafka::Config.logger.error("Unhandled exception: #{err.class} - #{err.message}")
+      end
+    end
+
+    # Stats
+
+    attach_function :rd_kafka_query_watermark_offsets, [:pointer, :string, :int, :pointer, :pointer, :int], :int
+
+    # Producer
+
+    RD_KAFKA_VTYPE_END = 0
+    RD_KAFKA_VTYPE_TOPIC = 1
+    RD_KAFKA_VTYPE_RKT = 2
+    RD_KAFKA_VTYPE_PARTITION = 3
+    RD_KAFKA_VTYPE_VALUE = 4
+    RD_KAFKA_VTYPE_KEY = 5
+    RD_KAFKA_VTYPE_OPAQUE = 6
+    RD_KAFKA_VTYPE_MSGFLAGS = 7
+    RD_KAFKA_VTYPE_TIMESTAMP = 8
+    RD_KAFKA_VTYPE_HEADER = 9
+    RD_KAFKA_VTYPE_HEADERS = 10
+
+    RD_KAFKA_MSG_F_COPY = 0x2
+
+    attach_function :rd_kafka_producev, [:pointer, :varargs], :int
+    callback :delivery_cb, [:pointer, :pointer, :pointer], :void
+    attach_function :rd_kafka_conf_set_dr_msg_cb, [:pointer, :delivery_cb], :void
+
+    # Partitioner
+    PARTITIONERS = %w(random consistent consistent_random murmur2 murmur2_random fnv1a fnv1a_random).each_with_object({}) do |name, hsh|
+      method_name = "rd_kafka_msg_partitioner_#{name}".to_sym
+      attach_function method_name, [:pointer, :pointer, :size_t, :int32, :pointer, :pointer], :int32
+      hsh[name] = method_name
+    end
+
+    def self.partitioner(str, partition_count, partitioner_name = "consistent_random")
+      # Return RD_KAFKA_PARTITION_UA (unassigned partition) when partition count is nil/zero.
+      return -1 unless partition_count&.nonzero?
+
+      str_ptr = FFI::MemoryPointer.from_string(str)
+      method_name = PARTITIONERS.fetch(partitioner_name) do
+        raise Rdkafka::Config::ConfigError.new("Unknown partitioner: #{partitioner_name}")
+      end
+      public_send(method_name, nil, str_ptr, str.size, partition_count, nil, nil)
+    end
+
+    # Create Topics
+
+    RD_KAFKA_ADMIN_OP_CREATETOPICS = 1 # rd_kafka_admin_op_t
+    RD_KAFKA_EVENT_CREATETOPICS_RESULT = 100 # rd_kafka_event_type_t
+
+    attach_function :rd_kafka_CreateTopics, [:pointer, :pointer, :size_t, :pointer, :pointer], :void
+    attach_function :rd_kafka_NewTopic_new, [:pointer, :size_t, :size_t, :pointer, :size_t], :pointer
+    attach_function :rd_kafka_NewTopic_set_config, [:pointer, :string, :string], :int32
+    attach_function :rd_kafka_NewTopic_destroy, [:pointer], :void
+    attach_function :rd_kafka_event_CreateTopics_result, [:pointer], :pointer
+    attach_function :rd_kafka_CreateTopics_result_topics, [:pointer, :pointer], :pointer
+
+    # Delete Topics
+
+    RD_KAFKA_ADMIN_OP_DELETETOPICS = 2 # rd_kafka_admin_op_t
+    RD_KAFKA_EVENT_DELETETOPICS_RESULT = 101 # rd_kafka_event_type_t
+
+    attach_function :rd_kafka_DeleteTopics, [:pointer, :pointer, :size_t, :pointer, :pointer], :int32
+    attach_function :rd_kafka_DeleteTopic_new, [:pointer], :pointer
+    attach_function :rd_kafka_DeleteTopic_destroy, [:pointer], :void
+    attach_function :rd_kafka_event_DeleteTopics_result, [:pointer], :pointer
+    attach_function :rd_kafka_DeleteTopics_result_topics, [:pointer, :pointer], :pointer
+
+    # Background Queue and Callback
+
+    attach_function :rd_kafka_queue_get_background, [:pointer], :pointer
+    attach_function :rd_kafka_conf_set_background_event_cb, [:pointer, :pointer], :void
+    attach_function :rd_kafka_queue_destroy, [:pointer], :void
+
+    # Admin Options
+
+    attach_function :rd_kafka_AdminOptions_new, [:pointer, :int32], :pointer
+    attach_function :rd_kafka_AdminOptions_set_opaque, [:pointer, :pointer], :void
+    attach_function :rd_kafka_AdminOptions_destroy, [:pointer], :void
+
+    # Extracting data from event types
+
+    attach_function :rd_kafka_event_type, [:pointer], :int32
+    attach_function :rd_kafka_event_opaque, [:pointer], :pointer
+
+    # Extracting data from topic results
+
+    attach_function :rd_kafka_topic_result_error, [:pointer], :int32
+    attach_function :rd_kafka_topic_result_error_string, [:pointer], :pointer
+    attach_function :rd_kafka_topic_result_name, [:pointer], :pointer
+  end
+end
1
+ module Rdkafka
2
+ module Callbacks
3
+
4
+ # Extracts attributes of a rd_kafka_topic_result_t
5
+ #
6
+ # @private
7
+ class TopicResult
8
+ attr_reader :result_error, :error_string, :result_name
9
+
10
+ def initialize(topic_result_pointer)
11
+ @result_error = Rdkafka::Bindings.rd_kafka_topic_result_error(topic_result_pointer)
12
+ @error_string = Rdkafka::Bindings.rd_kafka_topic_result_error_string(topic_result_pointer)
13
+ @result_name = Rdkafka::Bindings.rd_kafka_topic_result_name(topic_result_pointer)
14
+ end
15
+
16
+ def self.create_topic_results_from_array(count, array_pointer)
17
+ (1..count).map do |index|
18
+ result_pointer = (array_pointer + (index - 1)).read_pointer
19
+ new(result_pointer)
20
+ end
21
+ end
22
+ end
23
+
24
+ # FFI Function used for Create Topic and Delete Topic callbacks
25
+ BackgroundEventCallbackFunction = FFI::Function.new(
26
+ :void, [:pointer, :pointer, :pointer]
27
+ ) do |client_ptr, event_ptr, opaque_ptr|
28
+ BackgroundEventCallback.call(client_ptr, event_ptr, opaque_ptr)
29
+ end
30
+
31
+ # @private
32
+ class BackgroundEventCallback
33
+ def self.call(_, event_ptr, _)
34
+ event_type = Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
35
+ if event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
36
+ process_create_topic(event_ptr)
37
+ elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETETOPICS_RESULT
38
+ process_delete_topic(event_ptr)
39
+ end
40
+ end
41
+
42
+ private
43
+
44
+ def self.process_create_topic(event_ptr)
45
+ create_topics_result = Rdkafka::Bindings.rd_kafka_event_CreateTopics_result(event_ptr)
46
+
47
+ # Get the number of create topic results
48
+ pointer_to_size_t = FFI::MemoryPointer.new(:int32)
49
+ create_topic_result_array = Rdkafka::Bindings.rd_kafka_CreateTopics_result_topics(create_topics_result, pointer_to_size_t)
50
+ create_topic_results = TopicResult.create_topic_results_from_array(pointer_to_size_t.read_int, create_topic_result_array)
51
+ create_topic_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
52
+
53
+ if create_topic_handle = Rdkafka::Admin::CreateTopicHandle.remove(create_topic_handle_ptr.address)
54
+ create_topic_handle[:response] = create_topic_results[0].result_error
55
+ create_topic_handle[:error_string] = create_topic_results[0].error_string
56
+ create_topic_handle[:result_name] = create_topic_results[0].result_name
57
+ create_topic_handle[:pending] = false
58
+ end
59
+ end
60
+
61
+ def self.process_delete_topic(event_ptr)
62
+ delete_topics_result = Rdkafka::Bindings.rd_kafka_event_DeleteTopics_result(event_ptr)
63
+
64
+ # Get the number of topic results
65
+ pointer_to_size_t = FFI::MemoryPointer.new(:int32)
66
+ delete_topic_result_array = Rdkafka::Bindings.rd_kafka_DeleteTopics_result_topics(delete_topics_result, pointer_to_size_t)
67
+ delete_topic_results = TopicResult.create_topic_results_from_array(pointer_to_size_t.read_int, delete_topic_result_array)
68
+ delete_topic_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
69
+
70
+ if delete_topic_handle = Rdkafka::Admin::DeleteTopicHandle.remove(delete_topic_handle_ptr.address)
71
+ delete_topic_handle[:response] = delete_topic_results[0].result_error
72
+ delete_topic_handle[:error_string] = delete_topic_results[0].error_string
73
+ delete_topic_handle[:result_name] = delete_topic_results[0].result_name
74
+ delete_topic_handle[:pending] = false
75
+ end
76
+ end
77
+ end
78
+
79
+ # FFI Function used for Message Delivery callbacks
80
+
81
+ DeliveryCallbackFunction = FFI::Function.new(
82
+ :void, [:pointer, :pointer, :pointer]
83
+ ) do |client_ptr, message_ptr, opaque_ptr|
84
+ DeliveryCallback.call(client_ptr, message_ptr, opaque_ptr)
85
+ end
86
+
87
+ # @private
88
+ class DeliveryCallback
89
+ def self.call(_, message_ptr, opaque_ptr)
90
+ message = Rdkafka::Bindings::Message.new(message_ptr)
91
+ delivery_handle_ptr_address = message[:_private].address
92
+ if delivery_handle = Rdkafka::Producer::DeliveryHandle.remove(delivery_handle_ptr_address)
93
+ # Update delivery handle
94
+ delivery_handle[:response] = message[:err]
95
+ delivery_handle[:partition] = message[:partition]
96
+ delivery_handle[:offset] = message[:offset]
97
+ delivery_handle[:pending] = false
98
+ # Call delivery callback on opaque
99
+ if opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
100
+ opaque.call_delivery_callback(Rdkafka::Producer::DeliveryReport.new(message[:partition], message[:offset], message[:err]), delivery_handle)
101
+ end
102
+ end
103
+ end
104
+ end
105
+ end
106
+ end
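The DeliveryCallback above is what resolves producer delivery handles. A short sketch of how that surfaces to user code, assuming the produce API from data/lib/rdkafka/producer.rb in this release (argument and keyword names may differ slightly):

    producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer
    handle = producer.produce(topic: "example_topic", payload: "hello", key: "k1")

    # DeliveryCallback fills :partition, :offset and :response on the handle;
    # wait turns that into a DeliveryReport or raises RdkafkaError
    report = handle.wait(max_wait_timeout: 10)
    puts "delivered to partition #{report.partition} at offset #{report.offset}"
    producer.close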