kafka 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. checksums.yaml +7 -0
  2. data/.gitignore +14 -0
  3. data/.rubocop.yml +210 -0
  4. data/.travis.yml +45 -0
  5. data/CHANGELOG.md +3 -0
  6. data/CODE_OF_CONDUCT.md +74 -0
  7. data/Gemfile +5 -0
  8. data/LICENSE.txt +21 -0
  9. data/README.md +182 -0
  10. data/Rakefile +69 -0
  11. data/examples/consumer.rb +55 -0
  12. data/examples/producer.rb +46 -0
  13. data/ext/Rakefile +69 -0
  14. data/kafka.gemspec +39 -0
  15. data/lib/kafka/admin.rb +141 -0
  16. data/lib/kafka/config.rb +145 -0
  17. data/lib/kafka/consumer.rb +87 -0
  18. data/lib/kafka/error.rb +44 -0
  19. data/lib/kafka/ffi/admin/admin_options.rb +121 -0
  20. data/lib/kafka/ffi/admin/config_entry.rb +97 -0
  21. data/lib/kafka/ffi/admin/config_resource.rb +101 -0
  22. data/lib/kafka/ffi/admin/delete_topic.rb +19 -0
  23. data/lib/kafka/ffi/admin/new_partitions.rb +77 -0
  24. data/lib/kafka/ffi/admin/new_topic.rb +91 -0
  25. data/lib/kafka/ffi/admin/result.rb +66 -0
  26. data/lib/kafka/ffi/admin/topic_result.rb +32 -0
  27. data/lib/kafka/ffi/admin.rb +16 -0
  28. data/lib/kafka/ffi/broker_metadata.rb +32 -0
  29. data/lib/kafka/ffi/client.rb +640 -0
  30. data/lib/kafka/ffi/config.rb +382 -0
  31. data/lib/kafka/ffi/consumer.rb +342 -0
  32. data/lib/kafka/ffi/error.rb +25 -0
  33. data/lib/kafka/ffi/event.rb +215 -0
  34. data/lib/kafka/ffi/group_info.rb +75 -0
  35. data/lib/kafka/ffi/group_list.rb +27 -0
  36. data/lib/kafka/ffi/group_member_info.rb +52 -0
  37. data/lib/kafka/ffi/message/header.rb +205 -0
  38. data/lib/kafka/ffi/message.rb +205 -0
  39. data/lib/kafka/ffi/metadata.rb +58 -0
  40. data/lib/kafka/ffi/opaque.rb +81 -0
  41. data/lib/kafka/ffi/opaque_pointer.rb +73 -0
  42. data/lib/kafka/ffi/partition_metadata.rb +61 -0
  43. data/lib/kafka/ffi/producer.rb +144 -0
  44. data/lib/kafka/ffi/queue.rb +65 -0
  45. data/lib/kafka/ffi/topic.rb +32 -0
  46. data/lib/kafka/ffi/topic_config.rb +126 -0
  47. data/lib/kafka/ffi/topic_metadata.rb +42 -0
  48. data/lib/kafka/ffi/topic_partition.rb +43 -0
  49. data/lib/kafka/ffi/topic_partition_list.rb +167 -0
  50. data/lib/kafka/ffi.rb +624 -0
  51. data/lib/kafka/poller.rb +28 -0
  52. data/lib/kafka/producer/delivery_report.rb +120 -0
  53. data/lib/kafka/producer.rb +127 -0
  54. data/lib/kafka/version.rb +8 -0
  55. data/lib/kafka.rb +11 -0
  56. metadata +159 -0
data/lib/kafka/ffi/config.rb
@@ -0,0 +1,382 @@
+ # frozen_string_literal: true
+
+ require "ffi"
+ require "kafka/ffi/opaque_pointer"
+
+ module Kafka::FFI
+   class Config < OpaquePointer
+     def self.new
+       Kafka::FFI.rd_kafka_conf_new
+     end
+
+     def initialize(ptr)
+       super(ptr)
+
+       # Maintain references to all of the set callbacks to avoid them being
+       # garbage collected.
+       @callbacks = {}
+     end
+
+     # Set the config option at `key` to `value`. The configuration options
+     # match those used by librdkafka (and the Java client).
+     #
+     # @see https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
+     #
+     # @param key [String] Configuration key
+     # @param value [String] Value to set
+     #
+     # @raise [Kafka::FFI::UnknownConfigKey]
+     # @raise [Kafka::FFI::InvalidConfigValue]
+     def set(key, value)
+       key = key.to_s
+       value = value.to_s
+
+       error = ::FFI::MemoryPointer.new(:char, 512)
+       result = ::Kafka::FFI.rd_kafka_conf_set(self, key, value, error, error.size)
+
+       # See config_result enum in ffi.rb
+       case result
+       when :ok
+         nil
+       when :unknown
+         raise Kafka::FFI::UnknownConfigKey.new(key, value, error.read_string)
+       when :invalid
+         raise Kafka::FFI::InvalidConfigValue.new(key, value, error.read_string)
+       end
+     ensure
+       error.free if error
+     end
+
+     # Get the current config value for the given key.
+     #
+     # @param key [String] Config key to fetch the setting for.
+     #
+     # @return [String, :unknown] Value for the key or :unknown if not already
+     #   set.
+     def get(key)
+       key = key.to_s
+
+       # Will contain the size of the value at key
+       size = ::FFI::MemoryPointer.new(:size_t)
+
+       # Make an initial request for the size of buffer we need to allocate.
+       # Guessing at a potential size up front often segfaulted; passing NULL
+       # asks rd_kafka_conf_get to report the exact size to allocate instead.
+       err = ::Kafka::FFI.rd_kafka_conf_get(self, key, ::FFI::Pointer::NULL, size)
+       if err != :ok
+         return err
+       end
+
+       # Allocate a string long enough to contain the whole value.
+       value = ::FFI::MemoryPointer.new(:char, size.read(:size_t))
+       err = ::Kafka::FFI.rd_kafka_conf_get(self, key, value, size)
+       if err != :ok
+         return err
+       end
+
+       value.read_string
+     ensure
+       size.free if size
+       value.free if value
+     end
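A minimal usage sketch of the set/get pair above (the broker address is illustrative):

    config = Kafka::FFI::Config.new
    config.set("bootstrap.servers", "localhost:9092")

    config.get("bootstrap.servers") # => "localhost:9092"
    config.get("not.a.real.key")    # => :unknown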
+
+     # Duplicate the current config
+     #
+     # @return [Config] Duplicated config
+     def dup
+       ::Kafka::FFI.rd_kafka_conf_dup(self)
+     end
+
+     # Duplicate the config but do not copy any config options that match the
+     # filtered keys.
+     #
+     # @param filter [Array<String>] Config keys to exclude from the copy
+     #
+     # @return [Config] Duplicated config
+     def dup_filter(*filter)
+       ptr = ::FFI::MemoryPointer.new(:pointer, filter.length)
+
+       ptr.write_array_of_pointer(
+         filter.map { |str| ::FFI::MemoryPointer.from_string(str) },
+       )
+
+       ::Kafka::FFI.rd_kafka_conf_dup_filter(self, filter.length, ptr)
+     ensure
+       ptr.free if ptr
+     end
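For example, a config could be copied for a second consumer while dropping its group id (the keys shown are illustrative):

    base = Kafka::FFI::Config.new
    base.set("bootstrap.servers", "localhost:9092")
    base.set("group.id", "reporting")

    # The copy shares every setting except group.id.
    copy = base.dup_filter("group.id")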
+
+     # rubocop:disable Naming/AccessorMethodName
+     # Disabled to allow matching librdkafka naming convention
+
+     # Enable event sourcing. Convenience method to set the `enabled_events`
+     # option as an integer.
+     #
+     # @example Set events using event symbol names
+     #   config.set_events([ :delivery, :log, :fetch ])
+     #
+     # @example Set events using event constants
+     #   config.set_events([ RD_KAFKA_EVENT_DR, RD_KAFKA_EVENT_LOG ])
+     #
+     # @param events_mask [Integer, Array<Symbol, Integer>] Bitmask of events to
+     #   enable during queue poll.
+     def set_events(events_mask)
+       mask = events_mask
+
+       # Support building the bitmask from an Array of event symbols and/or
+       # integer constants.
+       if events_mask.is_a?(Array)
+         mask = 0
+         enum = ::Kafka::FFI.enum_type(:event_type)
+
+         events_mask.each do |val|
+           case val
+           when Integer then mask |= val
+           when Symbol  then mask |= (enum[val] || 0)
+           end
+         end
+       end
+
+       ::Kafka::FFI.rd_kafka_conf_set_events(self, mask)
+     end
+
+     # Set the callback that will be used for events published to the background
+     # queue. This enables a background thread that runs internal to librdkafka
+     # and can be used as a standard receiver for APIs that take a queue.
+     #
+     # @see Client#get_background_queue
+     #
+     # @note The application is responsible for calling #destroy on the event.
+     # @note The application must not call #destroy on the Client inside the
+     #   callback.
+     #
+     # @yield [client, event, opaque] Called when an event is received by the
+     #   queue.
+     # @yieldparam client [Client] Kafka Client for the event
+     # @yieldparam event [Event] The event that occurred
+     # @yieldparam opaque [::FFI::Pointer] Pointer to the configuration's opaque
+     #   pointer that was set via set_opaque.
+     def set_background_event_cb(&block)
+       @callbacks[:background_event_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_background_event_cb(self, &block)
+     end
+     alias background_event_cb= set_background_event_cb
+
+     # Set delivery report callback for the config. The delivery report callback
+     # will be called once for each message accepted by Producer#produce. The
+     # Message will have #error set in the event of a producer error.
+     #
+     # The callback is called when a message is successfully produced or if
+     # librdkafka encountered a permanent failure.
+     #
+     # @note Producer only
+     #
+     # @yield [client, message, opaque] Called for each Message produced.
+     # @yieldparam client [Client] Kafka Client for the event
+     # @yieldparam message [Message] Message that was produced
+     # @yieldparam opaque [::FFI::Pointer] Pointer to the configuration's opaque
+     #   pointer that was set via set_opaque.
+     def set_dr_msg_cb(&block)
+       @callbacks[:dr_msg_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_dr_msg_cb(self, &block)
+     end
+     alias dr_msg_cb= set_dr_msg_cb
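A sketch of a delivery report handler; the #error accessor on the yielded message is assumed from this gem's Message class:

    config.set_dr_msg_cb do |client, message, opaque|
      if message.error
        warn "delivery failed: #{message.error}"
      end
    end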
+
+     # Set consume callback for use with consumer_poll.
+     #
+     # @note Consumer only
+     #
+     # @yield [message, opaque]
+     # @yieldparam message [Message]
+     # @yieldparam opaque [::FFI::Pointer]
+     def set_consume_cb(&block)
+       @callbacks[:consume_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_consume_cb(self, &block)
+     end
+     alias consume_cb= set_consume_cb
+
+     # Set rebalance callback for use with consumer group balancing. Setting the
+     # rebalance callback will turn off librdkafka's automatic handling of
+     # assignment/revocation and delegate that responsibility to the
+     # application's callback.
+     #
+     # @see rdkafka.h rd_kafka_conf_set_rebalance_cb
+     # @note Consumer only
+     #
+     # @yield [client, error, partitions, opaque]
+     # @yieldparam client [Client]
+     # @yieldparam error [RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS] Callback
+     #   contains new assignments for the consumer.
+     # @yieldparam error [RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS] Callback
+     #   contains revocation of assignments for the consumer.
+     # @yieldparam error [Integer] Other error code
+     # @yieldparam partitions [TopicPartitionList] Set of partitions to assign
+     #   or revoke.
+     def set_rebalance_cb(&block)
+       @callbacks[:rebalance_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_rebalance_cb(self, &block)
+     end
+     alias rebalance_cb= set_rebalance_cb
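Because installing this callback disables librdkafka's automatic handling, the application must apply the assignment itself. A sketch, assuming the yielded client exposes an #assign wrapper around rd_kafka_assign and that the error arrives as a symbol from the response error enum in ffi.rb:

    config.set_rebalance_cb do |client, error, partitions, opaque|
      case error
      when :assign_partitions
        client.assign(partitions)
      when :revoke_partitions
        client.assign(nil)
      end
    end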
+
+     # Set offset commit callback which is called when offsets are committed by
+     # the consumer.
+     #
+     # @note Consumer only
+     #
+     # @yield [client, error, offsets]
+     # @yieldparam client [Client]
+     # @yieldparam error [RD_KAFKA_RESP_ERR__NO_OFFSET] No partitions had valid
+     #   offsets to commit. This should not be considered an error.
+     # @yieldparam error [Integer] Error committing the offsets
+     # @yieldparam offsets [TopicPartitionList] Committed offsets
+     def set_offset_commit_cb(&block)
+       @callbacks[:offset_commit_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_offset_commit_cb(self, &block)
+     end
+     alias offset_commit_cb= set_offset_commit_cb
+
+     # Set error callback that is used by librdkafka to signal warnings and
+     # errors back to the application. These errors should generally be
+     # considered informational and non-permanent; librdkafka will try to
+     # recover from all types of errors.
+     #
+     # @yield [client, error, reason, opaque]
+     # @yieldparam client [Client]
+     # @yieldparam error [RD_KAFKA_RESP_ERR__FATAL] Fatal error occurred
+     # @yieldparam error [Integer] Other error occurred
+     # @yieldparam reason [String]
+     # @yieldparam opaque [::FFI::Pointer]
+     def set_error_cb(&block)
+       @callbacks[:error_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_error_cb(self, &block)
+     end
+     alias error_cb= set_error_cb
+
+     # Set throttle callback that is used to forward broker throttle times to
+     # the application.
+     #
+     # @yield [client, broker_name, broker_id, throttle_ms, opaque]
+     # @yieldparam client [Client]
+     # @yieldparam broker_name [String]
+     # @yieldparam broker_id [Integer]
+     # @yieldparam throttle_ms [Integer] Throttle time in milliseconds
+     # @yieldparam opaque [::FFI::Pointer]
+     def set_throttle_cb(&block)
+       @callbacks[:throttle_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_throttle_cb(self, &block)
+     end
+     alias throttle_cb= set_throttle_cb
+
+     # Set the logging callback. By default librdkafka will print to stderr (or
+     # syslog if configured).
+     #
+     # @note The application MUST NOT call any librdkafka APIs or do any
+     #   prolonged work in a log_cb unless logs have been forwarded to a queue
+     #   via set_log_queue.
+     #
+     # @yield [client, level, facility, message]
+     # @yieldparam client [Client]
+     # @yieldparam level [Integer] Log level
+     # @yieldparam facility [String] Log facility
+     # @yieldparam message [String] Log message
+     def set_log_cb(&block)
+       @callbacks[:log_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_log_cb(self, &block)
+     end
+     alias log_cb= set_log_cb
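A sketch that forwards librdkafka's logs to a standard Ruby Logger, keeping the callback body short per the note above (the syslog-style numeric level is included as-is rather than mapped to Logger severities):

    require "logger"

    logger = Logger.new($stdout)

    config.set_log_cb do |client, level, facility, message|
      logger.info("rdkafka [#{facility}] level=#{level} #{message}")
    end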
+
+     # Set statistics callback that is triggered every `statistics.interval.ms`
+     # with a JSON document containing connection statistics.
+     #
+     # @see https://github.com/edenhill/librdkafka/blob/master/STATISTICS.md
+     #
+     # @yield [client, json, json_len, opaque]
+     # @yieldparam client [Client]
+     # @yieldparam json [String] Statistics payload
+     # @yieldparam json_len [Integer] Length of the JSON payload
+     # @yieldparam opaque [::FFI::Pointer]
+     def set_stats_cb(&block)
+       @callbacks[:stats_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_stats_cb(self, &block)
+     end
+     alias stats_cb= set_stats_cb
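A sketch of consuming the statistics payload with the standard library's JSON parser; `statistics.interval.ms` must be set for the callback to fire:

    require "json"

    config.set("statistics.interval.ms", "5000")

    config.set_stats_cb do |client, json, json_len, opaque|
      stats = JSON.parse(json)
      puts "rdkafka stats for #{stats["name"]}"
    end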
+
+     # Set the callback for handling OAUTHBEARER token refresh requests when
+     # `sasl.mechanism` is set to OAUTHBEARER.
+     def set_oauthbearer_token_refresh_cb(&block)
+       @callbacks[:oauthbearer_token_refresh_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_oauthbearer_token_refresh_cb(self, &block)
+     end
+     alias oauthbearer_token_refresh_cb= set_oauthbearer_token_refresh_cb
+
+     # Set the callback used to create sockets, replacing librdkafka's default.
+     def set_socket_cb(&block)
+       @callbacks[:socket_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_socket_cb(self, &block)
+     end
+     alias socket_cb= set_socket_cb
+
+     # Set the callback used to connect a socket to a broker address, replacing
+     # librdkafka's default.
+     def set_connect_cb(&block)
+       @callbacks[:connect_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_connect_cb(self, &block)
+     end
+     alias connect_cb= set_connect_cb
+
+     # Set the callback used to close sockets opened by the socket callback.
+     def set_closesocket_cb(&block)
+       @callbacks[:closesocket_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_closesocket_cb(self, &block)
+     end
+     alias closesocket_cb= set_closesocket_cb
+
+     # Set the callback used to open files. Not available on Windows.
+     def set_open_cb(&block)
+       if ::FFI::Platform.windows?
+         raise Error, "set_open_cb is not available on Windows"
+       end
+
+       @callbacks[:open_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_open_cb(self, &block)
+     end
+     alias open_cb= set_open_cb
+
+     # Set the callback used to verify the broker's SSL certificate chain.
+     def set_ssl_cert_verify_cb(&block)
+       @callbacks[:ssl_cert_verify_cb] = block
+       ::Kafka::FFI.rd_kafka_conf_set_ssl_cert_verify_cb(self, &block)
+     end
+     alias ssl_cert_verify_cb= set_ssl_cert_verify_cb
+
+     # rubocop:enable Naming/AccessorMethodName
+
+     # Set the certificate for secure communication with the Kafka cluster.
+     #
+     # @note The private key may require a password which must be specified with
+     #   the `ssl.key.password` property prior to calling this function.
+     #
+     # @note Private and public keys, in PEM format, can be set with the
+     #   `ssl.key.pem` and `ssl.certificate.pem` configuration properties.
+     #
+     # @param cert_type [:public, :private, :ca]
+     # @param cert_enc [:pkcs12, :der, :pem]
+     # @param certificate [String] Encoded certificate
+     # @param certificate [nil] Clear the stored certificate
+     #
+     # @raise [ConfigError] Certificate was not properly encoded or librdkafka
+     #   was not compiled with SSL/TLS.
+     def set_ssl_cert(cert_type, cert_enc, certificate)
+       error = ::FFI::MemoryPointer.new(:char, 512)
+
+       # certificate may be nil to clear a previously set certificate.
+       size = certificate.nil? ? 0 : certificate.bytesize
+       err = ::Kafka::FFI.rd_kafka_conf_set_ssl_cert(self, cert_type, cert_enc, certificate, size, error, error.size)
+       if err != :ok
+         # Property name isn't exact since this appears to have some routing
+         # based on cert type to determine the exact key.
+         raise ConfigError.new("ssl_cert", error.read_string)
+       end
+
+       nil
+     ensure
+       error.free if error
+     end
+     alias ssl_cert= set_ssl_cert
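A sketch of installing a PEM-encoded client certificate (the file path is hypothetical):

    cert = File.read("/etc/kafka/client-cert.pem")
    config.set_ssl_cert(:public, :pem, cert)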
+
+     # Free all resources used by the config.
+     #
+     # @note Never call #destroy on a Config that has been passed to
+     #   Kafka::FFI.rd_kafka_new since the handle will take ownership of the
+     #   config.
+     def destroy
+       if !pointer.null?
+         ::Kafka::FFI.rd_kafka_conf_destroy(self)
+       end
+     end
+   end
+ end
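The ownership note above matters in practice: a Config only needs an explicit #destroy when it was never handed off to a client. A sketch:

    config = Kafka::FFI::Config.new
    config.set("bootstrap.servers", "localhost:9092")

    # If the config is given to Kafka::FFI.rd_kafka_new, the client handle
    # takes ownership and the application must NOT destroy it. A config that
    # is abandoned before that point should be released explicitly:
    config.destroy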