kafka 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56) hide show
  1. checksums.yaml +7 -0
  2. data/.gitignore +14 -0
  3. data/.rubocop.yml +210 -0
  4. data/.travis.yml +45 -0
  5. data/CHANGELOG.md +3 -0
  6. data/CODE_OF_CONDUCT.md +74 -0
  7. data/Gemfile +5 -0
  8. data/LICENSE.txt +21 -0
  9. data/README.md +182 -0
  10. data/Rakefile +69 -0
  11. data/examples/consumer.rb +55 -0
  12. data/examples/producer.rb +46 -0
  13. data/ext/Rakefile +69 -0
  14. data/kafka.gemspec +39 -0
  15. data/lib/kafka/admin.rb +141 -0
  16. data/lib/kafka/config.rb +145 -0
  17. data/lib/kafka/consumer.rb +87 -0
  18. data/lib/kafka/error.rb +44 -0
  19. data/lib/kafka/ffi/admin/admin_options.rb +121 -0
  20. data/lib/kafka/ffi/admin/config_entry.rb +97 -0
  21. data/lib/kafka/ffi/admin/config_resource.rb +101 -0
  22. data/lib/kafka/ffi/admin/delete_topic.rb +19 -0
  23. data/lib/kafka/ffi/admin/new_partitions.rb +77 -0
  24. data/lib/kafka/ffi/admin/new_topic.rb +91 -0
  25. data/lib/kafka/ffi/admin/result.rb +66 -0
  26. data/lib/kafka/ffi/admin/topic_result.rb +32 -0
  27. data/lib/kafka/ffi/admin.rb +16 -0
  28. data/lib/kafka/ffi/broker_metadata.rb +32 -0
  29. data/lib/kafka/ffi/client.rb +640 -0
  30. data/lib/kafka/ffi/config.rb +382 -0
  31. data/lib/kafka/ffi/consumer.rb +342 -0
  32. data/lib/kafka/ffi/error.rb +25 -0
  33. data/lib/kafka/ffi/event.rb +215 -0
  34. data/lib/kafka/ffi/group_info.rb +75 -0
  35. data/lib/kafka/ffi/group_list.rb +27 -0
  36. data/lib/kafka/ffi/group_member_info.rb +52 -0
  37. data/lib/kafka/ffi/message/header.rb +205 -0
  38. data/lib/kafka/ffi/message.rb +205 -0
  39. data/lib/kafka/ffi/metadata.rb +58 -0
  40. data/lib/kafka/ffi/opaque.rb +81 -0
  41. data/lib/kafka/ffi/opaque_pointer.rb +73 -0
  42. data/lib/kafka/ffi/partition_metadata.rb +61 -0
  43. data/lib/kafka/ffi/producer.rb +144 -0
  44. data/lib/kafka/ffi/queue.rb +65 -0
  45. data/lib/kafka/ffi/topic.rb +32 -0
  46. data/lib/kafka/ffi/topic_config.rb +126 -0
  47. data/lib/kafka/ffi/topic_metadata.rb +42 -0
  48. data/lib/kafka/ffi/topic_partition.rb +43 -0
  49. data/lib/kafka/ffi/topic_partition_list.rb +167 -0
  50. data/lib/kafka/ffi.rb +624 -0
  51. data/lib/kafka/poller.rb +28 -0
  52. data/lib/kafka/producer/delivery_report.rb +120 -0
  53. data/lib/kafka/producer.rb +127 -0
  54. data/lib/kafka/version.rb +8 -0
  55. data/lib/kafka.rb +11 -0
  56. metadata +159 -0
@@ -0,0 +1,120 @@
1
# frozen_string_literal: true

module Kafka
  class Producer
    # DeliveryReport tracks the asynchronous success or failure of delivering
    # a single produced message to the cluster. It is created when the message
    # is enqueued and filled in by #done once librdkafka reports back.
    class DeliveryReport
      # @return [nil] Delivery was successful or report has not been received
      #   yet.
      # @return [Kafka::ResponseError] Error delivering the Message.
      attr_reader :error

      # @return [nil] Report has not been received yet
      # @return [String] Name of the topic Message was delivered to.
      attr_reader :topic

      # @return [nil] Report has not been received yet
      # @return [Integer] Offset for the message on partition.
      attr_reader :offset

      # @return [nil] Report has not been received yet
      # @return [Integer] Partition the message was delivered to.
      attr_reader :partition

      # Initializes a new DeliveryReport
      #
      # @param block [Proc] Callback to call with the DeliveryReport when it is
      #   received from the cluster.
      def initialize(&block)
        @mutex = Mutex.new
        @waiter = ConditionVariable.new

        @error = nil
        @topic = nil
        @offset = nil
        @partition = nil
        @callback = block

        # Will be set to true by a call to #done. Fast out for any callers to
        # #wait that may come in after done has already been called.
        @done = false
      end

      # Returns true when the report has been received back from the kafka
      # cluster.
      #
      # @return [Boolean] True when the server has reported back on the
      #   delivery.
      def received?
        @done
      end

      # @return [Boolean] Is the report for an error?
      def error?
        # BUG FIX: previously returned `error.nil?`, which inverted the
        # meaning — a successful delivery reported as an error and an actual
        # error reported as success.
        !error.nil?
      end

      # Returns if the delivery was successful
      #
      # @return [Boolean] True when the report was delivered to the cluster
      #   successfully.
      def successful?
        !error
      end

      # @private
      #
      # Set the response based on the message and notify anyone waiting on the
      # result.
      #
      # @param message [Kafka::FFI::Message]
      def done(message)
        @mutex.synchronize do
          @error = message.error

          @offset = message.offset
          @topic = message.topic
          @partition = message.partition

          @done = true
          @waiter.broadcast

          # NOTE: the original implementation removed @mutex and @waiter here
          # to release them early. That caused a race: a thread entering #wait
          # after the fast-out check but before this broadcast would call
          # `nil.synchronize` and crash with NoMethodError. The two small
          # objects are now kept for the life of the report.
        end

        # Invoke the user callback outside the lock so a slow callback cannot
        # block waiters.
        if @callback
          @callback.call(self)
        end
      end

      # Wait for a report to be received for the delivery from the cluster.
      #
      # @param timeout [Integer, nil] Maximum time to wait in milliseconds, or
      #   nil to wait indefinitely.
      #
      # @raise [Kafka::ResponseError<RD_KAFKA_RESP_ERR__TIMED_OUT>] No report
      #   received before timeout.
      #
      # @return [nil]
      def wait(timeout: 5000)
        # Fast out since the delivery report has already been reported back
        # from the cluster.
        return if @done

        # Convert from milliseconds to seconds to match Ruby's API. Takes
        # milliseconds to be consistent with librdkafka APIs.
        if timeout
          timeout /= 1000.0
        end

        @mutex.synchronize do
          # Re-check under the lock: #done may have completed (and broadcast)
          # between the fast-out above and acquiring the mutex. Without this
          # check the broadcast would be missed and the wait would time out
          # spuriously.
          break if @done

          @waiter.wait(@mutex, timeout)

          # No report was received for the message before we timed out.
          if !@done
            raise ::Kafka::ResponseError, ::Kafka::FFI::RD_KAFKA_RESP_ERR__TIMED_OUT
          end
        end

        nil
      end
    end
  end
end
@@ -0,0 +1,127 @@
1
# frozen_string_literal: true

require "kafka/poller"

module Kafka
  # Producer is the high level interface for publishing messages to a Kafka
  # cluster. It wraps a Kafka::FFI::Producer, registers the delivery report
  # callback, and runs a background Poller so callbacks are dispatched.
  class Producer
    require "kafka/producer/delivery_report"

    # Returns the backing Kafka::FFI::Producer.
    #
    # @DANGER Using the backing Producer means being aware of memory management
    #   and could leave the producer in a bad state. Make sure you know what
    #   you're doing.
    #
    # @return [Kafka::FFI::Producer]
    attr_reader :client

    # Initialize a new Producer for the configured cluster.
    #
    # @param config [Config]
    def initialize(config)
      # Duplicate the config so registering the callback below does not
      # mutate the caller's object.
      config = config.dup

      # Configure callbacks
      config.on_delivery_report(&method(:on_delivery_report))

      @client = ::Kafka::FFI::Producer.new(config)

      # Periodically call poll on the client to ensure callbacks are fired.
      @poller = Poller.new(@client)
    end

    # Produce and publish a message to the Kafka cluster.
    #
    # @param topic [String] Topic to publish the message to
    # @param payload [String] Message's payload
    # @param key [String] Optional partitioning key Kafka can use to determine
    #   the correct partition.
    # @param partition [-1, nil] Kafka will determine the partition
    #   automatically based on the `partitioner` config option.
    # @param partition [Integer] Specify partition to publish the message to.
    # @param headers [Hash{String => String}] Set of headers to attach to the
    #   message.
    # @param timestamp [nil] Let Kafka automatically assign Message timestamp
    # @param timestamp [Time] Timestamp for the message
    # @param timestamp [Integer] Timestamp as milliseconds since unix epoch
    #
    # @param block [Proc] Optional asynchronous callback to be called when the
    #   delivery status of the message is reported back from librdkafka. The
    #   callback MUST avoid expensive or long running processing as that may
    #   cause issues inside librdkafka.
    #
    # @yield [report] Called asynchronously when the report is received from
    #   Kafka on the success or failure of the delivery.
    # @yieldparam report [DeliveryReport] Delivery status of the message.
    #
    # @return [DeliveryReport] Report of the success or failure of the
    #   delivery. Call #wait to block until the report is received.
    def produce(topic, payload, key: nil, partition: nil, headers: nil, timestamp: nil, &block)
      report = DeliveryReport.new(&block)

      # The Opaque is used as a key for looking up the DeliveryReport in the
      # on_delivery_report callback. Holding a reference to it here also keeps
      # it from being garbage collected before the callback fires; it is freed
      # in the callback (or below on error).
      opaque = Kafka::FFI::Opaque.new(report)

      @client.produce(topic, payload, key: key, partition: partition, headers: headers, timestamp: timestamp, opaque: opaque)

      report
    rescue
      # BUG FIX: only release the opaque when it was actually allocated. If
      # the error was raised before the assignment above (e.g. inside
      # DeliveryReport.new), `opaque` is nil and calling #free on it would
      # raise NoMethodError, masking the original exception.
      opaque.free if opaque

      raise
    end

    # Wait until all outstanding produce requests are completed.
    #
    # @param timeout [Integer] Maximum time to wait in milliseconds.
    #
    # @raise [Kafka::ResponseError] Timeout was reached before all
    #   outstanding requests were completed.
    def flush(timeout: 1000)
      @client.flush(timeout: timeout)
    end

    # Gracefully shutdown the Producer, flushing any pending deliveries, and
    # finally releasing any memory back to the system.
    #
    # @note Once #close is called it is no longer safe to call any other
    #   method on the Producer.
    #
    # @param timeout [Integer] Maximum time to wait in milliseconds for
    #   messages to be flushed.
    def close(timeout: 30000)
      # @see https://github.com/edenhill/librdkafka/blob/master/INTRODUCTION.md#producer
      @poller.stop

      @client.flush(timeout: timeout)
      @client.poll

      # Client handles destroying cached Topics
      @client.destroy
    end

    private

    # Callback invoked by librdkafka (via the FFI layer) when a delivery
    # report is available for a produced message. Looks up the DeliveryReport
    # through the message's opaque and marks it done.
    #
    # @param client [Kafka::FFI::Producer]
    # @param message [Kafka::FFI::Message]
    # @param opaque [FFI::Pointer]
    def on_delivery_report(_client, message, _opaque)
      op = message.opaque
      return if op.nil?

      begin
        report = op.value
        report.done(message)
      ensure
        # Release the Opaque allocated in #produce now that the report has
        # been delivered.
        op.free
      end
    end
  end
end
@@ -0,0 +1,8 @@
1
# frozen_string_literal: true

module Kafka
  # Version of the kafka gem itself.
  VERSION = "0.5.0"

  # Pinned version of the librdkafka C library this gem builds against.
  LIBRDKAFKA_VERSION = "1.3.0"

  # Checksum of the librdkafka release tarball (64 hex chars, so presumably
  # SHA-256) — NOTE(review): likely used by ext/Rakefile to verify the
  # download; confirm against the build script.
  LIBRDKAFKA_CHECKSUM = "465cab533ebc5b9ca8d97c90ab69e0093460665ebaf38623209cf343653c76d2"
end
data/lib/kafka.rb ADDED
@@ -0,0 +1,11 @@
1
# frozen_string_literal: true

# Top-level entry point for the gem: requiring "kafka" loads the FFI bindings
# and the high level Admin/Consumer/Producer APIs.
module Kafka
  # kafka/ffi is required first; the files below appear to build on the FFI
  # layer, so keep this ordering.
  require "kafka/ffi"
  require "kafka/admin"
  require "kafka/error"
  require "kafka/config"
  require "kafka/version"
  require "kafka/consumer"
  require "kafka/producer"
end
metadata ADDED
@@ -0,0 +1,159 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: kafka
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.5.0
5
+ platform: ruby
6
+ authors:
7
+ - Chris Gaffney
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2020-01-21 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: ffi
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - ">="
18
+ - !ruby/object:Gem::Version
19
+ version: '0'
20
+ type: :runtime
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - ">="
25
+ - !ruby/object:Gem::Version
26
+ version: '0'
27
+ - !ruby/object:Gem::Dependency
28
+ name: mini_portile2
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - ">="
32
+ - !ruby/object:Gem::Version
33
+ version: '0'
34
+ type: :runtime
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - ">="
39
+ - !ruby/object:Gem::Version
40
+ version: '0'
41
+ - !ruby/object:Gem::Dependency
42
+ name: rake
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - ">="
46
+ - !ruby/object:Gem::Version
47
+ version: '0'
48
+ type: :development
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - ">="
53
+ - !ruby/object:Gem::Version
54
+ version: '0'
55
+ - !ruby/object:Gem::Dependency
56
+ name: rspec
57
+ requirement: !ruby/object:Gem::Requirement
58
+ requirements:
59
+ - - ">="
60
+ - !ruby/object:Gem::Version
61
+ version: '0'
62
+ type: :development
63
+ prerelease: false
64
+ version_requirements: !ruby/object:Gem::Requirement
65
+ requirements:
66
+ - - ">="
67
+ - !ruby/object:Gem::Version
68
+ version: '0'
69
+ description: |
70
+ Kafka provides binding to librdkafka as well as a default producer and
71
+ consumer implementation.
72
+ email:
73
+ - gaffneyc@gmail.com
74
+ executables: []
75
+ extensions:
76
+ - ext/Rakefile
77
+ extra_rdoc_files: []
78
+ files:
79
+ - ".gitignore"
80
+ - ".rubocop.yml"
81
+ - ".travis.yml"
82
+ - CHANGELOG.md
83
+ - CODE_OF_CONDUCT.md
84
+ - Gemfile
85
+ - LICENSE.txt
86
+ - README.md
87
+ - Rakefile
88
+ - examples/consumer.rb
89
+ - examples/producer.rb
90
+ - ext/Rakefile
91
+ - kafka.gemspec
92
+ - lib/kafka.rb
93
+ - lib/kafka/admin.rb
94
+ - lib/kafka/config.rb
95
+ - lib/kafka/consumer.rb
96
+ - lib/kafka/error.rb
97
+ - lib/kafka/ffi.rb
98
+ - lib/kafka/ffi/admin.rb
99
+ - lib/kafka/ffi/admin/admin_options.rb
100
+ - lib/kafka/ffi/admin/config_entry.rb
101
+ - lib/kafka/ffi/admin/config_resource.rb
102
+ - lib/kafka/ffi/admin/delete_topic.rb
103
+ - lib/kafka/ffi/admin/new_partitions.rb
104
+ - lib/kafka/ffi/admin/new_topic.rb
105
+ - lib/kafka/ffi/admin/result.rb
106
+ - lib/kafka/ffi/admin/topic_result.rb
107
+ - lib/kafka/ffi/broker_metadata.rb
108
+ - lib/kafka/ffi/client.rb
109
+ - lib/kafka/ffi/config.rb
110
+ - lib/kafka/ffi/consumer.rb
111
+ - lib/kafka/ffi/error.rb
112
+ - lib/kafka/ffi/event.rb
113
+ - lib/kafka/ffi/group_info.rb
114
+ - lib/kafka/ffi/group_list.rb
115
+ - lib/kafka/ffi/group_member_info.rb
116
+ - lib/kafka/ffi/message.rb
117
+ - lib/kafka/ffi/message/header.rb
118
+ - lib/kafka/ffi/metadata.rb
119
+ - lib/kafka/ffi/opaque.rb
120
+ - lib/kafka/ffi/opaque_pointer.rb
121
+ - lib/kafka/ffi/partition_metadata.rb
122
+ - lib/kafka/ffi/producer.rb
123
+ - lib/kafka/ffi/queue.rb
124
+ - lib/kafka/ffi/topic.rb
125
+ - lib/kafka/ffi/topic_config.rb
126
+ - lib/kafka/ffi/topic_metadata.rb
127
+ - lib/kafka/ffi/topic_partition.rb
128
+ - lib/kafka/ffi/topic_partition_list.rb
129
+ - lib/kafka/poller.rb
130
+ - lib/kafka/producer.rb
131
+ - lib/kafka/producer/delivery_report.rb
132
+ - lib/kafka/version.rb
133
+ homepage: http://github.com/deadmanssnitch/kafka
134
+ licenses:
135
+ - MIT
136
+ metadata:
137
+ homepage_uri: http://github.com/deadmanssnitch/kafka
138
+ source_code_uri: http://github.com/deadmanssnitch/kafka
139
+ changelog_uri: https://github.com/deadmanssnitch/kafka/blob/master/CHANGELOG.md
140
+ post_install_message:
141
+ rdoc_options: []
142
+ require_paths:
143
+ - lib
144
+ required_ruby_version: !ruby/object:Gem::Requirement
145
+ requirements:
146
+ - - ">="
147
+ - !ruby/object:Gem::Version
148
+ version: 2.5.0
149
+ required_rubygems_version: !ruby/object:Gem::Requirement
150
+ requirements:
151
+ - - ">="
152
+ - !ruby/object:Gem::Version
153
+ version: '0'
154
+ requirements: []
155
+ rubygems_version: 3.1.2
156
+ signing_key:
157
+ specification_version: 4
158
+ summary: Kafka client bindings to librdkafka
159
+ test_files: []