kafka 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. checksums.yaml +7 -0
  2. data/.gitignore +14 -0
  3. data/.rubocop.yml +210 -0
  4. data/.travis.yml +45 -0
  5. data/CHANGELOG.md +3 -0
  6. data/CODE_OF_CONDUCT.md +74 -0
  7. data/Gemfile +5 -0
  8. data/LICENSE.txt +21 -0
  9. data/README.md +182 -0
  10. data/Rakefile +69 -0
  11. data/examples/consumer.rb +55 -0
  12. data/examples/producer.rb +46 -0
  13. data/ext/Rakefile +69 -0
  14. data/kafka.gemspec +39 -0
  15. data/lib/kafka/admin.rb +141 -0
  16. data/lib/kafka/config.rb +145 -0
  17. data/lib/kafka/consumer.rb +87 -0
  18. data/lib/kafka/error.rb +44 -0
  19. data/lib/kafka/ffi/admin/admin_options.rb +121 -0
  20. data/lib/kafka/ffi/admin/config_entry.rb +97 -0
  21. data/lib/kafka/ffi/admin/config_resource.rb +101 -0
  22. data/lib/kafka/ffi/admin/delete_topic.rb +19 -0
  23. data/lib/kafka/ffi/admin/new_partitions.rb +77 -0
  24. data/lib/kafka/ffi/admin/new_topic.rb +91 -0
  25. data/lib/kafka/ffi/admin/result.rb +66 -0
  26. data/lib/kafka/ffi/admin/topic_result.rb +32 -0
  27. data/lib/kafka/ffi/admin.rb +16 -0
  28. data/lib/kafka/ffi/broker_metadata.rb +32 -0
  29. data/lib/kafka/ffi/client.rb +640 -0
  30. data/lib/kafka/ffi/config.rb +382 -0
  31. data/lib/kafka/ffi/consumer.rb +342 -0
  32. data/lib/kafka/ffi/error.rb +25 -0
  33. data/lib/kafka/ffi/event.rb +215 -0
  34. data/lib/kafka/ffi/group_info.rb +75 -0
  35. data/lib/kafka/ffi/group_list.rb +27 -0
  36. data/lib/kafka/ffi/group_member_info.rb +52 -0
  37. data/lib/kafka/ffi/message/header.rb +205 -0
  38. data/lib/kafka/ffi/message.rb +205 -0
  39. data/lib/kafka/ffi/metadata.rb +58 -0
  40. data/lib/kafka/ffi/opaque.rb +81 -0
  41. data/lib/kafka/ffi/opaque_pointer.rb +73 -0
  42. data/lib/kafka/ffi/partition_metadata.rb +61 -0
  43. data/lib/kafka/ffi/producer.rb +144 -0
  44. data/lib/kafka/ffi/queue.rb +65 -0
  45. data/lib/kafka/ffi/topic.rb +32 -0
  46. data/lib/kafka/ffi/topic_config.rb +126 -0
  47. data/lib/kafka/ffi/topic_metadata.rb +42 -0
  48. data/lib/kafka/ffi/topic_partition.rb +43 -0
  49. data/lib/kafka/ffi/topic_partition_list.rb +167 -0
  50. data/lib/kafka/ffi.rb +624 -0
  51. data/lib/kafka/poller.rb +28 -0
  52. data/lib/kafka/producer/delivery_report.rb +120 -0
  53. data/lib/kafka/producer.rb +127 -0
  54. data/lib/kafka/version.rb +8 -0
  55. data/lib/kafka.rb +11 -0
  56. metadata +159 -0
@@ -0,0 +1,52 @@
+ # frozen_string_literal: true
+
+ require "ffi"
+
+ module Kafka::FFI
+   class GroupMemberInfo < ::FFI::Struct
+     layout(
+       :member_id, :string,
+       :client_id, :string,
+       :client_host, :string,
+       :member_metadata, :pointer,
+       :member_metadata_size, :int,
+       :member_assignment, :pointer,
+       :member_assignment_size, :int
+     )
+
+     # Returns the broker-generated member id for the consumer.
+     #
+     # @return [String] Member ID
+     def member_id
+       self[:member_id]
+     end
+
+     # Returns the consumer's client.id config setting.
+     #
+     # @return [String] Client ID
+     def client_id
+       self[:client_id]
+     end
+
+     # Returns the hostname of the consumer.
+     #
+     # @return [String] Consumer's hostname
+     def client_host
+       self[:client_host]
+     end
+
+     # Returns the binary metadata for the consumer.
+     #
+     # @return [String] Consumer metadata
+     def member_metadata
+       self[:member_metadata].read_string(self[:member_metadata_size])
+     end
+
+     # Returns the binary assignment data for the consumer.
+     #
+     # @return [String] Assignments
+     def member_assignment
+       self[:member_assignment].read_string(self[:member_assignment_size])
+     end
+   end
+ end
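GroupMemberInfo is a read-only view over memory owned by librdkafka; instances are normally reached through a group listing rather than constructed directly. A minimal sketch of reading the fields, assuming `members` is an array of GroupMemberInfo structs obtained from a group listing (the retrieval call itself is not shown in this file and is an assumption):

  # Sketch: print the identity of each member of a consumer group. Assumes
  # `members` holds Kafka::FFI::GroupMemberInfo structs from a group listing.
  members.each do |m|
    puts "#{m.member_id} (#{m.client_id}) on #{m.client_host}"

    # member_metadata and member_assignment return raw bytes in the Kafka
    # protocol's binary format; decoding them requires a protocol parser.
    puts "  assignment: #{m.member_assignment.bytesize} bytes"
  end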
@@ -0,0 +1,205 @@
+ # frozen_string_literal: true
+
+ require "ffi"
+ require "kafka/ffi/opaque_pointer"
+
+ module Kafka::FFI
+   class Message::Header < OpaquePointer
+     def self.new(count = 0)
+       if count.is_a?(::FFI::Pointer)
+         return super(count)
+       end
+
+       ::Kafka::FFI.rd_kafka_headers_new(count)
+     end
+
+     # Count returns the number of headers in the set.
+     #
+     # @return [Integer] Number of headers
+     def count
+       ::Kafka::FFI.rd_kafka_header_cnt(self)
+     end
+     alias size count
+     alias length count
+
+     # Add a header with the given name and value.
+     #
+     # @param name [String] Header key name
+     # @param value [#to_s, nil] Header key value
+     #
+     # @raise [Kafka::ResponseError] Error that occurred adding the header
+     def add(name, value)
+       name = name.to_s
+
+       value_size = 0
+       if value
+         value = value.to_s
+         value_size = value.bytesize
+       end
+
+       # Pass the name's bytesize so multibyte names use the correct length.
+       err = ::Kafka::FFI.rd_kafka_header_add(self, name, name.bytesize, value, value_size)
+       if err != :ok
+         raise ::Kafka::ResponseError, err
+       end
+
+       nil
+     end
+
+     # Make a copy of the headers list.
+     #
+     # @return [Header] Copy of the headers
+     def copy
+       ::Kafka::FFI.rd_kafka_headers_copy(self)
+     end
+
+     # Remove all headers with the given name.
+     #
+     # @param name [String] Header key name to remove
+     #
+     # @raise [Kafka::ResponseError] Error that occurred removing the header
+     # @raise [Kafka::ResponseError<RD_KAFKA_RESP_ERR__READ_ONLY>] Headers are
+     #   read only.
+     def remove(name)
+       name = name.to_s
+
+       err = ::Kafka::FFI.rd_kafka_header_remove(self, name)
+       case err
+       when :ok
+         nil
+       when ::Kafka::FFI::RD_KAFKA_RESP_ERR__NOENT
+         # Header field does not exist. Just return nil since the effect (key
+         # doesn't exist) is the same.
+         nil
+       else
+         raise ::Kafka::ResponseError, err
+       end
+     end
+
+     # Retrieve all headers that match the given name.
+     #
+     # @param name [String] Header key name
+     #
+     # @raise [Kafka::ResponseError] Error that occurred retrieving the header
+     #   values
+     #
+     # @return [Array<String, nil>] List of values for the header
+     def get(name)
+       name = name.to_s
+
+       idx = 0
+       values = []
+
+       value = ::FFI::MemoryPointer.new(:pointer)
+       size = ::FFI::MemoryPointer.new(:pointer)
+
+       loop do
+         err = ::Kafka::FFI.rd_kafka_header_get(self, idx, name, value, size)
+
+         case err
+         when :ok
+           # Read the returned value and add it to the result list.
+           idx += 1
+           ptr = value.read_pointer
+
+           values << (ptr.null? ? nil : ptr.read_string(size.read(:size_t)))
+         when ::Kafka::FFI::RD_KAFKA_RESP_ERR__NOENT
+           # Reached the end of the list of values so break and return the set
+           # of found values.
+           break
+         else
+           raise ::Kafka::ResponseError, err
+         end
+       end
+
+       values
+     ensure
+       value.free if value
+       size.free if size
+     end
+
+     # rubocop:disable Naming/AccessorMethodName
+
+     # Retrieve all of the headers and their values.
+     #
+     # @raise [Kafka::ResponseError] Error occurred retrieving the headers
+     #
+     # @return [Hash<String, Array<String>>] Set of header keys and their values
+     # @return [Hash{}] Headers are empty
+     def get_all
+       name = ::FFI::MemoryPointer.new(:pointer)
+       value = ::FFI::MemoryPointer.new(:pointer)
+       size = ::FFI::MemoryPointer.new(:pointer)
+
+       idx = 0
+       result = {}
+
+       loop do
+         err = ::Kafka::FFI.rd_kafka_header_get_all(self, idx, name, value, size)
+
+         case err
+         when :ok
+           # Read the returned name and value and add them to the result set.
+           idx += 1
+
+           key = name.read_pointer.read_string
+           val = value.read_pointer
+
+           result[key] ||= []
+           result[key] << (val.null? ? nil : val.read_string(size.read(:size_t)))
+         when ::Kafka::FFI::RD_KAFKA_RESP_ERR__NOENT
+           # Reached the end of the list of headers so break and return the
+           # result.
+           break
+         else
+           raise ::Kafka::ResponseError, err
+         end
+       end
+
+       result
+     ensure
+       name.free if name
+       value.free if value
+       size.free if size
+     end
+     alias to_hash get_all
+     alias to_h get_all
+     # rubocop:enable Naming/AccessorMethodName
+
+     # Find the last header in the list that matches the given name.
+     #
+     # @param name [String] Header key name
+     #
+     # @raise [Kafka::ResponseError] Error that occurred retrieving the header
+     #   value
+     #
+     # @return [String] Value of the last matching header with name
+     # @return [nil] No header with that name exists
+     def get_last(name)
+       name = name.to_s
+       value = ::FFI::MemoryPointer.new(:pointer)
+       size = ::FFI::MemoryPointer.new(:pointer)
+
+       err = ::Kafka::FFI.rd_kafka_header_get_last(self, name, value, size)
+       if err != :ok
+         # No header with that name exists so just return nil.
+         if err == ::Kafka::FFI::RD_KAFKA_RESP_ERR__NOENT
+           return nil
+         end
+
+         raise ::Kafka::ResponseError, err
+       end
+
+       ptr = value.read_pointer
+       ptr.null? ? nil : ptr.read_string(size.read(:size_t))
+     ensure
+       value.free
+       size.free
+     end
+
+     def destroy
+       if !pointer.null?
+         ::Kafka::FFI.rd_kafka_headers_destroy(self)
+       end
+     end
+   end
+ end
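Taken together, Message::Header is a small dictionary-like wrapper over librdkafka's header list. A usage sketch based only on the methods above (the return values shown follow from the code, not from verified output):

  headers = Kafka::FFI::Message::Header.new(3)
  headers.add("trace-id", "abc123")
  headers.add("attempt", 2)      # non-String values are coerced with #to_s
  headers.add("blank", nil)      # nil values are allowed

  headers.get("trace-id")        # => ["abc123"]
  headers.get_last("missing")    # => nil
  headers.to_h                   # => {"trace-id"=>["abc123"], "attempt"=>["2"], "blank"=>[nil]}

  # Only destroy a header list the application owns; headers attached to a
  # Message are destroyed along with the Message.
  headers.destroy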
@@ -0,0 +1,205 @@
+ # frozen_string_literal: true
+
+ module Kafka::FFI
+   class Message < ::FFI::Struct
+     require "kafka/ffi/message/header"
+
+     layout(
+       :err, :error_code,
+       :rkt, Topic,
+       :partition, :int32,
+       :payload, :pointer,
+       :len, :size_t,
+       :key, :pointer,
+       :key_len, :size_t,
+       :offset, :int64,
+       :private, Opaque
+     )
+
+     # Retrieve the error associated with this message. For consumers this is
+     # used to report per-topic+partition consumer errors. For producers this
+     # is set when received in the dr_msg_cb callback to signify a fatal error
+     # publishing the message.
+     #
+     # @return [nil] Message does not have an error
+     # @return [Kafka::ResponseError] RD_KAFKA_RESP_ERR__* error code
+     def error
+       if self[:err] != :ok
+         ::Kafka::ResponseError.new(self[:err])
+       end
+     end
+
+     # Returns the name of the Topic the Message was published to.
+     #
+     # @return [nil] Topic information was not available
+     # @return [String] Name of the Topic the message was published to
+     def topic
+       if self[:rkt].nil?
+         return nil
+       end
+
+       self[:rkt].name
+     end
+
+     # Returns the optional message key used to publish the message. This key
+     # is used for partition assignment based on the `partitioner` or
+     # `partitioner_cb` config options.
+     #
+     # @return [nil] No partitioning key was provided
+     # @return [String] The partitioning key
+     def key
+       if self[:key].null?
+         return nil
+       end
+
+       self[:key].read_string(self[:key_len])
+     end
+
+     # Returns the partition the message was published to.
+     #
+     # @return [Integer] Partition
+     def partition
+       self[:partition]
+     end
+
+     # Returns the message's offset as published in the topic's partition.
+     # When error != nil, this is the offset at which the error occurred.
+     #
+     # @return [Integer] Message offset
+     # @return [RD_KAFKA_OFFSET_INVALID] Message was retried and idempotence
+     #   is enabled.
+     def offset
+       self[:offset]
+     end
+
+     # Returns the per-message opaque that was given to produce. The field is
+     # resolved through the Opaque registry, so the original Opaque is returned.
+     #
+     # @note The application must keep the Opaque registered (not call #free)
+     #   until the delivery report has been received. Otherwise the lookup
+     #   returns nil and the backing memory may have been released.
+     #
+     # @example Retrieve the application object from the opaque
+     #   opaque = msg.opaque
+     #   obj = opaque && opaque.value
+     #
+     # @return [nil] Opaque was not set
+     # @return [Opaque] Opaque that was passed to produce
+     def opaque
+       self[:private]
+     end
+
+     # Returns the message's payload. When error != nil, the payload contains
+     # a string describing the error.
+     #
+     # @return [String] Message payload or error string
+     def payload
+       if self[:payload].null?
+         return nil
+       end
+
+       self[:payload].read_string(self[:len])
+     end
+
+     # Get the message header list
+     #
+     # @raise [Kafka::ResponseError] Error occurred parsing headers
+     #
+     # @return [nil] Message does not have any headers
+     # @return [Message::Header] Set of headers
+     def headers
+       ptr = ::FFI::MemoryPointer.new(:pointer)
+
+       err = ::Kafka::FFI.rd_kafka_message_headers(self, ptr)
+       case err
+       when :ok
+         # Read the header list's address out of the return argument. A NULL
+         # address means the message has no headers.
+         hdrs = ptr.read_pointer
+         hdrs.null? ? nil : Message::Header.new(hdrs)
+       when RD_KAFKA_RESP_ERR__NOENT
+         # Message does not have headers
+         nil
+       else
+         raise ::Kafka::ResponseError, err
+       end
+     ensure
+       ptr.free
+     end
+
+     # Get the Message's headers and detach them from the Message (setting its
+     # headers to nil). Calling detach_headers means the application is now
+     # the owner of the returned Message::Header and must eventually call
+     # #destroy when done with them.
+     #
+     # @raise [Kafka::ResponseError] Error occurred parsing headers
+     #
+     # @return [nil] Message does not have any headers
+     # @return [Message::Header] Set of headers
+     def detach_headers
+       ptr = ::FFI::MemoryPointer.new(:pointer)
+
+       err = ::Kafka::FFI.rd_kafka_message_detach_headers(self, ptr)
+       case err
+       when :ok
+         # Read the header list's address out of the return argument. A NULL
+         # address means the message has no headers.
+         hdrs = ptr.read_pointer
+         hdrs.null? ? nil : Message::Header.new(hdrs)
+       when RD_KAFKA_RESP_ERR__NOENT
+         # Message does not have headers
+         nil
+       else
+         raise ::Kafka::ResponseError, err
+       end
+     ensure
+       ptr.free
+     end
+
+     # rubocop:disable Naming/AccessorMethodName
+
+     # Replace the Message's headers with a new set.
+     #
+     # @note The Message takes ownership of the headers and they will be
+     #   destroyed automatically with the Message.
+     def set_headers(headers)
+       ::Kafka::FFI.rd_kafka_message_set_headers(self, headers)
+     end
+     alias headers= set_headers
+     # rubocop:enable Naming/AccessorMethodName
+
+     # Retrieve the timestamp for a consumed message.
+     #
+     # @example Convert timestamp to a Time
+     #   ts = message.timestamp
+     #   ts = ts && Time.at(0, ts, :millisecond).utc
+     #
+     # @return [Integer] Message timestamp as milliseconds since unix epoch
+     # @return [nil] Timestamp is not available
+     def timestamp
+       # @todo: Type (second param) [rd_kafka_timestamp_type_t enum]
+       ts = ::Kafka::FFI.rd_kafka_message_timestamp(self, nil)
+       ts == -1 ? nil : ts
+     end
+
+     # Retrieve the latency since the Message was published to Kafka.
+     #
+     # @return [Integer] Latency since produce() call in microseconds
+     # @return [nil] Latency not available
+     def latency
+       latency = ::Kafka::FFI.rd_kafka_message_latency(self)
+       latency == -1 ? nil : latency
+     end
+
+     # Frees resources used by the message and hands ownership back to
+     # librdkafka. The application should call destroy when done processing
+     # the message.
+     def destroy
+       if !null?
+         ::Kafka::FFI.rd_kafka_message_destroy(self)
+       end
+     end
+   end
+ end
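Messages are produced by librdkafka (for example, from a consumer poll) rather than built by hand. A hedged sketch of handling one, using only the accessors defined above; the `consumer.poll` call stands in for whatever the surrounding consumer API provides:

  msg = consumer.poll(250)    # assumed API; yields a Kafka::FFI::Message or nil
  if msg
    begin
      if msg.error
        # With an error set, payload holds a description of the failure.
        warn "#{msg.topic}/#{msg.partition} error: #{msg.payload}"
      else
        puts "#{msg.topic}[#{msg.partition}]@#{msg.offset} key=#{msg.key.inspect}"
        puts msg.payload
      end
    ensure
      msg.destroy   # hand the message's resources back to librdkafka
    end
  end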
@@ -0,0 +1,58 @@
+ # frozen_string_literal: true
+
+ module Kafka::FFI
+   class Metadata < ::FFI::Struct
+     layout(
+       :broker_cnt, :int,
+       :brokers, :pointer, # *rd_kafka_metadata_broker
+       :topic_cnt, :int,
+       :topics, :pointer, # *rd_kafka_metadata_topic
+       :orig_broker_id, :int32,
+       :orig_broker_name, :string
+     )
+
+     # Returns detailed metadata for the Brokers in the cluster.
+     #
+     # @return [Array<BrokerMetadata>] Metadata about known Brokers.
+     def brokers
+       ptr = self[:brokers]
+
+       self[:broker_cnt].times.map do |i|
+         BrokerMetadata.new(ptr + (i * BrokerMetadata.size))
+       end
+     end
+
+     # Returns detailed metadata about the topics and their partitions.
+     #
+     # @return [Array<TopicMetadata>] Metadata about known Topics.
+     def topics
+       ptr = self[:topics]
+
+       self[:topic_cnt].times.map do |i|
+         TopicMetadata.new(ptr + (i * TopicMetadata.size))
+       end
+     end
+
+     # Returns the ID of the Broker that the metadata request was served by.
+     #
+     # @return [Integer] Broker ID
+     def broker_id
+       self[:orig_broker_id]
+     end
+
+     # Returns the name of the Broker that the metadata request was served by.
+     #
+     # @return [String] Broker name
+     def broker_name
+       self[:orig_broker_name]
+     end
+
+     # Destroy the Metadata response, returning its resources back to the
+     # system.
+     def destroy
+       if !null?
+         ::Kafka::FFI.rd_kafka_metadata_destroy(self)
+       end
+     end
+   end
+ end
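Metadata responses are owned by the application and must be destroyed when finished. A sketch of walking one; the request that produces `md` and the accessor names on BrokerMetadata (assumed to mirror librdkafka's rd_kafka_metadata_broker_t fields) are assumptions, since neither is defined in this file:

  # md is assumed to be a Kafka::FFI::Metadata from a metadata request.
  begin
    puts "served by broker #{md.broker_id} (#{md.broker_name})"

    md.brokers.each do |b|
      # Assumed accessors mirroring id/host/port in rd_kafka_metadata_broker_t.
      puts "broker #{b.id} at #{b.host}:#{b.port}"
    end

    puts "#{md.topics.size} topics known"
  ensure
    md.destroy    # return the response's resources to the system
  end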
@@ -0,0 +1,81 @@
+ # frozen_string_literal: true
+
+ module Kafka::FFI
+   # Opaque provides a safe mechanism for providing Ruby objects as opaque
+   # pointers.
+   #
+   # Opaque pointers are used heavily in librdkafka to allow for passing
+   # references to application state into callbacks, configs, and other
+   # contexts. Ruby's garbage collector cannot check for references held in
+   # external FFI memory and will garbage collect objects that are otherwise
+   # unreferenced, leading to a segmentation fault.
+   #
+   # Opaque solves this by allocating a memory address which is used as a
+   # hash key to look up the Ruby object when needed. This keeps a reference
+   # to the object in Ruby so it is not garbage collected. It also allows
+   # Ruby objects to be moved in memory during compaction (in Ruby 2.7+),
+   # which storing a direct reference to a Ruby object would not.
+   class Opaque
+     extend ::FFI::DataConverter
+     native_type :pointer
+
+     # Registry holds references to all known Opaque instances in the system
+     # keyed by the pointer address associated with it.
+     @registry = {}
+
+     class << self
+       # Register the Opaque in the registry, keeping a reference to it to
+       # avoid it being garbage collected. This will replace any existing
+       # Opaque with the same address.
+       #
+       # @param opaque [Opaque]
+       def register(opaque)
+         @registry[opaque.pointer.address] = opaque
+       end
+
+       # Remove the Opaque from the registry, putting it back in contention
+       # for garbage collection.
+       #
+       # @param opaque [Opaque]
+       def remove(opaque)
+         @registry.delete(opaque.pointer.address)
+       end
+
+       # @param value [Opaque]
+       def to_native(value, _ctx)
+         value.pointer
+       end
+
+       # @param value [FFI::Pointer]
+       def from_native(value, _ctx)
+         if value.null?
+           return nil
+         end
+
+         @registry.fetch(value.address, nil)
+       end
+     end
+
+     attr_reader :value
+     attr_reader :pointer
+
+     def initialize(value)
+       @value = value
+       @pointer = ::FFI::MemoryPointer.new(:int8)
+
+       Opaque.register(self)
+     end
+
+     # Free releases the pointer back to the system and removes the Opaque
+     # from the registry. free should only be called when the Opaque is no
+     # longer stored in librdkafka as it frees the backing pointer which
+     # could cause a segfault if still referenced.
+     def free
+       Opaque.remove(self)
+       @pointer.free
+
+       @value = nil
+       @pointer = nil
+     end
+   end
+ end
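The registry makes the round trip explicit: only a stable address crosses the FFI boundary, and from_native maps it back to the registered Ruby object. A minimal sketch, independent of librdkafka:

  state = { deliveries: 0 }
  opaque = Kafka::FFI::Opaque.new(state)

  # What gets handed to C is the pointer, not the Ruby object.
  addr = opaque.pointer

  # Converting the address back through the registry yields the same Opaque,
  # so the original Hash is still reachable (and still alive).
  same = Kafka::FFI::Opaque.from_native(addr, nil)
  same.value[:deliveries] += 1

  opaque.free   # only once librdkafka no longer holds the address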
@@ -0,0 +1,73 @@
+ # frozen_string_literal: true
+
+ require "ffi"
+
+ module Kafka::FFI
+   # OpaquePointer provides a common pattern where we receive a pointer to a
+   # struct but don't care about its layout and only need to pass it back to
+   # functions. OpaquePointer adds type safety by checking types before
+   # converting.
+   #
+   # @note Kafka has several options for `opaque` pointers that get passed to
+   #   callbacks. Those opaque pointers are not related to this class.
+   class OpaquePointer
+     extend ::FFI::DataConverter
+
+     # @attr pointer [FFI::Pointer] Pointer to the implementing class
+     attr_reader :pointer
+
+     class << self
+       # Convert from a FFI::Pointer to the implementing class.
+       #
+       # @param value [FFI::Pointer]
+       #
+       # @return [nil] Passed ::FFI::Pointer::NULL
+       # @return Instance of the class backed by the pointer.
+       def from_native(value, _ctx)
+         if !value.is_a?(::FFI::Pointer)
+           raise TypeError, "from_native can only convert from a ::FFI::Pointer to #{self}"
+         end
+
+         # The equivalent of a native NULL pointer is nil.
+         if value.null?
+           return nil
+         end
+
+         obj = allocate
+         obj.send(:initialize, value)
+         obj
+       end
+
+       # Convert from a Kafka::FFI type to a native FFI type.
+       #
+       # @param value [Object] Instance to retrieve a pointer for.
+       #
+       # @return [FFI::Pointer] Pointer to the opaque struct
+       def to_native(value, _ctx)
+         if value.nil?
+           return ::FFI::Pointer::NULL
+         end
+
+         if !value.is_a?(self)
+           raise TypeError, "expected a kind of #{self}, was #{value.class}"
+         end
+
+         value.pointer
+       end
+
+       # Provide ::FFI::Struct API compatibility for consistency when
+       # attach_function is called with an OpaquePointer.
+       def by_ref
+         self
+       end
+
+       def inherited(subclass)
+         subclass.native_type :pointer
+       end
+     end
+
+     def initialize(pointer)
+       @pointer = pointer
+     end
+   end
+ end
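In practice a binding class subclasses OpaquePointer and is then used directly as a type in attach_function signatures; the inherited hook sets the subclass's native type so FFI invokes the conversion hooks automatically. A sketch with a hypothetical handle type:

  module Kafka::FFI
    # Hypothetical opaque handle, for illustration only.
    class Cache < OpaquePointer
    end
  end

  # Because Cache is an FFI::DataConverter over :pointer, it can appear in
  # attach_function signatures, e.g. (hypothetical C functions):
  #
  #   attach_function :cache_new, [], Cache
  #   attach_function :cache_size, [Cache], :int
  #
  # from_native wraps a returned pointer in a Cache (or nil for NULL), and
  # to_native unwraps Cache#pointer when passing the handle back in.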