rdkafka 0.2.0 → 0.3.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 33e13eee28a6f2356635744a0490e06d236afd9d
- data.tar.gz: 23a74b8693241447df18042c3cf0482a03ec9c82
+ metadata.gz: cd80c3cdbb5bc790349df27bdef345b86e37d341
+ data.tar.gz: 822bad9c78b508e28cc7030443841b622b4d6df2
  SHA512:
- metadata.gz: 2c130f0bfa4a968186cf51490e7453e81ac879c2ff3fa082707332235a6d98fc94e25c6278bac249f05a62afbea2cc70534bd05fd24ee07e1754418cf909d156
- data.tar.gz: 5c5d186816373f5cd28a5d7cc583a79cfded06522d4707da855373e06ce57ff90903c639a1232e535abfa692f27a395e699d4f6b47792a97b9777ada224c8e44
+ metadata.gz: 63f00a3e196e10359220321613b03335bcec067e342155edbe256ef8a04716b435d2444674839e694e423a08a91e240e2e7d0756fb79402a354b8c8ab49cd3d1
+ data.tar.gz: 4e112ca823c2b500187aed86edf540f3eda4fbb45cee3b22ee908a1605696545cff2a77e2b56ba2d73500e185a3c1524992638e6ccb31d8f3c14d32cb783c1bf
data/CHANGELOG.md CHANGED
@@ -1,3 +1,8 @@
+ # 0.3.0
+ * Move both add topic methods into a single `add_topic` in `TopicPartitionList`
+ * Add committed offsets to consumer
+ * Add query watermark offsets to consumer
+
  # 0.2.0
  * Some refactoring and add inline documentation
 
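Taken together, the 0.3.0 additions look like this from user code. A minimal sketch, assuming a broker reachable at `localhost:9092` and the `consume_test_topic` created by this gem's Rakefile:

```ruby
require "rdkafka"

config = Rdkafka::Config.new(
  :"bootstrap.servers" => "localhost:9092",
  :"group.id"          => "example-group"
)
consumer = config.consumer

# One add_topic now covers both the topic-only and explicit-partitions cases:
list = Rdkafka::Consumer::TopicPartitionList.new
list.add_topic("consume_test_topic")            # topic only
list.add_topic("consume_test_topic", [0, 1, 2]) # with explicit partitions

# New in this release: committed offsets and watermark offsets.
committed = consumer.committed(list)
low, high = consumer.query_watermark_offsets("consume_test_topic", 0)
```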
data/README.md CHANGED
@@ -54,7 +54,7 @@ end
 
  ## Development
 
- Run `bundle` and `cd ext && bundle exec rake compile && cd ..`. Then
+ Run `bundle` and `cd ext && bundle exec rake && cd ..`. Then
  create the topics as expected in the specs: `bundle exec rake create_topics`.
 
  You can then run `bundle exec rspec` to run the tests. To see rdkafka
data/Rakefile CHANGED
@@ -7,6 +7,7 @@ task :create_topics do
    else
      'kafka-topics'
    end
+   `#{kafka_topics} --create --topic=consume_test_topic --zookeeper=127.0.0.1:2181 --partitions=3 --replication-factor=1`
    `#{kafka_topics} --create --topic=produce_test_topic --zookeeper=127.0.0.1:2181 --partitions=3 --replication-factor=1`
    `#{kafka_topics} --create --topic=rake_test_topic --zookeeper=127.0.0.1:2181 --partitions=3 --replication-factor=1`
  end
data/lib/rdkafka.rb CHANGED
@@ -1,10 +1,12 @@
  require "rdkafka/version"
 
+ require "rdkafka/bindings"
  require "rdkafka/config"
  require "rdkafka/consumer"
  require "rdkafka/consumer/message"
+ require "rdkafka/consumer/partition"
+ require "rdkafka/consumer/topic_partition_list"
  require "rdkafka/error"
- require "rdkafka/ffi"
  require "rdkafka/producer"
  require "rdkafka/producer/delivery_handle"
  require "rdkafka/producer/delivery_report"
data/lib/rdkafka/bindings.rb CHANGED
@@ -3,8 +3,8 @@ require "logger"
 
  module Rdkafka
    # @private
-   module FFI
-     extend ::FFI::Library
+   module Bindings
+     extend FFI::Library
 
      def self.lib_extension
        if Gem::Platform.local.os.include?("darwin")
@@ -23,7 +23,7 @@ module Rdkafka
 
    # Message struct
 
-   class Message < ::FFI::Struct
+   class Message < FFI::Struct
      layout :err, :int,
             :rkt, :pointer,
             :partition, :int32,
@@ -42,8 +42,8 @@ module Rdkafka
 
    # TopicPartition and TopicPartitionList structs
 
-   class TopicPartition < ::FFI::Struct
-     layout :topic, :string,
+   class TopicPartition < FFI::Struct
+     layout :topic, :string,
            :partition, :int32,
            :offset, :int64,
            :metadata, :pointer,
@@ -53,15 +53,16 @@ module Rdkafka
            :_private, :pointer
    end
 
-   class TopicPartitionList < ::FFI::Struct
+   class TopicPartitionList < FFI::Struct
      layout :cnt, :int,
             :size, :int,
-            :elems, TopicPartition.ptr
+            :elems, :pointer
    end
 
    attach_function :rd_kafka_topic_partition_list_new, [:int32], :pointer
    attach_function :rd_kafka_topic_partition_list_add, [:pointer, :string, :int32], :void
    attach_function :rd_kafka_topic_partition_list_destroy, [:pointer], :void
+   attach_function :rd_kafka_topic_partition_list_copy, [:pointer], :pointer
 
    # Errors
 
@@ -85,7 +86,7 @@ module Rdkafka
    attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
    attach_function :rd_kafka_queue_get_main, [:pointer], :pointer
 
-   LogCallback = ::FFI::Function.new(
+   LogCallback = FFI::Function.new(
      :void, [:pointer, :int, :string, :string]
    ) do |_client_ptr, level, _level_string, line|
      severity = case level
@@ -118,11 +119,19 @@ module Rdkafka
    # Consumer
 
    attach_function :rd_kafka_subscribe, [:pointer, :pointer], :int
+   attach_function :rd_kafka_unsubscribe, [:pointer], :int
+   attach_function :rd_kafka_subscription, [:pointer, :pointer], :int
+   attach_function :rd_kafka_assignment, [:pointer, :pointer], :int
+   attach_function :rd_kafka_committed, [:pointer, :pointer, :int], :int
    attach_function :rd_kafka_commit, [:pointer, :pointer, :bool], :int, blocking: true
    attach_function :rd_kafka_poll_set_consumer, [:pointer], :void
    attach_function :rd_kafka_consumer_poll, [:pointer, :int], :pointer, blocking: true
    attach_function :rd_kafka_consumer_close, [:pointer], :void, blocking: true
 
+   # Stats
+
+   attach_function :rd_kafka_query_watermark_offsets, [:pointer, :string, :int, :pointer, :pointer, :int], :int
+
    # Producer
    RD_KAFKA_VTYPE_END = 0
@@ -141,7 +150,7 @@ module Rdkafka
    callback :delivery_cb, [:pointer, :pointer, :pointer], :void
    attach_function :rd_kafka_conf_set_dr_msg_cb, [:pointer, :delivery_cb], :void
 
-   DeliveryCallback = ::FFI::Function.new(
+   DeliveryCallback = FFI::Function.new(
      :void, [:pointer, :pointer, :pointer]
    ) do |client_ptr, message_ptr, opaque_ptr|
      message = Message.new(message_ptr)
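For orientation, a small sketch of driving the renamed bindings directly, using only functions attached above (`some_topic` is an arbitrary name; the `TopicPartitionList` wrapper added later in this release normally does this for you):

```ruby
require "rdkafka"

# Build a native list with room for one entry, add a topic with no
# fixed partition (-1), copy it with the newly bound copy function,
# then free both native lists.
tpl = Rdkafka::Bindings.rd_kafka_topic_partition_list_new(1)
Rdkafka::Bindings.rd_kafka_topic_partition_list_add(tpl, "some_topic", -1)
copy = Rdkafka::Bindings.rd_kafka_topic_partition_list_copy(tpl)
Rdkafka::Bindings.rd_kafka_topic_partition_list_destroy(tpl)
Rdkafka::Bindings.rd_kafka_topic_partition_list_destroy(copy)
```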
data/lib/rdkafka/config.rb CHANGED
@@ -74,7 +74,7 @@ module Rdkafka
    def consumer
      kafka = native_kafka(native_config, :rd_kafka_consumer)
      # Redirect the main queue to the consumer
-     Rdkafka::FFI.rd_kafka_poll_set_consumer(kafka)
+     Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka)
      # Return consumer with Kafka client
      Rdkafka::Consumer.new(kafka)
    end
@@ -89,7 +89,7 @@ module Rdkafka
      # Create Kafka config
      config = native_config
      # Set callback to receive delivery reports on config
-     Rdkafka::FFI.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::FFI::DeliveryCallback)
+     Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Bindings::DeliveryCallback)
      # Return producer with Kafka client
      Rdkafka::Producer.new(native_kafka(config, :rd_kafka_producer))
    end
@@ -108,10 +108,10 @@ module Rdkafka
    # This method is only intended to be used to create a client,
    # using it in another way will leak memory.
    def native_config
-     Rdkafka::FFI.rd_kafka_conf_new.tap do |config|
+     Rdkafka::Bindings.rd_kafka_conf_new.tap do |config|
        @config_hash.merge(REQUIRED_CONFIG).each do |key, value|
-         error_buffer = ::FFI::MemoryPointer.from_string(" " * 256)
-         result = Rdkafka::FFI.rd_kafka_conf_set(
+         error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+         result = Rdkafka::Bindings.rd_kafka_conf_set(
            config,
            key.to_s,
            value.to_s,
@@ -123,13 +123,13 @@ module Rdkafka
          end
        end
        # Set log callback
-       Rdkafka::FFI.rd_kafka_conf_set_log_cb(config, Rdkafka::FFI::LogCallback)
+       Rdkafka::Bindings.rd_kafka_conf_set_log_cb(config, Rdkafka::Bindings::LogCallback)
      end
    end
 
    def native_kafka(config, type)
-     error_buffer = ::FFI::MemoryPointer.from_string(" " * 256)
-     handle = Rdkafka::FFI.rd_kafka_new(
+     error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+     handle = Rdkafka::Bindings.rd_kafka_new(
        type,
        config,
        error_buffer,
@@ -141,14 +141,14 @@ module Rdkafka
      end
 
      # Redirect log to handle's queue
-     Rdkafka::FFI.rd_kafka_set_log_queue(
+     Rdkafka::Bindings.rd_kafka_set_log_queue(
        handle,
-       Rdkafka::FFI.rd_kafka_queue_get_main(handle)
+       Rdkafka::Bindings.rd_kafka_queue_get_main(handle)
      )
 
-     ::FFI::AutoPointer.new(
+     FFI::AutoPointer.new(
        handle,
-       Rdkafka::FFI.method(:rd_kafka_destroy)
+       Rdkafka::Bindings.method(:rd_kafka_destroy)
      )
    end
  end
data/lib/rdkafka/consumer.rb CHANGED
@@ -16,10 +16,10 @@ module Rdkafka
    # Close this consumer
    # @return [nil]
    def close
-     Rdkafka::FFI.rd_kafka_consumer_close(@native_kafka)
+     Rdkafka::Bindings.rd_kafka_consumer_close(@native_kafka)
    end
 
-   # Subscribe to one or more topics
+   # Subscribe to one or more topics, letting Kafka handle partition assignments.
    #
    # @param topics [Array<String>] One or more topic names
    #
@@ -28,22 +28,97 @@ module Rdkafka
    # @return [nil]
    def subscribe(*topics)
      # Create topic partition list with topics and no partition set
-     tpl = Rdkafka::FFI.rd_kafka_topic_partition_list_new(topics.length)
+     tpl = Rdkafka::Bindings.rd_kafka_topic_partition_list_new(topics.length)
      topics.each do |topic|
-       Rdkafka::FFI.rd_kafka_topic_partition_list_add(
+       Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
          tpl,
          topic,
          -1
        )
      end
      # Subscribe to topic partition list and check this was successful
-     response = Rdkafka::FFI.rd_kafka_subscribe(@native_kafka, tpl)
+     response = Rdkafka::Bindings.rd_kafka_subscribe(@native_kafka, tpl)
      if response != 0
        raise Rdkafka::RdkafkaError.new(response)
      end
    ensure
      # Clean up the topic partition list
-     Rdkafka::FFI.rd_kafka_topic_partition_list_destroy(tpl)
+     Rdkafka::Bindings.rd_kafka_topic_partition_list_destroy(tpl)
+   end
+
+   # Unsubscribe from all subscribed topics.
+   #
+   # @raise [RdkafkaError] When unsubscribing fails
+   #
+   # @return [nil]
+   def unsubscribe
+     response = Rdkafka::Bindings.rd_kafka_unsubscribe(@native_kafka)
+     if response != 0
+       raise Rdkafka::RdkafkaError.new(response)
+     end
+   end
+
+   # Return the current subscription to topics and partitions
+   #
+   # @raise [RdkafkaError] When getting the subscription fails.
+   #
+   # @return [TopicPartitionList]
+   def subscription
+     tpl = FFI::MemoryPointer.new(:pointer)
+     response = Rdkafka::Bindings.rd_kafka_subscription(@native_kafka, tpl)
+     if response != 0
+       raise Rdkafka::RdkafkaError.new(response)
+     end
+     Rdkafka::Consumer::TopicPartitionList.new(tpl.get_pointer(0))
+   end
+
+   # Return the current committed offset per partition for this consumer group.
+   # The offset field of each requested partition will either be set to the stored offset, or to -1001 if there was no stored offset for that partition.
+   #
+   # @param list [TopicPartitionList] The topic with partitions to get the offsets for.
+   # @param timeout_ms [Integer] The timeout for fetching this information.
+   #
+   # @raise [RdkafkaError] When getting the committed positions fails.
+   #
+   # @return [TopicPartitionList]
+   def committed(list, timeout_ms=200)
+     unless list.is_a?(TopicPartitionList)
+       raise TypeError.new("list has to be a TopicPartitionList")
+     end
+     tpl = list.copy_tpl
+     response = Rdkafka::Bindings.rd_kafka_committed(@native_kafka, tpl, timeout_ms)
+     if response != 0
+       raise Rdkafka::RdkafkaError.new(response)
+     end
+     Rdkafka::Consumer::TopicPartitionList.new(tpl)
+   end
+
+   # Query the broker for the low (oldest/beginning) and high (newest/end) offsets of a partition.
+   #
+   # @param topic [String] The topic to query
+   # @param partition [Integer] The partition to query
+   # @param timeout_ms [Integer] The timeout for querying the broker
+   #
+   # @raise [RdkafkaError] When querying the broker fails.
+   #
+   # @return [Array<Integer>] The low and high watermark offsets
+   def query_watermark_offsets(topic, partition, timeout_ms=200)
+     low = FFI::MemoryPointer.new(:int64, 1)
+     high = FFI::MemoryPointer.new(:int64, 1)
+
+     response = Rdkafka::Bindings.rd_kafka_query_watermark_offsets(
+       @native_kafka,
+       topic,
+       partition,
+       low,
+       high,
+       timeout_ms
+     )
+     if response != 0
+       raise Rdkafka::RdkafkaError.new(response)
+     end
+
+     return low.read_array_of_int64(1).first, high.read_array_of_int64(1).first
    end
 
    # Commit the current offsets of this consumer
@@ -54,7 +129,7 @@ module Rdkafka
    #
    # @return [nil]
    def commit(async=false)
-     response = Rdkafka::FFI.rd_kafka_commit(@native_kafka, nil, async)
+     response = Rdkafka::Bindings.rd_kafka_commit(@native_kafka, nil, async)
      if response != 0
        raise Rdkafka::RdkafkaError.new(response)
      end
@@ -68,12 +143,12 @@ module Rdkafka
    #
    # @return [Message, nil] A message or nil if there was no new message within the timeout
    def poll(timeout_ms)
-     message_ptr = Rdkafka::FFI.rd_kafka_consumer_poll(@native_kafka, timeout_ms)
+     message_ptr = Rdkafka::Bindings.rd_kafka_consumer_poll(@native_kafka, timeout_ms)
      if message_ptr.null?
        nil
      else
        # Create struct wrapper
-       native_message = Rdkafka::FFI::Message.new(message_ptr)
+       native_message = Rdkafka::Bindings::Message.new(message_ptr)
        # Raise error if needed
        if native_message[:err] != 0
          raise Rdkafka::RdkafkaError.new(native_message[:err])
@@ -84,7 +159,7 @@ module Rdkafka
    ensure
      # Clean up rdkafka message if there is one
      unless message_ptr.null?
-       Rdkafka::FFI.rd_kafka_message_destroy(message_ptr)
+       Rdkafka::Bindings.rd_kafka_message_destroy(message_ptr)
      end
    end
 
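A sketch of the new subscription round trip, mirroring the consumer spec further down (reusing the `consumer` from the earlier configuration sketch):

```ruby
consumer.subscribe("consume_test_topic")

# subscription returns a TopicPartitionList; unassigned topics map to nil.
consumer.subscription.to_h   # => {"consume_test_topic"=>nil}

consumer.unsubscribe
consumer.subscription.empty? # => true
```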
data/lib/rdkafka/consumer/message.rb CHANGED
@@ -29,7 +29,7 @@ module Rdkafka
      # @private
      def initialize(native_message)
        unless native_message[:rkt].null?
-         @topic = FFI.rd_kafka_topic_name(native_message[:rkt])
+         @topic = Rdkafka::Bindings.rd_kafka_topic_name(native_message[:rkt])
        end
        @partition = native_message[:partition]
        unless native_message[:payload].null?
@@ -39,12 +39,21 @@ module Rdkafka
          @key = native_message[:key].read_string(native_message[:key_len])
        end
        @offset = native_message[:offset]
-       @timestamp = FFI.rd_kafka_message_timestamp(native_message, nil)
+       @timestamp = Rdkafka::Bindings.rd_kafka_message_timestamp(native_message, nil)
      end
 
+     # Human readable representation of this message.
      # @return [String]
      def to_s
-       "Message in '#{topic}' with key '#{key}', payload '#{payload}', partition #{partition}, offset #{offset}, timestamp #{timestamp}"
+       "<Message in '#{topic}' with key '#{truncate(key)}', payload '#{truncate(payload)}', partition #{partition}, offset #{offset}, timestamp #{timestamp}>"
+     end
+
+     def truncate(string)
+       if string && string.length > 40
+         "#{string[0..39]}..."
+       else
+         string
+       end
      end
    end
  end
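What the new representation looks like for a hypothetical consumed message (offset and timestamp are illustrative); keys and payloads longer than 40 characters are now cut off with `...`:

```ruby
message = consumer.poll(250)
puts message unless message.nil?
# => <Message in 'consume_test_topic' with key 'key 1', payload 'payload 1', partition 0, offset 0, timestamp 1508241610>
```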
data/lib/rdkafka/consumer/partition.rb ADDED
@@ -0,0 +1,40 @@
+ module Rdkafka
+   class Consumer
+     # Information about a partition, used in {TopicPartitionList}.
+     class Partition
+       # Partition number
+       # @return [Integer]
+       attr_reader :partition
+
+       # Partition's offset
+       # @return [Integer]
+       attr_reader :offset
+
+       # @private
+       def initialize(partition, offset)
+         @partition = partition
+         @offset = offset
+       end
+
+       # Human readable representation of this partition.
+       # @return [String]
+       def to_s
+         "<Partition #{partition} with offset #{offset}>"
+       end
+
+       # Human readable representation of this partition.
+       # @return [String]
+       def inspect
+         to_s
+       end
+
+       # Whether another partition is equal to this one.
+       # @return [Boolean]
+       def ==(other)
+         self.class == other.class &&
+           self.partition == other.partition &&
+           self.offset == other.offset
+       end
+     end
+   end
+ end
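`Partition` is a plain value object; equality is by content:

```ruby
a = Rdkafka::Consumer::Partition.new(0, 100)
b = Rdkafka::Consumer::Partition.new(0, 100)
a == b   # => true
a.to_s   # => "<Partition 0 with offset 100>"
```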
data/lib/rdkafka/consumer/topic_partition_list.rb ADDED
@@ -0,0 +1,93 @@
+ module Rdkafka
+   class Consumer
+     # A list of topics with their partition information
+     class TopicPartitionList
+       # Create a new topic partition list.
+       #
+       # @param pointer [FFI::Pointer, nil] Optional pointer to an existing native list
+       #
+       # @return [TopicPartitionList]
+       def initialize(pointer=nil)
+         @tpl =
+           Rdkafka::Bindings::TopicPartitionList.new(
+             FFI::AutoPointer.new(
+               pointer || Rdkafka::Bindings.rd_kafka_topic_partition_list_new(5),
+               Rdkafka::Bindings.method(:rd_kafka_topic_partition_list_destroy)
+             )
+           )
+       end
+
+       # Number of items in the list
+       # @return [Integer]
+       def count
+         @tpl[:cnt]
+       end
+
+       # Whether this list is empty
+       # @return [Boolean]
+       def empty?
+         count == 0
+       end
+
+       # Add a topic, optionally with partitions, to the list.
+       #
+       # @param topic [String] The topic's name
+       # @param partitions [Array<Integer>, nil] The topic's partitions
+       #
+       # @return [nil]
+       def add_topic(topic, partitions=nil)
+         if partitions.nil?
+           Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
+             @tpl,
+             topic,
+             -1
+           )
+         else
+           partitions.each do |partition|
+             Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
+               @tpl,
+               topic,
+               partition
+             )
+           end
+         end
+       end
+
+       # Return a `Hash` with the topics as keys and an array of partition information as the value, if present.
+       #
+       # @return [Hash<String, [Array<Partition>, nil]>]
+       def to_h
+         {}.tap do |out|
+           count.times do |i|
+             ptr = @tpl[:elems] + (i * Rdkafka::Bindings::TopicPartition.size)
+             elem = Rdkafka::Bindings::TopicPartition.new(ptr)
+             if elem[:partition] == -1
+               out[elem[:topic]] = nil
+             else
+               partitions = out[elem[:topic]] || []
+               partition = Partition.new(elem[:partition], elem[:offset])
+               partitions.push(partition)
+               out[elem[:topic]] = partitions
+             end
+           end
+         end
+       end
+
+       # Human readable representation of this list.
+       # @return [String]
+       def to_s
+         "<TopicPartitionList: #{to_h}>"
+       end
+
+       def ==(other)
+         self.to_h == other.to_h
+       end
+
+       # Return a copy of the internal native list
+       # @private
+       def copy_tpl
+         Rdkafka::Bindings.rd_kafka_topic_partition_list_copy(@tpl)
+       end
+     end
+   end
+ end
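The list in use; `-1001` is librdkafka's `RD_KAFKA_OFFSET_INVALID`, reported before any offset is known:

```ruby
list = Rdkafka::Consumer::TopicPartitionList.new
list.add_topic("topic1", [0, 1])
list.add_topic("topic2")

list.count  # => 3: two topic1 partitions plus one unassigned topic2 entry
list.to_h
# => {"topic1"=>[<Partition 0 with offset -1001>, <Partition 1 with offset -1001>],
#     "topic2"=>nil}
```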
data/lib/rdkafka/error.rb CHANGED
@@ -14,7 +14,7 @@ module Rdkafka
    # This error's code, for example `:partition_eof`, `:msg_size_too_large`.
    # @return [Symbol]
    def code
-     code = Rdkafka::FFI.rd_kafka_err2name(@rdkafka_response).downcase
+     code = Rdkafka::Bindings.rd_kafka_err2name(@rdkafka_response).downcase
      if code[0] == "_"
        code[1..-1].to_sym
      else
@@ -25,7 +25,7 @@ module Rdkafka
    # Human readable representation of this error.
    # @return [String]
    def to_s
-     "#{Rdkafka::FFI.rd_kafka_err2str(@rdkafka_response)} (#{code})"
+     "#{Rdkafka::Bindings.rd_kafka_err2str(@rdkafka_response)} (#{code})"
    end
 
    # Whether this error indicates the partition is EOF.
data/lib/rdkafka/producer.rb CHANGED
@@ -8,9 +8,9 @@ module Rdkafka
      # Start thread to poll client for delivery callbacks
      @polling_thread = Thread.new do
        loop do
-         Rdkafka::FFI.rd_kafka_poll(@native_kafka, 250)
+         Rdkafka::Bindings.rd_kafka_poll(@native_kafka, 250)
          # Exit thread if closing and the poll queue is empty
-         if @closing && Rdkafka::FFI.rd_kafka_outq_len(@native_kafka) == 0
+         if @closing && Rdkafka::Bindings.rd_kafka_outq_len(@native_kafka) == 0
            break
          end
        end
@@ -71,16 +71,16 @@ module Rdkafka
      delivery_handle[:offset] = -1
 
      # Produce the message
-     response = Rdkafka::FFI.rd_kafka_producev(
+     response = Rdkafka::Bindings.rd_kafka_producev(
        @native_kafka,
-       :int, Rdkafka::FFI::RD_KAFKA_VTYPE_TOPIC, :string, topic,
-       :int, Rdkafka::FFI::RD_KAFKA_VTYPE_MSGFLAGS, :int, Rdkafka::FFI::RD_KAFKA_MSG_F_COPY,
-       :int, Rdkafka::FFI::RD_KAFKA_VTYPE_VALUE, :buffer_in, payload, :size_t, payload_size,
-       :int, Rdkafka::FFI::RD_KAFKA_VTYPE_KEY, :buffer_in, key, :size_t, key_size,
-       :int, Rdkafka::FFI::RD_KAFKA_VTYPE_PARTITION, :int32, partition,
-       :int, Rdkafka::FFI::RD_KAFKA_VTYPE_TIMESTAMP, :int64, timestamp,
-       :int, Rdkafka::FFI::RD_KAFKA_VTYPE_OPAQUE, :pointer, delivery_handle,
-       :int, Rdkafka::FFI::RD_KAFKA_VTYPE_END
+       :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_TOPIC, :string, topic,
+       :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_MSGFLAGS, :int, Rdkafka::Bindings::RD_KAFKA_MSG_F_COPY,
+       :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_VALUE, :buffer_in, payload, :size_t, payload_size,
+       :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_KEY, :buffer_in, key, :size_t, key_size,
+       :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_PARTITION, :int32, partition,
+       :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_TIMESTAMP, :int64, timestamp,
+       :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_OPAQUE, :pointer, delivery_handle,
+       :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_END
      )
 
      # Raise error if the produce call was not successful
data/lib/rdkafka/producer/delivery_handle.rb CHANGED
@@ -2,7 +2,7 @@ module Rdkafka
    class Producer
      # Handle to wait for a delivery report which is returned when
      # producing a message.
-     class DeliveryHandle < ::FFI::Struct
+     class DeliveryHandle < FFI::Struct
        layout :pending, :bool,
               :response, :int,
               :partition, :int,
data/lib/rdkafka/version.rb CHANGED
@@ -1,4 +1,4 @@
  module Rdkafka
-   VERSION = "0.2.0"
+   VERSION = "0.3.0"
    LIBRDKAFKA_VERSION = "0.11.0"
  end
data/spec/rdkafka/bindings_spec.rb ADDED
@@ -0,0 +1,13 @@
+ require "spec_helper"
+
+ describe Rdkafka::Bindings do
+   it "should load librdkafka" do
+     expect(Rdkafka::Bindings.ffi_libraries.map(&:name).first).to include "librdkafka"
+   end
+
+   it "should successfully call librdkafka" do
+     expect {
+       Rdkafka::Bindings.rd_kafka_conf_new
+     }.not_to raise_error
+   end
+ end
data/spec/rdkafka/consumer/message_spec.rb CHANGED
@@ -2,7 +2,7 @@ require "spec_helper"
 
  describe Rdkafka::Consumer::Message do
    let(:native_topic) do
-     Rdkafka::FFI.rd_kafka_topic_new(
+     Rdkafka::Bindings.rd_kafka_topic_new(
        native_client,
        "topic_name",
        nil
@@ -11,18 +11,18 @@ describe Rdkafka::Consumer::Message do
    let(:payload) { nil }
    let(:key) { nil }
    let(:native_message) do
-     Rdkafka::FFI::Message.new.tap do |message|
+     Rdkafka::Bindings::Message.new.tap do |message|
        message[:rkt] = native_topic
        message[:partition] = 3
        message[:offset] = 100
        if payload
-         ptr = ::FFI::MemoryPointer.new(:char, payload.bytesize)
+         ptr = FFI::MemoryPointer.new(:char, payload.bytesize)
          ptr.put_bytes(0, payload)
          message[:payload] = ptr
          message[:len] = payload.bytesize
        end
        if key
-         ptr = ::FFI::MemoryPointer.new(:char, key.bytesize)
+         ptr = FFI::MemoryPointer.new(:char, key.bytesize)
          ptr.put_bytes(0, key)
          message[:key] = ptr
          message[:key_len] = key.bytesize
@@ -78,4 +78,32 @@ describe Rdkafka::Consumer::Message do
        subject.timestamp
      }.not_to raise_error
    end
+
+   describe "#to_s" do
+     before do
+       allow(subject).to receive(:timestamp).and_return(1000)
+     end
+
+     it "should have a human readable representation" do
+       expect(subject.to_s).to eq "<Message in 'topic_name' with key '', payload '', partition 3, offset 100, timestamp 1000>"
+     end
+
+     context "with key and payload" do
+       let(:key) { "key" }
+       let(:payload) { "payload" }
+
+       it "should have a human readable representation" do
+         expect(subject.to_s).to eq "<Message in 'topic_name' with key 'key', payload 'payload', partition 3, offset 100, timestamp 1000>"
+       end
+     end
+
+     context "with a very long key and payload" do
+       let(:key) { "k" * 100_000 }
+       let(:payload) { "p" * 100_000 }
+
+       it "should have a human readable representation" do
+         expect(subject.to_s).to eq "<Message in 'topic_name' with key 'kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk...', payload 'pppppppppppppppppppppppppppppppppppppppp...', partition 3, offset 100, timestamp 1000>"
+       end
+     end
+   end
  end
data/spec/rdkafka/consumer/partition_spec.rb ADDED
@@ -0,0 +1,35 @@
+ require "spec_helper"
+
+ describe Rdkafka::Consumer::Partition do
+   subject { Rdkafka::Consumer::Partition.new(1, 100) }
+
+   it "should have a partition" do
+     expect(subject.partition).to eq 1
+   end
+
+   it "should have an offset" do
+     expect(subject.offset).to eq 100
+   end
+
+   describe "#to_s" do
+     it "should return a human readable representation" do
+       expect(subject.to_s).to eq "<Partition 1 with offset 100>"
+     end
+   end
+
+   describe "#inspect" do
+     it "should return a human readable representation" do
+       expect(subject.inspect).to eq "<Partition 1 with offset 100>"
+     end
+   end
+
+   describe "#==" do
+     it "should equal another partition with the same content" do
+       expect(subject).to eq Rdkafka::Consumer::Partition.new(1, 100)
+     end
+
+     it "should not equal another partition with different content" do
+       expect(subject).not_to eq Rdkafka::Consumer::Partition.new(2, 101)
+     end
+   end
+ end
data/spec/rdkafka/consumer/topic_partition_list_spec.rb ADDED
@@ -0,0 +1,94 @@
+ require "spec_helper"
+
+ describe Rdkafka::Consumer::TopicPartitionList do
+   it "should create a list from an existing native list" do
+     pointer = Rdkafka::Bindings.rd_kafka_topic_partition_list_new(5)
+     Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
+       pointer,
+       "topic",
+       -1
+     )
+     list = Rdkafka::Consumer::TopicPartitionList.new(pointer)
+
+     other = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+       list.add_topic("topic")
+     end
+
+     expect(list).to eq other
+   end
+
+   it "should create a new list and add unassigned topics" do
+     list = Rdkafka::Consumer::TopicPartitionList.new
+
+     expect(list.count).to eq 0
+     expect(list.empty?).to be true
+
+     list.add_topic("topic1")
+     list.add_topic("topic2")
+
+     expect(list.count).to eq 2
+     expect(list.empty?).to be false
+
+     hash = list.to_h
+     expect(hash.count).to eq 2
+     expect(hash).to eq ({
+       "topic1" => nil,
+       "topic2" => nil
+     })
+   end
+
+   it "should create a new list and add assigned topics" do
+     list = Rdkafka::Consumer::TopicPartitionList.new
+
+     expect(list.count).to eq 0
+     expect(list.empty?).to be true
+
+     list.add_topic("topic1", [0, 1, 2])
+     list.add_topic("topic2", [0, 1])
+
+     expect(list.count).to eq 5
+     expect(list.empty?).to be false
+
+     hash = list.to_h
+     expect(hash.count).to eq 2
+     expect(hash["topic1"]).to eq([
+       Rdkafka::Consumer::Partition.new(0, -1001),
+       Rdkafka::Consumer::Partition.new(1, -1001),
+       Rdkafka::Consumer::Partition.new(2, -1001)
+     ])
+     expect(hash["topic2"]).to eq([
+       Rdkafka::Consumer::Partition.new(0, -1001),
+       Rdkafka::Consumer::Partition.new(1, -1001)
+     ])
+   end
+
+   describe "#to_s" do
+     it "should return a human readable representation" do
+       list = Rdkafka::Consumer::TopicPartitionList.new
+       list.add_topic("topic1", [0, 1])
+
+       expected = "<TopicPartitionList: {\"topic1\"=>[<Partition 0 with offset -1001>, <Partition 1 with offset -1001>]}>"
+
+       expect(list.to_s).to eq expected
+     end
+   end
+
+   describe "#==" do
+     subject do
+       Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+         list.add_topic("topic1", [0])
+       end
+     end
+
+     it "should equal another list with the same content" do
+       other = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+         list.add_topic("topic1", [0])
+       end
+       expect(subject).to eq other
+     end
+
+     it "should not equal another list with different content" do
+       expect(subject).not_to eq Rdkafka::Consumer::TopicPartitionList.new
+     end
+   end
+ end
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -1,5 +1,68 @@
  require "spec_helper"
 
  describe Rdkafka::Consumer do
+   let(:config) { rdkafka_config }
+   let(:consumer) { config.consumer }
+   let(:producer) { config.producer }
 
+   context "subscription" do
+     it "should subscribe" do
+       expect(consumer.subscription).to be_empty
+
+       consumer.subscribe("consume_test_topic")
+
+       expect(consumer.subscription).not_to be_empty
+       expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+         list.add_topic("consume_test_topic")
+       end
+       expect(consumer.subscription).to eq expected_subscription
+
+       consumer.unsubscribe
+
+       expect(consumer.subscription).to be_empty
+     end
+   end
+
+   describe "committed" do
+     before do
+       # Make sure there's a stored offset
+       report = producer.produce(
+         topic: "consume_test_topic",
+         payload: "payload 1",
+         key: "key 1",
+         partition: 0
+       ).wait
+       message = wait_for_message(
+         topic: "consume_test_topic",
+         delivery_report: report,
+         config: config
+       )
+     end
+
+     it "should fetch the committed offsets for a specified topic partition list" do
+       list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+         list.add_topic("consume_test_topic", [0, 1, 2])
+       end
+       partitions = consumer.committed(list).to_h["consume_test_topic"]
+       expect(partitions[0].offset).to be > 0
+       expect(partitions[1].offset).to eq -1001
+       expect(partitions[2].offset).to eq -1001
+     end
+   end
+
+   describe "watermark offsets" do
+     it "should return the watermark offsets" do
+       # Make sure there's a message
+       producer.produce(
+         topic: "consume_test_topic",
+         payload: "payload 1",
+         key: "key 1",
+         partition: 0
+       ).wait
+
+       low, high = consumer.query_watermark_offsets("consume_test_topic", 0, 5000)
+       expect(low).to eq 0
+       expect(high).to be > 0
+     end
+   end
  end
data/spec/spec_helper.rb CHANGED
@@ -22,8 +22,9 @@ def native_client
    config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer)
  end
 
- def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30)
-   consumer = rdkafka_config.consumer
+ def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, config: nil)
+   config = rdkafka_config if config.nil?
+   consumer = config.consumer
    consumer.subscribe(topic)
    timeout = Time.now.to_i + timeout_in_seconds
    loop do
@@ -38,5 +39,6 @@ def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30)
      end
    end
  ensure
+   consumer.commit
    consumer.close
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: rdkafka
  version: !ruby/object:Gem::Version
-   version: 0.2.0
+   version: 0.3.0
  platform: ruby
  authors:
  - Thijs Cadier
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-10-13 00:00:00.000000000 Z
+ date: 2017-10-17 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: ffi
@@ -98,21 +98,25 @@ files:
  - Rakefile
  - ext/Rakefile
  - lib/rdkafka.rb
+ - lib/rdkafka/bindings.rb
  - lib/rdkafka/config.rb
  - lib/rdkafka/consumer.rb
  - lib/rdkafka/consumer/message.rb
+ - lib/rdkafka/consumer/partition.rb
+ - lib/rdkafka/consumer/topic_partition_list.rb
  - lib/rdkafka/error.rb
- - lib/rdkafka/ffi.rb
  - lib/rdkafka/producer.rb
  - lib/rdkafka/producer/delivery_handle.rb
  - lib/rdkafka/producer/delivery_report.rb
  - lib/rdkafka/version.rb
  - rdkafka.gemspec
+ - spec/rdkafka/bindings_spec.rb
  - spec/rdkafka/config_spec.rb
  - spec/rdkafka/consumer/message_spec.rb
+ - spec/rdkafka/consumer/partition_spec.rb
+ - spec/rdkafka/consumer/topic_partition_list_spec.rb
  - spec/rdkafka/consumer_spec.rb
  - spec/rdkafka/error_spec.rb
- - spec/rdkafka/ffi_spec.rb
  - spec/rdkafka/producer/delivery_handle_spec.rb
  - spec/rdkafka/producer/delivery_report_spec.rb
  - spec/rdkafka/producer_spec.rb
@@ -143,11 +147,13 @@ specification_version: 4
  summary: Kafka client library wrapping librdkafka using the ffi gem and futures from
    concurrent-ruby for Kafka 0.10+
  test_files:
+ - spec/rdkafka/bindings_spec.rb
  - spec/rdkafka/config_spec.rb
  - spec/rdkafka/consumer/message_spec.rb
+ - spec/rdkafka/consumer/partition_spec.rb
+ - spec/rdkafka/consumer/topic_partition_list_spec.rb
  - spec/rdkafka/consumer_spec.rb
  - spec/rdkafka/error_spec.rb
- - spec/rdkafka/ffi_spec.rb
  - spec/rdkafka/producer/delivery_handle_spec.rb
  - spec/rdkafka/producer/delivery_report_spec.rb
  - spec/rdkafka/producer_spec.rb
data/spec/rdkafka/ffi_spec.rb DELETED
@@ -1,13 +0,0 @@
- require "spec_helper"
-
- describe Rdkafka::FFI do
-   it "should load librdkafka" do
-     expect(Rdkafka::FFI.ffi_libraries.map(&:name).first).to include "librdkafka"
-   end
-
-   it "should successfully call librdkafka" do
-     expect {
-       Rdkafka::FFI.rd_kafka_conf_new
-     }.not_to raise_error
-   end
- end