rdkafka 0.0.1 → 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: c019ec1ebf89bf81ce40340c8866e589d86826e3
4
- data.tar.gz: cf1fdaadf5527542dbc1847587f43d8497d7f3d6
3
+ metadata.gz: 1444aae0a46b7a7fc96660980a185d659a59c1b9
4
+ data.tar.gz: c8b0a132119a090cd7a004c6dbe3c6b1fc06db3c
5
5
  SHA512:
6
- metadata.gz: b58a95c81f0ae2b09d347acf12dbaf71057c4c26c25a6411eb5212723eee33d0b107db61fef385a39bb05079db586ba2d9406f6f1f097f5bb49e8c6852206a33
7
- data.tar.gz: 3f5b7bd2277397df7acc096b3145a2e65e33df4d9ee1c7646b5d33d20992daecdd78fffb996abaccbb6b3304d131a7123a8fb94b2d7fdef4e8ad43b6403cdac5
6
+ metadata.gz: ba75b3de888cda846b746fdcb4b5dcbdde21a0d8efcb4143e0176bbfea0ab34dce2d8180037c6c0d6fd9dd63770b0adeb460b1beb3a5d278f423b4a8916bf3a1
7
+ data.tar.gz: d825345bfb72038f9b577ec7dfe5a429087ad71fe133e17fa9a65d6b7945905d22e75695dbbef8305d8b459f2f19c99dcd117746152034a492cedbf3f2bf3e38
data/README.md CHANGED
@@ -4,5 +4,13 @@ Kafka client library wrapping `librdkafka` using the FFI gem for Kafka 0.10+ and
4
4
 
5
5
  ## Development
6
6
 
7
- Run `bundle` and `cd ext && bundle exec rake compile && cd ..`. You can then run
8
- `bundle exec rspec` to run the tests.
7
+ Run `bundle` and `cd ext && bundle exec rake compile && cd ..`. Then
8
+ create the topics as expected in the specs: `bundle exec rake create_topics`.
9
+
10
+ You can then run `bundle exec rspec` to run the tests. To see rdkafka
11
+ debug output:
12
+
13
+ ```
14
+ DEBUG_PRODUCER=true bundle exec rspec
15
+ DEBUG_CONSUMER=true bundle exec rspec
16
+ ```
data/Rakefile ADDED
@@ -0,0 +1,30 @@
1
+ require "./lib/rdkafka"
2
+
3
+ task :create_topics do
4
+ `kafka-topics --create --topic=produce_test_topic --zookeeper=127.0.0.1:2181 --partitions=1 --replication-factor=1`
5
+ `kafka-topics --create --topic=rake_test_topic --zookeeper=127.0.0.1:2181 --partitions=1 --replication-factor=1`
6
+ end
7
+
8
+ task :produce_message do
9
+ producer = Rdkafka::Config.new(
10
+ :"bootstrap.servers" => "localhost:9092"
11
+ ).producer
12
+ producer.produce(
13
+ topic: "rake_test_topic",
14
+ payload: "payload from Rake",
15
+ key: "key from Rake"
16
+ ).wait
17
+ end
18
+
19
+ task :consume_messages do
20
+ consumer = Rdkafka::Config.new(
21
+ :"bootstrap.servers" => "localhost:9092",
22
+ :"group.id" => "rake_test",
23
+ :"enable.partition.eof" => false,
24
+ :"auto.offset.reset" => "earliest"
25
+ ).consumer
26
+ consumer.subscribe("rake_test_topic")
27
+ consumer.each do |message|
28
+ puts message
29
+ end
30
+ end
@@ -1,7 +1,7 @@
1
1
  module Rdkafka
2
2
  class Config
3
3
  DEFAULT_CONFIG = {
4
- "api.version.request" => "true"
4
+ :"api.version.request" => true
5
5
  }
6
6
 
7
7
  def initialize(config_hash = {})
@@ -17,7 +17,9 @@ module Rdkafka
17
17
  end
18
18
 
19
19
  def consumer
20
- Rdkafka::Consumer.new(native_kafka(native_config, :rd_kafka_consumer))
20
+ kafka = native_kafka(native_config, :rd_kafka_consumer)
21
+ Rdkafka::FFI.rd_kafka_poll_set_consumer(kafka)
22
+ Rdkafka::Consumer.new(kafka)
21
23
  end
22
24
 
23
25
  def producer
@@ -43,8 +45,8 @@ module Rdkafka
43
45
  error_buffer = ::FFI::MemoryPointer.from_string(" " * 256)
44
46
  result = Rdkafka::FFI.rd_kafka_conf_set(
45
47
  config,
46
- key,
47
- value,
48
+ key.to_s,
49
+ value.to_s,
48
50
  error_buffer,
49
51
  256
50
52
  )
@@ -1,7 +1,63 @@
1
1
  module Rdkafka
2
2
  class Consumer
3
+ include Enumerable
4
+
3
5
  def initialize(native_kafka)
4
6
  @native_kafka = native_kafka
5
7
  end
8
+
9
+ def subscribe(*topics)
10
+ # Create topic partition list with topics and no partition set
11
+ tpl = Rdkafka::FFI.rd_kafka_topic_partition_list_new(topics.length)
12
+ topics.each do |topic|
13
+ Rdkafka::FFI.rd_kafka_topic_partition_list_add(
14
+ tpl,
15
+ topic,
16
+ -1
17
+ )
18
+ end
19
+ # Subscribe to topic partition list and check this was successful
20
+ response = Rdkafka::FFI.rd_kafka_subscribe(@native_kafka, tpl)
21
+ if response != 0
22
+ raise Rdkafka::RdkafkaError.new(response)
23
+ end
24
+ ensure
25
+ # Clean up the topic partition list
26
+ Rdkafka::FFI.rd_kafka_topic_partition_list_destroy(tpl)
27
+ end
28
+
29
+ def commit(async=false)
30
+ response = Rdkafka::FFI.rd_kafka_commit(@native_kafka, nil, async)
31
+ if response != 0
32
+ raise Rdkafka::RdkafkaError.new(response)
33
+ end
34
+ end
35
+
36
+ def poll(timeout_ms=100)
37
+ message_ptr = Rdkafka::FFI.rd_kafka_consumer_poll(@native_kafka, timeout_ms)
38
+ if message_ptr.null?
39
+ nil
40
+ else
41
+ message = Rdkafka::FFI::Message.new(message_ptr)
42
+ if message.err != 0
43
+ raise Rdkafka::RdkafkaError.new(message.err)
44
+ end
45
+ message
46
+ end
47
+ end
48
+
49
+ def each(&block)
50
+ loop do
51
+ message = poll(10)
52
+ if message
53
+ block.call(message)
54
+ else
55
+ # Sleep here instead of using a longer poll timeout so interrupting the
56
+ # program works properly, MRI has a hard time interrupting FFI calls.
57
+ sleep 0.1
58
+ next
59
+ end
60
+ end
61
+ end
6
62
  end
7
63
  end
data/lib/rdkafka/error.rb CHANGED
@@ -3,23 +3,25 @@ module Rdkafka
3
3
  attr_reader :rdkafka_response
4
4
 
5
5
  def initialize(response)
6
+ raise TypeError.new("Response has to be an integer") unless response.is_a? Integer
6
7
  @rdkafka_response = response
7
8
  end
8
9
 
9
10
  def code
10
- if @rdkafka_response.nil?
11
- :unknown_error
11
+ code = Rdkafka::FFI.rd_kafka_err2name(@rdkafka_response).downcase
12
+ if code[0] == "_"
13
+ code[1..-1].to_sym
12
14
  else
13
- Rdkafka::FFI.rd_kafka_err2name(@rdkafka_response).downcase.to_sym
15
+ code.to_sym
14
16
  end
15
17
  end
16
18
 
17
19
  def to_s
18
- if @rdkafka_response.nil?
19
- "Unknown error: Response code is nil"
20
- else
21
- Rdkafka::FFI.rd_kafka_err2str(@rdkafka_response)
22
- end
20
+ "#{Rdkafka::FFI.rd_kafka_err2str(@rdkafka_response)} (#{code})"
21
+ end
22
+
23
+ def is_partition_eof?
24
+ code == :partition_eof
23
25
  end
24
26
  end
25
27
  end
data/lib/rdkafka/ffi.rb CHANGED
@@ -7,11 +7,12 @@ module Rdkafka
7
7
  ffi_lib "ext/ports/#{MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION).host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.dylib"
8
8
 
9
9
  # Polling
10
+
10
11
  attach_function :rd_kafka_poll, [:pointer, :int], :void
11
12
 
12
13
  # Message struct
13
14
 
14
- class Message < ::FFI::Struct
15
+ class Message < ::FFI::ManagedStruct
15
16
  layout :err, :int,
16
17
  :rkt, :pointer,
17
18
  :partition, :int32,
@@ -21,8 +22,69 @@ module Rdkafka
21
22
  :key_len, :size_t,
22
23
  :offset, :int64,
23
24
  :_private, :pointer
25
+
26
+ def err
27
+ self[:err]
28
+ end
29
+
30
+ def partition
31
+ self[:partition]
32
+ end
33
+
34
+ def payload
35
+ if self[:payload].null?
36
+ nil
37
+ else
38
+ self[:payload].read_string(self[:len])
39
+ end
40
+ end
41
+
42
+ def key
43
+ if self[:key].null?
44
+ nil
45
+ else
46
+ self[:key].read_string(self[:key_len])
47
+ end
48
+ end
49
+
50
+ def offset
51
+ self[:offset]
52
+ end
53
+
54
+ def to_s
55
+ "Message with key '#{key}', payload '#{payload}', partition '#{partition}', offset '#{offset}'"
56
+ end
57
+
58
+ def self.release(ptr)
59
+ rd_kafka_message_destroy(ptr)
60
+ end
61
+ end
62
+
63
+ attach_function :rd_kafka_message_destroy, [:pointer], :void
64
+
65
+ # TopicPartition and TopicPartitionList structs
66
+
67
+ class TopicPartition < ::FFI::Struct
68
+ layout :topic, :string,
69
+ :partition, :int32,
70
+ :offset, :int64,
71
+ :metadata, :pointer,
72
+ :metadata_size, :size_t,
73
+ :opaque, :pointer,
74
+ :err, :int,
75
+ :_private, :pointer
76
+ end
77
+
78
+ class TopicPartitionList < ::FFI::Struct
79
+ layout :cnt, :int,
80
+ :size, :int,
81
+ :elems, TopicPartition.ptr
24
82
  end
25
83
 
84
+ attach_function :rd_kafka_topic_partition_list_new, [:int32], :pointer
85
+ attach_function :rd_kafka_topic_partition_list_add, [:pointer, :string, :int32], :void
86
+ attach_function :rd_kafka_topic_partition_list_destroy, [:pointer], :void
87
+
26
88
  # Errors
27
89
 
28
90
  attach_function :rd_kafka_err2name, [:int], :string
@@ -49,7 +111,14 @@ module Rdkafka
49
111
  attach_function :rd_kafka_new, [:kafka_type, :pointer, :pointer, :int], :pointer
50
112
  attach_function :rd_kafka_destroy, [:pointer], :void
51
113
 
52
- # Producing
114
+ # Consumer
115
+
116
+ attach_function :rd_kafka_subscribe, [:pointer, :pointer], :int
117
+ attach_function :rd_kafka_commit, [:pointer, :pointer, :bool], :int
118
+ attach_function :rd_kafka_poll_set_consumer, [:pointer], :void
119
+ attach_function :rd_kafka_consumer_poll, [:pointer, :int], :pointer
120
+
121
+ # Producer
53
122
 
54
123
  RD_KAFKA_VTYPE_END = 0
55
124
  RD_KAFKA_VTYPE_TOPIC = 1
@@ -1,4 +1,4 @@
1
1
  module Rdkafka
2
- VERSION = "0.0.1"
2
+ VERSION = "0.1.0"
3
3
  LIBRDKAFKA_VERSION = "0.11.0"
4
4
  end
data/rdkafka.gemspec CHANGED
@@ -14,7 +14,7 @@ Gem::Specification.new do |gem|
14
14
  gem.name = 'rdkafka'
15
15
  gem.require_paths = ['lib']
16
16
  gem.version = Rdkafka::VERSION
17
- gem.required_ruby_version = '>= 2.0'
17
+ gem.required_ruby_version = '>= 2.1'
18
18
  gem.extensions = %w(ext/Rakefile)
19
19
 
20
20
  gem.add_dependency 'ffi', '~> 1.9'
@@ -1,11 +1,13 @@
1
1
  require "spec_helper"
2
2
 
3
3
  describe Rdkafka::RdkafkaError do
4
- describe "#code" do
5
- it "should handle a nil response" do
6
- expect(Rdkafka::RdkafkaError.new(nil).code).to eq :unknown_error
7
- end
4
+ it "should raise a type error for a nil response" do
5
+ expect {
6
+ Rdkafka::RdkafkaError.new(nil)
7
+ }.to raise_error TypeError
8
+ end
8
9
 
10
+ describe "#code" do
9
11
  it "should handle an invalid response" do
10
12
  expect(Rdkafka::RdkafkaError.new(933975).code).to eq :err_933975?
11
13
  end
@@ -13,19 +15,29 @@ describe Rdkafka::RdkafkaError do
13
15
  it "should return error messages from rdkafka" do
14
16
  expect(Rdkafka::RdkafkaError.new(10).code).to eq :msg_size_too_large
15
17
  end
16
- end
17
18
 
18
- describe "#to_s" do
19
- it "should handle a nil response" do
20
- expect(Rdkafka::RdkafkaError.new(nil).to_s).to eq "Unknown error: Response code is nil"
19
+ it "should strip a leading underscore" do
20
+ expect(Rdkafka::RdkafkaError.new(-191).code).to eq :partition_eof
21
21
  end
22
+ end
22
23
 
24
+ describe "#to_s" do
23
25
  it "should handle an invalid response" do
24
- expect(Rdkafka::RdkafkaError.new(933975).to_s).to eq "Err-933975?"
26
+ expect(Rdkafka::RdkafkaError.new(933975).to_s).to eq "Err-933975? (err_933975?)"
25
27
  end
26
28
 
27
29
  it "should return error messages from rdkafka" do
28
- expect(Rdkafka::RdkafkaError.new(10).to_s).to eq "Broker: Message size too large"
30
+ expect(Rdkafka::RdkafkaError.new(10).to_s).to eq "Broker: Message size too large (msg_size_too_large)"
31
+ end
32
+ end
33
+
34
+ describe "#is_partition_eof?" do
35
+ it "should be false when not partition eof" do
36
+ expect(Rdkafka::RdkafkaError.new(933975).is_partition_eof?).to be false
37
+ end
38
+
39
+ it "should be true when partition eof" do
40
+ expect(Rdkafka::RdkafkaError.new(-191).is_partition_eof?).to be true
29
41
  end
30
42
  end
31
43
  end
@@ -1,31 +1,47 @@
1
1
  require "spec_helper"
2
2
 
3
3
  describe Rdkafka::Producer do
4
- let(:producer) do
5
- rdkafka_config.producer
6
- end
4
+ let(:producer) { rdkafka_config.producer }
5
+ let(:consumer) { rdkafka_config.consumer }
7
6
 
8
7
  it "should require a topic" do
9
8
  expect {
10
9
  producer.produce(
11
10
  payload: "payload",
12
- key: "key"
11
+ key: "key"
13
12
  )
14
13
  }.to raise_error ArgumentError, "missing keyword: topic"
15
14
  end
16
15
 
17
16
  it "should produce a message" do
17
+ consumer.subscribe("produce_test_topic")
18
+ # Make sure the consumer is running before we produce
19
+ 5.times do
20
+ consumer.poll
21
+ end
22
+
18
23
  handle = producer.produce(
19
- topic: "produce_test_topic",
24
+ topic: "produce_test_topic",
20
25
  payload: "payload 1",
21
- key: "key 1"
26
+ key: "key 1"
22
27
  )
23
28
  expect(handle.pending?).to be true
24
29
 
30
+ # Check delivery handle and report
25
31
  report = handle.wait
26
32
  expect(handle.pending?).to be false
27
33
  expect(report).not_to be_nil
28
34
  expect(report.partition).to eq 0
29
35
  expect(report.offset).to be > 0
36
+
37
+ # Consume message and verify its content
38
+ message = consumer.first
39
+ expect(message).not_to be_nil
40
+ expect(message.partition).to eq 0
41
+ expect(message.offset).to eq report.offset
42
+ expect(message.payload).to eq "payload 1"
43
+ expect(message.key).to eq "key 1"
44
+
45
+ consumer.commit
30
46
  end
31
47
  end
data/spec/spec_helper.rb CHANGED
@@ -3,5 +3,17 @@ require "rspec"
3
3
  require "rdkafka"
4
4
 
5
5
  def rdkafka_config
6
- Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
6
+ debug = if ENV["DEBUG_PRODUCER"]
7
+ "broker,topic,msg"
8
+ elsif ENV["DEBUG_CONSUMER"]
9
+ "cgrp,topic,fetch"
10
+ else
11
+ ""
12
+ end
13
+ Rdkafka::Config.new(
14
+ :"bootstrap.servers" => "localhost:9092",
15
+ :"group.id" => "ruby_test",
16
+ :"enable.partition.eof" => false,
17
+ :"debug" => debug
18
+ )
7
19
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: rdkafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.1
4
+ version: 0.1.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Thijs Cadier
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2017-08-28 00:00:00.000000000 Z
11
+ date: 2017-08-30 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: ffi
@@ -92,6 +92,7 @@ files:
92
92
  - Gemfile
93
93
  - LICENSE
94
94
  - README.md
95
+ - Rakefile
95
96
  - ext/Rakefile
96
97
  - lib/rdkafka.rb
97
98
  - lib/rdkafka/config.rb
@@ -120,7 +121,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
120
121
  requirements:
121
122
  - - ">="
122
123
  - !ruby/object:Gem::Version
123
- version: '2.0'
124
+ version: '2.1'
124
125
  required_rubygems_version: !ruby/object:Gem::Requirement
125
126
  requirements:
126
127
  - - ">="