rdkafka 0.1.2 → 0.1.8

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: befdc0d1b36b36c452e380b639e0fa02636e9d0b
-  data.tar.gz: 5e18316623374fb32876dae52de52a5a99fab49b
+  metadata.gz: a9b016b16ea543f75f53572f0a16e89aae152000
+  data.tar.gz: ea73ee4d951be90743901a7b9e2fe75708823ebb
 SHA512:
-  metadata.gz: b100c399dee2eae775f78eb4dd8efd90cdc747d6cf88979ac6f5834e5e2f081f902b5591225a4270cfb74bcdc5b7098ac7da74b978c80d8fae761c6fe623dcc2
-  data.tar.gz: 993a439bb4883b25aff235b87c73fc4ac5993b2e076b414e50e6bc4947ef0d8d8e29b0a16d780d1e8697a845fff664fa002075aebec13ff8cfd3bacf11ea13d2
+  metadata.gz: 75f6817837f86d3b136a98fe87850bb63275bc389a5bdb7abfb8dd3dfd14633a73ac0073a26cd98d596b63901bf3796e4ec87f6e4c2c25320328f250fd960308
+  data.tar.gz: 7cac0e27475bd2571e6eb104fcaf977efed4e306b04cd8c735b2c390d119eff0f2538eced0a06f66c90c36d60ecc5e569b8bd77fe7f01e4c38ca40eaba960fda
data/.gitignore CHANGED
@@ -1,4 +1,5 @@
 Gemfile.lock
 ext/ports
 ext/tmp
+ext/librdkafka.*
 *.gem
data/ext/Rakefile CHANGED
@@ -2,14 +2,30 @@ require File.expand_path('../../lib/rdkafka/version', __FILE__)
 require "mini_portile2"
 require "fileutils"
 
-task :default do
+task :default => :clean do
+  # Download and compile librdkafka
   recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
   recipe.files = ["https://github.com/edenhill/librdkafka/archive/v#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz"]
   recipe.configure_options = ["--host=#{recipe.host}"]
   recipe.cook
+  # Move dynamic library we're interested in
+  if recipe.host.include?('darwin')
+    from_extension = '1.dylib'
+    to_extension = 'dylib'
+  else
+    from_extension = 'so.1'
+    to_extension = 'so'
+  end
+  lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
+  FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
+  # Cleanup files created by miniportile we don't need in the gem
+  FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
+  FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
 end
 
 task :clean do
-  FileUtils.rm_rf "ports"
-  FileUtils.rm_rf "tmp"
+  FileUtils.rm_f File.join(File.dirname(__FILE__), "librdkafka.dylib")
+  FileUtils.rm_f File.join(File.dirname(__FILE__), "librdkafka.so")
+  FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
+  FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
 end
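
With this change the ext Rakefile's default task depends on :clean, compiles librdkafka through mini_portile2, moves the resulting shared library to ext/librdkafka.so (Linux) or ext/librdkafka.dylib (macOS), and removes the ports/ and tmp/ build directories so they don't end up in the gem. A minimal sketch of driving that build by hand from a checkout of the gem; the rake invocation and error handling here are illustrative, not part of the gem:

  # Build the bundled librdkafka manually (assumes rake and mini_portile2 are installed).
  ok = system("rake", chdir: "ext")   # runs :default, which first runs :clean
  abort("librdkafka build failed") unless ok
  # On success, ext/ contains librdkafka.so (Linux) or librdkafka.dylib (macOS).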
data/lib/rdkafka/config.rb CHANGED
@@ -1,5 +1,17 @@
+require "logger"
+
 module Rdkafka
   class Config
+    @@logger = Logger.new(STDOUT)
+
+    def self.logger
+      @@logger
+    end
+
+    def self.logger=(logger)
+      @@logger=logger
+    end
+
     DEFAULT_CONFIG = {
       :"api.version.request" => true
     }
@@ -39,23 +51,22 @@ module Rdkafka
     # This method is only intented to be used to create a client,
     # using it in another way will leak memory.
     def native_config
-      config = Rdkafka::FFI.rd_kafka_conf_new
-
-      @config_hash.each do |key, value|
-        error_buffer = ::FFI::MemoryPointer.from_string(" " * 256)
-        result = Rdkafka::FFI.rd_kafka_conf_set(
-          config,
-          key.to_s,
-          value.to_s,
-          error_buffer,
-          256
-        )
-        unless result == :config_ok
-          raise ConfigError.new(error_buffer.read_string)
+      Rdkafka::FFI.rd_kafka_conf_new.tap do |config|
+        @config_hash.each do |key, value|
+          error_buffer = ::FFI::MemoryPointer.from_string(" " * 256)
+          result = Rdkafka::FFI.rd_kafka_conf_set(
+            config,
+            key.to_s,
+            value.to_s,
+            error_buffer,
+            256
+          )
+          unless result == :config_ok
+            raise ConfigError.new(error_buffer.read_string)
+          end
         end
+        Rdkafka::FFI.rd_kafka_conf_set_log_cb(config, Rdkafka::FFI::LogCallback)
       end
-
-      config
     end
 
     def native_kafka(config, type)
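
Config now exposes a process-wide logger (defaulting to Logger.new(STDOUT)), and native_config registers the FFI LogCallback so librdkafka's own log lines are forwarded to it. A small usage sketch, assuming the Config#producer API from the gem's README; the log file path and broker address are placeholders:

  require "rdkafka"
  require "logger"

  # Send librdkafka's log output to a file instead of STDOUT.
  Rdkafka::Config.logger = Logger.new("kafka.log")

  config = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092")
  producer = config.producer   # librdkafka log lines now end up in kafka.log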
data/lib/rdkafka/ffi.rb CHANGED
@@ -1,10 +1,19 @@
 require "ffi"
-require "mini_portile2"
+require "logger"
 
 module Rdkafka
   module FFI
     extend ::FFI::Library
-    ffi_lib File.join(File.dirname(__FILE__), "../../ext/ports/#{MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION).host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.dylib")
+
+    def self.lib_extension
+      if Gem::Platform.local.os.include?("darwin")
+        'dylib'
+      else
+        'so'
+      end
+    end
+
+    ffi_lib File.join(File.dirname(__FILE__), "../../ext/librdkafka.#{lib_extension}")
 
     # Polling
 
@@ -27,6 +36,10 @@ module Rdkafka
         self[:err]
       end
 
+      def topic
+        FFI.rd_kafka_topic_name(self[:rkt])
+      end
+
       def partition
         self[:partition]
       end
@@ -52,7 +65,7 @@ module Rdkafka
       end
 
       def to_s
-        "Message with key '#{key}', payload '#{payload}', partition '#{partition}', offset '#{offset}'"
+        "Message in '#{topic}' with key '#{key}', payload '#{payload}', partition '#{partition}', offset '#{offset}'"
       end
 
       def self.release(ptr)
@@ -61,6 +74,7 @@ module Rdkafka
     end
 
     attach_function :rd_kafka_message_destroy, [:pointer], :void
+    attach_function :rd_kafka_topic_name, [:pointer], :string
 
     # TopicPartition ad TopicPartitionList structs
 
@@ -100,6 +114,26 @@ module Rdkafka
 
     attach_function :rd_kafka_conf_new, [], :pointer
     attach_function :rd_kafka_conf_set, [:pointer, :string, :string, :pointer, :int], :kafka_config_response
+    callback :log_cb, [:pointer, :int, :string, :string], :void
+    attach_function :rd_kafka_conf_set_log_cb, [:pointer, :log_cb], :void
+
+    LogCallback = Proc.new do |client_ptr, level, level_string, line|
+      severity = case level
+                 when 0 || 1 || 2
+                   Logger::FATAL
+                 when 3
+                   Logger::ERROR
+                 when 4
+                   Logger::WARN
+                 when 5 || 6
+                   Logger::INFO
+                 when 7
+                   Logger::DEBUG
+                 else
+                   Logger::UNKNOWN
+                 end
+      Rdkafka::Config.logger.add(severity) { line }
+    end
 
     # Handle
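
One caveat about the LogCallback above: in Ruby `0 || 1 || 2` evaluates to 0 and `5 || 6` to 5, so `when 0 || 1 || 2` only matches level 0 and `when 5 || 6` only level 5; syslog levels 1, 2 and 6 therefore fall through to Logger::UNKNOWN. A sketch of an explicit mapping using comma-separated `when` values, purely for illustration and not part of the gem:

  require "logger"

  # Map librdkafka's syslog-style levels to Logger severities.
  def severity_for(level)
    case level
    when 0, 1, 2 then Logger::FATAL   # emerg, alert, crit
    when 3 then Logger::ERROR
    when 4 then Logger::WARN
    when 5, 6 then Logger::INFO       # notice, info
    when 7 then Logger::DEBUG
    else Logger::UNKNOWN
    end
  end

  severity_for(6) # => Logger::INFO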
@@ -62,6 +62,8 @@ module Rdkafka
     end
   end
 
+  class WaitTimeoutError < RuntimeError; end
+
   class DeliveryHandle < ::FFI::Struct
     layout :pending, :bool,
            :response, :int,
@@ -73,10 +75,18 @@ module Rdkafka
     end
 
     # Wait for the delivery report
-    def wait
+    def wait(timeout_in_seconds=10)
+      timeout = if timeout_in_seconds
+        Time.now.to_i + timeout_in_seconds
+      else
+        nil
+      end
       loop do
         if pending?
-          sleep 0.05
+          if timeout && timeout <= Time.now.to_i
+            raise WaitTimeoutError.new("Waiting for delivery timed out after #{timeout_in_seconds} seconds")
+          end
+          sleep 0.1
           next
         elsif self[:response] != 0
           raise RdkafkaError.new(self[:response])
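
DeliveryHandle#wait now polls every 0.1 seconds and raises Rdkafka::WaitTimeoutError once the timeout (10 seconds by default) has passed; passing nil as the timeout restores the old wait-forever behaviour. A usage sketch, assuming the produce(topic:, payload:, key:) producer API from the gem's README and a placeholder broker address:

  require "rdkafka"

  config = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092")
  producer = config.producer

  handle = producer.produce(topic: "test-topic", payload: "hello", key: "key 1")
  begin
    handle.wait(5)   # give up after 5 seconds instead of the default 10
  rescue Rdkafka::WaitTimeoutError => e
    puts "Delivery was not confirmed in time: #{e.message}"
  end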
data/lib/rdkafka/version.rb CHANGED
@@ -1,4 +1,4 @@
 module Rdkafka
-  VERSION = "0.1.2"
+  VERSION = "0.1.8"
   LIBRDKAFKA_VERSION = "0.11.0"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.1.2
+  version: 0.1.8
 platform: ruby
 authors:
 - Thijs Cadier
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-08-30 00:00:00.000000000 Z
+date: 2017-08-31 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi