hermann 0.17.0 → 0.18.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: b08abedfd6c15b1c7c1aa5dfe66315501c212512
-  data.tar.gz: 3bf7306d482a260fd73004e4d8087cd5f3a1f063
+  metadata.gz: 92486b735dc5e9e88b0243fabc35340cd3c3a117
+  data.tar.gz: aa5b9762eb50d57943a1c6bf1a78d84abc6c5055
 SHA512:
-  metadata.gz: 901a4eecf276229c561186f5be901b6ed87313d35d6878365621ee0302328870f24fe02661620d633f4e55462aaf953a8036c3da88d5d6e6d66c7e71e9e6a9d4
-  data.tar.gz: 300db74c52df5be4f01078b6098c9a0cd528da811c4681e2fb0dde337c7b5b9bfe1cbe645769f9c616adbc9cfe5a841d9413f2d51208518e2dd1a20a4fce57cf
+  metadata.gz: 52e2b6523b9d0dd60e14a90e73babca4bb07e8ee2a72d2b3eecf799ede905e61813765e9d32d25cfc5194558f4551a914218bb8537e11debea2a13ab629f346e
+  data.tar.gz: 9847e3e907dfcba53523e7f019041a8c73f046efc8f5d1b04afbcec148e9b5d0f72b2aa2fc6154af3db11799ddc55e7646cc3b7687df0bcde43eefbf79259614
@@ -44,6 +44,81 @@ class RdKafkaRecipe < MiniPortile
       raise 'Checksum error!'
     end
   end
+
+  def download_file_http(url, full_path, count = 3)
+    filename = File.basename(full_path)
+    uri = URI.parse(url)
+
+    if ENV['http_proxy']
+      _, userinfo, p_host, p_port = URI.split(ENV['http_proxy'])
+      proxy_user, proxy_pass = userinfo.split(/:/) if userinfo
+      http = Net::HTTP.new(uri.host, uri.port, p_host, p_port, proxy_user, proxy_pass)
+    else
+      http = Net::HTTP.new(uri.host, uri.port)
+
+      if URI::HTTPS === uri
+        http.use_ssl = true
+        http.verify_mode = OpenSSL::SSL::VERIFY_PEER
+
+        store = OpenSSL::X509::Store.new
+
+        # Auto-include system-provided certificates
+        store.set_default_paths
+
+        if ENV.has_key?("SSL_CERT_FILE") && File.exist?(ENV["SSL_CERT_FILE"])
+          store.add_file ENV["SSL_CERT_FILE"]
+        end
+
+        http.cert_store = store
+      end
+    end
+
+    message "Downloading #{filename} "
+    http.start do |h|
+      h.request_get(uri.path, 'Accept-Encoding' => 'identity') do |response|
+        case response
+        when Net::HTTPNotFound
+          output "404 - Not Found"
+          return false
+
+        when Net::HTTPClientError
+          output "Error: Client Error: #{response.inspect}"
+          return false
+
+        when Net::HTTPRedirection
+          raise "Too many redirections for the original URL, halting." if count <= 0
+          url = response["location"]
+          return download_file(url, full_path, count - 1)
+
+        when Net::HTTPOK
+          return with_tempfile(filename, full_path) do |temp_file|
+            size = 0
+            progress = 0
+            total = response.header["Content-Length"].to_i
+
+            if total == 0
+              # There are cases when apparently GitHub.com will return an empty
+              # content-length header, which means we can't really trust the
+              # response, so we'll treat it like a redirect
+              puts "Empty content-length header, retrying"
+              return download_file(url, full_path, count - 1)
+            end
+
+            response.read_body do |chunk|
+              temp_file << chunk
+              size += chunk.size
+              new_progress = (size * 100) / total
+              unless new_progress == progress
+                message "\rDownloading %s (%3d%%) " % [filename, new_progress]
+              end
+              progress = new_progress
+            end
+            output
+          end
+        end
+      end
+    end
+  end
 end
 ################################################################################
 
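The download_file_http method added here appears to be adapted from MiniPortile's own HTTP downloader: it honors the http_proxy environment variable, verifies HTTPS certificates against the system store plus an optional SSL_CERT_FILE bundle, follows redirects with a bounded retry count, and retries when a response arrives with an empty Content-Length header. A rough sketch of the knobs it consults during a native build; the recipe name, version, URL, and paths below are illustrative assumptions, not values taken from the gem:

    # Illustrative only: proxy and CA-bundle settings the downloader reads.
    ENV['http_proxy']    = 'http://user:secret@proxy.example.com:3128'
    ENV['SSL_CERT_FILE'] = '/etc/ssl/certs/ca-bundle.crt'

    recipe = RdKafkaRecipe.new('librdkafka', '0.8.6') # hypothetical name/version
    # Normally invoked internally while the recipe fetches its tarball:
    # 4xx responses return false, redirects recurse with count - 1, and a
    # 200 streams the body into a tempfile while printing progress.
    recipe.send(:download_file_http,
                'https://example.com/librdkafka-0.8.6.tar.gz',
                'ports/archives/librdkafka-0.8.6.tar.gz')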
@@ -534,16 +534,19 @@ void producer_init_kafka(VALUE self, HermannInstanceConfig* config) {
  *
  * @param self VALUE the Ruby producer instance
  * @param message VALUE the ruby String containing the outgoing message.
+ * @param topic VALUE the ruby String containing the topic to use for the
+ *        outgoing message.
  * @param result VALUE the Hermann::Result object to be fulfilled when the
  *        push completes
  */
-static VALUE producer_push_single(VALUE self, VALUE message, VALUE result) {
+static VALUE producer_push_single(VALUE self, VALUE message, VALUE topic, VALUE result) {
 
     HermannInstanceConfig* producerConfig;
     /* Context pointer, pointing to `result`, for the librdkafka delivery
      * callback
      */
     hermann_push_ctx_t *delivery_ctx = (hermann_push_ctx_t *)malloc(sizeof(hermann_push_ctx_t));
+    rd_kafka_topic_t *rkt = NULL;
 
     TRACER("self: %p, message: %p, result: %p)\n", self, message, result);
 
@@ -554,10 +557,9 @@ static VALUE producer_push_single(VALUE self, VALUE message, VALUE result) {
 
     TRACER("producerConfig: %p\n", producerConfig);
 
-    if ((NULL == producerConfig->topic) ||
-            (0 == strlen(producerConfig->topic))) {
-        fprintf(stderr, "Topic is null!\n");
-        rb_raise(rb_eRuntimeError, "Topic cannot be empty");
+    if ((Qnil == topic) ||
+            (0 == RSTRING_LEN(topic))) {
+        rb_raise(rb_eArgError, "Topic cannot be empty");
         return self;
     }
 
@@ -567,6 +569,15 @@ static VALUE producer_push_single(VALUE self, VALUE message, VALUE result) {
 
     TRACER("kafka initialized\n");
 
+    rkt = rd_kafka_topic_new(producerConfig->rk,
+                             RSTRING_PTR(topic),
+                             NULL);
+
+    if (NULL == rkt) {
+        rb_raise(rb_eRuntimeError, "Could not construct a topic structure");
+        return self;
+    }
+
     /* Only pass result through if it's non-nil */
     if (Qnil != result) {
         delivery_ctx->result = result;
@@ -576,7 +587,7 @@ static VALUE producer_push_single(VALUE self, VALUE message, VALUE result) {
     TRACER("rd_kafka_produce() message of %i bytes\n", RSTRING_LEN(message));
 
     /* Send/Produce message. */
-    if (-1 == rd_kafka_produce(producerConfig->rkt,
+    if (-1 == rd_kafka_produce(rkt,
                                producerConfig->partition,
                                RD_KAFKA_MSG_F_COPY,
                                RSTRING_PTR(message),
@@ -590,6 +601,10 @@ static VALUE producer_push_single(VALUE self, VALUE message, VALUE result) {
         /* TODO: raise a Ruby exception here, requires a test though */
     }
 
+    if (NULL != rkt) {
+        rd_kafka_topic_destroy(rkt);
+    }
+
     TRACER("returning\n");
 
     return self;
@@ -913,11 +928,9 @@ static VALUE producer_allocate(VALUE klass) {
  * Set up the configuration context for the Producer instance
  *
  * @param self VALUE the Producer instance
- * @param topic VALUE the Ruby string naming the topic
  * @param brokers VALUE a Ruby string containing host:port pairs separated by commas
  */
 static VALUE producer_initialize(VALUE self,
-                                 VALUE topic,
                                  VALUE brokers) {
 
     HermannInstanceConfig* producerConfig;
@@ -926,12 +939,9 @@ static VALUE producer_initialize(VALUE self,
 
     TRACER("initialize Producer ruby object\n");
 
-
-    topicPtr = StringValuePtr(topic);
     brokersPtr = StringValuePtr(brokers);
     Data_Get_Struct(self, HermannInstanceConfig, producerConfig);
 
-    producerConfig->topic = topicPtr;
     producerConfig->brokers = brokersPtr;
     /** Using RD_KAFKA_PARTITION_UA specifies we want the partitioner callback to be called to determine the target
      * partition
@@ -1011,11 +1021,11 @@ void Init_hermann_lib() {
     rb_define_alloc_func(c_producer, producer_allocate);
 
     /* Initialize */
-    rb_define_method(c_producer, "initialize", producer_initialize, 2);
+    rb_define_method(c_producer, "initialize", producer_initialize, 1);
     rb_define_method(c_producer, "initialize_copy", producer_init_copy, 1);
 
     /* Producer.push_single(msg) */
-    rb_define_method(c_producer, "push_single", producer_push_single, 2);
+    rb_define_method(c_producer, "push_single", producer_push_single, 3);
 
     /* Producer.tick */
     rb_define_method(c_producer, "tick", producer_tick, 1);
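Taken together, the C-extension changes move topic selection from construction time to publish time: the native producer is now built from a broker list alone (initialize arity drops from 2 to 1), and push_single takes the topic as an extra argument (arity 3), creating a temporary rd_kafka_topic_t for the call and destroying it once the message is handed to librdkafka. A rough sketch of the resulting native API as driven from Ruby; the require path, broker, and topic strings are placeholders, and passing nil skips the Hermann::Result wiring, which the C code explicitly allows:

    require 'hermann/producer' # assumed entry point; loads the C extension on MRI

    producer = Hermann::Lib::Producer.new('localhost:9092')    # brokers only
    producer.push_single('hello kafka', 'example.topic', nil)  # message, topic, optional result
    producer.tick(500)                                         # service librdkafka delivery callbacks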
@@ -1,10 +1,11 @@
 require 'hermann'
 require 'zk'
 require 'json'
+require 'hermann/errors'
 
 module Hermann
   module Discovery
-    class NoBrokersError < StandardError; end
+
 
     # Communicates with Zookeeper to discover kafka broker ids
     #
@@ -24,12 +25,16 @@ module Hermann
       #   of 20 times the tickTime2 times the tick time set on server"
       #
       # @return [String] comma separated list of brokers
+      #
+      # @raises [NoBrokersError] if could not discover brokers thru zookeeper
       def get_brokers(timeout=0)
         brokers = []
         ZK.open(zookeepers, {:timeout => timeout}) do |zk|
           brokers = fetch_brokers(zk)
         end
-        raise NoBrokersError if brokers.empty?
+        if brokers.empty?
+          raise Hermann::Errors::NoBrokersError
+        end
         brokers.join(',')
       end
 
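Broker discovery failures now raise the shared Hermann::Errors::NoBrokersError instead of an error class private to the discovery module. A hedged usage sketch; the discovery class name and constructor are assumed from the surrounding code, and the connection string is a placeholder:

    require 'hermann/discovery/zookeeper'

    discovery = Hermann::Discovery::Zookeeper.new('zk1:2181,zk2:2181')
    begin
      brokers = discovery.get_brokers(5)   # e.g. "kafka1:9092,kafka2:9092"
    rescue Hermann::Errors::NoBrokersError
      warn 'no kafka brokers registered in zookeeper'
    end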
@@ -3,6 +3,12 @@ module Hermann
   module Errors
     # Error for connectivity problems with the Kafka brokers
     class ConnectivityError; end;
+
+    # For passing incorrect config and options to kafka
+    class ConfigurationError < StandardError; end
+
+    # cannot discover brokers from zookeeper
+    class NoBrokersError < StandardError; end
   end
 end
 
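Both new error classes descend from StandardError, so callers can rescue them with an ordinary rescue clause; a minimal sketch:

    require 'hermann/errors'

    begin
      # any hermann call that validates configuration or discovers brokers
    rescue Hermann::Errors::ConfigurationError, Hermann::Errors::NoBrokersError => e
      warn "hermann setup failed: #{e.message}"
    end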
@@ -12,13 +12,17 @@ module Hermann
   class Producer
     attr_reader :topic, :brokers, :internal, :children
 
-    def initialize(topic, brokers)
+    # Initialize a producer object with a default topic and broker list
+    #
+    # @param [String] topic The default topic to use for pushing messages
+    # @param [Array] brokers An array of "host:port" strings for the brokers
+    def initialize(topic, brokers, opts={})
       @topic = topic
       @brokers = brokers
       if RUBY_PLATFORM == "java"
-        @internal = Hermann::Provider::JavaProducer.new(topic, brokers)
+        @internal = Hermann::Provider::JavaProducer.new(brokers, opts)
       else
-        @internal = Hermann::Lib::Producer.new(topic, brokers)
+        @internal = Hermann::Lib::Producer.new(brokers)
       end
       # We're tracking children so we can make sure that at Producer exit we
       # make a reasonable attempt to clean up outstanding result objects
@@ -42,23 +46,31 @@ module Hermann
 
     # Push a value onto the Kafka topic passed to this +Producer+
     #
-    # @param [Array] value An array of values to push, will push each one
-    #                 separately
     # @param [Object] value A single object to push
+    # @param [Hash] opts to pass to push method
+    # @option opts [String] :topic The topic to push messages to
+    #
     # @return [Hermann::Result] A future-like object which will store the
     #         result from the broker
-    def push(value)
-      result = create_result
+    def push(value, opts={})
+      topic = opts[:topic] || @topic
+      result = nil
 
       if value.kind_of? Array
-        return value.map { |e| self.push(e) }
-      else
-        if RUBY_PLATFORM == "java"
-          result = @internal.push_single(value)
+        return value.map { |e| self.push(e, opts) }
+      end
+
+      if RUBY_PLATFORM == "java"
+        result = @internal.push_single(value, topic, nil)
+        unless result.nil?
           @children << result
-        else
-          @internal.push_single(value, result)
         end
+        # Reaping children on the push just to make sure that it does get
+        # called correctly and we don't leak memory
+        reap_children
+      else
+        result = create_result
+        @internal.push_single(value, topic, result)
       end
 
       return result
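Producer#push now takes an options hash whose :topic entry overrides the default topic supplied to the constructor; arrays still fan out into one push per element, and on JRuby completed children are reaped on every push. A short usage sketch with placeholder broker and topic names:

    require 'hermann/producer'

    producer = Hermann::Producer.new('default.topic', 'localhost:9092')

    producer.push('plain event')                              # goes to default.topic
    result = producer.push('audit event', :topic => 'audit')  # per-push override
    producer.push(['a', 'b', 'c'], :topic => 'bulk')          # one result per element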
@@ -118,7 +130,7 @@ module Hermann
     end
 
     # Perform the actual reactor tick
-    # @raises [StandardError[ in case of underlying failures in librdkafka
+    # @raises [StandardError] in case of underlying failures in librdkafka
     def execute_tick(timeout)
       if timeout == 0
         @internal.tick(0)
@@ -4,61 +4,103 @@ require 'json'
 
 module Hermann
   module Provider
-
     # This class simulates the kafka producer class within a java environment.
     # If the producer throw an exception within the Promise a call to +.value!+
     # will raise the exception and the rejected flag will be set to true
     #
     class JavaProducer
-      attr_accessor :topic, :producer
+      attr_accessor :producer
+
 
-      def initialize(topic, brokers)
-        @topic = topic
-        properties = create_properties(:brokers => brokers)
+      # Instantiate JavaProducer
+      #
+      # @params [String] list of brokers
+      #
+      # @params [Hash] hash of kafka attributes, overrides defaults
+      #
+      # @raises [RuntimeError] if brokers string is nil/empty
+      #
+      # ==== Examples
+      #
+      #   JavaProducer.new('0:9092', {'request.required.acks' => '1'})
+      #
+      def initialize(brokers, opts={})
+        properties = create_properties(brokers, opts)
         config = create_config(properties)
         @producer = JavaApiUtil::Producer.new(config)
       end
 
       DEFAULTS = {
-        :string_encoder => 'kafka.serializer.StringEncoder',
-        :partitioner => 'kafka.producer.DefaultPartitioner',
-        :required_acks => "1"
+        'serializer.class'      => 'kafka.serializer.StringEncoder',
+        'partitioner.class'     => 'kafka.producer.DefaultPartitioner',
+        'request.required.acks' => '1'
       }.freeze
 
       # Push a value onto the Kafka topic passed to this +Producer+
       #
       # @param [Object] value A single object to push
+      # @param [String] topic to push message to
       #
       # @return +Concurrent::Promise+ Representa a promise to send the
       #         data to the kafka broker. Upon execution the Promise's status
       #         will be set
-      def push_single(msg)
+      def push_single(msg, topic, unused)
         Concurrent::Promise.execute {
-          data = ProducerUtil::KeyedMessage.new(@topic, msg)
+          data = ProducerUtil::KeyedMessage.new(topic, msg)
           @producer.send(data)
         }
       end
 
+      # No-op for now
+      def connected?
+        return false
+      end
+
+      # No-op for now
+      def errored?
+        return false
+      end
+
+      # No-op for now
+      def connect(timeout=0)
+        nil
+      end
+
       private
+
+      # Creates a ProducerConfig object
+      #
+      # @param [Properties] object with broker properties
+      #
       # @return [ProducerConfig] - packaged config for +Producer+
       def create_config(properties)
        ProducerUtil::ProducerConfig.new(properties)
       end
 
+      # Creates Properties Object
+      #
+      # @param [Hash] brokers passed into this function
+      # @option args [String] :brokers - string of brokers
+      #
       # @return [Properties] properties object for creating +ProducerConfig+
-      def create_properties(args={})
-        brokers = args[:brokers]
-        str_encoder = DEFAULTS[:string_encoder]
-        partitioner = DEFAULTS[:partitioner]
-        acks = DEFAULTS[:required_acks]
-
+      #
+      # @raises [RuntimeError] if options does not contain key value strings
+      def create_properties(brokers, opts={})
+        brokers = { 'metadata.broker.list' => brokers }
+        options = DEFAULTS.merge(brokers).merge(opts)
         properties = JavaUtil::Properties.new
-        properties.put('metadata.broker.list', brokers)
-        properties.put('serializer.class', str_encoder)
-        properties.put('partitioner.class', partitioner)
-        properties.put('request.required.acks', acks)
+        options.each do |key, val|
+          validate_property!(key, val)
+          properties.put(key, val)
+        end
         properties
       end
+
+      def validate_property!(key, val)
+        if key.to_s.empty? || val.to_s.empty?
+          raise Hermann::Errors::ConfigurationError, "Invalid Broker Properties"
+        end
+      end
     end
   end
 end
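On JRuby, JavaProducer is now constructed from a broker string plus an options hash of raw Kafka property names; the options are merged over the string-keyed DEFAULTS, and any empty key or value raises Hermann::Errors::ConfigurationError. A sketch of the merge behavior, with placeholder broker addresses and assuming a JRuby environment with the Kafka jars on the classpath:

    producer = Hermann::Provider::JavaProducer.new('kafka1:9092,kafka2:9092',
                                                   'request.required.acks' => '0')
    # Effective properties after create_properties merges the defaults, the
    # broker list, and the overrides:
    #   serializer.class      => kafka.serializer.StringEncoder    (default)
    #   partitioner.class     => kafka.producer.DefaultPartitioner (default)
    #   request.required.acks => '0'                                (override)
    #   metadata.broker.list  => 'kafka1:9092,kafka2:9092'
    promise = producer.push_single('hello', 'example.topic', nil)
    promise.value! # blocks on the Concurrent::Promise; re-raises if the send failed

    # An empty key or value is rejected up front:
    Hermann::Provider::JavaProducer.new('kafka1:9092', 'acks' => '')
    # => raises Hermann::Errors::ConfigurationError, "Invalid Broker Properties"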
@@ -1,3 +1,3 @@
 module Hermann
-  VERSION = '0.17.0'
+  VERSION = '0.18.0'
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: hermann
 version: !ruby/object:Gem::Version
-  version: 0.17.0
+  version: 0.18.0
 platform: ruby
 authors:
 - R. Tyler Croy
@@ -10,7 +10,7 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2014-10-10 00:00:00 Z
+date: 2014-10-14 00:00:00 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: concurrent-ruby