fluent-plugin-kafka 0.0.9 → 0.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 9cad342ed5984ebc424872a660379b6e471a135d
4
- data.tar.gz: 73ea1a4a10f3584f719c45582d54f52b67b52c72
3
+ metadata.gz: 887e010ab7b1ac813f4fa0c412d8348ffc192831
4
+ data.tar.gz: 52f631a447899497ac9765294f697783755ae5ab
5
5
  SHA512:
6
- metadata.gz: 82ff85d1bc2353109a05092f8cda28237bb4b24cec7af7cce539c55c8d4125e3a9a96e540e43ad33bf2dfe63c1120585b4c3ba7430d70812b973894a92cfc79b
7
- data.tar.gz: ff574d372eee407d78e199809551bd1ac397c611b018b665416b0c8812cfe3f442b48e75aa320e66933b72a166172d9c09cc0fca913e5141c202d18a0cdd794a
6
+ metadata.gz: dcd0a646d4cbb926fa3503a00e75f0ac6188e4a7a5bd8a3c3332481c2989dd913a3d4f072d839c35236e0a9ae2728076db2426d2664ec0aee8778a86246673f6
7
+ data.tar.gz: bf1f03fb1134a9e16153156332da8013450c8f5a5cbc45a343ff9a6bcb4c682a8c5858339267a588a0246edce614f51f9b8ff5babdde5f26fb3a0c3b36d75bb8
@@ -12,7 +12,7 @@ Gem::Specification.new do |gem|
12
12
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
13
13
  gem.name = "fluent-plugin-kafka"
14
14
  gem.require_paths = ["lib"]
15
- gem.version = '0.0.9'
15
+ gem.version = '0.0.10'
16
16
  gem.add_dependency 'fluentd'
17
17
  gem.add_dependency 'poseidon'
18
18
  gem.add_dependency 'ltsv'
@@ -23,23 +23,39 @@ class Fluent::KafkaOutput < Fluent::Output
23
23
  attr_accessor :output_data_type
24
24
  attr_accessor :field_separator
25
25
 
26
- def configure(conf)
27
- super
26
+ @seed_brokers = []
27
+
28
+ def refresh_producer()
28
29
  if @zookeeper
29
- require 'zookeeper'
30
- require 'yajl'
31
30
  @seed_brokers = []
32
31
  z = Zookeeper.new(@zookeeper)
33
32
  z.get_children(:path => '/brokers/ids')[:children].each do |id|
34
33
  broker = Yajl.load(z.get(:path => "/brokers/ids/#{id}")[:data])
35
34
  @seed_brokers.push("#{broker['host']}:#{broker['port']}")
36
35
  end
37
- log.info "brokers has been set via Zookeeper: #{@seed_brokers}"
36
+ log.info "brokers has been refreshed via Zookeeper: #{@seed_brokers}"
37
+ end
38
+ begin
39
+ if @seed_brokers.length > 0
40
+ @producer = Poseidon::Producer.new(@seed_brokers, @client_id, :max_send_retries => @max_send_retries, :required_acks => @required_acks, :ack_timeout_ms => @ack_timeout_ms)
41
+ log.info "initialized producer #{@client_id}"
42
+ else
43
+ log.warn "No brokers found on Zookeeper"
44
+ end
45
+ rescue Exception => e
46
+ log.error e
47
+ end
48
+ end
49
+
50
+ def configure(conf)
51
+ super
52
+ if @zookeeper
53
+ require 'zookeeper'
54
+ require 'yajl'
38
55
  else
39
56
  @seed_brokers = @brokers.match(",").nil? ? [@brokers] : @brokers.split(",")
40
57
  log.info "brokers has been set directly: #{@seed_brokers}"
41
58
  end
42
- @producers = {} # keyed by topic:partition
43
59
  case @output_data_type
44
60
  when 'json'
45
61
  require 'yajl'
@@ -72,6 +88,7 @@ class Fluent::KafkaOutput < Fluent::Output
72
88
 
73
89
  def start
74
90
  super
91
+ refresh_producer()
75
92
  end
76
93
 
77
94
  def shutdown
@@ -100,15 +117,20 @@ class Fluent::KafkaOutput < Fluent::Output
100
117
  end
101
118
 
102
119
  def emit(tag, es, chain)
103
- chain.next
104
- es.each do |time,record|
105
- record['time'] = time if @output_include_time
106
- record['tag'] = tag if @output_include_tag
107
- topic = record['topic'] || self.default_topic || tag
108
- partition = record['partition'] || self.default_partition
109
- message = Poseidon::MessageToSend.new(topic, parse_record(record))
110
- @producers[topic] ||= Poseidon::Producer.new(@seed_brokers, self.client_id, :max_send_retries => @max_send_retries, :required_acks => @required_acks, :ack_timeout_ms => @ack_timeout_ms)
111
- @producers[topic].send_messages([message])
120
+ begin
121
+ chain.next
122
+ es.each do |time,record|
123
+ record['time'] = time if @output_include_time
124
+ record['tag'] = tag if @output_include_tag
125
+ topic = record['topic'] || self.default_topic || tag
126
+ partition = record['partition'] || self.default_partition
127
+ message = Poseidon::MessageToSend.new(topic, parse_record(record))
128
+ @producer.send_messages([message])
129
+ end
130
+ rescue Exception => e
131
+ log.warn("Send exception occurred: #{e}")
132
+ refresh_producer()
133
+ raise e
112
134
  end
113
135
  end
114
136
 
@@ -29,18 +29,35 @@ class Fluent::KafkaOutputBuffered < Fluent::BufferedOutput
29
29
  define_method("log") { $log }
30
30
  end
31
31
 
32
- def configure(conf)
33
- super
32
+ @seed_brokers = []
33
+
34
+ def refresh_producer()
34
35
  if @zookeeper
35
- require 'zookeeper'
36
- require 'yajl'
37
36
  @seed_brokers = []
38
37
  z = Zookeeper.new(@zookeeper)
39
38
  z.get_children(:path => '/brokers/ids')[:children].each do |id|
40
39
  broker = Yajl.load(z.get(:path => "/brokers/ids/#{id}")[:data])
41
40
  @seed_brokers.push("#{broker['host']}:#{broker['port']}")
42
41
  end
43
- log.info "brokers has been set via Zookeeper: #{@seed_brokers}"
42
+ log.info "brokers has been refreshed via Zookeeper: #{@seed_brokers}"
43
+ end
44
+ begin
45
+ if @seed_brokers.length > 0
46
+ @producer = Poseidon::Producer.new(@seed_brokers, @client_id, :max_send_retries => @max_send_retries, :required_acks => @required_acks, :ack_timeout_ms => @ack_timeout_ms)
47
+ log.info "initialized producer #{@client_id}"
48
+ else
49
+ log.warn "No brokers found on Zookeeper"
50
+ end
51
+ rescue Exception => e
52
+ log.error e
53
+ end
54
+ end
55
+
56
+ def configure(conf)
57
+ super
58
+ if @zookeeper
59
+ require 'zookeeper'
60
+ require 'yajl'
44
61
  else
45
62
  @seed_brokers = @brokers.match(",").nil? ? [@brokers] : @brokers.split(",")
46
63
  log.info "brokers has been set directly: #{@seed_brokers}"
@@ -77,8 +94,7 @@ class Fluent::KafkaOutputBuffered < Fluent::BufferedOutput
77
94
 
78
95
  def start
79
96
  super
80
- @producer = Poseidon::Producer.new(@seed_brokers, @client_id, :max_send_retries => @max_send_retries, :required_acks => @required_acks, :ack_timeout_ms => @ack_timeout_ms)
81
- log.info "initialized producer #{@client_id}"
97
+ refresh_producer()
82
98
  end
83
99
 
84
100
  def shutdown
@@ -115,31 +131,37 @@ class Fluent::KafkaOutputBuffered < Fluent::BufferedOutput
115
131
  bytes_by_topic = {}
116
132
  messages = []
117
133
  messages_bytes = 0
118
- chunk.msgpack_each { |tag, time, record|
119
- record['time'] = time if @output_include_time
120
- record['tag'] = tag if @output_include_tag
121
- topic = record['topic'] || @default_topic || tag
122
-
123
- records_by_topic[topic] ||= 0
124
- bytes_by_topic[topic] ||= 0
125
-
126
- record_buf = parse_record(record)
127
- record_buf_bytes = record_buf.bytesize
128
- if messages.length > 0 and messages_bytes + record_buf_bytes > @kafka_agg_max_bytes
134
+ begin
135
+ chunk.msgpack_each { |tag, time, record|
136
+ record['time'] = time if @output_include_time
137
+ record['tag'] = tag if @output_include_tag
138
+ topic = record['topic'] || @default_topic || tag
139
+
140
+ records_by_topic[topic] ||= 0
141
+ bytes_by_topic[topic] ||= 0
142
+
143
+ record_buf = parse_record(record)
144
+ record_buf_bytes = record_buf.bytesize
145
+ if messages.length > 0 and messages_bytes + record_buf_bytes > @kafka_agg_max_bytes
146
+ @producer.send_messages(messages)
147
+ messages = []
148
+ messages_bytes = 0
149
+ end
150
+ messages << Poseidon::MessageToSend.new(topic, record_buf)
151
+ messages_bytes += record_buf_bytes
152
+
153
+ records_by_topic[topic] += 1
154
+ bytes_by_topic[topic] += record_buf_bytes
155
+ }
156
+ if messages.length > 0
129
157
  @producer.send_messages(messages)
130
- messages = []
131
- messages_bytes = 0
132
158
  end
133
- messages << Poseidon::MessageToSend.new(topic, record_buf)
134
- messages_bytes += record_buf_bytes
135
-
136
- records_by_topic[topic] += 1
137
- bytes_by_topic[topic] += record_buf_bytes
138
- }
139
- if messages.length > 0
140
- @producer.send_messages(messages)
159
+ log.debug "(records|bytes) (#{records_by_topic}|#{bytes_by_topic})"
141
160
  end
142
- log.debug "(records|bytes) (#{records_by_topic}|#{bytes_by_topic})"
161
+ rescue Exception => e
162
+ log.warn "Send exception occurred: #{e}"
163
+ refresh_producer()
164
+ # Raise exception to retry sending messages
165
+ raise e
143
166
  end
144
-
145
167
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fluent-plugin-kafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.9
4
+ version: 0.0.10
5
5
  platform: ruby
6
6
  authors:
7
7
  - Hidemasa Togashi
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2015-01-18 00:00:00.000000000 Z
11
+ date: 2015-03-21 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: fluentd