fluent-plugin-kafka 0.3.1 → 0.3.2

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: f58dd81646c8ff29f5f9e55bb200fda728ef83f1
4
- data.tar.gz: 2b47f106dfebc12704d5b751290bfff7a42a44ad
3
+ metadata.gz: 512f9e6a1ab34f3206a79904ff3134da1ee6de54
4
+ data.tar.gz: 27bcf99b9a4eac5ed2cf15694a249db4f3db6ffb
5
5
  SHA512:
6
- metadata.gz: 6ecc97a6901090f2f9cd54df11e0af55bc9df901ae6c5186867bd14f26796e762b8d1b3e3140d050827408094adb3d0f99a849bd4171d466e4bbe05b806b2cb8
7
- data.tar.gz: 41800c1ddd32cd17cccb0af7cdaff2da11e3f7354f050c4b554ce35aeafb1ddff486f41dcee41806dc9ee002cb0c068025183d9e96996e6c45dba4337c10740b
6
+ metadata.gz: 0d926b09af1ed8611d85a40eb353aeda21b39f862c85a4131ada9e9fcfa9d057157fb107a56808af905c89c669c6a452b03eef508e665ae3957f7599c25c19af
7
+ data.tar.gz: 60d029566a17521e19dd93e0e6d2cefd35990a8b14d89202c42ba6cf6512608f78dced06e23d0e1386e95d206173ad78a77c65dba081f1c3e5adc150f74c07d4
data/README.md CHANGED
@@ -138,6 +138,11 @@ Install snappy module before you use snappy compression.
138
138
 
139
139
  $ gem install snappy
140
140
 
141
+ The snappy gem uses a native extension, so you need to install several packages beforehand.
142
+ On Ubuntu, you need the development packages and the snappy library.
143
+
144
+ $ sudo apt-get install build-essential autoconf automake libtool libsnappy-dev
145
+
141
146
  #### Load balancing
142
147
 
143
148
  Messages will be assigned a partition at random as default by ruby-kafka, but messages with the same partition key will always be assigned to the same partition by setting `default_partition_key` in config file.
@@ -12,7 +12,7 @@ Gem::Specification.new do |gem|
12
12
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
13
13
  gem.name = "fluent-plugin-kafka"
14
14
  gem.require_paths = ["lib"]
15
- gem.version = '0.3.1'
15
+ gem.version = '0.3.2'
16
16
  gem.required_ruby_version = ">= 2.1.0"
17
17
 
18
18
  gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
@@ -68,6 +68,15 @@ class Fluent::KafkaGroupInput < Fluent::Input
68
68
  end
69
69
 
70
70
  @parser_proc = setup_parser
71
+
72
+ @consumer_opts = {:group_id => @consumer_group}
73
+ @consumer_opts[:session_timeout] = @session_timeout if @session_timeout
74
+ @consumer_opts[:offset_commit_interval] = @offset_commit_interval if @offset_commit_interval
75
+ @consumer_opts[:offset_commit_threshold] = @offset_commit_threshold if @offset_commit_threshold
76
+
77
+ @fetch_opts = {}
78
+ @fetch_opts[:max_wait_time] = @max_wait_time if @max_wait_time
79
+ @fetch_opts[:min_bytes] = @min_bytes if @min_bytes
71
80
  end
72
81
 
73
82
  def setup_parser
@@ -89,55 +98,64 @@ class Fluent::KafkaGroupInput < Fluent::Input
89
98
  def start
90
99
  super
91
100
 
92
- consumer_opts = {:group_id => @consumer_group}
93
- consumer_opts[:session_timeout] = @session_timeout if @session_timeout
94
- consumer_opts[:offset_commit_interval] = @offset_commit_interval if @offset_commit_interval
95
- consumer_opts[:offset_commit_threshold] = @offset_commit_threshold if @offset_commit_threshold
96
-
97
- @fetch_opts = {}
98
- @fetch_opts[:max_wait_time] = @max_wait_time if @max_wait_time
99
- @fetch_opts[:min_bytes] = @min_bytes if @min_bytes
100
-
101
101
  @kafka = Kafka.new(seed_brokers: @brokers,
102
102
  ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
103
103
  ssl_client_cert: read_ssl_file(@ssl_client_cert),
104
104
  ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key))
105
- @consumer = @kafka.consumer(consumer_opts)
106
- @topics.each { |topic|
107
- @consumer.subscribe(topic, start_from_beginning: @start_from_beginning)
108
- }
105
+ @consumer = setup_consumer
109
106
  @thread = Thread.new(&method(:run))
110
107
  end
111
108
 
112
109
  def shutdown
113
- @consumer.stop
110
+ # This nil assignment should be guarded by mutex in multithread programming manner.
111
+ # But the situation is very low contention, so we don't use mutex for now.
112
+ # If the problem happens, we will add a guard for consumer.
113
+ consumer = @consumer
114
+ @consumer = nil
115
+ consumer.stop
116
+
114
117
  @thread.join
115
118
  @kafka.close
116
119
  super
117
120
  end
118
121
 
122
+ def setup_consumer
123
+ consumer = @kafka.consumer(@consumer_opts)
124
+ @topics.each { |topic|
125
+ consumer.subscribe(topic, start_from_beginning: @start_from_beginning)
126
+ }
127
+ consumer
128
+ end
129
+
119
130
  def run
120
- @consumer.each_batch(@fetch_opts) { |batch|
121
- es = Fluent::MultiEventStream.new
122
- tag = batch.topic
123
- tag = @add_prefix + "." + tag if @add_prefix
124
- tag = tag + "." + @add_suffix if @add_suffix
125
-
126
- batch.messages.each { |msg|
127
- begin
128
- es.add(Fluent::Engine.now, @parser_proc.call(msg))
129
- rescue => e
130
- $log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
131
- $log.debug_backtrace
132
- end
133
- }
134
-
135
- unless es.empty?
136
- router.emit_stream(tag, es)
131
+ while @consumer
132
+ begin
133
+ @consumer.each_batch(@fetch_opts) { |batch|
134
+ es = Fluent::MultiEventStream.new
135
+ tag = batch.topic
136
+ tag = @add_prefix + "." + tag if @add_prefix
137
+ tag = tag + "." + @add_suffix if @add_suffix
138
+
139
+ batch.messages.each { |msg|
140
+ begin
141
+ es.add(Fluent::Engine.now, @parser_proc.call(msg))
142
+ rescue => e
143
+ log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
144
+ log.debug_backtrace
145
+ end
146
+ }
147
+
148
+ unless es.empty?
149
+ router.emit_stream(tag, es)
150
+ end
151
+ }
152
+ rescue => e
153
+ log.error "unexpected error during consuming events from kafka. Re-fetch events.", :error => e.to_s
154
+ log.error_backtrace
137
155
  end
138
- }
156
+ end
139
157
  rescue => e
140
- $log.error "unexpected error", :error => e.to_s
141
- $log.error_backtrace
158
+ log.error "unexpected error during consumer object access", :error => e.to_s
159
+ log.error_backtrace
142
160
  end
143
161
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fluent-plugin-kafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.1
4
+ version: 0.3.2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2016-08-27 00:00:00.000000000 Z
12
+ date: 2016-10-06 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: fluentd