fluent-plugin-webhdfs 0.4.1 → 0.4.2

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 5e43e061acd7552d5a0e482bf7ced1e319d1d107
-  data.tar.gz: 934ac37c1a6d985261ae573ce0724546481269ff
+  metadata.gz: 61f1146fdfa68de12937082f2321f097e4aa5858
+  data.tar.gz: 502673646e6db2db20973079a4c2056515b7de11
 SHA512:
-  metadata.gz: be9c06348692d4f0506f0e7d8caa1fc4d6bfa4d683c1fdb0daa3fd87d4657ddbebaff4380c3fc03405030836cabc2fb856e02754aa8bccff1ddd5f7c7b49980f
-  data.tar.gz: fee1d87a57a64ea65ca14a2f3f7765eb115bd362d504f0ccade232132a384b4d8f599ccd264c8cdcb2f326c4a8062b7c5e456e4cf11c8cc7fab5d78dadf09e3a
+  metadata.gz: b634dd49ac2d1e3d552e7dbda396cc5aca47c0cd090219c5464a3a834ec9ffd5c0030d013474e4b4ac83870252f45af567788d97e0671113278d41bd168b969a
+  data.tar.gz: 90dfe3fc5e45a3667df905874607875cba4cd18e49e22cc05318dd4141ec1e1cec3d36d933ac15634d780ef46ab75565d63fea6ffcc5c3678b901ced071042c4
.travis.yml CHANGED
@@ -1,12 +1,15 @@
 language: ruby
 
 rvm:
-  - 1.9.3
   - 2.0.0
-  - 2.1.1
+  - 2.1
+  - 2.2
 
 branches:
   only:
     - master
 
+before_install:
+  - gem update bundler
+
 script: bundle exec rake test
fluent-plugin-webhdfs.gemspec CHANGED
@@ -2,13 +2,13 @@
 
 Gem::Specification.new do |gem|
   gem.name = "fluent-plugin-webhdfs"
-  gem.version = "0.4.1"
+  gem.version = "0.4.2"
   gem.authors = ["TAGOMORI Satoshi"]
   gem.email = ["tagomoris@gmail.com"]
   gem.summary = %q{Fluentd plugin to write data on HDFS over WebHDFS, with flexible formatting}
   gem.description = %q{For WebHDFS and HttpFs of Hadoop HDFS}
   gem.homepage = "https://github.com/fluent/fluent-plugin-webhdfs"
-  gem.license = "APLv2"
+  gem.license = "Apache-2.0"
 
   gem.files = `git ls-files`.split($\)
   gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
@@ -16,8 +16,10 @@ Gem::Specification.new do |gem|
   gem.require_paths = ["lib"]
 
   gem.add_development_dependency "rake"
+  gem.add_development_dependency "test-unit"
   gem.add_runtime_dependency "fluentd", '>= 0.10.53'
   gem.add_runtime_dependency "fluent-mixin-plaintextformatter", '>= 0.2.1'
   gem.add_runtime_dependency "fluent-mixin-config-placeholders", ">= 0.3.0"
   gem.add_runtime_dependency "webhdfs", '>= 0.6.0'
+  gem.add_runtime_dependency "bzip2-ffi"
 end
lib/fluent/plugin/out_webhdfs.rb CHANGED
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 
+require 'tempfile'
+
 require 'fluent/mixin/config_placeholders'
 require 'fluent/mixin/plaintextformatter'
 
@@ -9,40 +11,66 @@ class Fluent::WebHDFSOutput < Fluent::TimeSlicedOutput
   config_set_default :buffer_type, 'memory'
   config_set_default :time_slice_format, '%Y%m%d'
 
+  # For fluentd v0.12.16 or earlier
+  class << self
+    unless method_defined?(:desc)
+      def desc(description)
+      end
+    end
+  end
+
+  desc 'WebHDFS/HttpFs host'
   config_param :host, :string, :default => nil
+  desc 'WebHDFS/HttpFs port'
   config_param :port, :integer, :default => 50070
+  desc 'Namenode (host:port)'
   config_param :namenode, :string, :default => nil # host:port
+  desc 'Standby namenode for Namenode HA (host:port)'
   config_param :standby_namenode, :string, :default => nil # host:port
 
+  desc 'Ignore errors on start up'
   config_param :ignore_start_check_error, :bool, :default => false
 
   include Fluent::Mixin::ConfigPlaceholders
 
+  desc 'Output file path on HDFS'
   config_param :path, :string
+  desc 'User name for pseudo authentication'
   config_param :username, :string, :default => nil
 
+  desc 'Store data over HttpFs instead of WebHDFS'
   config_param :httpfs, :bool, :default => false
 
+  desc 'Number of seconds to wait for the connection to open'
   config_param :open_timeout, :integer, :default => 30 # from ruby net/http default
+  desc 'Number of seconds to wait for one block to be read'
   config_param :read_timeout, :integer, :default => 60 # from ruby net/http default
 
+  desc 'Retry automatically when known errors of HDFS are occurred'
   config_param :retry_known_errors, :bool, :default => false
+  desc 'Retry interval'
   config_param :retry_interval, :integer, :default => nil
+  desc 'The number of retries'
   config_param :retry_times, :integer, :default => nil
 
   # how many times of write failure before switch to standby namenode
   # by default it's 11 times that costs 1023 seconds inside fluentd,
   # which is considered enough to exclude the scenes that caused by temporary network fail or single datanode fail
+  desc 'How many times of write failure before switch to standby namenode'
   config_param :failures_before_use_standby, :integer, :default => 11
 
   include Fluent::Mixin::PlainTextFormatter
 
   config_param :default_tag, :string, :default => 'tag_missing'
 
+  desc 'Append data or not'
   config_param :append, :bool, :default => true
 
+  desc 'Use SSL or not'
   config_param :ssl, :bool, :default => false
+  desc 'OpenSSL certificate authority file'
   config_param :ssl_ca_file, :string, :default => nil
+  desc 'OpenSSL verify mode (none,peer)'
   config_param :ssl_verify_mode, :default => nil do |val|
     case val
     when 'none'
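Reviewer note on the failures_before_use_standby comment in the hunk above: the "1023 seconds" figure follows from fluentd's retry behavior, assuming the default 1-second initial retry wait that doubles after each failure. The ten waits that elapse before the 11th consecutive failure sum to 2^10 - 1. A quick check in Ruby:

    # Cumulative wait before the 11th consecutive write failure, assuming
    # fluentd's default retry_wait of 1s doubled after every failure.
    waits = (0...10).map { |i| 2**i }   # [1, 2, 4, ..., 512]
    puts waits.inject(:+)               # => 1023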
@@ -54,9 +82,11 @@ class Fluent::WebHDFSOutput < Fluent::TimeSlicedOutput
     end
   end
 
+  desc 'Use kerberos authentication or not'
   config_param :kerberos, :bool, :default => false
 
-  SUPPORTED_COMPRESS = ['gzip']
+  SUPPORTED_COMPRESS = ['gzip', 'bzip2']
+  desc "Compress method (#{SUPPORTED_COMPRESS.join(',')})"
   config_param :compress, :default => nil do |val|
     unless SUPPORTED_COMPRESS.include? val
       raise Fluent::ConfigError, "unsupported compress: #{val}"
@@ -66,11 +96,15 @@ class Fluent::WebHDFSOutput < Fluent::TimeSlicedOutput
 
   CHUNK_ID_PLACE_HOLDER = '${chunk_id}'
 
+  attr_reader :compressor
+
   def initialize
     super
     require 'net/http'
     require 'time'
     require 'webhdfs'
+
+    @compressor = nil
   end
 
   # Define `log` method for v0.10.42 or earlier
@@ -91,6 +125,13 @@ class Fluent::WebHDFSOutput < Fluent::TimeSlicedOutput
 
     super
 
+    begin
+      @compressor = COMPRESSOR_REGISTRY.lookup(@compress || 'text').new
+    rescue
+      $log.warn "#{@compress} not found. Use 'text' instead"
+      @compressor = COMPRESSOR_REGISTRY.lookup('text').new
+    end
+
     if @host
       @namenode_host = @host
       @namenode_port = @port
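configure now resolves the compressor through the registry introduced further down, falling back to the pass-through 'text' compressor when the lookup raises. A minimal sketch of that resolution, assuming fluentd v0.12 and this gem are installed:

    require 'fluent/load'
    require 'fluent/plugin/out_webhdfs'

    # The same lookup-with-fallback that #configure performs: an unknown
    # name raises from the registry and is rescued into the 'text' compressor.
    registry = Fluent::WebHDFSOutput::COMPRESSOR_REGISTRY
    compressor = begin
                   registry.lookup('gzip').new
                 rescue
                   registry.lookup('text').new
                 end
    puts compressor.ext  # => ".gz"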
@@ -230,31 +271,18 @@ class Fluent::WebHDFSOutput < Fluent::TimeSlicedOutput
     else
       path_format(chunk.key).gsub(CHUNK_ID_PLACE_HOLDER, chunk_unique_id_to_str(chunk.unique_id))
     end
-    if @compress
-      case @compress
-      when 'gzip'
-        hdfs_path = "#{hdfs_path}.gz"
-      end
-    end
+    hdfs_path = "#{hdfs_path}#{@compressor.ext}"
     hdfs_path
   end
 
   def compress_context(chunk, &block)
-    case @compress
-    when 'gzip'
-      require 'zlib'
-      require 'tempfile'
+    begin
       tmp = Tempfile.new("webhdfs-")
-      begin
-        w = Zlib::GzipWriter.new(tmp)
-        chunk.write_to(w)
-        w.close
-        tmp.close
-        tmp.open
-        yield tmp
-      ensure
-        tmp.close(true) rescue nil
-      end
+      @compressor.compress(chunk, tmp)
+      tmp.rewind
+      yield tmp
+    ensure
+      tmp.close(true) rescue nil
     end
   end
 end
@@ -263,12 +291,8 @@ class Fluent::WebHDFSOutput < Fluent::TimeSlicedOutput
 
     failovered = false
     begin
-      if @compress
-        compress_context(chunk) do |data|
-          send_data(hdfs_path, data)
-        end
-      else
-        send_data(hdfs_path, chunk.read)
+      compress_context(chunk) do |data|
+        send_data(hdfs_path, data)
       end
     rescue => e
       log.warn "failed to communicate hdfs cluster, path: #{hdfs_path}"
@@ -291,4 +315,45 @@ class Fluent::WebHDFSOutput < Fluent::TimeSlicedOutput
     end
     hdfs_path
   end
+
+  class Compressor
+    include Fluent::Configurable
+
+    def initialize(options = {})
+      super()
+    end
+
+    def configure(conf)
+      super
+    end
+
+    def ext
+    end
+
+    def compress(chunk)
+    end
+
+    private
+
+    def check_command(command, algo = nil)
+      require 'open3'
+
+      algo = command if algo.nil?
+      begin
+        Open3.capture3("#{command} -V")
+      rescue Errno::ENOENT
+        raise ConfigError, "'#{command}' utility must be in PATH for #{algo} compression"
+      end
+    end
+  end
+
+  COMPRESSOR_REGISTRY = Fluent::Registry.new(:webhdfs_compressor_type, 'fluent/plugin/webhdfs_compressor_')
+
+  def self.register_compressor(name, compressor)
+    COMPRESSOR_REGISTRY.register(name, compressor)
+  end
 end
+
+require 'fluent/plugin/webhdfs_compressor_text'
+require 'fluent/plugin/webhdfs_compressor_gzip'
+require 'fluent/plugin/webhdfs_compressor_bzip2'
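The Compressor base class and COMPRESSOR_REGISTRY make compression pluggable: register_compressor maps a config name to a class, and the registry's 'fluent/plugin/webhdfs_compressor_' prefix lets a lookup auto-require out-of-tree files. A hypothetical sketch of a third-party compressor; the XzCompressor name and the xz(1) invocation are illustrative only, not part of the gem, and the compress config_param above would also need 'xz' added to SUPPORTED_COMPRESS before it could be selected:

    require 'fluent/load'
    require 'fluent/plugin/out_webhdfs'

    module Fluent
      class WebHDFSOutput < Fluent::TimeSlicedOutput
        # Hypothetical out-of-tree compressor shelling out to xz(1).
        class XzCompressor < Compressor
          WebHDFSOutput.register_compressor('xz', self)

          def ext
            ".xz"
          end

          def compress(chunk, tmp)
            check_command('xz')  # Compressor#check_command: verify xz is in PATH
            IO.popen("xz -c > #{tmp.path}", "w") do |io|
              chunk.write_to(io)
            end
          end
        end
      end
    end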
lib/fluent/plugin/webhdfs_compressor_bzip2.rb ADDED
@@ -0,0 +1,21 @@
+module Fluent
+  class WebHDFSOutput < Fluent::TimeSlicedOutput
+    class Bzip2Compressor < Compressor
+      WebHDFSOutput.register_compressor('bzip2', self)
+
+      def initialize(options = {})
+        require "bzip2/ffi"
+      end
+
+      def ext
+        ".bz2"
+      end
+
+      def compress(chunk, tmp)
+        Bzip2::FFI::Writer.open(tmp) do |writer|
+          chunk.write_to(writer)
+        end
+      end
+    end
+  end
+end
lib/fluent/plugin/webhdfs_compressor_gzip.rb ADDED
@@ -0,0 +1,21 @@
+module Fluent
+  class WebHDFSOutput < Fluent::TimeSlicedOutput
+    class GzipCompressor < Compressor
+      WebHDFSOutput.register_compressor('gzip', self)
+
+      def initialize(options = {})
+        require "zlib"
+      end
+
+      def ext
+        ".gz"
+      end
+
+      def compress(chunk, tmp)
+        w = Zlib::GzipWriter.new(tmp)
+        chunk.write_to(w)
+        w.close
+      end
+    end
+  end
+end
lib/fluent/plugin/webhdfs_compressor_text.rb ADDED
@@ -0,0 +1,15 @@
+module Fluent
+  class WebHDFSOutput < Fluent::TimeSlicedOutput
+    class TextCompressor < Compressor
+      WebHDFSOutput.register_compressor('text', self)
+
+      def ext
+        ""
+      end
+
+      def compress(chunk, tmp)
+        chunk.write_to(tmp)
+      end
+    end
+  end
+end
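Taken together, the three files replace the old if @compress branching with a null-object pattern: 'text' is just another compressor with an empty extension. A minimal usage sketch with the same test driver the bundled tests use, assuming fluentd and bzip2-ffi are installed:

    require 'fluent/test'
    require 'fluent/plugin/out_webhdfs'

    Fluent::Test.setup

    d = Fluent::Test::OutputTestDriver.new(Fluent::WebHDFSOutput, 'test').configure(%[
      namenode namenode.local:50070
      path /hdfs/path/file.%Y%m%d.log
      compress bzip2
    ])

    p d.instance.compressor.class  # => Fluent::WebHDFSOutput::Bzip2Compressor
    p d.instance.compressor.ext    # => ".bz2" (appended to the HDFS path at write time)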
test/plugin/test_out_webhdfs.rb CHANGED
@@ -5,43 +5,51 @@ class WebHDFSOutputTest < Test::Unit::TestCase
 host namenode.local
 path /hdfs/path/file.%Y%m%d.log
 ]
+  def setup
+    Fluent::Test.setup
+  end
 
   def create_driver(conf=CONFIG,tag='test')
     Fluent::Test::OutputTestDriver.new(Fluent::WebHDFSOutput, tag).configure(conf)
   end
 
-  def test_configure
-    d = create_driver
-    assert_equal 'namenode.local', d.instance.instance_eval{ @namenode_host }
-    assert_equal 50070, d.instance.instance_eval{ @namenode_port }
-    assert_equal '/hdfs/path/file.%Y%m%d.log', d.instance.path
-    assert_equal '%Y%m%d', d.instance.time_slice_format
-    assert_equal false, d.instance.httpfs
-    assert_nil d.instance.username
-    assert_equal false, d.instance.ignore_start_check_error
-
-    assert_equal true, d.instance.output_include_time
-    assert_equal true, d.instance.output_include_tag
-    assert_equal 'json', d.instance.output_data_type
-    assert_nil d.instance.remove_prefix
-    assert_equal 'TAB', d.instance.field_separator
-    assert_equal true, d.instance.add_newline
-    assert_equal 'tag_missing', d.instance.default_tag
+  class ConfigureTest < self
+    def test_default
+      d = create_driver
+      assert_equal 'namenode.local', d.instance.instance_eval{ @namenode_host }
+      assert_equal 50070, d.instance.instance_eval{ @namenode_port }
+      assert_equal '/hdfs/path/file.%Y%m%d.log', d.instance.path
+      assert_equal '%Y%m%d', d.instance.time_slice_format
+      assert_equal false, d.instance.httpfs
+      assert_nil d.instance.username
+      assert_equal false, d.instance.ignore_start_check_error
+
+      assert_equal true, d.instance.output_include_time
+      assert_equal true, d.instance.output_include_tag
+      assert_equal 'json', d.instance.output_data_type
+      assert_nil d.instance.remove_prefix
+      assert_equal 'TAB', d.instance.field_separator
+      assert_equal true, d.instance.add_newline
+      assert_equal 'tag_missing', d.instance.default_tag
+    end
 
-    d = create_driver %[
+    def test_httpfs
+      d = create_driver %[
 namenode server.local:14000
 path /hdfs/path/file.%Y%m%d.%H%M.log
 httpfs yes
 username hdfs_user
 ]
-    assert_equal 'server.local', d.instance.instance_eval{ @namenode_host }
-    assert_equal 14000, d.instance.instance_eval{ @namenode_port }
-    assert_equal '/hdfs/path/file.%Y%m%d.%H%M.log', d.instance.path
-    assert_equal '%Y%m%d%H%M', d.instance.time_slice_format
-    assert_equal true, d.instance.httpfs
-    assert_equal 'hdfs_user', d.instance.username
+      assert_equal 'server.local', d.instance.instance_eval{ @namenode_host }
+      assert_equal 14000, d.instance.instance_eval{ @namenode_port }
+      assert_equal '/hdfs/path/file.%Y%m%d.%H%M.log', d.instance.path
+      assert_equal '%Y%m%d%H%M', d.instance.time_slice_format
+      assert_equal true, d.instance.httpfs
+      assert_equal 'hdfs_user', d.instance.username
+    end
 
-    d = create_driver %[
+    def test_ssl
+      d = create_driver %[
 namenode server.local:14000
 path /hdfs/path/file.%Y%m%d.%H%M.log
 ssl true
@@ -49,75 +57,92 @@ ssl_ca_file /path/to/ca_file.pem
 ssl_verify_mode peer
 kerberos true
 ]
-    assert_equal 'server.local', d.instance.instance_eval{ @namenode_host }
-    assert_equal 14000, d.instance.instance_eval{ @namenode_port }
-    assert_equal '/hdfs/path/file.%Y%m%d.%H%M.log', d.instance.path
-    assert_equal '%Y%m%d%H%M', d.instance.time_slice_format
-    assert_equal true, d.instance.ssl
-    assert_equal '/path/to/ca_file.pem', d.instance.ssl_ca_file
-    assert_equal :peer, d.instance.ssl_verify_mode
-    assert_equal true, d.instance.kerberos
+      assert_equal 'server.local', d.instance.instance_eval{ @namenode_host }
+      assert_equal 14000, d.instance.instance_eval{ @namenode_port }
+      assert_equal '/hdfs/path/file.%Y%m%d.%H%M.log', d.instance.path
+      assert_equal '%Y%m%d%H%M', d.instance.time_slice_format
+      assert_equal true, d.instance.ssl
+      assert_equal '/path/to/ca_file.pem', d.instance.ssl_ca_file
+      assert_equal :peer, d.instance.ssl_verify_mode
+      assert_equal true, d.instance.kerberos
+    end
 
-    d = create_driver %[
+    data(gzip: ['gzip', Fluent::WebHDFSOutput::GzipCompressor],
+         bzip2: ['bzip2', Fluent::WebHDFSOutput::Bzip2Compressor])
+    def test_compress(data)
+      compress_type, compressor_class = data
+      d = create_driver %[
 namenode server.local:14000
 path /hdfs/path/file.%Y%m%d.%H%M.log
-compress gzip
+compress #{compress_type}
 ]
-    assert_equal 'server.local', d.instance.instance_eval{ @namenode_host }
-    assert_equal 14000, d.instance.instance_eval{ @namenode_port }
-    assert_equal '/hdfs/path/file.%Y%m%d.%H%M.log', d.instance.path
-    assert_equal '%Y%m%d%H%M', d.instance.time_slice_format
-    assert_equal 'gzip', d.instance.compress
-  end
+      assert_equal 'server.local', d.instance.instance_eval{ @namenode_host }
+      assert_equal 14000, d.instance.instance_eval{ @namenode_port }
+      assert_equal '/hdfs/path/file.%Y%m%d.%H%M.log', d.instance.path
+      assert_equal '%Y%m%d%H%M', d.instance.time_slice_format
+      assert_equal compress_type, d.instance.compress
+      assert_equal compressor_class, d.instance.compressor.class
+    end
 
-  def test_configure_placeholders
-    d = create_driver %[
+    def test_placeholders
+      d = create_driver %[
 hostname testing.node.local
 namenode server.local:50070
 path /hdfs/${hostname}/file.%Y%m%d%H.log
 ]
-    assert_equal '/hdfs/testing.node.local/file.%Y%m%d%H.log', d.instance.path
-  end
+      assert_equal '/hdfs/testing.node.local/file.%Y%m%d%H.log', d.instance.path
+    end
 
-  def test_path_format
-    d = create_driver
-    assert_equal '/hdfs/path/file.%Y%m%d.log', d.instance.path
-    assert_equal '%Y%m%d', d.instance.time_slice_format
-    assert_equal '/hdfs/path/file.20120718.log', d.instance.path_format('20120718')
+    class PathFormatTest < self
+      def test_default
+        d = create_driver
+        assert_equal '/hdfs/path/file.%Y%m%d.log', d.instance.path
+        assert_equal '%Y%m%d', d.instance.time_slice_format
+        assert_equal '/hdfs/path/file.20120718.log', d.instance.path_format('20120718')
+      end
 
-    d = create_driver %[
+      def test_time_slice_format
+        d = create_driver %[
 namenode server.local:14000
 path /hdfs/path/file.%Y%m%d.%H%M.log
 ]
-    assert_equal '/hdfs/path/file.%Y%m%d.%H%M.log', d.instance.path
-    assert_equal '%Y%m%d%H%M', d.instance.time_slice_format
-    assert_equal '/hdfs/path/file.20120718.1503.log', d.instance.path_format('201207181503')
+        assert_equal '/hdfs/path/file.%Y%m%d.%H%M.log', d.instance.path
+        assert_equal '%Y%m%d%H%M', d.instance.time_slice_format
+        assert_equal '/hdfs/path/file.20120718.1503.log', d.instance.path_format('201207181503')
+      end
+    end
 
-    assert_raise Fluent::ConfigError do
-      d = create_driver %[
+    class InvalidTest < self
+      def test_path
+        assert_raise Fluent::ConfigError do
+          d = create_driver %[
 namenode server.local:14000
 path /hdfs/path/file.%Y%m%d.%H%M.log
 append false
 ]
-    end
-  end
+        end
+      end
 
-  def test_invalid_configure
-    assert_raise Fluent::ConfigError do
-      create_driver %[
-namenode server.local:14000
-path /hdfs/path/file.%Y%m%d.%H%M.log
-ssl true
-ssl_verify_mode invalid
-]
-    end
-    assert_raise Fluent::ConfigError do
-      create_driver %[
-namenode server.local:14000
-path /hdfs/path/file.%Y%m%d.%H%M.log
-compress invalid
-]
+      def test_ssl
+        assert_raise Fluent::ConfigError do
+          create_driver %[
+namenode server.local:14000
+path /hdfs/path/file.%Y%m%d.%H%M.log
+ssl true
+ssl_verify_mode invalid
+]
+        end
+      end
+
+      def test_invalid_compress
+        assert_raise Fluent::ConfigError do
+          create_driver %[
+namenode server.local:14000
+path /hdfs/path/file.%Y%m%d.%H%M.log
+compress invalid
+]
+        end
+      end
     end
   end
 end
-
 end
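The restructured tests group cases into nested classes (ConfigureTest and friends inherit create_driver via < self) and use test-unit's data method for the parameterized compressor cases, which is presumably why test-unit is now an explicit development dependency: Ruby 2.2, newly added to the CI matrix above, no longer ships it in the standard library. A minimal sketch of the data-driven style:

    require 'test/unit'

    # Each data label becomes its own test case, and the labeled value is
    # passed into the test method as `data`.
    class ExtensionTest < Test::Unit::TestCase
      EXTS = { 'gzip' => '.gz', 'bzip2' => '.bz2', 'text' => '' }

      data(gzip:  ['gzip', '.gz'],
           bzip2: ['bzip2', '.bz2'])
      def test_ext(data)
        name, expected = data
        assert_equal expected, EXTS[name]
      end
    end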
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-webhdfs
 version: !ruby/object:Gem::Version
-  version: 0.4.1
+  version: 0.4.2
 platform: ruby
 authors:
 - TAGOMORI Satoshi
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-11-10 00:00:00.000000000 Z
+date: 2016-01-07 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rake
@@ -24,6 +24,20 @@ dependencies:
   - - ">="
     - !ruby/object:Gem::Version
       version: '0'
+- !ruby/object:Gem::Dependency
+  name: test-unit
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: fluentd
   requirement: !ruby/object:Gem::Requirement
@@ -80,6 +94,20 @@ dependencies:
   - - ">="
     - !ruby/object:Gem::Version
       version: 0.6.0
+- !ruby/object:Gem::Dependency
+  name: bzip2-ffi
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description: For WebHDFS and HttpFs of Hadoop HDFS
 email:
 - tagomoris@gmail.com
@@ -95,11 +123,14 @@ files:
 - Rakefile
 - fluent-plugin-webhdfs.gemspec
 - lib/fluent/plugin/out_webhdfs.rb
+- lib/fluent/plugin/webhdfs_compressor_bzip2.rb
+- lib/fluent/plugin/webhdfs_compressor_gzip.rb
+- lib/fluent/plugin/webhdfs_compressor_text.rb
 - test/helper.rb
 - test/plugin/test_out_webhdfs.rb
 homepage: https://github.com/fluent/fluent-plugin-webhdfs
 licenses:
-- APLv2
+- Apache-2.0
 metadata: {}
 post_install_message:
 rdoc_options: []
@@ -117,10 +148,11 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.2.2
+rubygems_version: 2.4.5
 signing_key:
 specification_version: 4
 summary: Fluentd plugin to write data on HDFS over WebHDFS, with flexible formatting
 test_files:
 - test/helper.rb
 - test/plugin/test_out_webhdfs.rb
+has_rdoc: