fluent-plugin-webhdfs 1.1.0 → 1.1.1
- checksums.yaml +4 -4
- data/README.md +7 -0
- data/fluent-plugin-webhdfs.gemspec +1 -1
- data/lib/fluent/plugin/out_webhdfs.rb +1 -1
- data/lib/fluent/plugin/webhdfs_compressor_gzip.rb +1 -1
- data/test/plugin/test_gzip_compressor.rb +57 -0
- metadata +5 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8da92c765c784f8e14759a04362206c34e30dc10
+  data.tar.gz: ae75ac7168ff93fd11000036d39fcdb0dd088186
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: cd75219dc52eb63ded9f03673b4843ae55cc7bd4da179aa15d29639077287ee128fb1be24868f253218b43dd073412a2a7517272db3ad253a7f896fad470d754
+  data.tar.gz: e72169a9a84418fc776e43311eb842ed1bb6b58b83dcc9fab5b8a51cf75dcacd04fca4f9654706204bd8f62f96420e9fa20116febddd6e6168ced4fc5dce3a6f
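The checksum change only records new digests for the rebuilt gem archives. If you want to confirm a downloaded copy matches, the same SHA1/SHA512 values can be recomputed with Ruby's standard library; a minimal sketch, assuming `metadata.gz` and `data.tar.gz` have already been extracted from the `.gem` file (a plain tar archive):

```ruby
require "digest"

# Hypothetical local filenames: the two members of fluent-plugin-webhdfs-1.1.1.gem,
# e.g. obtained via `tar -xf fluent-plugin-webhdfs-1.1.1.gem`.
%w[metadata.gz data.tar.gz].each do |member|
  puts "#{member}:"
  puts "  SHA1:   #{Digest::SHA1.file(member).hexdigest}"
  puts "  SHA512: #{Digest::SHA512.file(member).hexdigest}"
end
```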
data/README.md
CHANGED
@@ -10,6 +10,13 @@
 
 Paths on HDFS can be generated from event timestamp, tag or any other fields in records.
 
+## Requirements
+
+| fluent-plugin-webhdfs | fluentd    | ruby   |
+|-----------------------|------------|--------|
+| >= 1.0.0              | >= v0.14.4 | >= 2.1 |
+| < 1.0.0               | < v0.14.0  | >= 1.9 |
+
 ### Older versions
 
 The versions of `0.x.x` of this plugin are for older version of Fluentd (v0.12.x). Old style configuration parameters (using `output_data_type`, `output_include_*` or others) are still supported, but are deprecated.
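The new Requirements table is the compatibility contract for the 1.x series. One hedged illustration of how it might translate into dependency pins (not taken from the README itself):

```ruby
# Gemfile sketch for the current series (Fluentd v0.14.4+ / Ruby 2.1+)
source "https://rubygems.org"

gem "fluentd", ">= 0.14.4"
gem "fluent-plugin-webhdfs", ">= 1.0.0"

# On an older Fluentd v0.12.x deployment the table points at the 0.x.x series instead:
# gem "fluent-plugin-webhdfs", "< 1.0.0"
```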
data/fluent-plugin-webhdfs.gemspec
CHANGED
@@ -2,7 +2,7 @@
 
 Gem::Specification.new do |gem|
   gem.name = "fluent-plugin-webhdfs"
-  gem.version = "1.1.0"
+  gem.version = "1.1.1"
   gem.authors = ["TAGOMORI Satoshi"]
   gem.email = ["tagomoris@gmail.com"]
   gem.summary = %q{Fluentd plugin to write data on HDFS over WebHDFS, with flexible formatting}
data/lib/fluent/plugin/out_webhdfs.rb
CHANGED
@@ -312,7 +312,7 @@ class Fluent::Plugin::WebHDFSOutput < Fluent::Plugin::Output
     hdfs_path = "#{hdfs_path}#{@compressor.ext}"
     if @replace_random_uuid
       uuid_random = SecureRandom.uuid
-      hdfs_path.gsub
+      hdfs_path = hdfs_path.gsub('%{uuid}', uuid_random).gsub('%{uuid_flush}', uuid_random)
     end
     hdfs_path
   end
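The one-line change above is the heart of the release: `String#gsub` returns a new string and leaves its receiver untouched, so without the reassignment the `%{uuid}` / `%{uuid_flush}` placeholders were never substituted into the generated HDFS path. A standalone sketch of the difference, with illustrative values rather than the plugin's own code:

```ruby
require "securerandom"

hdfs_path   = "/hdfs/path/file.%{uuid}.log"
uuid_random = SecureRandom.uuid

# gsub without reassignment: the return value is discarded, the receiver is unchanged
hdfs_path.gsub('%{uuid}', uuid_random)
puts hdfs_path  # => "/hdfs/path/file.%{uuid}.log"  (placeholder still present)

# the fixed form keeps the substituted result
hdfs_path = hdfs_path.gsub('%{uuid}', uuid_random).gsub('%{uuid_flush}', uuid_random)
puts hdfs_path  # => "/hdfs/path/file.<random uuid>.log"
```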
data/test/plugin/test_gzip_compressor.rb
ADDED
@@ -0,0 +1,57 @@
+require "helper"
+require "fluent/plugin/buf_memory"
+begin
+  require "zlib"
+rescue LoadError
+end
+
+class GzipCompressorTest < Test::Unit::TestCase
+  class Gzip < self
+
+    CONFIG = %[
+      host namenode.local
+      path /hdfs/path/file.%Y%m%d.log
+    ]
+
+    def setup
+      omit unless Object.const_defined?(:Zlib)
+      Fluent::Test.setup
+      @compressor = Fluent::Plugin::WebHDFSOutput::GzipCompressor.new
+    end
+
+    def create_driver(conf = CONFIG)
+      Fluent::Test::Driver::Output.new(Fluent::Plugin::WebHDFSOutput).configure(conf)
+    end
+
+    def test_ext
+      assert_equal(".gz", @compressor.ext)
+    end
+
+    def test_compress
+      d = create_driver
+      if d.instance.respond_to?(:buffer)
+        buffer = d.instance.buffer
+      else
+        buffer = d.instance.instance_variable_get(:@buffer)
+      end
+
+      if buffer.respond_to?(:generate_chunk)
+        chunk = buffer.generate_chunk("test")
+        chunk.concat("hello gzip\n" * 32 * 1024, 1)
+      else
+        chunk = buffer.new_chunk("test")
+        chunk << "hello gzip\n" * 32 * 1024
+      end
+
+      io = Tempfile.new("gzip-")
+      @compressor.compress(chunk, io)
+      assert !io.closed?
+      chunk_bytesize = chunk.respond_to?(:bytesize) ? chunk.bytesize : chunk.size
+      assert(chunk_bytesize > io.read.bytesize)
+      io.rewind
+      reader = Zlib::GzipReader.new(io)
+      assert_equal(chunk.read, reader.read)
+      io.close
+    end
+  end
+end
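The new test drives the plugin's GzipCompressor through a buffer chunk, but its assertions come down to a plain gzip round trip: the compressed output should be smaller than the input, and `Zlib::GzipReader` should give the original bytes back. A self-contained sketch of that round trip using only the standard library (no fluentd objects involved):

```ruby
require "zlib"
require "tempfile"

data = "hello gzip\n" * 32 * 1024

# write gzip-compressed data into a temp file
io = Tempfile.new("gzip-sketch-")
Zlib::GzipWriter.wrap(File.open(io.path, "wb")) { |gz| gz.write(data) }

# the compressed payload is smaller than the input ...
compressed = File.binread(io.path)
raise "not smaller" unless compressed.bytesize < data.bytesize

# ... and decompressing restores the original bytes
Zlib::GzipReader.open(io.path) { |gz| raise "mismatch" unless gz.read == data }

io.close!
```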
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-webhdfs
 version: !ruby/object:Gem::Version
-  version: 1.1.0
+  version: 1.1.1
 platform: ruby
 authors:
 - TAGOMORI Satoshi
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-
+date: 2017-05-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rake
@@ -145,6 +145,7 @@ files:
 - lib/fluent/plugin/webhdfs_compressor_text.rb
 - test/helper.rb
 - test/plugin/test_compressor.rb
+- test/plugin/test_gzip_compressor.rb
 - test/plugin/test_out_webhdfs.rb
 homepage: https://github.com/fluent/fluent-plugin-webhdfs
 licenses:
@@ -166,11 +167,12 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.6.
+rubygems_version: 2.6.11
 signing_key:
 specification_version: 4
 summary: Fluentd plugin to write data on HDFS over WebHDFS, with flexible formatting
 test_files:
 - test/helper.rb
 - test/plugin/test_compressor.rb
+- test/plugin/test_gzip_compressor.rb
 - test/plugin/test_out_webhdfs.rb