fluent-plugin-buffer-lightening 0.0.1
- checksums.yaml +7 -0
- data/.gitignore +17 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +13 -0
- data/README.md +68 -0
- data/Rakefile +11 -0
- data/fluent-plugin-buffer-lightening.gemspec +21 -0
- data/lib/fluent/plugin/buf_lightening.rb +84 -0
- data/lib/fluent/plugin/output_try_flush_interval_patch.rb +113 -0
- data/test/helper.rb +29 -0
- data/test/plugin/dummy_output.rb +28 -0
- data/test/plugin/test_buf_lightening.rb +62 -0
- metadata +101 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA1:
  metadata.gz: 10e80754b85bcc5fa9bc83d47e75ee7bf7637b68
  data.tar.gz: 558822e85a83088e88268869ad13cbc121a7c39f
SHA512:
  metadata.gz: ef5d463b78bf73975c8c5ddad3e01a09960bc6b115ea578308d4dbf6d0afd5e9c0022b8164ea6f5e6b08573cf342819cfe8245304b6ff6c885019b6f93e8b03b
  data.tar.gz: f6fdf47d488ecc61ce7efa12e392409686f2b1fbf94c2de8b2807f3e3eb5aa5eb1ca4f3303516ce1144f3e8740269a3c6b8e34994d89f12ac4ec395e2bc08bfa
data/.gitignore
ADDED
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,13 @@
Copyright (c) 2012- TAGOMORI Satoshi

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
data/README.md
ADDED
@@ -0,0 +1,68 @@
# fluent-plugin-buffer-lightening

Fluentd memory buffer plugin with additional chunk limit methods that control when chunks are flushed:
* events count limit per chunk

These options decrease latency from emit to write, and give control over chunk sizes and flush sizes.

**NOTICE:** The lightening buffer plugin stores data on memory, so that data will be lost when the process/server crashes.

The current version of this plugin also adds a `try_flush_interval` option to BufferedOutput plugins, to flush buffer chunks more frequently. To use this option, run fluentd with `-r fluent/plugin/output_try_flush_interval_patch`.

## Installation

Do `gem install fluent-plugin-buffer-lightening` or `fluent-gem ...`.

## Configuration

The lightening buffer plugin can be enabled with any buffered output plugin.

To flush chunks every 100 records, configure like this:

```
<match data.**>
  type any_buffered_output_plugin
  buffer_type lightening
  buffer_chunk_records_limit 100
  # other options...
</match>
```

Options of `buffer_type memory` are also available:

```
<match data.**>
  type any_buffered_output_plugin
  buffer_type lightening
  buffer_chunk_limit 10M
  buffer_chunk_records_limit 100
  # other options...
</match>
```

### For less delay

For more frequent flushing, use `flush_interval` and `try_flush_interval` with floating point values:

```
<match data.**>
  type any_buffered_output_plugin
  buffer_type lightening
  buffer_chunk_records_limit 100
  # other options...
  flush_interval 0.5
  try_flush_interval 0.1 # 0.6sec delay for worst case (flush_interval + try_flush_interval)
</match>
```

Then execute fluentd as `fluentd -r fluent/plugin/output_try_flush_interval_patch -c fluentd.conf`.

## TODO

* remove `output_try_flush_interval_patch` once the plugin can depend on a fluentd release that includes it (fluentd v0.10.42, per the TODO comments in the source)
* more limit patterns
* patches welcome!

## Copyright

* Copyright (c) 2013- TAGOMORI Satoshi (tagomoris)
* License
  * Apache License, Version 2.0
data/Rakefile
ADDED
data/fluent-plugin-buffer-lightening.gemspec
ADDED
@@ -0,0 +1,21 @@
# coding: utf-8

Gem::Specification.new do |spec|
  spec.name          = "fluent-plugin-buffer-lightening"
  spec.version       = "0.0.1"
  spec.authors       = ["TAGOMORI Satoshi"]
  spec.email         = ["tagomoris@gmail.com"]
  spec.description   = %q{Fluentd memory buffer plugin with many types of chunk limits}
  spec.summary       = %q{Alternative memory buffer plugin for Fluentd to realize less delay}
  spec.homepage      = "https://github.com/tagomoris/fluent-plugin-buffer-lightening"
  spec.license       = "APLv2"

  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_runtime_dependency "fluentd"
end
data/lib/fluent/plugin/buf_lightening.rb
ADDED
@@ -0,0 +1,84 @@
require 'fluent/plugin/buf_memory'
require_relative 'output_try_flush_interval_patch'

module Fluent
  class LighteningBufferChunk < MemoryBufferChunk
    attr_reader :record_counter

    def initialize(key, data='')
      super
      @record_counter = 0
    end

    def <<(data)
      super
      @record_counter += 1
    end
  end

  class LighteningBuffer < MemoryBuffer
    Fluent::Plugin.register_buffer('lightening', self)

    config_param :buffer_chunk_records_limit, :integer, :default => nil

    def configure(conf)
      super
    end

    def new_chunk(key)
      LighteningBufferChunk.new(key)
    end

    def storable?(chunk, data)
      return false if chunk.size + data.bytesize > @buffer_chunk_limit
      return false if @buffer_chunk_records_limit && chunk.record_counter >= @buffer_chunk_records_limit
      true
    end

    # TODO: remove w/ fluentd v0.10.42 (or td-agent including fluentd v0.10.42)
    def emit(key, data, chain) # copy&paste from BasicBuffer, and fix to add hook point
      key = key.to_s

      synchronize do
        top = (@map[key] ||= new_chunk(key))

        if storable?(top, data) # hook point (FIXED THIS LINE ONLY)
          chain.next
          top << data
          return false
        elsif @queue.size >= @buffer_queue_limit
          raise BufferQueueLimitError, "queue size exceeds limit"
        end

        if data.bytesize > @buffer_chunk_limit
          $log.warn "Size of the emitted data exceeds buffer_chunk_limit."
          $log.warn "This may occur problems in the output plugins ``at this server.``"
          $log.warn "To avoid problems, set a smaller number to the buffer_chunk_limit"
          $log.warn "in the forward output ``at the log forwarding server.``"
        end

        nc = new_chunk(key)
        ok = false

        begin
          nc << data
          chain.next

          flush_trigger = false
          @queue.synchronize {
            enqueue(top)
            flush_trigger = @queue.empty?
            @queue << top
            @map[key] = nc
          }

          ok = true
          return flush_trigger
        ensure
          nc.purge unless ok
        end

      end # synchronize
    end
  end
end
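A minimal, self-contained sketch (not part of the gem) of the rule `storable?` implements: a chunk stops accepting data once either the byte limit or the record-count limit is reached, which is what makes `emit` enqueue the full chunk and open a new one. The limits below are illustrative values, not defaults taken from this plugin.

```
# Illustration only: the two conditions checked by LighteningBuffer#storable?,
# extracted as a plain method with hypothetical limits.
def chunk_storable?(chunk_bytes, chunk_records, data_bytes, byte_limit, records_limit)
  return false if chunk_bytes + data_bytes > byte_limit            # buffer_chunk_limit check
  return false if records_limit && chunk_records >= records_limit  # buffer_chunk_records_limit check
  true
end

byte_limit    = 8 * 1024 * 1024  # assumed buffer_chunk_limit
records_limit = 100              # assumed buffer_chunk_records_limit

# A chunk holding 100 records rejects further data even though it is far
# below the byte limit, so emit() enqueues it and starts a new chunk.
p chunk_storable?(1_000, 100, 50, byte_limit, records_limit)  # => false
p chunk_storable?(1_000,  99, 50, byte_limit, records_limit)  # => true
```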
data/lib/fluent/plugin/output_try_flush_interval_patch.rb
ADDED
@@ -0,0 +1,113 @@
# MEMO: execute fluentd with "-r fluent/plugin/output_try_flush_interval_patch" switch

# TODO: remove w/ fluentd v0.10.42 (or td-agent including fluentd v0.10.42)
module Fluent
  class BufferedOutput < Output
    config_param :try_flush_interval, :float, :default => 1

    # override with @try_flush_interval
    def try_flush
      time = Engine.now

      empty = @buffer.queue_size == 0
      if empty && @next_flush_time < (now = Engine.now)
        @buffer.synchronize do
          if @next_flush_time < now
            enqueue_buffer
            @next_flush_time = now + @flush_interval
            empty = @buffer.queue_size == 0
          end
        end
      end
      if empty
        return time + @try_flush_interval
      end

      begin
        retrying = !@error_history.empty?

        if retrying
          @error_history.synchronize do
            if retrying = !@error_history.empty? # re-check in synchronize
              if @next_retry_time >= time
                # allow retrying for only one thread
                return time + @try_flush_interval
              end
              # assume the next retry fails and
              # clear them when it succeeds
              @last_retry_time = time
              @error_history << time
              @next_retry_time += calc_retry_wait
            end
          end
        end

        if @secondary && @error_history.size > @retry_limit
          has_next = flush_secondary(@secondary)
        else
          has_next = @buffer.pop(self)
        end

        # success
        if retrying
          @error_history.clear
          # Note: don't notify to other threads to prevent
          #       burst to recovered server
          $log.warn "retry succeeded.", :instance=>object_id
        end

        if has_next
          return Engine.now + @queued_chunk_flush_interval
        else
          return time + @try_flush_interval
        end

      rescue => e
        if retrying
          error_count = @error_history.size
        else
          # first error
          error_count = 0
          @error_history.synchronize do
            if @error_history.empty?
              @last_retry_time = time
              @error_history << time
              @next_retry_time = time + calc_retry_wait
            end
          end
        end

        if error_count < @retry_limit
          $log.warn "temporarily failed to flush the buffer.", :next_retry=>Time.at(@next_retry_time), :error_class=>e.class.to_s, :error=>e.to_s, :instance=>object_id
          $log.warn_backtrace e.backtrace

        elsif @secondary
          if error_count == @retry_limit
            $log.warn "failed to flush the buffer.", :error_class=>e.class.to_s, :error=>e.to_s, :instance=>object_id
            $log.warn "retry count exceeds limit. falling back to secondary output."
            $log.warn_backtrace e.backtrace
            retry # retry immediately
          elsif error_count <= @retry_limit + @secondary_limit
            $log.warn "failed to flush the buffer, next retry will be with secondary output.", :next_retry=>Time.at(@next_retry_time), :error_class=>e.class.to_s, :error=>e.to_s, :instance=>object_id
            $log.warn_backtrace e.backtrace
          else
            $log.warn "failed to flush the buffer.", :error_class=>e.class, :error=>e.to_s, :instance=>object_id
            $log.warn "secondary retry count exceeds limit."
            $log.warn_backtrace e.backtrace
            write_abort
            @error_history.clear
          end

        else
          $log.warn "failed to flush the buffer.", :error_class=>e.class.to_s, :error=>e.to_s, :instance=>object_id
          $log.warn "retry count exceeds limit."
          $log.warn_backtrace e.backtrace
          write_abort
          @error_history.clear
        end

        return @next_retry_time
      end
    end
  end
end
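A rough latency sketch (not part of the gem) of why the README example comments "0.6sec delay for worst case": with this patch, `try_flush` is rescheduled every `try_flush_interval` seconds instead of every 1 second, so a record waits at most `flush_interval` for its chunk to become due plus roughly one more `try_flush_interval` for the next check to pick it up.

```
# Illustration only: worst-case emit-to-flush delay under the patched scheduler,
# using the values from the README example.
flush_interval     = 0.5
try_flush_interval = 0.1

worst_case_delay = flush_interval + try_flush_interval
puts "worst case delay: #{worst_case_delay} sec"  # => 0.6 sec
```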
data/test/helper.rb
ADDED
@@ -0,0 +1,29 @@
require 'rubygems'
require 'bundler'
begin
  Bundler.setup(:default, :development)
rescue Bundler::BundlerError => e
  $stderr.puts e.message
  $stderr.puts "Run `bundle install` to install missing gems"
  exit e.status_code
end
require 'test/unit'

$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'fluent/test'
unless ENV.has_key?('VERBOSE')
  nulllogger = Object.new
  nulllogger.instance_eval {|obj|
    def method_missing(method, *args)
      # pass
    end
  }
  $log = nulllogger
end

require 'fluent/plugin/output_try_flush_interval_patch'
require 'fluent/plugin/buf_lightening'

class Test::Unit::TestCase
end
data/test/plugin/dummy_output.rb
ADDED
@@ -0,0 +1,28 @@
require 'json'

module Fluent
  class DummyBufferedOutput < BufferedOutput
    Fluent::Plugin.register_output('lightening_test', self)

    attr_reader :written

    def start
      super
      @written = []
    end

    def format(tag, time, record)
      [tag, time, record.merge({"format_time" => Time.now.to_f})].to_json + "\n"
    end

    def write(chunk)
      chunk_lines = chunk.read.split("\n").select{|line| not line.empty?}
      @written.push(* chunk_lines.map{ |line|
        p line
        tag, time, record = JSON.parse(line)
        record.update({"write_time" => Time.now.to_f})
      })
      true
    end
  end
end
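A standalone sketch (not taken from the gem's tests) of the line format this dummy output uses: `format` serializes `[tag, time, record]` plus a `format_time` as one JSON line, and `write` parses each line back and stamps it with a `write_time`.

```
# Illustration only: round-trip of one record through the dummy output's format.
require 'json'

tag    = 'test'
time   = Time.now.to_i
record = { 'a' => 1 }

line = [tag, time, record.merge('format_time' => Time.now.to_f)].to_json + "\n"

parsed_tag, parsed_time, parsed_record = JSON.parse(line)
parsed_record.update('write_time' => Time.now.to_f)
p parsed_record  # => {"a"=>1, "format_time"=>..., "write_time"=>...}
```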
data/test/plugin/test_buf_lightening.rb
ADDED
@@ -0,0 +1,62 @@
require 'helper'
require_relative 'dummy_output'

class DummyChain
  def next
  end
end

class LighteningBufferTest < Test::Unit::TestCase
  CONFIG = %[
    buffer_type lightening
    flush_interval 0.1
    try_flush_interval 0.03
    buffer_chunk_records_limit 10
  ]

  def create_driver(conf=CONFIG, tag='test')
    Fluent::Test::OutputTestDriver.new(Fluent::DummyBufferedOutput, tag).configure(conf)
  end

  def test_configure
    d = create_driver
    assert d.instance # successfully configured
    assert_equal 0.1, d.instance.flush_interval
    assert_equal 0.03, d.instance.try_flush_interval
    assert_equal 10, d.instance.instance_eval{ @buffer }.buffer_chunk_records_limit
  end

  def test_emit
    d = create_driver
    buffer = d.instance.instance_eval{ @buffer }
    assert buffer
    buffer.start

    assert_nil buffer.instance_eval{ @map[''] }

    d.emit({"a" => 1})
    assert_equal 1, buffer.instance_eval{ @map[''] }.record_counter

    d.emit({"a" => 2}); d.emit({"a" => 3}); d.emit({"a" => 4})
    d.emit({"a" => 5}); d.emit({"a" => 6}); d.emit({"a" => 7});
    d.emit({"a" => 8});
    assert_equal 8, buffer.instance_eval{ @map[''] }.record_counter

    chain = DummyChain.new
    tag = d.instance.instance_eval{ @tag }
    time = Time.now.to_i

    assert !buffer.emit(tag, d.instance.format(tag, time, {"a" => 9}), chain) # flush_trigger false
    assert_equal 9, buffer.instance_eval{ @map[''] }.record_counter

    assert !buffer.emit(tag, d.instance.format(tag, time, {"a" => 10}), chain) # flush_trigger false
    assert_equal 10, buffer.instance_eval{ @map[''] }.record_counter

    assert buffer.emit(tag, d.instance.format(tag, time, {"a" => 11}), chain) # flush_trigger true
    assert_equal 1, buffer.instance_eval{ @map[''] }.record_counter # new chunk

    assert !buffer.emit(tag, d.instance.format(tag, time, {"a" => 12}), chain) # flush_trigger false
    assert_equal 2, buffer.instance_eval{ @map[''] }.record_counter
  end

end
metadata
ADDED
@@ -0,0 +1,101 @@
--- !ruby/object:Gem::Specification
name: fluent-plugin-buffer-lightening
version: !ruby/object:Gem::Version
  version: 0.0.1
platform: ruby
authors:
- TAGOMORI Satoshi
autorequire:
bindir: bin
cert_chain: []
date: 2013-12-22 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: bundler
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ~>
      - !ruby/object:Gem::Version
        version: '1.3'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ~>
      - !ruby/object:Gem::Version
        version: '1.3'
- !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - '>='
      - !ruby/object:Gem::Version
        version: '0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - '>='
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: fluentd
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - '>='
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - '>='
      - !ruby/object:Gem::Version
        version: '0'
description: Fluentd memory buffer plugin with many types of chunk limits
email:
- tagomoris@gmail.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- .gitignore
- Gemfile
- LICENSE.txt
- README.md
- Rakefile
- fluent-plugin-buffer-lightening.gemspec
- lib/fluent/plugin/buf_lightening.rb
- lib/fluent/plugin/output_try_flush_interval_patch.rb
- test/helper.rb
- test/plugin/dummy_output.rb
- test/plugin/test_buf_lightening.rb
homepage: https://github.com/tagomoris/fluent-plugin-buffer-lightening
licenses:
- APLv2
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.0.3
signing_key:
specification_version: 4
summary: Alternative memory buffer plugin for Fluentd to realize less delay
test_files:
- test/helper.rb
- test/plugin/dummy_output.rb
- test/plugin/test_buf_lightening.rb
has_rdoc: