fluent-plugin-sampling-filter 0.2.1 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.travis.yml +0 -1
- data/fluent-plugin-sampling-filter.gemspec +2 -2
- data/lib/fluent/plugin/filter_sampling.rb +38 -44
- data/lib/fluent/plugin/out_sampling_filter.rb +17 -26
- data/test/plugin/test_filter_sampling.rb +53 -56
- data/test/plugin/test_out_sampling_filter.rb +71 -70
- metadata +5 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 4bb5bf2e2842f56be230defcabebf6574271d6c6
+  data.tar.gz: 54962ccbd36c5c0d502e7bbb0e79fc45748c545d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ccd41752621ecfa648e8e2427517fb68c19e9a0b1ea8e0569c81145b82e15d971e6aab52debd2bcdf458e6fff2c4eb71b2e3d3b0448cfc9b21ec512a4bbb2d0e
+  data.tar.gz: 8dcc45ffd1bec441e39882accb4fde2e0290ff183e669f67b26efa96e95c8b61957279f9cff89f05049469d946517092b97c1ebfc06767fe1d07a766409dfae8
data/.travis.yml
CHANGED
data/fluent-plugin-sampling-filter.gemspec
CHANGED
@@ -2,7 +2,7 @@
 
 Gem::Specification.new do |gem|
   gem.name = "fluent-plugin-sampling-filter"
-  gem.version = "0.2.1"
+  gem.version = "1.0.0"
   gem.authors = ["TAGOMORI Satoshi"]
   gem.email = ["tagomoris@gmail.com"]
   gem.description = %q{fluentd plugin to pickup sample data from matched massages}
@@ -17,5 +17,5 @@ Gem::Specification.new do |gem|
 
   gem.add_development_dependency "rake"
   gem.add_runtime_dependency "test-unit", "~> 3.1.0"
-  gem.add_runtime_dependency "fluentd", [">= 0.12
+  gem.add_runtime_dependency "fluentd", [">= 0.14.12", "< 2"]
 end
data/lib/fluent/plugin/filter_sampling.rb
CHANGED
@@ -1,62 +1,56 @@
-
+require 'fluent/plugin/filter'
+require 'fluent/clock'
+
+class Fluent::Plugin::SamplingFilter < Fluent::Plugin::Filter
   Fluent::Plugin.register_filter('sampling_filter', self)
 
   config_param :interval, :integer
-  config_param :sample_unit, :
+  config_param :sample_unit, :enum, list: [:tag, :all], default: :tag
   config_param :minimum_rate_per_min, :integer, default: nil
 
   def configure(conf)
     super
 
-    @sample_unit = case @sample_unit
-                   when 'tag'
-                     :tag
-                   when 'all'
-                     :all
-                   else
-                     raise Fluent::ConfigError, "sample_unit allows only 'tag' or 'all'"
-                   end
     @counts = {}
     @resets = {} if @minimum_rate_per_min
   end
 
-
-
-
-
-      tag
-    end
-
-    new_es = Fluent::MultiEventStream.new
+  # Access to @counts SHOULD be protected by mutex, with a heavy penalty.
+  # Code below is not thread safe, but @counts (counter for sampling rate) is not
+  # so serious value (and probably will not be broken...),
+  # then i let here as it is now.
 
-
-
-  # so serious value (and probably will not be broken...),
-  # then i let here as it is now.
+  def filter(tag, _time, record)
+    t = @sample_unit == :all ? 'all' : tag
     if @minimum_rate_per_min
-
-
-
-
-
-
-
-
-
-
-
-
-
+      filter_with_minimum_rate(t, record)
+    else
+      filter_simple(t, record)
+    end
+  end
+
+  def filter_simple(t, record)
+    c = (@counts[t] = @counts.fetch(t, 0) + 1)
+    # reset only just before @counts[t] is to be bignum from fixnum
+    @counts[t] = 0 if c > 0x6fffffff
+    if c % @interval == 0
+      record
+    else
+      nil
+    end
+  end
+
+  def filter_with_minimum_rate(t, record)
+    @resets[t] ||= Fluent::Clock.now + (60 - rand(30))
+    if Fluent::Clock.now > @resets[t]
+      @resets[t] = Fluent::Clock.now + 60
+      @counts[t] = 0
+    end
+    c = (@counts[t] = @counts.fetch(t, 0) + 1)
+    if c < @minimum_rate_per_min || c % @interval == 0
+      record.dup
     else
-
-      c = (@counts[t] = @counts.fetch(t, 0) + 1)
-      if c % @interval == 0
-        new_es.add(time, record.dup)
-        # reset only just before @counts[t] is to be bignum from fixnum
-        @counts[t] = 0 if c > 0x6fffffff
-      end
-    end
+      nil
     end
-    new_es
   end
 end
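The rewritten filter registers under the same plugin name and keeps the same parameters, so a deployment only has to swap <match>-based routing for a <filter> block. A minimal configuration sketch follows; the match pattern and the parameter values are illustrative, not taken from this package:

  <filter app.**>
    @type sampling_filter
    interval 10
    sample_unit tag
    minimum_rate_per_min 100
  </filter>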
data/lib/fluent/plugin/out_sampling_filter.rb
CHANGED
@@ -1,36 +1,31 @@
-
+require 'fluent/plugin/output'
+require 'fluent/clock'
+
+class Fluent::Plugin::SamplingFilterOutput < Fluent::Plugin::Output
   Fluent::Plugin.register_output('sampling_filter', self)
 
-
-  config_param :sample_unit, :string, :default => 'tag'
-  config_param :remove_prefix, :string, :default => nil
-  config_param :add_prefix, :string, :default => 'sampled'
-  config_param :minimum_rate_per_min, :integer, :default => nil
+  helpers :event_emitter
 
-
-
-
-
+  config_param :interval, :integer
+  config_param :sample_unit, :enum, list: [:tag, :all], default: :tag
+  config_param :remove_prefix, :string, default: nil
+  config_param :add_prefix, :string, default: 'sampled'
+  config_param :minimum_rate_per_min, :integer, default: nil
 
   def configure(conf)
     super
 
+    log.warn "sampling_filter output plugin is deprecated. use sampling_filter filter plugin instead with <label> routing."
+
     if @remove_prefix
       @removed_prefix_string = @remove_prefix + '.'
       @removed_length = @removed_prefix_string.length
     elsif @add_prefix.empty?
       raise Fluent::ConfigError, "either of 'add_prefix' or 'remove_prefix' must be specified"
     end
+    @added_prefix_string = nil
     @added_prefix_string = @add_prefix + '.' unless @add_prefix.empty?
 
-    @sample_unit = case @sample_unit
-                   when 'tag'
-                     :tag
-                   when 'all'
-                     :all
-                   else
-                     raise Fluent::ConfigError, "sample_unit allows only 'tag' or 'all'"
-                   end
     @counts = {}
     @resets = {} if @minimum_rate_per_min
   end
@@ -51,7 +46,7 @@ class Fluent::SamplingFilterOutput < Fluent::Output
     }
   end
 
-  def
+  def process(tag, es)
     t = if @sample_unit == :all
           'all'
         else
@@ -65,11 +60,9 @@ class Fluent::SamplingFilterOutput < Fluent::Output
     # so serious value (and probably will not be broken...),
     # then i let here as it is now.
     if @minimum_rate_per_min
-
-
-
-      if Fluent::Engine.now > @resets[t]
-        @resets[t] = Fluent::Engine.now + 60
+      @resets[t] ||= Fluent::Clock.now + (60 - rand(30))
+      if Fluent::Clock.now > @resets[t]
+        @resets[t] = Fluent::Clock.now + 60
         @counts[t] = 0
       end
       es.each do |time,record|
@@ -90,7 +83,5 @@ class Fluent::SamplingFilterOutput < Fluent::Output
     end
 
     emit_sampled(tag, pairs)
-
-    chain.next
   end
 end
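The new log.warn call above deprecates the output plugin in favour of the filter plugin combined with <label> routing. A rough migration sketch, assuming Fluentd's built-in relabel output and a stdout placeholder for the real downstream output, could look like the configuration below; label routing generally replaces the tag rewriting that add_prefix/remove_prefix used to provide:

  <match input.**>
    @type relabel
    @label @SAMPLED
  </match>

  <label @SAMPLED>
    <filter **>
      @type sampling_filter
      interval 10
      sample_unit tag
    </filter>
    <match **>
      # replace stdout with the actual destination
      @type stdout
    </match>
  </label>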
data/test/plugin/test_filter_sampling.rb
CHANGED
@@ -1,4 +1,5 @@
 require 'helper'
+require 'fluent/test/driver/filter'
 
 class SamplingFilterTest < Test::Unit::TestCase
   def setup
@@ -10,13 +11,13 @@ class SamplingFilterTest < Test::Unit::TestCase
 sample_unit tag
 ]
 
-  def create_driver(conf=CONFIG
-    Fluent::Test::
+  def create_driver(conf=CONFIG)
+    Fluent::Test::Driver::Filter.new(Fluent::Plugin::SamplingFilter).configure(conf)
   end
 
   def test_configure
     assert_raise(Fluent::ConfigError) {
-
+      create_driver('')
     }
     d = create_driver %[
 interval 5
@@ -34,54 +35,52 @@ class SamplingFilterTest < Test::Unit::TestCase
   end
 
   def test_filter
-    d1 = create_driver(CONFIG
+    d1 = create_driver(CONFIG)
     time = Time.parse("2012-01-02 13:14:15").to_i
-    d1.run do
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
+    d1.run(default_tag: 'input.hoge1') do
+      d1.feed({'field1' => 'record1', 'field2' => 1})
+      d1.feed({'field1' => 'record2', 'field2' => 2})
+      d1.feed({'field1' => 'record3', 'field2' => 3})
+      d1.feed({'field1' => 'record4', 'field2' => 4})
+      d1.feed({'field1' => 'record5', 'field2' => 5})
+      d1.feed({'field1' => 'record6', 'field2' => 6})
+      d1.feed({'field1' => 'record7', 'field2' => 7})
+      d1.feed({'field1' => 'record8', 'field2' => 8})
+      d1.feed({'field1' => 'record9', 'field2' => 9})
+      d1.feed({'field1' => 'record10', 'field2' => 10})
+      d1.feed({'field1' => 'record11', 'field2' => 11})
+      d1.feed({'field1' => 'record12', 'field2' => 12})
     end
-    filtered = d1.
+    filtered = d1.filtered
     assert_equal 1, filtered.length
-    assert_equal '
-    assert_equal
-    assert_equal 10, filtered[0][2]['field2']
+    assert_equal 'record10', filtered[0][1]['field1']
+    assert_equal 10, filtered[0][1]['field2']
 
     d2 = create_driver(%[
 interval 3
-]
+])
     time = Time.parse("2012-01-02 13:14:15").to_i
-    d2.run do
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
+    d2.run(default_tag: 'input.hoge2') do
+      d2.feed({'field1' => 'record1', 'field2' => 1})
+      d2.feed({'field1' => 'record2', 'field2' => 2})
+      d2.feed({'field1' => 'record3', 'field2' => 3})
+      d2.feed({'field1' => 'record4', 'field2' => 4})
+      d2.feed({'field1' => 'record5', 'field2' => 5})
+      d2.feed({'field1' => 'record6', 'field2' => 6})
+      d2.feed({'field1' => 'record7', 'field2' => 7})
+      d2.feed({'field1' => 'record8', 'field2' => 8})
+      d2.feed({'field1' => 'record9', 'field2' => 9})
+      d2.feed({'field1' => 'record10', 'field2' => 10})
+      d2.feed({'field1' => 'record11', 'field2' => 11})
+      d2.feed({'field1' => 'record12', 'field2' => 12})
     end
-    filtered = d2.
+    filtered = d2.filtered
     assert_equal 4, filtered.length
-    assert_equal 'input.hoge2', filtered[0][0] # tag
 
-    assert_equal 'record3', filtered[0][
-    assert_equal 'record6', filtered[1][
-    assert_equal 'record9', filtered[2][
-    assert_equal 'record12', filtered[3][
+    assert_equal 'record3', filtered[0][1]['field1']
+    assert_equal 'record6', filtered[1][1]['field1']
+    assert_equal 'record9', filtered[2][1]['field1']
+    assert_equal 'record12', filtered[3][1]['field1']
   end
 
   def test_filter_minimum_rate
@@ -90,21 +89,20 @@ interval 10
 sample_unit tag
 minimum_rate_per_min 100
 ]
-    d = create_driver(config
+    d = create_driver(config)
     time = Time.parse("2012-01-02 13:14:15").to_i
-    d.run do
+    d.run(default_tag: 'input.hoge3') do
       (1..100).each do |t|
-        d.
+        d.feed(time, {'times' => t, 'data' => 'x'})
       end
       (101..130).each do |t|
-        d.
+        d.feed(time, {'times' => t, 'data' => 'y'})
       end
     end
-    filtered = d.
+    filtered = d.filtered
    assert_equal 103, filtered.length
-    assert_equal
-    assert_equal (
-    assert_equal (['x']*100 + ['y']*3), filtered.map{|t,time,r| r['data']}
+    assert_equal ((1..100).map(&:to_i) + [110, 120, 130]), filtered.map{|_time,r| r['times']}
+    assert_equal (['x']*100 + ['y']*3), filtered.map{|_time,r| r['data']}
   end
 
   def test_filter_minimum_rate_expire
@@ -113,17 +111,16 @@ interval 10
 sample_unit tag
 minimum_rate_per_min 10
 ]
-    d = create_driver(config
+    d = create_driver(config)
     time = Time.parse("2012-01-02 13:14:15").to_i
-    d.run do
+    d.run(default_tag: 'input.hoge4') do
       (1..30).each do |t|
-        d.
+        d.feed(time, {'times' => t, 'data' => 'x'})
       end
     end
-    filtered = d.
+    filtered = d.filtered
     assert_equal 12, filtered.length
-    assert_equal
-    assert_equal (
-    assert_equal (['x']*12), filtered.map{|t,time,r| r['data']}
+    assert_equal ((1..10).map(&:to_i)+[20,30]), filtered.map{|_time,r| r['times']}
+    assert_equal (['x']*12), filtered.map{|_time,r| r['data']}
   end
 end
data/test/plugin/test_out_sampling_filter.rb
CHANGED
@@ -1,10 +1,11 @@
 require 'helper'
+require 'fluent/test/driver/output'
 
 class SamplingFilterOutputTest < Test::Unit::TestCase
   def setup
     Fluent::Test.setup
   end
-
+
   CONFIG = %[
 interval 10
 sample_unit tag
@@ -12,13 +13,13 @@ class SamplingFilterOutputTest < Test::Unit::TestCase
 add_prefix sampled
 ]
 
-  def create_driver(conf=CONFIG
-    Fluent::Test::
+  def create_driver(conf=CONFIG)
+    Fluent::Test::Driver::Output.new(Fluent::Plugin::SamplingFilterOutput).configure(conf)
   end
 
   def test_configure
     assert_raise(Fluent::ConfigError) {
-
+      create_driver('')
     }
     d = create_driver %[
 interval 5
@@ -33,7 +34,7 @@ class SamplingFilterOutputTest < Test::Unit::TestCase
 interval 1000
 sample_unit all
 remove_prefix test
-add_prefix output
+add_prefix output
 ]
     assert_equal 1000, d.instance.interval
     assert_equal :all, d.instance.sample_unit
@@ -42,54 +43,54 @@ class SamplingFilterOutputTest < Test::Unit::TestCase
   end
 
   def test_emit
-    d1 = create_driver(CONFIG
+    d1 = create_driver(CONFIG)
     time = Time.parse("2012-01-02 13:14:15").to_i
-    d1.run do
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
-      d1.
+    d1.run(default_tag: 'input.hoge1') do
+      d1.feed(time, {'field1' => 'record1', 'field2' => 1})
+      d1.feed(time, {'field1' => 'record2', 'field2' => 2})
+      d1.feed(time, {'field1' => 'record3', 'field2' => 3})
+      d1.feed(time, {'field1' => 'record4', 'field2' => 4})
+      d1.feed(time, {'field1' => 'record5', 'field2' => 5})
+      d1.feed(time, {'field1' => 'record6', 'field2' => 6})
+      d1.feed(time, {'field1' => 'record7', 'field2' => 7})
+      d1.feed(time, {'field1' => 'record8', 'field2' => 8})
+      d1.feed(time, {'field1' => 'record9', 'field2' => 9})
+      d1.feed(time, {'field1' => 'record10', 'field2' => 10})
+      d1.feed(time, {'field1' => 'record11', 'field2' => 11})
+      d1.feed(time, {'field1' => 'record12', 'field2' => 12})
     end
-
-    assert_equal 1,
-    assert_equal 'sampled.hoge1',
-    assert_equal 'record10',
-    assert_equal 10,
+    events = d1.events
+    assert_equal 1, events.length
+    assert_equal 'sampled.hoge1', events[0][0] # tag
+    assert_equal 'record10', events[0][2]['field1']
+    assert_equal 10, events[0][2]['field2']
 
     d2 = create_driver(%[
 interval 3
-]
+])
     time = Time.parse("2012-01-02 13:14:15").to_i
-    d2.run do
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
-      d2.
+    d2.run(default_tag: 'input.hoge2') do
+      d2.feed(time, {'field1' => 'record1', 'field2' => 1})
+      d2.feed(time, {'field1' => 'record2', 'field2' => 2})
+      d2.feed(time, {'field1' => 'record3', 'field2' => 3})
+      d2.feed(time, {'field1' => 'record4', 'field2' => 4})
+      d2.feed(time, {'field1' => 'record5', 'field2' => 5})
+      d2.feed(time, {'field1' => 'record6', 'field2' => 6})
+      d2.feed(time, {'field1' => 'record7', 'field2' => 7})
+      d2.feed(time, {'field1' => 'record8', 'field2' => 8})
+      d2.feed(time, {'field1' => 'record9', 'field2' => 9})
+      d2.feed(time, {'field1' => 'record10', 'field2' => 10})
+      d2.feed(time, {'field1' => 'record11', 'field2' => 11})
+      d2.feed(time, {'field1' => 'record12', 'field2' => 12})
     end
-
-    assert_equal 4,
-    assert_equal 'sampled.input.hoge2',
+    events = d2.events
+    assert_equal 4, events.length
+    assert_equal 'sampled.input.hoge2', events[0][0] # tag
 
-    assert_equal 'record3',
-    assert_equal 'record6',
-    assert_equal 'record9',
-    assert_equal 'record12',
+    assert_equal 'record3', events[0][2]['field1']
+    assert_equal 'record6', events[1][2]['field1']
+    assert_equal 'record9', events[2][2]['field1']
+    assert_equal 'record12', events[3][2]['field1']
   end
 
   def test_minimum_rate
@@ -99,21 +100,21 @@ sample_unit tag
 remove_prefix input
 minimum_rate_per_min 100
 ]
-    d = create_driver(config
+    d = create_driver(config)
     time = Time.parse("2012-01-02 13:14:15").to_i
-    d.run do
+    d.run(default_tag: 'input.hoge3') do
       (1..100).each do |t|
-        d.
+        d.feed(time, {'times' => t, 'data' => 'x'})
       end
       (101..130).each do |t|
-        d.
+        d.feed(time, {'times' => t, 'data' => 'y'})
      end
     end
-
-    assert_equal 103,
-    assert_equal 'sampled.hoge3',
-    assert_equal ((1..100).map(&:to_i) + [110, 120, 130]),
-    assert_equal (['x']*100 + ['y']*3),
+    events = d.events
+    assert_equal 103, events.length
+    assert_equal 'sampled.hoge3', events[0][0]
+    assert_equal ((1..100).map(&:to_i) + [110, 120, 130]), events.map{|_tag,_time,r| r['times']}
+    assert_equal (['x']*100 + ['y']*3), events.map{|_tag,_time,r| r['data']}
 
   end
   def test_minimum_rate_expire
@@ -126,39 +127,39 @@ sample_unit tag
 remove_prefix input
 minimum_rate_per_min 10
 ]
-    d = create_driver(config
+    d = create_driver(config)
     time = Time.parse("2012-01-02 13:14:15").to_i
-    d.run do
+    d.run(default_tag: 'input.hoge4') do
       (1..100).each do |t|
-        d.
+        d.feed(time, {'times' => t, 'data' => 'x'})
       end
       sleep 60
       (101..130).each do |t|
-        d.
+        d.feed(time+60, {'times' => t, 'data' => 'y'})
      end
     end
-
-    # assert_equal (19 + 12),
-    assert_equal 'sampled.hoge4',
-    assert_equal ((1..10).map(&:to_i)+[20,30,40,50,60,70,80,90,100]+(101..110).map(&:to_i)+[120,130]),
-    assert_equal (['x']*19 + ['y']*12),
+    events = d.events
+    # assert_equal (19 + 12), events.length
+    assert_equal 'sampled.hoge4', events[0][0]
+    assert_equal ((1..10).map(&:to_i)+[20,30,40,50,60,70,80,90,100]+(101..110).map(&:to_i)+[120,130]), events.map{|_tag,_time,r| r['times']}
+    assert_equal (['x']*19 + ['y']*12), events.map{|_tag,_time,r| r['data']}
   end
 
   def test_without_add_prefix_but_remove_prefix
     config = %[
 interval 10
-add_prefix
+add_prefix
 remove_prefix input
 ]
-    d = create_driver(config
+    d = create_driver(config)
     time = Time.parse("2012-01-02 13:14:15").to_i
-    d.run do
+    d.run(default_tag: 'input.hoge3') do
       (1..100).each do |t|
-        d.
+        d.feed(time, {'times' => t, 'data' => 'x'})
       end
     end
-
-    assert_equal 10,
-    assert_equal 'hoge3',
+    events = d.events
+    assert_equal 10, events.length
+    assert_equal 'hoge3', events[0][0]
   end
 end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-sampling-filter
 version: !ruby/object:Gem::Version
-  version: 0.2.1
+  version: 1.0.0
 platform: ruby
 authors:
 - TAGOMORI Satoshi
@@ -44,20 +44,20 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 0.12
+        version: 0.14.12
     - - "<"
       - !ruby/object:Gem::Version
-        version:
+        version: '2'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 0.12
+        version: 0.14.12
    - - "<"
       - !ruby/object:Gem::Version
-        version:
+        version: '2'
 description: fluentd plugin to pickup sample data from matched massages
 email:
 - tagomoris@gmail.com