fluent-plugin-sampling-filter 0.2.1 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: d8938823efb7b0fc5c35a84a5a5f76b75d532358
- data.tar.gz: edf5a0bebb0bd60e188a8916754e5feef53fc218
+ metadata.gz: 4bb5bf2e2842f56be230defcabebf6574271d6c6
+ data.tar.gz: 54962ccbd36c5c0d502e7bbb0e79fc45748c545d
  SHA512:
- metadata.gz: 86bc4dde59d152e2da88370b06b5c3e98e440c906bd1d853e2a02aa37a69e4da83debd5886496c5691c30351963fe52a14d07e6403dc5b1765434b4e306cfdba
- data.tar.gz: d8a156be0aa4293aa857105c29405cb515b1155aa6a1f5e63646dd58774168fe070c7039274bea197165d54bb206bf3bd594436cc41a001a2a8bb54d3864975e
+ metadata.gz: ccd41752621ecfa648e8e2427517fb68c19e9a0b1ea8e0569c81145b82e15d971e6aab52debd2bcdf458e6fff2c4eb71b2e3d3b0448cfc9b21ec512a4bbb2d0e
+ data.tar.gz: 8dcc45ffd1bec441e39882accb4fde2e0290ff183e669f67b26efa96e95c8b61957279f9cff89f05049469d946517092b97c1ebfc06767fe1d07a766409dfae8
data/.travis.yml CHANGED
@@ -1,7 +1,6 @@
  sudo: false
  language: ruby
  rvm:
- - 2.0.0
  - 2.1
  - 2.2
  - 2.3.0
@@ -2,7 +2,7 @@

  Gem::Specification.new do |gem|
  gem.name = "fluent-plugin-sampling-filter"
- gem.version = "0.2.1"
+ gem.version = "1.0.0"
  gem.authors = ["TAGOMORI Satoshi"]
  gem.email = ["tagomoris@gmail.com"]
  gem.description = %q{fluentd plugin to pickup sample data from matched massages}
@@ -17,5 +17,5 @@ Gem::Specification.new do |gem|

  gem.add_development_dependency "rake"
  gem.add_runtime_dependency "test-unit", "~> 3.1.0"
- gem.add_runtime_dependency "fluentd", [">= 0.12.0", "< 0.14.0"]
+ gem.add_runtime_dependency "fluentd", [">= 0.14.12", "< 2"]
  end
@@ -1,62 +1,56 @@
- class Fluent::SamplingFilter < Fluent::Filter
+ require 'fluent/plugin/filter'
+ require 'fluent/clock'
+
+ class Fluent::Plugin::SamplingFilter < Fluent::Plugin::Filter
  Fluent::Plugin.register_filter('sampling_filter', self)

  config_param :interval, :integer
- config_param :sample_unit, :string, default: 'tag'
+ config_param :sample_unit, :enum, list: [:tag, :all], default: :tag
  config_param :minimum_rate_per_min, :integer, default: nil

  def configure(conf)
  super

- @sample_unit = case @sample_unit
- when 'tag'
- :tag
- when 'all'
- :all
- else
- raise Fluent::ConfigError, "sample_unit allows only 'tag' or 'all'"
- end
  @counts = {}
  @resets = {} if @minimum_rate_per_min
  end

- def filter_stream(tag, es)
- t = if @sample_unit == :all
- 'all'
- else
- tag
- end
-
- new_es = Fluent::MultiEventStream.new
+ # Access to @counts SHOULD be protected by mutex, with a heavy penalty.
+ # Code below is not thread safe, but @counts (counter for sampling rate) is not
+ # so serious value (and probably will not be broken...),
+ # then i let here as it is now.

- # Access to @counts SHOULD be protected by mutex, with a heavy penalty.
- # Code below is not thread safe, but @counts (counter for sampling rate) is not
- # so serious value (and probably will not be broken...),
- # then i let here as it is now.
+ def filter(tag, _time, record)
+ t = @sample_unit == :all ? 'all' : tag
  if @minimum_rate_per_min
- unless @resets[t]
- @resets[t] = Fluent::Engine.now + (60 - rand(30))
- end
- if Fluent::Engine.now > @resets[t]
- @resets[t] = Fluent::Engine.now + 60
- @counts[t] = 0
- end
- es.each do |time,record|
- c = (@counts[t] = @counts.fetch(t, 0) + 1)
- if c < @minimum_rate_per_min or c % @interval == 0
- new_es.add(time, record.dup)
- end
- end
+ filter_with_minimum_rate(t, record)
+ else
+ filter_simple(t, record)
+ end
+ end
+
+ def filter_simple(t, record)
+ c = (@counts[t] = @counts.fetch(t, 0) + 1)
+ # reset only just before @counts[t] is to be bignum from fixnum
+ @counts[t] = 0 if c > 0x6fffffff
+ if c % @interval == 0
+ record
+ else
+ nil
+ end
+ end
+
+ def filter_with_minimum_rate(t, record)
+ @resets[t] ||= Fluent::Clock.now + (60 - rand(30))
+ if Fluent::Clock.now > @resets[t]
+ @resets[t] = Fluent::Clock.now + 60
+ @counts[t] = 0
+ end
+ c = (@counts[t] = @counts.fetch(t, 0) + 1)
+ if c < @minimum_rate_per_min || c % @interval == 0
+ record.dup
  else
- es.each do |time,record|
- c = (@counts[t] = @counts.fetch(t, 0) + 1)
- if c % @interval == 0
- new_es.add(time, record.dup)
- # reset only just before @counts[t] is to be bignum from fixnum
- @counts[t] = 0 if c > 0x6fffffff
- end
- end
+ nil
  end
- new_es
  end
  end
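Note: the filter plugin above now implements the per-record #filter method of the fluentd v0.14 API instead of #filter_stream, and sample_unit is validated as an enum. A minimal configuration sketch built only from the config_param declarations in this diff; the tag pattern and parameter values are illustrative assumptions, not taken from the package:

<filter app.**>
  @type sampling_filter
  interval 10              # emit roughly 1 of every 10 records per sampling unit
  sample_unit tag          # count per tag; use 'all' to count across every tag
  minimum_rate_per_min 100 # optional: always pass records while the per-minute count is below 100
</filter>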
@@ -1,36 +1,31 @@
- class Fluent::SamplingFilterOutput < Fluent::Output
+ require 'fluent/plugin/output'
+ require 'fluent/clock'
+
+ class Fluent::Plugin::SamplingFilterOutput < Fluent::Plugin::Output
  Fluent::Plugin.register_output('sampling_filter', self)

- config_param :interval, :integer
- config_param :sample_unit, :string, :default => 'tag'
- config_param :remove_prefix, :string, :default => nil
- config_param :add_prefix, :string, :default => 'sampled'
- config_param :minimum_rate_per_min, :integer, :default => nil
+ helpers :event_emitter

- # Define `log` method for v0.10.42 or earlier
- unless method_defined?(:log)
- define_method("log") { $log }
- end
+ config_param :interval, :integer
+ config_param :sample_unit, :enum, list: [:tag, :all], default: :tag
+ config_param :remove_prefix, :string, default: nil
+ config_param :add_prefix, :string, default: 'sampled'
+ config_param :minimum_rate_per_min, :integer, default: nil

  def configure(conf)
  super

+ log.warn "sampling_filter output plugin is deprecated. use sampling_filter filter plugin instead with <label> routing."
+
  if @remove_prefix
  @removed_prefix_string = @remove_prefix + '.'
  @removed_length = @removed_prefix_string.length
  elsif @add_prefix.empty?
  raise Fluent::ConfigError, "either of 'add_prefix' or 'remove_prefix' must be specified"
  end
+ @added_prefix_string = nil
  @added_prefix_string = @add_prefix + '.' unless @add_prefix.empty?

- @sample_unit = case @sample_unit
- when 'tag'
- :tag
- when 'all'
- :all
- else
- raise Fluent::ConfigError, "sample_unit allows only 'tag' or 'all'"
- end
  @counts = {}
  @resets = {} if @minimum_rate_per_min
  end
@@ -51,7 +46,7 @@ class Fluent::SamplingFilterOutput < Fluent::Output
  }
  end

- def emit(tag, es, chain)
+ def process(tag, es)
  t = if @sample_unit == :all
  'all'
  else
@@ -65,11 +60,9 @@ class Fluent::SamplingFilterOutput < Fluent::Output
  # so serious value (and probably will not be broken...),
  # then i let here as it is now.
  if @minimum_rate_per_min
- unless @resets[t]
- @resets[t] = Fluent::Engine.now + (60 - rand(30))
- end
- if Fluent::Engine.now > @resets[t]
- @resets[t] = Fluent::Engine.now + 60
+ @resets[t] ||= Fluent::Clock.now + (60 - rand(30))
+ if Fluent::Clock.now > @resets[t]
+ @resets[t] = Fluent::Clock.now + 60
  @counts[t] = 0
  end
  es.each do |time,record|
@@ -90,7 +83,5 @@ class Fluent::SamplingFilterOutput < Fluent::Output
  end

  emit_sampled(tag, pairs)
-
- chain.next
  end
  end
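Note: configure() above now logs that the output plugin is deprecated in favor of the filter plugin with <label> routing. A rough sketch of that suggested setup; the label name, tag patterns, and the stdout destination are illustrative assumptions, not part of the package:

<match app.**>
  @type relabel      # built-in fluentd output that only re-routes events to a label
  @label @SAMPLED
</match>

<label @SAMPLED>
  <filter app.**>
    @type sampling_filter
    interval 10
  </filter>
  <match app.**>
    @type stdout     # placeholder destination
  </match>
</label>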
@@ -1,4 +1,5 @@
  require 'helper'
+ require 'fluent/test/driver/filter'

  class SamplingFilterTest < Test::Unit::TestCase
  def setup
@@ -10,13 +11,13 @@ class SamplingFilterTest < Test::Unit::TestCase
  sample_unit tag
  ]

- def create_driver(conf=CONFIG,tag='test')
- Fluent::Test::FilterTestDriver.new(Fluent::SamplingFilter, tag).configure(conf)
+ def create_driver(conf=CONFIG)
+ Fluent::Test::Driver::Filter.new(Fluent::Plugin::SamplingFilter).configure(conf)
  end

  def test_configure
  assert_raise(Fluent::ConfigError) {
- d = create_driver('')
+ create_driver('')
  }
  d = create_driver %[
  interval 5
@@ -34,54 +35,52 @@ class SamplingFilterTest < Test::Unit::TestCase
  end

  def test_filter
- d1 = create_driver(CONFIG, 'input.hoge1')
+ d1 = create_driver(CONFIG)
  time = Time.parse("2012-01-02 13:14:15").to_i
- d1.run do
- d1.filter({'field1' => 'record1', 'field2' => 1})
- d1.filter({'field1' => 'record2', 'field2' => 2})
- d1.filter({'field1' => 'record3', 'field2' => 3})
- d1.filter({'field1' => 'record4', 'field2' => 4})
- d1.filter({'field1' => 'record5', 'field2' => 5})
- d1.filter({'field1' => 'record6', 'field2' => 6})
- d1.filter({'field1' => 'record7', 'field2' => 7})
- d1.filter({'field1' => 'record8', 'field2' => 8})
- d1.filter({'field1' => 'record9', 'field2' => 9})
- d1.filter({'field1' => 'record10', 'field2' => 10})
- d1.filter({'field1' => 'record11', 'field2' => 11})
- d1.filter({'field1' => 'record12', 'field2' => 12})
+ d1.run(default_tag: 'input.hoge1') do
+ d1.feed({'field1' => 'record1', 'field2' => 1})
+ d1.feed({'field1' => 'record2', 'field2' => 2})
+ d1.feed({'field1' => 'record3', 'field2' => 3})
+ d1.feed({'field1' => 'record4', 'field2' => 4})
+ d1.feed({'field1' => 'record5', 'field2' => 5})
+ d1.feed({'field1' => 'record6', 'field2' => 6})
+ d1.feed({'field1' => 'record7', 'field2' => 7})
+ d1.feed({'field1' => 'record8', 'field2' => 8})
+ d1.feed({'field1' => 'record9', 'field2' => 9})
+ d1.feed({'field1' => 'record10', 'field2' => 10})
+ d1.feed({'field1' => 'record11', 'field2' => 11})
+ d1.feed({'field1' => 'record12', 'field2' => 12})
  end
- filtered = d1.filtered_as_array
+ filtered = d1.filtered
  assert_equal 1, filtered.length
- assert_equal 'input.hoge1', filtered[0][0] # tag
- assert_equal 'record10', filtered[0][2]['field1']
- assert_equal 10, filtered[0][2]['field2']
+ assert_equal 'record10', filtered[0][1]['field1']
+ assert_equal 10, filtered[0][1]['field2']

  d2 = create_driver(%[
  interval 3
- ], 'input.hoge2')
+ ])
  time = Time.parse("2012-01-02 13:14:15").to_i
- d2.run do
- d2.filter({'field1' => 'record1', 'field2' => 1})
- d2.filter({'field1' => 'record2', 'field2' => 2})
- d2.filter({'field1' => 'record3', 'field2' => 3})
- d2.filter({'field1' => 'record4', 'field2' => 4})
- d2.filter({'field1' => 'record5', 'field2' => 5})
- d2.filter({'field1' => 'record6', 'field2' => 6})
- d2.filter({'field1' => 'record7', 'field2' => 7})
- d2.filter({'field1' => 'record8', 'field2' => 8})
- d2.filter({'field1' => 'record9', 'field2' => 9})
- d2.filter({'field1' => 'record10', 'field2' => 10})
- d2.filter({'field1' => 'record11', 'field2' => 11})
- d2.filter({'field1' => 'record12', 'field2' => 12})
+ d2.run(default_tag: 'input.hoge2') do
+ d2.feed({'field1' => 'record1', 'field2' => 1})
+ d2.feed({'field1' => 'record2', 'field2' => 2})
+ d2.feed({'field1' => 'record3', 'field2' => 3})
+ d2.feed({'field1' => 'record4', 'field2' => 4})
+ d2.feed({'field1' => 'record5', 'field2' => 5})
+ d2.feed({'field1' => 'record6', 'field2' => 6})
+ d2.feed({'field1' => 'record7', 'field2' => 7})
+ d2.feed({'field1' => 'record8', 'field2' => 8})
+ d2.feed({'field1' => 'record9', 'field2' => 9})
+ d2.feed({'field1' => 'record10', 'field2' => 10})
+ d2.feed({'field1' => 'record11', 'field2' => 11})
+ d2.feed({'field1' => 'record12', 'field2' => 12})
  end
- filtered = d2.filtered_as_array
+ filtered = d2.filtered
  assert_equal 4, filtered.length
- assert_equal 'input.hoge2', filtered[0][0] # tag

- assert_equal 'record3', filtered[0][2]['field1']
- assert_equal 'record6', filtered[1][2]['field1']
- assert_equal 'record9', filtered[2][2]['field1']
- assert_equal 'record12', filtered[3][2]['field1']
+ assert_equal 'record3', filtered[0][1]['field1']
+ assert_equal 'record6', filtered[1][1]['field1']
+ assert_equal 'record9', filtered[2][1]['field1']
+ assert_equal 'record12', filtered[3][1]['field1']
  end

  def test_filter_minimum_rate
@@ -90,21 +89,20 @@ interval 10
  sample_unit tag
  minimum_rate_per_min 100
  ]
- d = create_driver(config, 'input.hoge3')
+ d = create_driver(config)
  time = Time.parse("2012-01-02 13:14:15").to_i
- d.run do
+ d.run(default_tag: 'input.hoge3') do
  (1..100).each do |t|
- d.filter({'times' => t, 'data' => 'x'})
+ d.feed(time, {'times' => t, 'data' => 'x'})
  end
  (101..130).each do |t|
- d.filter({'times' => t, 'data' => 'y'})
+ d.feed(time, {'times' => t, 'data' => 'y'})
  end
  end
- filtered = d.filtered_as_array
+ filtered = d.filtered
  assert_equal 103, filtered.length
- assert_equal 'input.hoge3', filtered[0][0]
- assert_equal ((1..100).map(&:to_i) + [110, 120, 130]), filtered.map{|t,time,r| r['times']}
- assert_equal (['x']*100 + ['y']*3), filtered.map{|t,time,r| r['data']}
+ assert_equal ((1..100).map(&:to_i) + [110, 120, 130]), filtered.map{|_time,r| r['times']}
+ assert_equal (['x']*100 + ['y']*3), filtered.map{|_time,r| r['data']}
  end

  def test_filter_minimum_rate_expire
@@ -113,17 +111,16 @@ interval 10
  sample_unit tag
  minimum_rate_per_min 10
  ]
- d = create_driver(config, 'input.hoge4')
+ d = create_driver(config)
  time = Time.parse("2012-01-02 13:14:15").to_i
- d.run do
+ d.run(default_tag: 'input.hoge4') do
  (1..30).each do |t|
- d.filter({'times' => t, 'data' => 'x'})
+ d.feed(time, {'times' => t, 'data' => 'x'})
  end
  end
- filtered = d.filtered_as_array
+ filtered = d.filtered
  assert_equal 12, filtered.length
- assert_equal 'input.hoge4', filtered[0][0]
- assert_equal ((1..10).map(&:to_i)+[20,30]), filtered.map{|t,time,r| r['times']}
- assert_equal (['x']*12), filtered.map{|t,time,r| r['data']}
+ assert_equal ((1..10).map(&:to_i)+[20,30]), filtered.map{|_time,r| r['times']}
+ assert_equal (['x']*12), filtered.map{|_time,r| r['data']}
  end
  end
@@ -1,10 +1,11 @@
  require 'helper'
+ require 'fluent/test/driver/output'

  class SamplingFilterOutputTest < Test::Unit::TestCase
  def setup
  Fluent::Test.setup
  end
-
+
  CONFIG = %[
  interval 10
  sample_unit tag
@@ -12,13 +13,13 @@ class SamplingFilterOutputTest < Test::Unit::TestCase
  add_prefix sampled
  ]

- def create_driver(conf=CONFIG,tag='test')
- Fluent::Test::OutputTestDriver.new(Fluent::SamplingFilterOutput, tag).configure(conf)
+ def create_driver(conf=CONFIG)
+ Fluent::Test::Driver::Output.new(Fluent::Plugin::SamplingFilterOutput).configure(conf)
  end

  def test_configure
  assert_raise(Fluent::ConfigError) {
- d = create_driver('')
+ create_driver('')
  }
  d = create_driver %[
  interval 5
@@ -33,7 +34,7 @@ class SamplingFilterOutputTest < Test::Unit::TestCase
  interval 1000
  sample_unit all
  remove_prefix test
- add_prefix output
+ add_prefix output
  ]
  assert_equal 1000, d.instance.interval
  assert_equal :all, d.instance.sample_unit
@@ -42,54 +43,54 @@ class SamplingFilterOutputTest < Test::Unit::TestCase
  end

  def test_emit
- d1 = create_driver(CONFIG, 'input.hoge1')
+ d1 = create_driver(CONFIG)
  time = Time.parse("2012-01-02 13:14:15").to_i
- d1.run do
- d1.emit({'field1' => 'record1', 'field2' => 1})
- d1.emit({'field1' => 'record2', 'field2' => 2})
- d1.emit({'field1' => 'record3', 'field2' => 3})
- d1.emit({'field1' => 'record4', 'field2' => 4})
- d1.emit({'field1' => 'record5', 'field2' => 5})
- d1.emit({'field1' => 'record6', 'field2' => 6})
- d1.emit({'field1' => 'record7', 'field2' => 7})
- d1.emit({'field1' => 'record8', 'field2' => 8})
- d1.emit({'field1' => 'record9', 'field2' => 9})
- d1.emit({'field1' => 'record10', 'field2' => 10})
- d1.emit({'field1' => 'record11', 'field2' => 11})
- d1.emit({'field1' => 'record12', 'field2' => 12})
+ d1.run(default_tag: 'input.hoge1') do
+ d1.feed(time, {'field1' => 'record1', 'field2' => 1})
+ d1.feed(time, {'field1' => 'record2', 'field2' => 2})
+ d1.feed(time, {'field1' => 'record3', 'field2' => 3})
+ d1.feed(time, {'field1' => 'record4', 'field2' => 4})
+ d1.feed(time, {'field1' => 'record5', 'field2' => 5})
+ d1.feed(time, {'field1' => 'record6', 'field2' => 6})
+ d1.feed(time, {'field1' => 'record7', 'field2' => 7})
+ d1.feed(time, {'field1' => 'record8', 'field2' => 8})
+ d1.feed(time, {'field1' => 'record9', 'field2' => 9})
+ d1.feed(time, {'field1' => 'record10', 'field2' => 10})
+ d1.feed(time, {'field1' => 'record11', 'field2' => 11})
+ d1.feed(time, {'field1' => 'record12', 'field2' => 12})
  end
- emits = d1.emits
- assert_equal 1, emits.length
- assert_equal 'sampled.hoge1', emits[0][0] # tag
- assert_equal 'record10', emits[0][2]['field1']
- assert_equal 10, emits[0][2]['field2']
+ events = d1.events
+ assert_equal 1, events.length
+ assert_equal 'sampled.hoge1', events[0][0] # tag
+ assert_equal 'record10', events[0][2]['field1']
+ assert_equal 10, events[0][2]['field2']

  d2 = create_driver(%[
  interval 3
- ], 'input.hoge2')
+ ])
  time = Time.parse("2012-01-02 13:14:15").to_i
- d2.run do
- d2.emit({'field1' => 'record1', 'field2' => 1})
- d2.emit({'field1' => 'record2', 'field2' => 2})
- d2.emit({'field1' => 'record3', 'field2' => 3})
- d2.emit({'field1' => 'record4', 'field2' => 4})
- d2.emit({'field1' => 'record5', 'field2' => 5})
- d2.emit({'field1' => 'record6', 'field2' => 6})
- d2.emit({'field1' => 'record7', 'field2' => 7})
- d2.emit({'field1' => 'record8', 'field2' => 8})
- d2.emit({'field1' => 'record9', 'field2' => 9})
- d2.emit({'field1' => 'record10', 'field2' => 10})
- d2.emit({'field1' => 'record11', 'field2' => 11})
- d2.emit({'field1' => 'record12', 'field2' => 12})
+ d2.run(default_tag: 'input.hoge2') do
+ d2.feed(time, {'field1' => 'record1', 'field2' => 1})
+ d2.feed(time, {'field1' => 'record2', 'field2' => 2})
+ d2.feed(time, {'field1' => 'record3', 'field2' => 3})
+ d2.feed(time, {'field1' => 'record4', 'field2' => 4})
+ d2.feed(time, {'field1' => 'record5', 'field2' => 5})
+ d2.feed(time, {'field1' => 'record6', 'field2' => 6})
+ d2.feed(time, {'field1' => 'record7', 'field2' => 7})
+ d2.feed(time, {'field1' => 'record8', 'field2' => 8})
+ d2.feed(time, {'field1' => 'record9', 'field2' => 9})
+ d2.feed(time, {'field1' => 'record10', 'field2' => 10})
+ d2.feed(time, {'field1' => 'record11', 'field2' => 11})
+ d2.feed(time, {'field1' => 'record12', 'field2' => 12})
  end
- emits = d2.emits
- assert_equal 4, emits.length
- assert_equal 'sampled.input.hoge2', emits[0][0] # tag
+ events = d2.events
+ assert_equal 4, events.length
+ assert_equal 'sampled.input.hoge2', events[0][0] # tag

- assert_equal 'record3', emits[0][2]['field1']
- assert_equal 'record6', emits[1][2]['field1']
- assert_equal 'record9', emits[2][2]['field1']
- assert_equal 'record12', emits[3][2]['field1']
+ assert_equal 'record3', events[0][2]['field1']
+ assert_equal 'record6', events[1][2]['field1']
+ assert_equal 'record9', events[2][2]['field1']
+ assert_equal 'record12', events[3][2]['field1']
  end

  def test_minimum_rate
@@ -99,21 +100,21 @@ sample_unit tag
  remove_prefix input
  minimum_rate_per_min 100
  ]
- d = create_driver(config, 'input.hoge3')
+ d = create_driver(config)
  time = Time.parse("2012-01-02 13:14:15").to_i
- d.run do
+ d.run(default_tag: 'input.hoge3') do
  (1..100).each do |t|
- d.emit({'times' => t, 'data' => 'x'})
+ d.feed(time, {'times' => t, 'data' => 'x'})
  end
  (101..130).each do |t|
- d.emit({'times' => t, 'data' => 'y'})
+ d.feed(time, {'times' => t, 'data' => 'y'})
  end
  end
- emits = d.emits
- assert_equal 103, emits.length
- assert_equal 'sampled.hoge3', emits[0][0]
- assert_equal ((1..100).map(&:to_i) + [110, 120, 130]), emits.map{|t,time,r| r['times']}
- assert_equal (['x']*100 + ['y']*3), emits.map{|t,time,r| r['data']}
+ events = d.events
+ assert_equal 103, events.length
+ assert_equal 'sampled.hoge3', events[0][0]
+ assert_equal ((1..100).map(&:to_i) + [110, 120, 130]), events.map{|_tag,_time,r| r['times']}
+ assert_equal (['x']*100 + ['y']*3), events.map{|_tag,_time,r| r['data']}

  end
  def test_minimum_rate_expire
@@ -126,39 +127,39 @@ sample_unit tag
  remove_prefix input
  minimum_rate_per_min 10
  ]
- d = create_driver(config, 'input.hoge4')
+ d = create_driver(config)
  time = Time.parse("2012-01-02 13:14:15").to_i
- d.run do
+ d.run(default_tag: 'input.hoge4') do
  (1..100).each do |t|
- d.emit({'times' => t, 'data' => 'x'})
+ d.feed(time, {'times' => t, 'data' => 'x'})
  end
  sleep 60
  (101..130).each do |t|
- d.emit({'times' => t, 'data' => 'y'})
+ d.feed(time+60, {'times' => t, 'data' => 'y'})
  end
  end
- emits = d.emits
- # assert_equal (19 + 12), emits.length
- assert_equal 'sampled.hoge4', emits[0][0]
- assert_equal ((1..10).map(&:to_i)+[20,30,40,50,60,70,80,90,100]+(101..110).map(&:to_i)+[120,130]), emits.map{|t,time,r| r['times']}
- assert_equal (['x']*19 + ['y']*12), emits.map{|t,time,r| r['data']}
+ events = d.events
+ # assert_equal (19 + 12), events.length
+ assert_equal 'sampled.hoge4', events[0][0]
+ assert_equal ((1..10).map(&:to_i)+[20,30,40,50,60,70,80,90,100]+(101..110).map(&:to_i)+[120,130]), events.map{|_tag,_time,r| r['times']}
+ assert_equal (['x']*19 + ['y']*12), events.map{|_tag,_time,r| r['data']}
  end

  def test_without_add_prefix_but_remove_prefix
  config = %[
  interval 10
- add_prefix # empty
+ add_prefix
  remove_prefix input
  ]
- d = create_driver(config, 'input.hoge3')
+ d = create_driver(config)
  time = Time.parse("2012-01-02 13:14:15").to_i
- d.run do
+ d.run(default_tag: 'input.hoge3') do
  (1..100).each do |t|
- d.emit({'times' => t, 'data' => 'x'})
+ d.feed(time, {'times' => t, 'data' => 'x'})
  end
  end
- emits = d.emits
- assert_equal 10, emits.length
- assert_equal 'hoge3', emits[0][0]
+ events = d.events
+ assert_equal 10, events.length
+ assert_equal 'hoge3', events[0][0]
  end
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-sampling-filter
  version: !ruby/object:Gem::Version
- version: 0.2.1
+ version: 1.0.0
  platform: ruby
  authors:
  - TAGOMORI Satoshi
@@ -44,20 +44,20 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.12.0
+ version: 0.14.12
  - - "<"
  - !ruby/object:Gem::Version
- version: 0.14.0
+ version: '2'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.12.0
+ version: 0.14.12
  - - "<"
  - !ruby/object:Gem::Version
- version: 0.14.0
+ version: '2'
  description: fluentd plugin to pickup sample data from matched massages
  email:
  - tagomoris@gmail.com