logfile_interval 1.2.1 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. checksums.yaml +4 -4
  2. data/Gemfile.lock +1 -1
  3. data/README.md +15 -11
  4. data/bin/aggregate_access_log.rb +9 -9
  5. data/bin/readme.rb +6 -5
  6. data/docs/design.rb +88 -62
  7. data/lib/logfile_interval/aggregator/base.rb +15 -1
  8. data/lib/logfile_interval/aggregator/count.rb +10 -2
  9. data/lib/logfile_interval/aggregator/delta.rb +6 -6
  10. data/lib/logfile_interval/aggregator/num_lines.rb +13 -0
  11. data/lib/logfile_interval/aggregator/registrar.rb +32 -0
  12. data/lib/logfile_interval/aggregator_set.rb +34 -0
  13. data/lib/logfile_interval/interval.rb +5 -31
  14. data/lib/logfile_interval/interval_builder/ascending.rb +23 -0
  15. data/lib/logfile_interval/interval_builder/descending.rb +22 -0
  16. data/lib/logfile_interval/interval_builder.rb +62 -19
  17. data/lib/logfile_interval/logfile.rb +34 -5
  18. data/lib/logfile_interval/logfile_set.rb +38 -24
  19. data/lib/logfile_interval/parsed_line/base.rb +31 -0
  20. data/lib/logfile_interval/{line_parser/base.rb → parsed_line/parser.rb} +15 -40
  21. data/lib/logfile_interval/version.rb +1 -1
  22. data/lib/logfile_interval.rb +4 -2
  23. data/spec/lib/aggregator_set_spec.rb +15 -0
  24. data/spec/lib/aggregator_spec.rb +39 -29
  25. data/spec/lib/custom_aggregator_spec.rb +3 -3
  26. data/spec/lib/interval_builder_spec.rb +185 -38
  27. data/spec/lib/interval_spec.rb +41 -30
  28. data/spec/lib/line_parser/base_spec.rb +16 -5
  29. data/spec/lib/logfile_set_spec.rb +23 -1
  30. data/spec/lib/logfile_spec.rb +23 -1
  31. data/spec/support/lib/access_log.rb +1 -1
  32. data/spec/support/lib/custom_timing_log.rb +3 -3
  33. data/spec/support/lib/timing_log.rb +4 -2
  34. data/spec/support/logfiles/timing.log.2 +1 -0
  35. metadata +13 -5
  36. data/lib/logfile_interval/aggregator/group_and_count.rb +0 -14
  37. data/lib/logfile_interval/aggregator.rb +0 -27
data/spec/lib/interval_spec.rb CHANGED
@@ -11,7 +11,7 @@ module LogfileInterval
  end

  it 'gets instantiated with empty data' do
- interval = Interval.new(@end_time, @length, LineParser::TimingLog)
+ interval = Interval.new(@end_time, @length, ParsedLine::TimingLog.columns)
  interval.size.should == 0
  interval[:total_time].should == 0
  interval[:num_bytes].should == 0
@@ -21,21 +21,22 @@ module LogfileInterval

  context :to_hash do
  it 'returns a hash' do
- interval = Interval.new(@end_time, @length, LineParser::TimingLog)
+ interval = Interval.new(@end_time, @length, ParsedLine::TimingLog)
  interval.to_hash.should be_a(Hash)
  end

  it 'has a key for all columns' do
- record = LineParser::TimingLog.create_record('1385942400, 192.168.0.5, posts#index, 100, 2000, 53.0')
- interval = Interval.new(@end_time, @length, LineParser::TimingLog)
+ record = ParsedLine::TimingLog.create_record('1385942400, 192.168.0.5, posts#index, 100, 2000, 53.0')
+ interval = Interval.new(@end_time, @length, ParsedLine::TimingLog.columns)
  interval.add_record(record)
  hinterval = interval.to_hash
  hinterval.keys.should include(:ip, :total_time, :action, :num_bytes, :rss)
  end

  it 'with no data, should have keys with 0 values' do
- interval = Interval.new(@end_time, @length, LineParser::TimingLog)
+ interval = Interval.new(@end_time, @length, ParsedLine::TimingLog.columns)
  hinterval = interval.to_hash
+ hinterval[:num_lines].should == 0
  hinterval[:ip].should == 0
  hinterval[:action].should == 0
  hinterval[:total_time].should == 0
@@ -47,40 +48,46 @@ module LogfileInterval
  context :add_record do
  context 'basics' do
  before :each do
- @interval = Interval.new(@end_time, @length, LineParser::TimingLog)
+ @interval = Interval.new(@end_time, @length, ParsedLine::TimingLog.columns)
  end

  it 'rejects record out of interval' do
- oor_record = LineParser::TimingLog.create_record('1385942450, 192.168.0.5, posts#index, 100, 20000, 50.0')
+ oor_record = ParsedLine::TimingLog.create_record('1385942450, 192.168.0.5, posts#index, 100, 20000, 50.0')
  lambda { @interval.add_record(oor_record) }.should raise_error(Interval::OutOfRange)
  end

  it 'rejects record at interval start_time' do
- oor_record = LineParser::TimingLog.create_record('1385942100, 192.168.0.5, posts#index, 100, 20000, 50.0')
+ oor_record = ParsedLine::TimingLog.create_record('1385942100, 192.168.0.5, posts#index, 100, 20000, 50.0')
  lambda { @interval.add_record(oor_record) }.should raise_error(Interval::OutOfRange)
  end

+ it 'accepts record at interval end_time' do
+ oor_record = ParsedLine::TimingLog.create_record('1385942400, 192.168.0.5, posts#index, 100, 20000, 50.0')
+ lambda { @interval.add_record(oor_record) }.should_not raise_error
+ end
+
  it 'adds 1 record to interval' do
- record1 = LineParser::TimingLog.create_record('1385942400, 192.168.0.5, posts#index, 100, 20000, 50.0')
+ record1 = ParsedLine::TimingLog.create_record('1385942400, 192.168.0.5, posts#index, 100, 20000, 50.0')
  @interval.add_record(record1)

  @interval.size.should == 1
+ @interval[:num_lines].should == 1
  @interval[:total_time].should == 100
  @interval[:num_bytes].should == 20000
- @interval[:action].should == 1
- @interval[:ip].should == 1
+ @interval[:action].should == {"posts#index"=>1}
+ @interval[:ip].should == {"192.168.0.5"=>1}
  end
  end

  context 'with count and group by options' do
- it 'creates an aggregator of type GroupAndCount' do
- expect(Aggregator::GroupAndCount).to receive(:new)
- interval = Interval.new(@end_time, @length, LineParser::TimingLogWithGrouping)
+ it 'creates an aggregator of type Count' do
+ expect(Aggregator::Count).to receive(:new).twice
+ interval = Interval.new(@end_time, @length, ParsedLine::TimingLogWithGrouping.columns)
  end

  it 'add_record accepts key and subkey' do
- interval = Interval.new(@end_time, @length, LineParser::TimingLogWithGrouping)
- record1 = LineParser::TimingLogWithGrouping.create_record('1385942400, 192.168.0.5, posts#index, 100, 20000, 53.0')
+ interval = Interval.new(@end_time, @length, ParsedLine::TimingLogWithGrouping.columns)
+ record1 = ParsedLine::TimingLogWithGrouping.create_record('1385942400, 192.168.0.5, posts#index, 100, 20000, 53.0')
  interval.add_record(record1)
  interval.size.should == 1
  end
@@ -88,13 +95,13 @@ module LogfileInterval

  context 'with 3 records' do
  before :each do
- @interval = Interval.new(@end_time, @length, LineParser::TimingLog)
+ @interval = Interval.new(@end_time, @length, ParsedLine::TimingLog.columns)

- record1 = LineParser::TimingLog.create_record('1385942400, 192.168.0.5, posts#index, 100, 20000, 53.0')
+ record1 = ParsedLine::TimingLog.create_record('1385942400, 192.168.0.5, posts#index, 100, 20000, 53.0')
  @interval.add_record(record1)
- record2 = LineParser::TimingLog.create_record('1385942300, 192.168.0.5, posts#show, 50, 10000, 51.0')
+ record2 = ParsedLine::TimingLog.create_record('1385942300, 192.168.0.5, posts#show, 50, 10000, 51.0')
  @interval.add_record(record2)
- record3 = LineParser::TimingLog.create_record('1385942200, 10.10.10.10, posts#show, 60, 12000, 50.0')
+ record3 = ParsedLine::TimingLog.create_record('1385942200, 10.10.10.10, posts#show, 60, 12000, 50.0')
  @interval.add_record(record3)
  end

@@ -102,6 +109,10 @@ module LogfileInterval
  @interval.size.should == 3
  end

+ it 'counts the number of lines with the num_lines aggregator' do
+ @interval[:num_lines].should == 3
+ end
+
  it 'averages columns with average aggregator' do
  @interval[:total_time].should == 70
  end
@@ -111,26 +122,26 @@ module LogfileInterval
  end

  it 'averages the delta columns with delta aggregator' do
- @interval[:rss].should == 1.5
+ @interval[:rss].should == -1.5
  end

  it 'counts columns with group aggregator' do
- @interval[:ip].should == 3
- @interval[:action].should == 3
+ @interval[:ip].should == { '192.168.0.5' => 2, '10.10.10.10' => 1 }
+ @interval[:action].should == { 'posts#index' => 1, 'posts#show' => 2}
  end
  end

  context 'with group_by key' do
  before :each do
- @interval = Interval.new(@end_time, @length, LineParser::TimingLogWithGrouping)
+ @interval = Interval.new(@end_time, @length, ParsedLine::TimingLogWithGrouping.columns)

- record1 = LineParser::TimingLogWithGrouping.create_record('1385942400, 192.168.0.5, posts#index, 100, 20000, 53.0')
+ record1 = ParsedLine::TimingLogWithGrouping.create_record('1385942400, 192.168.0.5, posts#index, 100, 20000, 53.0')
  @interval.add_record(record1)
- record2 = LineParser::TimingLogWithGrouping.create_record('1385942300, 192.168.0.5, posts#show, 50, 10000, 51.0')
+ record2 = ParsedLine::TimingLogWithGrouping.create_record('1385942300, 192.168.0.5, posts#show, 50, 10000, 51.0')
  @interval.add_record(record2)
- record3 = LineParser::TimingLogWithGrouping.create_record('1385942200, 192.168.0.5, posts#show, 60, 12000, 50.0')
+ record3 = ParsedLine::TimingLogWithGrouping.create_record('1385942200, 192.168.0.5, posts#show, 60, 12000, 50.0')
  @interval.add_record(record3)
- record4 = LineParser::TimingLogWithGrouping.create_record('1385942180, 10.10.10.10, posts#index, 100, 20000, 48.0')
+ record4 = ParsedLine::TimingLogWithGrouping.create_record('1385942180, 10.10.10.10, posts#index, 100, 20000, 48.0')
  @interval.add_record(record4)
  end

@@ -166,8 +177,8 @@ module LogfileInterval
  it 'averages deltas on value column per group column' do
  @interval[:rss].should be_a(Hash)
  @interval[:rss].size.should == 2
- @interval[:rss]['posts#index'].should == 5
- @interval[:rss]['posts#show'].should == 1
+ @interval[:rss]['posts#index'].should == -5
+ @interval[:rss]['posts#show'].should == -1
  end
  end
  end
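Taken together, the interval_spec changes show the core 2.0.0 API shift: Interval.new receives a set of column definitions (ParsedLine::TimingLog.columns) rather than the parser class, :count columns now aggregate into hashes, deltas are signed, and a :num_lines aggregator is available. Below is a minimal usage sketch based only on these specs; the parser class mirrors the spec support file diffed further down, and the Time-object end_time, the 300-second length, and the exact return values in the comments are inferences from the spec timestamps, not documented API.

require 'logfile_interval'

class TimingLog < LogfileInterval::ParsedLine::Base
  # Line format: timestamp, ip, controller#action, total_time, bytes, rss
  set_regex /^(\d+),\s*([\d\.]+),\s*(\w+#\w+),\s*(\d+),\s*(\d+),\s*([\d\.]+)$/

  add_column :name => :timestamp,  :pos => 1, :aggregator => :timestamp
  add_column :name => :num_lines,  :pos => 2, :aggregator => :num_lines
  add_column :name => :ip,         :pos => 2, :aggregator => :count
  add_column :name => :total_time, :pos => 4, :aggregator => :average, :conversion => :integer

  def time
    Time.at(self.timestamp.to_i)
  end
end

# Per the specs, an interval covers (end_time - length, end_time]:
# records at start_time raise Interval::OutOfRange, records at end_time are accepted.
interval = LogfileInterval::Interval.new(Time.at(1385942400), 300, TimingLog.columns)
interval.add_record(TimingLog.create_record('1385942400, 192.168.0.5, posts#index, 100, 20000, 53.0'))
interval.add_record(TimingLog.create_record('1385942300, 192.168.0.5, posts#show, 50, 10000, 51.0'))

interval[:num_lines]  # => 2   (num_lines aggregator)
interval[:total_time] # => 75  (average aggregator)
interval[:ip]         # => {"192.168.0.5"=>2}  (count aggregator now returns a hash)
interval.to_hash      # => one key per configured column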
data/spec/lib/line_parser/base_spec.rb CHANGED
@@ -3,7 +3,7 @@ require 'spec_helper'
  module LogfileInterval
  data_dir = File.join(File.dirname(__FILE__), '..', 'support/logfiles')

- module LineParser
+ module ParsedLine

  describe Base do
  before :each do
@@ -25,6 +25,21 @@ module LogfileInterval
  record.valid?.should be_false
  end

+ describe 'class' do
+ subject { AccessLog }
+
+ it { should respond_to :each }
+
+ describe '#each' do
+ it 'iterates over columns' do
+ AccessLog.each do |col|
+ col.first.should be_a(Symbol)
+ col.last.should be_a(Hash)
+ end
+ end
+ end
+ end
+
  context :create_record do

  it 'instanciates a new AccessLog object' do
@@ -65,10 +80,6 @@ module LogfileInterval
  it 'must fail unless a column is configured'do
  lambda { NoColumnLog.new(@line) }.should raise_error ConfigurationError
  end
-
- it 'must fail with custom aggregator but no custom class' do
- lambda { MissingCustomClass.add_column(:name => 'ip', :pos => 1, :aggregator => :custom) }.should raise_error ConfigurationError
- end
  end

  describe TimingLog do
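The new 'class' block above also documents that a parser class enumerates its own column definitions: the class-level each yields pairs whose first element is the column name (a Symbol) and whose last element is the options Hash. A small sketch of what that enables, assuming a parser class such as the TimingLog defined in the spec support files further down:

# List every configured column and its aggregator.
TimingLog.each do |column|
  name, options = column.first, column.last
  puts "#{name} is aggregated with #{options[:aggregator].inspect}"
end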
data/spec/lib/logfile_set_spec.rb CHANGED
@@ -7,7 +7,7 @@ module LogfileInterval
  describe LogfileSet do
  before :each do
  @logfiles = ["#{data_dir}/access.log.2", "#{data_dir}/access.log.1"]
- @set = LogfileSet.new(@logfiles, LineParser::AccessLog)
+ @set = LogfileSet.new(@logfiles, ParsedLine::AccessLog)
  @first_line = '66.249.67.176 - - [23/Jun/2013:17:00:01 -0800] "GET /package/core/raring/universe/proposed/openldap HTTP/1.1" 200 185 "-" "Google"'
  @second_line = '12.24.48.96 - - [23/Jun/2013:16:59:00 -0800] "GET /package/core/raring/universe/proposed/openldap HTTP/1.1" 200 4555 "-" "Bing)"'
  @last_line = '12.24.48.96 - - [23/Jun/2013:16:49:00 -0800] "GET /package/core/raring/universe/proposed/bash HTTP/1.1" 200 4555 "-" "Bing)"'
@@ -37,6 +37,28 @@ module LogfileInterval
  e.next.should == @second_line
  end
  end
+
+ context :order do
+ it 'iterates backward when order is :desc' do
+ lines = []
+ set = LogfileSet.new(@logfiles, ParsedLine::AccessLog, :desc)
+ set.each_line do |line|
+ lines << line
+ end
+ lines.last.should == @last_line
+ lines.first.should == @first_line
+ end
+
+ it 'iterates upward when order is :asc' do
+ lines = []
+ set = LogfileSet.new(@logfiles, ParsedLine::AccessLog, :asc)
+ set.each_line do |line|
+ lines << line
+ end
+ lines.first.should == @last_line
+ lines.last.should == @first_line
+ end
+ end
  end

  describe :each_parsed_line do
data/spec/lib/logfile_spec.rb CHANGED
@@ -6,7 +6,7 @@ module LogfileInterval

  describe Logfile do
  before :each do
- @alf = Logfile.new("#{data_dir}/access.log", LineParser::AccessLog)
+ @alf = Logfile.new("#{data_dir}/access.log", ParsedLine::AccessLog)
  @first_line = '78.54.172.146 - - [01/Jan/2012:16:30:51 -0800] "GET /package/core/oneiric/main/base/abrowser-6.0 HTTP/1.1" 200 6801 "http://www.google.com/url?sa=t&rct=j&q=abrowser 6.0&esrc=s&source=web&cd=4&sqi=2&ved=0CDYQFjAD&url=http%3A%2F%2Fwww.ubuntuupdates.org%2Fpackages%2Fshow%2F268762&ei=s-QlT8vJFon1sgb54unBDg&usg=AFQjCNHCHC0bxTf6aXAfUwT6Erjta6WLaQ&sig2=ceCi1odtaB8Vcf6IWg2a3w" "Mozilla/5.0 (Ubuntu; X11; Linux x86_64; rv:9.0.1) Gecko/20100101 Firefox/9.0.1"'
  @second_line = '78.54.172.146 - - [01/Jan/2012:16:30:51 -0800] "GET /package/show/2 HTTP/1.1" 302 6801 "http://www.google.com/url?sa=t&rct=j&q=abrowser 6.0&esrc=s&source=web&cd=4&sqi=2&ved=0CDYQFjAD&url=http%3A%2F%2Fwww.ubuntuupdates.org%2Fpackages%2Fshow%2F268762&ei=s-QlT8vJFon1sgb54unBDg&usg=AFQjCNHCHC0bxTf6aXAfUwT6Erjta6WLaQ&sig2=ceCi1odtaB8Vcf6IWg2a3w" "Mozilla/5.0 (Ubuntu; X11; Linux x86_64; rv:9.0.1) Gecko/20100101 Firefox/9.0.1"'
  @last_line = '66.249.67.176 - - [01/Jan/2012:00:57:47 -0800] "GET /packages/show/1 HTTP/1.1" 301 185 "-" "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"'
@@ -37,6 +37,28 @@ module LogfileInterval
  e.next.should == @second_line
  end
  end
+
+ context :order do
+ it 'iterates backward when order is :desc' do
+ lines = []
+ lf = Logfile.new("#{data_dir}/access.log", ParsedLine::AccessLog, :desc)
+ lf.each_line do |line|
+ lines << line
+ end
+ lines.last.should == @last_line
+ lines.first.should == @first_line
+ end
+
+ it 'iterates upward when order is :asc' do
+ lines = []
+ lf = Logfile.new("#{data_dir}/access.log", ParsedLine::AccessLog, :asc)
+ lf.each_line do |line|
+ lines << line
+ end
+ lines.first.should == @last_line
+ lines.last.should == @first_line
+ end
+ end
  end

  describe :each_parsed_line do
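Both Logfile and LogfileSet now accept an optional iteration order as a third constructor argument, exercised by the two :order contexts above: :desc yields the newest lines first, :asc the oldest first. A short sketch based on those specs; the AccessLog parser class (a ParsedLine::Base subclass) and the file paths are placeholders:

logfiles = ['access.log.2', 'access.log.1']

# :desc walks the set newest-line-first...
LogfileInterval::LogfileSet.new(logfiles, AccessLog, :desc).each_line { |line| puts line }

# ...while :asc starts from the oldest line; Logfile takes the same argument.
LogfileInterval::Logfile.new('access.log', AccessLog, :asc).each_line { |line| puts line }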
data/spec/support/lib/access_log.rb CHANGED
@@ -1,7 +1,7 @@
  module LogfileInterval
  data_dir = File.join(File.dirname(__FILE__), '..', 'support/logfiles')

- module LineParser
+ module ParsedLine

  class AccessLog < Base
  # Example line:
data/spec/support/lib/custom_timing_log.rb CHANGED
@@ -12,7 +12,7 @@ module LogfileInterval
  end
  end

- module LineParser
+ module ParsedLine
  class CustomTimingLog < Base
  # Line format:
  # timestamp, ip, controller#action, total_time, bytes, rss
@@ -21,8 +21,8 @@ module LogfileInterval

  add_column :name => :timestamp, :pos => 1, :aggregator => :timestamp
  add_column :name => :ip, :pos => 2, :aggregator => :count
- add_column :name => :num_slow, :pos => 4, :aggregator => :custom, :conversion => :integer,
- :custom_class => Aggregator::CountOverThreshold, :custom_options => { :threshold => 100 }
+ add_column :name => :num_slow, :pos => 4, :aggregator => :count_over_threshold, :conversion => :integer,
+ :custom_options => { :threshold => 100 }

  def time
  Time.at(self.timestamp.to_i)
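The change above drops the :custom/:custom_class pair: a custom aggregator such as CountOverThreshold is now referenced directly by an aggregator name (:count_over_threshold), with :custom_options still carrying its parameters. How a class gets registered under that name is not shown in this diff (presumably via the new Aggregator::Registrar), so the sketch below covers only the column side of the feature:

class CustomTimingLog < LogfileInterval::ParsedLine::Base
  set_regex /^(\d+),\s*([\d\.]+),\s*(\w+#\w+),\s*(\d+),\s*(\d+),\s*([\d\.]+)$/

  add_column :name => :timestamp, :pos => 1, :aggregator => :timestamp
  # The aggregator is looked up by its registered name; :custom_class is gone in 2.0.0.
  add_column :name => :num_slow,  :pos => 4, :aggregator => :count_over_threshold,
             :conversion => :integer, :custom_options => { :threshold => 100 }
end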
data/spec/support/lib/timing_log.rb CHANGED
@@ -1,5 +1,5 @@
  module LogfileInterval
- module LineParser
+ module ParsedLine
  class TimingLog < Base
  # Line format:
  # timestamp, ip, controller#action, total_time, bytes, rss
@@ -7,6 +7,7 @@ module LogfileInterval
  set_regex /^(\d+),\s*([\d\.]+),\s*(\w+#\w+),\s*(\d+),\s*(\d+),\s*([\d\.]+)$/

  add_column :name => :timestamp, :pos => 1, :aggregator => :timestamp
+ add_column :name => :num_lines, :pos => 2, :aggregator => :num_lines
  add_column :name => :ip, :pos => 2, :aggregator => :count
  add_column :name => :action, :pos => 3, :aggregator => :count
  add_column :name => :total_time, :pos => 4, :aggregator => :average, :conversion => :integer
@@ -25,8 +26,9 @@ module LogfileInterval
  set_regex /^(\d+),\s*([\d\.]+),\s*(\w+#\w+),\s*(\d+),\s*(\d+),\s*([\d\.]+)$/

  add_column :name => :timestamp, :pos => 1, :aggregator => :timestamp
+ add_column :name => :num_lines, :pos => 2, :aggregator => :num_lines
  add_column :name => :ip_by_action, :pos => 2, :aggregator => :count, :group_by => :action
- add_column :name => :action, :pos => 3, :aggregator => :count, :group_by => :action
+ add_column :name => :action, :pos => 3, :aggregator => :count
  add_column :name => :total_time, :pos => 4, :aggregator => :average, :group_by => :action, :conversion => :integer
  add_column :name => :num_bytes, :pos => 5, :aggregator => :sum, :group_by => :action, :conversion => :integer
  add_column :name => :rss, :pos => 6, :aggregator => :delta, :group_by => :action, :conversion => :float
data/spec/support/logfiles/timing.log.2 ADDED
@@ -0,0 +1 @@
+ 1385941440, 192.168.0.5, posts#show, 100, 16000, 48.00
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logfile_interval
  version: !ruby/object:Gem::Version
- version: 1.2.1
+ version: 2.0.0
  platform: ruby
  authors:
  - Philippe Le Rohellec
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-01-13 00:00:00.000000000 Z
+ date: 2014-03-03 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -100,22 +100,27 @@ files:
  - bin/readme.rb
  - docs/design.rb
  - lib/logfile_interval.rb
- - lib/logfile_interval/aggregator.rb
  - lib/logfile_interval/aggregator/average.rb
  - lib/logfile_interval/aggregator/base.rb
  - lib/logfile_interval/aggregator/count.rb
  - lib/logfile_interval/aggregator/delta.rb
- - lib/logfile_interval/aggregator/group_and_count.rb
+ - lib/logfile_interval/aggregator/num_lines.rb
+ - lib/logfile_interval/aggregator/registrar.rb
  - lib/logfile_interval/aggregator/sum.rb
+ - lib/logfile_interval/aggregator_set.rb
  - lib/logfile_interval/interval.rb
  - lib/logfile_interval/interval_builder.rb
- - lib/logfile_interval/line_parser/base.rb
+ - lib/logfile_interval/interval_builder/ascending.rb
+ - lib/logfile_interval/interval_builder/descending.rb
  - lib/logfile_interval/logfile.rb
  - lib/logfile_interval/logfile_set.rb
+ - lib/logfile_interval/parsed_line/base.rb
+ - lib/logfile_interval/parsed_line/parser.rb
  - lib/logfile_interval/util/counter.rb
  - lib/logfile_interval/util/file_backward.rb
  - lib/logfile_interval/version.rb
  - logfile_interval.gemspec
+ - spec/lib/aggregator_set_spec.rb
  - spec/lib/aggregator_spec.rb
  - spec/lib/counter_spec.rb
  - spec/lib/custom_aggregator_spec.rb
@@ -134,6 +139,7 @@ files:
  - spec/support/logfiles/access.log.3
  - spec/support/logfiles/timing.log
  - spec/support/logfiles/timing.log.1
+ - spec/support/logfiles/timing.log.2
  homepage: https://github.com/plerohellec/logfile_interval
  licenses:
  - MIT
@@ -159,6 +165,7 @@ signing_key:
  specification_version: 4
  summary: Aggregate logfile data into intervals
  test_files:
+ - spec/lib/aggregator_set_spec.rb
  - spec/lib/aggregator_spec.rb
  - spec/lib/counter_spec.rb
  - spec/lib/custom_aggregator_spec.rb
@@ -177,3 +184,4 @@ test_files:
  - spec/support/logfiles/access.log.3
  - spec/support/logfiles/timing.log
  - spec/support/logfiles/timing.log.1
+ - spec/support/logfiles/timing.log.2
data/lib/logfile_interval/aggregator/group_and_count.rb DELETED
@@ -1,14 +0,0 @@
- module LogfileInterval
- module Aggregator
- class GroupAndCount < Base
- def each
- @val.each { |k, v| yield k, v }
- end
-
- def add(value, group_by)
- raise ArgumentError, 'group_by argument is mandatory for GroupAndCount#add' unless group_by
- @val.increment_subkey(value, key(group_by))
- end
- end
- end
- end
data/lib/logfile_interval/aggregator.rb DELETED
@@ -1,27 +0,0 @@
- lib_dir = File.expand_path('..', __FILE__)
-
- require "#{lib_dir}/aggregator/base"
- require "#{lib_dir}/aggregator/sum"
- require "#{lib_dir}/aggregator/count"
- require "#{lib_dir}/aggregator/group_and_count"
- require "#{lib_dir}/aggregator/average"
- require "#{lib_dir}/aggregator/delta"
-
- module LogfileInterval
- module Aggregator
- def self.klass(options)
- case options[:aggregator]
- when :sum then Sum
- when :average then Average
- when :count
- if options[:group_by] && options[:group_by] != options[:name]
- GroupAndCount
- else
- Count
- end
- when :delta then Delta
- when :custom then options.fetch(:custom_class)
- end
- end
- end
- end
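With this hard-coded factory removed, aggregator lookup moves to the new lib/logfile_interval/aggregator/registrar.rb listed among the added files above. That file's API is not part of this diff, so the snippet below is purely a generic illustration of the registry pattern that replaces a case statement like the one deleted here; every name in it is hypothetical, not the gem's actual code:

module Aggregator
  module Registrar
    def self.registry
      @registry ||= {}
    end

    # Aggregator classes register themselves under a symbolic name...
    def self.register(name, klass)
      registry[name.to_sym] = klass
    end

    # ...and a column's :aggregator option is resolved by looking that name up.
    def self.klass(name)
      registry.fetch(name.to_sym)
    end
  end

  class Sum;   Registrar.register(:sum, self);   end
  class Count; Registrar.register(:count, self); end
end

Aggregator::Registrar.klass(:count) # => Aggregator::Count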