logfile_interval 1.2.1 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. checksums.yaml +4 -4
  2. data/Gemfile.lock +1 -1
  3. data/README.md +15 -11
  4. data/bin/aggregate_access_log.rb +9 -9
  5. data/bin/readme.rb +6 -5
  6. data/docs/design.rb +88 -62
  7. data/lib/logfile_interval/aggregator/base.rb +15 -1
  8. data/lib/logfile_interval/aggregator/count.rb +10 -2
  9. data/lib/logfile_interval/aggregator/delta.rb +6 -6
  10. data/lib/logfile_interval/aggregator/num_lines.rb +13 -0
  11. data/lib/logfile_interval/aggregator/registrar.rb +32 -0
  12. data/lib/logfile_interval/aggregator_set.rb +34 -0
  13. data/lib/logfile_interval/interval.rb +5 -31
  14. data/lib/logfile_interval/interval_builder/ascending.rb +23 -0
  15. data/lib/logfile_interval/interval_builder/descending.rb +22 -0
  16. data/lib/logfile_interval/interval_builder.rb +62 -19
  17. data/lib/logfile_interval/logfile.rb +34 -5
  18. data/lib/logfile_interval/logfile_set.rb +38 -24
  19. data/lib/logfile_interval/parsed_line/base.rb +31 -0
  20. data/lib/logfile_interval/{line_parser/base.rb → parsed_line/parser.rb} +15 -40
  21. data/lib/logfile_interval/version.rb +1 -1
  22. data/lib/logfile_interval.rb +4 -2
  23. data/spec/lib/aggregator_set_spec.rb +15 -0
  24. data/spec/lib/aggregator_spec.rb +39 -29
  25. data/spec/lib/custom_aggregator_spec.rb +3 -3
  26. data/spec/lib/interval_builder_spec.rb +185 -38
  27. data/spec/lib/interval_spec.rb +41 -30
  28. data/spec/lib/line_parser/base_spec.rb +16 -5
  29. data/spec/lib/logfile_set_spec.rb +23 -1
  30. data/spec/lib/logfile_spec.rb +23 -1
  31. data/spec/support/lib/access_log.rb +1 -1
  32. data/spec/support/lib/custom_timing_log.rb +3 -3
  33. data/spec/support/lib/timing_log.rb +4 -2
  34. data/spec/support/logfiles/timing.log.2 +1 -0
  35. metadata +13 -5
  36. data/lib/logfile_interval/aggregator/group_and_count.rb +0 -14
  37. data/lib/logfile_interval/aggregator.rb +0 -27

data/lib/logfile_interval/parsed_line/base.rb
@@ -0,0 +1,31 @@
+ module LogfileInterval
+   module ParsedLine
+     class ConfigurationError < StandardError; end
+
+     class Base
+       attr_reader :data
+
+       extend Parser
+
+       def initialize(line)
+         @data = self.class.parse(line)
+         @valid = @data ? true : false
+       end
+
+       def valid?
+         @valid
+       end
+
+       def time
+         raise NotImplemented
+       end
+
+       def [](name)
+         @data[name]
+       end
+     end
+   end
+ end
+
+
+
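
For orientation, here is a minimal sketch of a parsed-line class built on the new ParsedLine::Base. The SimpleLine name, regex, and field names are illustrative only (not part of the gem): a subclass supplies the class-level parse and the instance-level time that Base relies on above.

    require 'logfile_interval'

    module LogfileInterval
      module ParsedLine
        # Hypothetical subclass, for illustration only.
        class SimpleLine < Base
          LINE_REGEX = /\A(\d{10}), (\d+)\z/

          # Base#initialize calls self.class.parse(line) and treats nil as an invalid line.
          def self.parse(line)
            m = LINE_REGEX.match(line)
            return nil unless m
            { :timestamp => m[1], :num_bytes => m[2].to_i }
          end

          # Base#time raises NotImplemented, so subclasses must provide it.
          def time
            Time.at(data[:timestamp].to_i)
          end
        end
      end
    end

    line = LogfileInterval::ParsedLine::SimpleLine.new("1385942400, 4500")
    line.valid?       # => true
    line[:num_bytes]  # => 4500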

data/lib/logfile_interval/{line_parser/base.rb → parsed_line/parser.rb}
@@ -1,15 +1,9 @@
  module LogfileInterval
-   module LineParser
-     AGGREGATION_FUNCTIONS = [ :sum, :average, :timestamp, :count, :delta, :custom ]
-
+   module ParsedLine
      class ConfigurationError < StandardError; end

-     class Base
-       attr_reader :data
-
-       class << self
+     module Parser
        attr_reader :regex
-
        def columns
          @columns ||= {}
        end
@@ -53,21 +47,22 @@ module LogfileInterval

        def set_column_custom_options(column_name, options)
          raise ArgumentError, "Invalid column name: #{column_name}" unless columns.has_key?(column_name)
-         raise ArgumentError, "This column is not custom: #{column_name}" unless columns[column_name].has_key?(:custom_class)
          columns[column_name][:custom_options] = options
        end

+
+       def each(&block)
+         columns.each(&block)
+       end
+
        private

        def validate_column_options(options)
          validate_option(options, :name)
          validate_option(options, :pos)
          validate_option(options, :aggregator)
-         unless AGGREGATION_FUNCTIONS.include?(options[:aggregator])
-           raise ConfigurationError, "aggregator must be one of #{AGGREGATION_FUNCTIONS.join(', ')}"
-         end
-         if options[:aggregator] == :custom
-           validate_option(options, :custom_class, ':custom_class must be set for :custom aggregator type')
+         unless Aggregator::Base.exist?(options[:aggregator]) || options[:aggregator] == :timestamp
+           raise ConfigurationError, "aggregator must be one of #{Aggregator::Base.all.join(', ')}"
          end
        end

@@ -78,13 +73,14 @@ module LogfileInterval
        def sanitize_column_options(options)
          options[:name] = options[:name].to_sym
          if options.has_key?(:group_by)
-           options[:group_by] = options[:group_by].to_sym
+           if options[:group_by].to_sym != options[:name]
+             options[:group_by] = options[:group_by].to_sym
+           else
+             options.delete(:group_by)
+           end
          end
          options[:conversion] = options.fetch(:conversion, :string)
-         if options[:aggregator] == :custom
-           options[:custom_options] = options.fetch(:custom_options, {})
-         end
-         options[:aggregator_class] = Aggregator.klass(options)
+         options[:aggregator_class] = Aggregator::Base.klass(options[:aggregator])
          options.delete(:aggregator)
          options
        end
@@ -96,27 +92,6 @@ module LogfileInterval
          else val
          end
        end
-       end
-
-       def initialize(line)
-         @data = self.class.parse(line)
-         @valid = @data ? true : false
-       end
-
-       def valid?
-         @valid
-       end
-
-       def time
-         raise NotImplemented
-       end
-
-       def [](name)
-         @data[name]
-       end
      end
    end
  end
-
-
-
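
As implied by the validation above, a column's :aggregator option must now name a registered aggregator (or :timestamp), and :conversion defaults to :string. A sketch of that contract; the :num_bytes column hash is illustrative, and the false return for an unknown name is inferred from the unless clause above.

    require 'logfile_interval'

    # A column definition now references an aggregator by symbol; the parser
    # resolves it through the aggregator registrar.
    options = { :name => 'num_bytes', :pos => 2, :aggregator => :sum }

    LogfileInterval::Aggregator::Base.exist?(:sum)    # => true
    LogfileInterval::Aggregator::Base.klass(:sum)     # => LogfileInterval::Aggregator::Sum
    LogfileInterval::Aggregator::Base.exist?(:bogus)  # => false; validation would raise ConfigurationError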

data/lib/logfile_interval/version.rb
@@ -1,3 +1,3 @@
  module LogfileInterval
-   VERSION = "1.2.1"
+   VERSION = "2.0.0"
  end

data/lib/logfile_interval.rb
@@ -1,14 +1,16 @@
  lib_dir = File.expand_path('..', __FILE__)

  require "#{lib_dir}/logfile_interval/version"
+ require "#{lib_dir}/logfile_interval/aggregator_set"
  require "#{lib_dir}/logfile_interval/interval"
  require "#{lib_dir}/logfile_interval/interval_builder"
  require "#{lib_dir}/logfile_interval/logfile"
  require "#{lib_dir}/logfile_interval/logfile_set"
- require "#{lib_dir}/logfile_interval/line_parser/base"
+ require "#{lib_dir}/logfile_interval/parsed_line/parser"
+ require "#{lib_dir}/logfile_interval/parsed_line/base"
  require "#{lib_dir}/logfile_interval/util/counter"
  require "#{lib_dir}/logfile_interval/util/file_backward"
- require "#{lib_dir}/logfile_interval/aggregator"
+ require "#{lib_dir}/logfile_interval/aggregator/base"

  module LogfileInterval
  end

data/spec/lib/aggregator_set_spec.rb
@@ -0,0 +1,15 @@
+ require 'spec_helper'
+ require File.join(File.dirname(__FILE__), '..', 'support/lib/timing_log')
+
+ module LogfileInterval
+   data_dir = File.join(File.dirname(__FILE__), '..', 'support/logfiles')
+
+   describe AggregatorSet, 'with empty columns' do
+     subject { AggregatorSet.new({}) }
+
+     it { should respond_to :add }
+     it { should respond_to :to_hash }
+     it { should respond_to :[] }
+
+   end
+ end

data/spec/lib/aggregator_spec.rb
@@ -3,16 +3,21 @@ require 'spec_helper'
  module LogfileInterval

    module Aggregator
-     class CustomAggregator; end
+     class CustomAggregator < Base; end
+
+     class BizarroAggregator < Base
+       register_aggregator :weird_add, self
+     end

-     describe Aggregator do
+     describe Base do
        it 'finds the aggregator class' do
-         Aggregator.klass({ :aggregator => :sum}).should == Sum
-         Aggregator.klass({ :aggregator => :average}).should == Average
-         Aggregator.klass({ :aggregator => :count}).should == Count
-         Aggregator.klass({ :aggregator => :count, :group_by => :foo}).should == GroupAndCount
-         Aggregator.klass({ :aggregator => :delta}).should == Delta
-         Aggregator.klass({ :aggregator => :custom, :custom_class => CustomAggregator}).should == CustomAggregator
+         Aggregator::Base.klass(:num_lines).should == NumLines
+         Aggregator::Base.klass(:sum).should == Sum
+         Aggregator::Base.klass(:average).should == Average
+         Aggregator::Base.klass(:count).should == Count
+         Aggregator::Base.klass(:delta).should == Delta
+         Aggregator::Base.klass(:custom_aggregator).should == CustomAggregator
+         Aggregator::Base.klass(:weird_add).should == BizarroAggregator
        end
      end

@@ -32,7 +37,7 @@ module LogfileInterval
      end

      it 'returns a hash' do
-       aggregator.values.should be_a(Hash) unless aggregator.is_a?(Delta)
+       aggregator.values.should be_a(Hash) unless [ Delta, NumLines ].include?(aggregator.class)
      end
    end

@@ -61,7 +66,7 @@ module LogfileInterval
      end
    end

-   [ Count, Sum, Average, Delta ]. each do |klass|
+   [ NumLines, Count, Sum, Average, Delta ]. each do |klass|
      describe klass do
        it_behaves_like 'an aggregator'
      end
@@ -69,6 +74,16 @@ module LogfileInterval


    describe 'without group_by key' do
+     describe NumLines do
+       it 'counts total number of lines' do
+         nl = NumLines.new
+         nl.add(55)
+         nl.add(54)
+         nl.add(1008)
+         nl.value.should == 3
+       end
+     end
+
      describe Sum do
        it 'sums up values' do
          sum = Sum.new
@@ -90,9 +105,9 @@ module LogfileInterval
      describe Delta do
        it 'averages delta values' do
          d = Delta.new
-         d.add(1.4)
-         d.add(1.1)
          d.add(1.0)
+         d.add(1.1)
+         d.add(1.4)
          d.value.round(5).should == 0.2
        end
      end
@@ -104,7 +119,8 @@ module LogfileInterval
          g.add('500')
          g.add('301')
          g.add('200')
-         g.value.should == 4
+         g.value.should == 0
+         g.values.size.should == 3
        end
      end
    end
@@ -145,20 +161,18 @@ module LogfileInterval
      describe Count do
        it 'groups values and increment counters' do
          g = Count.new
-         g.add('200', '200')
-         g.add('500', '500')
-         g.add('301', '301')
-         g.add('200', '200')
+         g.add('200')
+         g.add('500')
+         g.add('301')
+         g.add('200')
          g.values.should be_a(Hash)
          g.values.should include({'200' => 2})
          g.values.should include({'301' => 1})
          g.values.should include({'500' => 1})
        end
-     end

-     describe GroupAndCount do
        it 'each yields a key and a hash' do
-         gac = GroupAndCount.new
+         gac = Count.new
          gac.add :key1, :subkey1
          gac.first.should be_an(Array)
          gac.first.size.should == 2
@@ -167,11 +181,7 @@ module LogfileInterval

        context :add do
          before :each do
-           @gac = GroupAndCount.new
-         end
-
-         it 'requires a group_by argument' do
-           lambda { @gac.add('foo') }.should raise_error ArgumentError
+           @gac = Count.new
          end

          it 'counts number of occurence of subkey for key' do
@@ -201,10 +211,10 @@ module LogfileInterval
          d.add(5, :key2)
          d.values.should be_a(Hash)
          d.values.size.should == 2
-         d.value(:key1).should == 3
-         d.values[:key1].should == 3
-         d.value(:key2).should == 2.5
-         d.values[:key2].should == 2.5
+         d.value(:key1).should == -3
+         d.values[:key1].should == -3
+         d.value(:key2).should == -2.5
+         d.values[:key2].should == -2.5
        end
      end
    end
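
The spec above suggests the new registration model: subclassing Aggregator::Base registers the class under a derived snake_case name (CustomAggregator resolves as :custom_aggregator), and register_aggregator adds an explicit key. A sketch with an illustrative PeakTracker class; the derived-name lookup is inferred from the :custom_aggregator expectation above.

    require 'logfile_interval'

    module LogfileInterval
      module Aggregator
        # Illustrative custom aggregator; only the registration is shown here.
        class PeakTracker < Base
          register_aggregator :peak, self
        end
      end
    end

    LogfileInterval::Aggregator::Base.klass(:peak)          # => LogfileInterval::Aggregator::PeakTracker
    LogfileInterval::Aggregator::Base.klass(:peak_tracker)  # => LogfileInterval::Aggregator::PeakTracker (inferred)
    LogfileInterval::Aggregator::Base.exist?(:peak)         # => true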

data/spec/lib/custom_aggregator_spec.rb
@@ -6,7 +6,7 @@ module LogfileInterval
    before :each do
      @end_time = Time.new(2013, 12, 01, 16, 00, 00, '-08:00')
      @length = 300
-     @line_parser_class = LineParser::CustomTimingLog
+     @line_parser_class = ParsedLine::CustomTimingLog
    end

    def fill_interval
@@ -24,7 +24,7 @@ module LogfileInterval

      @interval.size.should == 3
      @interval[:num_slow].should == 1
-     @interval[:ip].should == 3
+     @interval[:ip].should == {"192.168.0.5"=>2, "10.10.10.10"=>1}
    end

    describe 'set_column_custom_options' do
@@ -34,7 +34,7 @@ module LogfileInterval

        @interval.size.should == 3
        @interval[:num_slow].should == 2
-       @interval[:ip].should == 3
+       @interval[:ip].should == {"192.168.0.5"=>2, "10.10.10.10"=>1}
      end
    end
  end

data/spec/lib/interval_builder_spec.rb
@@ -7,63 +7,210 @@ module LogfileInterval
    describe IntervalBuilder do
      before :each do
        @logfiles = ["#{data_dir}/timing.log", "#{data_dir}/timing.log.1" ]
-       @set = LogfileSet.new(@logfiles, LineParser::TimingLog)
-       @builder = IntervalBuilder.new(@set, 300)
      end

-     context :each_interval do
+     describe 'initialization' do
        before :each do
+         @set = LogfileSet.new(@logfiles, ParsedLine::TimingLog)
+         @builder = IntervalBuilder.new(@set.each_parsed_line, ParsedLine::TimingLog, 300)
+       end
+
+       it 'accepts a logfile as the parsed_lines_enum argument' do
+         logfile = Logfile.new("#{data_dir}/timing.log", ParsedLine::TimingLog)
+         builder = IntervalBuilder.new(logfile, ParsedLine::TimingLog, 300)
          Time.stub(:now).and_return(Time.new(2013,12,01,16,0,1,'-08:00'))
-         @intervals = []
-         @builder.each_interval do |interval|
-           @intervals << interval
+         intervals = []
+         builder.each_interval do |interval|
+           intervals << interval
          end
+         intervals.size.should == 1
        end

-       it 'finds intervals from all logfiles' do
-         @intervals.size.should == 2
+       it 'accepts a logfile_set as the parsed_lines_enum argument' do
+         builder = IntervalBuilder.new(@set, ParsedLine::TimingLog, 300)
+         Time.stub(:now).and_return(Time.new(2013,12,01,16,0,1,'-08:00'))
+         intervals = []
+         builder.each_interval do |interval|
+           intervals << interval
+         end
+         intervals.size.should == 2
        end
+     end

-       context 'first interval' do
-         it 'got records from both logfiles' do
-           @intervals.first.size.should == 4
-           @intervals.first.end_time.should == Time.new(2013,12,01,16,0,0,'-08:00')
-           @intervals.first[:total_time].should == 700.0/4
-           @intervals.first[:num_bytes].should == 52000
-           @intervals.first[:rss].round(5).should == 0.60
-           @intervals.first[:ip].should == 4
-           @intervals.first[:action].should == 4
+     describe :each_interval do
+       context 'without a block' do
+         it 'returns an enumerator' do
+           set = LogfileSet.new(@logfiles, ParsedLine::TimingLog, :desc)
+           builder = IntervalBuilder.new(set, ParsedLine::TimingLog, 300)
+           e = builder.each_interval
+           e.should be_a(Enumerator)
          end
        end

-       context 'second interval' do
-         it 'got records from second logfile only' do
-           @intervals.last.size.should == 2
-           @intervals.last.end_time.should == Time.new(2013,12,01,15,55,0,'-08:00')
-           @intervals.last[:total_time].should == 300
-           @intervals.last[:num_bytes].should == 41000
-           @intervals.last[:rss].round(5).should == 0.20
-           @intervals.last[:ip].should == 2
-           @intervals.last[:action].should == 2
+       context 'with empty logfiles' do
+         it 'does not yield any interval' do
+           logfiles = ["#{data_dir}/non_existing_timing.log", "#{data_dir}/non_existing_timing.log.1" ]
+           set = LogfileSet.new(logfiles, ParsedLine::TimingLog)
+           builder = IntervalBuilder.new(set, ParsedLine::TimingLog, 300)
+           intervals = []
+           builder.each_interval do |interval|
+             intervals << interval
+           end
+           intervals.should be_empty
          end
        end

-       context 'without a block' do
-         it 'should return an iterator' do
-           e = @builder.each_interval
-           e.should be_an(Enumerator)
-           e.next.end_time.should == Time.new(2013,12,01,16,0,0,'-08:00')
+       context 'in descending order' do
+         before :each do
+           Time.stub(:now).and_return(Time.new(2013,12,01,16,0,1,'-08:00'))
+           @set = LogfileSet.new(@logfiles, ParsedLine::TimingLog, :desc)
+           @builder = IntervalBuilder.new(@set, ParsedLine::TimingLog, 300)
+           @intervals = []
+           @builder.each_interval do |interval|
+             @intervals << interval
+           end
+         end
+
+         it 'finds intervals from all logfiles' do
+           @intervals.size.should == 2
+         end
+
+         context 'first interval' do
+           it 'got records from both logfiles' do
+             @intervals.first.size.should == 4
+             @intervals.first.end_time.should == Time.new(2013,12,01,16,0,0,'-08:00')
+             @intervals.first[:total_time].should == 700.0/4
+             @intervals.first[:num_bytes].should == 52000
+             @intervals.first[:rss].round(5).should == -0.60
+             @intervals.first[:ip].should == {"192.168.0.5"=>3, "192.168.0.10"=>1}
+             @intervals.first[:action].should == {"posts#show"=>2, "posts#create"=>1, "posts#index"=>1}
+           end
+         end
+
+         context 'second interval' do
+           it 'got records from second logfile only' do
+             @intervals.last.size.should == 2
+             @intervals.last.end_time.should == Time.new(2013,12,01,15,55,0,'-08:00')
+             @intervals.last[:total_time].should == 300
+             @intervals.last[:num_bytes].should == 41000
+             @intervals.last[:rss].round(5).should == -0.20
+             @intervals.last[:ip].should == {"192.168.0.10"=>1, "192.168.0.5"=>1}
+             @intervals.last[:action].should == {"posts#index"=>1, "posts#show"=>1}
+           end
+         end
+       end
+
+       context 'in ascending order' do
+         before :each do
+           Time.stub(:now).and_return(Time.new(2013,12,01,16,0,1,'-08:00'))
+           @intervals = []
+           @set = LogfileSet.new(@logfiles, ParsedLine::TimingLog, :asc)
+           @builder = IntervalBuilder.new(@set, ParsedLine::TimingLog, 300)
+           @builder.each_interval do |interval|
+             @intervals << interval
+           end
+         end
+
+         it 'builds first interval older than last interval' do
+           first_time = @intervals.first.start_time
+           last_time = @intervals.last.start_time
+           first_time.should be < last_time
+         end
+
+         it 'builds first interval with start time at 5 minute boundary below first record' do
+           first_start_time = @intervals.first.start_time
+           first_start_time.should == Time.new(2013,12,01,15,50,0,'-08:00')
+         end
+
+         it 'builds last interval with end time at 5 minute boundary following last record' do
+           last_end_time = @intervals.last.end_time
+           last_end_time.should == Time.new(2013,12,01,16,0,0,'-08:00')
+         end
+
+         it 'puts the right data in the right intervals' do
+           @intervals.first.size.should == 2
+           @intervals.first.end_time.should == Time.new(2013,12,01,15,55,0,'-08:00')
+           @intervals.first[:total_time].should == 300
+           @intervals.first[:num_bytes].should == 41000
+           @intervals.first[:rss].round(5).should == 0.20
+           @intervals.first[:ip].should == {"192.168.0.10"=>1, "192.168.0.5"=>1}
+           @intervals.first[:action].should == {"posts#index"=>1, "posts#show"=>1}
+
+           @intervals.last.size.should == 4
+           @intervals.last.end_time.should == Time.new(2013,12,01,16,0,0,'-08:00')
+           @intervals.last[:total_time].should == 700.0/4
+           @intervals.last[:num_bytes].should == 52000
+           @intervals.last[:rss].round(5).should == 0.60
+           @intervals.last[:ip].should == {"192.168.0.5"=>3, "192.168.0.10"=>1}
+           @intervals.last[:action].should == {"posts#show"=>2, "posts#create"=>1, "posts#index"=>1}
+         end
+       end
+
+       context 'with a gap in the logfiles' do
+         before :each do
+           Time.stub(:now).and_return(Time.new(2013,12,01,16,0,1,'-08:00'))
+           @logfiles = ["#{data_dir}/timing.log", "#{data_dir}/timing.log.1", "#{data_dir}/timing.log.2" ]
+         end
+
+         context 'in descending order' do
+           before :each do
+             @set = LogfileSet.new(@logfiles, ParsedLine::TimingLog, :desc)
+             @builder = IntervalBuilder.new(@set, ParsedLine::TimingLog, 300)
+             @intervals = []
+             @builder.each_interval do |interval|
+               @intervals << interval
+             end
+           end
+
+           it 'creates an empty interval' do
+             @intervals.size.should == 4
+             gap_interval = @intervals[2]
+             gap_interval.size.should == 0
+             gap_interval.end_time.should == Time.new(2013,12,01,15,50,0,'-08:00')
+           end
+         end
+
+         context 'in ascending order' do
+           before :each do
+             @set = LogfileSet.new(@logfiles, ParsedLine::TimingLog, :asc)
+             @builder = IntervalBuilder.new(@set, ParsedLine::TimingLog, 300)
+             @intervals = []
+             @builder.each_interval do |interval|
+               @intervals << interval
+             end
+           end
+
+           it 'creates an empty interval' do
+             @intervals.size.should == 4
+             gap_interval = @intervals[1]
+             gap_interval.size.should == 0
+             gap_interval.end_time.should == Time.new(2013,12,01,15,50,0,'-08:00')
+           end
          end
        end
      end

-     context :last_interval do
-       it 'returns the most recent interval' do
-         Time.stub(:now).and_return(Time.new(2013,12,01,16,0,1,'-08:00'))
-         interval = @builder.last_interval
-         interval.end_time.should == Time.new(2013,12,01,16,0,0,'-08:00')
-         interval.size.should == 4
-         interval[:num_bytes].should == 52000
+     describe :first_interval do
+       context 'with parsed_lines_enum in ascending order' do
+         it 'returns the oldest interval' do
+           Time.stub(:now).and_return(Time.new(2013,12,01,16,0,1,'-08:00'))
+           set = LogfileSet.new(@logfiles, ParsedLine::TimingLog, :asc)
+           builder = IntervalBuilder.new(set, ParsedLine::TimingLog, 300)
+           interval = builder.first_interval
+           interval.end_time.should == Time.new(2013,12,01,15,55,0,'-08:00')
+           interval.size.should == 2
+         end
+       end
+
+       context 'with parsed_lines_enum in descending order' do
+         it 'returns the most recent interval' do
+           Time.stub(:now).and_return(Time.new(2013,12,01,16,0,1,'-08:00'))
+           set = LogfileSet.new(@logfiles, ParsedLine::TimingLog, :desc)
+           builder = IntervalBuilder.new(set, ParsedLine::TimingLog, 300)
+           interval = builder.first_interval
+           interval.end_time.should == Time.new(2013,12,01,16,0,0,'-08:00')
+           interval.size.should == 4
+         end
        end
      end
    end