logfile_interval 1.1.1 → 1.1.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 37c52b3b1355cf1589c84a6f2f8e76233222eb80
- data.tar.gz: 5ec64eb915a57de6c94974756d4a3d8a58042dc6
+ metadata.gz: b10f4f6bcfa47f21b82b70bf541ef5403638c1ac
+ data.tar.gz: a0d4c42cd3f531b96fcbfc3aa8c7a0b4b2b3de1b
  SHA512:
- metadata.gz: 2b6eb7f3ecbfc8b12fa5900c02ca45e6f58ec1730018500c860c3be7e63ee0a83f402ee000e9141720ec13f6d89c7585dc6befbde89ec86929f51707e8fc47f1
- data.tar.gz: 865b1ae02eb35830c8723f3b9ad5b013a3f0a335eaecc05cc95f077f468db34855af91510a00d77d4a0caf45183f708263daa78c99cb6da9f7b12c574e2bc5be
+ metadata.gz: 954c42936dee2d249e2fc71f25cf175455f0350e4d96951bdddcb0c766ed18a53415c43a279746779df8d6e6ea89c83dc6e433272599bb0d5792d6f9b3a3f89e
+ data.tar.gz: 289e3c63c70eeb4d606bc5a5431d6ea0c69c277b6b31cb717bcdfd4ef28f5a2bce0d660ed3bd8de0d3257eed237d1baf7c8f3b64fd879f204553037ab95c4f46
data/.travis.yml CHANGED
@@ -2,4 +2,5 @@ language: ruby
  rvm:
  - 1.9.3
  - 2.0.0
+ - 2.1.0
  script: bundle exec rspec spec
data/Gemfile.lock CHANGED
@@ -1,18 +1,18 @@
  PATH
  remote: .
  specs:
- logfile_interval (1.1.1)
+ logfile_interval (1.1.2)

  GEM
  remote: https://rubygems.org/
  specs:
  columnize (0.3.6)
- debugger (1.6.2)
+ debugger (1.6.5)
  columnize (>= 0.3.1)
  debugger-linecache (~> 1.2.0)
- debugger-ruby_core_source (~> 1.2.3)
+ debugger-ruby_core_source (~> 1.3.1)
  debugger-linecache (1.2.0)
- debugger-ruby_core_source (1.2.4)
+ debugger-ruby_core_source (1.3.1)
  diff-lcs (1.2.5)
  docile (1.1.1)
  multi_json (1.8.2)
data/README.md CHANGED
@@ -38,12 +38,7 @@ class AccessLog < LogfileInterval::LineParser::Base
  end
  end

- path = ENV['ACCESS_LOG_PATH']
- file = LogfileInterval::Logfile.new(path, AccessLog)
- unless file.exist?
- puts "#{path} is not found"
- exit 1
- end
+ file = LogfileInterval::Logfile.new('path_to_logfile', AccessLog)

  builder = LogfileInterval::IntervalBuilder.new(file, 300)
  builder.each_interval do |interval|
@@ -184,6 +179,8 @@ interval_builder.each_interval do |interval|
  end
  end
  ```
+ ## Design document
+ Design outline is at [design.rb](docs/design.rb).

  ## Installation
  Add this line to your application's Gemfile:
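Note on the simplified quick-start above: the explicit `file.exist?` guard is gone, which is consistent with `Logfile#each_line` (further down in this diff), which simply returns when the file does not exist. A minimal sketch of the new behaviour, assuming the AccessLog parser class defined earlier in the README:

```ruby
file = LogfileInterval::Logfile.new('path_to_logfile', AccessLog)
file.each_line { |line| puts line }   # yields nothing if the file is missing
```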
data/docs/design.rb CHANGED
@@ -1,13 +1,4 @@
  module LogfileInterval
- class Logfile
- end
-
- class LogfileSet
- end
-
- class Interval
- end
-
  module LineParser
  class Base
  class << self
@@ -15,74 +6,165 @@ module LogfileInterval
  end

  def add_column(name, options)
+ agg = Aggregators.klass(aggregator)
+ @columns[name] = { :pos => pos, :aggregator => agg, :conversion => conversion }
+ define_method(name)
  end

  def parse(line)
- @data = {}
-
  match_data = regex.match(line)
- columns.each do |name, options|
- val = match_data[options[:pos]]
- @data[name] = convert(val, options[:conversion])
- end
- @data
+ data = f(match_data)
  end

- def convert(val, conversion)
- case options[:conversion]
- when :integer then val.to_i
- else val
- end
+ def create_record(line)
+ record = new(line)
+ return record.valid? ? record : nil
  end
  end
-
  end

  class AccessLog < Base
  set_regex /blah/
  add_column :name => :foo, :pos => 1, :conversion => integer, :aggregator => :average

+ def initialize(line)
+ @data = self.class.parse(line)
+ end
+ end
+ end
+
+
+ class Interval
+ def initialize(end_time, length, parser)
+ @data = {}
+ parser.columns.each do |name, options|
+ @data[name] = options[:aggregator].new
+ end
+ end
+
+ def [](name)
+ @data[name].value
+ end
+
+ def add_record(record)
+ return unless record.valid?
+ raise ParserMismatch unless record.class == parser
+
+ @size += 1
+ parser.columns.each do |name, options|
+ @data[name].add(record[name])
+ end
+ end
+ end
+
+ module Aggregator
+ def self.klass(aggregator)
+ case aggregator
+ when :sum then Sum
+ end
+ end
+
+ class Sum
+ def initialize
+ @val = 0
+ end
+
+ def add(value)
+ @val += value
+ end
+ end
+
+ class Count
+ def initialize
+ @val = Counter.new
+ end
+
+ def add(value)
+ @val.increment(value)
+ end
+ end
+ end
+
+ class Logfile
+ def initialize(filename, parser)
+ end
+
+ def each_line
+ end
+
+ def each_parsed_line
+ each_line do |line|
+ record = parser.create_record(line)
+ yield record if record
+ end
  end
  end

- class Record
- def initialize(parser, line)
- @parser = parser
- @data = parser.parse(line)
+ class LogfileSet
+ def initialize(filenames_array, parser)
+ end
+
+ def ordered_filenames
+ end
+
+ def each_line
  end

- def valid_columns
- @parser.columns.keys
+ def each_parsed_line
  end
+ end

- def method_missing(meth, *args)
- if valid_columns.include?(meth) && args.none
- self[meth]
- else
- super
+ class IntervalBuilder
+ def initialize(logfile_set, length)
+ parser = logfile_set.parser
+ end
+
+ def each_interval
+ interval = Interval.new(now, length)
+ set.each_parsed_line(parser) do |record|
+ while record.time < interval.start_time do
+ yield interval
+ interval = Interval.new(interval.start_time, length)
+ end
+ interval.add(record)
  end
  end
  end
+
+ class Counter < Hash
+ def increment(key)
+ self[key] = self[key] ? self[key] + 1 : 1
+ end
+ end
  end

  logfiles = [ 'access.log', 'access.log.1', 'access.log.2' ]
  logfile = logfiles.first
- parser = LineParser::AccessLog.new

- logfile_iterator = LogfileInterval::Logfile.new(parser, logfile)
+ parser = LineParser::AccessLog

+ logfile_iterator = LogfileInterval::Logfile.new(logfile, parser)
  logfile_iterator.each_line do |line|
+ puts line.class # String
  puts line
  end

+ parser = LineParser::AccessLog
  logfile_iterator.each_parsed_line do |record|
+ puts record.class # LineParser::AccessLog
  puts record.ip
  puts record.time
  end

- interval_builder = LogfileInterval::Interval.new(parser, logfiles)
+ set_iterator = LogfileInterval::LogfileSet.new(logfiles, parser)
+ set_iterator.each_parsed_line do |record|
+ puts record.class # LineParser::AccessLog
+ end

+ length = 5.minutes
+ interval_builder = LogfileInterval::IntervalBuilder.new(logfiles, length)
  interval_builder.each_interval do |interval|
+ puts interval.class # LogfileInterval::Interval
  puts interval.start_time
  puts interval.length
  interval[:ip].each do |ip, count|
@@ -0,0 +1,14 @@
+ module LogfileInterval
+ module Aggregator
+ class Average < Base
+ def add(value, group_by = nil)
+ @val.add(key(group_by), value)
+ @size.increment(key(group_by))
+ end
+
+ def val(k)
+ average(k)
+ end
+ end
+ end
+ end
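The hunk above adds the Average aggregator; the shared plumbing (per-group totals in Util::Counter instances, the value/values accessors) lives in the Base class added in the next hunk. A rough usage sketch, assuming Util::Counter#add accumulates values per key as the aggregator code implies:

```ruby
avg = LogfileInterval::Aggregator::Average.new
avg.add(3)            # ungrouped values land in the :all bucket
avg.add(5)
avg.value             # => 4.0   (total 8 over 2 samples, via Base#average)

avg.add(100, 'GET')   # grouped values keep their own totals and counts
avg.add(200, 'GET')
avg.value('GET')      # => 150.0
```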
@@ -0,0 +1,52 @@
+ module LogfileInterval
+ module Aggregator
+ class Base
+ include Enumerable
+
+ def initialize
+ @val = Util::Counter.new
+ @size = Util::Counter.new
+ end
+
+ def value(group = nil)
+ val(key(group))
+ end
+
+ def values
+ if single_value?
+ value
+ else
+ self.inject({}) { |h, v| h[v[0]] = v[1]; h }
+ end
+ end
+
+ def add(value, group_by = nil)
+ raise NotImplementedError
+ end
+
+ private
+ def key(group_by = nil)
+ group_by ? group_by : :all
+ end
+
+ def single_value?
+ return true if @val.empty?
+ @val.keys.count == 1 && @val.keys.first == :all
+ end
+
+ def each
+ @val.each_key do |k|
+ yield k, val(k)
+ end
+ end
+
+ def val(k)
+ @val[k]
+ end
+
+ def average(k)
+ @size[k] > 0 ? @val[k].to_f / @size[k].to_f : 0
+ end
+ end
+ end
+ end
@@ -0,0 +1,9 @@
+ module LogfileInterval
+ module Aggregator
+ class Count < Base
+ def add(value, group_by = nil)
+ @val.add(key(group_by), 1)
+ end
+ end
+ end
+ end
@@ -0,0 +1,22 @@
+ module LogfileInterval
+ module Aggregator
+ class Delta < Base
+ def initialize
+ @previous = Util::Counter.new
+ super
+ end
+
+ def add(value, group_by = nil)
+ if @previous.has_key?(key(group_by))
+ @val.add(key(group_by), @previous[key(group_by)] - value)
+ @size.increment(key(group_by))
+ end
+ @previous.set(key(group_by), value)
+ end
+
+ def val(k)
+ average(k)
+ end
+ end
+ end
+ end
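Delta records the difference between consecutive samples (previous minus current) and averages them; because the gem reads log files newest-first (see FileBackward below), that difference corresponds to the per-line growth of a monotonically increasing counter. A rough sketch, again assuming Util::Counter#add accumulates per key:

```ruby
d = LogfileInterval::Aggregator::Delta.new
d.add(300)    # newest sample: only remembered as @previous, nothing aggregated yet
d.add(200)
d.add(100)    # oldest sample
d.value       # => 100.0  (average of 300-200 and 200-100)
```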
@@ -0,0 +1,14 @@
+ module LogfileInterval
+ module Aggregator
+ class GroupAndCount < Base
+ def each
+ @val.each { |k, v| yield k, v }
+ end
+
+ def add(value, group_by)
+ raise ArgumentError, 'group_by argument is mandatory for GroupAndCount#add' unless group_by
+ @val.increment_subkey(value, key(group_by))
+ end
+ end
+ end
+ end
@@ -0,0 +1,9 @@
+ module LogfileInterval
+ module Aggregator
+ class Sum < Base
+ def add(value, group_by = nil)
+ @val.add(key(group_by), value)
+ end
+ end
+ end
+ end
@@ -0,0 +1,24 @@
+ lib_dir = File.expand_path('..', __FILE__)
+
+ puts "lib_dir=#{lib_dir}"
+
+ require "#{lib_dir}/aggregator/base"
+ require "#{lib_dir}/aggregator/sum"
+ require "#{lib_dir}/aggregator/count"
+ require "#{lib_dir}/aggregator/group_and_count"
+ require "#{lib_dir}/aggregator/average"
+ require "#{lib_dir}/aggregator/delta"
+
+ module LogfileInterval
+ module Aggregator
+ def self.klass(aggregator)
+ case aggregator
+ when :sum then Sum
+ when :average then Average
+ when :count then Count
+ when :group_and_count then GroupAndCount
+ when :delta then Delta
+ end
+ end
+ end
+ end
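Aggregator.klass is the lookup that turns a column's :aggregator symbol into its class; unknown symbols fall through to nil since the case has no else branch:

```ruby
LogfileInterval::Aggregator.klass(:sum)              # => LogfileInterval::Aggregator::Sum
LogfileInterval::Aggregator.klass(:average)          # => LogfileInterval::Aggregator::Average
LogfileInterval::Aggregator.klass(:count)            # => LogfileInterval::Aggregator::Count
LogfileInterval::Aggregator.klass(:group_and_count)  # => LogfileInterval::Aggregator::GroupAndCount
LogfileInterval::Aggregator.klass(:delta)            # => LogfileInterval::Aggregator::Delta
LogfileInterval::Aggregator.klass(:median)           # => nil (not supported)
```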
@@ -29,6 +29,15 @@ module LogfileInterval
  @data.each(&block)
  end

+ def to_hash
+ @data.inject({}) do |h, pair|
+ k = pair[0]
+ v = pair[1]
+ h[k] = v.values
+ h
+ end
+ end
+
  def add_record(record)
  return unless record.valid?
  raise ParserMismatch unless record.class == parser
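The new Interval#to_hash walks the per-column aggregators and collects each one's values, so an interval can be dumped as a plain hash. A sketch with hypothetical column names, written in the style of the design document's add_column calls; for single-valued aggregators such as :count and :sum the entries are scalars, while grouped aggregators would yield nested hashes:

```ruby
# Hypothetical parser columns:
#   add_column :name => :requests, :pos => 1, :aggregator => :count
#   add_column :name => :bytes,    :pos => 2, :aggregator => :sum
interval = builder.each_interval.first
interval.to_hash   # => { :requests => 42, :bytes => 1_048_576 }  (illustrative values)
```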
@@ -9,6 +9,8 @@ module LogfileInterval
  end

  def each_interval
+ return enum_for(:each_interval) unless block_given?
+
  secs = (Time.now.to_i / length.to_i) * length.to_i
  rounded_end_time = Time.at(secs)
  current_interval = Interval.new(rounded_end_time, length, parser)
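With the enum_for guard, each_interval now returns an Enumerator when called without a block, so intervals can be consumed with ordinary Enumerable calls. A minimal sketch, assuming `file` is a Logfile or LogfileSet built as in the README:

```ruby
builder = LogfileInterval::IntervalBuilder.new(file, 300)

builder.each_interval { |interval| puts interval.start_time }  # block form, as before
builder.each_interval.first(3)                                 # Enumerator form, new in 1.1.2
```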
@@ -24,7 +24,9 @@ module LogfileInterval
  end

  def each_line
  return unless exist?
- f = FileBackward.new(@filename)
+ return enum_for(:each_line) unless block_given?
+
+ f = Util::FileBackward.new(@filename)
  while(line = f.gets)
  yield line.chomp
  end
@@ -32,6 +34,7 @@ module LogfileInterval
  end

  def each_parsed_line
+ return enum_for(:each_parsed_line) unless block_given?
  each_line do |line|
  record = parser.create_record(line)
  yield record if record
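Both Logfile iterators get the same enum_for guard, and the backwards file reader is now referenced through the new Util namespace. A small sketch, assuming AccessLog is the parser class from the README and access.log exists:

```ruby
logfile = LogfileInterval::Logfile.new('access.log', AccessLog)

logfile.each_line.first(5)       # five newest raw lines (files are read backwards)
logfile.each_parsed_line.count   # number of lines AccessLog accepted as valid records
```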
@@ -21,6 +21,8 @@ module LogfileInterval
  end

  def each_parsed_line
+ return enum_for(:each_parsed_line) unless block_given?
+
  ordered_filenames.each do |filename|
  tfile = Logfile.new(filename, parser)
  tfile.each_parsed_line do |record|
@@ -30,6 +32,8 @@ module LogfileInterval
  end

  def each_line
+ return enum_for(:each_line) unless block_given?
+
  ordered_filenames.each do |filename|
  tfile = Logfile.new(filename, parser)
  tfile.each_line do |line|
@@ -1,5 +1,5 @@
  module LogfileInterval
- module LineParser
+ module Util
  class Counter < Hash
  def increment(key)
  if self.has_key?(key)
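Counter moves from LineParser to the new Util namespace; it is the small Hash subclass the aggregators build on. Its increment behaviour (shown in full in the design document above) works like this:

```ruby
c = LogfileInterval::Util::Counter.new
c.increment(:get)
c.increment(:get)
c.increment(:post)
c   # => {:get=>2, :post=>1}
```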
@@ -0,0 +1,51 @@
+ module LogfileInterval
+ module Util
+ # Based on Perl's File::ReadBackwards module, by Uri Guttman.
+ class FileBackward
+ MAX_READ_SIZE = 1 << 10 # 1024
+
+ def initialize( *args )
+ return unless File.exist?(args[0])
+ @file = File.new(*args)
+ @file.seek(0, IO::SEEK_END)
+
+ @current_pos = @file.pos
+
+ @read_size = @file.pos % MAX_READ_SIZE
+ @read_size = MAX_READ_SIZE if @read_size.zero?
+
+ @line_buffer = Array.new
+ end
+
+ def gets( sep_string = $/ )
+ return nil unless @file
+ return @line_buffer.pop if @line_buffer.size > 2 or @current_pos.zero?
+
+ @current_pos -= @read_size
+ @file.seek(@current_pos, IO::SEEK_SET)
+
+ @line_buffer[0] = "#{@file.read(@read_size)}#{@line_buffer[0]}"
+ @read_size = MAX_READ_SIZE # Set a size for the next read.
+
+ @line_buffer[0] =
+ @line_buffer[0].scan(/.*?#{Regexp.escape(sep_string)}|.+/)
+ @line_buffer.flatten!
+
+ gets(sep_string)
+ end
+
+ def close
+ return unless @file
+ @file.close()
+ end
+ end
+ end
+ end
+
+ # f = FileBackward.new('../log/development.log')
+ # i = 0
+ # while(line = f.gets())
+ # puts line
+ # i += 1
+ # break if i>30
+ # end
@@ -1,3 +1,3 @@
  module LogfileInterval
- VERSION = "1.1.1"
+ VERSION = "1.1.2"
  end
@@ -1,14 +1,14 @@
  lib_dir = File.expand_path('..', __FILE__)

  require "#{lib_dir}/logfile_interval/version"
- require "#{lib_dir}/logfile_interval/file_backward"
  require "#{lib_dir}/logfile_interval/interval"
  require "#{lib_dir}/logfile_interval/interval_builder"
  require "#{lib_dir}/logfile_interval/logfile"
  require "#{lib_dir}/logfile_interval/logfile_set"
  require "#{lib_dir}/logfile_interval/line_parser/base"
- require "#{lib_dir}/logfile_interval/line_parser/aggregator"
- require "#{lib_dir}/logfile_interval/line_parser/counter"
+ require "#{lib_dir}/logfile_interval/util/counter"
+ require "#{lib_dir}/logfile_interval/util/file_backward"
+ require "#{lib_dir}/logfile_interval/aggregator"

  module LogfileInterval
  end
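Taken together, 1.1.2 mostly reorganises internals (the new Util and Aggregator namespaces, one file per aggregator class) and makes the iterators Enumerator-friendly. A short end-to-end sketch of the public API after this release, assuming an AccessLog parser defined as in the README; the file names and the 300-second interval length are illustrative:

```ruby
require 'logfile_interval'

logfiles = ['access.log', 'access.log.1', 'access.log.2']
set      = LogfileInterval::LogfileSet.new(logfiles, AccessLog)
builder  = LogfileInterval::IntervalBuilder.new(set, 300)

builder.each_interval.first(3).each do |interval|
  puts interval.start_time
  p interval.to_hash        # per-column aggregated values, new in 1.1.2
end
```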