request-log-analyzer 1.1.3 → 1.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/README.rdoc CHANGED
@@ -3,7 +3,7 @@
3
3
  This is a simple command line tool to analyze request log files of both Rails and
4
4
  Merb to produce a performance report. Its purpose is to find what actions are best candidates for optimization.
5
5
 
6
- * Analyzes Rails log files (all versions)
6
+ * Analyzes Rails log files (all versions), Merb logs, or any other log format
7
7
  * Can combine multiple files (handy if you are using logrotate)
8
8
  * Uses several metrics, including cumulative request time, average request time, process blockers, database and rendering time, HTTP methods and statuses, Rails action cache statistics, etc.) (Sample output: http://wiki.github.com/wvanbergen/request-log-analyzer/sample-output)
9
9
  * Low memory footprint (server-safe)
@@ -1,6 +1,7 @@
1
1
  #!/usr/bin/ruby
2
2
  require File.dirname(__FILE__) + '/../lib/request_log_analyzer'
3
3
  require File.dirname(__FILE__) + '/../lib/cli/command_line_arguments'
4
+ require File.dirname(__FILE__) + '/../lib/cli/progressbar'
4
5
  require File.dirname(__FILE__) + '/../lib/cli/tools'
5
6
 
6
7
  # Parse the arguments given via commandline
@@ -1,15 +1,25 @@
1
1
  require 'date'
2
- require File.dirname(__FILE__) + '/cli/progressbar'
3
2
 
3
+ # RequestLogAnalyzer is the base namespace in which all functionality of RequestLogAnalyzer is implemented.
4
+ #
5
+ # - This module itself contains some functions to help with class and source file loading.
6
+ # - The actual application resides in the RequestLogAnalyzer::Controller class.
4
7
  module RequestLogAnalyzer
5
-
8
+
9
+ # The current version of request-log-analyzer.
10
+ # This will be displayed in output reports etc.
6
11
  VERSION = '1.1'
7
12
 
13
+ # Loads constants in the RequestLogAnalyzer namespace using self.load_default_class_file(base, const)
14
+ # <tt>const</tt>:: The constant that is not yet loaded in the RequestLogAnalyzer namespace. This should be passed as a string or symbol.
8
15
  def self.const_missing(const)
9
16
  load_default_class_file(RequestLogAnalyzer, const)
10
17
  end
11
18
 
12
- # Function to implement
19
+ # Loads constants that reside in the RequestLogAnalyzer tree using the constant name
20
+ # and its base constant to determine the filename.
21
+ # <tt>base</tt>:: The base constant to load the constant from. This should be Foo when the constant Foo::Bar is being loaded.
22
+ # <tt>const</tt>:: The constant to load from the base constant as a string or symbol. This should be 'Bar' or :Bar when the constant Foo::Bar is being loaded.
13
23
  def self.load_default_class_file(base, const)
14
24
  path = to_underscore(base.to_s)
15
25
  basename = to_underscore(const.to_s)
@@ -19,14 +29,16 @@ module RequestLogAnalyzer
19
29
  end
20
30
 
21
31
  # Convert a string/symbol in camelcase (RequestLogAnalyzer::Controller) to underscores (request_log_analyzer/controller)
32
+ # This function can be used to load the file (using require) in which the given constant is defined.
33
+ # <tt>str</tt>:: The string to convert in the following format: <tt>ModuleName::ClassName</tt>
22
34
  def self.to_underscore(str)
23
35
  str.to_s.gsub(/::/, '/').gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').gsub(/([a-z\d])([A-Z])/,'\1_\2').tr("-", "_").downcase
24
36
  end
25
37
 
26
- # Convert a string/symbol in underscores (request_log_analyzer/controller) to camelcase (RequestLogAnalyzer::Controller)
38
+ # Convert a string/symbol in underscores (<tt>request_log_analyzer/controller</tt>) to camelcase
39
+ # (<tt>RequestLogAnalyzer::Controller</tt>). This can be used to find the class that is defined in a given filename.
40
+ # <tt>str</tt>:: The string to convert in the following format: <tt>module_name/class_name</tt>
27
41
  def self.to_camelcase(str)
28
- str.to_s.to_s.gsub(/\/(.?)/) { "::" + $1.upcase }.gsub(/(^|_)(.)/) { $2.upcase }
42
+ str.to_s.gsub(/\/(.?)/) { "::" + $1.upcase }.gsub(/(^|_)(.)/) { $2.upcase }
29
43
  end
30
44
  end
31
-
32
-
@@ -39,8 +39,8 @@ module RequestLogAnalyzer::FileFormat
39
39
 
40
40
  report do |analyze|
41
41
  analyze.timespan :line_type => :started
42
+ analyze.frequency :category => REQUEST_CATEGORIZER, :amount => 20, :title => "Top 20 by hits"
42
43
  analyze.hourly_spread :line_type => :started
43
-
44
44
  analyze.duration :dispatch_time, :category => REQUEST_CATEGORIZER, :title => 'Request dispatch duration'
45
45
  # analyze.duration :action_time, :category => REQUEST_CATEGORIZER, :title => 'Request action duration'
46
46
  # analyze.duration :after_filters_time, :category => REQUEST_CATEGORIZER, :title => 'Request after_filter duration'
@@ -162,14 +162,18 @@ module RequestLogAnalyzer::Output
162
162
  bar << colorize(characters[:block] * (width.to_f * (row[index].to_f - column[:treshold])).round, :red)
163
163
  row_values.push(bar)
164
164
  else
165
+ # Create a bar by combining block characters
165
166
  row_values.push(characters[:block] * (width.to_f * row[index].to_f).round)
166
167
  end
167
168
  else
169
+ # Too few characters for a ratio bar. Display nothing
168
170
  row_values.push('')
169
171
  end
170
172
  else
171
- alignment = (columns[index][:align] == :right ? '' : '-')
172
- row_values.push("%#{alignment}#{width}s" % row[index].to_s[0...width])
173
+ alignment = (columns[index][:align] == :right ? '' : '-')
174
+ cell_value = "%#{alignment}#{width}s" % row[index].to_s[0...width]
175
+ cell_value = colorize(cell_value, :bold, :brown) if columns[index][:highlight]
176
+ row_values.push(cell_value)
173
177
  end
174
178
  end
175
179
  puts row_values.join(style[:cell_separator] ? " #{characters[:vertical_line]} " : ' ')
@@ -1,12 +1,27 @@
1
+ # The RequestLogAnalyzer::Source module contains all functionality that loads requests from a given source
2
+ # and feed them to the pipeline for further processing. The requests (see RequestLogAnalyzer::Request) that
3
+ # will be parsed from a source, will be piped through filters (see RequestLogAnalyzer::Filter) and are then
4
+ # fed to an aggregator (see RequestLogAnalyzer::Aggregator). The source instance is thus the beginning of
5
+ # the RequestLogAnalyzer chain.
6
+ #
7
+ # - The base class for all sources is RequestLogAnalyzer::Source::Base. All source classes should inherit from this class.
8
+ # - Currently, RequestLogAnalyzer::Source::LogParser is the only implemented source.
1
9
  module RequestLogAnalyzer::Source
2
10
 
11
+ # Loads constants that reside in the RequestLogAnalyzer::Source namespace. This function uses
12
+ # RequestLogAnalyzer::load_default_class_file to load the file in which the constant is declared.
13
+ # <tt>const</tt>:: The constant to load in the RequestLogAnalyzer::Source namespace.
3
14
  def self.const_missing(const)
4
15
  RequestLogAnalyzer::load_default_class_file(self, const)
5
16
  end
6
17
 
7
- # Base Source class. All other sources inherit from this class
18
+ # The base Source class. All other sources should inherit from this class.
19
+ #
20
+ # A source implementation should at least implement the each_request method, which should yield
21
+ # RequestLogAnalyzer::Request instances that will be fed through the pipeline.
8
22
  class Base
9
23
 
24
+ # Make the Source instance aware of the current file format
10
25
  include RequestLogAnalyzer::FileFormat::Awareness
11
26
 
12
27
  # A hash of options
@@ -24,23 +39,29 @@ module RequestLogAnalyzer::Source
24
39
  # The number of skipped lines because of warnings
25
40
  attr_reader :skipped_lines
26
41
 
27
- # Base source class used to filter input requests.
28
-
29
- # Initializer
30
- # <tt>format</tt> The file format
31
- # <tt>options</tt> Are passed to the filters.
42
+ # Initializer, which will register the file format and save any options given as a hash.
43
+ # <tt>format</tt>:: The file format instance
44
+ # <tt>options</tt>:: A hash of options that can be used by a specific Source implementation
32
45
  def initialize(format, options = {})
33
46
  @options = options
34
47
  register_file_format(format)
35
48
  end
36
49
 
50
+ # The prepare method is called before the RequestLogAnalyzer::Source::Base#each_request method is called.
51
+ # Use this method to implement any initialization that should occur before this source can produce Request
52
+ # instances.
37
53
  def prepare
38
54
  end
39
55
 
40
- def each_request(&block)
56
+ # This function is called to actually produce the requests that will be sent into the pipeline.
57
+ # The implementation should yield instances of RequestLogAnalyzer::Request.
58
+ # <tt>options</tt>:: A Hash of options that can be used in the implementation.
59
+ def each_request(options = {}, &block) # :yields: request
41
60
  return true
42
61
  end
43
62
 
63
+ # This function is called after RequestLogAnalyzer::Source::Base#each_request finished. Any code to
64
+ # wrap up, free resources, etc. can be put in this method.
44
65
  def finalize
45
66
  end
46
67
 
@@ -7,18 +7,24 @@ module RequestLogAnalyzer::Source
7
7
  # The order in which lines occur is used to combine lines to a single request. If these lines
8
8
  # are mixed, requests cannot be combined properly. This can be the case if data is written to
9
9
  # the log file simultaneously by different mongrel processes. This problem is detected by the
10
- # parser, but the requests that are mixed up cannot be parsed. It will emit warnings when this
11
- # occurs.
10
+ # parser. It will emit warnings when this occurs. LogParser supports multiple parse strategies
11
+ # that deal differently with this problem.
12
12
  class LogParser < Base
13
13
 
14
+ # The default parse strategy that will be used to parse the input.
14
15
  DEFAULT_PARSE_STRATEGY = 'assume-correct'
16
+
17
+ # All available parse strategies.
15
18
  PARSE_STRATEGIES = ['cautious', 'assume-correct']
16
19
 
17
20
  attr_reader :source_files
18
21
 
19
- # Initializes the parser instance.
22
+ # Initializes the log file parser instance.
20
23
  # It will apply the language specific FileFormat module to this instance. It will use the line
21
- # definitions in this module to parse any input.
24
+ # definitions in this module to parse any input that it is given (see parse_io).
25
+ #
26
+ # <tt>format</tt>:: The current file format instance
27
+ # <tt>options</tt>:: A hash of options that are used by the parser
22
28
  def initialize(format, options = {})
23
29
  @line_definitions = {}
24
30
  @options = options
@@ -35,7 +41,11 @@ module RequestLogAnalyzer::Source
35
41
  self.register_file_format(format)
36
42
  end
37
43
 
38
- def each_request(options = {}, &block)
44
+ # Reads the input, which can either be a file, sequence of files or STDIN to parse
45
+ # lines specified in the FileFormat. These lines will be combined into Request instances,
46
+ # that will be yielded. The actual parsing occurs in the parse_io method.
47
+ # <tt>options</tt>:: A Hash of options that will be passed to parse_io.
48
+ def each_request(options = {}, &block) # :yields: request
39
49
 
40
50
  case @source_files
41
51
  when IO;
@@ -50,28 +60,46 @@ module RequestLogAnalyzer::Source
50
60
  end
51
61
  end
52
62
 
53
- # Parses a list of consequent files of the same format
54
- def parse_files(files, options = {}, &block)
63
+ # Parses a list of subsequent files of the same format, by calling parse_file for every
64
+ # file in the array.
65
+ # <tt>files</tt>:: The Array of files that should be parsed
66
+ # <tt>options</tt>:: A Hash of options that will be passed to parse_io.
67
+ def parse_files(files, options = {}, &block) # :yields: request
55
68
  files.each { |file| parse_file(file, options, &block) }
56
69
  end
57
70
 
58
- # Parses a file.
59
- # Creates an IO stream for the provided file, and sends it to parse_io for further handling
71
+ # Parses a log file. Creates an IO stream for the provided file, and sends it to parse_io for
72
+ # further handling. This method supports progress updates that can be used to display a progressbar
73
+ # <tt>file</tt>:: The file that should be parsed.
74
+ # <tt>options</tt>:: A Hash of options that will be passed to parse_io.
60
75
  def parse_file(file, options = {}, &block)
61
76
  @progress_handler.call(:started, file) if @progress_handler
62
77
  File.open(file, 'r') { |f| parse_io(f, options, &block) }
63
78
  @progress_handler.call(:finished, file) if @progress_handler
64
79
  end
65
80
 
81
+
82
+ # Parses an IO stream. It will simply call parse_io. This function does not support progress updates
83
+ # because the length of a stream is not known.
84
+ # <tt>stream</tt>:: The IO stream that should be parsed.
85
+ # <tt>options</tt>:: A Hash of options that will be passed to parse_io.
66
86
  def parse_stream(stream, options = {}, &block)
67
87
  parse_io(stream, options, &block)
68
88
  end
69
89
 
70
- # Finds a log line and then parses the information in the line.
71
- # Yields a hash containing the information found.
72
- # <tt>*line_types</tt> The log line types to look for (defaults to LOG_LINES.keys).
73
- # Yeilds a Hash when it encounters a chunk of information.
74
- def parse_io(io, options = {}, &block)
90
+ # This method loops over each line of the input stream. It will try to parse this line as any of
91
+ # the lines that are defined by the current file format (see RequestLogAnalyzer::FileFormat).
92
+ # It will then combine these parsed lines into requests using heuristics. These requests (see
93
+ # RequestLogAnalyzer::Request) will then be yielded for further processing in the pipeline.
94
+ #
95
+ # - RequestLogAnalyzer::LineDefinition#matches is called to test if a line matches a line definition of the file format.
96
+ # - update_current_request is used to combine parsed lines into requests using heuristics.
97
+ # - The method will yield progress updates if a progress handler is installed using progress=
98
+ # - The method will yield parse warnings if a warning handler is installed using warning=
99
+ #
100
+ # <tt>io</tt>:: The IO instance to use as source
101
+ # <tt>options</tt>:: A hash of options that can be used by the parser.
102
+ def parse_io(io, options = {}, &block) # :yields: request
75
103
 
76
104
  @current_io = io
77
105
  @current_io.each_line do |line|
@@ -95,19 +123,27 @@ module RequestLogAnalyzer::Source
95
123
  @current_io = nil
96
124
  end
97
125
 
98
- # Add a block to this method to install a progress handler while parsing
126
+ # Add a block to this method to install a progress handler while parsing.
127
+ # <tt>proc</tt>:: The proc that will be called to handle progress update messages
99
128
  def progress=(proc)
100
129
  @progress_handler = proc
101
130
  end
102
131
 
103
- # Add a block to this method to install a warning handler while parsing
132
+ # Add a block to this method to install a warning handler while parsing.
133
+ # <tt>proc</tt>:: The proc that will be called to handle parse warning messages
104
134
  def warning=(proc)
105
135
  @warning_handler = proc
106
136
  end
107
137
 
108
- # This method is called by the parser if it encounteres any problems.
109
- # It will call the warning handler. The default controller will pass all warnings to every
110
- # aggregator that is registered and running
138
+ # This method is called by the parser if it encounters any parsing problems.
139
+ # It will call the installed warning handler if any.
140
+ #
141
+ # By default, RequestLogAnalyzer::Controller will install a warning handler
142
+ # that will pass the warnings to each aggregator so they can do something useful
143
+ # with it.
144
+ #
145
+ # <tt>type</tt>:: The warning type (a Symbol)
146
+ # <tt>message</tt>:: A message explaining the warning
111
147
  def warn(type, message)
112
148
  @warning_handler.call(type, message, @current_io.lineno) if @warning_handler
113
149
  end
@@ -118,13 +154,25 @@ module RequestLogAnalyzer::Source
118
154
  # new request when a header line is encountered en will emit the request when a footer line
119
155
  # is encountered.
120
156
  #
157
+ # Combining the lines is done using heuristics. Problems can occur in this process. The
158
+ # current parse strategy defines how these cases are handled.
159
+ #
160
+ # When using the 'assume-correct' parse strategy (default):
161
+ # - Every line that is parsed before a header line is ignored as it cannot be included in
162
+ # any request. It will emit a :no_current_request warning.
163
+ # - If a header line is found before the previous requests was closed, the previous request
164
+ # will be yielded and a new request will be started.
165
+ #
166
+ # When using the 'cautious' parse strategy:
121
167
  # - Every line that is parsed before a header line is ignored as it cannot be included in
122
168
  # any request. It will emit a :no_current_request warning.
123
169
  # - A header line that is parsed before a request is closed by a footer line, is a sign of
124
- # an unprpertly ordered file. All data that is gathered for the request until then is
125
- # discarded, the next request is ignored as well and a :unclosed_request warning is
170
+ # an improperly ordered file. All data that is gathered for the request until then is
171
+ # discarded and the next request is ignored as well. An :unclosed_request warning is
126
172
  # emitted.
127
- def update_current_request(request_data, &block)
173
+ #
174
+ # <tt>request_data</tt>:: A hash of data that was parsed from the last line.
175
+ def update_current_request(request_data, &block) # :yields: request
128
176
  if header_line?(request_data)
129
177
  unless @current_request.nil?
130
178
  case options[:parse_strategy]
@@ -153,21 +201,28 @@ module RequestLogAnalyzer::Source
153
201
  end
154
202
  end
155
203
 
156
- # Handles the parsed request by calling the request handler.
157
- # The default controller will send the request to every running aggegator.
158
- def handle_request(request, &block)
204
+ # Handles the parsed request by sending it into the pipeline.
205
+ #
206
+ # - It will call RequestLogAnalyzer::Request#validate on the request instance
207
+ # - It will send the request into the pipeline, checking whether it was accepted by all the filters.
208
+ # - It will update the parsed_requests and skipped_requests variables accordingly
209
+ #
210
+ # <tt>request</tt>:: The parsed request instance (RequestLogAnalyzer::Request)
211
+ def handle_request(request, &block) # :yields: request
159
212
  @parsed_requests += 1
160
213
  request.validate
161
214
  accepted = block_given? ? yield(request) : true
162
215
  @skipped_requests += 1 if not accepted
163
216
  end
164
217
 
165
- # Checks whether a given line hash is a header line.
218
+ # Checks whether a given line hash is a header line according to the current file format.
219
+ # <tt>hash</tt>:: A hash of data that was parsed from the line.
166
220
  def header_line?(hash)
167
221
  hash[:line_definition].header
168
222
  end
169
223
 
170
- # Checks whether a given line hash is a footer line.
224
+ # Checks whether a given line hash is a footer line according to the current file format.
225
+ # <tt>hash</tt>:: A hash of data that was parsed from the line.
171
226
  def footer_line?(hash)
172
227
  hash[:line_definition].footer
173
228
  end
@@ -49,9 +49,11 @@ module RequestLogAnalyzer::Tracker
49
49
  duration = options[:duration].respond_to?(:call) ? options[:duration].call(request) : request[options[:duration]]
50
50
 
51
51
  if !duration.nil? && !category.nil?
52
- @categories[category] ||= {:hits => 0, :cumulative => 0.0}
52
+ @categories[category] ||= {:hits => 0, :cumulative => 0.0, :min => duration, :max => duration }
53
53
  @categories[category][:hits] += 1
54
54
  @categories[category][:cumulative] += duration
55
+ @categories[category][:min] = duration if duration < @categories[category][:min]
56
+ @categories[category][:max] = duration if duration > @categories[category][:max]
55
57
  end
56
58
  end
57
59
  end
@@ -63,7 +65,15 @@ module RequestLogAnalyzer::Tracker
63
65
  def cumulative_duration(cat)
64
66
  categories[cat][:cumulative]
65
67
  end
66
-
68
+
69
+ def min_duration(cat)
70
+ categories[cat][:min]
71
+ end
72
+
73
+ def max_duration(cat)
74
+ categories[cat][:max]
75
+ end
76
+
67
77
  def average_duration(cat)
68
78
  categories[cat][:cumulative] / categories[cat][:hits]
69
79
  end
@@ -106,11 +116,16 @@ module RequestLogAnalyzer::Tracker
106
116
  output.title(options[:title])
107
117
 
108
118
  top_categories = @categories.sort { |a, b| yield(b[1]) <=> yield(a[1]) }.slice(0...amount)
109
- output.table({:title => 'Category', :width => :rest}, {:title => 'Hits', :align => :right, :min_width => 4},
110
- {:title => 'Cumulative', :align => :right, :min_width => 10}, {:title => 'Average', :align => :right, :min_width => 8}) do |rows|
119
+ output.table({:title => 'Category', :width => :rest},
120
+ {:title => 'Hits', :align => :right, :highlight => (options[:sort] == :hits), :min_width => 4},
121
+ {:title => 'Cumulative', :align => :right, :highlight => (options[:sort] == :cumulative), :min_width => 10},
122
+ {:title => 'Average', :align => :right, :highlight => (options[:sort] == :average), :min_width => 8},
123
+ {:title => 'Min', :align => :right, :highlight => (options[:sort] == :min)},
124
+ {:title => 'Max', :align => :right, :highlight => (options[:sort] == :max)}) do |rows|
111
125
 
112
126
  top_categories.each do |(cat, info)|
113
- rows << [cat, info[:hits], "%0.02fs" % info[:cumulative], "%0.02fs" % (info[:cumulative] / info[:hits])]
127
+ rows << [cat, info[:hits], "%0.02fs" % info[:cumulative], "%0.02fs" % (info[:cumulative] / info[:hits]),
128
+ "%0.02fs" % info[:min], "%0.02fs" % info[:max]]
114
129
  end
115
130
  end
116
131
 
@@ -125,11 +140,11 @@ module RequestLogAnalyzer::Tracker
125
140
  options[:report].each do |report|
126
141
  case report
127
142
  when :average
128
- report_table(output, options[:top], :title => "#{options[:title]} - top #{options[:top]} by average time") { |cat| cat[:cumulative] / cat[:hits] }
143
+ report_table(output, options[:top], :title => "#{options[:title]} - top #{options[:top]} by average time", :sort => :average) { |cat| cat[:cumulative] / cat[:hits] }
129
144
  when :cumulative
130
- report_table(output, options[:top], :title => "#{options[:title]} - top #{options[:top]} by cumulative time") { |cat| cat[:cumulative] }
145
+ report_table(output, options[:top], :title => "#{options[:title]} - top #{options[:top]} by cumulative time", :sort => :cumulative) { |cat| cat[:cumulative] }
131
146
  when :hits
132
- report_table(output, options[:top], :title => "#{options[:title]} - top #{options[:top]} by hits") { |cat| cat[:hits] }
147
+ report_table(output, options[:top], :title => "#{options[:title]} - top #{options[:top]} by hits", :sort => :hits) { |cat| cat[:hits] }
133
148
  else
134
149
  raise "Unknown duration report specified: #{report}!"
135
150
  end
@@ -41,6 +41,15 @@ describe RequestLogAnalyzer::Tracker::Duration, 'static category' do
41
41
  @tracker.average_duration('b').should == 0.35
42
42
  end
43
43
 
44
+ it "should set min and max duration correctly" do
45
+ @tracker.update(request(:category => 'a', :duration => 0.2))
46
+ @tracker.update(request(:category => 'b', :duration => 0.3))
47
+ @tracker.update(request(:category => 'b', :duration => 0.4))
48
+
49
+ @tracker.min_duration('b').should == 0.3
50
+ @tracker.max_duration('b').should == 0.4
51
+ end
52
+
44
53
  end
45
54
 
46
55
  describe RequestLogAnalyzer::Tracker::Duration, 'dynamic category' do
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: request-log-analyzer
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.1.3
4
+ version: 1.1.4
5
5
  platform: ruby
6
6
  authors:
7
7
  - Willem van Bergen
@@ -10,7 +10,7 @@ autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
12
 
13
- date: 2009-01-29 00:00:00 +01:00
13
+ date: 2009-02-08 00:00:00 +01:00
14
14
  default_executable: request-log-analyzer
15
15
  dependencies: []
16
16
 
@@ -143,7 +143,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
143
143
  requirements: []
144
144
 
145
145
  rubyforge_project: r-l-a
146
- rubygems_version: 1.2.0
146
+ rubygems_version: 1.3.1
147
147
  signing_key:
148
148
  specification_version: 2
149
149
  summary: A command line tool to analyze Rails logs