erkki-production_log_analyzer 2009022401 → 2009022402
- data/Rakefile +1 -1
- data/lib/production_log/analyzer.rb +3 -13
- data/lib/production_log/parser.rb +1 -1
- metadata +1 -1
data/Rakefile
CHANGED
@@ -3,7 +3,7 @@ require 'hoe'
 $:.unshift './lib'
 require 'production_log/analyzer'
 
-Hoe.new 'production_log_analyzer', '2009022401' do |p|
+Hoe.new 'production_log_analyzer', '2009022402' do |p|
   p.summary = p.paragraphs_of('README.txt', 1).join ' '
   p.description = p.paragraphs_of('README.txt', 7).join ' '
   p.author = 'Eric Hodel'
data/lib/production_log/analyzer.rb
CHANGED
@@ -136,8 +136,6 @@ class Analyzer
   # An Array of all the request render times for the log file.
 
   attr_reader :render_times
-
-  attr_reader :row_counts, :query_counts
 
   ##
   # Generates and sends an email report with lots of fun stuff in it. This
@@ -177,8 +175,6 @@ class Analyzer
     @logfile_name = logfile_name
     @request_times = Hash.new { |h,k| h[k] = [] }
     @db_times = Hash.new { |h,k| h[k] = [] }
-    @row_counts = Hash.new { |h,k| h[k] = [] }
-    @query_counts = Hash.new { |h,k| h[k] = [] }
     @render_times = Hash.new { |h,k| h[k] = [] }
   end
 
@@ -192,8 +188,6 @@ class Analyzer
       next if entry_page.nil?
       @request_times[entry_page] << entry.request_time
       @db_times[entry_page] << entry.db_time
-      @row_counts[entry_page] << entry.row_count
-      @query_counts[entry_page] << entry.query_count
       @render_times[entry_page] << entry.render_time
     end
   end
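The removals above take the per-page row and query counters out of Analyzer; the accumulators that remain all use the block form of Hash.new, which builds and stores an empty Array the first time an unseen page is recorded. A minimal standalone sketch of that idiom (the controller name and timings are invented):

# The block runs on a key miss; because it assigns h[k] itself,
# the fresh Array is stored, so << works on first use of a key.
request_times = Hash.new { |h, k| h[k] = [] }

request_times['PostsController#index'] << 0.124
request_times['PostsController#index'] << 0.098

p request_times  # => {"PostsController#index"=>[0.124, 0.098]}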
@@ -330,12 +324,12 @@ class Analyzer
 
     # header
     record = [pad_request_name("#{title} Summary"), 'Count', 'Avg', 'Std Dev',
-              'Min', 'Max'
+              'Min', 'Max']
     list << record.join("\t")
 
     # all requests
     times = records.values.flatten
-    record = [times.average, times.standard_deviation, times.min, times.max
+    record = [times.average, times.standard_deviation, times.min, times.max]
     record.map! { |v| "%0.3f" % v }
     record.unshift [pad_request_name('ALL REQUESTS'), times.size]
     list << record.join("\t")
@@ -344,11 +338,7 @@ class Analyzer
     list << nil
 
     records.sort_by { |k,v| v.size}.reverse_each do |req, times|
-
-      average_rows = @row_counts[req].average
-      average_queries = @query_counts[req].average
-      end
-      record = [times.average, times.standard_deviation, times.min, times.max, average_queries || 0, average_rows || 0]
+      record = [times.average, times.standard_deviation, times.min, times.max]
       record.map! { |v| "%0.3f" % v }
       record.unshift ["#{pad_request_name req}", times.size]
       list << record.join("\t")
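The two hunks above close Array literals that 2009022401 left unterminated and trim each per-request record back to the four timing statistics. The formatting pattern that survives is compact enough to miss; this sketch spells it out with invented numbers standing in for times.average and friends:

# Invented values standing in for times.average, times.standard_deviation,
# times.min, and times.max.
record = [0.412, 0.051, 0.301, 0.587]
record.map! { |v| "%0.3f" % v }               # floats -> "0.412" etc.
record.unshift ['PostsController#index', 42]  # label and request count
puts record.join("\t")                        # join flattens the nested Array
# PostsController#index  42  0.412  0.051  0.301  0.587  (tab-separated)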
data/lib/production_log/parser.rb
CHANGED
@@ -105,7 +105,7 @@ module LogParser
       @request_size = log_info['Request Size'].to_i
       @response_size = log_info['Response Size'].to_i
 
-      @page = log_info['Processed']
+      @page = log_info['Processed'] if log_info['Processed']
       @page += ".#{log_info['Response Format']}" if log_info['Processed']
 
      @db_time = log_info['DB'].split(' ').first.to_f if log_info['DB']