log_sense 1.0.5 → 1.0.9
This diff shows the changes between publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.gitignore +1 -1
- data/Gemfile.lock +2 -2
- data/LICENSE.txt +1 -1
- data/exe/log_sense +3 -2
- data/lib/log_sense/apache_data_cruncher.rb +14 -4
- data/lib/log_sense/emitter.rb +1 -7
- data/lib/log_sense/ip_locator.rb +2 -0
- data/lib/log_sense/options_parser.rb +22 -9
- data/lib/log_sense/rails_data_cruncher.rb +14 -3
- data/lib/log_sense/rails_log_parser.rb +125 -6
- data/lib/log_sense/templates/_command_invocation.html.erb +29 -0
- data/lib/log_sense/templates/_command_invocation.txt.erb +6 -0
- data/lib/log_sense/templates/_output_table.html.erb +1 -1
- data/lib/log_sense/templates/_performance.html.erb +23 -0
- data/lib/log_sense/templates/_performance.txt.erb +9 -0
- data/lib/log_sense/templates/_summary.html.erb +34 -0
- data/lib/log_sense/templates/_summary.txt.erb +10 -0
- data/lib/log_sense/templates/_total_hits.html.erb +32 -0
- data/lib/log_sense/templates/apache.html.erb +216 -299
- data/lib/log_sense/templates/rails.txt.erb +28 -7
- data/lib/log_sense/version.rb +1 -1
- data/sample_logs/empty_log.log +0 -0
- data/sample_logs/safety-critical_org.log +364 -0
- data/sample_logs/spmbook_com.log +1636 -0
- metadata +13 -7
- data/alr-styles.css +0 -61
- data/lib/log_sense/templates/#apache.org.erb# +0 -266
- data/lib/log_sense/templates/.#apache.org.erb +0 -1
- data/lib/log_sense/templates/apache.org.erb +0 -266
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-metadata.gz:
-data.tar.gz:
+metadata.gz: db5e56ec451207b98789e32e541d30c97f274f7a046cf0ce863a0ff37597f180
+data.tar.gz: d0711b2517329a01137a45a4279db59a66d678ffcc165a058a0b1b04c3b1fecb
 SHA512:
-metadata.gz:
-data.tar.gz:
+metadata.gz: 0b050298809b1003e1cc2e0ac28fd903a0da1ef3214a7e7ce3cb09012d46ebcd135dc2ea8dab8985078de4a10f5a928ad13b8aefa6aa9d2f333e3cad50934ec0
+data.tar.gz: 49116ba2ec1da8f676b87f8512fdc5731be8c07203c52e864a6cdf4f470a1343adabd33153d5e77b6d64c2493e8ca18fff9a196c589d88aedd6fbc7fffbccc9e
data/.gitignore
CHANGED
data/Gemfile.lock
CHANGED
data/LICENSE.txt
CHANGED
data/exe/log_sense
CHANGED
@@ -6,10 +6,11 @@ require 'log_sense.rb'
 # Parse Command Line Arguments
 #

-# better be here... OptionsParser consumes ARGV
+# this better be here... OptionsParser consumes ARGV
 @command_line = ARGV.join(" ")
+
 @options = LogSense::OptionsParser.parse ARGV
-@input_file = @options[:input_file]
+@input_file = @options[:input_file] || ARGV[0]
 @output_file = @options[:output_file]

 if not @input_file
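The reordering above matters because the option parser strips recognized switches out of ARGV, so the raw command line has to be captured first, and whatever positional argument survives parsing can then serve as the fallback input file. A minimal sketch of that behaviour, using plain OptionParser and a hypothetical invocation rather than the gem's own OptionsParser module:

    require "optparse"

    # Hypothetical invocation: log_sense -f apache access.log
    ARGV.replace(["-f", "apache", "access.log"])

    command_line = ARGV.join(" ")   # capture first: "-f apache access.log"

    options = {}
    OptionParser.new do |opts|
      opts.on("-fFORMAT", "--input-format=FORMAT") { |f| options[:input_format] = f }
    end.parse!(ARGV)                # parse! removes recognized switches from ARGV

    ARGV                            # => ["access.log"], so options[:input_file] || ARGV[0] still finds the log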
data/lib/log_sense/apache_data_cruncher.rb
CHANGED
@@ -11,8 +11,8 @@ module LogSense
 last_day_s = db.execute "SELECT datetime from LogLine order by datetime desc limit 1"

 # make first and last day into dates or nil
-@first_day = first_day_s
-@last_day = last_day_s
+@first_day = first_day_s[0][0] ? nil : Date.parse(first_day_s[0][0])
+@last_day = last_day_s[0][0] ? nil : Date.parse(last_day_s[0][0])

 @total_days = 0
 if @first_day and @last_day
@@ -20,8 +20,13 @@ module LogSense
 end

 @log_size = db.execute "SELECT count(datetime) from LogLine"
-@
+@log_size = @log_size[0][0]
+
 @selfpolls_size = db.execute "SELECT count(datetime) from LogLine where ip == '::1'"
+@selfpolls_size = @selfpolls_size[0][0]
+
+@crawlers_size = db.execute "SELECT count(datetime) from LogLine where bot == 1"
+@crawlers_size = @crawlers_size[0][0]

 @first_day_requested = options[:from_date]
 @last_day_requested = options[:to_date]
@@ -74,8 +79,13 @@ module LogSense
 EOS

 @total_hits = db.execute "SELECT count(datetime) from LogLine where #{filter}"
-@
+@total_hits = @total_hits[0][0]
+
+@total_unique_visits = db.execute "SELECT count(distinct(unique_visitor)) from LogLine where #{filter}"
+@total_unique_visits = @total_unique_visits[0][0]
+
 @total_size = db.execute "SELECT #{human_readable_size} from LogLine where #{filter}"
+@total_size = @total_size[0][0]

 @daily_distribution = db.execute "SELECT date(datetime), #{human_readable_day}, count(datetime), count(distinct(unique_visitor)), #{human_readable_size} from LogLine where #{filter} group by date(datetime)"
 @time_distribution = db.execute "SELECT strftime('%H', datetime), count(datetime), count(distinct(unique_visitor)), #{human_readable_size} from LogLine where #{filter} group by strftime('%H', datetime)"
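The repeated "@x = @x[0][0]" pattern introduced in this file (and in the Rails cruncher below) unpacks scalar query results. A minimal illustrative sketch, assuming the sqlite3 gem, of why the unwrapping is needed:

    require "sqlite3"

    # SQLite3::Database#execute returns an array of rows, each row itself an array,
    # so a single count comes back wrapped twice.
    db = SQLite3::Database.new ":memory:"
    db.execute "CREATE TABLE LogLine(datetime TEXT)"
    db.execute "INSERT INTO LogLine VALUES ('2021-12-01T10:00:00')"

    log_size = db.execute "SELECT count(datetime) from LogLine"
    log_size        # => [[1]]
    log_size[0][0]  # => 1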
data/lib/log_sense/emitter.rb
CHANGED
@@ -33,14 +33,8 @@ module LogSense

 private

-def self.output_txt_table name, headings, rows
-name = "#+NAME: #{name}"
-table = Terminal::Table.new headings: headings, rows: rows, style: { border_x: "-", border_i: "|" }
-name + "\n" + table.to_s
-end
-
 def self.render(template, vars)
-@template = File.join(File.dirname(__FILE__), "templates", "_#{template}
+@template = File.join(File.dirname(__FILE__), "templates", "_#{template}")
 erb_template = File.read @template
 ERB.new(erb_template).result(OpenStruct.new(vars).instance_eval { binding })
 end
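render hands the whole vars hash to the template through an OpenStruct binding, which is what lets the ERB partials further down refer to data and options as bare names. A minimal sketch of that trick, with a hypothetical template string rather than one of the gem's templates:

    require "erb"
    require "ostruct"

    vars     = { title: "Daily Distribution", rows: 3 }   # hypothetical values
    template = "<%= title %>: <%= rows %> rows"

    # Evaluate the template in the binding of an OpenStruct built from the hash,
    # so each hash key becomes a method the template can call directly.
    ERB.new(template).result(OpenStruct.new(vars).instance_eval { binding })
    # => "Daily Distribution: 3 rows"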
data/lib/log_sense/ip_locator.rb
CHANGED
data/lib/log_sense/options_parser.rb
CHANGED
@@ -1,6 +1,6 @@
 require 'optparse'
 require 'optparse/date'
-require '
+require 'log_sense/version'

 module LogSense
 module OptionsParser
@@ -14,19 +14,19 @@ module LogSense
 opt_parser = OptionParser.new do |opts|
 opts.banner = "Usage: log_sense [options] [logfile]"

-opts.on("-fFORMAT", "--
+opts.on("-fFORMAT", "--input-format=FORMAT", String, "Input format (either rails or apache)") do |n|
 args[:input_format] = n
 end

-opts.on("-iINPUT_FILE", "--input=INPUT_FILE", String, "Input file") do |n|
+opts.on("-iINPUT_FILE", "--input-file=INPUT_FILE", String, "Input file") do |n|
 args[:input_file] = n
 end

-opts.on("-tFORMAT", "--
+opts.on("-tFORMAT", "--output-format=FORMAT", String, "Output format: html, org, txt, sqlite. See below for available formats") do |n|
 args[:output_format] = n
 end

-opts.on("-oOUTPUT_FILE", "--output=OUTPUT_FILE", String, "Output file") do |n|
+opts.on("-oOUTPUT_FILE", "--output-file=OUTPUT_FILE", String, "Output file") do |n|
 args[:output_file] = n
 end

@@ -51,22 +51,35 @@ module LogSense
 end
 end

-opts.on("-
+opts.on("-ns", "--no-selfpoll", "Ignore self poll entries (requests from ::1; applies to Apache Logs)") do
 args[:no_selfpoll] = true
 end

 opts.on("-v", "--version", "Prints version information") do
 puts "log_sense version #{LogSense::VERSION}"
-puts "Copyright (C)
+puts "Copyright (C) 2021 Shair.Tech"
 puts "Distributed under the terms of the MIT license"
-puts ""
-puts "Written by Adolfo Villafiorita"
 exit
 end

 opts.on("-h", "--help", "Prints this help") do
 puts opts
+puts ""
 puts "This is version #{LogSense::VERSION}"
+
+puts ""
+puts "Output formats"
+pathname = File.join(File.dirname(__FILE__), "templates", "*")
+templates = Dir.glob(pathname).select { |x| ! File.basename(x).start_with? /_|#/ and ! File.basename(x).end_with? "~" }
+components = templates.map { |x| File.basename(x).split "." }.group_by { |x| x[0] }
+components.each do |k, vs|
+puts "#{k} parsing can produce the following outputs:"
+puts " - sqlite"
+vs.each do |v|
+puts " - #{v[1]}"
+end
+end
+
 exit
 end
 end
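The new help text derives the list of output formats from the template directory: filenames are split on dots and grouped by the part before the first dot (the input format), with partials and editor backups filtered out. A minimal sketch of that grouping on hypothetical filenames, instead of the gem's real templates/ glob:

    # Hypothetical template filenames; the gem globs its templates/ directory instead.
    templates = ["apache.html.erb", "apache.txt.erb", "rails.txt.erb", "_summary.html.erb"]

    visible    = templates.reject { |t| t.start_with?("_", "#") || t.end_with?("~") }
    components = visible.map { |t| t.split(".") }.group_by(&:first)
    # => {"apache"=>[["apache", "html", "erb"], ["apache", "txt", "erb"]],
    #     "rails"=>[["rails", "txt", "erb"]]}

    components.each do |input_format, formats|
      puts "#{input_format} parsing can produce the following outputs:"
      puts "  - sqlite"
      formats.each { |f| puts "  - #{f[1]}" }
    end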
data/lib/log_sense/rails_data_cruncher.rb
CHANGED
@@ -8,22 +8,28 @@ module LogSense
 #

 def self.crunch db, options = { limit: 30 }
-first_day_s = db.execute "SELECT started_at from Event order by started_at limit 1"
+first_day_s = db.execute "SELECT started_at from Event where started_at not NULL order by started_at limit 1"
 # we could use ended_at to cover the full activity period, but I prefer started_at
 # with the meaning that the monitor event initiation
 last_day_s = db.execute "SELECT started_at from Event order by started_at desc limit 1"

 # make first and last day into dates or nil
 # TODO: bug possible value here: [[nil]], which is not empty
-@first_day = first_day_s
-@last_day =
+@first_day = first_day_s&.first&.first ? Date.parse(first_day_s[0][0]) : nil
+@last_day = last_day_s&.first&.first ? Date.parse(last_day_s[0][0]) : nil

 @total_days = 0
 if @first_day and @last_day
 @total_days = (@last_day - @first_day).to_i
 end

+@log_size = db.execute "SELECT count(started_at) from Event"
+@log_size = @log_size[0][0]
+
+# SAME AS ABOVE (but log_size is wrong in the case of Rails
+# logs, since an event takes more than one line)
 @events = db.execute "SELECT count(started_at) from Event"
+@events = @events[0][0]

 @first_day_requested = options[:from_date]
 @last_day_requested = options[:to_date]
@@ -74,6 +80,9 @@ module LogSense

 @total_events = db.execute "SELECT count(started_at) from Event where #{filter}"

+@total_unique_visits = db.execute "SELECT count(distinct(unique_visitor)) from Event where #{filter}"
+@total_unique_visits = @total_unique_visits[0][0]
+
 @daily_distribution = db.execute "SELECT date(started_at), #{human_readable_day}, count(started_at) from Event where #{filter} group by date(started_at)"
 @time_distribution = db.execute "SELECT strftime('%H', started_at), count(started_at) from Event where #{filter} group by strftime('%H', started_at)"

@@ -91,6 +100,8 @@ module LogSense

 @performance = db.execute "SELECT distinct(controller), count(controller), printf(\"%.2f\", min(duration_total_ms)), printf(\"%.2f\", avg(duration_total_ms)), printf(\"%.2f\", max(duration_total_ms)) from Event group by controller order by controller"

+@fatal = db.execute "SELECT strftime(\"%Y-%m-%d %H:%M\", started_at), ip, url, log_id FROM Event WHERE exit_status == 'F'"
+
 data = {}
 self.instance_variables.each do |variable|
 var_as_symbol = variable.to_s[1..-1].to_sym
data/lib/log_sense/rails_log_parser.rb
CHANGED
@@ -8,10 +8,12 @@ module LogSense
 db = SQLite3::Database.new ":memory:"
 db.execute 'CREATE TABLE IF NOT EXISTS Event(
 id INTEGER PRIMARY KEY AUTOINCREMENT,
+exit_status TEXT,
 started_at TEXT,
 ended_at TEXT,
 log_id TEXT,
 ip TEXT,
+unique_visitor TEXT,
 url TEXT,
 controller TEXT,
 html_verb TEXT,
@@ -19,14 +21,17 @@ module LogSense
 duration_total_ms FLOAT,
 duration_views_ms FLOAT,
 duration_ar_ms FLOAT,
-allocations INTEGER
+allocations INTEGER,
+comment TEXT
 )'

 ins = db.prepare("insert into Event(
+exit_status,
 started_at,
 ended_at,
 log_id,
 ip,
+unique_visitor,
 url,
 controller,
 html_verb,
@@ -34,8 +39,10 @@ module LogSense
 duration_total_ms,
 duration_views_ms,
 duration_ar_ms,
-allocations
-
+allocations,
+comment
+)
+values (#{Array.new(15, '?').join(', ')})")

 # requests in the log might be interleaved.
 #
@@ -59,7 +66,7 @@ module LogSense

 File.readlines(filename).each do |line|
 # We discard LOG_LEVEL != 'I'
-next if line[0] != 'I'
+next if line[0] != 'I' and line[0] != 'F'

 data = self.match_and_process_start line
 if data
@@ -75,6 +82,37 @@ module LogSense
 next
 end

+data = self.match_and_process_fatal line
+if data
+id = data[:log_id]
+# it might as well be that the first event started before
+# the log. With this, we make sure we add only events whose
+# start was logged and parsed
+if pending[id]
+event = data.merge (pending[id] || {})
+
+ins.execute(
+event[:exit_status],
+event[:started_at],
+event[:ended_at],
+event[:log_id],
+event[:ip],
+"#{DateTime.parse(event[:started_at] || event[:ended_at]).strftime("%Y-%m-%d")} #{event[:ip]}",
+event[:url],
+event[:controller],
+event[:html_verb],
+event[:status],
+event[:duration_total_ms],
+event[:duration_views_ms],
+event[:duration_ar_ms],
+event[:allocations],
+event[:comment]
+)
+
+pending.delete(id)
+end
+end
+
 data = self.match_and_process_completed line
 if data
 id = data[:log_id]
@@ -86,10 +124,12 @@ module LogSense
 event = data.merge (pending[id] || {})

 ins.execute(
+event[:exit_status],
 event[:started_at],
 event[:ended_at],
 event[:log_id],
 event[:ip],
+"#{DateTime.parse(event[:started_at] || event[:ended_at]).strftime("%Y-%m-%d")} #{event[:ip]}",
 event[:url],
 event[:controller],
 event[:html_verb],
@@ -97,12 +137,47 @@ module LogSense
 event[:duration_total_ms],
 event[:duration_views_ms],
 event[:duration_ar_ms],
-event[:allocations]
+event[:allocations],
+event[:comment]
 )

 pending.delete(id)
 end
 end
+
+
+data = self.match_and_process_completed_no_alloc line
+if data
+id = data[:log_id]
+
+# it might as well be that the first event started before
+# the log. With this, we make sure we add only events whose
+# start was logged and parsed
+if pending[id]
+event = data.merge (pending[id] || {})
+
+ins.execute(
+event[:exit_status],
+event[:started_at],
+event[:ended_at],
+event[:log_id],
+event[:ip],
+"#{DateTime.parse(event[:ended_at]).strftime("%Y-%m-%d")} #{event[:ip]}",
+event[:url],
+event[:controller],
+event[:html_verb],
+event[:status],
+event[:duration_total_ms],
+event[:duration_views_ms],
+event[:duration_ar_ms],
+event[:allocations],
+event[:comment]
+)
+
+pending.delete(id)
+end
+end
+
 end

 db
@@ -138,9 +213,10 @@ module LogSense
 COMPLETED_REGEXP = /I, \[#{TIMESTAMP} #[0-9]+\] INFO -- : \[#{ID}\] Completed #{STATUS} [^ ]+ in (?<total>#{MSECS})ms \(Views: (?<views>#{MSECS})ms \| ActiveRecord: (?<arec>#{MSECS})ms \| Allocations: (?<alloc>[0-9]+)\)/

 def self.match_and_process_completed line
-matchdata = COMPLETED_REGEXP.match line
+matchdata = (COMPLETED_REGEXP.match line)
 if matchdata
 {
+exit_status: "I",
 ended_at: matchdata[:timestamp],
 log_id: matchdata[:id],
 status: matchdata[:status],
@@ -148,12 +224,36 @@ module LogSense
 duration_views_ms: matchdata[:views],
 duration_ar_ms: matchdata[:arec],
 allocations: matchdata[:alloc],
+comment: ""
+}
+else
+nil
+end
+end
+
+# I, [2021-12-09T16:53:52.657727 #2735058] INFO -- : [0064e403-9eb2-439d-8fe1-a334c86f5532] Completed 200 OK in 13ms (Views: 11.1ms | ActiveRecord: 1.2ms)
+COMPLETED_NO_ALLOC_REGEXP = /I, \[#{TIMESTAMP} #[0-9]+\] INFO -- : \[#{ID}\] Completed #{STATUS} [^ ]+ in (?<total>#{MSECS})ms \(Views: (?<views>#{MSECS})ms \| ActiveRecord: (?<arec>#{MSECS})ms\)/
+
+def self.match_and_process_completed_no_alloc line
+matchdata = (COMPLETED_NO_ALLOC_REGEXP.match line)
+if matchdata
+{
+exit_status: "I",
+ended_at: matchdata[:timestamp],
+log_id: matchdata[:id],
+status: matchdata[:status],
+duration_total_ms: matchdata[:total],
+duration_views_ms: matchdata[:views],
+duration_ar_ms: matchdata[:arec],
+allocations: -1,
+comment: ""
 }
 else
 nil
 end
 end

+
 # I, [2021-10-19T08:16:34.345162 #10477] INFO -- : [67103c0d-455d-4fe8-951e-87e97628cb66] Processing by PeopleController#show as HTML
 PROCESSING_REGEXP = /I, \[#{TIMESTAMP} #[0-9]+\] INFO -- : \[#{ID}\] Processing by (?<controller>[^ ]+) as/

@@ -169,6 +269,25 @@ module LogSense
 end
 end

+# F, [2021-12-04T00:34:05.838973 #2735058] FATAL -- : [3a16162e-a6a5-435e-a9d8-c4df5dc0f728]
+# F, [2021-12-04T00:34:05.839157 #2735058] FATAL -- : [3a16162e-a6a5-435e-a9d8-c4df5dc0f728] ActionController::RoutingError (No route matches [GET] "/wp/wp-includes/wlwmanifest.xml"):
+# F, [2021-12-04T00:34:05.839209 #2735058] FATAL -- : [3a16162e-a6a5-435e-a9d8-c4df5dc0f728]
+# F, [2021-12-04T00:34:05.839269 #2735058] FATAL -- : [3a16162e-a6a5-435e-a9d8-c4df5dc0f728] actionpack (5.2.4.4) lib/action_dispatch/middleware/debug_exceptions.rb:65:in `call'
+FATAL_REGEXP = /F, \[#{TIMESTAMP} #[0-9]+\] FATAL -- : \[#{ID}\] (?<comment>.*)$/
+
+def self.match_and_process_fatal line
+matchdata = FATAL_REGEXP.match line
+if matchdata
+{
+exit_status: "F",
+log_id: matchdata[:id],
+comment: matchdata[:comment]
+}
+else
+nil
+end
+end
+
 end

 end
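For the new FATAL handling it may help to see the capture groups in action. A minimal sketch against the second sample line quoted above, with TIMESTAMP and ID approximated here for illustration only (the gem builds FATAL_REGEXP from its own regexp fragments defined elsewhere in the file):

    # Approximations for this example; not the gem's actual constants.
    TIMESTAMP = /(?<timestamp>[^ \]]+)/
    ID        = /(?<id>[0-9a-f-]+)/
    FATAL     = /F, \[#{TIMESTAMP} #[0-9]+\] FATAL -- : \[#{ID}\] (?<comment>.*)$/

    line = 'F, [2021-12-04T00:34:05.839157 #2735058] FATAL -- : ' \
           '[3a16162e-a6a5-435e-a9d8-c4df5dc0f728] ActionController::RoutingError ' \
           '(No route matches [GET] "/wp/wp-includes/wlwmanifest.xml"):'

    m = FATAL.match(line)
    m[:id]      # => "3a16162e-a6a5-435e-a9d8-c4df5dc0f728"
    m[:comment] # => 'ActionController::RoutingError (No route matches [GET] "/wp/wp-includes/wlwmanifest.xml"):'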
data/lib/log_sense/templates/_command_invocation.html.erb
ADDED
@@ -0,0 +1,29 @@
+<table class="table unstriped command-invocation">
+<tbody>
+<tr>
+<th>CLI Command</th>
+<td><code><%= data[:command] %></code></td>
+</tr>
+<tr>
+<th>Input file</th>
+<td><code><%= (data[:log_file] || "stdin") %></code></td>
+</tr>
+<tr>
+<th>Ignore crawlers</th>
+<td><code><%= options[:ignore_crawlers] %></code></td></tr>
+<tr>
+<th>Only crawlers</th>
+<td><code><%= options[:only_crawlers] %></code></td>
+</tr>
+<tr>
+<th>No selfpoll</th>
+<td><code><%= options[:no_selfpoll] %></code></td>
+</tr>
+<tr>
+<th>Filter by date</th>
+<td>
+<code><%= (options[:from_date] != nil or options[:to_date] != nil) %></code>
+</td>
+</tr>
+</tbody>
+</table>
data/lib/log_sense/templates/_output_table.html.erb
CHANGED
@@ -4,7 +4,7 @@ def slugify string
 end
 %>

-<table id="<%= slugify(title || "")
+<table id="<%= slugify(title || "") %>-table" class="table unstriped data-table <%= slugify(title || "") %>">
 <thead>
 <tr>
 <% header.each do |heading| %>
data/lib/log_sense/templates/_performance.html.erb
ADDED
@@ -0,0 +1,23 @@
+<table class="table unstriped performance">
+<tbody>
+<tr>
+<th>Analysis started at</th>
+<td><%= data[:started_at].to_s %></td>
+</tr>
+<tr>
+<th>Analysis ended at</th>
+<td><%= data[:ended_at].to_s %></td>
+</tr>
+<tr>
+<th>Duration</th>
+<td><%= "%02d:%02d" % [data[:duration] / 60, data[:duration] % 60] %></td>
+</tr>
+<tr>
+<th>Events</th>
+<td><%= data[:log_size] %></td>
+</tr>
+<tr>
+<th>Parsed Events/sec</th>
+<td><%= "%.2f" % (data[:log_size] / data[:duration]) %></td></tr>
+</tbody>
+</table>
data/lib/log_sense/templates/_performance.txt.erb
ADDED
@@ -0,0 +1,9 @@
+<%=
+table = Terminal::Table.new rows: [ ["Analysis started at", data[:started_at].to_s ],
+["Analysis ended at", data[:ended_at].to_s ],
+["Duration", "%02d:%02d" % [data[:duration] / 60, data[:duration] % 60] ],
+["Events", "%9d" % data[:log_size] ],
+["Parsed events/sec", "%.2f" % (data[:log_size] / data[:duration]) ] ]
+table.align_column(2, :right)
+table
+%>
data/lib/log_sense/templates/_summary.html.erb
ADDED
@@ -0,0 +1,34 @@
+<table class="table unstriped summary">
+<tr>
+<th>Input file</th>
+<td><b><%= (data[:log_file] || "stdin") %></b></td>
+</tr>
+<tr>
+<th class="period">Period Analyzed</th>
+<td class="period">
+<%= data[:first_day_in_analysis] %>
+--
+<%= data[:last_day_in_analysis] %>
+</td>
+</tr>
+<tr>
+<th class="days">Days </th>
+<td class="days"><%= data[:total_days_in_analysis] %></td>
+</tr>
+<tr>
+<th class="hits">Hits</th>
+<td class="hits"><%= data[:total_hits] %></td>
+</tr>
+<tr>
+<th class="unique-visits">Unique Visits</th>
+<td class="unique-visits"><%= data[:total_unique_visits] %></td>
+</tr>
+<tr>
+<th class="avg-hits-per-unique-visits">Unique Visits</th>
+<td class="avg-hits-per-unique-visits"><%= data[:total_unique_visits] != 0 ? data[:total_hits] / data[:total_unique_visits] : "N/A" %></td>
+</tr>
+<tr>
+<th class="tx">Tx</th>
+<td class="tx"><%= data[:total_size] %></td>
+</tr>
+</table>
data/lib/log_sense/templates/_summary.txt.erb
ADDED
@@ -0,0 +1,10 @@
+<%=
+table = Terminal::Table.new rows: [ ["Input File", data[:log_file] || "stdin" ],
+["Period Analyzed", "#{data[:first_day_in_analysis]} -- #{data[:last_day_in_analysis]}" ],
+["Days", data[:total_days_in_analysis] ],
+["Events", data[:events] ],
+["Unique Visits", data[:total_unique_visits] ],
+["Avg. Events per Visit", data[:total_unique_visits] != 0 ? data[:events] / data[:total_unique_visits] : "N/A" ]
+]
+table
+%>
data/lib/log_sense/templates/_total_hits.html.erb
ADDED
@@ -0,0 +1,32 @@
+<table class="table unstriped log-structure">
+<tbody>
+<tr>
+<th>Input file</th>
+<td><b><%= (data[:log_file] || "stdin") %></b></td>
+</tr>
+<tr>
+<th>Period in Log</th>
+<td><%= data[:first_day] %> -- <%= data[:last_day] %></td>
+</tr>
+<tr>
+<th>Total days</th>
+<td><%= data[:total_days] %></td>
+</tr>
+<tr>
+<th>Log size</th>
+<td><%= data[:log_size] %></td>
+</tr>
+<tr>
+<th>Self poll entries</th>
+<td><%= data[:selfpolls_size] %></td>
+</tr>
+<tr>
+<th>Crawlers</th>
+<td><%= data[:crawlers_size] %></td>
+</tr>
+<tr>
+<th>Entries considered</th>
+<td><%= data[:total_hits] %></td>
+</tr>
+</tbody>
+</table>