log_sense 1.4.0 → 1.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,10 +2,8 @@ require 'sqlite3'
 
 module LogSense
   module RailsLogParser
-    def self.parse filename, options = {}
-      content = filename ? File.readlines(filename) : ARGF.readlines
-
-      db = SQLite3::Database.new ":memory:"
+    def self.parse(streams, options = {})
+      db = SQLite3::Database.new ':memory:'
 
       db.execute 'CREATE TABLE IF NOT EXISTS Event(
             id INTEGER PRIMARY KEY AUTOINCREMENT,
             exit_status TEXT,
@@ -17,12 +15,14 @@ module LogSense
             url TEXT,
             controller TEXT,
             html_verb TEXT,
-            status INTEGER,
+            status INTEGER,
             duration_total_ms FLOAT,
             duration_views_ms FLOAT,
             duration_ar_ms FLOAT,
             allocations INTEGER,
-            comment TEXT
+            comment TEXT,
+            source_file TEXT,
+            line_number INTEGER
             )'
 
       ins = db.prepare("insert into Event(
@@ -35,30 +35,34 @@ module LogSense
             url,
             controller,
             html_verb,
-            status,
+            status,
             duration_total_ms,
             duration_views_ms,
             duration_ar_ms,
             allocations,
-            comment
+            comment,
+            source_file,
+            line_number
             )
-            values (#{Array.new(15, '?').join(', ')})")
+            values (#{Array.new(17, '?').join(', ')})")
 
-
       db.execute 'CREATE TABLE IF NOT EXISTS Error(
             id INTEGER PRIMARY KEY AUTOINCREMENT,
             log_id TEXT,
             context TEXT,
-            description TEXT
+            description TEXT,
+            filename TEXT,
+            line_number INTEGER
             )'
 
       ins_error = db.prepare("insert into Error(
             log_id,
             context,
-            description
+            description,
+            filename,
+            line_number
             )
-            values (?, ?, ?)")
-
+            values (?, ?, ?, ?, ?)")
 
       # requests in the log might be interleaved.
       #
@@ -79,94 +83,101 @@ module LogSense
       # and they appears in the order shown above: started, processing, ...
       #
       # Different requests might be interleaved, of course
-
-      File.readlines(filename).each do |line|
-        # I and F for completed requests, [ is for error messages
-        next if line[0] != 'I' and line[0] != 'F' and line[0] != '['
-
-        data = self.match_and_process_error line
-        if data
-          ins_error.execute(data[:log_id], data[:context], data[:description])
-          next
-        end
-
-        data = self.match_and_process_start line
-        if data
-          id = data[:log_id]
-          pending[id] = data.merge (pending[id] || {})
-          next
-        end
-
-        data = self.match_and_process_processing_by line
-        if data
-          id = data[:log_id]
-          pending[id] = data.merge (pending[id] || {})
-          next
-        end
-
-        data = self.match_and_process_fatal line
-        if data
-          id = data[:log_id]
-          # it might as well be that the first event started before
-          # the log. With this, we make sure we add only events whose
-          # start was logged and parsed
-          if pending[id]
-            event = data.merge (pending[id] || {})
+      #
+      streams.each do |stream|
+        stream.readlines.each_with_index do |line, line_number|
+          filename = stream == $stdin ? "stdin" : stream.path
 
-            ins.execute(
-              event[:exit_status],
-              event[:started_at],
-              event[:ended_at],
-              event[:log_id],
-              event[:ip],
-              unique_visitor_id(event),
-              event[:url],
-              event[:controller],
-              event[:html_verb],
-              event[:status],
-              event[:duration_total_ms],
-              event[:duration_views_ms],
-              event[:duration_ar_ms],
-              event[:allocations],
-              event[:comment]
-            )
+          # I and F for completed requests, [ is for error messages
+          next if line[0] != 'I' and line[0] != 'F' and line[0] != '['
 
-            pending.delete(id)
+          data = match_and_process_error line
+          if data
+            ins_error.execute(data[:log_id], data[:context], data[:description], filename, line_number)
+            next
+          end
+
+          data = match_and_process_start line
+          if data
+            id = data[:log_id]
+            pending[id] = data.merge(pending[id] || {})
+            next
           end
-        end
-
-        data = self.match_and_process_completed line
-        if data
-          id = data[:log_id]
 
-          # it might as well be that the first event started before
-          # the log. With this, we make sure we add only events whose
-          # start was logged and parsed
-          if pending[id]
-            event = data.merge (pending[id] || {})
+          data = match_and_process_processing_by line
+          if data
+            id = data[:log_id]
+            pending[id] = data.merge(pending[id] || {})
+            next
+          end
 
-            ins.execute(
-              event[:exit_status],
-              event[:started_at],
-              event[:ended_at],
-              event[:log_id],
-              event[:ip],
-              unique_visitor_id(event),
-              event[:url],
-              event[:controller],
-              event[:html_verb],
-              event[:status],
-              event[:duration_total_ms],
-              event[:duration_views_ms],
-              event[:duration_ar_ms],
-              event[:allocations],
-              event[:comment]
-            )
+          data = match_and_process_fatal line
+          if data
+            id = data[:log_id]
+            # it might as well be that the first event started before
+            # the log. With this, we make sure we add only events whose
+            # start was logged and parsed
+            if pending[id]
+              event = data.merge(pending[id] || {})
+
+              ins.execute(
+                event[:exit_status],
+                event[:started_at],
+                event[:ended_at],
+                event[:log_id],
+                event[:ip],
+                unique_visitor_id(event),
+                event[:url],
+                event[:controller],
+                event[:html_verb],
+                event[:status],
+                event[:duration_total_ms],
+                event[:duration_views_ms],
+                event[:duration_ar_ms],
+                event[:allocations],
+                event[:comment],
+                filename,
+                line_number
+              )
+
+              pending.delete(id)
+            end
+          end
 
-            pending.delete(id)
+          data = self.match_and_process_completed line
+          if data
+            id = data[:log_id]
+
+            # it might as well be that the first event started before
+            # the log. With this, we make sure we add only events whose
+            # start was logged and parsed
+            if pending[id]
+              event = data.merge (pending[id] || {})
+
+              ins.execute(
+                event[:exit_status],
+                event[:started_at],
+                event[:ended_at],
+                event[:log_id],
+                event[:ip],
+                unique_visitor_id(event),
+                event[:url],
+                event[:controller],
+                event[:html_verb],
+                event[:status],
+                event[:duration_total_ms],
+                event[:duration_views_ms],
+                event[:duration_ar_ms],
+                event[:allocations],
+                event[:comment],
+                filename,
+                line_number
+              )
+
+              pending.delete(id)
+            end
           end
         end
-
       end
 
       db
@@ -226,7 +237,7 @@ module LogSense
     # I, [2021-12-06T14:28:19.736545 #2804090] INFO -- : [34091cb5-3e7b-4042-aaf8-6c6510d3f14c] Completed 500 Internal Server Error in 66ms (ActiveRecord: 8.0ms | Allocations: 24885)
     COMPLETED_REGEXP = /I, \[#{TIMESTAMP} #[0-9]+\] INFO -- : \[#{ID}\] Completed #{STATUS} #{STATUS_IN_WORDS} in (?<total>#{MSECS})ms \((Views: (?<views>#{MSECS})ms \| )?ActiveRecord: (?<arec>#{MSECS})ms( \| Allocations: (?<alloc>[0-9]+))?\)/
 
-    def self.match_and_process_completed line
+    def self.match_and_process_completed(line)
       matchdata = (COMPLETED_REGEXP.match line)
       # exit_status = matchdata[:status].to_i == 500 ? "E" : "I"
       if matchdata
@@ -267,7 +278,7 @@ module LogSense
     # F, [2021-12-04T00:34:05.839269 #2735058] FATAL -- : [3a16162e-a6a5-435e-a9d8-c4df5dc0f728] actionpack (5.2.4.4) lib/action_dispatch/middleware/debug_exceptions.rb:65:in `call'
     FATAL_REGEXP = /F, \[#{TIMESTAMP} #[0-9]+\] FATAL -- : \[#{ID}\] (?<comment>.*)$/
 
-    def self.match_and_process_fatal line
+    def self.match_and_process_fatal(line)
      matchdata = FATAL_REGEXP.match line
      if matchdata
        {
@@ -281,11 +292,8 @@ module LogSense
     end
 
     # generate a unique visitor id from an event
-    def self.unique_visitor_id event
+    def self.unique_visitor_id(event)
       "#{DateTime.parse(event[:started_at] || event[:ended_at] || "1970-01-01").strftime("%Y-%m-%d")} #{event[:ip]}"
     end
-
   end
-
 end
-
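
Note: the parser entry point changes in this release from `parse filename` to `parse(streams, options = {})`. It now takes an array of stream objects (each must respond to `readlines` and, unless it is `$stdin`, to `path`) and still returns the in-memory SQLite3 database. A minimal usage sketch, not part of the diff; the log file name is illustrative:

    # hypothetical caller; "production.log" is an illustrative path
    stream = File.open("production.log")
    db = LogSense::RailsLogParser.parse([stream])

    # or, reading from standard input:
    db = LogSense::RailsLogParser.parse([$stdin])
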
@@ -4,10 +4,6 @@
     <th>CLI Command</th>
     <td><code><%= data[:command] %></code></td>
   </tr>
-  <tr>
-    <th>Input file</th>
-    <td><code><%= (data[:log_file] || "stdin") %></code></td>
-  </tr>
   <tr>
     <th>Ignore crawlers</th>
     <td><code><%= options[:ignore_crawlers] %></code></td></tr>
@@ -1,6 +1,7 @@
 <%=
-  table = Terminal::Table.new rows: [ ["Command", data[:command] ],
-                                      ["Input file", data[:log_file] || "stdin" ]
-                                    ]
+  table = Terminal::Table.new rows: [
+    ["Command", data[:command] ],
+  ]
+  table.style = { border_i: "|" }
   table
 %>
@@ -1,9 +1,3 @@
-<%
-  def slugify string
-    string.downcase.gsub(/ +/, '-')
-  end
-%>
-
 <table id="table-<%= index %>" class="table unstriped">
   <thead>
     <tr>
@@ -22,7 +16,7 @@ end
       <%= report[:datatable_options] + "," if report[:datatable_options] %>
       columns: [
         <% report[:header].each do |header| %>
-          { data: '<%= header %>', className: '<%= slugify(header) %>' },
+          { data: '<%= header %>', className: '<%= Emitter::slugify(header) %>' },
         <% end %>
       ]
   });
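
Note: the `slugify` helper previously defined inline in this template is removed, and the call site above now uses `Emitter::slugify`. A plausible sketch of the relocated helper, assuming it lives on the gem's Emitter module (the placement is an assumption; the body is taken from the removed template code):

    module LogSense
      module Emitter
        # turn a report header such as "Total Hits" into a
        # CSS-friendly class name such as "total-hits"
        def self.slugify(string)
          string.downcase.gsub(/ +/, '-')
        end
      end
    end
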
@@ -0,0 +1,13 @@
+<%=
+  # shortens long URLs and long descriptions
+  shortened = Emitter::shorten(report[:rows], report[:header], data[:width])
+
+  # build and style the table
+  table = Terminal::Table.new headings: report[:header], rows: shortened
+  table.style = { border_i: "|" }
+  columns = report[:header].size - 1
+  (0..columns).map { |i| table.align_column(i, report[:column_alignment][i] || :left) }
+
+  # return it
+  table
+%>
@@ -15,7 +15,7 @@
     <%= data[:log_size] %> <span class="stats-list-label">Events</span>
   </li>
   <li class="stats-list-positive">
-    <td><%= "%.2f" % (data[:log_size] / data[:duration]) %>
+    <%= "%.2f" % (data[:log_size] / data[:duration]) %>
     <span class="stats-list-label">Parsed Events/sec</span>
   </li>
 </ul>
@@ -1,9 +1,12 @@
 <%=
-  table = Terminal::Table.new rows: [ ["Analysis started at", data[:started_at].to_s ],
-                                      ["Analysis ended at", data[:ended_at].to_s ],
-                                      ["Duration", "%02d:%02d" % [data[:duration] / 60, data[:duration] % 60] ],
-                                      ["Events", "%9d" % data[:log_size] ],
-                                      ["Parsed events/sec", "%.2f" % (data[:log_size] / data[:duration]) ] ]
+  table = Terminal::Table.new rows: [
+    ["Analysis started at", data[:started_at].to_s ],
+    ["Analysis ended at", data[:ended_at].to_s ],
+    ["Duration", "%02d:%02d" % [data[:duration] / 60, data[:duration] % 60] ],
+    ["Events", "%9d" % data[:log_size] ],
+    ["Parsed events/sec", "%.2f" % (data[:log_size] / data[:duration]) ]
+  ]
+  table.style = { border_i: "|" }
   table.align_column(2, :right)
   table
 %>
@@ -1,11 +1,10 @@
 <script>
-  /* this is used both by Vega and DataTable */
+  /* this is used both by Vega and DataTable for <%= report[:title] %>*/
   data_<%= index %> = [
     <% report[:rows].each do |row| %>
       {
         <% report[:header].each_with_index do |h, i| %>
-          <% resized_row = (row[i] || '').size > 150 ? "#{row[i][0..150]...}" : (row[i] || "") %>
-          "<%= h %>": <%= (row[i].class == Integer or row[i].class == Float) ? row[i] : "\"#{Emitter::escape_javascript(resized_row)}\"" %>,
+          "<%= h %>": "<%= Emitter::process row[i] %>",
        <% end %>
      },
    <% end %>
@@ -23,6 +23,6 @@
   </li>
   <li class="stats-list-negative">
     <%= data[:total_unique_visits] != 0 ? data[:total_hits] / data[:total_unique_visits] : "N/A" %>
-    <span class="stats-list-label">Page Visited / Unique Visitor</span>
+    <span class="stats-list-label">Hits / Unique Visitor</span>
   </li>
 </ul>
@@ -1,10 +1,13 @@
 <%=
-  table = Terminal::Table.new rows: [ ["Input File", data[:log_file] || "stdin" ],
-                                      ["Period Analyzed", "#{data[:first_day_in_analysis]} -- #{data[:last_day_in_analysis]}" ],
-                                      ["Days", data[:total_days_in_analysis] ],
-                                      ["Events", data[:events] ],
-                                      ["Unique Visits", data[:total_unique_visits] ],
-                                      ["Avg. Events per Visit", data[:total_unique_visits] != 0 ? data[:events] / data[:total_unique_visits] : "N/A" ]
-                                    ]
-  table
+  table = Terminal::Table.new rows: [
+    ["From", data[:first_day_in_analysis]],
+    ["To", data[:last_day_in_analysis]],
+    ["Days", data[:total_days_in_analysis]],
+    ["Hits", data[:total_hits]],
+    ["Unique Visits", data[:total_unique_visits]],
+    ["Unique Visits / Day", data[:total_days_in_analysis] > 0 ? "%.2f" % (data[:total_unique_visits] / data[:total_days_in_analysis].to_f) : "N/A"],
+    ["Hits/Unique Visitor", data[:total_unique_visits] != 0 ? data[:total_hits] / data[:total_unique_visits] : "N/A"]
+  ]
+  table.style = { border_i: "|" }
+  table
 %>
@@ -220,217 +220,6 @@
     </article>
   </div>
 
-  <% @reports = [
-    { title: "Daily Distribution",
-      header: ["Day", "DOW", "Hits", "Visits", "Size"],
-      rows: data[:daily_distribution],
-      vega_spec: {
-        "layer": [
-          {
-            "mark": {
-              "type": "line",
-              "point": {
-                "filled": false,
-                "fill": "white"
-              }
-            },
-            "encoding": {
-              "y": {"field": "Hits", "type": "quantitative"}
-            }
-          },
-          {
-            "mark": {
-              "type": "text",
-              "color": "#3E5772",
-              "align": "middle",
-              "baseline": "top",
-              "dx": -10,
-              "yOffset": -15
-            },
-            "encoding": {
-              "text": {"field": "Hits", "type": "quantitative"},
-              "y": {"field": "Hits", "type": "quantitative"}
-            }
-          },
-
-          {
-            "mark": {
-              "type": "line",
-              "color": "#A52A2A",
-              "point": {
-                "color": "#A52A2A",
-                "filled": false,
-                "fill": "white",
-              }
-            },
-            "encoding": {
-              "y": {"field": "Visits", "type": "quantitative"}
-            }
-          },
-
-          {
-            "mark": {
-              "type": "text",
-              "color": "#A52A2A",
-              "align": "middle",
-              "baseline": "top",
-              "dx": -10,
-              "yOffset": -15
-            },
-            "encoding": {
-              "text": {"field": "Visits", "type": "quantitative"},
-              "y": {"field": "Visits", "type": "quantitative"}
-            }
-          },
-
-        ],
-        "encoding": {
-          "x": {"field": "Day", "type": "temporal"},
-        }
-      }
-
-    },
-    { title: "Time Distribution",
-      header: ["Hour", "Hits", "Visits", "Size"],
-      rows: data[:time_distribution],
-      vega_spec: {
-        "layer": [
-          {
-            "mark": "bar"
-          },
-          {
-            "mark": {
-              "type": "text",
-              "align": "middle",
-              "baseline": "top",
-              "dx": -10,
-              "yOffset": -15
-            },
-            "encoding": {
-              "text": {"field": "Hits", "type": "quantitative"},
-              "y": {"field": "Hits", "type": "quantitative"}
-            }
-          },
-        ],
-        "encoding": {
-          "x": {"field": "Hour", "type": "nominal"},
-          "y": {"field": "Hits", "type": "quantitative"}
-        }
-      }
-    },
-    { title: "20_ and 30_ on HTML pages",
-      header: ["Path", "Hits", "Visits", "Size", "Status"],
-      rows: data[:most_requested_pages],
-      datatable_options: "columnDefs: [{ width: \"40%\", targets: 0 } ]"
-    },
-    { title: "20_ and 30_ on other resources",
-      header: ["Path", "Hits", "Visits", "Size", "Status"],
-      rows: data[:most_requested_resources],
-      datatable_options: "columnDefs: [{ width: \"40%\", targets: 0 } ]"
-    },
-    { title: "40_ and 50_x on HTML pages",
-      header: ["Path", "Hits", "Visits", "Status"],
-      rows: data[:missed_pages],
-      datatable_options: "columnDefs: [{ width: \"40%\", targets: 0 } ]"
-    },
-    { title: "40_ and 50_ on other resources",
-      header: ["Path", "Hits", "Visits", "Status"],
-      rows: data[:missed_resources],
-      datatable_options: "columnDefs: [{ width: \"40%\", targets: 0 } ]"
-    },
-    { title: "Statuses",
-      header: ["Status", "Count"],
-      rows: data[:statuses],
-      vega_spec: {
-        "mark": "bar",
-        "encoding": {
-          "x": {"field": "Status", "type": "nominal"},
-          "y": {"field": "Count", "type": "quantitative"}
-        }
-      }
-    },
-    { title: "Daily Statuses",
-      header: ["Date", "S_2xx", "S_3xx", "S_4xx"],
-      rows: data[:statuses_by_day],
-      vega_spec: {
-        "transform": [ {"fold": ["S_2xx", "S_3xx", "S_4xx" ] }],
-        "mark": "bar",
-        "encoding": {
-          "x": {
-            "field": "Date",
-            "type": "ordinal",
-            "timeUnit": "day",
-          },
-          "y": {
-            "aggregate": "sum",
-            "field": "value",
-            "type": "quantitative"
-          },
-          "color": {
-            "field": "key",
-            "type": "nominal",
-            "scale": {
-              "domain": ["S_2xx", "S_3xx", "S_4xx"],
-              "range": ["#228b22", "#ff8c00", "#a52a2a"]
-            },
-          }
-        }
-      }
-    },
-    { title: "Browsers",
-      header: ["Browser", "Hits", "Visits", "Size"],
-      rows: data[:browsers],
-      vega_spec: {
-        "layer": [
-          { "mark": "bar" },
-          {
-            "mark": {
-              "type": "text",
-              "align": "middle",
-              "baseline": "top",
-              "dx": -10,
-              "yOffset": -15
-            },
-            "encoding": {
-              "text": {"field": "Hits", "type": "quantitative"},
-            }
-          },
-        ],
-        "encoding": {
-          "x": {"field": "Browser", "type": "nominal"},
-          "y": {"field": "Hits", "type": "quantitative"}
-        }
-      }
-    },
-    { title: "Platforms",
-      header: ["Platform", "Hits", "Visits", "Size"],
-      rows: data[:platforms],
-      vega_spec: {
-        "layer": [
-          { "mark": "bar" },
-          {
-            "mark": {
-              "type": "text",
-              "align": "middle",
-              "baseline": "top",
-              "dx": -10,
-              "yOffset": -15
-            },
-            "encoding": {
-              "text": {"field": "Hits", "type": "quantitative"},
-            }
-          },
-        ],
-        "encoding": {
-          "x": {"field": "Platform", "type": "nominal"},
-          "y": {"field": "Hits", "type": "quantitative"}
-        }
-      }
-    },
-    { title: "IPs", header: ["IPs", "Hits", "Visits", "Size", "Country"], rows: data[:ips] },
-    { title: "Referers", header: ["Referers", "Hits", "Visits", "Size"], rows: data[:referers], col: "small-12 cell" },
-  ]
-  %>
   <div class="grid-x grid-margin-x">
     <% @reports.each_with_index do |report, index| %>
       <article class="card cell <%= report[:col] || "small-12 large-6" %>" >
@@ -565,7 +354,7 @@
 
   <div class="small-12 large-6 cell">
     <article>
-      <h2 id="performance"> Performance</h2>
+      <h2 id="performance">Performance</h2>
 
       <%= render "performance.html.erb", data: data %>
     </article>