request-log-analyzer 1.10.1 → 1.11.0
- data/bin/request-log-analyzer +0 -1
- data/lib/request_log_analyzer.rb +15 -29
- data/lib/request_log_analyzer/aggregator.rb +5 -5
- data/lib/request_log_analyzer/aggregator/database_inserter.rb +2 -1
- data/lib/request_log_analyzer/controller.rb +0 -3
- data/lib/request_log_analyzer/database.rb +6 -7
- data/lib/request_log_analyzer/file_format.rb +42 -13
- data/lib/request_log_analyzer/file_format/apache.rb +1 -1
- data/lib/request_log_analyzer/file_format/delayed_job2.rb +2 -2
- data/lib/request_log_analyzer/file_format/delayed_job21.rb +2 -2
- data/lib/request_log_analyzer/file_format/haproxy.rb +107 -13
- data/lib/request_log_analyzer/file_format/mysql.rb +5 -5
- data/lib/request_log_analyzer/file_format/rails3.rb +7 -0
- data/lib/request_log_analyzer/filter.rb +4 -5
- data/lib/request_log_analyzer/line_definition.rb +6 -4
- data/lib/request_log_analyzer/output.rb +3 -5
- data/lib/request_log_analyzer/source.rb +3 -4
- data/lib/request_log_analyzer/source/log_parser.rb +56 -4
- data/lib/request_log_analyzer/tracker.rb +8 -8
- data/request-log-analyzer.gemspec +3 -3
- data/spec/fixtures/mysql_slow_query.log +0 -1
- data/spec/integration/command_line_usage_spec.rb +0 -5
- data/spec/lib/helpers.rb +2 -2
- data/spec/lib/matchers.rb +38 -7
- data/spec/lib/mocks.rb +1 -5
- data/spec/unit/database/base_class_spec.rb +1 -0
- data/spec/unit/file_format/amazon_s3_format_spec.rb +58 -55
- data/spec/unit/file_format/apache_format_spec.rb +74 -162
- data/spec/unit/file_format/common_regular_expressions_spec.rb +51 -26
- data/spec/unit/file_format/delayed_job21_format_spec.rb +22 -31
- data/spec/unit/file_format/delayed_job2_format_spec.rb +27 -32
- data/spec/unit/file_format/delayed_job_format_spec.rb +44 -63
- data/spec/unit/file_format/haproxy_format_spec.rb +69 -71
- data/spec/unit/file_format/line_definition_spec.rb +26 -33
- data/spec/unit/file_format/merb_format_spec.rb +22 -37
- data/spec/unit/file_format/mysql_format_spec.rb +80 -123
- data/spec/unit/file_format/oink_format_spec.rb +29 -61
- data/spec/unit/file_format/postgresql_format_spec.rb +2 -4
- data/spec/unit/file_format/rack_format_spec.rb +49 -44
- data/spec/unit/file_format/rails3_format_spec.rb +17 -20
- data/spec/unit/file_format/rails_format_spec.rb +52 -68
- data/spec/unit/file_format/w3c_format_spec.rb +40 -39
- data/spec/unit/source/log_parser_spec.rb +1 -1
- metadata +4 -7
- data/lib/mixins/gets_memory_protection.rb +0 -80
- data/lib/request_log_analyzer/output/fancy_html.rb +0 -44
- data/lib/request_log_analyzer/source/database_loader.rb +0 -87
data/spec/unit/file_format/delayed_job2_format_spec.rb:

@@ -1,41 +1,39 @@
 require 'spec_helper'
 
-describe RequestLogAnalyzer::FileFormat::DelayedJob do
+describe RequestLogAnalyzer::FileFormat::DelayedJob2 do
 
-
-    RequestLogAnalyzer::FileFormat.load(:delayed_job).should be_valid
-  end
+  subject { RequestLogAnalyzer::FileFormat.load(:delayed_job2) }
 
+  it { should be_well_formed }
+  it { should have_line_definition(:job_lock).capturing(:timestamp, :job, :host, :pid) }
+  it { should have_line_definition(:job_completed).capturing(:timestamp, :duration, :host, :pid) }
+  it { should have(4).report_trackers }
+
   describe '#parse_line' do
 
-
-
-
+    let(:job_lock_sample1)     { "2010-05-17T17:37:34+0000: * [Worker(delayed_job host:hostname.co.uk pid:11888)] acquired lock on S3FileJob" }
+    let(:job_lock_sample2)     { "2010-05-17T17:37:34+0000: * [Worker(delayed_job.0 host:hostname.co.uk pid:11888)] acquired lock on S3FileJob" }
+    let(:job_completed_sample) { '2010-05-17T17:37:35+0000: * [JOB] delayed_job host:hostname.co.uk pid:11888 completed after 1.0676' }
+    let(:starting_sample)      { '2010-05-17T17:36:44+0000: *** Starting job worker delayed_job host:hostname.co.uk pid:11888' }
+    let(:summary_sample)       { '3 jobs processed at 0.3163 j/s, 0 failed ...' }
+
+    it { should parse_line(job_lock_sample1, 'with a single worker').as(:job_lock).and_capture(
+         :timestamp => 20100517173734, :job => 'S3FileJob', :host => 'hostname.co.uk', :pid => 11888) }
 
-    it
-
-      @file_format.should parse_line(line).as(:job_lock).and_capture(:timestamp => 20100517173734,
-        :job => 'S3FileJob', :host => 'hostname.co.uk', :pid => 11888)
-    end
+    it { should parse_line(job_lock_sample2, 'with multiple workers').as(:job_lock).and_capture(
+         :timestamp => 20100517173734, :job => 'S3FileJob', :host => 'hostname.co.uk', :pid => 11888) }
 
-    it
-
-      @file_format.should parse_line(line).as(:job_lock).and_capture(:timestamp => 20100517173734,
-        :job => 'S3FileJob', :host => 'hostname.co.uk', :pid => 11888)
-    end
+    it { should parse_line(job_completed_sample).as(:job_completed).and_capture(:timestamp => 20100517173735,
+         :duration => 1.0676, :host => 'hostname.co.uk', :pid => 11888) }
 
-    it
-
-
-        :duration => 1.0676, :host => 'hostname.co.uk', :pid => 11888)
-    end
+    it { should_not parse_line(starting_sample, 'a starting line') }
+    it { should_not parse_line(summary_sample, 'a summary line') }
+    it { should_not parse_line('nonsense', 'a nonsense line') }
   end
 
   describe '#parse_io' do
-
-
-    end
-
+    let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
+
     it "should parse a batch of completed jobs without warnings" do
       fragment = <<-EOLOG
        2010-05-17T17:36:44+0000: *** Starting job worker delayed_job host:hostname.co.uk pid:11888
@@ -49,12 +47,9 @@ describe RequestLogAnalyzer::FileFormat::DelayedJob do
        2010-05-19T11:47:26+0000: Exiting...
      EOLOG
 
-
-
-
-      @log_parser.parse_io(StringIO.new(fragment)) do |request|
-        request_counter.hit! if request.kind_of?(RequestLogAnalyzer::FileFormat::DelayedJob2::Request)
-      end
+      log_parser.should_receive(:handle_request).exactly(3).times
+      log_parser.should_not_receive(:warn)
+      log_parser.parse_string(fragment)
     end
   end
 end
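The rewritten spec above replaces instance variables set up in before blocks with an implicit subject and let definitions, and asserts against the parser instead of counting requests by hand. A minimal usage sketch of the underlying API, not taken from the gem itself (it assumes the gem is installed and required as request_log_analyzer): load the DelayedJob2 format, wrap it in a LogParser, and let parse_io yield each parsed request.

require 'request_log_analyzer'
require 'stringio'

# A lock line followed by its completion line; the DelayedJob2 format should
# combine such a pair into a single request.
fragment = "2010-05-17T17:37:34+0000: * [Worker(delayed_job host:hostname.co.uk pid:11888)] acquired lock on S3FileJob\n" \
           "2010-05-17T17:37:35+0000: * [JOB] delayed_job host:hostname.co.uk pid:11888 completed after 1.0676\n"

format     = RequestLogAnalyzer::FileFormat.load(:delayed_job2)
log_parser = RequestLogAnalyzer::Source::LogParser.new(format)

log_parser.parse_io(StringIO.new(fragment)) do |request|
  # Each yielded request should be a RequestLogAnalyzer::FileFormat::DelayedJob2::Request.
  puts request.class
end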
data/spec/unit/file_format/delayed_job_format_spec.rb:

@@ -1,83 +1,64 @@
 require 'spec_helper'
 
 describe RequestLogAnalyzer::FileFormat::DelayedJob do
+
+  subject { RequestLogAnalyzer::FileFormat.load(:delayed_job) }
 
-  it
-
-
+  it { should be_well_formed }
+  it { should have_line_definition(:job_lock).capturing(:job) }
+  it { should have_line_definition(:job_completed).capturing(:completed_job, :duration) }
+  it { should have_line_definition(:job_lock_failed).capturing(:locked_job) }
+  it { should have_line_definition(:job_failed).capturing(:failed_job, :attempts, :exception) }
+  it { should have(3).report_trackers }
 
   describe '#parse_line' do
-
-
-
+    let(:job_lock_sample)        { '* [JOB] acquiring lock on BackgroundJob::ThumbnailSaver' }
+    let(:job_completed_sample)   { '* [JOB] BackgroundJob::ThumbnailSaver completed after 0.7932' }
+    let(:job_lock_failed_sample) { '* [JOB] failed to acquire exclusive lock for BackgroundJob::ThumbnailSaver' }
+    let(:job_failed_sample)      { "* [JOB] BackgroundJob::ThumbnailSaver failed with ActiveRecord::RecordNotFound: Couldn't find Design with ID=20413443 - 1 failed attempts" }
+    let(:summary_sample)         { '1 jobs processed at 1.0834 j/s, 0 failed ...' }
 
-    it
-
-
-
-
-    it "should parse a :job_completed line correctly" do
-      line = '* [JOB] BackgroundJob::ThumbnailSaver completed after 0.7932'
-      @file_format.should parse_line(line).as(:job_completed).and_capture(
-        :duration => 0.7932, :completed_job => 'BackgroundJob::ThumbnailSaver')
-    end
-
-    it "should pase a :job_failed line correctly" do
-      line = "* [JOB] BackgroundJob::ThumbnailSaver failed with ActiveRecord::RecordNotFound: Couldn't find Design with ID=20413443 - 1 failed attempts"
-      @file_format.should parse_line(line).as(:job_failed).and_capture(:attempts => 1,
-        :failed_job => 'BackgroundJob::ThumbnailSaver', :exception => 'ActiveRecord::RecordNotFound')
-    end
-
-    it "should parse a failed job lock line correctly" do
-      line = "* [JOB] failed to acquire exclusive lock for BackgroundJob::ThumbnailSaver"
-      @file_format.should parse_line(line).as(:job_lock_failed).and_capture(:locked_job => 'BackgroundJob::ThumbnailSaver')
-    end
-
-    # it "should pase a :batch_completed line correctly" do
-    #   line = '1 jobs processed at 1.0834 j/s, 0 failed ...'
-    #   @file_format.should parse_line(line).as(:batch_completed).and_capture(
-    #     :mean_duration => 0.7932, :total_amount => 1, :failed_amount => 0)
-    # end
+    it { should parse_line(job_lock_sample).as(:job_lock).and_capture(:job => 'BackgroundJob::ThumbnailSaver') }
+    it { should parse_line(job_completed_sample).as(:job_completed).and_capture(:duration => 0.7932, :completed_job => 'BackgroundJob::ThumbnailSaver') }
+    it { should parse_line(job_lock_failed_sample).as(:job_lock_failed).and_capture(:locked_job => 'BackgroundJob::ThumbnailSaver') }
+    it { should parse_line(job_failed_sample).as(:job_failed).and_capture(:attempts => 1, :failed_job => 'BackgroundJob::ThumbnailSaver', :exception => 'ActiveRecord::RecordNotFound') }
 
+    it { should_not parse_line(summary_sample, 'a summary line') }
+    it { should_not parse_line('nonsense', 'a nonsense line') }
   end
-
+
   describe '#parse_io' do
-
-      @log_parser = RequestLogAnalyzer::Source::LogParser.new(RequestLogAnalyzer::FileFormat.load(:delayed_job))
-    end
+    let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
 
     it "should parse a batch of completed jobs without warnings" do
-      fragment =
-
-
-
-
-
-
-      @log_parser.should_not_receive(:warn)
+      fragment = log_snippet(<<-EOLOG)
+        * [JOB] acquiring lock on BackgroundJob::ThumbnailSaver
+        * [JOB] BackgroundJob::ThumbnailSaver completed after 0.9114
+        * [JOB] acquiring lock on BackgroundJob::ThumbnailSaver
+        * [JOB] BackgroundJob::ThumbnailSaver completed after 0.9110
+        2 jobs processed at 1.0832 j/s, 0 failed ...
+      EOLOG
 
-
-
-
+      log_parser.should_receive(:handle_request).twice
+      log_parser.should_not_receive(:warn)
+      log_parser.parse_io(fragment)
     end
 
     it "should parse a batch with a failed job without warnings" do
-      fragment =
-
-
-
-
-
-
-
+      fragment = log_snippet(<<-EOLOG)
+        * [JOB] acquiring lock on BackgroundJob::ThumbnailSaver
+        * [JOB] BackgroundJob::ThumbnailSaver completed after 1.0627
+        * [JOB] acquiring lock on BackgroundJob::ThumbnailSaver
+        * [JOB] BackgroundJob::ThumbnailSaver failed with ActiveRecord::RecordNotFound: Couldn't find Design with ID=20413443 - 3 failed attempts
+        Couldn't find Design with ID=20413443
+        * [JOB] acquiring lock on BackgroundJob::ThumbnailSaver
+        * [JOB] failed to acquire exclusive lock for BackgroundJob::ThumbnailSaver
+        2 jobs processed at 1.4707 j/s, 1 failed ...
+      EOLOG
 
-
-
-
-      @log_parser.parse_io(StringIO.new(fragment)) do |request|
-        request_counter.hit! if request.kind_of?(RequestLogAnalyzer::FileFormat::DelayedJob::Request)
-      end
+      log_parser.should_receive(:handle_request).exactly(3).times
+      log_parser.should_not_receive(:warn)
+      log_parser.parse_io(fragment)
    end
  end
 end
-
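Both rewritten delayed_job specs build their fixtures with a log_snippet helper from data/spec/lib/helpers.rb (also touched in this release). Its real implementation is not part of this diff; a hypothetical stand-in that matches how the specs call it — a heredoc or several separate lines in, an IO that LogParser#parse_io can read out — could look like this:

require 'stringio'

# Hypothetical stand-in for the gem's log_snippet spec helper (the real one
# lives in data/spec/lib/helpers.rb and may differ): join the given fragments,
# make sure the snippet ends with a newline, and wrap it in a StringIO so
# LogParser#parse_io can read it.
def log_snippet(*fragments)
  text = fragments.join("\n")
  text << "\n" unless text.end_with?("\n")
  StringIO.new(text)
end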
data/spec/unit/file_format/haproxy_format_spec.rb:

@@ -2,83 +2,81 @@ require 'spec_helper'
 
 describe RequestLogAnalyzer::FileFormat::Haproxy do
 
-
-
-    @log_parser = RequestLogAnalyzer::Source::LogParser.new(@file_format)
-    @sample1 = 'Feb 6 12:14:14 localhost haproxy[14389]: 10.0.1.2:33317 [06/Feb/2009:12:14:14.655] http-in static/srv1 10/0/30/69/109 200 2750 - - ---- 1/1/1/1/0 0/0 {1wt.eu} {} "GET /index.html HTTP/1.1"'
-    @sample2 = 'haproxy[18113]: 127.0.0.1:34549 [15/Oct/2003:15:19:06.103] px-http px-http/<NOSRV> -1/-1/-1/-1/+50001 408 +2750 - - cR-- 2/2/2/0/+2 0/0 ""'
-  end
+  subject { RequestLogAnalyzer::FileFormat.load(:haproxy) }
+  let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
 
-  it
-    @file_format.should be_valid
-  end
+  it { should be_well_formed }
 
-  it
-
-
+  it { should have_line_definition(:haproxy13).capturing(:client_ip, :timestamp, :frontend_name, :backend_name, :server_name, :tq, :tw, :tc, :tr, :tt, :status_code, :bytes_read, :captured_request_cookie, :captured_response_cookie, :termination_event_code, :terminated_session_state, :clientside_persistence_cookie, :serverside_persistence_cookie, :actconn, :feconn, :beconn, :srv_conn, :retries, :srv_queue, :backend_queue, :captured_request_headers, :captured_response_headers, :http_request) }
+  it { should have_line_definition(:haproxy12).capturing(:client_ip, :timestamp, :frontend_name, :server_name, :tq, :tw, :tc, :tr, :tt, :status_code, :bytes_read, :captured_request_cookie, :captured_response_cookie, :termination_event_code, :terminated_session_state, :clientside_persistence_cookie, :serverside_persistence_cookie, :srv_conn, :listener_conn, :process_conn, :srv_queue, :backend_queue, :captured_request_headers, :captured_response_headers, :http_request) }
+  it { should have_line_definition(:haproxy11).capturing(:client_ip, :timestamp, :frontend_name, :server_name, :tq, :tc, :tr, :tt, :status_code, :bytes_read, :captured_request_cookie, :captured_response_cookie, :termination_event_code, :terminated_session_state, :clientside_persistence_cookie, :serverside_persistence_cookie, :listener_conn, :process_conn, :captured_request_headers, :captured_response_headers, :http_request) }
 
-  it
-    @file_format.parse_line(@sample1).should include(:line_definition, :captures)
-  end
+  it { should have(14).report_trackers }
 
-
-
-
+  let(:sample_haproxy13) { 'Feb 6 12:14:14 localhost haproxy[14389]: 10.0.1.2:33317 [06/Feb/2009:12:14:14.655] http-in static/srv1 10/0/30/69/109 200 2750 - - ---- 1/1/1/1/0 0/0 {1wt.eu} {} "GET /index.html HTTP/1.1"' }
+  let(:sample_haproxy12) { 'Mar 15 06:36:49 localhost haproxy[9367]: 127.0.0.1:38990 [15/Mar/2011:06:36:45.103] as-proxy mc-search-2 0/0/0/730/731 200 29404 - - --NN 2/54/54 0/0 {66.249.68.216} {} "GET /neighbor/26014153 HTTP/1.0" ' }
+  let(:sample_haproxy11) { 'haproxy[674]: 127.0.0.1:33320 [15/Oct/2003:08:32:17] relais-http Srv1 9/7/14/30 502 243 - - PH-- 2/3 "GET /cgi-bin/bug.cgi? HTTP/1.0"' }
+  let(:sample_errors)    { 'haproxy[18113]: 127.0.0.1:34549 [15/Oct/2003:15:19:06.103] px-http px-http/<NOSRV> -1/-1/-1/-1/+50001 408 +2750 - - cR-- 2/2/2/0/+2 0/0 ""' }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    request[:srv_queue].should == 0
-    request[:backend_queue].should == 0
-    request[:captured_request_headers].should == '{1wt.eu}'
-    request[:captured_response_headers].should == nil
-    request[:http_request].should == 'GET /index.html HTTP/1.1'
-    end
-  end
+  describe '#parse_line' do
+    it { should parse_line(sample_haproxy13, 'an haproxy 1.3 access line').and_capture(
+      :client_ip => '10.0.1.2', :tq => 0.010, :captured_request_cookie => nil,
+      :timestamp => 20090206121414, :tw => 0.000, :captured_response_cookie => nil,
+      :frontend_name => 'http-in', :tc => 0.030, :clientside_persistence_cookie => nil,
+      :backend_name => 'static', :tr => 0.069, :serverside_persistence_cookie => nil,
+      :server_name => 'srv1', :tt => 0.109, :termination_event_code => nil,
+      :status_code => 200, :actconn => 1, :terminated_session_state => nil,
+      :bytes_read => 2750, :feconn => 1, :captured_request_headers => '{1wt.eu}',
+      :backend_queue => 0, :beconn => 1, :captured_response_headers => nil,
+      :retries => 0, :srv_conn => 1, :srv_queue => 0,
+      :http_request => 'GET /index.html HTTP/1.1')
+    }
+
+    it { should parse_line(sample_haproxy12, 'an haproxy 1.2 access line').and_capture(
+      :client_ip => '127.0.0.1', :tq => 0.000, :captured_request_cookie => nil,
+      :timestamp => 20110315063645, :tw => 0.000, :captured_response_cookie => nil,
+      :frontend_name => 'as-proxy', :tc => 0.000, :clientside_persistence_cookie => 'N',
+      :server_name => 'mc-search-2', :tr => 0.730, :serverside_persistence_cookie => 'N',
+      :status_code => 200, :tt => 0.731, :termination_event_code => nil,
+      :bytes_read => 29404, :listener_conn => 54, :terminated_session_state => nil,
+      :backend_queue => 0, :process_conn => 54, :captured_request_headers => '{66.249.68.216}',
+      :srv_queue => 0, :srv_conn => 2, :captured_response_headers => nil,
+      :http_request => 'GET /neighbor/26014153 HTTP/1.0')
+    }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    it { should parse_line(sample_haproxy11, 'an haproxy 1.1 access line').and_capture(
+      :client_ip => '127.0.0.1', :tq => 0.009, :captured_request_cookie => nil,
+      :timestamp => 20031015083217, :tc => 0.007, :captured_response_cookie => nil,
+      :frontend_name => 'relais-http', :tr => 0.014, :clientside_persistence_cookie => nil,
+      :server_name => 'Srv1', :tt => 0.030, :serverside_persistence_cookie => nil,
+      :status_code => 502, :listener_conn => 2, :termination_event_code => 'P',
+      :bytes_read => 243, :process_conn => 3, :terminated_session_state => 'H',
+      :captured_request_headers => nil, :captured_response_headers => nil,
+      :http_request => nil)
+    }
+
+    it { should parse_line(sample_errors, 'a failed access line').and_capture(
+      :timestamp => 20031015151906, :tq => nil, :captured_request_cookie => nil,
+      :server_name => '<NOSRV>', :tw => nil, :captured_response_cookie => nil,
+      :bytes_read => 2750, :tc => nil, :clientside_persistence_cookie => nil,
+      :retries => 2, :tr => nil, :serverside_persistence_cookie => nil,
+      :http_request => nil, :tt => 50.001, :termination_event_code => 'c',
+      :terminated_session_state => 'R',
+      :captured_request_headers => nil,
+      :captured_response_headers => nil)
+    }
+
+    it { should_not parse_line('nonsense') }
+  end
+
+  describe '#parse_io' do
+    let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
+    let(:snippet) { log_snippet(sample_haproxy13, sample_haproxy12, sample_haproxy11, sample_errors, 'nonsense') }
+
+    it "should parse a log snippet without warnings" do
+      log_parser.should_receive(:handle_request).exactly(4).times
+      log_parser.should_not_receive(:warn)
+      log_parser.parse_io(snippet)
     end
   end
-
 end
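The removed assertions above relied on FileFormat#parse_line returning a hash with the matched :line_definition and the raw :captures; the new matcher-based tests exercise the same call. A short sketch of parsing one of the HAProxy samples outside the spec suite (assuming the gem is installed; the printed values are illustrative):

require 'request_log_analyzer'

sample = 'Feb 6 12:14:14 localhost haproxy[14389]: 10.0.1.2:33317 [06/Feb/2009:12:14:14.655] ' \
         'http-in static/srv1 10/0/30/69/109 200 2750 - - ---- 1/1/1/1/0 0/0 {1wt.eu} {} "GET /index.html HTTP/1.1"'

haproxy = RequestLogAnalyzer::FileFormat.load(:haproxy)
parsed  = haproxy.parse_line(sample)

# For a recognised line the result is a hash holding the matching line
# definition (here the :haproxy13 one) and the raw capture strings.
puts parsed.keys.inspect
puts parsed[:captures].inspect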
data/spec/unit/file_format/line_definition_spec.rb:

@@ -2,74 +2,67 @@ require 'spec_helper'
 
 describe RequestLogAnalyzer::LineDefinition do
 
-
-  @line_definition = RequestLogAnalyzer::LineDefinition.new(:test, {
+  subject { RequestLogAnalyzer::LineDefinition.new(:test, {
     :teaser => /Testing /,
     :regexp => /Testing (\w+), tries\: (\d+)/,
     :captures => [{ :name => :what, :type => :string }, { :name => :tries, :type => :integer }]
   })
-
+  }
 
   describe '#matches' do
 
     it "should return false on an unmatching line" do
-
+      subject.matches("nonmatching").should be_false
     end
 
     it "should return false when only the teaser matches" do
-
+      subject.matches("Testing LineDefinition").should be_false
     end
 
     it "should parse a line and capture the expected values" do
-
+      subject.matches("Testing LineDefinition, tries: 123").should == {:line_definition => subject, :captures => ['LineDefinition', '123'] }
     end
 
     it "should know which names it can capture" do
-
-
-
+      subject.captures?(:what).should be_true
+      subject.captures?(:tries).should be_true
+      subject.captures?(:bogus).should be_false
     end
-
   end
 
   describe '#convert_captured_values' do
-
-    before(:each) do
-      @request = mock('request')
-      @request.stub!(:convert_value).and_return('foo')
-    end
+    let(:request) { mock('request', :convert_value => 'foo') }
 
     it "should call convert_value for every captured value" do
-
-
+      request.should_receive(:convert_value).twice
+      subject.convert_captured_values(['test', '123'], request)
     end
 
     it "should set the converted values" do
-
+      subject.convert_captured_values(['test', '123'], request).should == {:what => 'foo', :tries => 'foo'}
     end
 
     context 'when using :provides option' do
-
-
-
-
-
-
-
-
+
+      subject { RequestLogAnalyzer::LineDefinition.new(:test,
+        :regexp => /Hash\: (\{.+\})/,
+        :captures => [{ :name => :hash, :type => :hash, :provides => {:bar => :string}}])
+      }
+
+      before do
+        request.stub!(:convert_value).with("{:bar=>'baz'}", anything).and_return(:bar => 'baz')
+        request.stub!(:convert_value).with('baz', anything).and_return('foo')
       end
 
       it "should call Request#convert_value for the initial hash and the value in the hash" do
-
-
-
+        request.should_receive(:convert_value).with("{:bar=>'baz'}", anything).and_return(:bar => 'baz')
+        request.should_receive(:convert_value).with("baz", anything)
+        subject.convert_captured_values(["{:bar=>'baz'}"], request)
      end
 
-      it "should
-
-        @ld.convert_captured_values(["{:bar=>'baz'}"], @request)[:bar].should eql('foo')
+      it "should return the converted hash" do
+        subject.convert_captured_values(["{:bar=>'baz'}"], request).should include(:bar => 'foo')
      end
    end
-
  end
 end
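The LineDefinition API exercised above can also be used on its own. A brief sketch (again assuming the gem is required as request_log_analyzer) of defining a line and matching against it:

require 'request_log_analyzer'

definition = RequestLogAnalyzer::LineDefinition.new(:test,
  :teaser   => /Testing /,
  :regexp   => /Testing (\w+), tries\: (\d+)/,
  :captures => [{ :name => :what,  :type => :string },
                { :name => :tries, :type => :integer }])

definition.matches("nonmatching")   # falsy: the teaser already rules the line out
definition.captures?(:tries)        # => true

match = definition.matches("Testing LineDefinition, tries: 123")
# The hash carries the definition itself plus the raw captures;
# convert_captured_values(captures, request) then turns them into typed
# values such as :what => "LineDefinition" and :tries => 123.
puts match[:captures].inspect       # => ["LineDefinition", "123"]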