request-log-analyzer 1.13.1 → 1.13.3

Files changed (115)
  1. checksums.yaml +4 -4
  2. data/.gitignore +1 -0
  3. data/bin/console +17 -0
  4. data/lib/cli/command_line_arguments.rb +29 -36
  5. data/lib/cli/database_console.rb +1 -3
  6. data/lib/cli/database_console_init.rb +11 -11
  7. data/lib/cli/progressbar.rb +30 -32
  8. data/lib/cli/tools.rb +20 -23
  9. data/lib/request_log_analyzer.rb +8 -8
  10. data/lib/request_log_analyzer/aggregator.rb +4 -7
  11. data/lib/request_log_analyzer/aggregator/database_inserter.rb +10 -13
  12. data/lib/request_log_analyzer/aggregator/echo.rb +5 -7
  13. data/lib/request_log_analyzer/aggregator/summarizer.rb +15 -18
  14. data/lib/request_log_analyzer/class_level_inheritable_attributes.rb +23 -0
  15. data/lib/request_log_analyzer/controller.rb +36 -42
  16. data/lib/request_log_analyzer/database.rb +4 -6
  17. data/lib/request_log_analyzer/database/base.rb +39 -41
  18. data/lib/request_log_analyzer/database/connection.rb +8 -10
  19. data/lib/request_log_analyzer/database/request.rb +1 -3
  20. data/lib/request_log_analyzer/database/source.rb +0 -2
  21. data/lib/request_log_analyzer/database/warning.rb +4 -6
  22. data/lib/request_log_analyzer/file_format.rb +46 -49
  23. data/lib/request_log_analyzer/file_format/amazon_s3.rb +15 -19
  24. data/lib/request_log_analyzer/file_format/apache.rb +42 -45
  25. data/lib/request_log_analyzer/file_format/delayed_job.rb +13 -15
  26. data/lib/request_log_analyzer/file_format/delayed_job2.rb +9 -11
  27. data/lib/request_log_analyzer/file_format/delayed_job21.rb +9 -11
  28. data/lib/request_log_analyzer/file_format/delayed_job3.rb +5 -8
  29. data/lib/request_log_analyzer/file_format/delayed_job4.rb +5 -8
  30. data/lib/request_log_analyzer/file_format/haproxy.rb +44 -48
  31. data/lib/request_log_analyzer/file_format/merb.rb +13 -17
  32. data/lib/request_log_analyzer/file_format/mysql.rb +21 -25
  33. data/lib/request_log_analyzer/file_format/nginx.rb +0 -2
  34. data/lib/request_log_analyzer/file_format/oink.rb +30 -31
  35. data/lib/request_log_analyzer/file_format/postgresql.rb +11 -15
  36. data/lib/request_log_analyzer/file_format/rack.rb +0 -2
  37. data/lib/request_log_analyzer/file_format/rails.rb +100 -104
  38. data/lib/request_log_analyzer/file_format/rails3.rb +19 -23
  39. data/lib/request_log_analyzer/file_format/rails_development.rb +0 -1
  40. data/lib/request_log_analyzer/file_format/w3c.rb +16 -18
  41. data/lib/request_log_analyzer/filter.rb +0 -2
  42. data/lib/request_log_analyzer/filter/anonymize.rb +4 -7
  43. data/lib/request_log_analyzer/filter/field.rb +3 -6
  44. data/lib/request_log_analyzer/filter/timespan.rb +2 -6
  45. data/lib/request_log_analyzer/line_definition.rb +16 -19
  46. data/lib/request_log_analyzer/log_processor.rb +10 -14
  47. data/lib/request_log_analyzer/mailer.rb +9 -12
  48. data/lib/request_log_analyzer/output.rb +12 -14
  49. data/lib/request_log_analyzer/output/fixed_width.rb +21 -28
  50. data/lib/request_log_analyzer/output/html.rb +11 -14
  51. data/lib/request_log_analyzer/request.rb +53 -33
  52. data/lib/request_log_analyzer/source.rb +2 -5
  53. data/lib/request_log_analyzer/source/log_parser.rb +9 -16
  54. data/lib/request_log_analyzer/tracker.rb +10 -12
  55. data/lib/request_log_analyzer/tracker/duration.rb +4 -6
  56. data/lib/request_log_analyzer/tracker/frequency.rb +9 -11
  57. data/lib/request_log_analyzer/tracker/hourly_spread.rb +8 -11
  58. data/lib/request_log_analyzer/tracker/numeric_value.rb +40 -44
  59. data/lib/request_log_analyzer/tracker/timespan.rb +5 -8
  60. data/lib/request_log_analyzer/tracker/traffic.rb +8 -10
  61. data/lib/request_log_analyzer/version.rb +1 -1
  62. data/request-log-analyzer.gemspec +6 -6
  63. data/spec/integration/command_line_usage_spec.rb +33 -33
  64. data/spec/integration/mailer_spec.rb +181 -185
  65. data/spec/integration/munin_plugins_rails_spec.rb +20 -20
  66. data/spec/integration/scout_spec.rb +40 -41
  67. data/spec/lib/helpers.rb +8 -9
  68. data/spec/lib/macros.rb +2 -4
  69. data/spec/lib/matchers.rb +20 -25
  70. data/spec/lib/mocks.rb +10 -11
  71. data/spec/lib/testing_format.rb +8 -10
  72. data/spec/spec_helper.rb +5 -1
  73. data/spec/unit/aggregator/database_inserter_spec.rb +23 -23
  74. data/spec/unit/aggregator/summarizer_spec.rb +7 -7
  75. data/spec/unit/controller/controller_spec.rb +14 -14
  76. data/spec/unit/controller/log_processor_spec.rb +3 -3
  77. data/spec/unit/database/base_class_spec.rb +36 -37
  78. data/spec/unit/database/connection_spec.rb +10 -10
  79. data/spec/unit/database/database_spec.rb +11 -11
  80. data/spec/unit/file_format/amazon_s3_format_spec.rb +66 -62
  81. data/spec/unit/file_format/apache_format_spec.rb +57 -52
  82. data/spec/unit/file_format/common_regular_expressions_spec.rb +18 -21
  83. data/spec/unit/file_format/delayed_job21_format_spec.rb +22 -16
  84. data/spec/unit/file_format/delayed_job2_format_spec.rb +22 -16
  85. data/spec/unit/file_format/delayed_job3_format_spec.rb +14 -10
  86. data/spec/unit/file_format/delayed_job4_format_spec.rb +14 -10
  87. data/spec/unit/file_format/delayed_job_format_spec.rb +12 -12
  88. data/spec/unit/file_format/file_format_api_spec.rb +19 -19
  89. data/spec/unit/file_format/format_autodetection_spec.rb +7 -7
  90. data/spec/unit/file_format/haproxy_format_spec.rb +53 -49
  91. data/spec/unit/file_format/inheritance_spec.rb +13 -0
  92. data/spec/unit/file_format/line_definition_spec.rb +35 -33
  93. data/spec/unit/file_format/merb_format_spec.rb +13 -11
  94. data/spec/unit/file_format/mysql_format_spec.rb +24 -24
  95. data/spec/unit/file_format/oink_format_spec.rb +29 -29
  96. data/spec/unit/file_format/postgresql_format_spec.rb +9 -9
  97. data/spec/unit/file_format/rack_format_spec.rb +36 -31
  98. data/spec/unit/file_format/rails3_format_spec.rb +46 -46
  99. data/spec/unit/file_format/rails_format_spec.rb +52 -53
  100. data/spec/unit/file_format/w3c_format_spec.rb +27 -24
  101. data/spec/unit/filter/anonymize_filter_spec.rb +7 -7
  102. data/spec/unit/filter/field_filter_spec.rb +26 -26
  103. data/spec/unit/filter/filter_spec.rb +4 -4
  104. data/spec/unit/filter/timespan_filter_spec.rb +22 -22
  105. data/spec/unit/mailer_spec.rb +21 -21
  106. data/spec/unit/request_spec.rb +29 -29
  107. data/spec/unit/source/log_parser_spec.rb +5 -5
  108. data/spec/unit/tracker/duration_tracker_spec.rb +23 -23
  109. data/spec/unit/tracker/frequency_tracker_spec.rb +29 -30
  110. data/spec/unit/tracker/hourly_spread_spec.rb +35 -35
  111. data/spec/unit/tracker/numeric_value_tracker_spec.rb +71 -72
  112. data/spec/unit/tracker/timespan_tracker_spec.rb +31 -31
  113. data/spec/unit/tracker/tracker_api_spec.rb +43 -44
  114. data/spec/unit/tracker/traffic_tracker_spec.rb +7 -7
  115. metadata +38 -35
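
Most of the churn in this release is mechanical style cleanup rather than behavioural change: double-quoted strings become single-quoted where no interpolation is needed, :key => value hashes become Ruby 1.9 key: value literals, and long integers gain underscore separators. These rewrites are purely syntactic; a minimal sketch of the equivalences the diffs below rely on (the hash keys are illustrative, not taken from the gem):

# Ruby 1.9+ hash literal syntax and underscored integer literals build
# exactly the same objects as the older spellings.
old_style = { :timestamp => 20100517173734, :pid => 11888 }
new_style = { timestamp: 20_100_517_173_734, pid: 11_888 }

old_style == new_style                  # => true
20100517173734 == 20_100_517_173_734    # => true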

data/spec/unit/file_format/delayed_job21_format_spec.rb CHANGED

@@ -3,34 +3,40 @@ require 'spec_helper'
 describe RequestLogAnalyzer::FileFormat::DelayedJob do
 
   subject { RequestLogAnalyzer::FileFormat.load(:delayed_job21) }
-
+
   it { should be_well_formed }
   it { should have_line_definition(:job_lock).capturing(:timestamp, :job, :host, :pid) }
   it { should have_line_definition(:job_completed).capturing(:timestamp, :duration, :host, :pid) }
-  it { should have(4).report_trackers }
+  it { should satisfy { |ff| ff.report_trackers.length == 4 } }
 
   describe '#parse_line' do
-
-    let(:job_lock_sample1) { "2010-05-17T17:37:34+0000: [Worker(delayed_job host:hostname.co.uk pid:11888)] acquired lock on S3FileJob" }
-    let(:job_lock_sample2) { "2010-05-17T17:37:34+0000: [Worker(delayed_job.0 host:hostname.co.uk pid:11888)] acquired lock on S3FileJob" }
+
+    let(:job_lock_sample1) { '2010-05-17T17:37:34+0000: [Worker(delayed_job host:hostname.co.uk pid:11888)] acquired lock on S3FileJob' }
+    let(:job_lock_sample2) { '2010-05-17T17:37:34+0000: [Worker(delayed_job.0 host:hostname.co.uk pid:11888)] acquired lock on S3FileJob' }
     let(:job_completed_sample1) { '2010-05-17T17:37:35+0000: [Worker(delayed_job host:hostname.co.uk pid:11888)] S3FileJob completed after 1.0676' }
 
-    it { should parse_line(job_lock_sample1, 'with a single worker').as(:job_lock).and_capture(
-        :timestamp => 20100517173734, :job => 'S3FileJob', :host => 'hostname.co.uk', :pid => 11888) }
+    it do
+      should parse_line(job_lock_sample1, 'with a single worker').as(:job_lock).and_capture(
+        timestamp: 20_100_517_173_734, job: 'S3FileJob', host: 'hostname.co.uk', pid: 11_888)
+    end
+
+    it do
+      should parse_line(job_lock_sample2, 'with multiple workers').as(:job_lock).and_capture(
+        timestamp: 20_100_517_173_734, job: 'S3FileJob', host: 'hostname.co.uk', pid: 11_888)
+    end
 
-    it { should parse_line(job_lock_sample2, 'with multiple workers').as(:job_lock).and_capture(
-        :timestamp => 20100517173734, :job => 'S3FileJob', :host => 'hostname.co.uk', :pid => 11888) }
+    it do
+      should parse_line(job_completed_sample1).as(:job_completed).and_capture(
+        timestamp: 20_100_517_173_735, duration: 1.0676, host: 'hostname.co.uk', pid: 11_888, job: 'S3FileJob')
+    end
 
-    it { should parse_line(job_completed_sample1).as(:job_completed).and_capture(
-        :timestamp => 20100517173735, :duration => 1.0676, :host => 'hostname.co.uk', :pid => 11888, :job => 'S3FileJob') }
-
     it { should_not parse_line('nonsense', 'a nonsense line') }
   end
-
+
   describe '#parse_io' do
-    let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
-
-    it "should parse a batch of completed jobs without warnings" do
+    let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
+
+    it 'should parse a batch of completed jobs without warnings' do
       fragment = log_snippet(<<-EOLOG)
         2010-05-17T17:36:44+0000: *** Starting job worker delayed_job host:hostname.co.uk pid:11888
         2010-05-17T17:37:34+0000: [Worker(delayed_job host:hostname.co.uk pid:11888)] acquired lock on S3FileJob
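
The recurring matcher change in these specs swaps have(4).report_trackers, which RSpec 3 no longer bundles (the have(n).items matcher moved to the separate rspec-collection_matchers gem), for a plain satisfy block. A rough sketch of the equivalence, using an ordinary array in place of the gem's tracker list:

require 'rspec/autorun'

RSpec.describe 'counting collection elements' do
  subject { [:duration, :frequency, :hourly_spread, :traffic] }

  # Old style; under RSpec 3 this needs the rspec-collection_matchers gem:
  #   it { should have(4).items }

  # Replacement used throughout this release: check the length explicitly.
  it { should satisfy { |list| list.length == 4 } }
end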

data/spec/unit/file_format/delayed_job2_format_spec.rb CHANGED

@@ -6,35 +6,41 @@ describe RequestLogAnalyzer::FileFormat::DelayedJob2 do
 
   it { should be_well_formed }
   it { should have_line_definition(:job_lock).capturing(:timestamp, :job, :host, :pid) }
-  it { should have_line_definition(:job_completed).capturing(:timestamp, :duration, :host, :pid) }
-  it { should have(4).report_trackers }
-
+  it { should have_line_definition(:job_completed).capturing(:timestamp, :duration, :host, :pid) }
+  it { should satisfy { |ff| ff.report_trackers.length == 4 } }
+
   describe '#parse_line' do
-
-    let(:job_lock_sample1) { "2010-05-17T17:37:34+0000: * [Worker(delayed_job host:hostname.co.uk pid:11888)] acquired lock on S3FileJob" }
-    let(:job_lock_sample2) { "2010-05-17T17:37:34+0000: * [Worker(delayed_job.0 host:hostname.co.uk pid:11888)] acquired lock on S3FileJob" }
+
+    let(:job_lock_sample1) { '2010-05-17T17:37:34+0000: * [Worker(delayed_job host:hostname.co.uk pid:11888)] acquired lock on S3FileJob' }
+    let(:job_lock_sample2) { '2010-05-17T17:37:34+0000: * [Worker(delayed_job.0 host:hostname.co.uk pid:11888)] acquired lock on S3FileJob' }
     let(:job_completed_sample) { '2010-05-17T17:37:35+0000: * [JOB] delayed_job host:hostname.co.uk pid:11888 completed after 1.0676' }
-    let(:starting_sample) { '2010-05-17T17:36:44+0000: *** Starting job worker delayed_job host:hostname.co.uk pid:11888' }
+    let(:starting_sample) { '2010-05-17T17:36:44+0000: *** Starting job worker delayed_job host:hostname.co.uk pid:11888' }
     let(:summary_sample) { '3 jobs processed at 0.3163 j/s, 0 failed ...' }
-
-    it { should parse_line(job_lock_sample1, 'with a single worker').as(:job_lock).and_capture(
-        :timestamp => 20100517173734, :job => 'S3FileJob', :host => 'hostname.co.uk', :pid => 11888) }
 
-    it { should parse_line(job_lock_sample2, 'with multiple workers').as(:job_lock).and_capture(
-        :timestamp => 20100517173734, :job => 'S3FileJob', :host => 'hostname.co.uk', :pid => 11888) }
+    it do
+      should parse_line(job_lock_sample1, 'with a single worker').as(:job_lock).and_capture(
+        timestamp: 20_100_517_173_734, job: 'S3FileJob', host: 'hostname.co.uk', pid: 11_888)
+    end
 
-    it { should parse_line(job_completed_sample).as(:job_completed).and_capture(:timestamp => 20100517173735,
-        :duration => 1.0676, :host => 'hostname.co.uk', :pid => 11888) }
+    it do
+      should parse_line(job_lock_sample2, 'with multiple workers').as(:job_lock).and_capture(
+        timestamp: 20_100_517_173_734, job: 'S3FileJob', host: 'hostname.co.uk', pid: 11_888)
+    end
+
+    it do
+      should parse_line(job_completed_sample).as(:job_completed).and_capture(timestamp: 20_100_517_173_735,
+        duration: 1.0676, host: 'hostname.co.uk', pid: 11_888)
+    end
 
     it { should_not parse_line(starting_sample, 'a starting line') }
     it { should_not parse_line(summary_sample, 'a summary line') }
     it { should_not parse_line('nonsense', 'a nonsense line') }
   end
-
+
   describe '#parse_io' do
     let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
 
-    it "should parse a batch of completed jobs without warnings" do
+    it 'should parse a batch of completed jobs without warnings' do
       fragment = <<-EOLOG
         2010-05-17T17:36:44+0000: *** Starting job worker delayed_job host:hostname.co.uk pid:11888
         2010-05-17T17:37:34+0000: * [Worker(delayed_job host:hostname.co.uk pid:11888)] acquired lock on S3FileJob

data/spec/unit/file_format/delayed_job3_format_spec.rb CHANGED

@@ -8,8 +8,7 @@ describe RequestLogAnalyzer::FileFormat::DelayedJob do
   it { should have_line_definition(:job_completed).capturing(:timestamp, :duration, :host, :pid, :job) }
   it { should have_line_definition(:job_failed).capturing(:timestamp, :host, :pid, :job, :attempts) }
   it { should have_line_definition(:job_deleted).capturing(:timestamp, :host, :pid, :job, :failures) }
-  it { should have(6).report_trackers }
-
+  it { should satisfy { |ff| ff.report_trackers.length == 6 } }
 
   describe '#parse_line' do
 
@@ -17,23 +16,28 @@ describe RequestLogAnalyzer::FileFormat::DelayedJob do
     let(:job_failed_sample) { '2010-05-17T17:37:35+0000: [Worker(delayed_job host:hostname.co.uk pid:11888)] S3FileJob.create failed with SocketError: getaddrinfo: Name or service not known - 0 failed attempts' }
     let(:job_deleted_sample) { '2010-05-17T17:37:35+0000: [Worker(delayed_job host:hostname.co.uk pid:11888)] PERMANENTLY removing S3FileJob.create because of 25 consecutive failures.' }
 
-    it { should parse_line(job_completed_sample).as(:job_completed).and_capture(
-        :timestamp => 20100517173735, :duration => 1.0676, :host => 'hostname.co.uk', :pid => 11888, :job => 'S3FileJob.create') }
+    it do
+      should parse_line(job_completed_sample).as(:job_completed).and_capture(
+        timestamp: 20_100_517_173_735, duration: 1.0676, host: 'hostname.co.uk', pid: 11_888, job: 'S3FileJob.create')
+    end
 
-    it { should parse_line(job_failed_sample).as(:job_failed).and_capture(
-        :timestamp => 20100517173735, :host => 'hostname.co.uk', :pid => 11888, :job => 'S3FileJob.create failed with SocketError: getaddrinfo: Name or service not known', :attempts => 0) }
+    it do
+      should parse_line(job_failed_sample).as(:job_failed).and_capture(
+        timestamp: 20_100_517_173_735, host: 'hostname.co.uk', pid: 11_888, job: 'S3FileJob.create failed with SocketError: getaddrinfo: Name or service not known', attempts: 0)
+    end
 
-    it { should parse_line(job_deleted_sample).as(:job_deleted).and_capture(
-        :timestamp => 20100517173735, :host => 'hostname.co.uk', :pid => 11888, :job => 'S3FileJob.create', :failures => 25) }
+    it do
+      should parse_line(job_deleted_sample).as(:job_deleted).and_capture(
+        timestamp: 20_100_517_173_735, host: 'hostname.co.uk', pid: 11_888, job: 'S3FileJob.create', failures: 25)
+    end
 
     it { should_not parse_line('nonsense', 'a nonsense line') }
   end
 
-
   describe '#parse_io' do
     let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
 
-    it "should parse a batch of completed jobs without warnings" do
+    it 'should parse a batch of completed jobs without warnings' do
       fragment = log_snippet(<<-EOLOG)
        2010-05-17T17:36:44+0000: *** Starting job worker delayed_job host:hostname.co.uk pid:11888
        2010-05-17T17:37:35+0000: [Worker(delayed_job host:hostname.co.uk pid:11888)] S3FileJob completed after 1.0676

data/spec/unit/file_format/delayed_job4_format_spec.rb CHANGED

@@ -8,8 +8,7 @@ describe RequestLogAnalyzer::FileFormat::DelayedJob do
   it { should have_line_definition(:job_completed).capturing(:timestamp, :duration, :host, :pid, :job) }
   it { should have_line_definition(:job_failed).capturing(:timestamp, :host, :pid, :job, :attempts, :error) }
   it { should have_line_definition(:job_deleted).capturing(:timestamp, :host, :pid, :job, :failures) }
-  it { should have(6).report_trackers }
-
+  it { should satisfy { |ff| ff.report_trackers.length == 6 } }
 
   describe '#parse_line' do
 
@@ -17,23 +16,28 @@ describe RequestLogAnalyzer::FileFormat::DelayedJob do
     let(:job_failed_sample) { '2010-05-17T17:37:35+0000: [Worker(delayed_job host:hostname.co.uk pid:11888)] Job S3FileJob.create (id=534785) FAILED (0 prior attempts) with SocketError: getaddrinfo: Name or service not known' }
     let(:job_deleted_sample) { '2010-05-17T17:37:35+0000: [Worker(delayed_job host:hostname.co.uk pid:11888)] Job S3FileJob.create (id=534785) REMOVED permanently because of 25 consecutive failures' }
 
-    it { should parse_line(job_completed_sample).as(:job_completed).and_capture(
-        :timestamp => 20100517173735, :duration => 1.0676, :host => 'hostname.co.uk', :pid => 11888, :job => 'S3FileJob.create') }
+    it do
+      should parse_line(job_completed_sample).as(:job_completed).and_capture(
+        timestamp: 20_100_517_173_735, duration: 1.0676, host: 'hostname.co.uk', pid: 11_888, job: 'S3FileJob.create')
+    end
 
-    it { should parse_line(job_failed_sample).as(:job_failed).and_capture(
-        :timestamp => 20100517173735, :host => 'hostname.co.uk', :pid => 11888, :job => 'S3FileJob.create (id=534785)', :attempts => 0, :error => "SocketError: getaddrinfo: Name or service not known") }
+    it do
+      should parse_line(job_failed_sample).as(:job_failed).and_capture(
+        timestamp: 20_100_517_173_735, host: 'hostname.co.uk', pid: 11_888, job: 'S3FileJob.create (id=534785)', attempts: 0, error: 'SocketError: getaddrinfo: Name or service not known')
+    end
 
-    it { should parse_line(job_deleted_sample).as(:job_deleted).and_capture(
-        :timestamp => 20100517173735, :host => 'hostname.co.uk', :pid => 11888, :job => 'S3FileJob.create (id=534785)', :failures => 25) }
+    it do
+      should parse_line(job_deleted_sample).as(:job_deleted).and_capture(
+        timestamp: 20_100_517_173_735, host: 'hostname.co.uk', pid: 11_888, job: 'S3FileJob.create (id=534785)', failures: 25)
+    end
 
     it { should_not parse_line('nonsense', 'a nonsense line') }
   end
 
-
   describe '#parse_io' do
     let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
 
-    it "should parse a batch of completed jobs without warnings" do
+    it 'should parse a batch of completed jobs without warnings' do
       fragment = log_snippet(<<-EOLOG)
        2010-05-17T17:36:44+0000: *** Starting job worker delayed_job host:hostname.co.uk pid:11888
        2010-05-17T17:37:35+0000: [Worker(delayed_job host:hostname.co.uk pid:11888)] Job S3FileJob (id=534785) COMPLETED after 1.0676

data/spec/unit/file_format/delayed_job_format_spec.rb CHANGED

@@ -1,7 +1,7 @@
 require 'spec_helper'
 
 describe RequestLogAnalyzer::FileFormat::DelayedJob do
-
+
   subject { RequestLogAnalyzer::FileFormat.load(:delayed_job) }
 
   it { should be_well_formed }
@@ -9,28 +9,28 @@ describe RequestLogAnalyzer::FileFormat::DelayedJob do
   it { should have_line_definition(:job_completed).capturing(:completed_job, :duration) }
   it { should have_line_definition(:job_lock_failed).capturing(:locked_job) }
   it { should have_line_definition(:job_failed).capturing(:failed_job, :attempts, :exception) }
-  it { should have(3).report_trackers }
+  it { should satisfy { |ff| ff.report_trackers.length == 3 } }
 
   describe '#parse_line' do
     let(:job_lock_sample) { '* [JOB] acquiring lock on BackgroundJob::ThumbnailSaver' }
     let(:job_completed_sample) { '* [JOB] BackgroundJob::ThumbnailSaver completed after 0.7932' }
     let(:job_lock_failed_sample) { '* [JOB] failed to acquire exclusive lock for BackgroundJob::ThumbnailSaver' }
-    let(:job_failed_sample) { "* [JOB] BackgroundJob::ThumbnailSaver failed with ActiveRecord::RecordNotFound: Couldn't find Design with ID=20413443 - 1 failed attempts" }
+    let(:job_failed_sample) { "* [JOB] BackgroundJob::ThumbnailSaver failed with ActiveRecord::RecordNotFound: Couldn't find Design with ID=20413443 - 1 failed attempts" }
     let(:summary_sample) { '1 jobs processed at 1.0834 j/s, 0 failed ...' }
 
-    it { should parse_line(job_lock_sample).as(:job_lock).and_capture(:job => 'BackgroundJob::ThumbnailSaver') }
-    it { should parse_line(job_completed_sample).as(:job_completed).and_capture(:duration => 0.7932, :completed_job => 'BackgroundJob::ThumbnailSaver') }
-    it { should parse_line(job_lock_failed_sample).as(:job_lock_failed).and_capture(:locked_job => 'BackgroundJob::ThumbnailSaver') }
-    it { should parse_line(job_failed_sample).as(:job_failed).and_capture(:attempts => 1, :failed_job => 'BackgroundJob::ThumbnailSaver', :exception => 'ActiveRecord::RecordNotFound') }
+    it { should parse_line(job_lock_sample).as(:job_lock).and_capture(job: 'BackgroundJob::ThumbnailSaver') }
+    it { should parse_line(job_completed_sample).as(:job_completed).and_capture(duration: 0.7932, completed_job: 'BackgroundJob::ThumbnailSaver') }
+    it { should parse_line(job_lock_failed_sample).as(:job_lock_failed).and_capture(locked_job: 'BackgroundJob::ThumbnailSaver') }
+    it { should parse_line(job_failed_sample).as(:job_failed).and_capture(attempts: 1, failed_job: 'BackgroundJob::ThumbnailSaver', exception: 'ActiveRecord::RecordNotFound') }
 
     it { should_not parse_line(summary_sample, 'a summary line') }
     it { should_not parse_line('nonsense', 'a nonsense line') }
   end
 
   describe '#parse_io' do
-    let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
-
-    it "should parse a batch of completed jobs without warnings" do
+    let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
+
+    it 'should parse a batch of completed jobs without warnings' do
       fragment = log_snippet(<<-EOLOG)
        * [JOB] acquiring lock on BackgroundJob::ThumbnailSaver
        * [JOB] BackgroundJob::ThumbnailSaver completed after 0.9114
@@ -43,8 +43,8 @@ describe RequestLogAnalyzer::FileFormat::DelayedJob do
       log_parser.should_not_receive(:warn)
       log_parser.parse_io(fragment)
     end
-
-    it "should parse a batch with a failed job without warnings" do
+
+    it 'should parse a batch with a failed job without warnings' do
       fragment = log_snippet(<<-EOLOG)
        * [JOB] acquiring lock on BackgroundJob::ThumbnailSaver
        * [JOB] BackgroundJob::ThumbnailSaver completed after 1.0627
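
The #parse_io examples above wrap the file format in a RequestLogAnalyzer::Source::LogParser and feed it a log fragment; the specs' log_snippet helper supplies an IO-like object. A minimal sketch of the same flow outside the spec suite, assuming parse_io accepts any IO such as a StringIO:

require 'request_log_analyzer'
require 'stringio'

format     = RequestLogAnalyzer::FileFormat.load(:delayed_job)
log_parser = RequestLogAnalyzer::Source::LogParser.new(format)

# parse_io scans the stream and hands each recognised request to the
# parser's request handling, as the specs assert via handle_request.
log_parser.parse_io(StringIO.new(<<~LOG))
  * [JOB] acquiring lock on BackgroundJob::ThumbnailSaver
  * [JOB] BackgroundJob::ThumbnailSaver completed after 0.7932
LOG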

data/spec/unit/file_format/file_format_api_spec.rb CHANGED

@@ -2,27 +2,27 @@ require 'spec_helper'
 
 describe RequestLogAnalyzer::FileFormat do
 
-  describe ".format_definition" do
+  describe '.format_definition' do
 
     before(:each) do
      @first_file_format = Class.new(RequestLogAnalyzer::FileFormat::Base)
      @second_file_format = Class.new(RequestLogAnalyzer::FileFormat::Base)
     end
 
-    it "should specify line definitions directly within the file_format" do
-      @first_file_format.format_definition.direct_test :regexp => /test/
+    it 'should specify line definitions directly within the file_format' do
+      @first_file_format.format_definition.direct_test regexp: /test/
      @first_file_format.should have_line_definition(:direct_test)
     end
 
-    it "specify lines with a block for the format definition" do
+    it 'specify lines with a block for the format definition' do
      @first_file_format.format_definition do |format|
-        format.block_test :regexp => /test (\w+)/, :captures => [{:name => :tester, :type => :string}]
+        format.block_test regexp: /test (\w+)/, captures: [{ name: :tester, type: :string }]
      end
 
      @first_file_format.should have_line_definition(:block_test).capturing(:tester)
     end
 
-    it "should specify a line with a block" do
+    it 'should specify a line with a block' do
      @first_file_format.format_definition.hash_test do |line|
        line.regexp = /test/
        line.captures = []
@@ -31,39 +31,39 @@ describe RequestLogAnalyzer::FileFormat do
      @first_file_format.should have_line_definition(:hash_test)
     end
 
-    it "should define lines only for its own language" do
-      @first_file_format.format_definition.first :regexp => /test 123/
-      @second_file_format.format_definition.second :regexp => /test 456/
+    it 'should define lines only for its own language' do
+      @first_file_format.format_definition.first regexp: /test 123/
+      @second_file_format.format_definition.second regexp: /test 456/
 
-      @first_file_format.should have_line_definition(:first)
-      @first_file_format.should_not have_line_definition(:second)
+      @first_file_format.should have_line_definition(:first)
+      @first_file_format.should_not have_line_definition(:second)
      @second_file_format.should_not have_line_definition(:first)
-      @second_file_format.should have_line_definition(:second)
+      @second_file_format.should have_line_definition(:second)
     end
   end
 
-  describe ".load" do
+  describe '.load' do
 
-    it "should return an instance of a FileFormat class" do
+    it 'should return an instance of a FileFormat class' do
      @file_format = RequestLogAnalyzer::FileFormat.load(TestingFormat)
      @file_format.should be_kind_of(TestingFormat)
     end
 
-    it "should return itself if it already is a FileFormat::Base instance" do
+    it 'should return itself if it already is a FileFormat::Base instance' do
      @file_format = RequestLogAnalyzer::FileFormat.load(testing_format)
      @file_format.should be_kind_of(TestingFormat)
     end
 
-    it "should load a predefined file format from the /file_format dir" do
+    it 'should load a predefined file format from the /file_format dir' do
      @file_format = RequestLogAnalyzer::FileFormat.load(:rails)
      @file_format.should be_kind_of(RequestLogAnalyzer::FileFormat::Rails)
     end
 
-    it "should load a provided format file" do
+    it 'should load a provided format file' do
      format_filename = File.expand_path('../../lib/testing_format.rb', File.dirname(__FILE__))
      @file_format = RequestLogAnalyzer::FileFormat.load(format_filename)
      @file_format.should be_kind_of(TestingFormat)
     end
-
+
   end
-end
+end
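
The file_format_api spec exercises the format_definition DSL on subclasses of RequestLogAnalyzer::FileFormat::Base, both as a direct call and via a block. A hedged sketch mirroring those calls; the line names (:greeting, :farewell) and patterns are invented for illustration:

require 'request_log_analyzer'

# An anonymous format class, declared the same way the spec declares its
# test classes: one line per call, with a regexp and optional captures.
custom_format = Class.new(RequestLogAnalyzer::FileFormat::Base)

# Direct call form, as in the spec's direct_test example:
custom_format.format_definition.greeting regexp: /hello (\w+)/,
                                          captures: [{ name: :user, type: :string }]

# Block form, as in the spec's block_test example:
custom_format.format_definition do |format|
  format.farewell regexp: /bye (\w+)/, captures: [{ name: :user, type: :string }]
end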

data/spec/unit/file_format/format_autodetection_spec.rb CHANGED

@@ -3,37 +3,37 @@ require 'spec_helper'
 describe RequestLogAnalyzer::FileFormat do
 
   describe '.autodetect' do
-    it "should autodetect a Merb log" do
+    it 'should autodetect a Merb log' do
      file_format = RequestLogAnalyzer::FileFormat.autodetect(log_fixture(:merb))
      file_format.should be_instance_of(RequestLogAnalyzer::FileFormat::Merb)
     end
 
-    it "should autodetect a MySQL slow query log" do
+    it 'should autodetect a MySQL slow query log' do
      file_format = RequestLogAnalyzer::FileFormat.autodetect(log_fixture(:mysql_slow_query))
      file_format.should be_instance_of(RequestLogAnalyzer::FileFormat::Mysql)
     end
 
-    it "should autodetect a Rails 1.x log" do
+    it 'should autodetect a Rails 1.x log' do
      file_format = RequestLogAnalyzer::FileFormat.autodetect(log_fixture(:rails_1x))
      file_format.should be_instance_of(RequestLogAnalyzer::FileFormat::Rails)
     end
 
-    it "should autodetect a Rails 2.x log" do
+    it 'should autodetect a Rails 2.x log' do
      file_format = RequestLogAnalyzer::FileFormat.autodetect(log_fixture(:rails_22))
      file_format.should be_instance_of(RequestLogAnalyzer::FileFormat::RailsDevelopment)
     end
 
-    it "should autodetect an Apache access log" do
+    it 'should autodetect an Apache access log' do
      file_format = RequestLogAnalyzer::FileFormat.autodetect(log_fixture(:apache_common))
      file_format.should be_instance_of(RequestLogAnalyzer::FileFormat::Apache)
     end
 
-    it "should autodetect a Rack access log" do
+    it 'should autodetect a Rack access log' do
      file_format = RequestLogAnalyzer::FileFormat.autodetect(log_fixture(:sinatra))
      file_format.should be_instance_of(RequestLogAnalyzer::FileFormat::Rack)
     end
 
-    it "should not find any file format with a bogus file" do
+    it 'should not find any file format with a bogus file' do
      RequestLogAnalyzer::FileFormat.autodetect(log_fixture(:test_order)).should be_nil
     end
   end
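
The autodetection spec drives RequestLogAnalyzer::FileFormat.autodetect, which inspects a log file and returns a matching format instance, or nil when nothing is recognised. A small usage sketch under that assumption; the log path is hypothetical:

require 'request_log_analyzer'

format = RequestLogAnalyzer::FileFormat.autodetect('/var/log/apache2/access.log')

if format.nil?
  warn 'no known log format detected'
else
  puts "detected #{format.class}"
end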

data/spec/unit/file_format/haproxy_format_spec.rb CHANGED

@@ -11,7 +11,7 @@ describe RequestLogAnalyzer::FileFormat::Haproxy do
   it { should have_line_definition(:haproxy12).capturing(:client_ip, :timestamp, :frontend_name, :server_name, :tq, :tw, :tc, :tr, :tt, :status_code, :bytes_read, :captured_request_cookie, :captured_response_cookie, :termination_event_code, :terminated_session_state, :clientside_persistence_cookie, :serverside_persistence_cookie, :srv_conn, :listener_conn, :process_conn, :srv_queue, :backend_queue, :captured_request_headers, :captured_response_headers, :http_request) }
   it { should have_line_definition(:haproxy11).capturing(:client_ip, :timestamp, :frontend_name, :server_name, :tq, :tc, :tr, :tt, :status_code, :bytes_read, :captured_request_cookie, :captured_response_cookie, :termination_event_code, :terminated_session_state, :clientside_persistence_cookie, :serverside_persistence_cookie, :listener_conn, :process_conn, :captured_request_headers, :captured_response_headers, :http_request) }
 
-  it { should have(14).report_trackers }
+  it { should satisfy { |ff| ff.report_trackers.length == 14 } }
 
   let(:sample_haproxy13) { 'Feb 6 12:14:14 localhost haproxy[14389]: 10.0.1.2:33317 [06/Feb/2009:12:14:14.655] http-in static/srv1 10/0/30/69/109 200 2750 - - ---- 1/1/1/1/0 0/0 {1wt.eu} {} "GET /index.html HTTP/1.1"' }
   let(:sample_haproxy12) { 'Mar 15 06:36:49 localhost haproxy[9367]: 127.0.0.1:38990 [15/Mar/2011:06:36:45.103] as-proxy mc-search-2 0/0/0/730/731 200 29404 - - --NN 2/54/54 0/0 {66.249.68.216} {} "GET /neighbor/26014153 HTTP/1.0" ' }
@@ -19,61 +19,65 @@ describe RequestLogAnalyzer::FileFormat::Haproxy do
   let(:sample_errors) { 'haproxy[18113]: 127.0.0.1:34549 [15/Oct/2003:15:19:06.103] px-http px-http/<NOSRV> -1/-1/-1/-1/+50001 408 +2750 - - cR-- 2/2/2/0/+2 0/0 ""' }
 
   describe '#parse_line' do
-    it { should parse_line(sample_haproxy13, 'an haproxy 1.3 access line').and_capture(
-        :client_ip => '10.0.1.2', :tq => 0.010, :captured_request_cookie => nil,
-        :timestamp => 20090206121414, :tw => 0.000, :captured_response_cookie => nil,
-        :frontend_name => 'http-in', :tc => 0.030, :clientside_persistence_cookie => nil,
-        :backend_name => 'static', :tr => 0.069, :serverside_persistence_cookie => nil,
-        :server_name => 'srv1', :tt => 0.109, :termination_event_code => nil,
-        :status_code => 200, :actconn => 1, :terminated_session_state => nil,
-        :bytes_read => 2750, :feconn => 1, :captured_request_headers => '{1wt.eu}',
-        :backend_queue => 0, :beconn => 1, :captured_response_headers => nil,
-        :retries => 0, :srv_conn => 1, :srv_queue => 0,
-        :http_request => 'GET /index.html HTTP/1.1')
-    }
+    it do
+      should parse_line(sample_haproxy13, 'an haproxy 1.3 access line').and_capture(
+        client_ip: '10.0.1.2', tq: 0.010, captured_request_cookie: nil,
+        timestamp: 20_090_206_121_414, tw: 0.000, captured_response_cookie: nil,
+        frontend_name: 'http-in', tc: 0.030, clientside_persistence_cookie: nil,
+        backend_name: 'static', tr: 0.069, serverside_persistence_cookie: nil,
+        server_name: 'srv1', tt: 0.109, termination_event_code: nil,
+        status_code: 200, actconn: 1, terminated_session_state: nil,
+        bytes_read: 2750, feconn: 1, captured_request_headers: '{1wt.eu}',
+        backend_queue: 0, beconn: 1, captured_response_headers: nil,
+        retries: 0, srv_conn: 1, srv_queue: 0,
+        http_request: 'GET /index.html HTTP/1.1')
+    end
+
+    it do
+      should parse_line(sample_haproxy12, 'an haproxy 1.2 access line').and_capture(
+        client_ip: '127.0.0.1', tq: 0.000, captured_request_cookie: nil,
+        timestamp: 20_110_315_063_645, tw: 0.000, captured_response_cookie: nil,
+        frontend_name: 'as-proxy', tc: 0.000, clientside_persistence_cookie: 'N',
+        server_name: 'mc-search-2', tr: 0.730, serverside_persistence_cookie: 'N',
+        status_code: 200, tt: 0.731, termination_event_code: nil,
+        bytes_read: 29_404, listener_conn: 54, terminated_session_state: nil,
+        backend_queue: 0, process_conn: 54, captured_request_headers: '{66.249.68.216}',
+        srv_queue: 0, srv_conn: 2, captured_response_headers: nil,
+        http_request: 'GET /neighbor/26014153 HTTP/1.0')
+    end
+
+    it do
+      should parse_line(sample_haproxy11, 'an haproxy 1.1 access line').and_capture(
+        client_ip: '127.0.0.1', tq: 0.009, captured_request_cookie: nil,
+        timestamp: 20_031_015_083_217, tc: 0.007, captured_response_cookie: nil,
+        frontend_name: 'relais-http', tr: 0.014, clientside_persistence_cookie: nil,
+        server_name: 'Srv1', tt: 0.030, serverside_persistence_cookie: nil,
+        status_code: 502, listener_conn: 2, termination_event_code: 'P',
+        bytes_read: 243, process_conn: 3, terminated_session_state: 'H',
+        captured_request_headers: nil, captured_response_headers: nil,
+        http_request: nil)
+    end
 
-    it { should parse_line(sample_haproxy12, 'an haproxy 1.2 access line').and_capture(
-        :client_ip => '127.0.0.1', :tq => 0.000, :captured_request_cookie => nil,
-        :timestamp => 20110315063645, :tw => 0.000, :captured_response_cookie => nil,
-        :frontend_name => 'as-proxy', :tc => 0.000, :clientside_persistence_cookie => 'N',
-        :server_name => 'mc-search-2',:tr => 0.730, :serverside_persistence_cookie => 'N',
-        :status_code => 200, :tt => 0.731, :termination_event_code => nil,
-        :bytes_read => 29404, :listener_conn => 54, :terminated_session_state => nil,
-        :backend_queue => 0, :process_conn => 54, :captured_request_headers => '{66.249.68.216}',
-        :srv_queue => 0, :srv_conn => 2, :captured_response_headers => nil,
-        :http_request => 'GET /neighbor/26014153 HTTP/1.0')
-    }
+    it do
+      should parse_line(sample_errors, 'a failed access line').and_capture(
+        timestamp: 20_031_015_151_906, tq: nil, captured_request_cookie: nil,
+        server_name: '<NOSRV>', tw: nil, captured_response_cookie: nil,
+        bytes_read: 2750, tc: nil, clientside_persistence_cookie: nil,
+        retries: 2, tr: nil, serverside_persistence_cookie: nil,
+        http_request: nil, tt: 50.001, termination_event_code: 'c',
+        terminated_session_state: 'R',
+        captured_request_headers: nil,
+        captured_response_headers: nil)
+    end
 
-    it { should parse_line(sample_haproxy11, 'an haproxy 1.1 access line').and_capture(
-        :client_ip => '127.0.0.1', :tq => 0.009, :captured_request_cookie => nil,
-        :timestamp => 20031015083217, :tc => 0.007, :captured_response_cookie => nil,
-        :frontend_name => 'relais-http',:tr => 0.014, :clientside_persistence_cookie => nil,
-        :server_name => 'Srv1', :tt => 0.030, :serverside_persistence_cookie => nil,
-        :status_code => 502, :listener_conn => 2,:termination_event_code => 'P',
-        :bytes_read => 243, :process_conn => 3, :terminated_session_state => 'H',
-        :captured_request_headers => nil, :captured_response_headers => nil,
-        :http_request => nil)
-    }
-
-    it { should parse_line(sample_errors, 'a failed access line').and_capture(
-        :timestamp => 20031015151906, :tq => nil, :captured_request_cookie => nil,
-        :server_name => '<NOSRV>', :tw => nil, :captured_response_cookie => nil,
-        :bytes_read => 2750, :tc => nil, :clientside_persistence_cookie => nil,
-        :retries => 2, :tr => nil, :serverside_persistence_cookie => nil,
-        :http_request => nil, :tt => 50.001, :termination_event_code => 'c',
-        :terminated_session_state => 'R',
-        :captured_request_headers => nil,
-        :captured_response_headers => nil)
-    }
-
     it { should_not parse_line('nonsense') }
   end
-
+
   describe '#parse_io' do
     let(:log_parser) { RequestLogAnalyzer::Source::LogParser.new(subject) }
     let(:snippet) { log_snippet(sample_haproxy13, sample_haproxy12, sample_haproxy11, sample_errors, 'nonsense') }
-
-    it "should parse a log snippet without warnings" do
+
+    it 'should parse a log snippet without warnings' do
      log_parser.should_receive(:handle_request).exactly(4).times
      log_parser.should_not_receive(:warn)
      log_parser.parse_io(snippet)