haproxy_log_parser 0.0.2 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/VERSION CHANGED
@@ -1 +1 @@
-0.0.2
+0.0.3
@@ -5,10 +5,11 @@ Gem::Specification.new do |s|
   s.homepage = 'https://github.com/tobyhs/haproxy_log_parser'
   s.summary = 'Parser for HAProxy logs in the HTTP log format'
   s.description = s.summary
+  s.license = 'MIT'
 
   s.add_dependency 'treetop'
 
-  s.add_development_dependency 'rspec'
+  s.add_development_dependency 'rspec', '~> 2.13'
 
   s.files = Dir.glob('lib/**/*') + [
     'README.rdoc',
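
The development dependency is now pinned with RubyGems' pessimistic constraint: '~> 2.13' allows 2.13 and any later 2.x release, but not 3.0. A minimal sketch of how that constraint resolves, using only the standard Gem::Requirement API (not code from this gem):

    require 'rubygems'

    req = Gem::Requirement.new('~> 2.13')
    req.satisfied_by?(Gem::Version.new('2.13.0'))  # => true
    req.satisfied_by?(Gem::Version.new('2.14.1'))  # => true
    req.satisfied_by?(Gem::Version.new('3.0.0'))   # => false
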
@@ -15,8 +15,8 @@ module HAProxyLogParser
     actconn:integer '/' feconn:integer '/' beconn:integer '/'
     srv_conn:integer '/' retries:integer ' '
     srv_queue:integer '/' backend_queue:integer ' '
-    '{' captured_request_headers:captured_headers '} '
-    '{' captured_response_headers:captured_headers '} '
+    captured_request_headers:('{' headers:captured_headers '} ')?
+    captured_response_headers:('{' headers:captured_headers '} ')?
     '"' http_request:[^"]+ '"'
     "\n"?
   end
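
Both captured-header blocks are now optional in the grammar, so a log line without a '{...}' block (HAProxy omits it when no corresponding capture directives are configured) still parses and the field simply comes back empty. A short sketch of the resulting behaviour, mirroring the expectations in the updated spec below:

    require 'haproxy_log_parser'

    # Second line of spec/sample.log: request headers are captured,
    # the response-header block is absent.
    line = 'May 21 01:35:46 10.18.237.5 haproxy[26747]: 192.168.1.215:50679 ' \
      '[21/May/2012:01:35:46.146] webapp webapp_backend/web09 27/0/1/0/217 200 1367 ' \
      'session=abc session=xyz ---- 600/529/336/158/0 0/0 ' \
      '{#7C#7C #7B5F41#7D|http://google.com/|} "GET /images/image.gif HTTP/1.1"'

    entry = HAProxyLogParser.parse(line)
    entry.captured_request_headers   # => ['|| {5F41}', 'http://google.com/', '']
    entry.captured_response_headers  # => []
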
@@ -54,5 +54,3 @@ module HAProxyLogParser
     end
   end
 end
-
-# vim:ai
@@ -39,10 +39,18 @@ module HAProxyLogParser
       cookie = decode_captured_cookie(result.send(field).text_value)
       entry.send("#{field}=", cookie)
     end
+
     [:captured_request_headers, :captured_response_headers].each do |field|
-      headers = decode_captured_headers(result.send(field).text_value)
+      if result.send(field).respond_to?(:headers)
+        headers = decode_captured_headers(
+          result.send(field).headers.text_value
+        )
+      else
+        headers = []
+      end
       entry.send("#{field}=", headers)
     end
+
     entry.http_request = unescape(result.http_request.text_value)
 
     entry
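
The respond_to?(:headers) guard works because the optional groups introduced in the grammar only expose the headers label when the '{...}' block actually matched; an absent block yields a node without that accessor, and the entry falls back to an empty array. The decoding itself amounts to splitting the brace contents on '|', as the sample lines suggest (a rough illustration, not the gem's actual decode_captured_headers implementation):

    # Captured block from the first line of spec/sample.log.
    'www.sytadin.equipement.gouv.fr||http://trafic.1wt.eu/'.split('|', -1)
    # => ["www.sytadin.equipement.gouv.fr", "", "http://trafic.1wt.eu/"]
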
@@ -7,68 +7,68 @@ describe HAProxyLogParser do
   describe '.parse' do
     it 'parses LINE[0] correctly' do
       entry = HAProxyLogParser.parse(LINES[0])
-      entry.client_ip.should == '10.0.8.2'
-      entry.client_port.should == 34028
-      entry.accept_date.should == Time.local(2011, 8, 9, 20, 30, 46, 429)
-      entry.frontend_name.should == 'proxy-out'
+      expect(entry.client_ip).to eq('10.0.8.2')
+      expect(entry.client_port).to eq(34028)
+      expect(entry.accept_date).to eq(Time.local(2011, 8, 9, 20, 30, 46, 429))
+      expect(entry.frontend_name).to eq('proxy-out')
       expect(entry).to be_ssl
-      entry.backend_name.should == 'proxy-out'
-      entry.server_name.should == 'cache1'
-      entry.tq.should == 1
-      entry.tw.should == 0
-      entry.tc.should == 2
-      entry.tr.should == 126
-      entry.tt.should == 128
-      entry.status_code.should == 301
-      entry.bytes_read.should == 223
-      entry.captured_request_cookie.should == {}
-      entry.captured_response_cookie.should == {}
-      entry.termination_state.should == '----'
-      entry.actconn.should == 617
-      entry.feconn.should == 523
-      entry.beconn.should == 336
-      entry.srv_conn.should == 168
-      entry.retries.should == 0
-      entry.srv_queue.should == 0
-      entry.backend_queue.should == 0
-      entry.captured_request_headers.should == ['www.sytadin.equipement.gouv.fr', '', 'http://trafic.1wt.eu/']
-      entry.captured_response_headers.should == ['Apache', '230', '', '', 'http://www.sytadin.']
-      entry.http_request.should == 'GET http://www.sytadin.equipement.gouv.fr/ HTTP/1.1'
+      expect(entry.backend_name).to eq('proxy-out')
+      expect(entry.server_name).to eq('cache1')
+      expect(entry.tq).to eq(1)
+      expect(entry.tw).to eq(0)
+      expect(entry.tc).to eq(2)
+      expect(entry.tr).to eq(126)
+      expect(entry.tt).to eq(128)
+      expect(entry.status_code).to eq(301)
+      expect(entry.bytes_read).to eq(223)
+      expect(entry.captured_request_cookie).to eq({})
+      expect(entry.captured_response_cookie).to eq({})
+      expect(entry.termination_state).to eq('----')
+      expect(entry.actconn).to eq(617)
+      expect(entry.feconn).to eq(523)
+      expect(entry.beconn).to eq(336)
+      expect(entry.srv_conn).to eq(168)
+      expect(entry.retries).to eq(0)
+      expect(entry.srv_queue).to eq(0)
+      expect(entry.backend_queue).to eq(0)
+      expect(entry.captured_request_headers).to eq(['www.sytadin.equipement.gouv.fr', '', 'http://trafic.1wt.eu/'])
+      expect(entry.captured_response_headers).to eq(['Apache', '230', '', '', 'http://www.sytadin.'])
+      expect(entry.http_request).to eq('GET http://www.sytadin.equipement.gouv.fr/ HTTP/1.1')
     end
 
     it 'parses LINES[1] correctly' do
       entry = HAProxyLogParser.parse(LINES[1])
-      entry.client_ip.should == '192.168.1.215'
-      entry.client_port.should == 50679
-      entry.accept_date.should == Time.local(2012, 5, 21, 1, 35, 46, 146)
-      entry.frontend_name.should == 'webapp'
+      expect(entry.client_ip).to eq('192.168.1.215')
+      expect(entry.client_port).to eq(50679)
+      expect(entry.accept_date).to eq(Time.local(2012, 5, 21, 1, 35, 46, 146))
+      expect(entry.frontend_name).to eq('webapp')
       expect(entry).to_not be_ssl
-      entry.backend_name.should == 'webapp_backend'
-      entry.server_name.should == 'web09'
-      entry.tq.should == 27
-      entry.tw.should == 0
-      entry.tc.should == 1
-      entry.tr.should == 0
-      entry.tt.should == 217
-      entry.status_code.should == 200
-      entry.bytes_read.should == 1367
-      entry.captured_request_cookie.should == {'session' => 'abc'}
-      entry.captured_response_cookie.should == {'session' => 'xyz'}
-      entry.termination_state.should == '----'
-      entry.actconn.should == 600
-      entry.feconn.should == 529
-      entry.beconn.should == 336
-      entry.srv_conn.should == 158
-      entry.retries.should == 0
-      entry.srv_queue.should == 0
-      entry.backend_queue.should == 0
-      entry.captured_request_headers.should == ['|| {5F41}', 'http://google.com/', '']
-      entry.captured_response_headers.should == ['1270925568', '', '']
-      entry.http_request.should == 'GET /images/image.gif HTTP/1.1'
+      expect(entry.backend_name).to eq('webapp_backend')
+      expect(entry.server_name).to eq('web09')
+      expect(entry.tq).to eq(27)
+      expect(entry.tw).to eq(0)
+      expect(entry.tc).to eq(1)
+      expect(entry.tr).to eq(0)
+      expect(entry.tt).to eq(217)
+      expect(entry.status_code).to eq(200)
+      expect(entry.bytes_read).to eq(1367)
+      expect(entry.captured_request_cookie).to eq({'session' => 'abc'})
+      expect(entry.captured_response_cookie).to eq({'session' => 'xyz'})
+      expect(entry.termination_state).to eq('----')
+      expect(entry.actconn).to eq(600)
+      expect(entry.feconn).to eq(529)
+      expect(entry.beconn).to eq(336)
+      expect(entry.srv_conn).to eq(158)
+      expect(entry.retries).to eq(0)
+      expect(entry.srv_queue).to eq(0)
+      expect(entry.backend_queue).to eq(0)
+      expect(entry.captured_request_headers).to eq(['|| {5F41}', 'http://google.com/', ''])
+      expect(entry.captured_response_headers).to eq([])
+      expect(entry.http_request).to eq('GET /images/image.gif HTTP/1.1')
     end
 
     it 'returns nil if the line is invalid' do
-      HAProxyLogParser.parse('asdf jkl;').should be_nil
+      expect(HAProxyLogParser.parse('asdf jkl;')).to be_nil
     end
   end
 end
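
The spec changes are a mechanical migration from RSpec's should-based syntax to the expect syntax (available since RSpec 2.11, which the '~> 2.13' pin guarantees). The pattern, taken straight from the diff above:

    entry.status_code.should == 301        # old should-based expectation
    expect(entry.status_code).to eq(301)   # new expect-based expectation
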
data/spec/sample.log CHANGED
@@ -1,2 +1,2 @@
 Aug 9 20:30:46 localhost haproxy[2022]: 10.0.8.2:34028 [09/Aug/2011:20:30:46.429] proxy-out~ proxy-out/cache1 1/0/2/126/+128 301 +223 - - ---- 617/523/336/168/0 0/0 {www.sytadin.equipement.gouv.fr||http://trafic.1wt.eu/} {Apache|230|||http://www.sytadin.} "GET http://www.sytadin.equipement.gouv.fr/ HTTP/1.1"
-May 21 01:35:46 10.18.237.5 haproxy[26747]: 192.168.1.215:50679 [21/May/2012:01:35:46.146] webapp webapp_backend/web09 27/0/1/0/217 200 1367 session=abc session=xyz ---- 600/529/336/158/0 0/0 {#7C#7C #7B5F41#7D|http://google.com/|} {1270925568||} "GET /images/image.gif HTTP/1.1"
+May 21 01:35:46 10.18.237.5 haproxy[26747]: 192.168.1.215:50679 [21/May/2012:01:35:46.146] webapp webapp_backend/web09 27/0/1/0/217 200 1367 session=abc session=xyz ---- 600/529/336/158/0 0/0 {#7C#7C #7B5F41#7D|http://google.com/|} "GET /images/image.gif HTTP/1.1"
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: haproxy_log_parser
 version: !ruby/object:Gem::Version
-  hash: 27
+  hash: 25
   prerelease: false
   segments:
   - 0
   - 0
-  - 2
-  version: 0.0.2
+  - 3
+  version: 0.0.3
 platform: ruby
 authors:
 - Toby Hsieh
@@ -15,7 +15,7 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2013-09-23 00:00:00 -07:00
+date: 2013-12-16 00:00:00 -08:00
 default_executable:
 dependencies:
 - !ruby/object:Gem::Dependency
@@ -38,12 +38,13 @@ dependencies:
   requirement: &id002 !ruby/object:Gem::Requirement
     none: false
     requirements:
-    - - ">="
+    - - ~>
       - !ruby/object:Gem::Version
-        hash: 3
+        hash: 25
         segments:
-        - 0
-        version: "0"
+        - 2
+        - 13
+        version: "2.13"
   type: :development
   version_requirements: *id002
 description: Parser for HAProxy logs in the HTTP log format
@@ -65,8 +66,8 @@ files:
 - spec/sample.log
 has_rdoc: true
 homepage: https://github.com/tobyhs/haproxy_log_parser
-licenses: []
-
+licenses:
+- MIT
 post_install_message:
 rdoc_options: []