logstash-patterns-core 0.1.10 → 0.3.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/.gitignore +2 -0
- data/CHANGELOG.md +14 -0
- data/CONTRIBUTORS +1 -0
- data/NOTICE.TXT +5 -0
- data/README.md +1 -1
- data/logstash-patterns-core.gemspec +2 -1
- data/patterns/aws +11 -0
- data/patterns/bro +13 -0
- data/patterns/firewalls +26 -2
- data/patterns/grok-patterns +2 -2
- data/patterns/haproxy +3 -1
- data/patterns/java +13 -0
- data/patterns/rails +13 -0
- data/spec/patterns/bro.rb +126 -0
- data/spec/patterns/core_spec.rb +43 -0
- data/spec/patterns/firewalls_spec.rb +53 -0
- data/spec/patterns/haproxy_spec.rb +43 -0
- data/spec/patterns/rails3_spec.rb +56 -0
- data/spec/patterns/s3_spec.rb +132 -0
- data/spec/patterns/shorewall_spec.rb +90 -0
- data/spec/patterns/syslog_spec.rb +26 -0
- metadata +35 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 1d76c75e17f367e1289a5523405de72dd14229b0
|
4
|
+
data.tar.gz: 6625c91537bf4a556681664510d33908b0b6645d
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: e7a69562203c00b4546523ae154789279ccf05e80af4c77fae2ae9ab573b5079d8d4c84f36b3934b9dbf77e6f06f20e4f8d1261622d1e36e618af90ebc099701
|
7
|
+
data.tar.gz: 68f1f9f483723eece2b6fa015f27c57b8c1f2538cea86a76484a0fcee6c9bc13c101d60f09391c765dc0c84336fcbb117376db0aed8e97412613d6dabeee5978
|
data/.gitignore
CHANGED
data/CHANGELOG.md
ADDED
@@ -0,0 +1,14 @@
|
|
1
|
+
# 0.3.0
|
2
|
+
- Updated the AWS S3 patterns
|
3
|
+
- Added patterns for rails 3
|
4
|
+
- Added patterns for haproxy
|
5
|
+
- Added patterns for bro http.log
|
6
|
+
- Added shorewall patterns
|
7
|
+
# 0.2.0
|
8
|
+
- Added patterns for S3 and ELB access logs amazon services
|
9
|
+
# 0.1.12
|
10
|
+
- add some missing Cisco ASA firewall system log patterns
|
11
|
+
- fix cisco firewall policy_id regex for policies with '-' in the name
|
12
|
+
# 0.1.11
|
13
|
+
- Added Catalina and Tomcat patterns
|
14
|
+
- Added German month names
|
data/CONTRIBUTORS
CHANGED
data/NOTICE.TXT
ADDED
data/README.md
CHANGED
@@ -13,7 +13,7 @@ Logstash provides infrastructure to automatically generate documentation for thi
|
|
13
13
|
|
14
14
|
## Need Help?
|
15
15
|
|
16
|
-
Need help? Try #logstash on freenode IRC or the logstash-users@googlegroups.com mailing list.
|
16
|
+
Need help? Try #logstash on freenode IRC or the https://discuss.elastic.co/c/logstash discussion forum.
|
17
17
|
|
18
18
|
## Developing
|
19
19
|
|
@@ -1,7 +1,7 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
|
3
3
|
s.name = 'logstash-patterns-core'
|
4
|
-
s.version = '0.1.10'
|
4
|
+
s.version = '0.3.0'
|
5
5
|
s.licenses = ['Apache License (2.0)']
|
6
6
|
s.summary = "Patterns to be used in logstash"
|
7
7
|
s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
|
@@ -24,5 +24,6 @@ Gem::Specification.new do |s|
|
|
24
24
|
|
25
25
|
s.add_development_dependency 'logstash-filter-grok'
|
26
26
|
s.add_development_dependency 'logstash-devutils'
|
27
|
+
s.add_development_dependency 'logstash-filter-grok'
|
27
28
|
end
|
28
29
|
|
data/patterns/aws
ADDED
@@ -0,0 +1,11 @@
|
|
1
|
+
S3_REQUEST_LINE (?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})
|
2
|
+
|
3
|
+
S3_ACCESS_LOG %{WORD:owner} %{NOTSPACE:bucket} \[%{HTTPDATE:timestamp}\] %{IP:clientip} %{NOTSPACE:requester} %{NOTSPACE:request_id} %{NOTSPACE:operation} %{NOTSPACE:key} (?:"%{S3_REQUEST_LINE}"|-) (?:%{INT:response:int}|-) (?:-|%{NOTSPACE:error_code}) (?:%{INT:bytes:int}|-) (?:%{INT:object_size:int}|-) (?:%{INT:request_time_ms:int}|-) (?:%{INT:turnaround_time_ms:int}|-) (?:%{QS:referrer}|-) (?:"?%{QS:agent}"?|-) (?:-|%{NOTSPACE:version_id})
|
4
|
+
|
5
|
+
ELB_URIPATHPARAM %{URIPATH:path}(?:%{URIPARAM:params})?
|
6
|
+
|
7
|
+
ELB_URI %{URIPROTO:proto}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST:urihost})?(?:%{ELB_URIPATHPARAM})?
|
8
|
+
|
9
|
+
ELB_REQUEST_LINE (?:%{WORD:verb} %{ELB_URI:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})
|
10
|
+
|
11
|
+
ELB_ACCESS_LOG %{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:elb} %{IP:clientip}:%{INT:clientport:int} (?:(%{IP:backendip}:?:%{INT:backendport:int})|-) %{NUMBER:request_processing_time:float} %{NUMBER:backend_processing_time:float} %{NUMBER:response_processing_time:float} %{INT:response:int} %{INT:backend_response:int} %{INT:received_bytes:int} %{INT:bytes:int} "%{ELB_REQUEST_LINE}"
|
data/patterns/bro
ADDED
@@ -0,0 +1,13 @@
|
|
1
|
+
# https://www.bro.org/sphinx/script-reference/log-files.html
|
2
|
+
|
3
|
+
# http.log
|
4
|
+
BRO_HTTP %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{INT:trans_depth}\t%{GREEDYDATA:method}\t%{GREEDYDATA:domain}\t%{GREEDYDATA:uri}\t%{GREEDYDATA:referrer}\t%{GREEDYDATA:user_agent}\t%{NUMBER:request_body_len}\t%{NUMBER:response_body_len}\t%{GREEDYDATA:status_code}\t%{GREEDYDATA:status_msg}\t%{GREEDYDATA:info_code}\t%{GREEDYDATA:info_msg}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:bro_tags}\t%{GREEDYDATA:username}\t%{GREEDYDATA:password}\t%{GREEDYDATA:proxied}\t%{GREEDYDATA:orig_fuids}\t%{GREEDYDATA:orig_mime_types}\t%{GREEDYDATA:resp_fuids}\t%{GREEDYDATA:resp_mime_types}
|
5
|
+
|
6
|
+
# dns.log
|
7
|
+
BRO_DNS %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{INT:trans_id}\t%{GREEDYDATA:query}\t%{GREEDYDATA:qclass}\t%{GREEDYDATA:qclass_name}\t%{GREEDYDATA:qtype}\t%{GREEDYDATA:qtype_name}\t%{GREEDYDATA:rcode}\t%{GREEDYDATA:rcode_name}\t%{GREEDYDATA:AA}\t%{GREEDYDATA:TC}\t%{GREEDYDATA:RD}\t%{GREEDYDATA:RA}\t%{GREEDYDATA:Z}\t%{GREEDYDATA:answers}\t%{GREEDYDATA:TTLs}\t%{GREEDYDATA:rejected}
|
8
|
+
|
9
|
+
# conn.log
|
10
|
+
BRO_CONN %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{GREEDYDATA:service}\t%{NUMBER:duration}\t%{NUMBER:orig_bytes}\t%{NUMBER:resp_bytes}\t%{GREEDYDATA:conn_state}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:missed_bytes}\t%{GREEDYDATA:history}\t%{GREEDYDATA:orig_pkts}\t%{GREEDYDATA:orig_ip_bytes}\t%{GREEDYDATA:resp_pkts}\t%{GREEDYDATA:resp_ip_bytes}\t%{GREEDYDATA:tunnel_parents}
|
11
|
+
|
12
|
+
# files.log
|
13
|
+
BRO_FILES %{NUMBER:ts}\t%{NOTSPACE:fuid}\t%{IP:tx_hosts}\t%{IP:rx_hosts}\t%{NOTSPACE:conn_uids}\t%{GREEDYDATA:source}\t%{GREEDYDATA:depth}\t%{GREEDYDATA:analyzers}\t%{GREEDYDATA:mime_type}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:duration}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:is_orig}\t%{GREEDYDATA:seen_bytes}\t%{GREEDYDATA:total_bytes}\t%{GREEDYDATA:missing_bytes}\t%{GREEDYDATA:overflow_bytes}\t%{GREEDYDATA:timedout}\t%{GREEDYDATA:parent_fuid}\t%{GREEDYDATA:md5}\t%{GREEDYDATA:sha1}\t%{GREEDYDATA:sha256}\t%{GREEDYDATA:extracted}
|
data/patterns/firewalls
CHANGED
@@ -11,6 +11,24 @@ CISCO_REASON Duplicate TCP SYN|Failed to locate egress interface|Invalid transpo
|
|
11
11
|
CISCO_DIRECTION Inbound|inbound|Outbound|outbound
|
12
12
|
CISCO_INTERVAL first hit|%{INT}-second interval
|
13
13
|
CISCO_XLATE_TYPE static|dynamic
|
14
|
+
# ASA-1-104001
|
15
|
+
CISCOFW104001 \((?:Primary|Secondary)\) Switching to ACTIVE - %{GREEDYDATA:switch_reason}
|
16
|
+
# ASA-1-104002
|
17
|
+
CISCOFW104002 \((?:Primary|Secondary)\) Switching to STANDBY - %{GREEDYDATA:switch_reason}
|
18
|
+
# ASA-1-104003
|
19
|
+
CISCOFW104003 \((?:Primary|Secondary)\) Switching to FAILED\.
|
20
|
+
# ASA-1-104004
|
21
|
+
CISCOFW104004 \((?:Primary|Secondary)\) Switching to OK\.
|
22
|
+
# ASA-1-105003
|
23
|
+
CISCOFW105003 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{GREEDYDATA:interface_name} waiting
|
24
|
+
# ASA-1-105004
|
25
|
+
CISCOFW105004 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{GREEDYDATA:interface_name} normal
|
26
|
+
# ASA-1-105005
|
27
|
+
CISCOFW105005 \((?:Primary|Secondary)\) Lost Failover communications with mate on [Ii]nterface %{GREEDYDATA:interface_name}
|
28
|
+
# ASA-1-105008
|
29
|
+
CISCOFW105008 \((?:Primary|Secondary)\) Testing [Ii]nterface %{GREEDYDATA:interface_name}
|
30
|
+
# ASA-1-105009
|
31
|
+
CISCOFW105009 \((?:Primary|Secondary)\) Testing on [Ii]nterface %{GREEDYDATA:interface_name} (?:Passed|Failed)
|
14
32
|
# ASA-2-106001
|
15
33
|
CISCOFW106001 %{CISCO_DIRECTION:direction} %{WORD:protocol} connection %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{GREEDYDATA:tcp_flags} on interface %{GREEDYDATA:interface}
|
16
34
|
# ASA-2-106006, ASA-2-106007, ASA-2-106010
|
@@ -22,9 +40,9 @@ CISCOFW106015 %{CISCO_ACTION:action} %{WORD:protocol} \(%{DATA:policy_id}\) from
|
|
22
40
|
# ASA-1-106021
|
23
41
|
CISCOFW106021 %{CISCO_ACTION:action} %{WORD:protocol} reverse path check from %{IP:src_ip} to %{IP:dst_ip} on interface %{GREEDYDATA:interface}
|
24
42
|
# ASA-4-106023
|
25
|
-
CISCOFW106023 %{CISCO_ACTION:action} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(/%{INT:dst_port})?(\(%{DATA:dst_fwuser}\))?( \(type %{INT:icmp_type}, code %{INT:icmp_code}\))? by access-group %{WORD:policy_id} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
|
43
|
+
CISCOFW106023 %{CISCO_ACTION:action} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(/%{INT:dst_port})?(\(%{DATA:dst_fwuser}\))?( \(type %{INT:icmp_type}, code %{INT:icmp_code}\))? by access-group %{NOTSPACE:policy_id} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
|
26
44
|
# ASA-5-106100
|
27
|
-
CISCOFW106100 access-list %{WORD:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\)(\(%{DATA:src_fwuser}\))? -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\)(\(%{DATA:src_fwuser}\))? hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
|
45
|
+
CISCOFW106100 access-list %{NOTSPACE:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\)(\(%{DATA:src_fwuser}\))? -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\)(\(%{DATA:src_fwuser}\))? hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
|
28
46
|
# ASA-6-110002
|
29
47
|
CISCOFW110002 %{CISCO_REASON:reason} for %{WORD:protocol} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port}
|
30
48
|
# ASA-6-302010
|
@@ -39,6 +57,8 @@ CISCOFW305011 %{CISCO_ACTION:action} %{CISCO_XLATE_TYPE:xlate_type} %{WORD:proto
|
|
39
57
|
CISCOFW313001_313004_313008 %{CISCO_ACTION:action} %{WORD:protocol} type=%{INT:icmp_type}, code=%{INT:icmp_code} from %{IP:src_ip} on interface %{DATA:interface}( to %{IP:dst_ip})?
|
40
58
|
# ASA-4-313005
|
41
59
|
CISCOFW313005 %{CISCO_REASON:reason} for %{WORD:protocol} error message: %{WORD:err_protocol} src %{DATA:err_src_interface}:%{IP:err_src_ip}(\(%{DATA:err_src_fwuser}\))? dst %{DATA:err_dst_interface}:%{IP:err_dst_ip}(\(%{DATA:err_dst_fwuser}\))? \(type %{INT:err_icmp_type}, code %{INT:err_icmp_code}\) on %{DATA:interface} interface\. Original IP payload: %{WORD:protocol} src %{IP:orig_src_ip}/%{INT:orig_src_port}(\(%{DATA:orig_src_fwuser}\))? dst %{IP:orig_dst_ip}/%{INT:orig_dst_port}(\(%{DATA:orig_dst_fwuser}\))?
|
60
|
+
# ASA-5-321001
|
61
|
+
CISCOFW321001 Resource '%{WORD:resource_name}' limit of %{POSINT:resource_limit} reached for system
|
42
62
|
# ASA-4-402117
|
43
63
|
CISCOFW402117 %{WORD:protocol}: Received a non-IPSec packet \(protocol= %{WORD:orig_protocol}\) from %{IP:src_ip} to %{IP:dst_ip}
|
44
64
|
# ASA-4-402119
|
@@ -58,3 +78,7 @@ CISCOFW713172 Group = %{GREEDYDATA:group}, IP = %{IP:src_ip}, Automatic NAT Dete
|
|
58
78
|
# ASA-4-733100
|
59
79
|
CISCOFW733100 \[\s*%{DATA:drop_type}\s*\] drop %{DATA:drop_rate_id} exceeded. Current burst rate is %{INT:drop_rate_current_burst} per second, max configured rate is %{INT:drop_rate_max_burst}; Current average rate is %{INT:drop_rate_current_avg} per second, max configured rate is %{INT:drop_rate_max_avg}; Cumulative total count is %{INT:drop_total_count}
|
60
80
|
#== End Cisco ASA ==
|
81
|
+
|
82
|
+
# Shorewall firewall logs
|
83
|
+
SHOREWALL (%{SYSLOGTIMESTAMP:timestamp}) (%{WORD:nf_host}) kernel:.*Shorewall:(%{WORD:nf_action1})?:(%{WORD:nf_action2})?.*IN=(%{USERNAME:nf_in_interface})?.*(OUT= *MAC=(%{COMMONMAC:nf_dst_mac}):(%{COMMONMAC:nf_src_mac})?|OUT=%{USERNAME:nf_out_interface}).*SRC=(%{IPV4:nf_src_ip}).*DST=(%{IPV4:nf_dst_ip}).*LEN=(%{WORD:nf_len}).?*TOS=(%{WORD:nf_tos}).?*PREC=(%{WORD:nf_prec}).?*TTL=(%{INT:nf_ttl}).?*ID=(%{INT:nf_id}).?*PROTO=(%{WORD:nf_protocol}).?*SPT=(%{INT:nf_src_port}?.*DPT=%{INT:nf_dst_port}?.*)
|
84
|
+
#== End Shorewall
|
data/patterns/grok-patterns
CHANGED
@@ -45,7 +45,7 @@ URIPATHPARAM %{URIPATH}(?:%{URIPARAM})?
|
|
45
45
|
URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATHPARAM})?
|
46
46
|
|
47
47
|
# Months: January, Feb, 3, 03, 12, December
|
48
|
-
MONTH \b(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\b
|
48
|
+
MONTH \b(?:Jan(?:uary|uar)?|Feb(?:ruary|ruar)?|M(?:a|ä)?r(?:ch|z)?|Apr(?:il)?|Ma(?:y|i)?|Jun(?:e|i)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|O(?:c|k)?t(?:ober)?|Nov(?:ember)?|De(?:c|z)(?:ember)?)\b
|
49
49
|
MONTHNUM (?:0?[1-9]|1[0-2])
|
50
50
|
MONTHNUM2 (?:0[1-9]|1[0-2])
|
51
51
|
MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])
|
@@ -76,7 +76,7 @@ DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND}
|
|
76
76
|
|
77
77
|
# Syslog Dates: Month Day HH:MM:SS
|
78
78
|
SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME}
|
79
|
-
PROG (?:[\w._/%-]+)
|
79
|
+
PROG [\x21-\x5a\x5c\x5e-\x7e]+
|
80
80
|
SYSLOGPROG %{PROG:program}(?:\[%{POSINT:pid}\])?
|
81
81
|
SYSLOGHOST %{IPORHOST}
|
82
82
|
SYSLOGFACILITY <%{NONNEGINT:facility}.%{NONNEGINT:priority}>
|
data/patterns/haproxy
CHANGED
@@ -31,7 +31,9 @@ HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:captured_response_headers}
|
|
31
31
|
# HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:response_header_content_type}\|%{DATA:response_header_content_encoding}\|%{DATA:response_header_cache_control}\|%{DATA:response_header_last_modified}
|
32
32
|
|
33
33
|
# parse a haproxy 'httplog' line
|
34
|
-
|
34
|
+
HAPROXYHTTPBASE %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_request}/%{INT:time_queue}/%{INT:time_backend_connect}/%{INT:time_backend_response}/%{NOTSPACE:time_duration} %{INT:http_status_code} %{NOTSPACE:bytes_read} %{DATA:captured_request_cookie} %{DATA:captured_response_cookie} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue} (\{%{HAPROXYCAPTUREDREQUESTHEADERS}\})?( )?(\{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?( )?"(<BADREQ>|(%{WORD:http_verb} (%{URIPROTO:http_proto}://)?(?:%{USER:http_user}(?::[^@]*)?@)?(?:%{URIHOST:http_host})?(?:%{URIPATHPARAM:http_request})?( HTTP/%{NUMBER:http_version})?))?"
|
35
|
+
|
36
|
+
HAPROXYHTTP %{SYSLOGTIMESTAMP:syslog_timestamp} %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{HAPROXYHTTPBASE}
|
35
37
|
|
36
38
|
# parse a haproxy 'tcplog' line
|
37
39
|
HAPROXYTCP %{SYSLOGTIMESTAMP:syslog_timestamp} %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_queue}/%{INT:time_backend_connect}/%{NOTSPACE:time_duration} %{NOTSPACE:bytes_read} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue}
|
data/patterns/java
CHANGED
@@ -5,3 +5,16 @@ JAVAFILE (?:[A-Za-z0-9_. -]+)
|
|
5
5
|
JAVAMETHOD (?:(<init>)|[a-zA-Z$_][a-zA-Z$_0-9]*)
|
6
6
|
#Line number is optional in special cases 'Native method' or 'Unknown source'
|
7
7
|
JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:class}\.%{JAVAMETHOD:method}\(%{JAVAFILE:file}(?::%{NUMBER:line})?\)
|
8
|
+
# Java Logs
|
9
|
+
JAVATHREAD (?:[A-Z]{2}-Processor[\d]+)
|
10
|
+
JAVACLASS (?:[a-zA-Z0-9-]+\.)+[A-Za-z0-9$]+
|
11
|
+
JAVAFILE (?:[A-Za-z0-9_.-]+)
|
12
|
+
JAVASTACKTRACEPART at %{JAVACLASS:class}\.%{WORD:method}\(%{JAVAFILE:file}:%{NUMBER:line}\)
|
13
|
+
JAVALOGMESSAGE (.*)
|
14
|
+
# MMM dd, yyyy HH:mm:ss eg: Jan 9, 2014 7:13:13 AM
|
15
|
+
CATALINA_DATESTAMP %{MONTH} %{MONTHDAY}, 20%{YEAR} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) (?:AM|PM)
|
16
|
+
# yyyy-MM-dd HH:mm:ss,SSS ZZZ eg: 2014-01-09 17:32:25,527 -0800
|
17
|
+
TOMCAT_DATESTAMP 20%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) %{ISO8601_TIMEZONE}
|
18
|
+
CATALINALOG %{CATALINA_DATESTAMP:timestamp} %{JAVACLASS:class} %{JAVALOGMESSAGE:logmessage}
|
19
|
+
# 2014-01-09 20:03:28,269 -0800 | ERROR | com.example.service.ExampleService - something compeletely unexpected happened...
|
20
|
+
TOMCATLOG %{TOMCAT_DATESTAMP:timestamp} \| %{LOGLEVEL:level} \| %{JAVACLASS:class} - %{JAVALOGMESSAGE:logmessage}
|
data/patterns/rails
ADDED
@@ -0,0 +1,13 @@
|
|
1
|
+
RUUID \h{32}
|
2
|
+
# rails controller with action
|
3
|
+
RCONTROLLER (?<controller>[^#]+)#(?<action>\w+)
|
4
|
+
|
5
|
+
# this will often be the only line:
|
6
|
+
RAILS3HEAD (?m)Started %{WORD:verb} "%{URIPATHPARAM:request}" for %{IPORHOST:clientip} at (?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND} %{ISO8601_TIMEZONE})
|
7
|
+
# for some a strange reason, params are stripped of {} - not sure that's a good idea.
|
8
|
+
RPROCESSING \W*Processing by %{RCONTROLLER} as (?<format>\S+)(?:\W*Parameters: {%{DATA:params}}\W*)?
|
9
|
+
RAILS3FOOT Completed %{NUMBER:response}%{DATA} in %{NUMBER:totalms}ms %{RAILS3PROFILE}%{GREEDYDATA}
|
10
|
+
RAILS3PROFILE (?:\(Views: %{NUMBER:viewms}ms \| ActiveRecord: %{NUMBER:activerecordms}ms|\(ActiveRecord: %{NUMBER:activerecordms}ms)?
|
11
|
+
|
12
|
+
# putting it all together
|
13
|
+
RAILS3 %{RAILS3HEAD}(?:%{RPROCESSING})?(?<context>(?:%{DATA}\n)*)(?:%{RAILS3FOOT})?
|
@@ -0,0 +1,126 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "spec_helper"
|
3
|
+
require "logstash/patterns/core"
|
4
|
+
|
5
|
+
describe "HTTP" do
|
6
|
+
|
7
|
+
let(:value) { "1432555199.633017 COpk6E3vkURP8QQNKl 192.168.9.35 55281 178.236.7.146 80 4 POST www.amazon.it /xa/dealcontent/v2/GetDeals?nocache=1432555199326 http://www.amazon.it/ Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 223 1859 200 OK - - - (empty) - - - FrLEcY3AUPKdcYGf29 text/plain FOJpbGzIMh9syPxH8 text/plain" }
|
8
|
+
let(:grok) { grok_match(subject, value) }
|
9
|
+
|
10
|
+
it "a pattern pass the grok expression" do
|
11
|
+
expect(grok).to pass
|
12
|
+
end
|
13
|
+
|
14
|
+
it "matches a simple message" do
|
15
|
+
expect(subject).to match(value)
|
16
|
+
end
|
17
|
+
|
18
|
+
it "generates the ts field" do
|
19
|
+
expect(grok).to include("ts" => "1432555199.633017")
|
20
|
+
end
|
21
|
+
|
22
|
+
it "generates the uid field" do
|
23
|
+
expect(grok).to include("uid" => "COpk6E3vkURP8QQNKl")
|
24
|
+
end
|
25
|
+
|
26
|
+
it "generates the orig_h field" do
|
27
|
+
expect(grok).to include("orig_h" => "192.168.9.35")
|
28
|
+
end
|
29
|
+
|
30
|
+
it "generates the orig_p field" do
|
31
|
+
expect(grok).to include("orig_p" => "55281")
|
32
|
+
end
|
33
|
+
|
34
|
+
it "generates the resp_h field" do
|
35
|
+
expect(grok).to include("resp_h" => "178.236.7.146")
|
36
|
+
end
|
37
|
+
|
38
|
+
it "generates the resp_p field" do
|
39
|
+
expect(grok).to include("resp_p" => "80")
|
40
|
+
end
|
41
|
+
|
42
|
+
it "generates the trans_depth field" do
|
43
|
+
expect(grok).to include("trans_depth" => "4")
|
44
|
+
end
|
45
|
+
|
46
|
+
it "generates the method field" do
|
47
|
+
expect(grok).to include("method" => "POST")
|
48
|
+
end
|
49
|
+
|
50
|
+
it "generates the domain field" do
|
51
|
+
expect(grok).to include("domain" => "www.amazon.it")
|
52
|
+
end
|
53
|
+
|
54
|
+
it "generates the uri field" do
|
55
|
+
expect(grok).to include("uri" => "/xa/dealcontent/v2/GetDeals?nocache=1432555199326")
|
56
|
+
end
|
57
|
+
|
58
|
+
it "generates the referrer field" do
|
59
|
+
expect(grok).to include("referrer" => "http://www.amazon.it/")
|
60
|
+
end
|
61
|
+
|
62
|
+
it "generates the user_agent field" do
|
63
|
+
expect(grok).to include("user_agent" => "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36")
|
64
|
+
end
|
65
|
+
|
66
|
+
it "generates the request_body_len field" do
|
67
|
+
expect(grok).to include("request_body_len" => "223")
|
68
|
+
end
|
69
|
+
|
70
|
+
it "generates the response_body_len field" do
|
71
|
+
expect(grok).to include("response_body_len" => "1859")
|
72
|
+
end
|
73
|
+
|
74
|
+
it "generates the status_code field" do
|
75
|
+
expect(grok).to include("status_code" => "200")
|
76
|
+
end
|
77
|
+
|
78
|
+
it "generates the status_msg field" do
|
79
|
+
expect(grok).to include("status_msg" => "OK")
|
80
|
+
end
|
81
|
+
|
82
|
+
it "generates the info_code field" do
|
83
|
+
expect(grok).to include("info_code" => "-")
|
84
|
+
end
|
85
|
+
|
86
|
+
it "generates the info_msg field" do
|
87
|
+
expect(grok).to include("info_msg" => "-")
|
88
|
+
end
|
89
|
+
|
90
|
+
it "generates the filename field" do
|
91
|
+
expect(grok).to include("filename" => "-")
|
92
|
+
end
|
93
|
+
|
94
|
+
it "generates the bro_tags field" do
|
95
|
+
expect(grok).to include("bro_tags" => "(empty)")
|
96
|
+
end
|
97
|
+
|
98
|
+
it "generates the username field" do
|
99
|
+
expect(grok).to include("username" => "-")
|
100
|
+
end
|
101
|
+
|
102
|
+
it "generates the password field" do
|
103
|
+
expect(grok).to include("password" => "-")
|
104
|
+
end
|
105
|
+
|
106
|
+
it "generates the proxied field" do
|
107
|
+
expect(grok).to include("proxied" => "-")
|
108
|
+
end
|
109
|
+
|
110
|
+
it "generates the orig_fuids field" do
|
111
|
+
expect(grok).to include("orig_fuids" => "FrLEcY3AUPKdcYGf29")
|
112
|
+
end
|
113
|
+
|
114
|
+
it "generates the orig_mime_types field" do
|
115
|
+
expect(grok).to include("orig_mime_types" => "text/plain")
|
116
|
+
end
|
117
|
+
|
118
|
+
it "generates the resp_fuids field" do
|
119
|
+
expect(grok).to include("resp_fuids" => "FOJpbGzIMh9syPxH8")
|
120
|
+
end
|
121
|
+
|
122
|
+
it "generates the resp_mime_types field" do
|
123
|
+
expect(grok).to include("resp_mime_types" => "text/plain")
|
124
|
+
end
|
125
|
+
|
126
|
+
end
|
data/spec/patterns/core_spec.rb
CHANGED
@@ -29,3 +29,46 @@ describe "COMMONAPACHELOG" do
|
|
29
29
|
end
|
30
30
|
|
31
31
|
end
|
32
|
+
|
33
|
+
describe "HTTP DATE parsing" do
|
34
|
+
|
35
|
+
context "HTTPDATE", "when having a German month" do
|
36
|
+
|
37
|
+
let(:value) { '[04/Mai/2015:13:17:15 +0200]'}
|
38
|
+
|
39
|
+
it "generates the month field" do
|
40
|
+
expect(grok_match(subject, value)).to pass
|
41
|
+
end
|
42
|
+
|
43
|
+
end
|
44
|
+
|
45
|
+
context "HTTPDATE", "when having a English month" do
|
46
|
+
|
47
|
+
let(:value) { '[04/March/2015:13:17:15 +0200]'}
|
48
|
+
|
49
|
+
it "generates the month field" do
|
50
|
+
expect(grok_match(subject, value)).to pass
|
51
|
+
end
|
52
|
+
|
53
|
+
end
|
54
|
+
|
55
|
+
context "HTTPDATE", "when having a wrong months" do
|
56
|
+
|
57
|
+
let(:value) { '[04/Map/2015:13:17:15 +0200]'}
|
58
|
+
|
59
|
+
it "generates the month field" do
|
60
|
+
expect(grok_match(subject, value)).not_to pass
|
61
|
+
end
|
62
|
+
|
63
|
+
end
|
64
|
+
|
65
|
+
end
|
66
|
+
|
67
|
+
describe "TOMCATLOG" do
|
68
|
+
|
69
|
+
let(:value) { '2014-01-09 20:03:28,269 -0800 | ERROR | com.example.service.ExampleService - something compeletely unexpected happened...'}
|
70
|
+
|
71
|
+
it "generates the logmessage field" do
|
72
|
+
expect(grok_match(subject, value)).to include("logmessage" => "something compeletely unexpected happened...")
|
73
|
+
end
|
74
|
+
end
|
@@ -0,0 +1,53 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "spec_helper"
|
3
|
+
require "logstash/patterns/core"
|
4
|
+
|
5
|
+
describe "FIREWALLS" do
|
6
|
+
|
7
|
+
|
8
|
+
let(:pattern104001) { "CISCOFW104001" }
|
9
|
+
|
10
|
+
context "parsing a 104001 message" do
|
11
|
+
|
12
|
+
let(:value) { "(Secondary) Switching to ACTIVE - Service card in other unit has failed" }
|
13
|
+
|
14
|
+
subject { grok_match(pattern104001, value) }
|
15
|
+
|
16
|
+
it { should include("switch_reason" => "Service card in other unit has failed") }
|
17
|
+
|
18
|
+
it "generates a message field" do
|
19
|
+
expect(subject["message"]).to include("(Secondary) Switching to ACTIVE - Service card in other unit has failed")
|
20
|
+
end
|
21
|
+
end
|
22
|
+
|
23
|
+
let(:pattern106100) { "CISCOFW106100" }
|
24
|
+
|
25
|
+
context "parsing a 106100 message" do
|
26
|
+
|
27
|
+
let(:value) { "access-list inside permitted tcp inside/10.10.123.45(51763) -> outside/192.168.67.89(80) hit-cnt 1 first hit [0x62c4905, 0x0]" }
|
28
|
+
|
29
|
+
subject { grok_match(pattern106100, value) }
|
30
|
+
|
31
|
+
it { should include("policy_id" => "inside") }
|
32
|
+
|
33
|
+
it "generates a message field" do
|
34
|
+
expect(subject["message"]).to include("access-list inside permitted tcp inside/10.10.123.45(51763) -> outside/192.168.67.89(80) hit-cnt 1 first hit [0x62c4905, 0x0]")
|
35
|
+
end
|
36
|
+
end
|
37
|
+
|
38
|
+
let(:pattern106100) { "CISCOFW106100" }
|
39
|
+
|
40
|
+
context "parsing a 106100 message with hypen in acl name" do
|
41
|
+
|
42
|
+
let(:value) { "access-list outside-entry permitted tcp outside/10.11.12.13(54726) -> inside/192.168.17.18(80) hit-cnt 1 300-second interval [0x32b3835, 0x0]" }
|
43
|
+
|
44
|
+
subject { grok_match(pattern106100, value) }
|
45
|
+
|
46
|
+
it { should include("policy_id" => "outside-entry") }
|
47
|
+
|
48
|
+
it "generates a message field" do
|
49
|
+
expect(subject["message"]).to include("access-list outside-entry permitted tcp outside/10.11.12.13(54726) -> inside/192.168.17.18(80) hit-cnt 1 300-second interval [0x32b3835, 0x0]")
|
50
|
+
end
|
51
|
+
end
|
52
|
+
|
53
|
+
end
|
@@ -0,0 +1,43 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "spec_helper"
|
3
|
+
require "logstash/patterns/core"
|
4
|
+
|
5
|
+
describe "HAPROXY" do
|
6
|
+
|
7
|
+
let(:haproxyhttp_pattern) { "HAPROXYHTTP" }
|
8
|
+
|
9
|
+
context "Parsing HAPROXY log line from raw syslog line" do
|
10
|
+
|
11
|
+
let(:value) { 'Dec 9 13:01:26 localhost haproxy[28029]: 127.0.0.1:39759 [09/Dec/2013:12:59:46.633] loadbalancer default/instance8 0/51536/1/48082/99627 200 83285 - - ---- 87/87/87/1/0 0/67 {77.24.148.74} "GET /path/to/image HTTP/1.1"' }
|
12
|
+
subject { grok_match(haproxyhttp_pattern, value) }
|
13
|
+
|
14
|
+
it { should include("program" => "haproxy") }
|
15
|
+
it { should include("client_ip" => "127.0.0.1") }
|
16
|
+
it { should include("http_verb" => "GET") }
|
17
|
+
it { should include("server_name" => "instance8") }
|
18
|
+
|
19
|
+
it "generates a message field" do
|
20
|
+
expect(subject["message"]).to include("loadbalancer default/instance8")
|
21
|
+
end
|
22
|
+
|
23
|
+
end
|
24
|
+
|
25
|
+
let(:haproxyhttpbase_pattern) { "HAPROXYHTTPBASE" }
|
26
|
+
|
27
|
+
context "Parsing HAPROXY log line without syslog specific enteries. This mimics an event coming from a syslog input." do
|
28
|
+
|
29
|
+
let(:value) { '127.0.0.1:39759 [09/Dec/2013:12:59:46.633] loadbalancer default/instance8 0/51536/1/48082/99627 200 83285 - - ---- 87/87/87/1/0 0/67 {77.24.148.74} "GET /path/to/image HTTP/1.1"' }
|
30
|
+
subject { grok_match(haproxyhttpbase_pattern, value) }
|
31
|
+
|
32
|
+
# Assume 'program' would be matched by the syslog input.
|
33
|
+
it { should include("client_ip" => "127.0.0.1") }
|
34
|
+
it { should include("http_verb" => "GET") }
|
35
|
+
it { should include("server_name" => "instance8") }
|
36
|
+
|
37
|
+
it "generates a message field" do
|
38
|
+
expect(subject["message"]).to include("loadbalancer default/instance8")
|
39
|
+
end
|
40
|
+
|
41
|
+
end
|
42
|
+
|
43
|
+
end
|
@@ -0,0 +1,56 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "spec_helper"
|
3
|
+
require "logstash/patterns/core"
|
4
|
+
|
5
|
+
describe "RAILS" do
|
6
|
+
let(:rails3_pattern) { "RAILS3" }
|
7
|
+
|
8
|
+
context "Parsing RAILS3 single-line log from raw log file" do
|
9
|
+
|
10
|
+
let(:value) { 'Started POST "/api/v3/internal/allowed" for 127.0.0.1 at 2015-08-05 11:37:01 +0200' }
|
11
|
+
|
12
|
+
subject { grok_match(rails3_pattern, value) }
|
13
|
+
|
14
|
+
# Started
|
15
|
+
it { should include("verb" => "POST" ) }
|
16
|
+
it { should include("request" => "/api/v3/internal/allowed" ) }
|
17
|
+
# for
|
18
|
+
it { should include("clientip" => "127.0.0.1" ) }
|
19
|
+
# at
|
20
|
+
it { should include("timestamp" => "2015-08-05 11:37:01 +0200" ) }
|
21
|
+
end
|
22
|
+
|
23
|
+
context "Parsing RAILS3 multi-line log from raw log file" do
|
24
|
+
|
25
|
+
let(:value) { 'Started GET "/puppet/postfix/notes?target_id=162&target_type=issue&last_fetched_at=1438695732" for 127.0.0.1 at 2015-08-05 07:40:22 +0200
|
26
|
+
Processing by Projects::NotesController#index as JSON
|
27
|
+
Parameters: {"target_id"=>"162", "target_type"=>"issue", "last_fetched_at"=>"1438695732", "namespace_id"=>"puppet", "project_id"=>"postfix"}
|
28
|
+
Completed 200 OK in 640ms (Views: 1.7ms | ActiveRecord: 91.0ms)' }
|
29
|
+
subject { grok_match(rails3_pattern, value) }
|
30
|
+
|
31
|
+
# started
|
32
|
+
it { should include("verb" => "GET" ) }
|
33
|
+
it { should include("request" => "/puppet/postfix/notes?target_id=162&target_type=issue&last_fetched_at=1438695732" ) }
|
34
|
+
# for
|
35
|
+
it { should include("clientip" => "127.0.0.1" ) }
|
36
|
+
# at
|
37
|
+
it { should include("timestamp" => "2015-08-05 07:40:22 +0200" ) }
|
38
|
+
# Processing by
|
39
|
+
it { should include("controller" => "Projects::NotesController" ) }
|
40
|
+
it { should include("action" => "index" ) }
|
41
|
+
# as
|
42
|
+
it { should include("format" => "JSON" ) }
|
43
|
+
# Parameters
|
44
|
+
it { should include("params" => '"target_id"=>"162", "target_type"=>"issue", "last_fetched_at"=>"1438695732", "namespace_id"=>"puppet", "project_id"=>"postfix"' ) }
|
45
|
+
# Completed
|
46
|
+
it { should include("response" => "200" ) }
|
47
|
+
# in
|
48
|
+
it { should include("totalms" => "640" ) }
|
49
|
+
# (Views:
|
50
|
+
it { should include("viewms" => "1.7" ) }
|
51
|
+
# | ActiveRecord:
|
52
|
+
it { should include("activerecordms" => "91.0" ) }
|
53
|
+
|
54
|
+
end
|
55
|
+
|
56
|
+
end
|
@@ -0,0 +1,132 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "spec_helper"
|
3
|
+
require "logstash/patterns/core"
|
4
|
+
|
5
|
+
|
6
|
+
describe "ELB_ACCESS_LOG" do
|
7
|
+
|
8
|
+
let(:pattern) { "ELB_ACCESS_LOG" }
|
9
|
+
|
10
|
+
context "parsing an access log" do
|
11
|
+
|
12
|
+
let(:value) { "2014-02-15T23:39:43.945958Z my-test-loadbalancer 192.168.131.39:2817 10.0.0.1:80 0.000073 0.001048 0.000057 200 200 0 29 \"GET http://www.example.com:80/ HTTP/1.1\"" }
|
13
|
+
|
14
|
+
subject { grok_match(pattern, value) }
|
15
|
+
|
16
|
+
it { should include("timestamp" => "2014-02-15T23:39:43.945958Z" ) }
|
17
|
+
it { should include("elb" => "my-test-loadbalancer" ) }
|
18
|
+
it { should include("clientip" => "192.168.131.39" ) }
|
19
|
+
it { should include("clientport" => 2817 ) }
|
20
|
+
it { should include("backendip" => "10.0.0.1" ) }
|
21
|
+
it { should include("backendport" => 80 ) }
|
22
|
+
it { should include("request_processing_time" => 0.000073 ) }
|
23
|
+
it { should include("backend_processing_time" => 0.001048 ) }
|
24
|
+
it { should include("response_processing_time" => 0.000057 ) }
|
25
|
+
it { should include("response" => 200 ) }
|
26
|
+
it { should include("backend_response" => 200 ) }
|
27
|
+
it { should include("received_bytes" => 0 ) }
|
28
|
+
it { should include("bytes" => 29 ) }
|
29
|
+
it { should include("verb" => "GET" ) }
|
30
|
+
it { should include("request" => "http://www.example.com:80/" ) }
|
31
|
+
it { should include("proto" => "http" ) }
|
32
|
+
it { should include("httpversion" => "1.1" ) }
|
33
|
+
it { should include("urihost" => "www.example.com:80" ) }
|
34
|
+
it { should include("path" => "/" ) }
|
35
|
+
|
36
|
+
["tags", "params"].each do |attribute|
|
37
|
+
it "have #{attribute} as nil" do
|
38
|
+
expect(subject[attribute]).to be_nil
|
39
|
+
end
|
40
|
+
end
|
41
|
+
end
|
42
|
+
|
43
|
+
context "parsing a PUT request access log with missing backend info" do
|
44
|
+
|
45
|
+
let(:value) { '2015-04-10T08:11:09.865823Z us-west-1-production-media 49.150.87.133:55128 - -1 -1 -1 408 0 1294336 0 "PUT https://media.xxxyyyzzz.com:443/videos/F4_M-T4X0MM6Hvy1PFHesw HTTP/1.1"' }
|
46
|
+
|
47
|
+
subject { grok_match(pattern, value) }
|
48
|
+
|
49
|
+
it "a pattern pass the grok expression" do
|
50
|
+
expect(subject).to pass
|
51
|
+
end
|
52
|
+
|
53
|
+
["backendip", "backendport"].each do |attribute|
|
54
|
+
it "have #{attribute} as nil" do
|
55
|
+
expect(subject[attribute]).to be_nil
|
56
|
+
end
|
57
|
+
end
|
58
|
+
end
|
59
|
+
end
|
60
|
+
|
61
|
+
describe "S3_ACCESS_LOG" do
|
62
|
+
|
63
|
+
let(:pattern) { "S3_ACCESS_LOG" }
|
64
|
+
|
65
|
+
context "parsing GET.VERSIONING message" do
|
66
|
+
|
67
|
+
let(:value) { "79a5 mybucket [06/Feb/2014:00:00:38 +0000] 192.0.2.3 79a5 3E57427F3EXAMPLE REST.GET.VERSIONING - \"GET /mybucket?versioning HTTP/1.1\" 200 - 113 - 7 - \"-\" \"S3Console/0.4\" -" }
|
68
|
+
|
69
|
+
subject { grok_match(pattern, value) }
|
70
|
+
|
71
|
+
it { should include("owner" => "79a5" ) }
|
72
|
+
it { should include("bucket" => "mybucket" ) }
|
73
|
+
it { should include("timestamp" => "06/Feb/2014:00:00:38 +0000" ) }
|
74
|
+
it { should include("clientip" => "192.0.2.3" ) }
|
75
|
+
it { should include("requester" => "79a5" ) }
|
76
|
+
it { should include("request_id" => "3E57427F3EXAMPLE" ) }
|
77
|
+
it { should include("operation" => "REST.GET.VERSIONING" ) }
|
78
|
+
it { should include("key" => "-" ) }
|
79
|
+
|
80
|
+
it { should include("verb" => "GET" ) }
|
81
|
+
it { should include("request" => "/mybucket?versioning" ) }
|
82
|
+
it { should include("httpversion" => "1.1" ) }
|
83
|
+
it { should include("response" => 200 ) }
|
84
|
+
it { should include("bytes" => 113 ) }
|
85
|
+
|
86
|
+
it { should include("request_time_ms" => 7 ) }
|
87
|
+
it { should include("referrer" => "\"-\"" ) }
|
88
|
+
it { should include("agent" => "\"S3Console/0.4\"" ) }
|
89
|
+
|
90
|
+
|
91
|
+
["tags", "error_code", "object_size", "turnaround_time_ms", "version_id"].each do |attribute|
|
92
|
+
it "have #{attribute} as nil" do
|
93
|
+
expect(subject[attribute]).to be_nil
|
94
|
+
end
|
95
|
+
end
|
96
|
+
|
97
|
+
end
|
98
|
+
|
99
|
+
context "parsing a GET.OBJECT message" do
|
100
|
+
|
101
|
+
let(:value) { "79a5 mybucket [12/May/2014:07:54:01 +0000] 10.0.1.2 - 7ACC4BE89EXAMPLE REST.GET.OBJECT foo/bar.html \"GET /foo/bar.html HTTP/1.1\" 304 - - 1718 10 - \"-\" \"Mozilla/5.0\" -" }
|
102
|
+
|
103
|
+
subject { grok_match(pattern, value) }
|
104
|
+
|
105
|
+
it { should include("owner" => "79a5" ) }
|
106
|
+
it { should include("bucket" => "mybucket" ) }
|
107
|
+
it { should include("timestamp" => "12/May/2014:07:54:01 +0000" ) }
|
108
|
+
it { should include("clientip" => "10.0.1.2" ) }
|
109
|
+
it { should include("requester" => "-" ) }
|
110
|
+
it { should include("request_id" => "7ACC4BE89EXAMPLE" ) }
|
111
|
+
it { should include("operation" => "REST.GET.OBJECT" ) }
|
112
|
+
it { should include("key" => "foo/bar.html" ) }
|
113
|
+
|
114
|
+
it { should include("verb" => "GET" ) }
|
115
|
+
it { should include("request" => "/foo/bar.html" ) }
|
116
|
+
it { should include("httpversion" => "1.1" ) }
|
117
|
+
it { should include("response" => 304 ) }
|
118
|
+
it { should include("object_size" => 1718 ) }
|
119
|
+
|
120
|
+
it { should include("request_time_ms" => 10 ) }
|
121
|
+
it { should include("referrer" => "\"-\"" ) }
|
122
|
+
it { should include("agent" => "\"Mozilla/5.0\"" ) }
|
123
|
+
|
124
|
+
|
125
|
+
["tags", "error_code", "turnaround_time_ms", "version_id", "bytes"].each do |attribute|
|
126
|
+
it "have #{attribute} as nil" do
|
127
|
+
expect(subject[attribute]).to be_nil
|
128
|
+
end
|
129
|
+
end
|
130
|
+
|
131
|
+
end
|
132
|
+
end
|
@@ -0,0 +1,90 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "spec_helper"
|
3
|
+
require "logstash/patterns/core"
|
4
|
+
|
5
|
+
describe "SHOREWALL" do
|
6
|
+
|
7
|
+
let(:pattern) { "SHOREWALL" }
|
8
|
+
|
9
|
+
context "parsing a message with OUT interface" do
|
10
|
+
|
11
|
+
let(:value) { "May 28 17:23:25 myHost kernel: [3124658.791874] Shorewall:FORWARD:REJECT:IN=eth2 OUT=eth2 SRC=1.2.3.4 DST=1.2.3.4 LEN=141 TOS=0x00 PREC=0x00 TTL=63 ID=55251 PROTO=UDP SPT=5353 DPT=5353 LEN=121" }
|
12
|
+
|
13
|
+
subject { grok_match(pattern, value) }
|
14
|
+
|
15
|
+
it { should include("timestamp" => "May 28 17:23:25") }
|
16
|
+
|
17
|
+
it { should include("nf_host" => "myHost") }
|
18
|
+
|
19
|
+
it { should include("nf_action1" => "FORWARD") }
|
20
|
+
|
21
|
+
it { should include("nf_action2" => "REJECT") }
|
22
|
+
|
23
|
+
it { should include("nf_in_interface" => "eth2") }
|
24
|
+
|
25
|
+
it { should include("nf_out_interface" => "eth2") }
|
26
|
+
|
27
|
+
it { should include("nf_src_ip" => "1.2.3.4") }
|
28
|
+
|
29
|
+
it { should include("nf_dst_ip" => "1.2.3.4") }
|
30
|
+
|
31
|
+
it { should include("nf_len" => "141") }
|
32
|
+
|
33
|
+
it { should include("nf_tos" => "0x00") }
|
34
|
+
|
35
|
+
it { should include("nf_prec" => "0x00") }
|
36
|
+
|
37
|
+
it { should include("nf_ttl" => "63") }
|
38
|
+
|
39
|
+
it { should include("nf_id" => "55251") }
|
40
|
+
|
41
|
+
it { should include("nf_protocol" => "UDP") }
|
42
|
+
|
43
|
+
it { should include("nf_src_port" => "5353") }
|
44
|
+
|
45
|
+
it { should include("nf_dst_port" => "5353") }
|
46
|
+
end
|
47
|
+
|
48
|
+
context "parsing a message without OUT interface" do
|
49
|
+
|
50
|
+
let(:value) { "May 28 17:31:07 myHost kernel: [3125121.106700] Shorewall:net2fw:DROP:IN=eth1 OUT= MAC=00:02:b3:c7:2f:77:38:72:c0:6e:92:9c:08:00 SRC=1.2.3.4 DST=1.2.3.4 LEN=60 TOS=0x00 PREC=0x00 TTL=49 ID=6480 DF PROTO=TCP SPT=59088 DPT=8080 WINDOW=2920 RES=0x00 SYN URGP=0" }
|
51
|
+
|
52
|
+
subject { grok_match(pattern, value) }
|
53
|
+
|
54
|
+
it { should include("timestamp" => "May 28 17:31:07") }
|
55
|
+
|
56
|
+
it { should include("nf_host" => "myHost") }
|
57
|
+
|
58
|
+
it { should include("nf_action1" => "net2fw") }
|
59
|
+
|
60
|
+
it { should include("nf_action2" => "DROP") }
|
61
|
+
|
62
|
+
it { should include("nf_in_interface" => "eth1") }
|
63
|
+
|
64
|
+
it { expect(subject["nf_out_interface"]).to be_nil }
|
65
|
+
|
66
|
+
it { should include("nf_dst_mac" => "00:02:b3:c7:2f:77") }
|
67
|
+
|
68
|
+
it { should include("nf_src_mac" => "38:72:c0:6e:92:9c") }
|
69
|
+
|
70
|
+
it { should include("nf_src_ip" => "1.2.3.4") }
|
71
|
+
|
72
|
+
it { should include("nf_dst_ip" => "1.2.3.4") }
|
73
|
+
|
74
|
+
it { should include("nf_len" => "60") }
|
75
|
+
|
76
|
+
it { should include("nf_tos" => "0x00") }
|
77
|
+
|
78
|
+
it { should include("nf_prec" => "0x00") }
|
79
|
+
|
80
|
+
it { should include("nf_ttl" => "49") }
|
81
|
+
|
82
|
+
it { should include("nf_id" => "6480") }
|
83
|
+
|
84
|
+
it { should include("nf_protocol" => "TCP") }
|
85
|
+
|
86
|
+
it { should include("nf_src_port" => "59088") }
|
87
|
+
|
88
|
+
it { should include("nf_dst_port" => "8080") }
|
89
|
+
end
|
90
|
+
end
|
@@ -0,0 +1,26 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "spec_helper"
|
3
|
+
require "logstash/patterns/core"
|
4
|
+
|
5
|
+
describe "SYSLOGLINE" do
|
6
|
+
|
7
|
+
it "matches a simple message with pid" do
|
8
|
+
expect(subject).to match("May 11 15:17:02 meow.soy.se CRON[10973]: pam_unix(cron:session): session opened for user root by (uid=0)")
|
9
|
+
end
|
10
|
+
|
11
|
+
it "matches prog with slash" do
|
12
|
+
expect(subject).to match("Mar 16 00:01:25 evita postfix/smtpd[1713]: connect from camomile.cloud9.net[168.100.1.3]")
|
13
|
+
end
|
14
|
+
|
15
|
+
it "matches prog from ansible" do
|
16
|
+
expect(subject).to match("May 11 15:40:51 meow.soy.se ansible-<stdin>: Invoked with filter=* fact_path=/etc/ansible/facts.d")
|
17
|
+
end
|
18
|
+
|
19
|
+
it "matches prog from RFC5424 APP-NAME" do
|
20
|
+
# https://tools.ietf.org/html/rfc5424#section-6.2.5
|
21
|
+
# https://tools.ietf.org/html/rfc5424#section-6
|
22
|
+
tag_from_rfc = ((33..126).map { |c| c.chr } - %w{[ ]}).join
|
23
|
+
expect(subject).to match("May 11 15:40:51 meow.soy.se #{tag_from_rfc}: Just some data which conforms to RFC5424")
|
24
|
+
end
|
25
|
+
|
26
|
+
end
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-patterns-core
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.
|
4
|
+
version: 0.3.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Elastic
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2015-
|
11
|
+
date: 2015-08-19 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: logstash-core
|
@@ -58,6 +58,20 @@ dependencies:
|
|
58
58
|
version: '0'
|
59
59
|
prerelease: false
|
60
60
|
type: :development
|
61
|
+
- !ruby/object:Gem::Dependency
|
62
|
+
name: logstash-filter-grok
|
63
|
+
version_requirements: !ruby/object:Gem::Requirement
|
64
|
+
requirements:
|
65
|
+
- - '>='
|
66
|
+
- !ruby/object:Gem::Version
|
67
|
+
version: '0'
|
68
|
+
requirement: !ruby/object:Gem::Requirement
|
69
|
+
requirements:
|
70
|
+
- - '>='
|
71
|
+
- !ruby/object:Gem::Version
|
72
|
+
version: '0'
|
73
|
+
prerelease: false
|
74
|
+
type: :development
|
61
75
|
description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
|
62
76
|
email: info@elastic.co
|
63
77
|
executables: []
|
@@ -65,13 +79,17 @@ extensions: []
|
|
65
79
|
extra_rdoc_files: []
|
66
80
|
files:
|
67
81
|
- .gitignore
|
82
|
+
- CHANGELOG.md
|
68
83
|
- CONTRIBUTORS
|
69
84
|
- Gemfile
|
70
85
|
- LICENSE
|
86
|
+
- NOTICE.TXT
|
71
87
|
- README.md
|
72
88
|
- Rakefile
|
73
89
|
- lib/logstash/patterns/core.rb
|
74
90
|
- logstash-patterns-core.gemspec
|
91
|
+
- patterns/aws
|
92
|
+
- patterns/bro
|
75
93
|
- patterns/firewalls
|
76
94
|
- patterns/grok-patterns
|
77
95
|
- patterns/haproxy
|
@@ -83,11 +101,19 @@ files:
|
|
83
101
|
- patterns/mongodb
|
84
102
|
- patterns/nagios
|
85
103
|
- patterns/postgresql
|
104
|
+
- patterns/rails
|
86
105
|
- patterns/redis
|
87
106
|
- patterns/ruby
|
107
|
+
- spec/patterns/bro.rb
|
88
108
|
- spec/patterns/core_spec.rb
|
109
|
+
- spec/patterns/firewalls_spec.rb
|
110
|
+
- spec/patterns/haproxy_spec.rb
|
89
111
|
- spec/patterns/mongodb_spec.rb
|
90
112
|
- spec/patterns/nagios_spec.rb
|
113
|
+
- spec/patterns/rails3_spec.rb
|
114
|
+
- spec/patterns/s3_spec.rb
|
115
|
+
- spec/patterns/shorewall_spec.rb
|
116
|
+
- spec/patterns/syslog_spec.rb
|
91
117
|
- spec/spec_helper.rb
|
92
118
|
homepage: http://www.elastic.co/guide/en/logstash/current/index.html
|
93
119
|
licenses:
|
@@ -115,7 +141,14 @@ signing_key:
|
|
115
141
|
specification_version: 4
|
116
142
|
summary: Patterns to be used in logstash
|
117
143
|
test_files:
|
144
|
+
- spec/patterns/bro.rb
|
118
145
|
- spec/patterns/core_spec.rb
|
146
|
+
- spec/patterns/firewalls_spec.rb
|
147
|
+
- spec/patterns/haproxy_spec.rb
|
119
148
|
- spec/patterns/mongodb_spec.rb
|
120
149
|
- spec/patterns/nagios_spec.rb
|
150
|
+
- spec/patterns/rails3_spec.rb
|
151
|
+
- spec/patterns/s3_spec.rb
|
152
|
+
- spec/patterns/shorewall_spec.rb
|
153
|
+
- spec/patterns/syslog_spec.rb
|
121
154
|
- spec/spec_helper.rb
|