logstash-filter-categoriser 1.0.0
- checksums.yaml +7 -0
- data/CHANGELOG.md +7 -0
- data/CONTRIBUTORS +11 -0
- data/DEVELOPER.md +2 -0
- data/Gemfile +11 -0
- data/LICENSE +13 -0
- data/NOTICE.TXT +5 -0
- data/README.md +98 -0
- data/lib/logstash/filters/categoriser.rb +80 -0
- data/lib/logstash/filters/categoriser/config.rb +153 -0
- data/lib/logstash/filters/categoriser/rules.rb +185 -0
- data/lib/logstash/filters/example.rb +48 -0
- data/logstash-filter-categoriser.gemspec +24 -0
- data/spec/filters/categoriser_rules_spec.rb +80 -0
- data/spec/filters/categoriser_spec.rb +80 -0
- data/spec/filters/example_spec.rb +20 -0
- data/spec/fixtures/invalid_json.rules.json +3 -0
- data/spec/fixtures/test.rules.json +15 -0
- data/spec/spec_helper.rb +2 -0
- metadata +114 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA256:
+  metadata.gz: b8ed1e2988fc25a200ab05ab26d11c2f27ce20a4afe757341383e36a3d33c2a2
+  data.tar.gz: 1e7e44e58d9f586024ca96fc2a69c08b1195563e15bf6e37695d79e89a1a6928
+SHA512:
+  metadata.gz: e38601bcbd3abb4be52ab6a2e4dd7ea3aea88a8fa56136e1f3d41115cbe5f5e8032f696ebf55ac8337e27ce571ff0e637b5ba641e37787f25993286d7ce9db16
+  data.tar.gz: 6e61ecac55f46662375303d5650175864a19bcbd477ec573758d5069b85480da3b8ce01392f69c8cf1e3a5b1079e6842fc8ffa8d7c43ef25f4c1574bf00b8653
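These digests cover the two archives packed inside the `.gem` file (`metadata.gz` and `data.tar.gz`). As a minimal sketch (not part of the gem), assuming those files and a decompressed `checksums.yaml` sit in the current directory after unpacking a downloaded copy of the gem, the SHA256 entries could be re-checked like this:

```ruby
# Sketch: re-check the published SHA256 digests against a locally unpacked gem.
# Assumes "checksums.yaml", "metadata.gz" and "data.tar.gz" are in the current
# directory (file locations here are an assumption, not part of the gem).
require "digest"
require "yaml"

checksums = YAML.safe_load(File.read("checksums.yaml"))

%w[metadata.gz data.tar.gz].each do |name|
  expected = checksums.dig("SHA256", name)
  actual   = Digest::SHA256.hexdigest(File.binread(name))
  puts "#{name}: #{actual == expected ? 'OK' : 'MISMATCH'}"
end
```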
data/CHANGELOG.md
ADDED
@@ -0,0 +1,7 @@
+## 3.0.2
+  - Docs: Add documentation template
+## 2.0.0
+  - Plugins were updated to follow the new shutdown semantic, this mainly allows Logstash to instruct input plugins to terminate gracefully,
+    instead of using Thread.raise on the plugins' threads. Ref: https://github.com/elastic/logstash/pull/3895
+  - Dependency on logstash-core update to 2.0
+
data/CONTRIBUTORS
ADDED
@@ -0,0 +1,11 @@
+The following is a list of people who have contributed ideas, code, bug
+reports, or in general have helped logstash along its way.
+
+Contributors:
+* Aaron Mildenstein (untergeek)
+* Pier-Hugues Pellerin (ph)
+
+Note: If you've sent us patches, bug reports, or otherwise contributed to
+Logstash, and you aren't on the list above and want to be, please let us know
+and we'll make sure you're here. Contributions from folks like you are what make
+open source awesome.
data/DEVELOPER.md
ADDED
data/Gemfile
ADDED
@@ -0,0 +1,11 @@
+source 'https://rubygems.org'
+
+gemspec
+
+logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
+use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
+
+if Dir.exist?(logstash_path) && use_logstash_source
+  gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+  gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+end
data/LICENSE
ADDED
@@ -0,0 +1,13 @@
+Copyright (c) 2012-2018 Elasticsearch <http://www.elastic.co>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
data/NOTICE.TXT
ADDED
data/README.md
ADDED
@@ -0,0 +1,98 @@
+# Logstash Plugin
+
+[![Travis Build Status](https://travis-ci.org/logstash-plugins/logstash-filter-example.svg)](https://travis-ci.org/logstash-plugins/logstash-filter-example)
+
+This is a plugin for [Logstash](https://github.com/elastic/logstash).
+
+It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way.
+
+## Documentation
+
+Logstash provides infrastructure to automatically build documentation for this plugin. We provide a template file, index.asciidoc, where you can add documentation. The contents of this file will be converted into html and then placed with other plugin documentation in a [central location](http://www.elastic.co/guide/en/logstash/current/).
+
+- For formatting config examples, you can use the asciidoc `[source,json]` directive
+- For more asciidoc formatting tips, see the excellent reference here https://github.com/elastic/docs#asciidoc-guide
+
+## Need Help?
+
+Need help? Try #logstash on freenode IRC or the https://discuss.elastic.co/c/logstash discussion forum.
+
+## Developing
+
+### 1. Plugin Development and Testing
+
+#### Code
+- To get started, you'll need JRuby with the Bundler gem installed.
+
+- Create a new plugin or clone an existing one from the GitHub [logstash-plugins](https://github.com/logstash-plugins) organization. We also provide [example plugins](https://github.com/logstash-plugins?query=example).
+
+- Install dependencies
+```sh
+bundle install
+```
+
+#### Test
+
+- Update your dependencies
+
+```sh
+bundle install
+```
+
+- Run tests
+
+```sh
+bundle exec rspec
+```
+
+### 2. Running your unpublished Plugin in Logstash
+
+#### 2.1 Run in a local Logstash clone
+
+- Edit Logstash `Gemfile` and add the local plugin path, for example:
+```ruby
+gem "logstash-filter-awesome", :path => "/your/local/logstash-filter-awesome"
+```
+- Install plugin
+```sh
+# Logstash 2.3 and higher
+bin/logstash-plugin install --no-verify
+
+# Prior to Logstash 2.3
+bin/plugin install --no-verify
+
+```
+- Run Logstash with your plugin
+```sh
+bin/logstash -e 'filter {awesome {}}'
+```
+At this point any modifications to the plugin code will be applied to this local Logstash setup. After modifying the plugin, simply rerun Logstash.
+
+#### 2.2 Run in an installed Logstash
+
+You can use the same **2.1** method to run your plugin in an installed Logstash by editing its `Gemfile` and pointing the `:path` to your local plugin development directory or you can build the gem and install it using:
+
+- Build your plugin gem
+```sh
+gem build logstash-filter-awesome.gemspec
+```
+- Install the plugin from the Logstash home
+```sh
+# Logstash 2.3 and higher
+bin/logstash-plugin install --no-verify
+
+# Prior to Logstash 2.3
+bin/plugin install --no-verify
+
+```
+- Start Logstash and proceed to test the plugin
+
+## Contributing
+
+All contributions are welcome: ideas, patches, documentation, bug reports, complaints, and even something you drew up on a napkin.
+
+Programming is not a required skill. Whatever you've seen about open source and maintainers or community members saying "send patches or die" - you will not see that here.
+
+It is more important to the community that you are able to contribute.
+
+For more information about contributing, see the [CONTRIBUTING](https://github.com/elastic/logstash/blob/master/CONTRIBUTING.md) file.
data/lib/logstash/filters/categoriser.rb
ADDED
@@ -0,0 +1,80 @@
+# encoding: utf-8
+# frozen_string_literal: true
+require "logstash/filters/base"
+require "logstash/namespace"
+
+#
+# A way to categorise devices based on existing fields.
+#
+# In my scenario I have multiple types of devices all
+# sending syslog logs. I use this to quickly separate
+# them in order to run a pipeline for each device type.
+#
+# Example config:
+#
+#   filter {
+#     categoriser {
+#       rules_file => "/etc/logstash/device_type.rules.json"
+#       target => "device_type"
+#       default_category => "unknown"
+#     }
+#   }
+#
+# .. with an example rules file:
+#
+#   {
+#     "cisco_asa_firewall": ["hostname", "contains", "-asa-"],
+#     "cisco_pix_fwsm_firewall": [
+#       "or", [
+#         ["hostname", "contains", "-pix-"],
+#         ["hostname", "contains", "-fwsm-"]]],
+#     "web_servers": ["hostname", "starts_with", "web"]
+#   }
+#
+# This would replace the contents of the "device_type" field
+# with the category in the rules file, ie "cisco_asa_firewall".
+# If we don't match any rules then "device_type" will be set
+# to "unknown".
+#
+class LogStash::Filters::Categoriser < LogStash::Filters::Base
+
+  config_name "categoriser"
+
+  require 'logstash/filters/categoriser/rules'
+
+  # The rules filename, ie:
+  #   filter {
+  #     categoriser {
+  #       rules_file => "/etc/logstash/device_type.rules.json"
+  #       target => "device_type"
+  #     }
+  #   }
+  config :rules_file, :validate => :string
+  config :target, :validate => :string, :default => "category"
+  config :default_category, :validate => :string, :default => "unknown"
+
+  public def register
+    # Add instance variables
+    filter_config = LogStash::Filters::Categoriser::Rules.new(@logger)
+    @rules = filter_config.read_config(@rules_file)
+  end
+
+  public def filter(event)
+    device_type = find_device_type(event)
+    @logger.debug? && @logger.debug("Device type: #{device_type}")
+    event.set(@target, device_type)
+
+    # filter_matched should go in the last line of our successful code
+    filter_matched(event)
+  end
+
+  private def find_device_type(event)
+    matched_type = @rules.find do |type, matcher|
+      matcher.call(event)
+    end || [@default_category, nil]
+
+    matched_type.first
+  end
+
+end
+
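A note on the lookup in `find_device_type`: `@rules` preserves the key order of the JSON rules file, so `find` returns the first `[category, matcher]` pair whose Proc matches, and `|| [@default_category, nil]` supplies the fallback pair when nothing matches. A minimal sketch of that behaviour outside Logstash, with plain hashes and lambdas standing in for the real event and the Procs built by `Rules#read_config`:

```ruby
# Sketch only: lambdas stand in for the Procs built by Rules#read_config,
# and a plain Hash stands in for the LogStash event (the real Procs call event.get).
rules = {
  "cisco_asa_firewall" => ->(event) { event["hostname"].include?("-asa-") },
  "web_servers"        => ->(event) { event["hostname"].start_with?("web") }
}

def categorise(rules, default_category, event)
  # First matching [category, matcher] pair wins; fall back to the default category.
  matched = rules.find { |_category, matcher| matcher.call(event) } || [default_category, nil]
  matched.first
end

categorise(rules, "unknown", "hostname" => "foo-asa-01")  # => "cisco_asa_firewall"
categorise(rules, "unknown", "hostname" => "printer-01")  # => "unknown"
```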
data/lib/logstash/filters/categoriser/config.rb
ADDED
@@ -0,0 +1,153 @@
+# encoding: utf-8
+# frozen_string_literal: true
+
+require 'json'
+
+class LogStash::Filters::Categoriser::RulesError < StandardError; end
+
+class LogStash::Filters::Categoriser::Rules
+
+  def initialize(logger)
+    @logger = logger
+  end
+
+  def default_config
+    default =<<-JSONCONFIG.gsub(/^ {6}/,'')
+      {
+        "cisco_asa_firewall": ["hostname", "contains", "-asa-"],
+        "cisco_pix_firewall": [
+          "or", [
+            ["hostname", "contains", "-pix-"],
+            ["hostname", "contains", "-fwsm-"]]],
+        "f5_bigip": ["hostname", "contains", "-bigip-"],
+        "checkpoint_ipso_firewall": ["hostname", "contains", "-nok-"],
+        "checkpoint_gaia_firewall": ["hostname", "contains", "-cpg-"],
+        "bluecoat_proxysg": ["hostname", "contains", "-bcsg-"],
+        "bluecoat_proxyav": ["hostname", "contains", "-bcav-"]
+      }
+    JSONCONFIG
+    JSON.load(default)
+  end
+
+  def read_config(filename)
+    conf = JSON.load(File.read(filename))
+    load_config(conf)
+  end
+
+  # Returns a hash:
+  #   {"cisco_asa_firewall" => Proc}
+  #
+  # So
+  def load_config(config)
+    config.keys.reduce({}) do |acc, key|
+      acc.merge({key => parse_checks(config[key])})
+    end
+  end
+
+  # +checks+::
+  #   ["and", [
+  #     ["hostname", "contains", "-pix-"],
+  #     ["hostname", "contains", "-fwsm-"]
+  #   ]]
+  # Returns a proc that you can pass 'event' into
+  # The proc will return true or false.
+  def parse_checks(checks)
+    if !checks.is_a?(Array)
+      raise ConfigError, "checks should be an array"
+    end
+
+    case checks.first
+    when "and"
+      if checks.last.is_a?(Array)
+        Proc.new do |event|
+          child_procs = checks.last.map {|check| parse_checks(check)}
+          child_procs.all? {|child| child.call(event)}
+        end
+      else
+        @logger.warn('Invalid config: "and" should be followed by an array, ie: ["and", [["hostname", "contains", "a"], ["hostname ", "contains", "b"]]]')
+      end
+
+    when "or"
+      if checks.last.is_a?(Array)
+        Proc.new do |event|
+          child_procs = checks.last.map {|check| parse_checks(check)}
+          child_procs.any? {|child| child.call(event)}
+        end
+      else
+        @logger.warn('Invalid config: "or" should be followed by an array, ie: ["or", [["hostname", "contains", "a"], ["hostname", "contains", "b"]]]')
+      end
+
+    when "not"
+      if checks.last.is_a?(Array)
+        Proc.new do |event|
+          not parse_checks(checks.last).call(event)
+        end
+      end
+    else
+      Proc.new { |event| load_check(checks).call(event) }
+    end
+  end
+
+
+  # Loads a check, ie
+  #   ["hostname", "contains", "-asa-"]
+  #
+  # .. where the format is:
+  #
+  #   [<field name>, <contains | regex>, <argument>]
+  #
+  # Returns a proc that can be used later to determine
+  # if the check is true or false, ie:
+  #
+  #   asa_check = load_check(["hostname", "contains", "-asa-"])
+  #   asa_check.(event)
+  #
+  def load_check(check)
+    if check.length == 3
+      if check.all? {|x| x.is_a?(String) }
+        (check_field, check_command, check_argument) = check
+
+        case check_command
+        when "contains"
+          Proc.new do |event|
+            field_value = event.get(check_field)
+            generic_check_ok?(check, check_field, field_value) && field_value.include?(check_argument)
+          end
+        when "regex"
+          regex = Regexp.new(check_argument, Regexp::IGNORECASE)
+          Proc.new do |event|
+            field_value = event.get(check_field)
+            generic_check_ok?(check, check_field, field_value) && regex.match?(field_value)
+          end
+        else
+          @logger.warn("Invalid config: #{check}, the second argument must be one of: \"contains\", \"regex\"")
+          # TODO - see if it's ok to raise an error here?
+          raise ConfigError, "Invalid config: #{check}, the second argument must be one of: \"contains\", \"regex\""
+        end
+
+      else
+        @logger.warn("Invalid config: #{check}, all elements must be a string, ie: " + '["hostname", "contains", "-asa-"]')
+        Proc.new{|event| false}
+      end
+    else
+      @logger.warn("Invalid config: #{check}, expected three elements, ie: " + '["hostname", "contains", "-asa-"]')
+      Proc.new{|event| false}
+    end
+  end
+
+  private def generic_check_ok?(check, check_field, field_value)
+    if field_value.nil?
+      @logger.warn("vf_device_type - Could not read missing field, check #{check} returns false.", :field => check_field)
+      false
+    else
+      if !field_value.is_a?(String)
+        @logger.warn("vf_device_type - Field must be a string, check #{check} returns false (field was a(n) #{field_value.class}).", :field => check_field)
+        false
+      else
+        true
+      end
+    end
+  end
+
+end
+
data/lib/logstash/filters/categoriser/rules.rb
ADDED
@@ -0,0 +1,185 @@
+# encoding: utf-8
+# frozen_string_literal: true
+
+require 'json'
+
+class LogStash::Filters::Categoriser::RulesError < StandardError; end
+
+# This class loads the rules file.
+#
+#
+class LogStash::Filters::Categoriser::Rules
+
+  def initialize(logger)
+    @logger = logger
+  end
+
+  # Returns a hash of Procs that can be used to determine
+  # the device's category.
+  #
+  # For example, given a rules file something like this:
+  #
+  #   {
+  #     "one": ["hostname", "contains", "one"],
+  #     "two": ["or", [
+  #       ["hostname", "eq", "a"],
+  #       ["message", "eq", "b"]
+  #     ]]
+  #   }
+  #
+  # This method would return a hash:
+  #
+  #   {"one" => Proc, "two" => Proc}
+  #
+  # We can then pass "event" into one of the Procs
+  # and they'll return true or false, for example:
+  #
+  #   matched_type = @rules.find do |key, matcher|
+  #     matcher.call(event)
+  #   end
+  #
+  # @rules is the "key" => Proc hash above. "matcher"
+  # is the Proc.
+  #
+  #
+  def read_config(filename)
+    conf = if filename.respond_to?(:read)
+      JSON.load(filename)
+    else
+      JSON.load(File.read(filename))
+    end
+
+    load_config(conf)
+  end
+
+  # Returns a hash, ie:
+  #   {"cisco_asa_firewall" => Proc}
+  #
+  def load_config(config)
+    config.keys.reduce({}) do |acc, key|
+      acc.merge({key => parse_checks(config[key])})
+    end
+  end
+
+  # +checks+::
+  #   Example:
+  #
+  #   ["and", [
+  #     ["hostname", "contains", "-pix-"],
+  #     ["hostname", "contains", "-fwsm-"]
+  #   ]]
+  #
+  # Returns a proc that you can pass 'event' into
+  # The proc will return true or false.
+  private def parse_checks(checks)
+    if !checks.is_a?(Array)
+      raise LogStash::Filters::Categoriser::RulesError, "Invalid config: #{checks.dump} Checks should be an array, " + 'ie: ["hostname", "starts_with", "web"]'
+    end
+
+    case checks.first
+    when "and"
+      if checks.last.is_a?(Array)
+        Proc.new do |event|
+          child_procs = checks.last.map {|check| parse_checks(check)}
+          child_procs.all? {|child| child.call(event)}
+        end
+      else
+        @logger.warn('Invalid config: "and" should be followed by an array, ie: ["and", [["hostname", "contains", "a"], ["hostname ", "contains", "b"]]]')
+      end
+
+    when "or"
+      if checks.last.is_a?(Array)
+        Proc.new do |event|
+          child_procs = checks.last.map {|check| parse_checks(check)}
+          child_procs.any? {|child| child.call(event)}
+        end
+      else
+        @logger.warn('Invalid config: "or" should be followed by an array, ie: ["or", [["hostname", "contains", "a"], ["hostname", "contains", "b"]]]')
+      end
+
+    when "not"
+      if checks.last.is_a?(Array)
+        Proc.new do |event|
+          not parse_checks(checks.last).call(event)
+        end
+      end
+    else
+      Proc.new { |event| load_check(checks).call(event) }
+    end
+  end
+
+
+  # Loads a check, ie
+  #   ["hostname", "contains", "-asa-"]
+  #
+  # .. where the format is:
+  #
+  #   [<field name>, <contains | eq etc>, <argument>]
+  #
+  # Returns a proc that can be used later to determine
+  # if the check is true or false, ie:
+  #
+  #   asa_check = load_check(["hostname", "contains", "-asa-"])
+  #   asa_check.call(event)
+  #
+  private def load_check(check)
+    tag = "#{self.class}\##{__method__}"
+    if check.length == 3
+      if check.all? {|x| x.is_a?(String) }
+        (check_field, check_command, check_argument) = check
+
+
+        case check_command
+        when "equals", "eql", "eq"
+          Proc.new do |event|
+            field_value = event.get(check_field)
+            generic_check_ok?(check, check_field, field_value) && field_value.eql?(check_argument)
+          end
+        when "include", "contains"
+          Proc.new do |event|
+            field_value = event.get(check_field)
+            generic_check_ok?(check, check_field, field_value) && field_value.include?(check_argument)
+          end
+        when "start_with", "starts_with"
+          Proc.new do |event|
+            field_value = event.get(check_field)
+            generic_check_ok?(check, check_field, field_value) && field_value.start_with?(check_argument)
+          end
+        when "end_with", "ends_with"
+          Proc.new do |event|
+            field_value = event.get(check_field)
+            generic_check_ok?(check, check_field, field_value) && field_value.end_with?(check_argument)
+          end
+        else
+          valid_commands = ["contains", "starts_with", "ends_with", "eq"]
+          @logger.warn(tag) { "Invalid config: #{check}, the second argument must be one of: #{valid_commands}" }
+          raise LogStash::Filters::Categoriser::RulesError, "Invalid config: #{check}, the second argument must be one of: #{valid_commands} "
+        end
+
+      else
+        @logger.warn(tag) { "Invalid config: #{check}, all elements must be a string, ie: " + '["hostname", "contains", "-asa-"]' }
+        Proc.new{|event| false}
+      end
+    else
+      @logger.warn(tag) { "Invalid config: #{check}, expected three elements, ie: " + '["hostname", "contains", "-asa-"]' }
+      Proc.new{|event| false}
+    end
+  end
+
+  private def generic_check_ok?(check, check_field, field_value)
+    tag = "#{self.class}\##{__method__}"
+    if field_value.nil?
+      @logger.info(tag) { "Could not read missing field \"#{check_field}\", check #{check.inspect} returns false." }
+      false
+    else
+      if !field_value.is_a?(String)
+        @logger.info(tag) { "Field must be a string, check #{check} returns false (field was a(n) #{field_value.class})." }
+        false
+      else
+        true
+      end
+    end
+  end
+
+end
+
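`Rules#read_config` accepts either a filename or anything that responds to `read` (the specs further down pass a `StringIO`), and the returned Procs only need an object that answers `get(field)`. A small sketch of driving the class directly, mirroring the spec setup; it assumes a Logstash development environment in which the plugin itself loads:

```ruby
# Sketch: exercising Rules#read_config outside the filter, much as the specs do.
# Assumes the plugin is on the load path (e.g. running under bundle exec).
require "logger"
require "stringio"
require "logstash/filters/categoriser"

rules_json = StringIO.new(<<-JSON)
  {
    "cisco_pix_firewall": ["or", [["hostname", "contains", "-pix-"],
                                  ["hostname", "contains", "-fwsm-"]]],
    "web_servers": ["and", [["hostname", "starts_with", "web"],
                            ["program", "eq", "httpd"]]]
  }
JSON

rules = LogStash::Filters::Categoriser::Rules.new(Logger.new(STDOUT)).read_config(rules_json)

event = {"hostname" => "web01", "program" => "httpd"}
def event.get(field)   # duck-typed stand-in for a LogStash::Event, as in the specs
  self[field]
end

category, _matcher = rules.find { |_type, matcher| matcher.call(event) }
category  # => "web_servers"
```

Note that the filter itself only requires `categoriser/rules`; the older `categoriser/config.rb` above keeps a slightly different command set (`contains`/`regex`).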
data/lib/logstash/filters/example.rb
ADDED
@@ -0,0 +1,48 @@
+# encoding: utf-8
+require "logstash/filters/base"
+require "logstash/namespace"
+
+# This example filter will replace the contents of the default
+# message field with whatever you specify in the configuration.
+#
+# It is only intended to be used as an example.
+class LogStash::Filters::Example < LogStash::Filters::Base
+
+  # Setting the config_name here is required. This is how you
+  # configure this filter from your Logstash config.
+  #
+  # filter {
+  #   example {
+  #     message => "My message..."
+  #   }
+  # }
+  #
+  config_name "example"
+
+  # Replace the message with this value.
+  config :message, :validate => :string, :default => "Hello World!"
+
+
+  public
+  def register
+    # Add instance variables
+  end # def register
+
+  public
+  def filter(event)
+
+    if @message
+      # Replace the event message with our message as configured in the
+      # config file.
+
+      # using the event.set API
+      event.set("message", @message)
+      # correct debugging log statement for reference
+      # using the event.get API
+      @logger.debug? && @logger.debug("Message is now: #{event.get("message")}")
+    end
+
+    # filter_matched should go in the last line of our successful code
+    filter_matched(event)
+  end # def filter
+end # class LogStash::Filters::Example
data/logstash-filter-categoriser.gemspec
ADDED
@@ -0,0 +1,24 @@
+Gem::Specification.new do |s|
+  s.name          = 'logstash-filter-categoriser'
+  s.version       = '1.0.0'
+  s.licenses      = ['Apache License (2.0)']
+  s.summary       = "Allows quick categorisation of incoming logs based on other fields."
+  s.description   = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
+  s.authors       = ["Elastic", "Phil Helliwell"]
+  s.email         = 'phil.helliwell@gmail.com'
+  s.homepage      = "https://github.com/kill9zombie/logstash-filter-categoriser"
+  s.require_paths = ["lib"]
+
+  # Files
+  s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
+  # Tests
+  s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+  # Special flag to let us know this is actually a logstash plugin
+  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }
+
+  # Gem dependencies
+  s.add_runtime_dependency "json", "~> 1.8"
+  s.add_runtime_dependency "logstash-core-plugin-api", "~> 2.0"
+  s.add_development_dependency 'logstash-devutils'
+end
data/spec/filters/categoriser_rules_spec.rb
ADDED
@@ -0,0 +1,80 @@
+# encoding: utf-8
+require "logger"
+require "stringio"
+require "logstash/devutils/rspec/spec_helper"
+
+require "logstash/filters/base"
+require "logstash/namespace"
+
+# Dummy filter just so that we can load LogStash::Filters::Categoriser::Rules
+class LogStash::Filters::Categoriser < LogStash::Filters::Base
+  def register
+  end
+
+  def filter(event)
+    filter_matched(event)
+  end
+end
+
+require "logstash/filters/categoriser/rules"
+
+
+
+describe LogStash::Filters::Categoriser::Rules do
+
+  before(:each) do
+    @logger = Logger.new(STDOUT)
+    @logger.level = Logger::ERROR
+  end
+
+  it "raises an error for a missing rules file" do
+    missing_rules_file = File.expand_path("../../fixtures/missing.rules.json", __FILE__)
+
+    filter_config = LogStash::Filters::Categoriser::Rules.new(@logger)
+    expect{ filter_config.read_config(missing_rules_file) }.to raise_error(Errno::ENOENT)
+  end
+
+  it "won't load invalid JSON" do
+    missing_rules_file = File.expand_path("../../fixtures/invalid_json.rules.json", __FILE__)
+
+    filter_config = LogStash::Filters::Categoriser::Rules.new(@logger)
+    expect{ filter_config.read_config(missing_rules_file) }.to raise_error(JSON::ParserError)
+  end
+
+  it "won't run invalid rules" do
+
+    event = {"hostname" => "foo-asa-01"}
+    def event.get(field)
+      self[field]
+    end
+
+    filter_config = LogStash::Filters::Categoriser::Rules.new(@logger)
+
+    invalid_rule = StringIO.new(%q({"cisco_asa": "invalid_rule"}))
+    expect do
+      rules = filter_config.read_config(invalid_rule)
+      matched = rules.find {|type, matcher| matcher.call(event)}
+    end.to raise_error(LogStash::Filters::Categoriser::RulesError)
+
+    invalid_command = StringIO.new(%q({"cisco_asa": ["hostname", "invalid_command", "-asa-"]}))
+    expect do
+      rules = filter_config.read_config(invalid_command)
+      matched = rules.find {|type, matcher| matcher.call(event)}
+    end.to raise_error(LogStash::Filters::Categoriser::RulesError)
+
+  end
+
+  it "returns false for missing fields" do
+    event = {"hostname" => "foo-asa-01"}
+    def event.get(field)
+      self[field]
+    end
+
+    filter_config = LogStash::Filters::Categoriser::Rules.new(@logger)
+
+    missing_field_rule = StringIO.new(%q({"cisco_asa": ["alice", "contains", "-asa-"]}))
+    expect(
+      filter_config.read_config(missing_field_rule).any? {|type, matcher| matcher.call(event)}
+    ).to be false
+  end
+end
data/spec/filters/categoriser_spec.rb
ADDED
@@ -0,0 +1,80 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/filters/categoriser"
+
+describe LogStash::Filters::Categoriser do
+  describe "basic rules" do
+    test_rules = File.expand_path("../../fixtures/test.rules.json", __FILE__)
+
+    config <<-CONFIG
+      filter {
+        categoriser {
+          rules_file => "#{test_rules}"
+          target => "device_type"
+        }
+      }
+    CONFIG
+
+    # Simple match
+    sample({"hostname" => "foo-asa-01"}) do
+      expect(subject.get("device_type")).to eq('cisco_asa')
+    end
+
+    # "or"
+    sample({"hostname" => "foo-dc-01"}) do
+      expect(subject.get("device_type")).to eq('windows')
+    end
+    sample({"hostname" => "foo-sql-01"}) do
+      expect(subject.get("device_type")).to eq('windows')
+    end
+
+    # "and"
+    sample({"hostname" => "web01", "program" => "httpd"}) do
+      expect(subject.get("device_type")).to eq('web_servers')
+    end
+
+    # "and" "not"
+    sample({"hostname" => "foo-bigip-01", "message" => "test"}) do
+      expect(subject.get("device_type")).to eq('bigip')
+    end
+    sample({"hostname" => "foo-bigip-01", "message" => "elephant test"}) do
+      expect(subject.get("device_type")).to eq('unknown')
+    end
+  end
+
+  describe "config defaults" do
+    test_rules = File.expand_path("../../fixtures/test.rules.json", __FILE__)
+
+    config <<-CONFIG
+      filter {
+        categoriser {
+          rules_file => "#{test_rules}"
+        }
+      }
+    CONFIG
+
+    sample({"hostname" => "unmatched"}) do
+      expect(subject.get("category")).to eq('unknown')
+    end
+  end
+
+  describe "config" do
+    test_rules = File.expand_path("../../fixtures/test.rules.json", __FILE__)
+
+    # Once more with definition
+    config <<-CONFIG
+      filter {
+        categoriser {
+          rules_file => "#{test_rules}"
+          target => "alice"
+          default_category => "bob"
+        }
+      }
+    CONFIG
+
+    sample({"hostname" => "unmatched"}) do
+      expect(subject.get("alice")).to eq('bob')
+    end
+  end
+
+end
data/spec/filters/example_spec.rb
ADDED
@@ -0,0 +1,20 @@
+# encoding: utf-8
+require 'spec_helper'
+require "logstash/filters/example"
+
+describe LogStash::Filters::Example do
+  describe "Set to Hello World" do
+    let(:config) do <<-CONFIG
+      filter {
+        example {
+          message => "Hello World"
+        }
+      }
+    CONFIG
+    end
+
+    sample("message" => "some text") do
+      expect(subject.get("message")).to eq('Hello World')
+    end
+  end
+end
data/spec/fixtures/test.rules.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "cisco_asa": ["hostname", "contains", "-asa-"],
+  "windows": [
+    "or", [
+      ["hostname", "contains", "-sql-"],
+      ["hostname", "contains", "-dc-"]]],
+  "bigip": [
+    "and", [
+      ["hostname", "contains", "-bigip-"],
+      ["not", ["message", "contains", "elephant"]]]],
+  "web_servers": [
+    "and", [
+      ["hostname", "starts_with", "web"],
+      ["program", "eq", "httpd"]]]
+}
data/spec/spec_helper.rb
ADDED
metadata
ADDED
@@ -0,0 +1,114 @@
+--- !ruby/object:Gem::Specification
+name: logstash-filter-categoriser
+version: !ruby/object:Gem::Version
+  version: 1.0.0
+platform: ruby
+authors:
+- Elastic
+- Phil Helliwell
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2019-01-09 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.8'
+  name: json
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.8'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '2.0'
+  name: logstash-core-plugin-api
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '2.0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: logstash-devutils
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+description: This gem is a Logstash plugin required to be installed on top of the
+  Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
+  gem is not a stand-alone program
+email: phil.helliwell@gmail.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- CHANGELOG.md
+- CONTRIBUTORS
+- DEVELOPER.md
+- Gemfile
+- LICENSE
+- NOTICE.TXT
+- README.md
+- lib/logstash/filters/categoriser.rb
+- lib/logstash/filters/categoriser/config.rb
+- lib/logstash/filters/categoriser/rules.rb
+- lib/logstash/filters/example.rb
+- logstash-filter-categoriser.gemspec
+- spec/filters/categoriser_rules_spec.rb
+- spec/filters/categoriser_spec.rb
+- spec/filters/example_spec.rb
+- spec/fixtures/invalid_json.rules.json
+- spec/fixtures/test.rules.json
+- spec/spec_helper.rb
+homepage: https://github.com/kill9zombie/logstash-filter-categoriser
+licenses:
+- Apache License (2.0)
+metadata:
+  logstash_plugin: 'true'
+  logstash_group: filter
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.6.11
+signing_key:
+specification_version: 4
+summary: Allows quick categorisation of incoming logs based on other fields.
+test_files:
+- spec/filters/categoriser_rules_spec.rb
+- spec/filters/categoriser_spec.rb
+- spec/filters/example_spec.rb
+- spec/fixtures/invalid_json.rules.json
+- spec/fixtures/test.rules.json
+- spec/spec_helper.rb