logstash-filter-trigger 0.1.0
- checksums.yaml +7 -0
- data/.gitignore +3 -0
- data/Gemfile +2 -0
- data/LICENSE +13 -0
- data/Rakefile +7 -0
- data/lib/logstash/filters/trigger.rb +222 -0
- data/logstash-filter-trigger.gemspec +27 -0
- data/spec/filters/trigger_spec.rb +5 -0
- metadata +87 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA1:
  metadata.gz: bea338a8dd5cf122c7f01516bf6d3f16bcbf317d
  data.tar.gz: 72883f368242e6bd5ef61e62f3aa4deca979f4c2
SHA512:
  metadata.gz: f675d3ac78c104ee035002a169c3abd377083437edcfa8306e88cc552813316361a4a818caf13b25b2533afb98d35e3f0909bfea6372cbdc3c9859505fee8176
  data.tar.gz: 979bf5fe26e21896ff00bb57c33f89590dbadd1d896383f8be0a24df681a9b89afb376cb2287b890a8720775a3a1e46acf11991c28cbe582f383813ef808e3c2
data/.gitignore
ADDED
data/Gemfile
ADDED
data/LICENSE
ADDED
@@ -0,0 +1,13 @@
Copyright (c) 2012-2014 Elasticsearch <http://www.elasticsearch.org>

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
data/Rakefile
ADDED
data/lib/logstash/filters/trigger.rb
ADDED
@@ -0,0 +1,222 @@
require "logstash/filters/base"
require "logstash/namespace"
require 'date'

# Applies the triggers to events, read from file
class LogStash::Filters::Trigger < LogStash::Filters::Base

  if RUBY_ENGINE == "jruby"
    JavaException = java.lang.Exception
    UTC = org.joda.time.DateTimeZone.forID("UTC")
  end

  config_name "trigger"

  # Specify a timezone canonical ID to be used for date parsing.
  # The valid IDs are listed at http://joda-time.sourceforge.net/timezones.html
  # Useful in case the timezone cannot be extracted from the value
  # and is not the platform default.
  # If this is not specified, the platform default will be used.
  # A canonical ID is good as it takes care of daylight saving time for you.
  # For example, America/Los_Angeles or Europe/Paris are valid IDs.
  config :timezone, :validate => :string

  # Drop events that don't match
  #
  # If this is set to false, no events will be dropped at all. Rather, the
  # requested tags and fields will be added to matching events, and
  # non-matching events will be passed through unchanged.
  config :drop, :validate => :boolean, :default => false

  # The attribute/field where the matching triggers will be saved
  config :trigger_attribute, :validate => :string, :default => "trigger"

  # The attribute/field where the timestamp for triggers comes from
  config :timestamp_attribute, :validate => :string, :default => "timestamp"

  # The attribute/field where the timespan for triggers comes from
  config :timespan_attribute, :validate => :string, :default => "timespan"

  # Default timespan, if not set in the trigger file
  config :timespan_default, :validate => :string, :default => "60"

  # Where should we load the triggers from?
  config :trigger_path, :validate => :string, :default => "Triggers_*"

  # The regular expression to match
  config :trigger_pattern, :validate => :string, :required => true

  # Date format of the triggers, sent from an input, e.g. triggeredpackage
  config :trigger_format, :validate => :string, :required => true

  # logstash ships by default with a bunch of patterns, so you don't
  # necessarily need to define this yourself unless you are adding additional
  # patterns.
  #
  # Pattern files are plain text with format:
  #
  #     NAME PATTERN
  #
  # For example:
  #
  #     NUMBER \d+
  config :patterns_dir, :validate => :array, :default => []

  public
  def register
    require "grok-pure" # rubygem 'jls-grok'
    # Detect if we are running from a jarfile, pick the right path.
    patterns_path = []
    if __FILE__ =~ /file:\/.*\.jar!.*/
      patterns_path += ["#{File.dirname(__FILE__)}/../../patterns/*"]
    else
      patterns_path += ["#{File.dirname(__FILE__)}/../../../patterns/*"]
    end

    @grok = Grok.new

    @patterns_dir = patterns_path.to_a + @patterns_dir
    @patterns_dir.each do |path|
      # Can't read relative paths from jars, try to normalize away '../'
      while path =~ /file:\/.*\.jar!.*\/\.\.\//
        # replace /foo/bar/../baz => /foo/baz
        path = path.gsub(/[^\/]+\/\.\.\//, "")
      end

      if File.directory?(path)
        path = File.join(path, "*")
      end

      Dir.glob(path).each do |file|
        @logger.info("Grok loading patterns from file", :path => file)
        @grok.add_patterns_from_file(file)
      end
    end

    @grok.compile(@trigger_pattern)

    joda_parser = org.joda.time.format.DateTimeFormat.forPattern(@trigger_format).withDefaultYear(Time.new.year)
    if @timezone
      joda_parser = joda_parser.withZone(org.joda.time.DateTimeZone.forID(@timezone))
    else
      joda_parser = joda_parser.withOffsetParsed
    end

    @parser = lambda { |date| joda_parser.parseDateTime(date) }

    @triggers = Hash.new { |h, k| h[k] = [] }
    @last_trigger_times = Hash.new { |h, k| h[k] = [] }

    @trigger_cleanup_interval = 10
  end # def register

  public
  def filter(event)
    matches = 0
    dirname = File.dirname(event["path"])

    cleanup_triggers(event)

    read_triggers(dirname)

    @triggers[dirname].each do |trigger|

      startTime = trigger[:timestamp]
      startTime -= trigger[:timespan].to_i

      endTime = trigger[:timestamp]
      endTime += trigger[:timespan].to_i

      if event.timestamp >= startTime && event.timestamp <= endTime
        event[@trigger_attribute] ||= []
        event[@trigger_attribute] << trigger unless event[@trigger_attribute].include?(trigger)
        matches += 1
      end
    end

    if matches > 0
      filter_matched(event)
    else
      if @drop == true
        @logger.debug("trigger: dropping event, no matches") if @logger.debug?
        event.cancel
      else
        @logger.debug("trigger: no matches, but drop set to false") if @logger.debug?
      end
    end
  end # def filter

  private
  def read_triggers(dirname)
    return if @triggers.include?(dirname) and not @triggers[dirname].nil?

    @logger.debug("read triggers for dir #{dirname}") if @logger.debug?

    triggerglob = Dir.glob(dirname + '/' + @trigger_path)
    return unless triggerglob.length > 0

    triggerglob.each do |triggerpath|
      File.readlines(triggerpath).each do |line|
        set_triggers(dirname, line.strip! || line)
      end
    end
  end

  private
  def set_triggers(dirname, line)
    fields = {}

    match = @grok.match(line)

    match.each_capture do |capture, value|
      syntax, semantic, coerce = capture.split(":")
      if !semantic.nil?
        fields[semantic] = value
      end
    end

    jtime = @parser.call(fields[@timestamp_attribute])
    jtime = jtime.withZone(UTC)

    timestamp = Time.utc(
      jtime.getYear, jtime.getMonthOfYear, jtime.getDayOfMonth,
      jtime.getHourOfDay, jtime.getMinuteOfHour, jtime.getSecondOfMinute,
      jtime.getMillisOfSecond * 1000
    )

    if !fields.include?(@timespan_attribute) or fields[@timespan_attribute].nil?
      timespan = @timespan_default
    else
      timespan = fields[@timespan_attribute]
    end

    trigger = {
      :timestamp => timestamp,
      :timespan => timespan
    }

    unless @triggers[dirname].include? trigger
      @logger.debug("@triggers: add trigger", :timestamp => timestamp, :timespan => timespan) if @logger.debug?
      @triggers[dirname] << trigger
      @last_trigger_times[dirname] = Time.now
    end
  end

  # Clean up roughly every 10 seconds after the last trigger (driven by events;
  # if there are no events, there is no point cleaning up triggers anyway).
  def cleanup_triggers(event)
    now = Time.now
    dirname = File.dirname(event["path"])

    # renew the timestamp
    @last_trigger_times[dirname] = now if @last_trigger_times.include? dirname

    @last_trigger_times.each do |dirname, last_time|
      if now - last_time >= @trigger_cleanup_interval
        @triggers.delete dirname
        @last_trigger_times.delete dirname
      end
    end
  end
end # class LogStash::Filters::Trigger
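As a usage illustration (not part of this gem's files), a pipeline configuration for this filter might look like the sketch below. The file input path, the grok pattern, the date format, and the example trigger line are assumptions chosen to fit the config options defined above; only the option names themselves come from the plugin.

# Hypothetical pipeline sketch: the file input supplies the "path" field that
# the filter uses to look for Triggers_* files in the same directory as the log.
input {
  file {
    path => "/var/data/batches/*/events.log"   # assumed layout
  }
}

filter {
  trigger {
    # Grok pattern applied to each line of a trigger file; the captures should
    # include the timestamp field and, optionally, the timespan field.
    trigger_pattern => "%{TIMESTAMP_ISO8601:timestamp}( %{NUMBER:timespan})?"
    # Joda-Time format used to parse the captured timestamp.
    trigger_format => "yyyy-MM-dd'T'HH:mm:ss"
    # Events within +/- timespan seconds of a trigger timestamp get the
    # matching trigger stored in this field (defaults shown explicitly).
    trigger_attribute => "trigger"
    timespan_default => "60"
    # Drop events that fall outside every trigger window.
    drop => true
  }
}

A trigger file matching trigger_path (for example a file named Triggers_batch1, an assumed name) would then hold one trigger per line, such as 2015-08-25T10:15:00 120, giving a 120-second window either side of that timestamp; when the timespan capture is absent, timespan_default applies.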
data/logstash-filter-trigger.gemspec
ADDED
@@ -0,0 +1,27 @@
Gem::Specification.new do |s|

  s.name = 'logstash-filter-trigger'
  s.version = '0.1.0'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Applies the triggers to events, read from file"
  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
  s.authors = ["Signify"]
  s.email = 'dietmar@signifydata.com'
  s.homepage = "http://www.signifydata.com"
  s.require_paths = ["lib"]

  # Files
  s.files = `git ls-files`.split($\)

  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }

  # Gem dependencies
  s.add_runtime_dependency 'logstash-core', '>= 1.4.0', '< 2.0.0'

  s.add_development_dependency 'logstash-devutils'
end
metadata
ADDED
@@ -0,0 +1,87 @@
--- !ruby/object:Gem::Specification
name: logstash-filter-trigger
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- Signify
autorequire:
bindir: bin
cert_chain: []
date: 2015-08-25 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - '>='
      - !ruby/object:Gem::Version
        version: 1.4.0
    - - <
      - !ruby/object:Gem::Version
        version: 2.0.0
  name: logstash-core
  prerelease: false
  type: :runtime
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - '>='
      - !ruby/object:Gem::Version
        version: 1.4.0
    - - <
      - !ruby/object:Gem::Version
        version: 2.0.0
- !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - '>='
      - !ruby/object:Gem::Version
        version: '0'
  name: logstash-devutils
  prerelease: false
  type: :development
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - '>='
      - !ruby/object:Gem::Version
        version: '0'
description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
email: dietmar@signifydata.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- .gitignore
- Gemfile
- LICENSE
- Rakefile
- lib/logstash/filters/trigger.rb
- logstash-filter-trigger.gemspec
- spec/filters/trigger_spec.rb
homepage: http://www.signifydata.com
licenses:
- Apache License (2.0)
metadata:
  logstash_plugin: 'true'
  logstash_group: filter
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.1.9
signing_key:
specification_version: 4
summary: Applies the triggers to events, read from file
test_files:
- spec/filters/trigger_spec.rb