logstash-filter-date 0.1.0
- checksums.yaml +15 -0
- data/.gitignore +4 -0
- data/Gemfile +3 -0
- data/LICENSE +13 -0
- data/Rakefile +6 -0
- data/lib/logstash/filters/date.rb +236 -0
- data/logstash-filter-date.gemspec +28 -0
- data/rakelib/publish.rake +9 -0
- data/rakelib/vendor.rake +169 -0
- data/spec/filters/date_spec.rb +422 -0
- metadata +118 -0
checksums.yaml
ADDED
@@ -0,0 +1,15 @@
---
!binary "U0hBMQ==":
  metadata.gz: !binary |-
    NGIzYmRiMTk0MzRkMDVjZWQxODE1NDBjNmYyNTQ3YzlkNTUzNGMwYg==
  data.tar.gz: !binary |-
    ZTkyNDQxOTk3OWExNmU0ZGIxZDRlMjVjODgxODNhYzgzOTAwNGMzOQ==
SHA512:
  metadata.gz: !binary |-
    NTI0YTRiODQ4ZjAxMDUwZDY4YjI2MDE5NDA3YmQ2ZDVmMDY3NDFiZDZlMmVh
    NTVhNTg3MjY2YjdlNDI4YjY1YWI5Y2Y1OGUwYTY0NzU5OWMwZmFiMjNjMDEy
    YzI4MzEwZWNhYTEyYTFmOTllMThiZmRhYjU3MTg2MjYzMDY3MWU=
  data.tar.gz: !binary |-
    N2MwNGI4Zjk0ZjFjOGNkZWZkZDhhOGNjMDdhZTE4OTcwM2ZiY2I5Nzk2ZDky
    OGRkMTgwNDQ4ZTQ2MjQ3ZWZhYWE5Mjg1NTVmZjVhNWZlMGMxNTVlMmM5MzFk
    ZTFmZGQ5NGE4ZTI2NmRkMDAwYmY4ZDExMmUwYTk1ZWFiMzZiYTc=
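Note: each checksum above is a base64 encoding of the plain hex digest string, so it can be checked by hand. A minimal sketch in Ruby, assuming the .gem archive (an ordinary tar containing metadata.gz and data.tar.gz) has already been unpacked into the current directory:

    require "base64"
    require "digest"

    # The SHA1 entry for metadata.gz, copied from checksums.yaml above.
    # Decoding yields the 40-character hex digest string ("4b3bdb19...").
    expected = Base64.decode64("NGIzYmRiMTk0MzRkMDVjZWQxODE1NDBjNmYyNTQ3YzlkNTUzNGMwYg==")

    actual = Digest::SHA1.hexdigest(File.binread("metadata.gz"))
    puts actual == expected ? "metadata.gz checksum OK" : "checksum mismatch"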
data/.gitignore
ADDED
data/Gemfile
ADDED
data/LICENSE
ADDED
@@ -0,0 +1,13 @@
Copyright (c) 2012-2014 Elasticsearch <http://www.elasticsearch.org>

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
data/Rakefile
ADDED
data/lib/logstash/filters/date.rb
ADDED
@@ -0,0 +1,236 @@
# encoding: utf-8
require "logstash/filters/base"
require "logstash/namespace"
require "logstash/timestamp"

# The date filter is used for parsing dates from fields, and then using that
# date or timestamp as the logstash timestamp for the event.
#
# For example, syslog events usually have timestamps like this:
#
#     "Apr 17 09:32:01"
#
# You would use the date format "MMM dd HH:mm:ss" to parse this.
#
# The date filter is especially important for sorting events and for
# backfilling old data. If you don't get the date correct in your
# event, then searching for them later will likely sort out of order.
#
# In the absence of this filter, logstash will choose a timestamp based on the
# first time it sees the event (at input time), if the timestamp is not already
# set in the event. For example, with file input, the timestamp is set to the
# time of each read.
class LogStash::Filters::Date < LogStash::Filters::Base
  if RUBY_ENGINE == "jruby"
    JavaException = java.lang.Exception
    UTC = org.joda.time.DateTimeZone.forID("UTC")
  end

  config_name "date"
  milestone 3

  # Specify a time zone canonical ID to be used for date parsing.
  # The valid IDs are listed on the [Joda.org available time zones page](http://joda-time.sourceforge.net/timezones.html).
  # This is useful in case the time zone cannot be extracted from the value,
  # and is not the platform default.
  # If this is not specified the platform default will be used.
  # A canonical ID is good as it takes care of daylight saving time for you.
  # For example, `America/Los_Angeles` or `Europe/Paris` are valid IDs.
  config :timezone, :validate => :string

  # Specify a locale to be used for date parsing, using either an IETF BCP 47
  # or a POSIX language tag. Simple examples are `en`, `en-US` for BCP 47 or
  # `en_US` for POSIX.
  # If not specified, the platform default will be used.
  #
  # The locale is mostly necessary to be set for parsing month names (pattern
  # with MMM) and weekday names (pattern with EEE).
  #
  config :locale, :validate => :string

  # The date formats allowed are anything allowed by Joda-Time (java time
  # library). You can see the docs for this format here:
  #
  # [joda.time.format.DateTimeFormat](http://joda-time.sourceforge.net/apidocs/org/joda/time/format/DateTimeFormat.html)
  #
  # An array with the field name first, and format patterns following, `[ field,
  # formats... ]`
  #
  # If your time field has multiple possible formats, you can do this:
  #
  #     match => [ "logdate", "MMM dd yyyy HH:mm:ss",
  #                "MMM d yyyy HH:mm:ss", "ISO8601" ]
  #
  # The above will match a syslog (rfc3164) or iso8601 timestamp.
  #
  # There are a few special exceptions. The following format literals exist
  # to help you save time and ensure correctness of date parsing.
  #
  # * "ISO8601" - should parse any valid ISO8601 timestamp, such as
  #   2011-04-19T03:44:01.103Z
  # * "UNIX" - will parse unix time in seconds since epoch
  # * "UNIX_MS" - will parse unix time in milliseconds since epoch
  # * "TAI64N" - will parse tai64n time values
  #
  # For example, if you have a field 'logdate' with a value that looks like
  # 'Aug 13 2010 00:03:44', you would use this configuration:
  #
  #     filter {
  #       date {
  #         match => [ "logdate", "MMM dd yyyy HH:mm:ss" ]
  #       }
  #     }
  #
  # If your field is nested in your structure, you can use the nested
  # syntax [foo][bar] to match its value. For more information, please refer to
  # http://logstash.net/docs/latest/configuration#fieldreferences
  config :match, :validate => :array, :default => []

  # Store the matching timestamp into the given target field. If not provided,
  # default to updating the @timestamp field of the event.
  config :target, :validate => :string, :default => "@timestamp"

  # LOGSTASH-34
  DATEPATTERNS = %w{ y d H m s S }

  public
  def initialize(config = {})
    super

    @parsers = Hash.new { |h,k| h[k] = [] }
  end # def initialize

  public
  def register
    require "java"
    if @match.length < 2
      raise LogStash::ConfigurationError, I18n.t("logstash.agent.configuration.invalid_plugin_register",
        :plugin => "filter", :type => "date",
        :error => "The match setting should contain first a field name and at least one date format, current value is #{@match}")
    end

    locale = nil
    if @locale
      if @locale.include? '_'
        @logger.warn("The date filter now uses BCP 47 format for locales; replacing underscore with dash")
        @locale.gsub!('_','-')
      end
      locale = java.util.Locale.forLanguageTag(@locale)
    end
    setupMatcher(@config["match"].shift, locale, @config["match"])
  end # def register

  def setupMatcher(field, locale, value)
    value.each do |format|
      parsers = []
      case format
      when "ISO8601"
        iso_parser = org.joda.time.format.ISODateTimeFormat.dateTimeParser
        if @timezone
          iso_parser = iso_parser.withZone(org.joda.time.DateTimeZone.forID(@timezone))
        else
          iso_parser = iso_parser.withOffsetParsed
        end
        parsers << lambda { |date| iso_parser.parseMillis(date) }
        # Fallback for almost-ISO8601 date-times (space instead of "T")
        almostISOparsers = [
          org.joda.time.format.DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSSZ").getParser(),
          org.joda.time.format.DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS").getParser()
        ].to_java(org.joda.time.format.DateTimeParser)
        joda_parser = org.joda.time.format.DateTimeFormatterBuilder.new.append(nil, almostISOparsers).toFormatter()
        if @timezone
          joda_parser = joda_parser.withZone(org.joda.time.DateTimeZone.forID(@timezone))
        else
          joda_parser = joda_parser.withOffsetParsed
        end
        parsers << lambda { |date| joda_parser.parseMillis(date) }
      when "UNIX" # unix epoch
        parsers << lambda do |date|
          raise "Invalid UNIX epoch value '#{date}'" unless /^\d+(?:\.\d+)?$/ === date || date.is_a?(Numeric)
          (date.to_f * 1000).to_i
        end
      when "UNIX_MS" # unix epoch in ms
        parsers << lambda do |date|
          raise "Invalid UNIX epoch value '#{date}'" unless /^\d+$/ === date || date.is_a?(Numeric)
          date.to_i
        end
      when "TAI64N" # TAI64 with nanoseconds; -10000 accounts for leap seconds
        parsers << lambda do |date|
          # Skip leading "@" if it is present (common in tai64n times)
          date = date[1..-1] if date[0, 1] == "@"
          return (date[1..15].hex * 1000 - 10000) + (date[16..23].hex / 1000000)
        end
      else
        joda_parser = org.joda.time.format.DateTimeFormat.forPattern(format).withDefaultYear(Time.new.year)
        if @timezone
          joda_parser = joda_parser.withZone(org.joda.time.DateTimeZone.forID(@timezone))
        else
          joda_parser = joda_parser.withOffsetParsed
        end
        if (locale != nil)
          joda_parser = joda_parser.withLocale(locale)
        end
        parsers << lambda { |date| joda_parser.parseMillis(date) }
      end

      @logger.debug("Adding type with date config", :type => @type,
                    :field => field, :format => format)
      @parsers[field] << {
        :parser => parsers,
        :format => format
      }
    end
  end # def setupMatcher

  public
  def filter(event)
    @logger.debug? && @logger.debug("Date filter: received event", :type => event["type"])
    return unless filter?(event)
    @parsers.each do |field, fieldparsers|
      @logger.debug? && @logger.debug("Date filter looking for field",
                                      :type => event["type"], :field => field)
      next unless event.include?(field)

      fieldvalues = event[field]
      fieldvalues = [fieldvalues] if !fieldvalues.is_a?(Array)
      fieldvalues.each do |value|
        next if value.nil?
        begin
          epochmillis = nil
          success = false
          last_exception = RuntimeError.new "Unknown"
          fieldparsers.each do |parserconfig|
            parserconfig[:parser].each do |parser|
              begin
                epochmillis = parser.call(value)
                success = true
                break # success
              rescue StandardError, JavaException => e
                last_exception = e
              end
            end # parserconfig[:parser].each
            break if success
          end # fieldparsers.each

          raise last_exception unless success

          # Convert joda DateTime to a ruby Time
          event[@target] = LogStash::Timestamp.at(epochmillis / 1000, (epochmillis % 1000) * 1000)

          @logger.debug? && @logger.debug("Date parsing done", :value => value, :timestamp => event[@target])
          filter_matched(event)
        rescue StandardError, JavaException => e
          @logger.warn("Failed parsing date from field", :field => field,
                       :value => value, :exception => e)
          # Raising here will bubble all the way up and cause an exit.
          # TODO(sissel): Maybe we shouldn't raise?
          # TODO(sissel): What do we do on a failure? Tag it like grok does?
          #raise e
        end # begin
      end # fieldvalues.each
    end # @parsers.each

    return event
  end # def filter
end # class LogStash::Filters::Date
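Note: the TAI64N branch above is terse, so here is a worked example of its arithmetic using the label from this gem's own spec file; the variable names are illustrative only, not part of the plugin's API:

    require "time"

    label = "4000000050d506482dbdf024"

    # The first 16 hex chars are TAI seconds offset by 2^62 (hence the leading
    # "4..."); reading label[1..15] drops that first hex digit and with it the
    # 2^62 offset. The last 8 hex chars are nanoseconds.
    seconds = label[1..15].hex   # => 1356138056 (TAI seconds)
    nanos   = label[16..23].hex  # => 767422500

    # As in the parser lambda: convert to epoch millis, subtracting the fixed
    # 10000 ms TAI-to-UTC offset the filter uses to account for leap seconds.
    millis = seconds * 1000 - 10000 + nanos / 1000000
    puts Time.at(millis / 1000, (millis % 1000) * 1000).utc.iso8601(3)
    # => "2012-12-22T01:00:46.767Z", matching the expectation in date_spec.rb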
data/logstash-filter-date.gemspec
ADDED
@@ -0,0 +1,28 @@
Gem::Specification.new do |s|

  s.name          = 'logstash-filter-date'
  s.version       = '0.1.0'
  s.licenses      = ['Apache License (2.0)']
  s.summary       = "The date filter is used for parsing dates from fields, and then using that date or timestamp as the logstash timestamp for the event."
  s.description   = "Convert arbitrary date format into Logstash timestamp"
  s.authors       = ["Elasticsearch"]
  s.email         = 'richard.pijnenburg@elasticsearch.com'
  s.homepage      = "http://logstash.net/"
  s.require_paths = ["lib"]

  # Files
  s.files = `git ls-files`.split($\) + ::Dir.glob('vendor/*')

  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "group" => "filter" }

  # Gem dependencies
  s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
  s.add_runtime_dependency 'logstash-input-generator'
  s.add_runtime_dependency 'logstash-codec-json'
  s.add_runtime_dependency 'logstash-output-null'
end
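Note: a quick way to sanity-check this gemspec is to load it the same way RubyGems does; a minimal sketch, assuming it runs from inside a git checkout of the plugin (s.files shells out to `git ls-files`, so it resolves nothing elsewhere):

    require "rubygems"

    spec = Gem::Specification.load("logstash-filter-date.gemspec")
    puts spec.full_name                    # => "logstash-filter-date-0.1.0"
    # The metadata flag below is what marks this gem as a logstash plugin.
    puts spec.metadata["logstash_plugin"]  # => "true"
    puts spec.metadata["group"]            # => "filter"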
data/rakelib/publish.rake
ADDED
@@ -0,0 +1,9 @@
require "gem_publisher"

desc "Publish gem to RubyGems.org"
task :publish_gem do |t|
  gem_file = Dir.glob(File.expand_path('../*.gemspec', File.dirname(__FILE__))).first
  gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
  puts "Published #{gem}" if gem
end
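Note: this task is normally run as `rake publish_gem` from the plugin root; GemPublisher.publish_if_updated is intended to push only when the gemspec version has not already been published, so re-running it is safe. A hedged sketch of invoking it programmatically (assuming the gem_publisher gem is installed and a Rakefile is present, since Rake auto-loads rakelib/*.rake):

    require "rake"

    Rake.application.init
    Rake.application.load_rakefile   # also picks up rakelib/publish.rake
    Rake::Task["publish_gem"].invoke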
data/rakelib/vendor.rake
ADDED
@@ -0,0 +1,169 @@
require "net/http"
require "uri"
require "digest/sha1"
require "zlib" # Zlib::GzipReader is used by untar and ungz below

def vendor(*args)
  return File.join("vendor", *args)
end

directory "vendor/" => ["vendor"] do |task, args|
  mkdir task.name
end

def fetch(url, sha1, output)

  puts "Downloading #{url}"
  actual_sha1 = download(url, output)

  if actual_sha1 != sha1
    fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
  end
end # def fetch

def file_fetch(url, sha1)
  filename = File.basename(URI(url).path)
  output = "vendor/#{filename}"
  task output => [ "vendor/" ] do
    begin
      actual_sha1 = file_sha1(output)
      if actual_sha1 != sha1
        fetch(url, sha1, output)
      end
    rescue Errno::ENOENT
      fetch(url, sha1, output)
    end
  end.invoke

  return output
end

def file_sha1(path)
  digest = Digest::SHA1.new
  fd = File.new(path, "r")
  while true
    begin
      digest << fd.sysread(16384)
    rescue EOFError
      break
    end
  end
  return digest.hexdigest
ensure
  fd.close if fd
end

def download(url, output)
  uri = URI(url)
  digest = Digest::SHA1.new
  tmp = "#{output}.tmp"
  Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
    request = Net::HTTP::Get.new(uri.path)
    http.request(request) do |response|
      # Net::HTTP response codes are strings; fail on anything but success.
      fail "HTTP fetch failed for #{url}. #{response}" unless ["200", "301"].include?(response.code)
      size = (response["content-length"].to_i || -1).to_f
      count = 0
      File.open(tmp, "w") do |fd|
        response.read_body do |chunk|
          fd.write(chunk)
          digest << chunk
          if size > 0 && $stdout.tty?
            count += chunk.bytesize
            $stdout.write(sprintf("\r%0.2f%%", count / size * 100))
          end
        end
      end
      $stdout.write("\r      \r") if $stdout.tty?
    end
  end

  File.rename(tmp, output)

  return digest.hexdigest
rescue SocketError => e
  puts "Failure while downloading #{url}: #{e}"
  raise
ensure
  File.unlink(tmp) if File.exist?(tmp)
end # def download

def untar(tarball, &block)
  require "archive/tar/minitar"
  tgz = Zlib::GzipReader.new(File.open(tarball))
  # Pull out typesdb
  tar = Archive::Tar::Minitar::Input.open(tgz)
  tar.each do |entry|
    path = block.call(entry)
    next if path.nil?
    parent = File.dirname(path)

    mkdir_p parent unless File.directory?(parent)

    # Skip this file if the output file is the same size
    if entry.directory?
      mkdir path unless File.directory?(path)
    else
      entry_mode = entry.instance_eval { @mode } & 0777
      if File.exists?(path)
        stat = File.stat(path)
        # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
        # expose headers in the entry.
        entry_size = entry.instance_eval { @size }
        # If file sizes are same, skip writing.
        next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
      end
      puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
      File.open(path, "w") do |fd|
        # eof? check lets us skip empty files. Necessary because the API provided by
        # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
        # IO object. Something about empty files in this EntryStream causes
        # IO.copy_stream to throw "can't convert nil into String" on JRuby.
        # TODO(sissel): File a bug about this.
        while !entry.eof?
          chunk = entry.read(16384)
          fd.write(chunk)
        end
        #IO.copy_stream(entry, fd)
      end
      File.chmod(entry_mode, path)
    end
  end
  tar.close
  File.unlink(tarball) if File.file?(tarball)
end # def untar

def ungz(file)

  outpath = file.gsub('.gz', '')
  tgz = Zlib::GzipReader.new(File.open(file))
  begin
    File.open(outpath, "w") do |out|
      IO::copy_stream(tgz, out)
    end
    File.unlink(file)
  rescue
    File.unlink(outpath) if File.file?(outpath)
    raise
  end
  tgz.close
end

desc "Process any vendor files required for this plugin"
task "vendor" do |task, args|

  @files.each do |file|
    download = file_fetch(file['url'], file['sha1'])
    if download =~ /\.tar\.gz/
      prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
      untar(download) do |entry|
        if !file['files'].nil?
          next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
          out = entry.full_name.split("/").last
        end
        File.join('vendor', out)
      end
    elsif download =~ /\.gz/
      ungz(download)
    end
  end

end
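Note: vendor.rake reads an instance variable @files that it never defines; the plugin's own Rakefile is expected to set it before the "vendor" task runs. A minimal sketch of the shape the task iterates over (the URL, SHA1, and file list below are placeholders, not real artifacts); as written, the untar block only assigns `out` when a 'files' list is given, so each tarball entry should include one:

    # In the plugin's Rakefile, before `rake vendor` is invoked:
    @files = [
      {
        'url'   => "https://example.com/some-dependency-1.0.tar.gz",
        'sha1'  => "0123456789abcdef0123456789abcdef01234567",
        # Entries to keep, as paths relative to the tarball's top-level
        # directory; each kept file lands flattened under vendor/.
        'files' => ["/lib/some-dependency.jar"],
      },
    ]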
data/spec/filters/date_spec.rb
ADDED
@@ -0,0 +1,422 @@
require "spec_helper"
require "logstash/filters/date"

puts "Skipping date performance tests because this ruby is not jruby" if RUBY_ENGINE != "jruby"
RUBY_ENGINE == "jruby" and describe LogStash::Filters::Date do

  describe "given an invalid match config, raises a configuration error" do
    config <<-CONFIG
      filter {
        date {
          match => [ "mydate" ]
          locale => "en"
        }
      }
    CONFIG

    sample "not_really_important" do
      insist { subject }.raises LogStash::ConfigurationError
    end

  end

  describe "parsing with ISO8601" do
    config <<-CONFIG
      filter {
        date {
          match => [ "mydate", "ISO8601" ]
          locale => "en"
          timezone => "UTC"
        }
      }
    CONFIG

    times = {
      "2001-01-01T00:00:00-0800"      => "2001-01-01T08:00:00.000Z",
      "1974-03-02T04:09:09-0800"      => "1974-03-02T12:09:09.000Z",
      "2010-05-03T08:18:18+00:00"     => "2010-05-03T08:18:18.000Z",
      "2004-07-04T12:27:27-00:00"     => "2004-07-04T12:27:27.000Z",
      "2001-09-05T16:36:36+0000"      => "2001-09-05T16:36:36.000Z",
      "2001-11-06T20:45:45-0000"      => "2001-11-06T20:45:45.000Z",
      "2001-12-07T23:54:54Z"          => "2001-12-07T23:54:54.000Z",

      # TODO: This test assumes PDT
      #"2001-01-01T00:00:00.123"      => "2001-01-01T08:00:00.123Z",

      "2010-05-03T08:18:18.123+00:00" => "2010-05-03T08:18:18.123Z",
      "2004-07-04T12:27:27.123-04:00" => "2004-07-04T16:27:27.123Z",
      "2001-09-05T16:36:36.123+0700"  => "2001-09-05T09:36:36.123Z",
      "2001-11-06T20:45:45.123-0000"  => "2001-11-06T20:45:45.123Z",
      "2001-12-07T23:54:54.123Z"      => "2001-12-07T23:54:54.123Z",

      # Almost-ISO8601 support, with timezone

      "2001-11-06 20:45:45.123-0000"  => "2001-11-06T20:45:45.123Z",
      "2001-12-07 23:54:54.123Z"      => "2001-12-07T23:54:54.123Z",

      # Almost-ISO8601 support, without timezone

      "2001-11-06 20:45:45.123"       => "2001-11-06T20:45:45.123Z",

    }

    times.each do |input, output|
      sample("mydate" => input) do
        begin
          insist { subject["mydate"] } == input
          insist { subject["@timestamp"].time } == Time.iso8601(output).utc
        rescue
          #require "pry"; binding.pry
          raise
        end
      end
    end # times.each
  end

  describe "parsing with java SimpleDateFormat syntax" do
    config <<-CONFIG
      filter {
        date {
          match => [ "mydate", "MMM dd HH:mm:ss Z" ]
          locale => "en"
        }
      }
    CONFIG

    now = Time.now
    year = now.year
    require 'java'

    times = {
      "Nov 24 01:29:01 -0800" => "#{year}-11-24T09:29:01.000Z",
    }
    times.each do |input, output|
      sample("mydate" => input) do
        insist { subject["mydate"] } == input
        insist { subject["@timestamp"].time } == Time.iso8601(output).utc
      end
    end # times.each
  end

  describe "parsing with UNIX" do
    config <<-CONFIG
      filter {
        date {
          match => [ "mydate", "UNIX" ]
          locale => "en"
        }
      }
    CONFIG

    times = {
      "0"          => "1970-01-01T00:00:00.000Z",
      "1000000000" => "2001-09-09T01:46:40.000Z",

      # LOGSTASH-279 - sometimes the field is a number.
      0          => "1970-01-01T00:00:00.000Z",
      1000000000 => "2001-09-09T01:46:40.000Z"
    }
    times.each do |input, output|
      sample("mydate" => input) do
        insist { subject["mydate"] } == input
        insist { subject["@timestamp"].time } == Time.iso8601(output).utc
      end
    end # times.each

    # Invalid values should not be evaluated to zero (String#to_i madness)
    sample("mydate" => "%{bad_value}") do
      insist { subject["mydate"] } == "%{bad_value}"
      insist { subject["@timestamp"] } != Time.iso8601("1970-01-01T00:00:00.000Z").utc
    end
  end

  describe "parsing microsecond-precise times with UNIX (#213)" do
    config <<-CONFIG
      filter {
        date {
          match => [ "mydate", "UNIX" ]
          locale => "en"
        }
      }
    CONFIG

    sample("mydate" => "1350414944.123456") do
      # Joda time only supports milliseconds :\
      insist { subject.timestamp.time } == Time.iso8601("2012-10-16T12:15:44.123-07:00").utc
    end

    # Support float values
    sample("mydate" => 1350414944.123456) do
      insist { subject["mydate"] } == 1350414944.123456
      insist { subject["@timestamp"].time } == Time.iso8601("2012-10-16T12:15:44.123-07:00").utc
    end

    # Invalid values should not be evaluated to zero (String#to_i madness)
    sample("mydate" => "%{bad_value}") do
      insist { subject["mydate"] } == "%{bad_value}"
      insist { subject["@timestamp"] } != Time.iso8601("1970-01-01T00:00:00.000Z").utc
    end
  end

  describe "parsing with UNIX_MS" do
    config <<-CONFIG
      filter {
        date {
          match => [ "mydate", "UNIX_MS" ]
          locale => "en"
        }
      }
    CONFIG

    times = {
      "0"             => "1970-01-01T00:00:00.000Z",
      "456"           => "1970-01-01T00:00:00.456Z",
      "1000000000123" => "2001-09-09T01:46:40.123Z",

      # LOGSTASH-279 - sometimes the field is a number.
      0             => "1970-01-01T00:00:00.000Z",
      456           => "1970-01-01T00:00:00.456Z",
      1000000000123 => "2001-09-09T01:46:40.123Z"
    }
    times.each do |input, output|
      sample("mydate" => input) do
        insist { subject["mydate"] } == input
        insist { subject["@timestamp"].time } == Time.iso8601(output)
      end
    end # times.each
  end

  describe "failed parses should not cause a failure (LOGSTASH-641)" do
    config <<-'CONFIG'
      input {
        generator {
          lines => [
            '{ "mydate": "this will not parse" }',
            '{ }'
          ]
          codec => json
          type => foo
          count => 1
        }
      }
      filter {
        date {
          match => [ "mydate", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
          locale => "en"
        }
      }
      output {
        null { }
      }
    CONFIG

    agent do
      # nothing to do; if this crashes it's an error..
    end
  end

  describe "TAI64N support" do
    config <<-'CONFIG'
      filter {
        date {
          match => [ "t", "TAI64N" ]
          locale => "en"
        }
      }
    CONFIG

    # Try without the leading "@"
    sample("t" => "4000000050d506482dbdf024") do
      insist { subject.timestamp.time } == Time.iso8601("2012-12-22T01:00:46.767Z").utc
    end

    # Should still parse successfully if it's a full tai64n time (with leading
    # '@')
    sample("t" => "@4000000050d506482dbdf024") do
      insist { subject.timestamp.time } == Time.iso8601("2012-12-22T01:00:46.767Z").utc
    end
  end

  describe "accept match config option with hash value (LOGSTASH-735)" do
    config <<-CONFIG
      filter {
        date {
          match => [ "mydate", "ISO8601" ]
          locale => "en"
        }
      }
    CONFIG

    time = "2001-09-09T01:46:40.000Z"

    sample("mydate" => time) do
      insist { subject["mydate"] } == time
      insist { subject["@timestamp"].time } == Time.iso8601(time).utc
    end
  end

  describe "support deep nested field access" do
    config <<-CONFIG
      filter {
        date {
          match => [ "[data][deep]", "ISO8601" ]
          locale => "en"
        }
      }
    CONFIG

    sample("data" => { "deep" => "2013-01-01T00:00:00.000Z" }) do
      insist { subject["@timestamp"].time } == Time.iso8601("2013-01-01T00:00:00.000Z").utc
    end
  end

  describe "failing to parse should not throw an exception" do
    config <<-CONFIG
      filter {
        date {
          match => [ "thedate", "yyyy/MM/dd" ]
          locale => "en"
        }
      }
    CONFIG

    sample("thedate" => "2013/Apr/21") do
      insist { subject["@timestamp"] } != "2013-04-21T00:00:00.000Z"
    end
  end

  describe "a successful parse should apply the on-success config (add_tag, add_field...)" do
    config <<-CONFIG
      filter {
        date {
          match => [ "thedate", "yyyy/MM/dd" ]
          add_tag => "tagged"
        }
      }
    CONFIG

    sample("thedate" => "2013/04/21") do
      insist { subject["@timestamp"] } != "2013-04-21T00:00:00.000Z"
      insist { subject["tags"] } == ["tagged"]
    end
  end

  describe "a failed parse should not apply the on-success config (add_tag, add_field...)" do
    config <<-CONFIG
      filter {
        date {
          match => [ "thedate", "yyyy/MM/dd" ]
          add_tag => "tagged"
        }
      }
    CONFIG

    sample("thedate" => "2013/Apr/21") do
      insist { subject["@timestamp"] } != "2013-04-21T00:00:00.000Z"
      insist { subject["tags"] } == nil
    end
  end

  describe "parsing with timezone parameter" do
    config <<-CONFIG
      filter {
        date {
          match => ["mydate", "yyyy MMM dd HH:mm:ss"]
          locale => "en"
          timezone => "America/Los_Angeles"
        }
      }
    CONFIG

    require 'java'
    times = {
      "2013 Nov 24 01:29:01" => "2013-11-24T09:29:01.000Z",
      "2013 Jun 24 01:29:01" => "2013-06-24T08:29:01.000Z",
    }
    times.each do |input, output|
      sample("mydate" => input) do
        insist { subject["mydate"] } == input
        insist { subject["@timestamp"].time } == Time.iso8601(output).utc
      end
    end # times.each
  end

  describe "LOGSTASH-34 - Default year should be this year" do
    config <<-CONFIG
      filter {
        date {
          match => [ "message", "EEE MMM dd HH:mm:ss" ]
          locale => "en"
        }
      }
    CONFIG

    sample "Sun Jun 02 20:38:03" do
      insist { subject["@timestamp"].year } == Time.now.year
    end
  end

  describe "Supporting locale only" do
    config <<-CONFIG
      filter {
        date {
          match => [ "message", "dd MMMM yyyy" ]
          locale => "fr"
          timezone => "UTC"
        }
      }
    CONFIG

    sample "14 juillet 1789" do
      insist { subject["@timestamp"].time } == Time.iso8601("1789-07-14T00:00:00.000Z").utc
    end
  end

  describe "Supporting locale+country in BCP47" do
    config <<-CONFIG
      filter {
        date {
          match => [ "message", "dd MMMM yyyy" ]
          locale => "fr-FR"
          timezone => "UTC"
        }
      }
    CONFIG

    sample "14 juillet 1789" do
      insist { subject["@timestamp"].time } == Time.iso8601("1789-07-14T00:00:00.000Z").utc
    end
  end

  describe "Supporting locale+country in POSIX (internally replace '_' by '-')" do
    config <<-CONFIG
      filter {
        date {
          match => [ "message", "dd MMMM yyyy" ]
          locale => "fr_FR"
          timezone => "UTC"
        }
      }
    CONFIG

    sample "14 juillet 1789" do
      insist { subject["@timestamp"].time } == Time.iso8601("1789-07-14T00:00:00.000Z").utc
    end
  end

  describe "http dates" do

    config <<-'CONFIG'
      filter {
        date {
          match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
          locale => "en"
        }
      }
    CONFIG

    sample("timestamp" => "25/Mar/2013:20:33:56 +0000") do
      insist { subject["@timestamp"].time } == Time.iso8601("2013-03-25T20:33:56.000Z")
    end
  end
end
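Note: the config/sample/insist DSL above comes from logstash's spec_helper (the insist gem provides the assertions). Stripped of the DSL, each sample block amounts to roughly the following; a hand-rolled sketch against the logstash 1.4-era plugin API, not the helper's actual implementation:

    require "logstash/event"
    require "logstash/filters/date"

    # Build the filter from a params hash (what the config DSL compiles to),
    # register it, and run one event through it.
    filter = LogStash::Filters::Date.new("match" => ["mydate", "UNIX_MS"])
    filter.register

    event = LogStash::Event.new("mydate" => "456")
    filter.filter(event)
    puts event["@timestamp"]  # => 1970-01-01T00:00:00.456Z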
metadata
ADDED
@@ -0,0 +1,118 @@
--- !ruby/object:Gem::Specification
name: logstash-filter-date
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- Elasticsearch
autorequire:
bindir: bin
cert_chain: []
date: 2014-11-05 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: logstash
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: 1.4.0
    - - <
      - !ruby/object:Gem::Version
        version: 2.0.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: 1.4.0
    - - <
      - !ruby/object:Gem::Version
        version: 2.0.0
- !ruby/object:Gem::Dependency
  name: logstash-input-generator
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: logstash-codec-json
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: logstash-output-null
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
description: Convert arbitrary date format into Logstash timestamp
email: richard.pijnenburg@elasticsearch.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- .gitignore
- Gemfile
- LICENSE
- Rakefile
- lib/logstash/filters/date.rb
- logstash-filter-date.gemspec
- rakelib/publish.rake
- rakelib/vendor.rake
- spec/filters/date_spec.rb
homepage: http://logstash.net/
licenses:
- Apache License (2.0)
metadata:
  logstash_plugin: 'true'
  group: filter
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ! '>='
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ! '>='
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.4.1
signing_key:
specification_version: 4
summary: The date filter is used for parsing dates from fields, and then using that
  date or timestamp as the logstash timestamp for the event.
test_files:
- spec/filters/date_spec.rb