logstash-filter-dissect 1.0.6 → 1.0.7

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: e8ae7e2bba795f607709e23084ba6c6603470909
- data.tar.gz: d92b6cec5ce57c933e356b147d67d9d50d0f9602
+ metadata.gz: 4066828d764fb5671602ea92941a48c3d1c87adc
+ data.tar.gz: e9dd175b2a351effd59fc4c654d5c9be675a1aa7
  SHA512:
- metadata.gz: 449cafef68edfd28da3b5dc6c56ad5b93cd532261251868acb5d76f02c654b824120dfa6b937ed6b32085ffd25a4c89dc0a98752840be752580c2ba20e176b93
- data.tar.gz: dcea450f73cf8ad4f3f9845ffe1d850feec89cc02683c2d0e3f72049141844d2550f701a3c9aab24c1620f6504217ed02e52a84f283287c1efccfe88f1c903e7
+ metadata.gz: bf533ed8fd5dc76a2ca2f1dcbb4df512935bc5c0d8fa8976c1d5422762c7932d574801bd54c8ace3f7fb142a10bd2ecd1d9b29eadef31e66232a6463b2ce843e
+ data.tar.gz: 4175f2160f80bbfb5fa18b20f50e9a89283d611a7f3a3188737244dfe89d01fb1abb6aacbac554c8fc894df214089e0ed2e9bc52a92ab9a5b5dcdcb3f9e9924e
data/BUILD_INSTRUCTIONS.md ADDED
@@ -0,0 +1,23 @@
+ These instructions are for the Java build, NOT the Ruby gem build and publish.
+
+ #### History
+ This is a Logstash plugin with a large amount of Java code.
+
+ The Java source depends on the logstash-core jar and the logstash-core-event jar.
+ These jars can be found in the gems that are resolved via the `s.add_runtime_dependency "logstash-core-plugin-api", "~> 2.0"` line in the gemspec.
+ Different versions of these gems can therefore be installed over time, so the Gradle build dependencies cannot refer to a static location and version.
+
+ These Rake tasks allow Gradle to depend on Rubygems:
+ - `rake vendor` - this task calls `./gradlew vendor`, but only after the `bundle_install` rake task has run
+ - `rake bundle_install` - this task calls `bundle install`, then puts the output of `bundle show logstash-core` and `bundle show logstash-core-event` into the `gradle.properties` file
+
+ #### Procedure
+ Each time a new gem version of `logstash-core-plugin-api` is released, we need to run `rake vendor` to verify that any changes to the Java in logstash-core and logstash-core-event are still compatible with their usage in this plugin.
+
+ Run `rake vendor`
+
+ #### Travis
+ Travis is set to run `rake write_gradle_properties` before its build task.
+
+ #### More Information
+ See the Gradle `vendor` task to understand how the jar is generated.
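
The Rake-to-Gradle handoff described above is simple to picture in code. Below is a minimal sketch, not the plugin's actual Rakefile; the `gradle.properties` key names are assumptions for illustration.

```ruby
# Hypothetical sketch of the tasks described in BUILD_INSTRUCTIONS.md.
# The gradle.properties keys below are assumed names, not the real ones.
task :bundle_install do
  system("bundle install") or raise "bundle install failed"
  core  = `bundle show logstash-core`.strip
  event = `bundle show logstash-core-event`.strip
  File.write("gradle.properties",
             "logstashCoreGemPath=#{core}\n" \
             "logstashCoreEventGemPath=#{event}\n")
end

# `rake vendor` runs the Gradle vendor task only after gem paths are resolved.
task :vendor => :bundle_install do
  system("./gradlew vendor") or raise "gradle vendor failed"
end
```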
data/CHANGELOG.md CHANGED
@@ -1,5 +1,8 @@
+ ## 1.0.7
+ - Update the version and rebuild the vendored jar.
+
  ## 1.0.6
- - Relax constraint on logstash-core-plugin-api to >= 1.60 <= 2.99
+ - Skipping this version number; it exists on Rubygems but is faulty
 
  ## 1.0.5
  - Initial commit
data/Gemfile CHANGED
@@ -1,6 +1,3 @@
  source 'https://rubygems.org'
  gemspec
 
- gem 'logstash-input-generator', '~> 3.0', '>= 3.0.1'
- gem 'logstash-output-null', '~> 3.0', '>= 3.0.1'
- gem 'logstash-filter-drop', '~> 3.0', '>= 3.0.1'
data/README.md CHANGED
@@ -1,3 +1,6 @@
+ ### NOTE
+ Please read BUILD_INSTRUCTIONS.md
+
  # Logstash Plugin
 
  [![Travis Build Status](https://travis-ci.org/logstash-plugins/logstash-filter-example.svg)](https://travis-ci.org/logstash-plugins/logstash-filter-example)
data/VERSION ADDED
@@ -0,0 +1 @@
+ 1.0.7
data/lib/jruby-dissect-library_jars.rb ADDED
@@ -0,0 +1,4 @@
+ # AUTOGENERATED BY THE GRADLE SCRIPT. DO NOT EDIT.
+
+ require 'jar_dependencies'
+ require_jar('org.logstash.dissect', 'jruby-dissect-library', '1.0.7')
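
For context: `require_jar` comes from the jar-dependencies gem and resolves the jar from the Maven-style directory layout vendored inside the gem (the paths are visible in the metadata files list below). A rough sketch of the equivalent manual require, assuming the standard vendored layout:

```ruby
# Sketch: roughly what require_jar('org.logstash.dissect',
# 'jruby-dissect-library', '1.0.7') loads when the jar is vendored in this gem.
require "java" # JRuby can require a .jar path directly
jar = File.expand_path(
  "../vendor/jars/org/logstash/dissect/jruby-dissect-library/1.0.7/" \
  "jruby-dissect-library-1.0.7.jar",
  File.dirname(__FILE__)
)
require jar
```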
data/lib/logstash/filters/dissect.rb CHANGED
@@ -3,109 +3,134 @@ require "logstash/filters/base"
  require "logstash/namespace"
 
  require "java"
- require "jars/jruby-dissect-library.jar"
+ require "jruby-dissect-library_jars"
  require "jruby_dissector"
 
- # De-structures text
- #
- # The dissect filter is a kind of split operation.
- # Unlike a regular split operation where a single delimiter is applied to the
- # whole string, this operation applies a sequence of delimiters to an Event field's
- # string value. This sequence is called a dissection.
- # The dissection is created as a string using a %{} notation:
- # ........
- #          delimiter   suffix
- #            +---+       ++
- # %{key1}/ -- %{+key2/1}: %{&key1}
- # +-----+       |  +--+
- #  field     prefix key
- # ........
-
- # Note: delimiters can't contain the `%{` `}` characters.
-
- # The config should look like this:
- # [source, ruby]
+ # ==== *Dissect or how to de-structure text*
+ #
+ # The Dissect filter is a kind of split operation. Unlike a regular split operation, where one delimiter is applied to the whole string, this operation applies a set of delimiters to a string value. +
+ # Dissect does not use regular expressions and is very fast. +
+ # However, if the structure of your text varies from line to line then Grok is more suitable. +
+ # There is a hybrid case where Dissect can be used to de-structure the section of the line that is reliably repeated, and then Grok can be used on the remaining field values with more regex predictability and less overall work to do. +
+ #
+ # A set of fields and delimiters is called a *dissection*.
+ #
+ # The dissection is described using a set of `%{}` sections:
+ # ....
+ # %{a} - %{b} - %{c}
+ # ....
+ #
+ # A *field* is the text from `%{` to `}` inclusive.
+ #
+ # A *delimiter* is the text between a `}` and the next `%{`.
+ #
+ # [NOTE]
+ # Delimiters can't contain these `}{%` characters.
+ #
+ # The config might look like this:
+ # ....
  # filter {
  #   dissect {
  #     mapping => {
- #       "message" => "%{timestamp} %{+timestamp} %{+timestamp} %{logsource} %{} %{program}[%{pid}]: %{msg}"
+ #       "message" => "%{ts} %{+ts} %{+ts} %{src} %{} %{prog}[%{pid}]: %{msg}"
  #     }
  #   }
  # }
-
- # When dissecting a string any text between the delimiters, a found value, will be stored
- # in the Event using that field name.
-
- # The Key:
- # The key is the text between the `%{` and `}`, exclusive of the ?, +, & prefixes and the ordinal suffix.
- # `%{?aaa}` - key is `aaa`
- # `%{+bbb/3}` - key is `bbb`
- # `%{&ccc}` - key is `ccc`
-
- # Normal field notation
- # The found value is added to the Event using the key.
- # `%{some_field}` - a normal field
-
- # Skip field notation
- # The found value is recorded internally but not added to the Event.
+ # ....
+ # When dissecting a string from left to right, text is captured up to the first delimiter; this captured text is stored in the first field. This is repeated for each field/delimiter pair thereafter, until the last delimiter is reached, then *the remaining text is stored in the last field*. +
+ #
+ # *The Key:* +
+ # The key is the text between the `%{` and `}`, exclusive of the ?, +, & prefixes and the ordinal suffix. +
+ # `%{?aaa}` - key is `aaa` +
+ # `%{+bbb/3}` - key is `bbb` +
+ # `%{&ccc}` - key is `ccc` +
+ #
+ # *Normal field notation:* +
+ # The found value is added to the Event using the key. +
+ # `%{some_field}` - a normal field has no prefix or suffix
+ #
+ # *Skip field notation:* +
+ # The found value is stored internally but not added to the Event. +
  # The key, if supplied, is prefixed with a `?`.
- # `%{}` - an empty skip field
- # `%{?some_field}` - a named skip field
-
- # Append field notation
- # The value is appended to another value or stored if it's the first field seen.
- # The key is prefixed with a `+`.
- # The final value is stored in the Event using the key.
- # The delimiter found before the field or a space is appended before the found value.
- # `%{+some_field}` - an append field
- # `%{+some_field/2}` - an append field with an order modifier.
- # An order modifier, `/number`, allows one to reorder the append sequence.
- # e.g. for a text of `1 2 3 go`, this `%{+a/2} %{+a/1} %{+a/4} %{+a/3}` will build a key/value of `a => 2 1 go 3`
- # Append fields without an order modifier will append in declared order.
- # e.g. for a text of `1 2 3 go`, this `%{a} %{b} %{+a}` will build two key/values of `a => 1 3 go, b => 2`
-
- # Indirect field notation
- # The found value is added to the Event using the found value of another field as the key.
- # The key is prefixed with a `&`.
- # `%{&some_field}` - an indirect field where the key is indirectly sourced from the value of `some_field`.
- # e.g. for a text of `error: some_error, description`, this `error: %{?err}, %{&desc}` will build a key/value of `'some_error' => description`
- # Hint: use a Skip field if you do not want the indirection key/value stored.
+ #
+ # `%{}` is an empty skip field.
+ #
+ # `%{?foo}` is a named skip field.
+ #
+ # *Append field notation:* +
+ # The value is appended to another value, or stored if it's the first field seen. +
+ # The key is prefixed with a `+`. +
+ # The final value is stored in the Event using the key. +
+ #
+ # [NOTE]
+ # ====
+ # The delimiter found before the field is appended with the value. +
+ # If no delimiter is found before the field, a single space character is used.
+ # ====
+ #
+ # `%{+some_field}` is an append field. +
+ # `%{+some_field/2}` is an append field with an order modifier.
+ #
+ # An order modifier, `/digits`, allows one to reorder the append sequence. +
+ # e.g. for a text of `1 2 3 go`, this `%{+a/2} %{+a/1} %{+a/4} %{+a/3}` will build a key/value of `a => 2 1 go 3` +
+ # Append fields without an order modifier will append in declared order. +
+ # e.g. for a text of `1 2 3 go`, this `%{a} %{b} %{+a}` will build two key/values of `a => 1 3 go, b => 2` +
+ #
+ # *Indirect field notation:* +
+ # The found value is added to the Event using the found value of another field as the key. +
+ # The key is prefixed with a `&`. +
+ # `%{&some_field}` - an indirect field where the key is indirectly sourced from the value of `some_field`. +
+ # e.g. for a text of `error: some_error, some_description`, this `error: %{?err}, %{&err}` will build a key/value of `some_error => some_description`.
+ #
+ # [NOTE]
+ # For append and indirect fields, the key can refer to a field that already exists in the event before dissection.
+ #
+ # [NOTE]
+ # Use a Skip field if you do not want the indirection key/value stored.
+ #
  # e.g. for a text of `google: 77.98`, this `%{?a}: %{&a}` will build a key/value of `google => 77.98`.
-
- # Note: for append and indirect field the key can refer to a field that already exists in the event before dissection.
- # Note: append and indirect cannot be combined. This will fail validation.
- # `%{+&something}` - will add a value to the `&something` key, probably not the intended outcome.
- # `%{&+something}` - will add a value to the `+something` key, again unintended.
-
- # Delimiter repetition
- # In the source text if a field has variable width padded with delimiters, the padding will be ignored.
+ #
+ # [NOTE]
+ # ===============================
+ # Append and indirect cannot be combined and will fail validation. +
+ # `%{+&something}` will add a value to the `&something` key, probably not the intended outcome. +
+ # `%{&+something}` will add a value to the `+something` key, again probably unintended. +
+ # ===============================
+ #
+ # *Delimiter repetition:* +
+ # In the source text, if a field has variable width padded with delimiters, the padding will be ignored. +
  # e.g. for texts of:
- # ........
+ # ....
  # 00000043 ViewReceiver I
  # 000000b3 Peer I
- # ........
- # and a dissection of `%{a} %{b} %{c}`; the padding is ignored.
+ # ....
+ # with a dissection of `%{a} %{b} %{c}`, the padding is ignored and `event.get("c")` returns `"I"`.
  #
- # You probably want to put this filter in an if block to ensure that the event
- # contains text with a suitable layout.
- # [source, ruby]
+ # [NOTE]
+ # ====
+ # You probably want to use this filter inside an `if` block. +
+ # This ensures that the event contains a field value with a suitable structure for the dissection.
+ # ====
+ #
+ # For example...
+ # ....
  # filter {
  #   if [type] == "syslog" or "syslog" in [tags] {
  #     dissect {
  #       mapping => {
- #         "message" => "%{timestamp} %{+timestamp} %{+timestamp} %{logsource} %{} %{program}[%{pid}]: %{msg}"
+ #         "message" => "%{ts} %{+ts} %{+ts} %{src} %{} %{prog}[%{pid}]: %{msg}"
  #       }
  #     }
  #   }
  # }
+ # ....
 
  module LogStash module Filters class Dissect < LogStash::Filters::Base
 
  config_name "dissect"
 
- # A hash of dissections of field => value
- # A later dissection can be done on an earlier one
- # or they can be independent.
+ # A hash of dissections of `field => value` +
+ # A later dissection can be done on values from a previous dissection or they can be independent.
  #
  # For example
  # [source, ruby]
@@ -118,11 +143,25 @@ module LogStash module Filters class Dissect < LogStash::Filters::Base
  #   }
  # }
  #
- # This is useful if you want to keep the field `description` also
+ # This is useful if you want to keep the field `description` but also
  # dissect it some more.
+
  config :mapping, :validate => :hash, :default => {}
 
- # TODO add docs
+ # With this setting `int` and `float` datatype conversions can be specified. +
+ # These will be done after all `mapping` dissections have taken place. +
+ # Feel free to use this setting on its own without a `mapping` section. +
+ #
+ # For example
+ # [source, ruby]
+ # filter {
+ #   dissect {
+ #     convert_datatype => {
+ #       cpu => "float"
+ #       code => "int"
+ #     }
+ #   }
+ # }
  config :convert_datatype, :validate => :hash, :default => {}
 
  # Append values to the `tags` field when dissection fails
@@ -131,7 +170,7 @@ module LogStash module Filters class Dissect < LogStash::Filters::Base
  public
 
  def register
-   @dissector = LogStash::Dissector.new(@mapping)
+   @dissector = LogStash::Dissector.new(@mapping)
  end
 
  def filter(event)
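
The append-ordering and indirect-field semantics documented in the comments above can be checked end-to-end. A minimal spec sketch (not part of this gem's shipped specs) that reuses the doc comments' own examples:

```ruby
# Sketch only: exercises the documented append-order and indirect-field
# behaviour, assuming a Logstash plugin dev environment (logstash-devutils).
require "logstash/devutils/rspec/spec_helper"
require "logstash/filters/dissect"

describe LogStash::Filters::Dissect do
  it "reorders appended values by their /digits modifiers" do
    filter = described_class.new("mapping" => {"message" => "%{+a/2} %{+a/1} %{+a/4} %{+a/3}"})
    filter.register
    event = LogStash::Event.new("message" => "1 2 3 go")
    filter.filter(event)
    expect(event.get("a")).to eq("2 1 go 3") # as documented above
  end

  it "keys an indirect field by the value of the referenced skip field" do
    filter = described_class.new("mapping" => {"message" => "%{?a}: %{&a}"})
    filter.register
    event = LogStash::Event.new("message" => "google: 77.98")
    filter.filter(event)
    expect(event.get("google")).to eq("77.98") # as documented above
  end
end
```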
data/logstash-filter-dissect.gemspec CHANGED
@@ -1,8 +1,10 @@
+ DISSECT_VERSION = File.read(File.expand_path(File.join(File.dirname(__FILE__), "VERSION"))).strip unless defined?(DISSECT_VERSION)
+
  Gem::Specification.new do |s|
    s.name = 'logstash-filter-dissect'
-   s.version = '1.0.6'
+   s.version = DISSECT_VERSION
    s.licenses = ['Apache License (2.0)']
-   s.summary = "This dissect filter will destructurize text in multiple fields."
+   s.summary = "This dissect filter will de-structure text into multiple fields."
    s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
    s.authors = ["Elastic"]
    s.email = 'info@elastic.co'
@@ -10,7 +12,7 @@ Gem::Specification.new do |s|
    s.require_paths = ["lib"]
 
    # Files
-   s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
+   s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','VERSION','LICENSE','NOTICE.TXT']
    # Tests
    s.test_files = s.files.grep(%r{^(test|spec|features)/})
 
@@ -18,6 +20,9 @@ Gem::Specification.new do |s|
    s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }
 
    # Gem dependencies
-   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
-   s.add_development_dependency 'logstash-devutils', '~> 1.0.0'
+   s.add_runtime_dependency 'logstash-core-plugin-api', '>= 2.1.1', '<= 2.99'
+   s.add_runtime_dependency 'jar-dependencies'
+
+   s.add_development_dependency 'rspec'
+   s.add_development_dependency 'logstash-devutils'
  end
data/spec/filters/dissect_spec.rb CHANGED
@@ -26,6 +26,16 @@ describe LogStash::Filters::Dissect do
      @msgs.push(msg[0])
      @hashes.push(msg[1])
    end
+
+   def fatal(*msg)
+     @msgs.push(msg[0])
+     @hashes.push(msg[1])
+   end
+
+   def trace(*msg)
+     @msgs.push(msg[0])
+     @hashes.push(msg[1])
+   end
  end
 
  describe "Basic dissection" do
@@ -74,6 +84,31 @@ describe LogStash::Filters::Dissect do
    end
  end
 
+ describe "Basic dissection with multibyte Unicode characters" do
+   let(:config) do <<-CONFIG
+     filter {
+       dissect {
+         mapping => {
+           message => "[%{occurred_at}]྿྿྿%{code}྿%{service}྿྿྿྿%{?ic}=%{&ic}%྿྿%{svc_message}"
+         }
+         convert_datatype => {
+           cpu => "float"
+           code => "int"
+         }
+       }
+     }
+   CONFIG
+   end
+
+   sample("message" => "[25/05/16 09:10:38:425 BST]྿྿྿00000001྿SystemOut྿྿྿྿cpu=95.43%྿྿java.lang:type=MemoryPool,name=class storage") do
+     expect(subject.get("occurred_at")).to eq("25/05/16 09:10:38:425 BST")
+     expect(subject.get("code")).to eq(1)
+     expect(subject.get("service")).to eq("SystemOut")
+     expect(subject.get("cpu")).to eq(95.43)
+     expect(subject.get("svc_message")).to eq("java.lang:type=MemoryPool,name=class storage")
+   end
+ end
+
  describe "Basic dissection with failing datatype conversion" do
    subject(:filter) { LogStash::Filters::Dissect.new(config) }
 
@@ -92,7 +127,7 @@ describe LogStash::Filters::Dissect do
    let(:loggr) { LoggerMock.new }
 
    before(:each) do
-     filter.logger = loggr
+     filter.class.instance_variable_set("@logger", loggr)
    end
 
    it "tags and log messages are created" do
@@ -145,7 +180,7 @@ describe LogStash::Filters::Dissect do
    let(:loggr) { LoggerMock.new }
 
    before(:each) do
-     filter.logger = loggr
+     filter.class.instance_variable_set("@logger", loggr)
    end
 
    it "does not raise any exceptions" do
@@ -162,11 +197,6 @@ describe LogStash::Filters::Dissect do
  describe "valid field format handling" do
    subject(:filter) { LogStash::Filters::Dissect.new(config) }
    let(:config) { {"mapping" => {"message" => "%{+timestamp/2} %{+timestamp/1} %{?no_name} %{&no_name} %{} %{program}[%{pid}]: %{msg}"}}}
-   let(:loggr) { LoggerMock.new }
-
-   before(:each) do
-     filter.logger = loggr
-   end
 
    it "does not raise an error in register" do
      expect{filter.register}.not_to raise_exception
@@ -175,16 +205,11 @@ describe LogStash::Filters::Dissect do
175
205
 
176
206
  describe "invalid field format handling" do
177
207
  subject(:filter) { LogStash::Filters::Dissect.new(config) }
178
- let(:loggr) { LoggerMock.new }
179
-
180
- before(:each) do
181
- filter.logger = loggr
182
- end
183
208
 
184
209
  context "when field is defined as Append and Indirect (+&)" do
185
210
  let(:config) { {"mapping" => {"message" => "%{+&timestamp}"}}}
186
211
  it "raises an error in register" do
187
- msg = "org.logstash.dissect.InvalidFieldException: Field cannot prefix with both Append and Indirect Prefix (+&): +&timestamp"
212
+ msg = "org.logstash.dissect.fields.InvalidFieldException: Field cannot prefix with both Append and Indirect Prefix (+&): +&timestamp"
188
213
  expect{filter.register}.to raise_exception(LogStash::FieldFormatError, msg)
189
214
  end
190
215
  end
@@ -192,64 +217,9 @@ describe LogStash::Filters::Dissect do
    context "when field is defined as Indirect and Append (&+)" do
      let(:config) { {"mapping" => {"message" => "%{&+timestamp}"}}}
      it "raises an error in register" do
-       msg = "org.logstash.dissect.InvalidFieldException: Field cannot prefix with both Append and Indirect Prefix (&+): &+timestamp"
+       msg = "org.logstash.dissect.fields.InvalidFieldException: Field cannot prefix with both Append and Indirect Prefix (&+): &+timestamp"
        expect{filter.register}.to raise_exception(LogStash::FieldFormatError, msg)
      end
    end
  end
-
- describe "baseline performance test", :performance => true do
-   event_count = 1000000
-   min_rate = 30000
-
-   max_duration = event_count / min_rate
-   cfg_base = <<-CONFIG
-     input {
-       generator {
-         count => #{event_count}
-         message => "Mar 16 00:01:25 evita postfix/smtpd[1713]: connect from camomile.cloud9.net[168.100.1.3]"
-       }
-     }
-     output { null { } }
-   CONFIG
-
-   config(cfg_base)
-   start = Time.now.to_f
-   agent do
-     duration = (Time.now.to_f - start)
-     puts "\n\ninputs/generator baseline rate: #{"%02.0f/sec" % (event_count / duration)}, elapsed: #{duration}s\n\n"
-     insist { duration } < max_duration
-   end
- end
-
- describe "dissect performance test", :performance => true do
-   event_count = 1000000
-   min_rate = 30000
-   max_duration = event_count / min_rate
-
-   cfg_filter = <<-CONFIG
-     input {
-       generator {
-         count => #{event_count}
-         message => "Mar 16 00:01:25 evita postfix/smtpd[1713]: connect from camomile.cloud9.net[168.100.1.3]"
-       }
-     }
-     filter {
-       dissect {
-         mapping => {
-           "message" => "%{timestamp} %{+timestamp} %{+timestamp} %{logsource} %{program}[%{pid}]: %{msg}"
-         }
-       }
-     }
-     output { null { } }
-   CONFIG
-
-   config(cfg_filter)
-   start = Time.now.to_f
-   agent do
-     duration = (Time.now.to_f - start)
-     puts "\n\nfilters/dissect rate: #{"%02.0f/sec" % (event_count / duration)}, elapsed: #{duration}s\n\n"
-     insist { duration } < event_count / min_rate
-   end
- end
  end
data/spec/spec_helper.rb CHANGED
@@ -1,4 +1,5 @@
  # encoding: utf-8
+ require "forwardable"
  require "logstash/devutils/rspec/spec_helper"
 
  module LogStash::Environment
metadata CHANGED
@@ -1,21 +1,21 @@
  --- !ruby/object:Gem::Specification
  name: logstash-filter-dissect
  version: !ruby/object:Gem::Version
-   version: 1.0.6
+   version: 1.0.7
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-07-14 00:00:00.000000000 Z
+ date: 2016-10-18 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: '1.60'
+         version: 2.1.1
      - - "<="
        - !ruby/object:Gem::Version
          version: '2.99'
@@ -26,30 +26,59 @@ dependencies:
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: '1.60'
+         version: 2.1.1
      - - "<="
        - !ruby/object:Gem::Version
          version: '2.99'
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: jar-dependencies
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
        - !ruby/object:Gem::Version
-       version: 1.0.0
+         version: '0'
+   name: rspec
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
    name: logstash-devutils
    prerelease: false
    type: :development
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-       version: 1.0.0
+         version: '0'
  description: This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program
  email: info@elastic.co
  executables: []
  extensions: []
  extra_rdoc_files: []
  files:
+ - BUILD_INSTRUCTIONS.md
  - CHANGELOG.md
  - CONTRIBUTORS
  - DEVELOPER.md
@@ -57,11 +86,15 @@ files:
  - LICENSE
  - NOTICE.TXT
  - README.md
- - lib/jars/jruby-dissect-library.jar
+ - VERSION
+ - lib/jruby-dissect-library_jars.rb
  - lib/logstash/filters/dissect.rb
  - logstash-filter-dissect.gemspec
  - spec/filters/dissect_spec.rb
  - spec/spec_helper.rb
+ - vendor/jars/org/jruby/jruby-core/1.7.25/jruby-core-1.7.25.jar
+ - vendor/jars/org/logstash/dissect/jruby-dissect-library/1.0.5/jruby-dissect-library-1.0.5.jar
+ - vendor/jars/org/logstash/dissect/jruby-dissect-library/1.0.7/jruby-dissect-library-1.0.7.jar
  homepage: http://www.elastic.co/guide/en/logstash/current/index.html
  licenses:
  - Apache License (2.0)
@@ -84,10 +117,10 @@ required_rubygems_version: !ruby/object:Gem::Requirement
        version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.6.3
+ rubygems_version: 2.4.8
  signing_key:
  specification_version: 4
- summary: This dissect filter will destructurize text in multiple fields.
+ summary: This dissect filter will de-structure text into multiple fields.
  test_files:
  - spec/filters/dissect_spec.rb
  - spec/spec_helper.rb
Binary file