logstash-core 1.5.0.beta2-java → 1.5.0-java
Potentially problematic release: this version of logstash-core might be problematic.
- checksums.yaml +4 -4
- data/lib/logstash-core.rb +2 -0
- data/lib/logstash/agent.rb +0 -41
- data/lib/logstash/config/config_ast.rb +62 -29
- data/lib/logstash/config/mixin.rb +3 -3
- data/lib/logstash/environment.rb +37 -100
- data/lib/logstash/event.rb +32 -20
- data/lib/logstash/filters/base.rb +20 -0
- data/lib/logstash/java_integration.rb +72 -18
- data/lib/logstash/namespace.rb +0 -3
- data/lib/logstash/outputs/base.rb +1 -1
- data/lib/logstash/patches/bundler.rb +20 -0
- data/lib/logstash/patches/rubygems.rb +37 -0
- data/lib/logstash/pipeline.rb +59 -39
- data/lib/logstash/runner.rb +4 -50
- data/lib/logstash/util.rb +0 -1
- data/lib/logstash/util/accessors.rb +6 -0
- data/lib/logstash/version.rb +1 -1
- data/locales/en.yml +5 -0
- data/logstash-core.gemspec +51 -0
- data/spec/core/conditionals_spec.rb +428 -0
- data/spec/core/config_mixin_spec.rb +99 -0
- data/spec/core/config_spec.rb +108 -0
- data/spec/core/environment_spec.rb +44 -0
- data/spec/core/event_spec.rb +473 -0
- data/spec/core/pipeline_spec.rb +198 -0
- data/spec/core/plugin_spec.rb +106 -0
- data/spec/core/runner_spec.rb +39 -0
- data/spec/core/timestamp_spec.rb +83 -0
- data/spec/filters/base_spec.rb +318 -0
- data/spec/inputs/base_spec.rb +13 -0
- data/spec/lib/logstash/bundler_spec.rb +120 -0
- data/spec/lib/logstash/java_integration_spec.rb +257 -0
- data/spec/logstash/agent_spec.rb +37 -0
- data/spec/outputs/base_spec.rb +47 -0
- data/spec/spec_helper.rb +1 -0
- data/spec/util/accessors_spec.rb +215 -0
- data/spec/util/charset_spec.rb +74 -0
- data/spec/util/fieldeval_spec.rb +96 -0
- data/spec/util/gemfile_spec.rb +212 -0
- data/spec/util/json_spec.rb +97 -0
- data/spec/util/plugin_version_spec.rb +48 -0
- data/spec/util/retryable_spec.rb +145 -0
- data/spec/util_spec.rb +34 -0
- metadata +96 -253
- data/lib/logstash-event.rb +0 -2
- data/lib/logstash.rb +0 -4
- data/lib/logstash/JRUBY-PR1448.rb +0 -32
- data/lib/logstash/bundler.rb +0 -124
- data/lib/logstash/gemfile.rb +0 -175
- data/lib/logstash/pluginmanager.rb +0 -17
- data/lib/logstash/pluginmanager/install.rb +0 -112
- data/lib/logstash/pluginmanager/list.rb +0 -38
- data/lib/logstash/pluginmanager/main.rb +0 -22
- data/lib/logstash/pluginmanager/maven_tools_patch.rb +0 -12
- data/lib/logstash/pluginmanager/uninstall.rb +0 -49
- data/lib/logstash/pluginmanager/update.rb +0 -50
- data/lib/logstash/pluginmanager/util.rb +0 -88
data/spec/core/pipeline_spec.rb
@@ -0,0 +1,198 @@
+require "spec_helper"
+
+class DummyInput < LogStash::Inputs::Base
+  config_name "dummyinput"
+  milestone 2
+
+  def register
+  end
+
+  def run(queue)
+  end
+
+  def teardown
+  end
+end
+
+class DummyCodec < LogStash::Codecs::Base
+  config_name "dummycodec"
+  milestone 2
+
+  def decode(data)
+    data
+  end
+
+  def encode(event)
+    event
+  end
+
+  def teardown
+  end
+end
+
+class DummyOutput < LogStash::Outputs::Base
+  config_name "dummyoutput"
+  milestone 2
+
+  attr_reader :num_teardowns
+
+  def initialize(params={})
+    super
+    @num_teardowns = 0
+  end
+
+  def register
+  end
+
+  def receive(event)
+  end
+
+  def teardown
+    @num_teardowns += 1
+  end
+end
+
+class TestPipeline < LogStash::Pipeline
+  attr_reader :outputs
+end
+
+describe LogStash::Pipeline do
+
+  context "teardown" do
+
+    before(:each) do
+      LogStash::Plugin.stub(:lookup)
+        .with("input", "dummyinput").and_return(DummyInput)
+      LogStash::Plugin.stub(:lookup)
+        .with("codec", "plain").and_return(DummyCodec)
+      LogStash::Plugin.stub(:lookup)
+        .with("output", "dummyoutput").and_return(DummyOutput)
+    end
+
+    let(:test_config_without_output_workers) {
+      <<-eos
+        input {
+          dummyinput {}
+        }
+
+        output {
+          dummyoutput {}
+        }
+      eos
+    }
+
+    let(:test_config_with_output_workers) {
+      <<-eos
+        input {
+          dummyinput {}
+        }
+
+        output {
+          dummyoutput {
+            workers => 2
+          }
+        }
+      eos
+    }
+
+    context "output teardown" do
+      it "should call teardown of output without output-workers" do
+        pipeline = TestPipeline.new(test_config_without_output_workers)
+        pipeline.run
+
+        expect(pipeline.outputs.size ).to eq(1)
+        expect(pipeline.outputs.first.worker_plugins.size ).to eq(1)
+        expect(pipeline.outputs.first.worker_plugins.first.num_teardowns ).to eq(1)
+      end
+
+      it "should call output teardown correctly with output workers" do
+        pipeline = TestPipeline.new(test_config_with_output_workers)
+        pipeline.run
+
+        expect(pipeline.outputs.size ).to eq(1)
+        expect(pipeline.outputs.first.num_teardowns).to eq(0)
+        pipeline.outputs.first.worker_plugins.each do |plugin|
+          expect(plugin.num_teardowns ).to eq(1)
+        end
+      end
+    end
+  end
+
+  context "compiled flush function" do
+
+    context "cancelled events should not propagate down the filters" do
+      config <<-CONFIG
+        filter {
+          multiline {
+            pattern => "hello"
+            what => next
+          }
+          multiline {
+            pattern => "hello"
+            what => next
+          }
+        }
+      CONFIG
+
+      sample("hello") do
+        expect(subject["message"]).to eq("hello")
+      end
+    end
+
+    context "new events should propagate down the filters" do
+      config <<-CONFIG
+        filter {
+          clone {
+            clones => ["clone1"]
+          }
+          multiline {
+            pattern => "bar"
+            what => previous
+          }
+        }
+      CONFIG
+
+      sample(["foo", "bar"]) do
+        expect(subject.size).to eq(2)
+
+        expect(subject[0]["message"]).to eq("foo\nbar")
+        expect(subject[0]["type"]).to be_nil
+        expect(subject[1]["message"]).to eq("foo\nbar")
+        expect(subject[1]["type"]).to eq("clone1")
+      end
+    end
+  end
+
+  context "compiled filter funtions" do
+
+    context "new events should propagate down the filters" do
+      config <<-CONFIG
+        filter {
+          clone {
+            clones => ["clone1", "clone2"]
+          }
+          mutate {
+            add_field => {"foo" => "bar"}
+          }
+        }
+      CONFIG
+
+      sample("hello") do
+        expect(subject.size).to eq(3)
+
+        expect(subject[0]["message"]).to eq("hello")
+        expect(subject[0]["type"]).to be_nil
+        expect(subject[0]["foo"]).to eq("bar")
+
+        expect(subject[1]["message"]).to eq("hello")
+        expect(subject[1]["type"]).to eq("clone1")
+        expect(subject[1]["foo"]).to eq("bar")
+
+        expect(subject[2]["message"]).to eq("hello")
+        expect(subject[2]["type"]).to eq("clone2")
+        expect(subject[2]["foo"]).to eq("bar")
+      end
+    end
+
+  end
+end

data/spec/core/plugin_spec.rb
@@ -0,0 +1,106 @@
+require "spec_helper"
+require "logstash/plugin"
+
+describe LogStash::Plugin do
+  it "should fail lookup on inexisting type" do
+    expect_any_instance_of(Cabin::Channel).to receive(:debug).once
+    expect { LogStash::Plugin.lookup("badbadtype", "badname") }.to raise_error(LogStash::PluginLoadingError)
+  end
+
+  it "should fail lookup on inexisting name" do
+    expect_any_instance_of(Cabin::Channel).to receive(:debug).once
+    expect { LogStash::Plugin.lookup("filter", "badname") }.to raise_error(LogStash::PluginLoadingError)
+  end
+
+  it "should fail on bad plugin class" do
+    LogStash::Filters::BadSuperClass = Class.new
+    expect { LogStash::Plugin.lookup("filter", "bad_super_class") }.to raise_error(LogStash::PluginLoadingError)
+  end
+
+  it "should fail on missing config_name method" do
+    LogStash::Filters::MissingConfigName = Class.new(LogStash::Filters::Base)
+    expect { LogStash::Plugin.lookup("filter", "missing_config_name") }.to raise_error(LogStash::PluginLoadingError)
+  end
+
+  it "should lookup an already defined plugin class" do
+    class LogStash::Filters::LadyGaga < LogStash::Filters::Base
+      config_name "lady_gaga"
+    end
+    expect(LogStash::Plugin.lookup("filter", "lady_gaga")).to eq(LogStash::Filters::LadyGaga)
+  end
+
+  context "when validating the plugin version" do
+    let(:plugin_name) { 'logstash-filter-stromae' }
+    subject do
+      Class.new(LogStash::Filters::Base) do
+        config_name 'stromae'
+      end
+    end
+
+    it "doesn't warn the user if the version is superior or equal to 1.0.0" do
+      allow(Gem::Specification).to receive(:find_by_name)
+        .with(plugin_name)
+        .and_return(double(:version => Gem::Version.new('1.0.0')))
+
+      expect_any_instance_of(Cabin::Channel).not_to receive(:info)
+      subject.validate({})
+    end
+
+    it 'warns the user if the plugin version is between 0.9.x and 1.0.0' do
+      allow(Gem::Specification).to receive(:find_by_name)
+        .with(plugin_name)
+        .and_return(double(:version => Gem::Version.new('0.9.1')))
+
+      expect_any_instance_of(Cabin::Channel).to receive(:info)
+        .with(/Using version 0.9.x/)
+
+      subject.validate({})
+    end
+
+    it 'warns the user if the plugin version is inferior to 0.9.x' do
+      allow(Gem::Specification).to receive(:find_by_name)
+        .with(plugin_name)
+        .and_return(double(:version => Gem::Version.new('0.1.1')))
+
+      expect_any_instance_of(Cabin::Channel).to receive(:info)
+        .with(/Using version 0.1.x/)
+      subject.validate({})
+    end
+
+    it "doesnt show the version notice more than once" do
+      one_notice = Class.new(LogStash::Filters::Base) do
+        config_name "stromae"
+      end
+
+      allow(Gem::Specification).to receive(:find_by_name)
+        .with(plugin_name)
+        .and_return(double(:version => Gem::Version.new('0.1.1')))
+
+      expect_any_instance_of(Cabin::Channel).to receive(:info)
+        .once
+        .with(/Using version 0.1.x/)
+
+      one_notice.validate({})
+      one_notice.validate({})
+    end
+
+    it "warns the user if we can't find a defined version" do
+      expect_any_instance_of(Cabin::Channel).to receive(:warn)
+        .once
+        .with(/plugin doesn't have a version/)
+
+      subject.validate({})
+    end
+
+
+    it 'logs a warning if the plugin use the milestone option' do
+      expect_any_instance_of(Cabin::Channel).to receive(:warn)
+        .with(/stromae plugin is using the 'milestone' method/)
+
+      class LogStash::Filters::Stromae < LogStash::Filters::Base
+        config_name "stromae"
+        milestone 2
+      end
+    end
+  end
+end

data/spec/core/runner_spec.rb
@@ -0,0 +1,39 @@
+require "spec_helper"
+require "logstash/runner"
+require "stud/task"
+
+class NullRunner
+  def run(args); end
+end
+
+describe LogStash::Runner do
+
+  context "argument parsing" do
+    it "should run agent" do
+      expect(Stud::Task).to receive(:new).once.and_return(nil)
+      args = ["agent", "-e", ""]
+      expect(subject.run(args)).to eq(nil)
+    end
+
+    it "should run agent help" do
+      expect(subject).to receive(:show_help).once.and_return(nil)
+      args = ["agent", "-h"]
+      expect(subject.run(args).wait).to eq(0)
+    end
+
+    it "should show help with no arguments" do
+      expect($stderr).to receive(:puts).once.and_return("No command given")
+      expect($stderr).to receive(:puts).once
+      args = []
+      expect(subject.run(args).wait).to eq(1)
+    end
+
+    it "should show help for unknown commands" do
+      expect($stderr).to receive(:puts).once.and_return("No such command welp")
+      expect($stderr).to receive(:puts).once
+      args = ["welp"]
+      expect(subject.run(args).wait).to eq(1)
+    end
+
+  end
+end

data/spec/core/timestamp_spec.rb
@@ -0,0 +1,83 @@
+require "spec_helper"
+require "logstash/timestamp"
+
+describe LogStash::Timestamp do
+
+  it "should parse its own iso8601 output" do
+    t = Time.now
+    ts = LogStash::Timestamp.new(t)
+    expect(LogStash::Timestamp.parse_iso8601(ts.to_iso8601).to_i).to eq(t.to_i)
+  end
+
+  it "should coerce iso8601 string" do
+    t = Time.now
+    ts = LogStash::Timestamp.new(t)
+    expect(LogStash::Timestamp.coerce(ts.to_iso8601).to_i).to eq(t.to_i)
+  end
+
+  it "should coerce Time" do
+    t = Time.now
+    expect(LogStash::Timestamp.coerce(t).to_i).to eq(t.to_i)
+  end
+
+  it "should coerce Timestamp" do
+    t = LogStash::Timestamp.now
+    expect(LogStash::Timestamp.coerce(t).to_i).to eq(t.to_i)
+  end
+
+  it "should raise on invalid string coerce" do
+    expect{LogStash::Timestamp.coerce("foobar")}.to raise_error LogStash::TimestampParserError
+  end
+
+  it "should return nil on invalid object coerce" do
+    expect(LogStash::Timestamp.coerce(:foobar)).to be_nil
+  end
+
+  it "should support to_json" do
+    expect(LogStash::Timestamp.parse_iso8601("2014-09-23T00:00:00-0800").to_json).to eq("\"2014-09-23T08:00:00.000Z\"")
+  end
+
+  it "should support to_json and ignore arguments" do
+    expect(LogStash::Timestamp.parse_iso8601("2014-09-23T00:00:00-0800").to_json(:some => 1, :argumnents => "test")).to eq("\"2014-09-23T08:00:00.000Z\"")
+  end
+
+  it "should support timestamp comparaison" do
+    current = LogStash::Timestamp.new(Time.now)
+    future = LogStash::Timestamp.new(Time.now + 100)
+
+    expect(future > current).to eq(true)
+    expect(future < current).to eq(false)
+    expect(current == current).to eq(true)
+
+    expect(current <=> current).to eq(0)
+    expect(current <=> future).to eq(-1)
+    expect(future <=> current).to eq(1)
+  end
+
+  it "should allow unary operation +" do
+    current = Time.now
+    t = LogStash::Timestamp.new(current) + 10
+    expect(t).to eq(current + 10)
+  end
+
+  describe "subtraction" do
+    it "should work on a timestamp object" do
+      t = Time.now
+      current = LogStash::Timestamp.new(t)
+      future = LogStash::Timestamp.new(t + 10)
+      expect(future - current).to eq(10)
+    end
+
+    it "should work on with time object" do
+      current = Time.now
+      t = LogStash::Timestamp.new(current + 10)
+      expect(t - current).to eq(10)
+    end
+
+    it "should work with numeric value" do
+      current = Time.now
+      t = LogStash::Timestamp.new(current + 10)
+      expect(t - 10).to eq(current)
+    end
+  end
+end

data/spec/filters/base_spec.rb
@@ -0,0 +1,318 @@
+# encoding: utf-8
+require "spec_helper"
+require "logstash/json"
+
+# use a dummy NOOP filter to test Filters::Base
+class LogStash::Filters::NOOP < LogStash::Filters::Base
+  config_name "noop"
+  milestone 2
+
+  def register; end
+
+  def filter(event)
+    return unless filter?(event)
+    filter_matched(event)
+  end
+end
+
+describe LogStash::Filters::Base do
+  subject {LogStash::Filters::Base.new({})}
+
+  it "should provide method interfaces to override" do
+    expect{subject.register}.to raise_error(RuntimeError)
+    expect{subject.filter(:foo)}.to raise_error(RuntimeError)
+  end
+
+  it "should provide class public API" do
+    [:register, :filter, :multi_filter, :execute, :threadsafe?, :filter_matched, :filter?, :teardown].each do |method|
+      expect(subject).to respond_to(method)
+    end
+  end
+
+  context "multi_filter" do
+    let(:event1){LogStash::Event.new}
+    let(:event2){LogStash::Event.new}
+
+    it "should multi_filter without new events" do
+      allow(subject).to receive(:filter) do |event, &block|
+        nil
+      end
+      expect(subject.multi_filter([event1])).to eq([event1])
+    end
+
+    it "should multi_filter with new events" do
+      allow(subject).to receive(:filter) do |event, &block|
+        block.call(event2)
+      end
+      expect(subject.multi_filter([event1])).to eq([event1, event2])
+    end
+  end
+end
+
+describe LogStash::Filters::NOOP do
+
+  describe "adding multiple values to one field" do
+    config <<-CONFIG
+    filter {
+      noop {
+        add_field => ["new_field", "new_value"]
+        add_field => ["new_field", "new_value_2"]
+      }
+    }
+    CONFIG
+
+    sample "example" do
+      insist { subject["new_field"] } == ["new_value", "new_value_2"]
+    end
+  end
+
+  describe "type parsing" do
+    config <<-CONFIG
+    filter {
+      noop {
+        type => "noop"
+        add_tag => ["test"]
+      }
+    }
+    CONFIG
+
+    sample("type" => "noop") do
+      insist { subject["tags"] } == ["test"]
+    end
+
+    sample("type" => "not_noop") do
+      insist { subject["tags"] }.nil?
+    end
+  end
+
+  describe "tags parsing with one tag" do
+    config <<-CONFIG
+    filter {
+      noop {
+        type => "noop"
+        tags => ["t1"]
+        add_tag => ["test"]
+      }
+    }
+    CONFIG
+
+    sample("type" => "noop") do
+      insist { subject["tags"] }.nil?
+    end
+
+    sample("type" => "noop", "tags" => ["t1", "t2"]) do
+      insist { subject["tags"] } == ["t1", "t2", "test"]
+    end
+  end
+
+  describe "tags parsing with multiple tags" do
+    config <<-CONFIG
+    filter {
+      noop {
+        type => "noop"
+        tags => ["t1", "t2"]
+        add_tag => ["test"]
+      }
+    }
+    CONFIG
+
+    sample("type" => "noop") do
+      insist { subject["tags"] }.nil?
+    end
+
+    sample("type" => "noop", "tags" => ["t1"]) do
+      insist { subject["tags"] } == ["t1"]
+    end
+
+    sample("type" => "noop", "tags" => ["t1", "t2"]) do
+      insist { subject["tags"] } == ["t1", "t2", "test"]
+    end
+
+    sample("type" => "noop", "tags" => ["t1", "t2", "t3"]) do
+      insist { subject["tags"] } == ["t1", "t2", "t3", "test"]
+    end
+  end
+
+  describe "exclude_tags with 1 tag" do
+    config <<-CONFIG
+    filter {
+      noop {
+        type => "noop"
+        tags => ["t1"]
+        add_tag => ["test"]
+        exclude_tags => ["t2"]
+      }
+    }
+    CONFIG
+
+    sample("type" => "noop") do
+      insist { subject["tags"] }.nil?
+    end
+
+    sample("type" => "noop", "tags" => ["t1"]) do
+      insist { subject["tags"] } == ["t1", "test"]
+    end
+
+    sample("type" => "noop", "tags" => ["t1", "t2"]) do
+      insist { subject["tags"] } == ["t1", "t2"]
+    end
+  end
+
+  describe "exclude_tags with >1 tags" do
+    config <<-CONFIG
+    filter {
+      noop {
+        type => "noop"
+        tags => ["t1"]
+        add_tag => ["test"]
+        exclude_tags => ["t2", "t3"]
+      }
+    }
+    CONFIG
+
+    sample("type" => "noop", "tags" => ["t1", "t2", "t4"]) do
+      insist { subject["tags"] } == ["t1", "t2", "t4"]
+    end
+
+    sample("type" => "noop", "tags" => ["t1", "t3", "t4"]) do
+      insist { subject["tags"] } == ["t1", "t3", "t4"]
+    end
+
+    sample("type" => "noop", "tags" => ["t1", "t4", "t5"]) do
+      insist { subject["tags"] } == ["t1", "t4", "t5", "test"]
+    end
+  end
+
+  describe "remove_tag" do
+    config <<-CONFIG
+    filter {
+      noop {
+        type => "noop"
+        tags => ["t1"]
+        remove_tag => ["t2", "t3"]
+      }
+    }
+    CONFIG
+
+    sample("type" => "noop", "tags" => ["t4"]) do
+      insist { subject["tags"] } == ["t4"]
+    end
+
+    sample("type" => "noop", "tags" => ["t1", "t2", "t3"]) do
+      insist { subject["tags"] } == ["t1"]
+    end
+
+    # also test from Json deserialized data to test the handling of native Java collections by JrJackson
+    # see https://github.com/elastic/logstash/issues/2261
+    sample(LogStash::Json.load("{\"type\":\"noop\", \"tags\":[\"t1\", \"t2\", \"t3\"]}")) do
+      insist { subject["tags"] } == ["t1"]
+    end
+
+    sample("type" => "noop", "tags" => ["t1", "t2"]) do
+      insist { subject["tags"] } == ["t1"]
+    end
+
+    # also test from Json deserialized data to test the handling of native Java collections by JrJackson
+    # see https://github.com/elastic/logstash/issues/2261
+    sample(LogStash::Json.load("{\"type\":\"noop\", \"tags\":[\"t1\", \"t2\"]}")) do
+      insist { subject["tags"] } == ["t1"]
+    end
+  end
+
+  describe "remove_tag with dynamic value" do
+    config <<-CONFIG
+    filter {
+      noop {
+        type => "noop"
+        tags => ["t1"]
+        remove_tag => ["%{blackhole}"]
+      }
+    }
+    CONFIG
+
+    sample("type" => "noop", "tags" => ["t1", "goaway", "t3"], "blackhole" => "goaway") do
+      insist { subject["tags"] } == ["t1", "t3"]
+    end
+
+    # also test from Json deserialized data to test the handling of native Java collections by JrJackson
+    # see https://github.com/elastic/logstash/issues/2261
+    sample(LogStash::Json.load("{\"type\":\"noop\", \"tags\":[\"t1\", \"goaway\", \"t3\"], \"blackhole\":\"goaway\"}")) do
+      insist { subject["tags"] } == ["t1", "t3"]
+    end
+  end
+
+  describe "remove_field" do
+    config <<-CONFIG
+    filter {
+      noop {
+        type => "noop"
+        remove_field => ["t2", "t3"]
+      }
+    }
+    CONFIG
+
+    sample("type" => "noop", "t4" => "four") do
+      insist { subject }.include?("t4")
+    end
+
+    sample("type" => "noop", "t1" => "one", "t2" => "two", "t3" => "three") do
+      insist { subject }.include?("t1")
+      reject { subject }.include?("t2")
+      reject { subject }.include?("t3")
+    end
+
+    sample("type" => "noop", "t1" => "one", "t2" => "two") do
+      insist { subject }.include?("t1")
+      reject { subject }.include?("t2")
+    end
+  end
+
+  describe "remove_field on deep objects" do
+    config <<-CONFIG
+    filter {
+      noop {
+        type => "noop"
+        remove_field => ["[t1][t2]"]
+      }
+    }
+    CONFIG
+
+    sample("type" => "noop", "t1" => {"t2" => "two", "t3" => "three"}) do
+      insist { subject }.include?("t1")
+      reject { subject }.include?("[t1][t2]")
+      insist { subject }.include?("[t1][t3]")
+    end
+  end
+
+  describe "remove_field on array" do
+    config <<-CONFIG
+    filter {
+      noop {
+        type => "noop"
+        remove_field => ["[t1][0]"]
+      }
+    }
+    CONFIG
+
+    sample("type" => "noop", "t1" => ["t2", "t3"]) do
+      insist { subject }.include?("t1")
+      insist { subject["[t1][0]"] } == "t3"
+    end
+  end
+
+  describe "remove_field with dynamic value in field name" do
+    config <<-CONFIG
+    filter {
+      noop {
+        type => "noop"
+        remove_field => ["%{blackhole}"]
+      }
+    }
+    CONFIG
+
+    sample("type" => "noop", "blackhole" => "go", "go" => "away") do
+      insist { subject }.include?("blackhole")
+      reject { subject }.include?("go")
+    end
+  end
+end