logstash-core 6.8.14-java → 7.0.0.alpha1-java
- checksums.yaml +4 -4
- data/lib/logstash-core/version.rb +3 -1
- data/lib/logstash/agent.rb +69 -85
- data/lib/logstash/api/modules/stats.rb +1 -1
- data/lib/logstash/compiler/lscl.rb +7 -7
- data/lib/logstash/config/config_ast.rb +1 -1
- data/lib/logstash/config/mixin.rb +1 -1
- data/lib/logstash/config/modules_common.rb +3 -3
- data/lib/logstash/dependency_report.rb +1 -2
- data/lib/logstash/environment.rb +4 -9
- data/lib/logstash/event.rb +1 -24
- data/lib/logstash/filter_delegator.rb +69 -2
- data/lib/logstash/filters/base.rb +2 -0
- data/lib/logstash/instrument/metric_store.rb +1 -1
- data/lib/logstash/instrument/periodic_poller/dlq.rb +7 -5
- data/lib/logstash/instrument/periodic_poller/jvm.rb +3 -3
- data/lib/logstash/instrument/periodic_poller/pq.rb +8 -6
- data/lib/logstash/instrument/periodic_pollers.rb +3 -3
- data/lib/logstash/java_pipeline.rb +11 -38
- data/lib/logstash/modules/kibana_config.rb +1 -1
- data/lib/logstash/modules/logstash_config.rb +1 -1
- data/lib/logstash/patches/resolv.rb +32 -17
- data/lib/logstash/pipeline.rb +11 -28
- data/lib/logstash/pipeline_action/base.rb +1 -1
- data/lib/logstash/pipeline_action/create.rb +13 -7
- data/lib/logstash/pipeline_action/reload.rb +12 -35
- data/lib/logstash/pipeline_action/stop.rb +6 -4
- data/lib/logstash/pipeline_settings.rb +1 -2
- data/lib/logstash/plugins/registry.rb +2 -5
- data/lib/logstash/runner.rb +0 -24
- data/lib/logstash/settings.rb +5 -5
- data/lib/logstash/state_resolver.rb +5 -5
- data/lib/logstash/util.rb +1 -11
- data/lib/logstash/util/duration_formatter.rb +1 -1
- data/lib/logstash/util/safe_uri.rb +0 -1
- data/lib/logstash/util/substitution_variables.rb +1 -22
- data/lib/logstash/util/thread_dump.rb +1 -1
- data/locales/en.yml +7 -16
- data/logstash-core.gemspec +11 -2
- data/spec/logstash/acked_queue_concurrent_stress_spec.rb +2 -2
- data/spec/logstash/agent/converge_spec.rb +31 -25
- data/spec/logstash/agent/metrics_spec.rb +1 -1
- data/spec/logstash/agent_spec.rb +7 -6
- data/spec/logstash/compiler/compiler_spec.rb +0 -28
- data/spec/logstash/config/config_ast_spec.rb +0 -15
- data/spec/logstash/config/mixin_spec.rb +2 -3
- data/spec/logstash/converge_result_spec.rb +1 -1
- data/spec/logstash/environment_spec.rb +4 -4
- data/spec/logstash/event_spec.rb +2 -10
- data/spec/logstash/filter_delegator_spec.rb +12 -2
- data/spec/logstash/filters/base_spec.rb +9 -45
- data/spec/logstash/instrument/periodic_poller/cgroup_spec.rb +2 -0
- data/spec/logstash/instrument/wrapped_write_client_spec.rb +1 -1
- data/spec/logstash/java_filter_delegator_spec.rb +11 -1
- data/spec/logstash/legacy_ruby_event_spec.rb +5 -6
- data/spec/logstash/patches_spec.rb +3 -1
- data/spec/logstash/pipeline_action/create_spec.rb +8 -14
- data/spec/logstash/pipeline_action/reload_spec.rb +9 -16
- data/spec/logstash/pipeline_action/stop_spec.rb +3 -4
- data/spec/logstash/queue_factory_spec.rb +1 -2
- data/spec/logstash/runner_spec.rb +0 -2
- data/spec/logstash/settings/array_coercible_spec.rb +1 -1
- data/spec/logstash/settings/bytes_spec.rb +2 -2
- data/spec/logstash/settings/port_range_spec.rb +1 -1
- data/spec/logstash/settings_spec.rb +0 -10
- data/spec/logstash/state_resolver_spec.rb +22 -26
- data/spec/logstash/util/safe_uri_spec.rb +0 -40
- data/spec/logstash/util/secretstore_spec.rb +1 -1
- data/spec/logstash/util/time_value_spec.rb +1 -1
- data/spec/logstash/util/wrapped_acked_queue_spec.rb +1 -1
- data/spec/logstash/webserver_spec.rb +5 -9
- data/spec/support/matchers.rb +19 -25
- data/spec/support/shared_contexts.rb +3 -3
- data/versions-gem-copy.yml +9 -9
- metadata +31 -44
- data/lib/logstash/patches/resolv_9270.rb +0 -2903
- data/lib/logstash/pipelines_registry.rb +0 -166
- data/lib/logstash/util/lazy_singleton.rb +0 -33
- data/spec/logstash/jruby_version_spec.rb +0 -15
- data/spec/logstash/pipelines_registry_spec.rb +0 -220
data/logstash-core.gemspec
CHANGED
@@ -1,4 +1,10 @@
  # -*- encoding: utf-8 -*-
+
+ # NOTE: please use `rake artifact:gems` or `rake artifact:build-logstash-core` to build LS gems
+ # You can add a version qualifier (e.g. alpha1) via the VERSION_QUALIFIER env var, e.g.
+ # VERSION_QUALIFIER=beta2 RELEASE=1 rake artifact:build-logstash-core
+ # `require 'logstash-core/version'` is aware of this env var
+
  lib = File.expand_path('../lib', __FILE__)
  $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

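Note: the comment block added above says `require 'logstash-core/version'` honors VERSION_QUALIFIER. As a rough sketch only (the constant name and concatenation logic below are assumptions for illustration, not the actual contents of data/lib/logstash-core/version.rb), an env-var-aware version file could look like:

    # Hypothetical sketch of an env-var-aware version constant; the shipped
    # version.rb may differ in naming and structure.
    base_version = "7.0.0"
    qualifier = ENV["VERSION_QUALIFIER"]
    LOGSTASH_CORE_VERSION = qualifier ? "#{base_version}.#{qualifier}" : base_version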
@@ -28,7 +34,7 @@ Gem::Specification.new do |gem|
  gem.description = %q{The core components of logstash, the scalable log and event management tool}
  gem.summary = %q{logstash-core - The core components of logstash}
  gem.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
- gem.license = "Apache
+ gem.license = "Apache-2.0"

  gem.files = Dir.glob(
  %w(versions-gem-copy.yml logstash-core.gemspec gemspec_jars.rb lib/**/*.rb spec/**/*.rb locales/*
@@ -47,7 +53,10 @@ Gem::Specification.new do |gem|
  gem.add_runtime_dependency "filesize", "0.0.4" #(MIT license) for :bytes config validator
  gem.add_runtime_dependency "gems", "~> 0.8.3" #(MIT license)
  gem.add_runtime_dependency "concurrent-ruby", "~> 1.0", ">= 1.0.5"
-
+
+ # Later versions are ruby 2.0 only. We should remove the rack dep once we support 9k
+ gem.add_runtime_dependency "rack", '1.6.6'
+
  gem.add_runtime_dependency "sinatra", '~> 1.4', '>= 1.4.6'
  gem.add_runtime_dependency 'puma', '~> 2.16'
  gem.add_runtime_dependency "jruby-openssl", ">= 0.9.20" # >= 0.9.13 Required to support TLSv1.2
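Note: the hunk above pins rack to exactly 1.6.6 because, per the inline comment, later rack releases require Ruby 2.0, which the JRuby in use does not yet provide. For contrast, a generic illustration (not taken from logstash-core.gemspec) of an exact pin versus a pessimistic constraint:

    # Illustrative gemspec fragment, not part of this gem.
    Gem::Specification.new do |gem|
      gem.name    = "example"
      gem.version = "0.1.0"
      gem.summary = "dependency pinning example"
      gem.authors = ["example"]
      # Exact pin: only version 1.6.6 ever satisfies the requirement.
      gem.add_runtime_dependency "rack", "1.6.6"
      # Pessimistic constraint: allows >= 1.4.6 and < 2.0.
      gem.add_runtime_dependency "sinatra", "~> 1.4", ">= 1.4.6"
    end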
data/spec/logstash/acked_queue_concurrent_stress_spec.rb
CHANGED
@@ -14,7 +14,7 @@ describe LogStash::WrappedAckedQueue, :stress_test => true do
  let(:reject_memo_keys) { [:reject_memo_keys, :path, :queue, :writer_threads, :collector, :metric, :reader_threads, :output_strings] }

  let(:queue) do
- described_class.new(path, page_capacity, 0, queue_checkpoint_acks, queue_checkpoint_writes, queue_checkpoint_interval,
+ described_class.new(path, page_capacity, 0, queue_checkpoint_acks, queue_checkpoint_writes, queue_checkpoint_interval, queue_capacity)
  end

  let(:writer_threads) do
@@ -70,7 +70,7 @@ describe LogStash::WrappedAckedQueue, :stress_test => true do
  it "writes, reads, closes and reopens" do
  Thread.abort_on_exception = true

- # force lazy initialization to avoid
+ # force lazy initialization to avoid concurrency issues within threads
  counts
  queue

data/spec/logstash/agent/converge_spec.rb
CHANGED
@@ -49,7 +49,7 @@ describe LogStash::Agent do

  context "system pipeline" do

- let(:system_pipeline_config) { mock_pipeline_config(:system_pipeline, "input {
+ let(:system_pipeline_config) { mock_pipeline_config(:system_pipeline, "input { generator { } } output { null {} }", { "pipeline.system" => true }) }

  context "when we have a finite pipeline and a system pipeline running" do

@@ -65,40 +65,40 @@ describe LogStash::Agent do
  end

  context "when we have an infinite pipeline and a system pipeline running" do
- let(:infinite_pipeline_config) { mock_pipeline_config(:main, "input {
+ let(:infinite_pipeline_config) { mock_pipeline_config(:main, "input { generator { } } output { null {} }") }

  let(:source_loader) do
  TestSourceLoader.new(infinite_pipeline_config, system_pipeline_config)
  end

  before(:each) do
-
+ @agent_task = start_agent(subject)
  end

  after(:each) do
-
- @agent_task.wait
- subject.shutdown
+ @agent_task.stop!
  end

  describe "#running_user_defined_pipelines" do
  it "returns the user defined pipelines" do
-
-
-
+ wait_for do
+ subject.with_running_user_defined_pipelines {|pipelines| pipelines.keys }
+ end.to eq([:main])
+ end
  end

  describe "#running_user_defined_pipelines?" do
  it "returns true" do
-
-
+ wait_for do
+ subject.running_user_defined_pipelines?
+ end.to be_truthy
  end
  end
  end
  end

  context "when `config.reload.automatic`" do
- let(:pipeline_config) { mock_pipeline_config(:main, "input {
+ let(:pipeline_config) { mock_pipeline_config(:main, "input { generator {} } output { null {} }") }

  let(:source_loader) do
  TestSourceLoader.new(pipeline_config)
@@ -114,14 +114,14 @@ describe LogStash::Agent do

  after(:each) do
  @agent_task.stop!
- @agent_task.wait
- subject.shutdown
  end

  it "converge only once" do
  wait(60).for { source_loader.fetch_count }.to eq(1)
-
+
  expect(subject).to have_running_pipeline?(pipeline_config)
+
+ subject.shutdown
  end
  end

@@ -135,6 +135,8 @@ describe LogStash::Agent do

  expect(source_loader.fetch_count).to eq(1)
  expect(subject.pipelines_count).to eq(0)
+
+ subject.shutdown
  end
  end
  end
@@ -147,25 +149,26 @@ describe LogStash::Agent do
  "config.reload.interval" => interval
  )
  end
-
  before(:each) do
  @agent_task = start_agent(subject)
  end

  after(:each) do
  @agent_task.stop!
- @agent_task.wait
- subject.shutdown
  end

  context "and successfully load the config" do
  it "converges periodically the pipelines from the configs source" do
- #
+ sleep(2) # let the interval reload a few times
  expect(subject).to have_running_pipeline?(pipeline_config)

  # we rely on a periodic thread to call fetch count, we have seen unreliable run on
  # travis, so lets add a few retries
- try
+ try do
+ expect(source_loader.fetch_count).to be > 1
+ end
+
+ subject.shutdown
  end
  end

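Note: the converge specs above poll asynchronous agent state with helpers like `wait_for` and `try` instead of asserting immediately, and the inline comment explains why retries are needed on slow CI. As a minimal sketch of the retry idea only (Logstash's real `try` helper lives in its spec support code and is likely implemented differently):

    # Hypothetical retry helper for flaky asynchronous assertions.
    def try(attempts = 10, wait: 0.5)
      attempt = 0
      begin
        yield
      rescue RSpec::Expectations::ExpectationNotMetError
        attempt += 1
        raise if attempt >= attempts
        sleep(wait)
        retry
      end
    end

    # Usage mirrors the spec above:
    #   try { expect(source_loader.fetch_count).to be > 1 }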
@@ -175,9 +178,12 @@ describe LogStash::Agent do
  end

  it "it will keep trying to converge" do
+
  sleep(agent_settings.get("config.reload.interval") / 1_000_000_000.0 * 20) # let the interval reload a few times
  expect(subject.pipelines_count).to eq(0)
  expect(source_loader.fetch_count).to be > 1
+
+ subject.shutdown
  end
  end
  end
@@ -185,8 +191,8 @@ describe LogStash::Agent do
  end

  context "when shutting down the agent" do
- let(:pipeline_config) { mock_pipeline_config(:main, "input {
- let(:new_pipeline_config) { mock_pipeline_config(:new, "input {
+ let(:pipeline_config) { mock_pipeline_config(:main, "input { generator {} } output { null {} }") }
+ let(:new_pipeline_config) { mock_pipeline_config(:new, "input { generator { id => 'new' } } output { null {} }") }

  let(:source_loader) do
  TestSourceLoader.new([pipeline_config, new_pipeline_config])
@@ -199,8 +205,8 @@ describe LogStash::Agent do
  end

  context "Configuration converge scenario" do
- let(:pipeline_config) { mock_pipeline_config(:main, "input {
- let(:new_pipeline_config) { mock_pipeline_config(:new, "input {
+ let(:pipeline_config) { mock_pipeline_config(:main, "input { generator {} } output { null {} }", { "pipeline.reloadable" => true }) }
+ let(:new_pipeline_config) { mock_pipeline_config(:new, "input { generator {} } output { null {} }", { "pipeline.reloadable" => true }) }

  before do
  # Set the Agent to an initial state of pipelines
@@ -257,7 +263,7 @@ describe LogStash::Agent do
  end

  context "when the source return a modified pipeline" do
- let(:modified_pipeline_config) { mock_pipeline_config(:main, "input {
+ let(:modified_pipeline_config) { mock_pipeline_config(:main, "input { generator { id => 'new-and-modified' } } output { null {} }", { "pipeline.reloadable" => true }) }

  let(:source_loader) do
  TestSequenceSourceLoader.new(
data/spec/logstash/agent/metrics_spec.rb
CHANGED
@@ -229,7 +229,7 @@ describe LogStash::Agent do
  # since the pipeline is async, it can actually take some time to have metrics recordings
  # so we try a few times
  try(20) do
- expect { mhash(:stats, :pipelines, :main, :events) }.not_to raise_error , "Events
+ expect { mhash(:stats, :pipelines, :main, :events) }.not_to raise_error , "Events pipeline stats should exist"
  expect { mhash(:stats, :pipelines, :main, :plugins) }.not_to raise_error, "Plugins pipeline stats should exist"
  end

data/spec/logstash/agent_spec.rb
CHANGED
@@ -8,6 +8,7 @@ require_relative "../support/mocks_classes"
  require "fileutils"
  require_relative "../support/helpers"
  require_relative "../support/matchers"
+ require 'timeout'

  java_import org.logstash.Timestamp

@@ -67,7 +68,7 @@ describe LogStash::Agent do
  let(:agent_args) { { "config.string" => config_string } }

  it "should delegate settings to new pipeline" do
- expect(LogStash::
+ expect(LogStash::JavaPipeline).to receive(:new) do |arg1, arg2|
  expect(arg1).to eq(config_string)
  expect(arg2.to_hash).to include(agent_args)
  end
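Note: the hunk above verifies the arguments the agent passes to the pipeline constructor by attaching a block to the message expectation. A self-contained sketch of that rspec-mocks pattern (the class, config string, and settings names below are invented for illustration, not Logstash's agent or pipeline classes):

    # Standalone illustration of verifying constructor arguments with a
    # block on `receive`.
    require "rspec/autorun"

    class Pipeline
      def initialize(config, settings); end
    end

    RSpec.describe "settings delegation" do
      it "passes the config string and settings through" do
        expect(Pipeline).to receive(:new) do |config, settings|
          expect(config).to eq("input { } output { }")
          expect(settings).to include("pipeline.workers" => 2)
        end
        Pipeline.new("input { } output { }", { "pipeline.workers" => 2 })
      end
    end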
@@ -118,7 +119,7 @@ describe LogStash::Agent do
  context "if state is clean" do
  before :each do
  allow(subject).to receive(:running_user_defined_pipelines?).and_return(true)
- allow(subject).to receive(:
+ allow(subject).to receive(:clean_state?).and_return(false)
  end

  it "should not converge state more than once" do
@@ -141,7 +142,7 @@ describe LogStash::Agent do
  it "does not upgrade the new config" do
  t = Thread.new { subject.execute }
  wait(timeout)
- .for { subject.running_pipelines? && subject.
+ .for { subject.running_pipelines? && subject.pipelines.values.first.ready? }
  .to eq(true)
  expect(subject.converge_state_and_update).not_to be_a_successful_converge
  expect(subject).to have_running_pipeline?(mock_config_pipeline)
@@ -161,7 +162,7 @@ describe LogStash::Agent do
  it "does upgrade the new config" do
  t = Thread.new { subject.execute }
  Timeout.timeout(timeout) do
- sleep(0.1) until subject.
+ sleep(0.1) until subject.pipelines_count > 0 && subject.pipelines.values.first.ready?
  end

  expect(subject.converge_state_and_update).to be_a_successful_converge
@@ -185,7 +186,7 @@ describe LogStash::Agent do
  it "does not try to reload the pipeline" do
  t = Thread.new { subject.execute }
  Timeout.timeout(timeout) do
- sleep(0.1) until subject.running_pipelines? && subject.
+ sleep(0.1) until subject.running_pipelines? && subject.pipelines.values.first.running?
  end
  expect(subject.converge_state_and_update).not_to be_a_successful_converge
  expect(subject).to have_running_pipeline?(mock_config_pipeline)
@@ -205,7 +206,7 @@ describe LogStash::Agent do
  it "tries to reload the pipeline" do
  t = Thread.new { subject.execute }
  Timeout.timeout(timeout) do
- sleep(0.1) until subject.running_pipelines? && subject.
+ sleep(0.1) until subject.running_pipelines? && subject.pipelines.values.first.running?
  end

  expect(subject.converge_state_and_update).to be_a_successful_converge
data/spec/logstash/compiler/compiler_spec.rb
CHANGED
@@ -252,34 +252,6 @@ describe LogStash::Compiler do
  expect(c_plugin).to ir_eql(j.iPlugin(rand_meta, FILTER, "grok", expected_plugin_args))
  end

- describe "a filter plugin with a repeated hash directive with duplicated keys" do
- let(:source) { "input { } filter { #{plugin_source} } output { } " }
- let(:plugin_source) do
- %q[
- grok {
- match => { "message" => "foo" }
- match => { "message" => "bar" }
- break_on_match => false
- }
- ]
- end
- subject(:c_plugin) { compiled[:filter] }
-
- let(:expected_plugin_args) do
- {
- "match" => {
- "message" => ["foo", "bar"]
- },
- "break_on_match" => "false"
- }
- end
-
- it "should merge the values of the duplicate keys into an array" do
- expect(c_plugin).to ir_eql(j.iPlugin(rand_meta, FILTER, "grok", expected_plugin_args))
- end
-
- end
-
  describe "a filter plugin that has nested Hash directives" do
  let(:source) { "input { } filter { #{plugin_source} } output { } " }
  let(:plugin_source) do
data/spec/logstash/config/config_ast_spec.rb
CHANGED
@@ -143,21 +143,6 @@ describe LogStashConfigParser do

  expect(config).to be_nil
  end
-
- it "supports octal literals" do
- parser = LogStashConfigParser.new
- config = parser.parse(%q(
- input {
- example {
- foo => 010
- }
- }
- ))
-
- compiled_number = eval(config.recursive_select(LogStash::Config::AST::Number).first.compile)
-
- expect(compiled_number).to be == 8
- end
  end

  context "when config.support_escapes" do
data/spec/logstash/config/mixin_spec.rb
CHANGED
@@ -132,8 +132,8 @@ describe LogStash::Config::Mixin do
  context "with an empty list" do
  let(:strings) { [] }

- it "should return
- expect(subject.strings).to
+ it "should return nil" do
+ expect(subject.strings).to be_nil
  end
  end

@@ -419,7 +419,6 @@ describe LogStash::Config::Mixin do
  end

  it "should use the value in the variable" do
- skip("This test fails on Windows, tracked in https://github.com/elastic/logstash/issues/10454")
  expect(subject.oneString).to(be == "fancy")
  expect(subject.oneBoolean).to(be_truthy)
  expect(subject.oneArray).to(be == [ "first array value", "fancy" ])
data/spec/logstash/converge_result_spec.rb
CHANGED
@@ -90,7 +90,7 @@ describe LogStash::ConvergeResult do
  end

  context "when all the actions are executed" do
- context "all
+ context "all successful" do
  let(:success_action) { LogStash::PipelineAction::Stop.new(:success) }
  let(:success_action_2) { LogStash::PipelineAction::Stop.new(:success_2) }

data/spec/logstash/environment_spec.rb
CHANGED
@@ -57,14 +57,14 @@ describe LogStash::Environment do
  context "windows" do
  windows_host_os.each do |host|
  it "#{host} returns true" do
-
+ expect(RbConfig::CONFIG).to receive(:[]).with("host_os").and_return(host)
  expect(LogStash::Environment.windows?).to be_truthy
  end
  end

  linux_host_os.each do |host|
  it "#{host} returns false" do
-
+ expect(RbConfig::CONFIG).to receive(:[]).with("host_os").and_return(host)
  expect(LogStash::Environment.windows?).to be_falsey
  end
  end
@@ -73,14 +73,14 @@ describe LogStash::Environment do
  context "Linux" do
  windows_host_os.each do |host|
  it "#{host} returns true" do
-
+ expect(RbConfig::CONFIG).to receive(:[]).with("host_os").and_return(host)
  expect(LogStash::Environment.linux?).to be_falsey
  end
  end

  linux_host_os.each do |host|
  it "#{host} returns false" do
-
+ expect(RbConfig::CONFIG).to receive(:[]).with("host_os").and_return(host)
  expect(LogStash::Environment.linux?).to be_truthy
  end
  end
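Note: the two hunks above stub `RbConfig::CONFIG["host_os"]` so the platform predicates can be exercised for every host string. As a rough sketch of what such predicates conventionally look like in Ruby (the real LogStash::Environment implementation may differ in detail):

    # Conventional host_os sniffing; illustrative only.
    require "rbconfig"

    module PlatformCheck
      def self.windows?
        !!(RbConfig::CONFIG["host_os"] =~ /mswin|msys|mingw|cygwin|bccwin|wince/i)
      end

      def self.linux?
        !!(RbConfig::CONFIG["host_os"] =~ /linux/i)
      end
    end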
data/spec/logstash/event_spec.rb
CHANGED
@@ -141,10 +141,10 @@ describe LogStash::Event do
  expect(e.get("foo")).to eq(BigDecimal.new(1))
  end

- it "should set
+ it "should set RubyBignum" do
  e = LogStash::Event.new()
  e.set("[foo]", -9223372036854776000)
- expect(e.get("foo")).to be_kind_of(
+ expect(e.get("foo")).to be_kind_of(Bignum)
  expect(e.get("foo")).to eq(-9223372036854776000)
  end

@@ -341,14 +341,6 @@ describe LogStash::Event do
  context "method missing exception messages" do
  subject { LogStash::Event.new({"foo" => "bar"}) }

- it "#[] method raises a better exception message" do
- expect { subject["foo"] }.to raise_error(NoMethodError, /Direct event field references \(i\.e\. event\['field'\]\)/)
- end
-
- it "#[]= method raises a better exception message" do
- expect { subject["foo"] = "baz" }.to raise_error(NoMethodError, /Direct event field references \(i\.e\. event\['field'\] = 'value'\)/)
- end
-
  it "other missing method raises normal exception message" do
  expect { subject.baz() }.to raise_error(NoMethodError, /undefined method `baz' for/)
  end
data/spec/logstash/filter_delegator_spec.rb
CHANGED
@@ -51,7 +51,7 @@ describe LogStash::FilterDelegator do
  end

  it "defines a flush method" do
- expect(subject.
+ expect(subject.respond_to?(:flush)).to be_truthy
  end

  context "when the flush return events" do
@@ -128,7 +128,7 @@ describe LogStash::FilterDelegator do
  end

  it "doesnt define a flush method" do
- expect(subject.
+ expect(subject.respond_to?(:flush)).to be_falsey
  end

  it "increments the in/out of the metric" do
@@ -145,4 +145,14 @@ describe LogStash::FilterDelegator do
  end
  end

+ context "delegate methods to the original plugin" do
+ # I am not testing the behavior of these methods
+ # this is done in the plugin tests. I just want to make sure
+ # the proxy delegates the methods.
+ LogStash::FilterDelegator::DELEGATED_METHODS.each do |method|
+ it "delegate method: `#{method}` to the filter" do
+ expect(subject.respond_to?(method))
+ end
+ end
+ end
  end
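Note: the re-added context above only asserts that the delegator responds to each method in DELEGATED_METHODS; the behaviour itself is covered by the plugin specs. A minimal sketch of the delegation idea using Ruby's stdlib Forwardable (the method list below is illustrative and is not FilterDelegator's actual DELEGATED_METHODS):

    # Illustrative proxy; not the real LogStash::FilterDelegator.
    require "forwardable"

    class FilterProxy
      extend Forwardable

      DELEGATED_METHODS = [:register, :close, :threadsafe?, :periodic_flush].freeze
      def_delegators :@filter, *DELEGATED_METHODS

      def initialize(filter)
        @filter = filter
      end
    end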