logstash-core 6.8.23-java → 7.0.0.alpha1-java

Files changed (80)
  1. checksums.yaml +4 -4
  2. data/lib/logstash/agent.rb +69 -85
  3. data/lib/logstash/api/modules/stats.rb +1 -1
  4. data/lib/logstash/compiler/lscl.rb +7 -7
  5. data/lib/logstash/config/config_ast.rb +1 -1
  6. data/lib/logstash/config/mixin.rb +1 -1
  7. data/lib/logstash/config/modules_common.rb +3 -3
  8. data/lib/logstash/dependency_report.rb +1 -2
  9. data/lib/logstash/environment.rb +4 -9
  10. data/lib/logstash/event.rb +1 -24
  11. data/lib/logstash/filter_delegator.rb +69 -2
  12. data/lib/logstash/filters/base.rb +2 -0
  13. data/lib/logstash/instrument/metric_store.rb +1 -1
  14. data/lib/logstash/instrument/periodic_poller/dlq.rb +7 -5
  15. data/lib/logstash/instrument/periodic_poller/jvm.rb +3 -3
  16. data/lib/logstash/instrument/periodic_poller/pq.rb +8 -6
  17. data/lib/logstash/instrument/periodic_pollers.rb +3 -3
  18. data/lib/logstash/java_pipeline.rb +11 -38
  19. data/lib/logstash/modules/kibana_config.rb +1 -1
  20. data/lib/logstash/modules/logstash_config.rb +1 -1
  21. data/lib/logstash/patches/resolv.rb +32 -17
  22. data/lib/logstash/pipeline.rb +11 -28
  23. data/lib/logstash/pipeline_action/base.rb +1 -1
  24. data/lib/logstash/pipeline_action/create.rb +13 -7
  25. data/lib/logstash/pipeline_action/reload.rb +12 -35
  26. data/lib/logstash/pipeline_action/stop.rb +6 -4
  27. data/lib/logstash/pipeline_settings.rb +1 -2
  28. data/lib/logstash/plugins/registry.rb +2 -5
  29. data/lib/logstash/runner.rb +0 -24
  30. data/lib/logstash/settings.rb +5 -5
  31. data/lib/logstash/state_resolver.rb +5 -5
  32. data/lib/logstash/util/duration_formatter.rb +1 -1
  33. data/lib/logstash/util/safe_uri.rb +0 -1
  34. data/lib/logstash/util/substitution_variables.rb +1 -22
  35. data/lib/logstash/util/thread_dump.rb +1 -1
  36. data/lib/logstash/util.rb +1 -11
  37. data/lib/logstash-core/version.rb +3 -1
  38. data/locales/en.yml +7 -16
  39. data/logstash-core.gemspec +11 -2
  40. data/spec/logstash/acked_queue_concurrent_stress_spec.rb +2 -2
  41. data/spec/logstash/agent/converge_spec.rb +31 -25
  42. data/spec/logstash/agent/metrics_spec.rb +1 -1
  43. data/spec/logstash/agent_spec.rb +7 -6
  44. data/spec/logstash/compiler/compiler_spec.rb +0 -28
  45. data/spec/logstash/config/config_ast_spec.rb +0 -15
  46. data/spec/logstash/config/mixin_spec.rb +2 -3
  47. data/spec/logstash/converge_result_spec.rb +1 -1
  48. data/spec/logstash/environment_spec.rb +4 -4
  49. data/spec/logstash/event_spec.rb +2 -10
  50. data/spec/logstash/filter_delegator_spec.rb +12 -2
  51. data/spec/logstash/filters/base_spec.rb +9 -45
  52. data/spec/logstash/instrument/periodic_poller/cgroup_spec.rb +2 -0
  53. data/spec/logstash/instrument/wrapped_write_client_spec.rb +1 -1
  54. data/spec/logstash/java_filter_delegator_spec.rb +11 -1
  55. data/spec/logstash/legacy_ruby_event_spec.rb +5 -6
  56. data/spec/logstash/patches_spec.rb +3 -1
  57. data/spec/logstash/pipeline_action/create_spec.rb +8 -14
  58. data/spec/logstash/pipeline_action/reload_spec.rb +9 -16
  59. data/spec/logstash/pipeline_action/stop_spec.rb +3 -4
  60. data/spec/logstash/queue_factory_spec.rb +1 -2
  61. data/spec/logstash/runner_spec.rb +0 -2
  62. data/spec/logstash/settings/array_coercible_spec.rb +1 -1
  63. data/spec/logstash/settings/bytes_spec.rb +2 -2
  64. data/spec/logstash/settings/port_range_spec.rb +1 -1
  65. data/spec/logstash/settings_spec.rb +0 -10
  66. data/spec/logstash/state_resolver_spec.rb +22 -26
  67. data/spec/logstash/util/safe_uri_spec.rb +0 -40
  68. data/spec/logstash/util/secretstore_spec.rb +1 -1
  69. data/spec/logstash/util/time_value_spec.rb +1 -1
  70. data/spec/logstash/util/wrapped_acked_queue_spec.rb +1 -1
  71. data/spec/logstash/webserver_spec.rb +5 -9
  72. data/spec/support/matchers.rb +19 -25
  73. data/spec/support/shared_contexts.rb +3 -3
  74. data/versions-gem-copy.yml +9 -9
  75. metadata +31 -44
  76. data/lib/logstash/patches/resolv_9270.rb +0 -2903
  77. data/lib/logstash/pipelines_registry.rb +0 -166
  78. data/lib/logstash/util/lazy_singleton.rb +0 -33
  79. data/spec/logstash/jruby_version_spec.rb +0 -15
  80. data/spec/logstash/pipelines_registry_spec.rb +0 -220
data/lib/logstash/util/substitution_variables.rb CHANGED
@@ -2,17 +2,12 @@
 
 java_import "org.logstash.secret.store.SecretStoreExt"
 
-require_relative 'lazy_singleton'
-
 module ::LogStash::Util::SubstitutionVariables
 
   include LogStash::Util::Loggable
 
   SUBSTITUTION_PLACEHOLDER_REGEX = /\${(?<name>[a-zA-Z_.][a-zA-Z0-9_.]*)(:(?<default>[^}]*))?}/
 
-  SECRET_STORE = ::LogStash::Util::LazySingleton.new { load_secret_store }
-  private_constant :SECRET_STORE
-
   # Recursive method to replace substitution variable references in parameters
   def deep_replace(value)
     if value.is_a?(Hash)
@@ -47,7 +42,7 @@ module ::LogStash::Util::SubstitutionVariables
       logger.debug("Replacing `#{placeholder}` with actual value")
 
       #check the secret store if it exists
-      secret_store = SECRET_STORE.instance
+      secret_store = SecretStoreExt.getIfExists(LogStash::SETTINGS.get_setting("keystore.file").value, LogStash::SETTINGS.get_setting("keystore.classname").value)
       replacement = secret_store.nil? ? nil : secret_store.retrieveSecret(SecretStoreExt.getStoreId(name))
       #check the environment
       replacement = ENV.fetch(name, default) if replacement.nil?
@@ -59,20 +54,4 @@ module ::LogStash::Util::SubstitutionVariables
     end
   end # def replace_placeholders
 
-  class << self
-    private
-
-    # loads a secret_store from disk if available, or returns nil
-    #
-    # @api private
-    # @return [SecretStoreExt,nil]
-    def load_secret_store
-      SecretStoreExt.getIfExists(LogStash::SETTINGS.get_setting("keystore.file").value, LogStash::SETTINGS.get_setting("keystore.classname").value)
-    end
-
-    # @api test
-    def reset_secret_store
-      SECRET_STORE.reset!
-    end
-  end
 end
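The net effect of this hunk is that 7.0.0.alpha1 reloads the keystore on every placeholder lookup instead of caching it in a LazySingleton. A minimal sketch of the resolution order the diff shows (keystore, then environment, then the inline default); `resolve` and the hash-based `secret_store` stand-in are hypothetical names used for illustration, not part of logstash-core:

# Illustrative only: mirrors the `${NAME:default}` lookup order visible above.
PLACEHOLDER = /\${(?<name>[a-zA-Z_.][a-zA-Z0-9_.]*)(:(?<default>[^}]*))?}/

def resolve(value, secret_store: nil)
  value.gsub(PLACEHOLDER) do
    name    = Regexp.last_match[:name]
    default = Regexp.last_match[:default]
    # keystore first (stand-in for SecretStoreExt#retrieveSecret), then ENV, then the inline default
    replacement = secret_store && secret_store[name]
    replacement = ENV.fetch(name, default) if replacement.nil?
    raise "Cannot evaluate `${#{name}}`" if replacement.nil?
    replacement
  end
end

resolve("jdbc://${DB_HOST:localhost}:5432")  # => "jdbc://localhost:5432" when DB_HOST is unset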
data/lib/logstash/util/thread_dump.rb CHANGED
@@ -5,7 +5,7 @@ module LogStash
   module Util
     class ThreadDump
       SKIPPED_THREADS = [ "Finalizer", "Reference Handler", "Signal Dispatcher" ].freeze
-      THREADS_COUNT_DEFAULT = 10.freeze
+      THREADS_COUNT_DEFAULT = 3.freeze
       IGNORE_IDLE_THREADS_DEFAULT = true.freeze
 
       attr_reader :top_count, :ignore, :dump
data/lib/logstash/util.rb CHANGED
@@ -9,8 +9,6 @@ module LogStash::Util
 
   PR_SET_NAME = 15
   def self.set_thread_name(name)
-    previous_name = Java::java.lang.Thread.currentThread.getName() if block_given?
-
     if RUBY_ENGINE == "jruby"
       # Keep java and ruby thread names in sync.
       Java::java.lang.Thread.currentThread.setName(name)
@@ -23,14 +21,6 @@ module LogStash::Util
       # since MRI 1.9, JRuby, and Rubinius use system threads for this.
       LibC.prctl(PR_SET_NAME, name[0..16], 0, 0, 0)
     end
-
-    if block_given?
-      begin
-        yield
-      ensure
-        set_thread_name(previous_name)
-      end
-    end
   end # def set_thread_name
 
   def self.set_thread_plugin(plugin)
@@ -201,7 +191,7 @@ module LogStash::Util
       o.inject({}) {|h, (k,v)| h[k] = deep_clone(v); h }
     when Array
       o.map {|v| deep_clone(v) }
-    when Integer, Symbol, IO, TrueClass, FalseClass, NilClass
+    when Fixnum, Symbol, IO, TrueClass, FalseClass, NilClass
       o
     when LogStash::Codecs::Base
       o.clone
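For orientation, a rough usage sketch of the deep_clone helper diffed above; the behaviour shown follows from the Hash and Array branches visible in the hunk, nothing beyond that is implied:

original = { "tags" => ["a", "b"], "count" => 1 }
copy = LogStash::Util.deep_clone(original)   # hashes and arrays are copied recursively
copy["tags"] << "c"
original["tags"]  # => ["a", "b"] (the nested array was cloned, not shared)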
data/lib/logstash-core/version.rb CHANGED
@@ -7,6 +7,8 @@ if !defined?(ALL_VERSIONS)
   require 'yaml'
   ALL_VERSIONS = YAML.load_file(File.expand_path("../../versions-gem-copy.yml", File.dirname(__FILE__)))
 end
+
 if !defined?(LOGSTASH_CORE_VERSION)
-  LOGSTASH_CORE_VERSION = ALL_VERSIONS.fetch("logstash-core")
+  # PACKAGE_SUFFIX is declared in the artifact namespace from artifacts.rake
+  LOGSTASH_CORE_VERSION = defined?(PACKAGE_SUFFIX) ? "#{ALL_VERSIONS.fetch("logstash-core")}#{PACKAGE_SUFFIX}" : ALL_VERSIONS.fetch("logstash-core")
 end
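A small worked illustration of the PACKAGE_SUFFIX branch above; the yml contents and suffix value are assumptions made for the example, not values taken from this release:

ALL_VERSIONS   = { "logstash-core" => "7.0.0" }   # assumed contents of versions-gem-copy.yml
PACKAGE_SUFFIX = ".alpha1"                        # assumed suffix derived from VERSION_QUALIFIER=alpha1

version = defined?(PACKAGE_SUFFIX) ? "#{ALL_VERSIONS.fetch("logstash-core")}#{PACKAGE_SUFFIX}" : ALL_VERSIONS.fetch("logstash-core")
# => "7.0.0.alpha1"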
data/locales/en.yml CHANGED
@@ -229,32 +229,23 @@ en:
       If you wish to use both defaults, please use
       the empty string for the '-e' flag.
     field-reference-parser: |+
+      (DEPRECATED) This option is no longer
+      configurable.
+
       Use the given MODE when parsing field
       references.
 
       The field reference parser is used to expand
       field references in your pipeline configs,
-      and will be becoming more strict to better
-      handle illegal and ambbiguous inputs in a
-      future release of Logstash.
+      and has become more strict to better handle
+      ambiguous- and illegal-syntax inputs.
 
-      Available MODEs are:
-       - `LEGACY`: parse with the legacy parser,
-         which is known to handle ambiguous- and
-         illegal-syntax in surprising ways;
-         warnings will not be emitted.
-       - `COMPAT`: warn once for each distinct
-         ambiguous- or illegal-syntax input, but
-         continue to expand field references with
-         the legacy parser.
+      The only available MODE is:
        - `STRICT`: parse in a strict manner; when
          given ambiguous- or illegal-syntax input,
          raises a runtime exception that should
          be handled by the calling plugin.
 
-      The MODE can also be set with
-      `config.field_reference.parser`
-
     modules: |+
       Load Logstash modules.
       Modules can be defined using multiple instances
@@ -298,7 +289,7 @@ en:
     pipeline-workers: |+
       Sets the number of pipeline workers to run.
     java-execution: |+
-      Use Java execution engine.
+      (Beta) Use new Java execution engine.
     pipeline-batch-size: |+
       Size of batches the pipeline is to work in.
     pipeline-batch-delay: |+
data/logstash-core.gemspec CHANGED
@@ -1,4 +1,10 @@
 # -*- encoding: utf-8 -*-
+
+# NOTE: please use `rake artifact:gems` or `rake artifact:build-logstash-core` to build LS gems
+# You can add a version qualifier (e.g. alpha1) via the VERSION_QUALIFIER env var, e.g.
+# VERSION_QUALIFIER=beta2 RELEASE=1 rake artifact:build-logstash-core
+# `require 'logstash-core/version'` is aware of this env var
+
 lib = File.expand_path('../lib', __FILE__)
 $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
 
@@ -28,7 +34,7 @@ Gem::Specification.new do |gem|
   gem.description = %q{The core components of logstash, the scalable log and event management tool}
   gem.summary = %q{logstash-core - The core components of logstash}
   gem.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
-  gem.license = "Apache License (2.0)"
+  gem.license = "Apache-2.0"
 
   gem.files = Dir.glob(
     %w(versions-gem-copy.yml logstash-core.gemspec gemspec_jars.rb lib/**/*.rb spec/**/*.rb locales/*
@@ -47,7 +53,10 @@ Gem::Specification.new do |gem|
   gem.add_runtime_dependency "filesize", "0.0.4" #(MIT license) for :bytes config validator
   gem.add_runtime_dependency "gems", "~> 0.8.3"  #(MIT license)
   gem.add_runtime_dependency "concurrent-ruby", "~> 1.0", ">= 1.0.5"
-  gem.add_runtime_dependency "rack", '~> 1.6', '>= 1.6.11'
+
+  # Later versions are ruby 2.0 only. We should remove the rack dep once we support 9k
+  gem.add_runtime_dependency "rack", '1.6.6'
+
   gem.add_runtime_dependency "sinatra", '~> 1.4', '>= 1.4.6'
   gem.add_runtime_dependency 'puma', '~> 2.16'
   gem.add_runtime_dependency "jruby-openssl", ">= 0.9.20" # >= 0.9.13 Required to support TLSv1.2
data/spec/logstash/acked_queue_concurrent_stress_spec.rb CHANGED
@@ -14,7 +14,7 @@ describe LogStash::WrappedAckedQueue, :stress_test => true do
   let(:reject_memo_keys) { [:reject_memo_keys, :path, :queue, :writer_threads, :collector, :metric, :reader_threads, :output_strings] }
 
   let(:queue) do
-    described_class.new(path, page_capacity, 0, queue_checkpoint_acks, queue_checkpoint_writes, queue_checkpoint_interval, false, queue_capacity)
+    described_class.new(path, page_capacity, 0, queue_checkpoint_acks, queue_checkpoint_writes, queue_checkpoint_interval, queue_capacity)
   end
 
   let(:writer_threads) do
@@ -70,7 +70,7 @@ describe LogStash::WrappedAckedQueue, :stress_test => true do
   it "writes, reads, closes and reopens" do
     Thread.abort_on_exception = true
 
-    # force lazy initialization to avoid concurency issues within threads
+    # force lazy initialization to avoid concurrency issues within threads
     counts
     queue
 
data/spec/logstash/agent/converge_spec.rb CHANGED
@@ -49,7 +49,7 @@ describe LogStash::Agent do
 
   context "system pipeline" do
 
-    let(:system_pipeline_config) { mock_pipeline_config(:system_pipeline, "input { dummyblockinginput { } } output { null {} }", { "pipeline.system" => true }) }
+    let(:system_pipeline_config) { mock_pipeline_config(:system_pipeline, "input { generator { } } output { null {} }", { "pipeline.system" => true }) }
 
     context "when we have a finite pipeline and a system pipeline running" do
 
@@ -65,40 +65,40 @@ describe LogStash::Agent do
     end
 
     context "when we have an infinite pipeline and a system pipeline running" do
-      let(:infinite_pipeline_config) { mock_pipeline_config(:main, "input { dummyblockinginput { } } output { null {} }") }
+      let(:infinite_pipeline_config) { mock_pipeline_config(:main, "input { generator { } } output { null {} }") }
 
       let(:source_loader) do
         TestSourceLoader.new(infinite_pipeline_config, system_pipeline_config)
       end
 
       before(:each) do
-        @agent_task = start_agent(subject)
+        @agent_task = start_agent(subject)
       end
 
       after(:each) do
-        @agent_task.stop!
-        @agent_task.wait
-        subject.shutdown
+        @agent_task.stop!
       end
 
       describe "#running_user_defined_pipelines" do
         it "returns the user defined pipelines" do
-          # wait is necessary to accommodate for pipelines startup time
-          wait(60).for {subject.running_user_defined_pipelines.keys}.to eq([:main])
-        end
+          wait_for do
+            subject.with_running_user_defined_pipelines {|pipelines| pipelines.keys }
+          end.to eq([:main])
+        end
       end
 
       describe "#running_user_defined_pipelines?" do
         it "returns true" do
-          # wait is necessary to accommodate for pipelines startup time
-          wait(60).for {subject.running_user_defined_pipelines?}.to be_truthy
+          wait_for do
+            subject.running_user_defined_pipelines?
+          end.to be_truthy
         end
       end
     end
   end
 
   context "when `config.reload.automatic`" do
-    let(:pipeline_config) { mock_pipeline_config(:main, "input { dummyblockinginput {} } output { null {} }") }
+    let(:pipeline_config) { mock_pipeline_config(:main, "input { generator {} } output { null {} }") }
 
     let(:source_loader) do
       TestSourceLoader.new(pipeline_config)
@@ -114,14 +114,14 @@ describe LogStash::Agent do
 
     after(:each) do
       @agent_task.stop!
-      @agent_task.wait
-      subject.shutdown
     end
 
     it "converge only once" do
       wait(60).for { source_loader.fetch_count }.to eq(1)
-      # no need to wait here because have_running_pipeline? does the wait
+
       expect(subject).to have_running_pipeline?(pipeline_config)
+
+      subject.shutdown
     end
   end
 
@@ -135,6 +135,8 @@ describe LogStash::Agent do
 
         expect(source_loader.fetch_count).to eq(1)
        expect(subject.pipelines_count).to eq(0)
+
+        subject.shutdown
      end
    end
  end
@@ -147,25 +149,26 @@ describe LogStash::Agent do
        "config.reload.interval" => interval
      )
    end
-
    before(:each) do
      @agent_task = start_agent(subject)
    end
 
    after(:each) do
      @agent_task.stop!
-      @agent_task.wait
-      subject.shutdown
    end
 
    context "and successfully load the config" do
      it "converges periodically the pipelines from the configs source" do
-        # no need to wait here because have_running_pipeline? does the wait
+        sleep(2) # let the interval reload a few times
        expect(subject).to have_running_pipeline?(pipeline_config)
 
        # we rely on a periodic thread to call fetch count, we have seen unreliable run on
        # travis, so lets add a few retries
-        try { expect(source_loader.fetch_count).to be > 1 }
+        try do
+          expect(source_loader.fetch_count).to be > 1
+        end
+
+        subject.shutdown
      end
    end
 
@@ -175,9 +178,12 @@ describe LogStash::Agent do
    end
 
    it "it will keep trying to converge" do
+
      sleep(agent_settings.get("config.reload.interval") / 1_000_000_000.0 * 20) # let the interval reload a few times
      expect(subject.pipelines_count).to eq(0)
      expect(source_loader.fetch_count).to be > 1
+
+      subject.shutdown
    end
  end
 end
@@ -185,8 +191,8 @@ describe LogStash::Agent do
   end
 
   context "when shutting down the agent" do
-    let(:pipeline_config) { mock_pipeline_config(:main, "input { dummyblockinginput {} } output { null {} }") }
-    let(:new_pipeline_config) { mock_pipeline_config(:new, "input { dummyblockinginput { id => 'new' } } output { null {} }") }
+    let(:pipeline_config) { mock_pipeline_config(:main, "input { generator {} } output { null {} }") }
+    let(:new_pipeline_config) { mock_pipeline_config(:new, "input { generator { id => 'new' } } output { null {} }") }
 
     let(:source_loader) do
       TestSourceLoader.new([pipeline_config, new_pipeline_config])
@@ -199,8 +205,8 @@ describe LogStash::Agent do
   end
 
   context "Configuration converge scenario" do
-    let(:pipeline_config) { mock_pipeline_config(:main, "input { dummyblockinginput {} } output { null {} }", { "pipeline.reloadable" => true }) }
-    let(:new_pipeline_config) { mock_pipeline_config(:new, "input { dummyblockinginput {} } output { null {} }", { "pipeline.reloadable" => true }) }
+    let(:pipeline_config) { mock_pipeline_config(:main, "input { generator {} } output { null {} }", { "pipeline.reloadable" => true }) }
+    let(:new_pipeline_config) { mock_pipeline_config(:new, "input { generator {} } output { null {} }", { "pipeline.reloadable" => true }) }
 
     before do
       # Set the Agent to an initial state of pipelines
@@ -257,7 +263,7 @@ describe LogStash::Agent do
     end
 
     context "when the source return a modified pipeline" do
-      let(:modified_pipeline_config) { mock_pipeline_config(:main, "input { dummyblockinginput { id => 'new-and-modified' } } output { null {} }", { "pipeline.reloadable" => true }) }
+      let(:modified_pipeline_config) { mock_pipeline_config(:main, "input { generator { id => 'new-and-modified' } } output { null {} }", { "pipeline.reloadable" => true }) }
 
       let(:source_loader) do
         TestSequenceSourceLoader.new(
data/spec/logstash/agent/metrics_spec.rb CHANGED
@@ -229,7 +229,7 @@ describe LogStash::Agent do
     # since the pipeline is async, it can actually take some time to have metrics recordings
     # so we try a few times
     try(20) do
-      expect { mhash(:stats, :pipelines, :main, :events) }.not_to raise_error , "Events pipelien stats should exist"
+      expect { mhash(:stats, :pipelines, :main, :events) }.not_to raise_error , "Events pipeline stats should exist"
       expect { mhash(:stats, :pipelines, :main, :plugins) }.not_to raise_error, "Plugins pipeline stats should exist"
     end
 
data/spec/logstash/agent_spec.rb CHANGED
@@ -8,6 +8,7 @@ require_relative "../support/mocks_classes"
 require "fileutils"
 require_relative "../support/helpers"
 require_relative "../support/matchers"
+require 'timeout'
 
 java_import org.logstash.Timestamp
 
@@ -67,7 +68,7 @@ describe LogStash::Agent do
     let(:agent_args) { { "config.string" => config_string } }
 
     it "should delegate settings to new pipeline" do
-      expect(LogStash::Pipeline).to receive(:new) do |arg1, arg2|
+      expect(LogStash::JavaPipeline).to receive(:new) do |arg1, arg2|
         expect(arg1).to eq(config_string)
         expect(arg2.to_hash).to include(agent_args)
       end
@@ -118,7 +119,7 @@ describe LogStash::Agent do
     context "if state is clean" do
       before :each do
         allow(subject).to receive(:running_user_defined_pipelines?).and_return(true)
-        allow(subject).to receive(:no_pipeline?).and_return(false)
+        allow(subject).to receive(:clean_state?).and_return(false)
       end
 
       it "should not converge state more than once" do
@@ -141,7 +142,7 @@ describe LogStash::Agent do
       it "does not upgrade the new config" do
         t = Thread.new { subject.execute }
         wait(timeout)
-          .for { subject.running_pipelines? && subject.running_pipelines.values.first.ready? }
+          .for { subject.running_pipelines? && subject.pipelines.values.first.ready? }
           .to eq(true)
         expect(subject.converge_state_and_update).not_to be_a_successful_converge
         expect(subject).to have_running_pipeline?(mock_config_pipeline)
@@ -161,7 +162,7 @@ describe LogStash::Agent do
       it "does upgrade the new config" do
         t = Thread.new { subject.execute }
         Timeout.timeout(timeout) do
-          sleep(0.1) until subject.running_pipelines_count > 0 && subject.running_pipelines.values.first.ready?
+          sleep(0.1) until subject.pipelines_count > 0 && subject.pipelines.values.first.ready?
         end
 
         expect(subject.converge_state_and_update).to be_a_successful_converge
@@ -185,7 +186,7 @@ describe LogStash::Agent do
       it "does not try to reload the pipeline" do
        t = Thread.new { subject.execute }
        Timeout.timeout(timeout) do
-          sleep(0.1) until subject.running_pipelines? && subject.running_pipelines.values.first.running?
+          sleep(0.1) until subject.running_pipelines? && subject.pipelines.values.first.running?
        end
        expect(subject.converge_state_and_update).not_to be_a_successful_converge
        expect(subject).to have_running_pipeline?(mock_config_pipeline)
@@ -205,7 +206,7 @@ describe LogStash::Agent do
      it "tries to reload the pipeline" do
        t = Thread.new { subject.execute }
        Timeout.timeout(timeout) do
-          sleep(0.1) until subject.running_pipelines? && subject.running_pipelines.values.first.running?
+          sleep(0.1) until subject.running_pipelines? && subject.pipelines.values.first.running?
        end
 
        expect(subject.converge_state_and_update).to be_a_successful_converge
data/spec/logstash/compiler/compiler_spec.rb CHANGED
@@ -252,34 +252,6 @@ describe LogStash::Compiler do
       expect(c_plugin).to ir_eql(j.iPlugin(rand_meta, FILTER, "grok", expected_plugin_args))
     end
 
-    describe "a filter plugin with a repeated hash directive with duplicated keys" do
-      let(:source) { "input { } filter { #{plugin_source} } output { } " }
-      let(:plugin_source) do
-        %q[
-          grok {
-            match => { "message" => "foo" }
-            match => { "message" => "bar" }
-            break_on_match => false
-          }
-        ]
-      end
-      subject(:c_plugin) { compiled[:filter] }
-
-      let(:expected_plugin_args) do
-        {
-          "match" => {
-            "message" => ["foo", "bar"]
-          },
-          "break_on_match" => "false"
-        }
-      end
-
-      it "should merge the values of the duplicate keys into an array" do
-        expect(c_plugin).to ir_eql(j.iPlugin(rand_meta, FILTER, "grok", expected_plugin_args))
-      end
-
-    end
-
     describe "a filter plugin that has nested Hash directives" do
       let(:source) { "input { } filter { #{plugin_source} } output { } " }
       let(:plugin_source) do
data/spec/logstash/config/config_ast_spec.rb CHANGED
@@ -143,21 +143,6 @@ describe LogStashConfigParser do
 
       expect(config).to be_nil
     end
-
-    it "supports octal literals" do
-      parser = LogStashConfigParser.new
-      config = parser.parse(%q(
-        input {
-          example {
-            foo => 010
-          }
-        }
-      ))
-
-      compiled_number = eval(config.recursive_select(LogStash::Config::AST::Number).first.compile)
-
-      expect(compiled_number).to be == 8
-    end
   end
 
   context "when config.support_escapes" do
data/spec/logstash/config/mixin_spec.rb CHANGED
@@ -132,8 +132,8 @@ describe LogStash::Config::Mixin do
     context "with an empty list" do
       let(:strings) { [] }
 
-      it "should return an empty list" do
-        expect(subject.strings).to be_empty
+      it "should return nil" do
+        expect(subject.strings).to be_nil
       end
     end
 
@@ -419,7 +419,6 @@ describe LogStash::Config::Mixin do
       end
 
       it "should use the value in the variable" do
-        skip("This test fails on Windows, tracked in https://github.com/elastic/logstash/issues/10454")
         expect(subject.oneString).to(be == "fancy")
         expect(subject.oneBoolean).to(be_truthy)
         expect(subject.oneArray).to(be == [ "first array value", "fancy" ])
data/spec/logstash/converge_result_spec.rb CHANGED
@@ -90,7 +90,7 @@ describe LogStash::ConvergeResult do
     end
 
     context "when all the actions are executed" do
-      context "all succesfull" do
+      context "all successful" do
         let(:success_action) { LogStash::PipelineAction::Stop.new(:success) }
         let(:success_action_2) { LogStash::PipelineAction::Stop.new(:success_2) }
 
data/spec/logstash/environment_spec.rb CHANGED
@@ -57,14 +57,14 @@ describe LogStash::Environment do
     context "windows" do
       windows_host_os.each do |host|
         it "#{host} returns true" do
-          allow(LogStash::Environment).to receive(:host_os).and_return(host)
+          expect(RbConfig::CONFIG).to receive(:[]).with("host_os").and_return(host)
           expect(LogStash::Environment.windows?).to be_truthy
         end
       end
 
       linux_host_os.each do |host|
         it "#{host} returns false" do
-          allow(LogStash::Environment).to receive(:host_os).and_return(host)
+          expect(RbConfig::CONFIG).to receive(:[]).with("host_os").and_return(host)
           expect(LogStash::Environment.windows?).to be_falsey
         end
       end
@@ -73,14 +73,14 @@ describe LogStash::Environment do
     context "Linux" do
       windows_host_os.each do |host|
         it "#{host} returns true" do
-          allow(LogStash::Environment).to receive(:host_os).and_return(host)
+          expect(RbConfig::CONFIG).to receive(:[]).with("host_os").and_return(host)
           expect(LogStash::Environment.linux?).to be_falsey
         end
       end
 
       linux_host_os.each do |host|
         it "#{host} returns false" do
-          allow(LogStash::Environment).to receive(:host_os).and_return(host)
+          expect(RbConfig::CONFIG).to receive(:[]).with("host_os").and_return(host)
           expect(LogStash::Environment.linux?).to be_truthy
         end
       end
data/spec/logstash/event_spec.rb CHANGED
@@ -141,10 +141,10 @@ describe LogStash::Event do
       expect(e.get("foo")).to eq(BigDecimal.new(1))
     end
 
-    it "should set RubyInteger" do
+    it "should set RubyBignum" do
       e = LogStash::Event.new()
       e.set("[foo]", -9223372036854776000)
-      expect(e.get("foo")).to be_kind_of(Integer)
+      expect(e.get("foo")).to be_kind_of(Bignum)
       expect(e.get("foo")).to eq(-9223372036854776000)
     end
 
@@ -341,14 +341,6 @@ describe LogStash::Event do
   context "method missing exception messages" do
     subject { LogStash::Event.new({"foo" => "bar"}) }
 
-    it "#[] method raises a better exception message" do
-      expect { subject["foo"] }.to raise_error(NoMethodError, /Direct event field references \(i\.e\. event\['field'\]\)/)
-    end
-
-    it "#[]= method raises a better exception message" do
-      expect { subject["foo"] = "baz" }.to raise_error(NoMethodError, /Direct event field references \(i\.e\. event\['field'\] = 'value'\)/)
-    end
-
     it "other missing method raises normal exception message" do
       expect { subject.baz() }.to raise_error(NoMethodError, /undefined method `baz' for/)
     end
data/spec/logstash/filter_delegator_spec.rb CHANGED
@@ -51,7 +51,7 @@ describe LogStash::FilterDelegator do
   end
 
   it "defines a flush method" do
-    expect(subject.has_flush).to be_truthy
+    expect(subject.respond_to?(:flush)).to be_truthy
   end
 
   context "when the flush return events" do
@@ -128,7 +128,7 @@ describe LogStash::FilterDelegator do
   end
 
   it "doesnt define a flush method" do
-    expect(subject.has_flush).to be_falsey
+    expect(subject.respond_to?(:flush)).to be_falsey
   end
 
   it "increments the in/out of the metric" do
@@ -145,4 +145,14 @@ describe LogStash::FilterDelegator do
     end
   end
 
+  context "delegate methods to the original plugin" do
+    # I am not testing the behavior of these methods
+    # this is done in the plugin tests. I just want to make sure
+    # the proxy delegates the methods.
+    LogStash::FilterDelegator::DELEGATED_METHODS.each do |method|
+      it "delegate method: `#{method}` to the filter" do
+        expect(subject.respond_to?(method))
+      end
+    end
+  end
 end