logstash-core 5.0.0.alpha4.snapshot1-java → 5.0.0.alpha4.snapshot2-java

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the two package versions.

Note: this release of logstash-core has been flagged as potentially problematic.

Files changed (41)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core/version.rb +1 -1
  3. data/lib/logstash/agent.rb +31 -36
  4. data/lib/logstash/api/command_factory.rb +3 -1
  5. data/lib/logstash/api/commands/base.rb +4 -0
  6. data/lib/logstash/api/commands/node.rb +116 -0
  7. data/lib/logstash/api/commands/stats.rb +28 -77
  8. data/lib/logstash/api/modules/base.rb +2 -2
  9. data/lib/logstash/api/modules/node.rb +23 -6
  10. data/lib/logstash/api/modules/node_stats.rb +15 -1
  11. data/lib/logstash/api/rack_app.rb +9 -6
  12. data/lib/logstash/api/service.rb +8 -47
  13. data/lib/logstash/config/config_ast.rb +11 -3
  14. data/lib/logstash/config/mixin.rb +60 -22
  15. data/lib/logstash/inputs/metrics.rb +2 -2
  16. data/lib/logstash/instrument/collector.rb +5 -6
  17. data/lib/logstash/instrument/metric.rb +1 -1
  18. data/lib/logstash/instrument/metric_store.rb +54 -0
  19. data/lib/logstash/pipeline.rb +10 -4
  20. data/lib/logstash/runner.rb +2 -2
  21. data/lib/logstash/util/safe_uri.rb +48 -0
  22. data/lib/logstash/version.rb +1 -1
  23. data/lib/logstash/webserver.rb +8 -7
  24. data/logstash-core.gemspec +1 -1
  25. data/spec/api/lib/api/node_plugins_spec.rb +32 -0
  26. data/spec/api/lib/api/node_spec.rb +41 -7
  27. data/spec/api/lib/api/node_stats_spec.rb +31 -6
  28. data/spec/api/lib/api/plugins_spec.rb +1 -7
  29. data/spec/api/lib/api/root_spec.rb +2 -7
  30. data/spec/api/lib/api/support/resource_dsl_methods.rb +14 -7
  31. data/spec/api/spec_helper.rb +24 -50
  32. data/spec/logstash/agent_spec.rb +36 -13
  33. data/spec/logstash/config/config_ast_spec.rb +43 -0
  34. data/spec/logstash/config/mixin_spec.rb +138 -0
  35. data/spec/logstash/inputs/metrics_spec.rb +10 -11
  36. data/spec/logstash/instrument/collector_spec.rb +1 -1
  37. data/spec/logstash/instrument/metric_store_spec.rb +61 -0
  38. data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +6 -3
  39. data/spec/logstash/pipeline_spec.rb +9 -9
  40. data/spec/support/mocks_classes.rb +2 -1
  41. metadata +39 -35
data/spec/logstash/agent_spec.rb

@@ -328,11 +328,11 @@ describe LogStash::Agent do
       end
     end
 
+
   context "metrics after config reloading" do
-    let(:dummy_output) { DummyOutput.new }
     let(:config) { "input { generator { } } output { dummyoutput { } }" }
-    let(:new_config_generator_counter) { 50 }
-    let(:new_config) { "input { generator { count => #{new_config_generator_counter} } } output { dummyoutput {} }" }
+    let(:new_config_generator_counter) { 500 }
+    let(:new_config) { "input { generator { count => #{new_config_generator_counter} } } output { dummyoutput2 {} }" }
     let(:config_path) do
       f = Stud::Temporary.file
       f.write(config)
@@ -353,11 +353,24 @@ describe LogStash::Agent do
                               "metric.collect" => true })
     end
 
+    # We need to create theses dummy classes to know how many
+    # events where actually generated by the pipeline and successfully send to the output.
+    # Theses values are compared with what we store in the metric store.
+    let!(:dummy_output) { DummyOutput.new }
+    let!(:dummy_output2) { DummyOutput.new }
+    class DummyOutput2 < LogStash::Outputs::Base; end
+
     before :each do
       allow(DummyOutput).to receive(:new).at_least(:once).with(anything).and_return(dummy_output)
+      allow(DummyOutput2).to receive(:new).at_least(:once).with(anything).and_return(dummy_output2)
+
       allow(LogStash::Plugin).to receive(:lookup).with("input", "generator").and_return(LogStash::Inputs::Generator)
       allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(LogStash::Codecs::Plain)
       allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(DummyOutput)
+      allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput2").and_return(DummyOutput2)
+
+      @abort_on_exception = Thread.abort_on_exception
+      Thread.abort_on_exception = true
 
       @t = Thread.new do
         subject.register_pipeline("main", pipeline_settings)
@@ -368,28 +381,38 @@ describe LogStash::Agent do
     end
 
     after :each do
-      subject.shutdown
-      Stud.stop!(@t)
-      @t.join
+      begin
+        subject.shutdown
+        Stud.stop!(@t)
+        @t.join
+      ensure
+        Thread.abort_on_exception = @abort_on_exception
+      end
     end
 
     it "resets the metric collector" do
      # We know that the store has more events coming in.
-      sleep(0.01) while dummy_output.events.size < new_config_generator_counter
-      snapshot = LogStash::Instrument::Collector.instance.snapshot_metric
+      while dummy_output.events.size <= new_config_generator_counter
+        sleep(0.1)
+      end
+
+      snapshot = subject.metric.collector.snapshot_metric
      expect(snapshot.metric_store.get_with_path("/stats/events")[:stats][:events][:in].value).to be > new_config_generator_counter
 
      # update the configuration and give some time to logstash to pick it up and do the work
-      IO.write(config_path, new_config)
+      # Also force a flush to disk to make sure ruby reload it.
+      File.open(config_path, "w") do |f|
+        f.write(new_config)
+        f.fsync
+      end
 
      sleep(interval * 3) # Give time to reload the config
 
-      # Since there is multiple threads involved with the configuration reload,
-      # It can take some time to the stats be visible in the store but it will
      # be eventually consistent.
-      sleep(0.01) while dummy_output.events.size < new_config_generator_counter
+      sleep(0.01) while dummy_output2.events.size < new_config_generator_counter
 
-      value = LogStash::Instrument::Collector.instance.snapshot_metric.metric_store.get_with_path("/stats/events")[:stats][:events][:in].value
+      snapshot = subject.metric.collector.snapshot_metric
+      value = snapshot.metric_store.get_with_path("/stats/events")[:stats][:events][:in].value
      expect(value).to eq(new_config_generator_counter)
    end
  end
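For context, the reload specs above now flip Thread.abort_on_exception globally so that failures in the background agent/pipeline threads surface immediately, and restore it afterwards. A minimal sketch of that save/restore pattern (variable names are illustrative, not taken from the spec):

    previous = Thread.abort_on_exception
    Thread.abort_on_exception = true   # surface exceptions raised in background threads
    begin
      # ... start the agent, trigger the config reload, assert on the metric store ...
    ensure
      Thread.abort_on_exception = previous   # always restore the global flag
    end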
data/spec/logstash/config/config_ast_spec.rb

@@ -144,6 +144,49 @@ describe LogStashConfigParser do
     end
   end
 
+  context "when using two plugin sections of the same type" do
+    let(:pipeline_klass) do
+      Class.new do
+        def initialize(config)
+          grammar = LogStashConfigParser.new
+          @config = grammar.parse(config)
+          @code = @config.compile
+          eval(@code)
+        end
+        def plugin(*args);end
+      end
+    end
+    context "(filters)" do
+      let(:config_string) {
+        "input { generator { } }
+         filter { filter1 { } }
+         filter { filter1 { } }
+         output { output1 { } }"
+      }
+
+
+      it "should create a pipeline with both sections" do
+        generated_objects = pipeline_klass.new(config_string).instance_variable_get("@generated_objects")
+        filters = generated_objects.keys.map(&:to_s).select {|obj_name| obj_name.match(/^filter.+?_\d+$/) }
+        expect(filters.size).to eq(2)
+      end
+    end
+
+    context "(filters)" do
+      let(:config_string) {
+        "input { generator { } }
+         output { output1 { } }
+         output { output1 { } }"
+      }
+
+
+      it "should create a pipeline with both sections" do
+        generated_objects = pipeline_klass.new(config_string).instance_variable_get("@generated_objects")
+        outputs = generated_objects.keys.map(&:to_s).select {|obj_name| obj_name.match(/^output.+?_\d+$/) }
+        expect(outputs.size).to eq(2)
+      end
+    end
+  end
   context "when creating two instances of the same configuration" do
 
     let(:config_string) {
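These parser specs assert that repeating a section type (two filter blocks, or two output blocks) compiles into one generated plugin object per section rather than one section overwriting the other. A rough sketch of the check, assuming a wrapper class that stubs #plugin the way the spec's pipeline_klass does:

    grammar = LogStashConfigParser.new
    config  = "input { generator { } } output { output1 { } } output { output1 { } }"
    code    = grammar.parse(config).compile
    # eval(code) inside such a wrapper defines one @generated_objects entry per
    # section; the specs count the keys matching /^output.+?_\d+$/ and expect 2.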
data/spec/logstash/config/mixin_spec.rb

@@ -68,6 +68,74 @@ describe LogStash::Config::Mixin do
     end
   end
 
+  context "when validating lists of items" do
+    let(:klass) do
+      Class.new(LogStash::Filters::Base) do
+        config_name "multiuri"
+        config :uris, :validate => :uri, :list => true
+        config :strings, :validate => :string, :list => true
+        config :required_strings, :validate => :string, :list => true, :required => true
+      end
+    end
+
+    let(:uris) { ["http://example.net/1", "http://example.net/2"] }
+    let(:safe_uris) { uris.map {|str| ::LogStash::Util::SafeURI.new(str) } }
+    let(:strings) { ["I am a", "modern major general"] }
+    let(:required_strings) { ["required", "strings"] }
+
+    subject { klass.new("uris" => uris, "strings" => strings, "required_strings" => required_strings) }
+
+    it "a URI list should return an array of URIs" do
+      expect(subject.uris).to match_array(safe_uris)
+    end
+
+    it "a string list should return an array of strings" do
+      expect(subject.strings).to match_array(strings)
+    end
+
+    context "with a scalar value" do
+      let(:strings) { "foo" }
+
+      it "should return the scalar value as a single element array" do
+        expect(subject.strings).to match_array([strings])
+      end
+    end
+
+    context "with an empty list" do
+      let(:strings) { [] }
+
+      it "should return nil" do
+        expect(subject.strings).to be_nil
+      end
+    end
+
+    describe "with required => true" do
+      context "and a single element" do
+        let(:required_strings) { ["foo"] }
+
+        it "should return the single value" do
+          expect(subject.required_strings).to eql(required_strings)
+        end
+      end
+
+      context "with an empty list" do
+        let (:required_strings) { [] }
+
+        it "should raise a configuration error" do
+          expect { subject.required_strings }.to raise_error(LogStash::ConfigurationError)
+        end
+      end
+
+      context "with no value specified" do
+        let (:required_strings) { nil }
+
+        it "should raise a configuration error" do
+          expect { subject.required_strings }.to raise_error(LogStash::ConfigurationError)
+        end
+      end
+    end
+  end
+
   context "when validating :password" do
     let(:klass) do
       Class.new(LogStash::Filters::Base) do
@@ -102,6 +170,76 @@ describe LogStash::Config::Mixin do
     end
   end
 
+  context "when validating :uri" do
+    let(:klass) do
+      Class.new(LogStash::Filters::Base) do
+        config_name "fakeuri"
+        config :uri, :validate => :uri
+      end
+    end
+
+    shared_examples("safe URI") do
+      subject { klass.new("uri" => uri_str) }
+
+      it "should be a SafeURI object" do
+        expect(subject.uri).to(be_a(LogStash::Util::SafeURI))
+      end
+
+      it "should make password values hidden with #to_s" do
+        expect(subject.uri.to_s).to eql(uri_hidden)
+      end
+
+      it "should make password values hidden with #inspect" do
+        expect(subject.uri.inspect).to eql(uri_hidden)
+      end
+
+      it "should correctly copy URI types" do
+        clone = subject.class.new(subject.params)
+        expect(clone.uri.to_s).to eql(uri_hidden)
+      end
+
+      it "should make the real URI object availale under #uri" do
+        expect(subject.uri.uri).to be_a(::URI)
+      end
+
+      it "should obfuscate original_params" do
+        expect(subject.original_params['uri']).to(be_a(LogStash::Util::SafeURI))
+      end
+
+      context "attributes" do
+        [:scheme, :user, :password, :hostname, :path].each do |attr|
+          it "should make #{attr} available" do
+            expect(subject.uri.send(attr)).to eql(self.send(attr))
+          end
+        end
+      end
+    end
+
+    context "with a username / password" do
+      let(:scheme) { "myscheme" }
+      let(:user) { "myuser" }
+      let(:password) { "fancypants" }
+      let(:hostname) { "myhostname" }
+      let(:path) { "/my/path" }
+      let(:uri_str) { "#{scheme}://#{user}:#{password}@#{hostname}#{path}" }
+      let(:uri_hidden) { "#{scheme}://#{user}:#{LogStash::Util::SafeURI::PASS_PLACEHOLDER}@#{hostname}#{path}" }
+
+      include_examples("safe URI")
+    end
+
+    context "without a username / password" do
+      let(:scheme) { "myscheme" }
+      let(:user) { nil }
+      let(:password) { nil }
+      let(:hostname) { "myhostname" }
+      let(:path) { "/my/path" }
+      let(:uri_str) { "#{scheme}://#{hostname}#{path}" }
+      let(:uri_hidden) { "#{scheme}://#{hostname}#{path}" }
+
+      include_examples("safe URI")
+    end
+  end
+
   describe "obsolete settings" do
     let(:plugin_class) do
       Class.new(LogStash::Inputs::Base) do
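These specs cover two config-mixin features: :list => true (a scalar is wrapped into a single-element array, an empty list becomes nil, and :required rejects empty or missing lists) and :validate => :uri (the value is wrapped in LogStash::Util::SafeURI, which masks the password portion in #to_s and #inspect). A hypothetical plugin declaring such options, shown only to illustrate the option shapes the specs exercise; it is not part of this release:

    require "logstash/filters/base"

    # Hypothetical filter class; names are illustrative.
    class LogStash::Filters::MultiEndpoint < LogStash::Filters::Base
      config_name "multi_endpoint"
      config :uris,   :validate => :uri,    :list => true                      # each entry becomes a SafeURI
      config :labels, :validate => :string, :list => true, :required => true   # scalar or array accepted
    end

    f = LogStash::Filters::MultiEndpoint.new("uris" => "http://user:secret@example.net", "labels" => "a")
    f.uris.first.class  # => LogStash::Util::SafeURI
    f.uris.first.to_s   # password replaced by LogStash::Util::SafeURI::PASS_PLACEHOLDER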
data/spec/logstash/inputs/metrics_spec.rb

@@ -3,15 +3,17 @@ require "logstash/inputs/metrics"
 require "spec_helper"
 
 describe LogStash::Inputs::Metrics do
+  let(:collector) { LogStash::Instrument::Collector.new }
+  let(:metric) { LogStash::Instrument::Metric.new(collector) }
+  let(:queue) { [] }
+
   before :each do
-    LogStash::Instrument::Collector.instance.clear
+    allow(subject).to receive(:metric).and_return(metric)
   end
 
-  let(:queue) { [] }
-
   describe "#run" do
     it "should register itself to the collector observer" do
-      expect(LogStash::Instrument::Collector.instance).to receive(:add_observer).with(subject)
+      expect(collector).to receive(:add_observer).with(subject)
      t = Thread.new { subject.run(queue) }
      sleep(0.1) # give a bit of time to the thread to start
      subject.stop
@@ -19,24 +21,21 @@ describe LogStash::Inputs::Metrics do
    end
 
  describe "#update" do
-    let(:namespaces) { [:root, :base] }
-    let(:key) { :foo }
-    let(:metric_store) { LogStash::Instrument::MetricStore.new }
-
    it "should fill up the queue with received events" do
      Thread.new { subject.run(queue) }
      sleep(0.1)
      subject.stop
 
-      metric_store.fetch_or_store(namespaces, key, LogStash::Instrument::MetricType::Counter.new(namespaces, key))
-      subject.update(LogStash::Instrument::Snapshot.new(metric_store))
+      metric.increment([:root, :test], :plugin)
+
+      subject.update(collector.snapshot_metric)
      expect(queue.count).to eq(1)
    end
  end
 
  describe "#stop" do
    it "should remove itself from the the collector observer" do
-      expect(LogStash::Instrument::Collector.instance).to receive(:delete_observer).with(subject)
+      expect(collector).to receive(:delete_observer).with(subject)
      t = Thread.new { subject.run(queue) }
      sleep(0.1) # give a bit of time to the thread to start
      subject.stop
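The thread running through most of these spec changes is that the collector is no longer reached via the LogStash::Instrument::Collector.instance singleton; each test (and each agent or pipeline) wires its own collector through a Metric. Roughly, following the calls that appear in the updated specs:

    collector = LogStash::Instrument::Collector.new
    metric    = LogStash::Instrument::Metric.new(collector)

    metric.increment([:root, :test], :plugin)   # record a counter under the given namespace path
    snapshot  = collector.snapshot_metric       # point-in-time snapshot backed by a MetricStore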
data/spec/logstash/instrument/collector_spec.rb

@@ -3,7 +3,7 @@ require "logstash/instrument/collector"
 require "spec_helper"
 
 describe LogStash::Instrument::Collector do
-  subject { LogStash::Instrument::Collector.instance }
+  subject { LogStash::Instrument::Collector.new }
   describe "#push" do
     let(:namespaces_path) { [:root, :pipelines, :pipelines01] }
     let(:key) { :my_key }
data/spec/logstash/instrument/metric_store_spec.rb

@@ -142,6 +142,67 @@ describe LogStash::Instrument::MetricStore do
     end
   end
 
+  describe "get_shallow" do
+    it "should retrieve a path as a single value" do
+      r = subject.get_shallow(:node, :sashimi, :pipelines, :pipeline01, :processed_events_in)
+      expect(r.value).to eql(1)
+    end
+  end
+
+  describe "extract_metrics" do
+    it "should retrieve non-nested values correctly" do
+      r = subject.extract_metrics(
+        [:node, :sashimi, :pipelines, :pipeline01],
+        :processed_events_in,
+        :processed_events_out,
+      )
+      expect(r[:processed_events_in]).to eql(1)
+      expect(r[:processed_events_out]).to eql(1)
+    end
+
+    it "should retrieve nested values correctly alongside non-nested ones" do
+      r = subject.extract_metrics(
+        [:node, :sashimi, :pipelines, :pipeline01],
+        :processed_events_in,
+        [:plugins, :"logstash-output-elasticsearch", :event_in]
+      )
+      expect(r[:processed_events_in]).to eql(1)
+      expect(r[:plugins][:"logstash-output-elasticsearch"][:event_in]).to eql(1)
+    end
+
+    it "should retrieve multiple nested keys at a given location" do
+      r = subject.extract_metrics(
+        [:node, :sashimi, :pipelines],
+        [:pipeline01, [:processed_events_in, :processed_events_out]]
+      )
+
+      expect(r[:pipeline01][:processed_events_in]).to eql(1)
+      expect(r[:pipeline01][:processed_events_out]).to eql(1)
+    end
+
+    it "should retrieve a single key nested in multiple places" do
+      r = subject.extract_metrics(
+        [:node, :sashimi, :pipelines],
+        [[:pipeline01, :pipeline02], :processed_events_out]
+      )
+
+      expect(r[:pipeline01][:processed_events_out]).to eql(1)
+      expect(r[:pipeline02][:processed_events_out]).to eql(1)
+    end
+
+    it "handle overlaps of paths" do
+      r = subject.extract_metrics(
+        [:node, :sashimi, :pipelines],
+        [:pipeline01, :processed_events_in],
+        [[:pipeline01, :pipeline02], :processed_events_out]
+      )
+
+      expect(r[:pipeline01][:processed_events_in]).to eql(1)
+      expect(r[:pipeline01][:processed_events_out]).to eql(1)
+      expect(r[:pipeline02][:processed_events_out]).to eql(1)
+    end
+  end
+
  describe "#each" do
    it "retrieves all the metric" do
      expect(subject.each.size).to eq(metric_events.size)
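The new MetricStore specs document two read APIs: get_shallow, which walks a single key path, and extract_metrics, whose key specs can be plain symbols, nested [branch, key] arrays, or [[branch1, branch2], key] to read the same key under several branches. The call shape, with store standing in for a populated LogStash::Instrument::MetricStore as set up in the spec fixture:

    store.get_shallow(:node, :sashimi, :pipelines, :pipeline01, :processed_events_in).value

    store.extract_metrics(
      [:node, :sashimi, :pipelines],                        # common prefix for every key spec
      [:pipeline01, :processed_events_in],                  # one key under one branch
      [[:pipeline01, :pipeline02], :processed_events_out]   # same key under two branches
    )
    # => { :pipeline01 => { :processed_events_in => ..., :processed_events_out => ... },
    #      :pipeline02 => { :processed_events_out => ... } }   (values as in the fixture above)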
data/spec/logstash/instrument/periodic_poller/jvm_spec.rb

@@ -1,8 +1,10 @@
-require 'spec_helper'
-require 'logstash/instrument/periodic_poller/jvm'
+# encoding: utf-8
+require "spec_helper"
+require "logstash/instrument/periodic_poller/jvm"
+require "logstash/instrument/collector"
 
 describe LogStash::Instrument::PeriodicPoller::JVM do
-  let(:metric) { LogStash::Instrument::Metric.new }
+  let(:metric) { LogStash::Instrument::Metric.new(LogStash::Instrument::Collector.new) }
   let(:options) { {} }
   subject(:jvm) { described_class.new(metric, options) }
 
@@ -18,6 +20,7 @@ describe LogStash::Instrument::PeriodicPoller::JVM do
   end
 
   describe "metrics" do
+    before(:each) { jvm.collect }
     let(:snapshot_store) { metric.collector.snapshot_metric.metric_store }
     subject(:jvm_metrics) { snapshot_store.get_shallow(:jvm, :process) }
 
data/spec/logstash/pipeline_spec.rb

@@ -468,8 +468,8 @@ describe LogStash::Pipeline do
       sleep 0.1 while !pipeline.ready?
       # give us a bit of time to flush the events
       wait(5).for do
-        next unless output && output.events && output.events.first
-        output.events.first.get("message").split("\n").count
+        next unless output && output.events && !(event = output.events.pop).nil?
+        event.get("message").split("\n").count
       end.to eq(number_of_events)
       pipeline.shutdown
     end
@@ -554,10 +554,12 @@ describe LogStash::Pipeline do
   end
 
   context "when collecting metrics in the pipeline" do
+    let(:metric) { LogStash::Instrument::Metric.new(LogStash::Instrument::Collector.new) }
+
+    subject { described_class.new(config, pipeline_settings_obj, metric) }
+
     let(:pipeline_settings) { { "pipeline.id" => pipeline_id } }
-    subject { described_class.new(config, pipeline_settings_obj) }
     let(:pipeline_id) { "main" }
-    let(:metric) { LogStash::Instrument::Metric.new }
     let(:number_of_events) { 1000 }
     let(:multiline_id) { "my-multiline" }
     let(:multiline_id_other) { "my-multiline_other" }
@@ -591,6 +593,7 @@ describe LogStash::Pipeline do
     EOS
     end
     let(:dummyoutput) { DummyOutput.new({ "id" => dummy_output_id }) }
+    let(:metric_store) { subject.metric.collector.snapshot_metric.metric_store }
 
     before :each do
       allow(DummyOutput).to receive(:new).with(any_args).and_return(dummyoutput)
@@ -599,9 +602,6 @@ describe LogStash::Pipeline do
       allow(LogStash::Plugin).to receive(:lookup).with("filter", "multiline").and_return(LogStash::Filters::Multiline)
       allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(DummyOutput)
 
-      # Reset the metric store
-      LogStash::Instrument::Collector.instance.clear
-
       Thread.new { subject.run }
       # make sure we have received all the generated events
       sleep 1 while dummyoutput.events.size < number_of_events
@@ -612,7 +612,7 @@ describe LogStash::Pipeline do
     end
 
     context "global metric" do
-      let(:collected_metric) { LogStash::Instrument::Collector.instance.snapshot_metric.metric_store.get_with_path("stats/events") }
+      let(:collected_metric) { metric_store.get_with_path("stats/events") }
 
       it "populates the differents" do
         expect(collected_metric[:stats][:events][:in].value).to eq(number_of_events)
@@ -622,7 +622,7 @@ describe LogStash::Pipeline do
     end
 
     context "pipelines" do
-      let(:collected_metric) { LogStash::Instrument::Collector.instance.snapshot_metric.metric_store.get_with_path("stats/pipelines/") }
+      let(:collected_metric) { metric_store.get_with_path("stats/pipelines/") }
 
      it "populates the pipelines core metrics" do
        expect(collected_metric[:stats][:pipelines][:main][:events][:in].value).to eq(number_of_events)
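Taken together, the pipeline spec changes show the same de-singleton move at the pipeline level: the metric (and therefore its collector) is handed to the pipeline, and stats are read back from that collector instead of from Collector.instance. A rough sketch based on the calls in the specs, where config_str and settings stand in for the spec's config and pipeline_settings_obj:

    metric   = LogStash::Instrument::Metric.new(LogStash::Instrument::Collector.new)
    pipeline = LogStash::Pipeline.new(config_str, settings, metric)   # metric as the third argument is new here

    # after the pipeline has processed events, read the counters back from its own collector:
    store = pipeline.metric.collector.snapshot_metric.metric_store
    store.get_with_path("stats/events")[:stats][:events][:in].value
    store.get_with_path("stats/pipelines/")[:stats][:pipelines][:main][:events][:in].value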