logstash-core 7.3.1-java → 7.3.2-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 4d263b8d4fecd83fd35aa6fcad205439982be93dbb88cfae5a5b400eea215d57
-  data.tar.gz: b8b4f33ed54f699b45f53d9f08f238bd06abaa6ef9fbe0c383443c65906985eb
+  metadata.gz: a9f056b670518c9c84a1080c87e4ada7f16729ceb93dc8fc4cbe62c71ef47e86
+  data.tar.gz: 8abadb91f916ae4e32ccc0dba9b7fb1ba0917948d7da3c5de97519dbf4f7dd1c
 SHA512:
-  metadata.gz: 9b680b1220b7a973f29cf9d999ace80da14311c2ceacfeee433c619bfc770007d87da83ce645cddb1d01b8154206b2df991fba5ece1a4c42d528c0e1ffbfd3c6
-  data.tar.gz: 39a7fc2346321781cc7385f04c4ca399735dbbda3d7285b57d30d1b3196b58220ecbcb0d90ecc685dc64cd1871ec2bd8d51e483ac36ea9065e40dc0aee0c3a76
+  metadata.gz: 3db1f0158ea7160a663d7f54ed9137ed52112ded806657393397eb35e69ecbb994d19dcf15e590823326e821737a723233441a16ed2234ab5c1e0aef6e9bde7d
+  data.tar.gz: 99861090b2d27b2c48a5f041147566e4c821027da0de42981b824645600ea267c2f63e5a6f04ccc6b415404e6e856dbed15075fd25d5b52a2c50caa813b8cff4
@@ -65,8 +65,8 @@ module LogStash
   service.agent,
   service.snapshot.metric_store,
   true).each_with_object({}) do |pipeline_stats, memo|
-    pipeline_id = pipeline_stats["id"].to_s
-    memo[pipeline_id] = pipeline_stats
+    p_id = pipeline_stats["id"].to_s
+    memo[p_id] = pipeline_stats
   end
 
   if pipeline_id.nil?
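
The block variable is renamed from pipeline_id to p_id so it no longer shadows the pipeline_id that the surrounding method still checks afterwards (if pipeline_id.nil?). A minimal, self-contained sketch of the each_with_object pattern used here, with hypothetical sample data standing in for the metric store output:

  # Hypothetical stand-in for the per-pipeline stats returned by the metric store.
  stats = [
    { "id" => "main",      "events" => { "out" => 10 } },
    { "id" => "secondary", "events" => { "out" => 3 } }
  ]

  by_id = stats.each_with_object({}) do |pipeline_stats, memo|
    p_id = pipeline_stats["id"].to_s   # renamed so it does not shadow the outer pipeline_id
    memo[p_id] = pipeline_stats
  end

  by_id.keys  # => ["main", "secondary"]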
@@ -1,6 +1,6 @@
 # encoding: utf-8
 require "logstash/config/source/local"
-require "logstash/pipeline_settings"
+require "logstash/settings"
 
 module LogStash module Config module Source
   class MultiLocal < Local
@@ -15,7 +15,8 @@ module LogStash module Config module Source
     def pipeline_configs
       pipelines = retrieve_yaml_pipelines()
       pipelines_settings = pipelines.map do |pipeline_settings|
-        ::LogStash::PipelineSettings.from_settings(@original_settings.clone).merge(pipeline_settings)
+        clone = @original_settings.clone
+        clone.merge_pipeline_settings(pipeline_settings)
       end
       detect_duplicate_pipelines(pipelines_settings)
       pipeline_configs = pipelines_settings.map do |pipeline_settings|
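
A hedged sketch of the new flow in pipeline_configs, where settings stands in for the agent-level LogStash::Settings instance held in @original_settings and the hash stands in for one entry parsed from pipelines.yml:

  # Hypothetical pipelines.yml entry; only pipeline-scoped keys are expected.
  pipeline_settings = { "pipeline.id" => "apache", "pipeline.workers" => 2 }

  clone = settings.clone                            # full copy of the agent settings, one per pipeline
  clone.merge_pipeline_settings(pipeline_settings)  # overlay only the whitelisted, per-pipeline keys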
@@ -10,6 +10,39 @@ module LogStash
     include LogStash::Util::SubstitutionVariables
     include LogStash::Util::Loggable
 
+    # there are settings that the pipeline uses and can be changed per pipeline instance
+    PIPELINE_SETTINGS_WHITE_LIST = [
+      "config.debug",
+      "config.support_escapes",
+      "config.reload.automatic",
+      "config.reload.interval",
+      "config.string",
+      "dead_letter_queue.enable",
+      "dead_letter_queue.max_bytes",
+      "metric.collect",
+      "pipeline.java_execution",
+      "pipeline.plugin_classloaders",
+      "path.config",
+      "path.dead_letter_queue",
+      "path.queue",
+      "pipeline.batch.delay",
+      "pipeline.batch.size",
+      "pipeline.id",
+      "pipeline.reloadable",
+      "pipeline.system",
+      "pipeline.workers",
+      "queue.checkpoint.acks",
+      "queue.checkpoint.interval",
+      "queue.checkpoint.writes",
+      "queue.checkpoint.retry",
+      "queue.drain",
+      "queue.max_bytes",
+      "queue.max_events",
+      "queue.page_capacity",
+      "queue.type",
+    ]
+
+
     def initialize
       @settings = {}
       # Theses settings were loaded from the yaml file
@@ -89,6 +122,15 @@ module LogStash
       self
     end
 
+    def merge_pipeline_settings(hash, graceful = false)
+      hash.each do |key, _|
+        unless PIPELINE_SETTINGS_WHITE_LIST.include?(key)
+          raise ArgumentError.new("Only pipeline related settings are expected. Received \"#{key}\". Allowed settings: #{PIPELINE_SETTINGS_WHITE_LIST}")
+        end
+      end
+      merge(hash, graceful)
+    end
+
     def format_settings
       output = []
       output << "-------- Logstash Settings (* means modified) ---------"
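
A short usage sketch of the new merge_pipeline_settings guard, assuming settings is a LogStash::Settings instance: keys present in PIPELINE_SETTINGS_WHITE_LIST are merged, anything else raises before merge(hash, graceful) runs.

  # Accepted: both keys are in PIPELINE_SETTINGS_WHITE_LIST.
  settings.merge_pipeline_settings("pipeline.workers" => 4, "queue.type" => "persisted")

  # Rejected: "http.port" is a global setting, so ArgumentError is raised first.
  begin
    settings.merge_pipeline_settings("http.port" => 9601)
  rescue ArgumentError => e
    e.message  # names the offending key and lists the allowed settings
  end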
@@ -49,4 +49,41 @@ describe LogStash::Api::Commands::Stats do
     end
 
   end
+
+  describe "pipeline stats" do
+    let(:report_method) { :pipeline }
+    it "returns information on existing pipeline" do
+      expect(report.keys).to include(:main)
+    end
+    context "for each pipeline" do
+      it "returns information on pipeline" do
+        expect(report[:main].keys).to include(
+          :events,
+          :plugins,
+          :reloads,
+          :queue,
+        )
+      end
+      it "returns event information" do
+        expect(report[:main][:events].keys).to include(
+          :in,
+          :filtered,
+          :duration_in_millis,
+          :out,
+          :queue_push_duration_in_millis
+        )
+      end
+    end
+    context "when using multiple pipelines" do
+      before(:each) do
+        expect(LogStash::Config::PipelinesInfo).to receive(:format_pipelines_info).and_return([
+          {"id" => :main},
+          {"id" => :secondary},
+        ])
+      end
+      it "contains metrics for all pipelines" do
+        expect(report.keys).to include(:main, :secondary)
+      end
+    end
+  end
 end
@@ -36,9 +36,7 @@ describe "OpenSSL defaults" do
   # https://github.com/jordansissel/ruby-flores/blob/master/spec/flores/pki_integration_spec.rb
   # since these helpers were created to fix this particular issue
   let(:csr) { Flores::PKI::CertificateSigningRequest.new }
-  # Here, I use a 1024-bit key for faster tests.
-  # Please do not use such small keys in production.
-  let(:key_bits) { 1024 }
+  let(:key_bits) { 2048 }
   let(:key) { OpenSSL::PKey::RSA.generate(key_bits, 65537) }
   let(:certificate_duration) { Flores::Random.number(1..86400) }
 
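
The spec drops the 1024-bit test key in favour of 2048 bits, presumably because current OpenSSL defaults treat 1024-bit RSA as too weak to negotiate. A standalone sketch of the same generation call used in the let block above:

  require "openssl"

  # Same shape as the spec's let(:key): 2048-bit modulus, public exponent 65537.
  key = OpenSSL::PKey::RSA.generate(2048, 65537)
  key.n.num_bits  # => 2048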
@@ -24,9 +24,12 @@ shared_context "api setup" do
   settings.set("config.reload.automatic", false)
   @agent = make_test_agent(settings)
   @agent.execute
+  @pipelines_registry = LogStash::PipelinesRegistry.new
   pipeline_config = mock_pipeline_config(:main, "input { generator { id => '123' } } output { null {} }")
   pipeline_creator = LogStash::PipelineAction::Create.new(pipeline_config, @agent.metric)
-  @pipelines_registry = LogStash::PipelinesRegistry.new
+  expect(pipeline_creator.execute(@agent, @pipelines_registry)).to be_truthy
+  pipeline_config = mock_pipeline_config(:secondary, "input { generator { id => '123' } } output { null {} }")
+  pipeline_creator = LogStash::PipelineAction::Create.new(pipeline_config, @agent.metric)
   expect(pipeline_creator.execute(@agent, @pipelines_registry)).to be_truthy
 end
 
@@ -1,7 +1,7 @@
 ---
 # alpha and beta qualifiers are now added via VERSION_QUALIFIER environment var
-logstash: 7.3.1
-logstash-core: 7.3.1
+logstash: 7.3.2
+logstash-core: 7.3.2
 logstash-core-plugin-api: 2.1.16
 
 # jruby must reference a *released* version of jruby which can be downloaded from the official download url
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-core
 version: !ruby/object:Gem::Version
-  version: 7.3.1
+  version: 7.3.2
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-08-19 00:00:00.000000000 Z
+date: 2019-09-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -414,7 +414,6 @@ files:
 - lib/logstash/pipeline_action/reload.rb
 - lib/logstash/pipeline_action/stop.rb
 - lib/logstash/pipeline_reporter.rb
-- lib/logstash/pipeline_settings.rb
 - lib/logstash/pipelines_registry.rb
 - lib/logstash/plugin.rb
 - lib/logstash/plugin_metadata.rb
@@ -1,55 +0,0 @@
-# encoding: utf-8
-require "logstash/settings"
-
-module LogStash
-  class PipelineSettings < Settings
-
-    # there are settings that the pipeline uses and can be changed per pipeline instance
-    SETTINGS_WHITE_LIST = [
-      "config.debug",
-      "config.support_escapes",
-      "config.reload.automatic",
-      "config.reload.interval",
-      "config.string",
-      "dead_letter_queue.enable",
-      "dead_letter_queue.max_bytes",
-      "metric.collect",
-      "pipeline.java_execution",
-      "pipeline.plugin_classloaders",
-      "path.config",
-      "path.dead_letter_queue",
-      "path.queue",
-      "pipeline.batch.delay",
-      "pipeline.batch.size",
-      "pipeline.id",
-      "pipeline.reloadable",
-      "pipeline.system",
-      "pipeline.workers",
-      "queue.checkpoint.acks",
-      "queue.checkpoint.interval",
-      "queue.checkpoint.writes",
-      "queue.checkpoint.retry",
-      "queue.drain",
-      "queue.max_bytes",
-      "queue.max_events",
-      "queue.page_capacity",
-      "queue.type",
-    ]
-
-    # register a set of settings that is used as the default set of pipelines settings
-    def self.from_settings(settings)
-      pipeline_settings = self.new
-      SETTINGS_WHITE_LIST.each do |setting|
-        pipeline_settings.register(settings.get_setting(setting).clone)
-      end
-      pipeline_settings
-    end
-
-    def register(setting)
-      unless SETTINGS_WHITE_LIST.include?(setting.name)
-        raise ArgumentError.new("Only pipeline related settings can be registered in a PipelineSettings object. Received \"#{setting.name}\". Allowed settings: #{SETTINGS_WHITE_LIST}")
-      end
-      super(setting)
-    end
-  end
-end
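
Taken together, these hunks fold the old LogStash::PipelineSettings subclass into LogStash::Settings itself. A hedged before/after sketch of the call sites, where settings stands in for the agent-level settings object and pipeline_hash for one pipelines.yml entry:

  # 7.3.1: a dedicated subclass registered a whitelisted copy of each setting.
  pipeline = ::LogStash::PipelineSettings.from_settings(settings.clone).merge(pipeline_hash)

  # 7.3.2: clone the full settings object and overlay only pipeline-scoped keys.
  pipeline = settings.clone
  pipeline.merge_pipeline_settings(pipeline_hash)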