logstash-core 7.4.2-java → 7.5.0-java

@@ -44,6 +44,7 @@ module LogStash
  Setting::Boolean.new("pipeline.java_execution", true),
  Setting::Boolean.new("pipeline.reloadable", true),
  Setting::Boolean.new("pipeline.plugin_classloaders", false),
+ Setting::Boolean.new("pipeline.separate_logs", false),
  Setting.new("path.plugins", Array, []),
  Setting::NullableString.new("interactive", nil, false),
  Setting::Boolean.new("config.debug", false),
@@ -109,7 +109,11 @@ module LogStash; class JavaPipeline < JavaBasePipeline
  @finished_run.make_true
  rescue => e
  close
- logger.error("Pipeline aborted due to error", default_logging_keys(:exception => e, :backtrace => e.backtrace))
+ pipeline_log_params = default_logging_keys(
+   :exception => e,
+   :backtrace => e.backtrace,
+   "pipeline.sources" => pipeline_source_details)
+ logger.error("Pipeline aborted due to error", pipeline_log_params)
  ensure
  @finished_execution.make_true
  end
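
This hunk (and the matching ones in the Ruby pipeline further down) is a structural refactor plus one new field: the structured logging keys are assembled into a local pipeline_log_params hash, and "pipeline.sources" (from pipeline_source_details) is added so aborts can be traced back to the config sources that defined the pipeline. A rough, hedged approximation of the pattern, with simplified stand-ins for the two helpers:

# Stand-ins for illustration only; the real default_logging_keys and
# pipeline_source_details in logstash-core derive these values from the
# running pipeline rather than from literals.
def default_logging_keys(other_keys = {})
  { :pipeline_id => "main" }.merge(other_keys)
end

def pipeline_source_details
  ["/etc/logstash/conf.d/example.conf"] # hypothetical config source path
end

begin
  raise "boom"
rescue => e
  pipeline_log_params = default_logging_keys(
    :exception => e,
    :backtrace => e.backtrace,
    "pipeline.sources" => pipeline_source_details)
  # Logstash passes the hash to its structured logger; a plain inspect shows
  # the same key/value payload.
  puts pipeline_log_params.inspect
end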
@@ -225,11 +229,14 @@ module LogStash; class JavaPipeline < JavaBasePipeline
  config_metric.gauge(:graph, ::LogStash::Config::LIRSerializer.serialize(lir))
  config_metric.gauge(:cluster_uuids, resolve_cluster_uuids)

- @logger.info("Starting pipeline", default_logging_keys(
+ pipeline_log_params = default_logging_keys(
    "pipeline.workers" => pipeline_workers,
    "pipeline.batch.size" => batch_size,
    "pipeline.batch.delay" => batch_delay,
- "pipeline.max_inflight" => max_inflight))
+   "pipeline.max_inflight" => max_inflight,
+   "pipeline.sources" => pipeline_source_details)
+ @logger.info("Starting pipeline", pipeline_log_params)
+
  if max_inflight > MAX_INFLIGHT_WARN_THRESHOLD
  @logger.warn("CAUTION: Recommended inflight events max exceeded! Logstash will run with up to #{max_inflight} events in memory in your current configuration. If your message sizes are large this may cause instability with the default heap size. Please consider setting a non-standard heap size, changing the batch size (currently #{batch_size}), or changing the number of pipeline workers (currently #{pipeline_workers})", default_logging_keys)
  end
@@ -164,10 +164,12 @@ module LogStash; class Pipeline < BasePipeline
  collect_stats
  collect_dlq_stats

- @logger.info("Starting pipeline", default_logging_keys(
- "pipeline.workers" => settings.get("pipeline.workers"),
- "pipeline.batch.size" => settings.get("pipeline.batch.size"),
- "pipeline.batch.delay" => settings.get("pipeline.batch.delay")))
+ pipeline_log_params = default_logging_keys(
+   "pipeline.workers" => settings.get("pipeline.workers"),
+   "pipeline.batch.size" => settings.get("pipeline.batch.size"),
+   "pipeline.batch.delay" => settings.get("pipeline.batch.delay"),
+   "pipeline.sources" => pipeline_source_details)
+ @logger.info("Starting pipeline", pipeline_log_params)

  @finished_execution.make_false
  @finished_run.make_false
@@ -180,7 +182,11 @@ module LogStash; class Pipeline < BasePipeline
  @finished_run.make_true
  rescue => e
  close
- @logger.error("Pipeline aborted due to error", default_logging_keys(:exception => e, :backtrace => e.backtrace))
+ pipeline_log_params = default_logging_keys(
+   :exception => e,
+   :backtrace => e.backtrace,
+   "pipeline.sources" => pipeline_source_details)
+ @logger.error("Pipeline aborted due to error", pipeline_log_params)
  ensure
  @finished_execution.make_true
  end
@@ -254,6 +254,7 @@ class LogStash::Runner < Clamp::StrictCommand
  java.lang.System.setProperty("ls.logs", setting("path.logs"))
  java.lang.System.setProperty("ls.log.format", setting("log.format"))
  java.lang.System.setProperty("ls.log.level", setting("log.level"))
+ java.lang.System.setProperty("ls.pipeline.separate_logs", setting("pipeline.separate_logs").to_s)
  unless java.lang.System.getProperty("log4j.configurationFile")
  log4j_config_location = ::File.join(setting("path.settings"), "log4j2.properties")

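The runner hunk exports the new setting as a JVM system property before the log4j2 configuration file is resolved, which lets the logging configuration branch on it (log4j2 properties files can read system properties via ${sys:...} lookups). A minimal JRuby sketch of that property round-trip, outside of Logstash:

# Minimal JRuby sketch: java.lang.System is reached through JRuby's Java
# integration, just as in the runner code above.
require "java"

java.lang.System.setProperty("ls.pipeline.separate_logs", "true")

# Anything in the same JVM can read the flag; Logstash's log4j2.properties is
# the intended consumer, but a plain getProperty call shows it is visible.
puts java.lang.System.getProperty("ls.pipeline.separate_logs") # => "true"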
@@ -147,6 +147,14 @@ describe LogStash::Runner do
  expect(subject.run(args)).to eq(1)
  end
  end
+
+ context "with invalid field reference literal" do
+   let(:pipeline_string) { "input { } output { if [[f[[[oo] == [bar] { } }" }
+   it "should fail by returning a bad exit code" do
+     expect(logger).to receive(:fatal)
+     expect(subject.run(args)).to eq(1)
+   end
+ end
  end
  describe "pipeline settings" do
  let(:pipeline_string) { "input { stdin {} } output { stdout {} }" }
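
The added spec feeds the runner a config whose conditional uses a malformed field reference ([[f[[[oo] nests brackets illegally) and asserts that Logstash refuses to start: a fatal log entry plus exit code 1. For contrast, a well-formed version of the same conditional, written as the plain Ruby strings the specs use for pipeline configs:

# Both strings are Logstash pipeline configs exactly as the specs hand them to
# the runner; only the field-reference syntax in the conditional differs.
valid_pipeline   = "input { } output { if [foo] == [bar] { } }"     # parses
invalid_pipeline = "input { } output { if [[f[[[oo] == [bar] { } }" # rejected at startup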
@@ -1,7 +1,7 @@
  ---
  # alpha and beta qualifiers are now added via VERSION_QUALIFIER environment var
- logstash: 7.4.2
- logstash-core: 7.4.2
+ logstash: 7.5.0
+ logstash-core: 7.5.0
  logstash-core-plugin-api: 2.1.16

  # jruby must reference a *released* version of jruby which can be downloaded from the official download url
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-core
  version: !ruby/object:Gem::Version
- version: 7.4.2
+ version: 7.5.0
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-10-28 00:00:00.000000000 Z
+ date: 2019-11-26 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement