logstash-core 6.8.17-java → 7.0.0.alpha1-java

Files changed (80)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core/version.rb +3 -1
  3. data/lib/logstash/agent.rb +69 -85
  4. data/lib/logstash/api/modules/stats.rb +1 -1
  5. data/lib/logstash/compiler/lscl.rb +7 -7
  6. data/lib/logstash/config/config_ast.rb +1 -1
  7. data/lib/logstash/config/mixin.rb +1 -1
  8. data/lib/logstash/config/modules_common.rb +3 -3
  9. data/lib/logstash/dependency_report.rb +1 -2
  10. data/lib/logstash/environment.rb +4 -9
  11. data/lib/logstash/event.rb +1 -24
  12. data/lib/logstash/filter_delegator.rb +69 -2
  13. data/lib/logstash/filters/base.rb +2 -0
  14. data/lib/logstash/instrument/metric_store.rb +1 -1
  15. data/lib/logstash/instrument/periodic_poller/dlq.rb +7 -5
  16. data/lib/logstash/instrument/periodic_poller/jvm.rb +3 -3
  17. data/lib/logstash/instrument/periodic_poller/pq.rb +8 -6
  18. data/lib/logstash/instrument/periodic_pollers.rb +3 -3
  19. data/lib/logstash/java_pipeline.rb +11 -38
  20. data/lib/logstash/modules/kibana_config.rb +1 -1
  21. data/lib/logstash/modules/logstash_config.rb +1 -1
  22. data/lib/logstash/patches/resolv.rb +32 -17
  23. data/lib/logstash/pipeline.rb +11 -28
  24. data/lib/logstash/pipeline_action/base.rb +1 -1
  25. data/lib/logstash/pipeline_action/create.rb +13 -7
  26. data/lib/logstash/pipeline_action/reload.rb +12 -35
  27. data/lib/logstash/pipeline_action/stop.rb +6 -4
  28. data/lib/logstash/pipeline_settings.rb +1 -2
  29. data/lib/logstash/plugins/registry.rb +2 -5
  30. data/lib/logstash/runner.rb +0 -24
  31. data/lib/logstash/settings.rb +5 -5
  32. data/lib/logstash/state_resolver.rb +5 -5
  33. data/lib/logstash/util.rb +1 -11
  34. data/lib/logstash/util/duration_formatter.rb +1 -1
  35. data/lib/logstash/util/safe_uri.rb +0 -1
  36. data/lib/logstash/util/substitution_variables.rb +1 -22
  37. data/lib/logstash/util/thread_dump.rb +1 -1
  38. data/locales/en.yml +7 -16
  39. data/logstash-core.gemspec +11 -2
  40. data/spec/logstash/acked_queue_concurrent_stress_spec.rb +2 -2
  41. data/spec/logstash/agent/converge_spec.rb +31 -25
  42. data/spec/logstash/agent/metrics_spec.rb +1 -1
  43. data/spec/logstash/agent_spec.rb +7 -6
  44. data/spec/logstash/compiler/compiler_spec.rb +0 -28
  45. data/spec/logstash/config/config_ast_spec.rb +0 -15
  46. data/spec/logstash/config/mixin_spec.rb +2 -3
  47. data/spec/logstash/converge_result_spec.rb +1 -1
  48. data/spec/logstash/environment_spec.rb +4 -4
  49. data/spec/logstash/event_spec.rb +2 -10
  50. data/spec/logstash/filter_delegator_spec.rb +12 -2
  51. data/spec/logstash/filters/base_spec.rb +9 -45
  52. data/spec/logstash/instrument/periodic_poller/cgroup_spec.rb +2 -0
  53. data/spec/logstash/instrument/wrapped_write_client_spec.rb +1 -1
  54. data/spec/logstash/java_filter_delegator_spec.rb +11 -1
  55. data/spec/logstash/legacy_ruby_event_spec.rb +5 -6
  56. data/spec/logstash/patches_spec.rb +3 -1
  57. data/spec/logstash/pipeline_action/create_spec.rb +8 -14
  58. data/spec/logstash/pipeline_action/reload_spec.rb +9 -16
  59. data/spec/logstash/pipeline_action/stop_spec.rb +3 -4
  60. data/spec/logstash/queue_factory_spec.rb +1 -2
  61. data/spec/logstash/runner_spec.rb +0 -2
  62. data/spec/logstash/settings/array_coercible_spec.rb +1 -1
  63. data/spec/logstash/settings/bytes_spec.rb +2 -2
  64. data/spec/logstash/settings/port_range_spec.rb +1 -1
  65. data/spec/logstash/settings_spec.rb +0 -10
  66. data/spec/logstash/state_resolver_spec.rb +22 -26
  67. data/spec/logstash/util/safe_uri_spec.rb +0 -40
  68. data/spec/logstash/util/secretstore_spec.rb +1 -1
  69. data/spec/logstash/util/time_value_spec.rb +1 -1
  70. data/spec/logstash/util/wrapped_acked_queue_spec.rb +1 -1
  71. data/spec/logstash/webserver_spec.rb +5 -9
  72. data/spec/support/matchers.rb +19 -25
  73. data/spec/support/shared_contexts.rb +3 -3
  74. data/versions-gem-copy.yml +9 -9
  75. metadata +31 -44
  76. data/lib/logstash/patches/resolv_9270.rb +0 -2903
  77. data/lib/logstash/pipelines_registry.rb +0 -166
  78. data/lib/logstash/util/lazy_singleton.rb +0 -33
  79. data/spec/logstash/jruby_version_spec.rb +0 -15
  80. data/spec/logstash/pipelines_registry_spec.rb +0 -220
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 7b8da19f938e6b15255f8088a1fce18d781bad1dfcc2bbb6d616d242a9ebb16d
- data.tar.gz: c9c9ceb47edce2e36116239bb7e7dfb20d3fbafb68caec24cf6803d67bfecc55
+ metadata.gz: aac6580e3fcdb7a4ef97667263de8081ef5f747948508322dc44dcf58f649102
+ data.tar.gz: 62fe96390ead958576494f6507200be79183eb4d36daf3724b9b3f9116f686ea
  SHA512:
- metadata.gz: 5fe210acf54443eaa7adaffb5564215ce354b2b3f0e78e1205a32a551f0276125961621405cd931cef25d951f41cb50ab1d206a8556d0de11aac4dbf7904f847
- data.tar.gz: a890dc1973a8f5df0598b84d95cc7ee04d5b78f2e6d0c4e40b2d16a8e7b7ed67f80c44bf4b2d7af4e88db97d0dab94098859246ae4f82a650b041a3d7c4160f6
+ metadata.gz: 551463904e10b3d091962829e5f55c6a32435d6f39799d725a72c8b5fa7bf5051abc4cfdd2ab6e8b509a70d28b33e9cce1deedaae16e70fefd49fb7100a61b07
+ data.tar.gz: '08c65c30b5223731ff293615a5c321262eb2e93d6fa896d25b064ccc52de2e2b9b877e01976b156919f3e0bf4852f3c3ae261f40375332217ad246ebbb982d85'
data/lib/logstash-core/version.rb CHANGED
@@ -7,6 +7,8 @@ if !defined?(ALL_VERSIONS)
  require 'yaml'
  ALL_VERSIONS = YAML.load_file(File.expand_path("../../versions-gem-copy.yml", File.dirname(__FILE__)))
  end
+
  if !defined?(LOGSTASH_CORE_VERSION)
- LOGSTASH_CORE_VERSION = ALL_VERSIONS.fetch("logstash-core")
+ # PACKAGE_SUFFIX is declared in the artifact namespace from artifacts.rake
+ LOGSTASH_CORE_VERSION = defined?(PACKAGE_SUFFIX) ? "#{ALL_VERSIONS.fetch("logstash-core")}#{PACKAGE_SUFFIX}" : ALL_VERSIONS.fetch("logstash-core")
  end
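
The added ternary only affects the reported gem version when the build has defined a suffix constant; a minimal sketch with assumed values (PACKAGE_SUFFIX would normally come from artifacts.rake):

    # Hypothetical values, mirroring the logic added above.
    ALL_VERSIONS = { "logstash-core" => "7.0.0.alpha1" }
    PACKAGE_SUFFIX = "-SNAPSHOT" # assumed value, for illustration only

    version = defined?(PACKAGE_SUFFIX) ?
      "#{ALL_VERSIONS.fetch("logstash-core")}#{PACKAGE_SUFFIX}" :
      ALL_VERSIONS.fetch("logstash-core")
    # => "7.0.0.alpha1-SNAPSHOT"; without the constant it stays "7.0.0.alpha1"
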
data/lib/logstash/agent.rb CHANGED
@@ -8,7 +8,6 @@ require "logstash/webserver"
  require "logstash/config/source_loader"
  require "logstash/pipeline_action"
  require "logstash/state_resolver"
- require "logstash/pipelines_registry"
  require "stud/trap"
  require "uri"
  require "socket"
@@ -20,7 +19,7 @@ class LogStash::Agent
  include LogStash::Util::Loggable
  STARTED_AT = Time.now.freeze
 
- attr_reader :metric, :name, :settings, :dispatcher, :ephemeral_id, :pipeline_bus
+ attr_reader :metric, :name, :settings, :webserver, :dispatcher, :ephemeral_id, :pipelines, :pipeline_bus
  attr_accessor :logger
 
  # initialize method for LogStash::Agent
@@ -37,14 +36,11 @@ class LogStash::Agent
  # Mutex to synchonize in the exclusive method
  # Initial usage for the Ruby pipeline initialization which is not thread safe
  @exclusive_lock = Mutex.new
- @webserver_control_lock = Mutex.new
-
- @convergence_lock = Mutex.new
 
  # Special bus object for inter-pipelines communications. Used by the `pipeline` input/output
  @pipeline_bus = org.logstash.plugins.pipeline.PipelineBus.new
 
- @pipelines_registry = LogStash::PipelinesRegistry.new
+ @pipelines = java.util.concurrent.ConcurrentHashMap.new();
 
  @name = setting("node.name")
  @http_host = setting("http.host")
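
The registry object is replaced by a bare java.util.concurrent.ConcurrentHashMap; the accessors further down call the Java map methods directly. A minimal JRuby sketch with a hypothetical stand-in for a pipeline object:

    # JRuby only: the java.* namespace needs the JVM.
    pipelines = java.util.concurrent.ConcurrentHashMap.new

    pipelines.put(:main, :fake_pipeline)  # hypothetical value, for illustration
    pipelines.get(:main)                  # => :fake_pipeline
    pipelines.size                        # => 1
    pipelines.empty?                      # => false (JRuby exposes Java's isEmpty as empty?)
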
@@ -54,14 +50,10 @@ class LogStash::Agent
  id
 
  # Set the global FieldReference parsing mode
- parsing_mode = case setting('config.field_reference.parser')
- when 'COMPAT' then org.logstash.FieldReference::ParsingMode::COMPAT;
- when 'LEGACY' then org.logstash.FieldReference::ParsingMode::LEGACY;
- when 'STRICT' then org.logstash.FieldReference::ParsingMode::STRICT;
- else fail('Unsupported FieldReference parsing mode')
- end
- logger.debug("Setting global FieldReference parsing mode: #{parsing_mode}")
- org.logstash.FieldReference::set_parsing_mode(parsing_mode)
+ if @settings.set?('config.field_reference.parser')
+ # TODO: i18n
+ logger.warn("deprecated setting `config.field_reference.parser` set; field reference parsing is strict by default")
+ end
 
  # This is for backward compatibility in the tests
  if source_loader.nil?
@@ -122,17 +114,14 @@ class LogStash::Agent
  converge_state_and_update unless stopped?
  end
  else
- # exit with error status if the initial converge_state_and_update did not create any pipeline
- return 1 if @pipelines_registry.empty?
+ return 1 if clean_state?
 
  while !Stud.stop?
- # exit if all pipelines are terminated and none are reloading
- break if no_pipeline?
-
- # exit if there are no user defined pipelines (not system pipeline) and none are reloading
- break if !running_user_defined_pipelines?
-
- sleep(0.5)
+ if clean_state? || running_user_defined_pipelines?
+ sleep(0.5)
+ else
+ break
+ end
  end
  end
 
@@ -146,11 +135,11 @@ class LogStash::Agent
  end
 
  def running?
- @running.true?
+ @running.value
  end
 
  def stopped?
- @running.false?
+ !@running.value
  end
 
  def converge_state_and_update
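
Assuming @running is a concurrent-ruby Concurrent::AtomicBoolean (the make_true/make_false calls further down point that way), the two spellings are equivalent; a small sketch:

    require "concurrent"

    running = Concurrent::AtomicBoolean.new(false)
    running.false?    # => true  (predicate style used on the 6.8.x side)
    !running.value    # => true  (value style used on the alpha side)
    running.make_true
    running.true?     # => true
    running.value     # => true
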
@@ -165,7 +154,12 @@ class LogStash::Agent
  end
  end
 
- converge_result = resolve_actions_and_converge_state(results.response)
+ # We Lock any access on the pipelines, since the actions will modify the
+ # content of it.
+ converge_result = nil
+
+ pipeline_actions = resolve_actions(results.response)
+ converge_result = converge_state(pipeline_actions)
  update_metrics(converge_result)
 
  logger.info(
@@ -184,7 +178,7 @@ class LogStash::Agent
 
  # Calculate the Logstash uptime in milliseconds
  #
- # @return [Integer] Uptime in milliseconds
+ # @return [Fixnum] Uptime in milliseconds
  def uptime
  ((Time.now.to_f - STARTED_AT.to_f) * 1000.0).to_i
  end
@@ -239,48 +233,43 @@ class LogStash::Agent
  @id_path ||= ::File.join(settings.get("path.data"), "uuid")
  end
 
- #
- # Backward compatibility proxies to the PipelineRegistry
- #
-
  def get_pipeline(pipeline_id)
- @pipelines_registry.get_pipeline(pipeline_id)
+ pipelines.get(pipeline_id)
  end
 
  def pipelines_count
- @pipelines_registry.size
+ pipelines.size
  end
 
  def running_pipelines
- @pipelines_registry.running_pipelines
- end
+ pipelines.select {|id,pipeline| running_pipeline?(id) }
+ end
 
  def non_running_pipelines
- @pipelines_registry.non_running_pipelines
+ pipelines.select {|id,pipeline| !running_pipeline?(id) }
  end
 
  def running_pipelines?
- @pipelines_registry.running_pipelines.any?
+ running_pipelines_count > 0
  end
 
  def running_pipelines_count
- @pipelines_registry.running_pipelines.size
+ running_pipelines.size
  end
 
  def running_user_defined_pipelines?
- @pipelines_registry.running_user_defined_pipelines.any?
+ !running_user_defined_pipelines.empty?
  end
 
  def running_user_defined_pipelines
- @pipelines_registry.running_user_defined_pipelines
+ pipelines.select {|id, pipeline| running_pipeline?(id) && !pipeline.system? }
  end
 
- def no_pipeline?
- @pipelines_registry.running_pipelines.empty?
+ def with_running_user_defined_pipelines
+ yield running_user_defined_pipelines
  end
 
  private
-
  def transition_to_stopped
  @running.make_false
  end
@@ -289,15 +278,6 @@ class LogStash::Agent
  @running.make_true
  end
 
- # @param pipeline_configs [Array<Config::PipelineConfig>]
- # @return [ConvergeResult]
- def resolve_actions_and_converge_state(pipeline_configs)
- @convergence_lock.synchronize do
- pipeline_actions = resolve_actions(pipeline_configs)
- converge_state(pipeline_actions)
- end
- end
-
  # We depends on a series of task derived from the internal state and what
  # need to be run, theses actions are applied to the current pipelines to converge to
  # the desired state.
@@ -310,12 +290,11 @@ class LogStash::Agent
  #
  def converge_state(pipeline_actions)
  logger.debug("Converging pipelines state", :actions_count => pipeline_actions.size)
- fail("Illegal access to `LogStash::Agent#converge_state()` without exclusive lock at #{caller[1]}") unless @convergence_lock.owned?
 
  converge_result = LogStash::ConvergeResult.new(pipeline_actions.size)
 
  pipeline_actions.map do |action|
- Thread.new(action, converge_result) do |action, converge_result|
+ Thread.new do
  java.lang.Thread.currentThread().setName("Converge #{action}");
  # We execute every task we need to converge the current state of pipelines
  # for every task we will record the action result, that will help us
@@ -331,36 +310,34 @@ class LogStash::Agent
  # that we currently have.
  begin
  logger.debug("Executing action", :action => action)
- action_result = action.execute(self, @pipelines_registry)
+ action_result = action.execute(self, pipelines)
  converge_result.add(action, action_result)
 
  unless action_result.successful?
- logger.error("Failed to execute action",
- :id => action.pipeline_id,
- :action_type => action_result.class,
- :message => action_result.message,
- :backtrace => action_result.backtrace
- )
+ logger.error("Failed to execute action", :id => action.pipeline_id,
+ :action_type => action_result.class, :message => action_result.message,
+ :backtrace => action_result.backtrace)
  end
- rescue SystemExit, Exception => e
+ rescue SystemExit => e
+ converge_result.add(action, e)
+ rescue Exception => e
  logger.error("Failed to execute action", :action => action, :exception => e.class.name, :message => e.message, :backtrace => e.backtrace)
  converge_result.add(action, e)
  end
  end
  end.each(&:join)
 
- logger.trace? && logger.trace("Converge results",
- :success => converge_result.success?,
- :failed_actions => converge_result.failed_actions.collect { |a, r| "id: #{a.pipeline_id}, action_type: #{a.class}, message: #{r.message}" },
- :successful_actions => converge_result.successful_actions.collect { |a, r| "id: #{a.pipeline_id}, action_type: #{a.class}" }
- )
+ if logger.trace?
+ logger.trace("Converge results", :success => converge_result.success?,
+ :failed_actions => converge_result.failed_actions.collect { |a, r| "id: #{a.pipeline_id}, action_type: #{a.class}, message: #{r.message}" },
+ :successful_actions => converge_result.successful_actions.collect { |a, r| "id: #{a.pipeline_id}, action_type: #{a.class}" })
+ end
 
  converge_result
  end
 
  def resolve_actions(pipeline_configs)
- fail("Illegal access to `LogStash::Agent#resolve_actions()` without exclusive lock at #{caller[1]}") unless @convergence_lock.owned?
- @state_resolver.resolve(@pipelines_registry, pipeline_configs)
+ @state_resolver.resolve(@pipelines, pipeline_configs)
  end
 
  def dispatch_events(converge_results)
@@ -377,24 +354,20 @@ class LogStash::Agent
  end
 
  def start_webserver
- @webserver_control_lock.synchronize do
- options = {:http_host => @http_host, :http_ports => @http_port, :http_environment => @http_environment }
- @webserver = LogStash::WebServer.new(@logger, self, options)
- @webserver_thread = Thread.new(@webserver) do |webserver|
- LogStash::Util.set_thread_name("Api Webserver")
- webserver.run
- end
+ options = {:http_host => @http_host, :http_ports => @http_port, :http_environment => @http_environment }
+ @webserver = LogStash::WebServer.new(@logger, self, options)
+ @webserver_thread = Thread.new(@webserver) do |webserver|
+ LogStash::Util.set_thread_name("Api Webserver")
+ webserver.run
  end
  end
 
  def stop_webserver
- @webserver_control_lock.synchronize do
- if @webserver
- @webserver.stop
- if @webserver_thread.join(5).nil?
- @webserver_thread.kill
- @webserver_thread.join
- end
+ if @webserver
+ @webserver.stop
+ if @webserver_thread.join(5).nil?
+ @webserver_thread.kill
+ @webserver_thread.join
  end
  end
  end
@@ -422,14 +395,25 @@ class LogStash::Agent
  end
 
  def shutdown_pipelines
- logger.debug("Shutting down all pipelines", :pipelines_count => running_pipelines_count)
+ logger.debug("Shutting down all pipelines", :pipelines_count => pipelines_count)
 
  # In this context I could just call shutdown, but I've decided to
  # use the stop action implementation for that so we have the same code.
  # This also give us some context into why a shutdown is failing
- resolve_actions_and_converge_state([]) # We stop all the pipeline, so we converge to a empty state
+ pipeline_actions = resolve_actions([]) # We stop all the pipeline, so we converge to a empty state
+ converge_state(pipeline_actions)
  end
 
+ def running_pipeline?(pipeline_id)
+ pipeline = get_pipeline(pipeline_id)
+ return false unless pipeline
+ thread = pipeline.thread
+ thread.is_a?(Thread) && thread.alive?
+ end
+
+ def clean_state?
+ pipelines.empty?
+ end
 
  def setting(key)
  @settings.get(key)
data/lib/logstash/api/modules/stats.rb CHANGED
@@ -10,7 +10,7 @@ module LogStash
  # return hot threads information
  get "/jvm/hot_threads" do
  begin
- top_threads_count = params["threads"] || 10
+ top_threads_count = params["threads"] || 3
  ignore_idle_threads = params["ignore_idle_threads"] || true
  options = {
  :threads => top_threads_count.to_i,
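
This handler appears to back the node hot-threads API, so the lower default of 3 can still be overridden per request. A small Ruby sketch, assuming a local instance on the default API port 9600:

    require "uri"
    require "net/http"

    # Ask for 10 hot threads instead of the default of 3.
    uri = URI("http://localhost:9600/_node/hot_threads?threads=10&ignore_idle_threads=true")
    puts Net::HTTP.get(uri)
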
data/lib/logstash/compiler/lscl.rb CHANGED
@@ -3,7 +3,6 @@ require "treetop"
  require "logstash/compiler/treetop_monkeypatches"
  require "logstash/compiler/lscl/helpers"
  require "logstash/config/string_escape"
- require "logstash/util"
 
  java_import org.logstash.config.ir.DSL
  java_import org.logstash.common.SourceWithMetadata
@@ -112,11 +111,11 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
  # hash value; e.g., `{"match" => {"baz" => "bar"}, "match" => {"foo" => "bulb"}}` is
  # interpreted as `{"match" => {"baz" => "bar", "foo" => "blub"}}`.
  # (NOTE: this bypasses `AST::Hash`'s ability to detect duplicate keys)
- hash[k] = ::LogStash::Util.hash_merge_many(existing, v)
+ hash[k] = existing.merge(v)
  elsif existing.kind_of?(::Array)
  hash[k] = existing.push(*v)
  else
- hash[k] = [existing, v] unless v == existing
+ hash[k] = existing + v
  end
  hash
  end
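
A plain-Ruby sketch of the duplicate-key folding described in the comment above, with hypothetical values; note that Hash#merge is shallow, so if both sides carried the same inner key the later value would simply win:

    existing = { "baz" => "bar" }   # value already stored under hash["match"]
    incoming = { "foo" => "blub" }  # value from the duplicated "match" key

    existing.merge(incoming)
    # => {"baz"=>"bar", "foo"=>"blub"}
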
@@ -165,8 +164,8 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
  class Number < Value
  def expr
  jdsl.eValue(source_meta, text_value.include?(".") ?
- Float(text_value) :
- Integer(text_value))
+ text_value.to_f :
+ text_value.to_i)
  end
  end
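
One behavioral note worth illustrating: Kernel#Float and Kernel#Integer (the 6.8.x side) reject malformed literals, while String#to_f and String#to_i (the alpha side) coerce leniently. A quick plain-Ruby sketch:

    Float("1.5")   # => 1.5
    "1.5".to_f     # => 1.5
    "4 2".to_i     # => 4 (lenient: takes the leading digits and stops)

    begin
      Integer("4 2")
    rescue ArgumentError => e
      e.message    # strict: invalid value for Integer(): "4 2"
    end
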
@@ -318,9 +317,10 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
 
  def precedence(op)
  # Believe this is right for logstash?
- if op == AND_METHOD
+ case op
+ when AND_METHOD
  2
- elsif op == OR_METHOD
+ when OR_METHOD
  1
  else
  raise ArgumentError, "Unexpected operator #{op}"
data/lib/logstash/config/config_ast.rb CHANGED
@@ -136,7 +136,7 @@ module LogStash; module Config; module AST
  events.each{|e| block.call(e)}
  end
 
- if !@generated_objects[:#{name}].nil? && @generated_objects[:#{name}].has_flush
+ if @generated_objects[:#{name}].respond_to?(:flush)
  @periodic_flushers << @generated_objects[:#{name}_flush] if @generated_objects[:#{name}].periodic_flush
  @shutdown_flushers << @generated_objects[:#{name}_flush]
  end
data/lib/logstash/config/mixin.rb CHANGED
@@ -323,7 +323,7 @@ module LogStash::Config::Mixin
  if config_settings[:list]
  value = Array(value) # coerce scalars to lists
  # Empty lists are converted to nils
- return true, [] if value.empty?
+ return true, nil if value.empty?
 
  validated_items = value.map {|v| validate_value(v, config_val)}
  is_valid = validated_items.all? {|sr| sr[0] }
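
For context, Kernel#Array performs the scalar-to-list coercion shown above; a short sketch of that plus the changed empty-list return, with hypothetical values:

    Array(nil)           # => []
    Array("localhost")   # => ["localhost"]
    Array(%w(a b))       # => ["a", "b"]

    # 6.8.17 returns the empty array itself (true, []); the alpha returned
    # (true, nil), so an empty list setting read as "unset" downstream.
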
data/lib/logstash/config/modules_common.rb CHANGED
@@ -27,7 +27,7 @@ module LogStash module Config
  end
 
  if modules_array.empty?
- # no specifed modules
+ # no specified modules
  return pipelines
  end
  logger.debug("Specified modules", :modules_array => modules_array.to_s)
@@ -73,9 +73,9 @@ module LogStash module Config
  LogStash::Modules::SettingsMerger.merge_kibana_auth!(module_hash)
  current_module.with_settings(module_hash)
  config_test = settings.get("config.test_and_exit")
- modul_setup = settings.get("modules_setup")
+ module_setup = settings.get("modules_setup")
  # Only import data if it's not a config test and --setup is true
- if !config_test && modul_setup
+ if !config_test && module_setup
  logger.info("Setting up the #{module_name} module")
  esclient = LogStash::ElasticsearchClient.build(module_hash)
  kbnclient = LogStash::Modules::KibanaClient.new(module_hash)
data/lib/logstash/dependency_report.rb CHANGED
@@ -32,9 +32,8 @@ class LogStash::DependencyReport < Clamp::Command
  command = ["./gradlew", "generateLicenseReport", "-PlicenseReportInputCSV=#{ruby_output_path}", "-PlicenseReportOutputCSV=#{output_path}"]
  puts "Executing #{command}"
  system(*command)
-
  if $?.exitstatus != 0
- raise "generateLicenseReport failed with exit status #{$?.exitstatus}"
+ raise "Could not run gradle java deps! Exit status #{$?.exitstatus}"
  end
 
  nil
data/lib/logstash/environment.rb CHANGED
@@ -33,7 +33,7 @@ module LogStash
  Setting::Boolean.new("config.reload.automatic", false),
  Setting::TimeValue.new("config.reload.interval", "3s"), # in seconds
  Setting::Boolean.new("config.support_escapes", false),
- Setting::String.new("config.field_reference.parser", "COMPAT", true, %w(STRICT COMPAT LEGACY)),
+ Setting::String.new("config.field_reference.parser", "STRICT", true, %w(STRICT)),
  Setting::Boolean.new("metric.collect", true),
  Setting::String.new("pipeline.id", "main"),
  Setting::Boolean.new("pipeline.system", false),
@@ -42,7 +42,7 @@ module LogStash
  Setting::PositiveInteger.new("pipeline.batch.size", 125),
  Setting::Numeric.new("pipeline.batch.delay", 50), # in milliseconds
  Setting::Boolean.new("pipeline.unsafe_shutdown", false),
- Setting::Boolean.new("pipeline.java_execution", false),
+ Setting::Boolean.new("pipeline.java_execution", true),
  Setting::Boolean.new("pipeline.reloadable", true),
  Setting.new("path.plugins", Array, []),
  Setting::NullableString.new("interactive", nil, false),
@@ -62,7 +62,6 @@ module LogStash
  Setting::Numeric.new("queue.checkpoint.acks", 1024), # 0 is unlimited
  Setting::Numeric.new("queue.checkpoint.writes", 1024), # 0 is unlimited
  Setting::Numeric.new("queue.checkpoint.interval", 1000), # 0 is no time-based checkpointing
- Setting::Boolean.new("queue.checkpoint.retry", false),
  Setting::Boolean.new("dead_letter_queue.enable", false),
  Setting::Bytes.new("dead_letter_queue.max_bytes", "1024mb"),
  Setting::TimeValue.new("slowlog.threshold.warn", "-1"),
@@ -164,15 +163,11 @@ module LogStash
  end
 
  def windows?
- host_os =~ WINDOW_OS_RE
+ RbConfig::CONFIG['host_os'] =~ WINDOW_OS_RE
  end
 
  def linux?
- host_os =~ LINUX_OS_RE
- end
-
- def host_os
- RbConfig::CONFIG['host_os']
+ RbConfig::CONFIG['host_os'] =~ LINUX_OS_RE
  end
 
  def locales_path(path)
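
Both predicates now read RbConfig directly; a tiny sketch of the strings involved (examples are indicative, not exhaustive):

    require "rbconfig"

    RbConfig::CONFIG['host_os']
    # e.g. "linux-gnu" on Linux, "darwin21" on macOS, "mingw32" on Windows;
    # these are the strings WINDOW_OS_RE and LINUX_OS_RE are matched against.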