logstash-core 6.0.0.alpha2-java → 6.0.0.beta1-java

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (110)
  1. checksums.yaml +5 -5
  2. data/gemspec_jars.rb +6 -4
  3. data/lib/logstash-core/logstash-core.jar +0 -0
  4. data/lib/logstash-core/logstash-core.rb +2 -2
  5. data/lib/logstash-core/version.rb +1 -1
  6. data/lib/logstash-core_jars.rb +14 -10
  7. data/lib/logstash/agent.rb +4 -2
  8. data/lib/logstash/api/commands/default_metadata.rb +1 -1
  9. data/lib/logstash/api/commands/hot_threads_reporter.rb +8 -2
  10. data/lib/logstash/api/commands/node.rb +2 -2
  11. data/lib/logstash/api/commands/stats.rb +2 -2
  12. data/lib/logstash/bootstrap_check/bad_ruby.rb +2 -2
  13. data/lib/logstash/bootstrap_check/default_config.rb +2 -3
  14. data/lib/logstash/compiler.rb +12 -12
  15. data/lib/logstash/compiler/lscl.rb +17 -7
  16. data/lib/logstash/compiler/treetop_monkeypatches.rb +1 -0
  17. data/lib/logstash/config/config_ast.rb +11 -1
  18. data/lib/logstash/config/mixin.rb +5 -0
  19. data/lib/logstash/config/modules_common.rb +101 -0
  20. data/lib/logstash/config/source/base.rb +75 -0
  21. data/lib/logstash/config/source/local.rb +52 -50
  22. data/lib/logstash/config/source/modules.rb +55 -0
  23. data/lib/logstash/config/source/multi_local.rb +54 -10
  24. data/lib/logstash/config/source_loader.rb +1 -0
  25. data/lib/logstash/config/string_escape.rb +27 -0
  26. data/lib/logstash/elasticsearch_client.rb +142 -0
  27. data/lib/logstash/environment.rb +5 -1
  28. data/lib/logstash/event.rb +0 -1
  29. data/lib/logstash/instrument/global_metrics.rb +13 -0
  30. data/lib/logstash/instrument/metric_store.rb +16 -13
  31. data/lib/logstash/instrument/metric_type/counter.rb +6 -18
  32. data/lib/logstash/instrument/metric_type/gauge.rb +6 -12
  33. data/lib/logstash/instrument/periodic_poller/dlq.rb +19 -0
  34. data/lib/logstash/instrument/periodic_pollers.rb +3 -1
  35. data/lib/logstash/logging/logger.rb +43 -14
  36. data/lib/logstash/modules/cli_parser.rb +74 -0
  37. data/lib/logstash/modules/elasticsearch_config.rb +22 -0
  38. data/lib/logstash/modules/elasticsearch_importer.rb +37 -0
  39. data/lib/logstash/modules/elasticsearch_resource.rb +10 -0
  40. data/lib/logstash/modules/file_reader.rb +36 -0
  41. data/lib/logstash/modules/kibana_base.rb +24 -0
  42. data/lib/logstash/modules/kibana_client.rb +122 -0
  43. data/lib/logstash/modules/kibana_config.rb +125 -0
  44. data/lib/logstash/modules/kibana_dashboards.rb +36 -0
  45. data/lib/logstash/modules/kibana_importer.rb +17 -0
  46. data/lib/logstash/modules/kibana_resource.rb +10 -0
  47. data/lib/logstash/modules/kibana_settings.rb +40 -0
  48. data/lib/logstash/modules/logstash_config.rb +120 -0
  49. data/lib/logstash/modules/resource_base.rb +38 -0
  50. data/lib/logstash/modules/scaffold.rb +50 -0
  51. data/lib/logstash/modules/settings_merger.rb +23 -0
  52. data/lib/logstash/modules/util.rb +17 -0
  53. data/lib/logstash/namespace.rb +1 -0
  54. data/lib/logstash/pipeline.rb +66 -27
  55. data/lib/logstash/pipeline_settings.rb +1 -0
  56. data/lib/logstash/plugins/registry.rb +1 -0
  57. data/lib/logstash/runner.rb +47 -3
  58. data/lib/logstash/settings.rb +20 -1
  59. data/lib/logstash/util/dead_letter_queue_manager.rb +1 -1
  60. data/lib/logstash/util/safe_uri.rb +146 -11
  61. data/lib/logstash/util/thread_dump.rb +4 -3
  62. data/lib/logstash/util/wrapped_acked_queue.rb +28 -24
  63. data/lib/logstash/util/wrapped_synchronous_queue.rb +19 -20
  64. data/lib/logstash/version.rb +1 -1
  65. data/locales/en.yml +56 -1
  66. data/logstash-core.gemspec +6 -4
  67. data/spec/logstash/agent/converge_spec.rb +2 -2
  68. data/spec/logstash/agent_spec.rb +11 -3
  69. data/spec/logstash/api/modules/logging_spec.rb +13 -7
  70. data/spec/logstash/api/modules/node_plugins_spec.rb +23 -5
  71. data/spec/logstash/api/modules/node_spec.rb +17 -15
  72. data/spec/logstash/api/modules/node_stats_spec.rb +0 -1
  73. data/spec/logstash/api/modules/plugins_spec.rb +40 -9
  74. data/spec/logstash/api/modules/root_spec.rb +0 -1
  75. data/spec/logstash/api/rack_app_spec.rb +2 -1
  76. data/spec/logstash/compiler/compiler_spec.rb +54 -7
  77. data/spec/logstash/config/config_ast_spec.rb +47 -8
  78. data/spec/logstash/config/mixin_spec.rb +14 -2
  79. data/spec/logstash/config/pipeline_config_spec.rb +7 -7
  80. data/spec/logstash/config/source/local_spec.rb +5 -2
  81. data/spec/logstash/config/source/multi_local_spec.rb +56 -10
  82. data/spec/logstash/config/source_loader_spec.rb +1 -1
  83. data/spec/logstash/config/string_escape_spec.rb +24 -0
  84. data/spec/logstash/event_spec.rb +9 -0
  85. data/spec/logstash/filters/base_spec.rb +1 -1
  86. data/spec/logstash/instrument/metric_store_spec.rb +2 -3
  87. data/spec/logstash/instrument/metric_type/counter_spec.rb +0 -12
  88. data/spec/logstash/instrument/metric_type/gauge_spec.rb +1 -8
  89. data/spec/logstash/instrument/periodic_poller/dlq_spec.rb +17 -0
  90. data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +1 -1
  91. data/spec/logstash/legacy_ruby_event_spec.rb +0 -9
  92. data/spec/logstash/legacy_ruby_timestamp_spec.rb +19 -14
  93. data/spec/logstash/modules/cli_parser_spec.rb +129 -0
  94. data/spec/logstash/modules/logstash_config_spec.rb +56 -0
  95. data/spec/logstash/modules/scaffold_spec.rb +239 -0
  96. data/spec/logstash/pipeline_dlq_commit_spec.rb +1 -1
  97. data/spec/logstash/pipeline_spec.rb +87 -20
  98. data/spec/logstash/runner_spec.rb +122 -5
  99. data/spec/logstash/setting_spec.rb +2 -2
  100. data/spec/logstash/settings/splittable_string_array_spec.rb +51 -0
  101. data/spec/logstash/timestamp_spec.rb +8 -2
  102. data/spec/logstash/util/safe_uri_spec.rb +16 -0
  103. data/spec/logstash/util/wrapped_acked_queue_spec.rb +63 -0
  104. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +0 -22
  105. data/spec/support/helpers.rb +1 -1
  106. data/spec/support/matchers.rb +21 -4
  107. metadata +102 -19
  108. data/lib/logstash/instrument/metric_type/base.rb +0 -31
  109. data/lib/logstash/program.rb +0 -14
  110. data/lib/logstash/string_interpolation.rb +0 -18
data/lib/logstash/modules/resource_base.rb
@@ -0,0 +1,38 @@
+ # encoding: utf-8
+ require "logstash/namespace"
+ require "logstash/json"
+ require_relative "file_reader"
+
+ module LogStash module Modules module ResourceBase
+   attr_reader :base, :content_type, :content_path, :content_id
+
+   def initialize(base, content_type, content_path, content = nil, content_id = nil)
+     @base, @content_type, @content_path = base, content_type, content_path
+     @content_id = content_id || ::File.basename(@content_path, ".*")
+     # content at this time will be a JSON string
+     @content = content
+     if !@content.nil?
+       @content_as_object = LogStash::Json.load(@content) rescue {}
+     end
+   end
+
+   def content
+     @content ||= FileReader.read(@content_path)
+   end
+
+   def to_s
+     "#{base}, #{content_type}, #{content_path}, #{content_id}"
+   end
+
+   def content_as_object
+     @content_as_object ||= FileReader.read_json(@content_path) rescue nil
+   end
+
+   def <=>(other)
+     to_s <=> other.to_s
+   end
+
+   def ==(other)
+     to_s == other.to_s
+   end
+ end end end
data/lib/logstash/modules/scaffold.rb
@@ -0,0 +1,50 @@
+ # encoding: utf-8
+ require "logstash/namespace"
+ require "logstash/logging"
+ require "erb"
+
+ require_relative "elasticsearch_config"
+ require_relative "kibana_config"
+ require_relative "logstash_config"
+
+ module LogStash module Modules class Scaffold
+   include LogStash::Util::Loggable
+
+   attr_reader :directory, :module_name, :kibana_version_parts
+   attr_reader :kibana_configuration, :logstash_configuration, :elasticsearch_configuration
+
+   def initialize(name, directory)
+     @module_name = name
+     @directory = directory # this is the 'configuration folder in the GEM root.'
+     @kibana_version_parts = "6.0.0".split('.') # this is backup in case kibana client fails to connect
+   end
+
+   def add_kibana_version(version_parts)
+     @kibana_version_parts = version_parts
+   end
+
+   def import(import_engine, kibana_import_engine)
+     @elasticsearch_configuration.resources.each do |resource|
+       import_engine.put(resource)
+     end
+     @kibana_configuration.resources.each do |resource|
+       kibana_import_engine.put(resource)
+     end
+   end
+
+   def with_settings(module_settings)
+     @logstash_configuration = LogStashConfig.new(self, module_settings)
+     @kibana_configuration = KibanaConfig.new(self, module_settings)
+     @elasticsearch_configuration = ElasticsearchConfig.new(self, module_settings)
+     self
+   end
+
+   def config_string()
+     # settings should be set earlier by the caller.
+     # settings should be the subset from the YAML file with a structure like
+     # {"name" => "plugin name", "k1" => "v1", "k2" => "v2"}, etc.
+     return nil if @logstash_configuration.nil?
+     @logstash_configuration.config_string
+   end
+ end end end # class LogStash::Modules::Scaffold
+
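The Scaffold class above is the hub of the new modules feature: with_settings builds the Elasticsearch, Kibana and Logstash configuration objects, config_string returns the generated pipeline configuration, and import only needs engines that respond to put(resource). The following is a usage sketch, not a definitive recipe: the module name, directory path, settings hash and stub importer are illustrative, and config_string/import expect a real module "configuration" directory on disk inside a logstash-core 6.0.0.beta1 environment.

require "logstash/modules/scaffold"

# Illustrative module name and configuration directory.
scaffold = LogStash::Modules::Scaffold.new("example_module", "/path/to/gem/configuration")
scaffold.with_settings({ "name" => "example_module", "var.input.tcp.port" => "5044" })

# The generated Logstash pipeline configuration for this module.
puts scaffold.config_string

# import only calls put(resource) on the engines, so a stub is enough for a dry run.
stub_importer = Class.new { def put(resource); puts "would import: #{resource}"; end }.new
scaffold.import(stub_importer, stub_importer)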
data/lib/logstash/modules/settings_merger.rb
@@ -0,0 +1,23 @@
+ # encoding: utf-8
+ require "logstash/namespace"
+
+ module LogStash module Modules class SettingsMerger
+   def self.merge(cli_settings, yml_settings)
+     # both args are arrays of hashes, e.g.
+     # [{"name"=>"mod1", "var.input.tcp.port"=>"3333"}, {"name"=>"mod2"}]
+     # [{"name"=>"mod1", "var.input.tcp.port"=>2222, "var.kibana.username"=>"rupert", "var.kibana.password"=>"fotherington"}, {"name"=>"mod3", "var.input.tcp.port"=>4445}]
+     merged = []
+     # union and group_by preserves order
+     # union will also coalesce identical hashes
+     union_of_settings = (cli_settings | yml_settings)
+     grouped_by_name = union_of_settings.group_by{|e| e["name"]}
+     grouped_by_name.each do |name, array|
+       if array.size == 2
+         merged << array.first.merge(array.last)
+       else
+         merged.concat(array)
+       end
+     end
+     merged
+   end
+ end end end
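The merge above groups CLI-supplied and logstash.yml module settings by their "name" key; when a module appears in both arrays, the two hashes are combined with Hash#merge, so the entry that appears later in the union supplies the value for overlapping keys. Below is a standalone plain-Ruby sketch that mirrors the method body (the merge_module_settings name is made up; the sample data comes from the comments in the code):

# Mirrors LogStash::Modules::SettingsMerger.merge without requiring logstash.
def merge_module_settings(cli_settings, yml_settings)
  merged = []
  union_of_settings = (cli_settings | yml_settings) # CLI entries first, identical hashes coalesced
  union_of_settings.group_by { |e| e["name"] }.each do |_name, array|
    if array.size == 2
      merged << array.first.merge(array.last) # Hash#merge favours the argument (the later entry)
    else
      merged.concat(array)
    end
  end
  merged
end

cli = [{ "name" => "mod1", "var.input.tcp.port" => "3333" }, { "name" => "mod2" }]
yml = [{ "name" => "mod1", "var.input.tcp.port" => 2222, "var.kibana.username" => "rupert" },
       { "name" => "mod3", "var.input.tcp.port" => 4445 }]

p merge_module_settings(cli, yml)
# => [{"name"=>"mod1", "var.input.tcp.port"=>2222, "var.kibana.username"=>"rupert"},
#     {"name"=>"mod2"}, {"name"=>"mod3", "var.input.tcp.port"=>4445}]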
data/lib/logstash/modules/util.rb
@@ -0,0 +1,17 @@
+ # encoding: utf-8
+ require_relative "scaffold"
+
+ # This module function should be used when gems or
+ # x-pack defines modules in their folder structures.
+ module LogStash module Modules module Util
+   def self.register_local_modules(path)
+     modules_path = ::File.join(path, "modules")
+     ::Dir.foreach(modules_path) do |item|
+       # Ignore unix relative path ids
+       next if item == '.' or item == '..'
+       # Ignore non-directories
+       next if !::File.directory?(::File.join(modules_path, ::File::Separator, item))
+       LogStash::PLUGIN_REGISTRY.add(:modules, item, Scaffold.new(item, ::File.join(modules_path, item, "configuration")))
+     end
+   end
+ end end end
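register_local_modules scans <path>/modules and registers each subdirectory's configuration folder as a Scaffold in the plugin registry. A standalone sketch of the same walk, with the registry call replaced by a return value (the list_local_modules name and the /usr/share/logstash path are illustrative only):

# Mirrors the directory walk in LogStash::Modules::Util.register_local_modules,
# returning [module_name, configuration_dir] pairs instead of touching PLUGIN_REGISTRY.
def list_local_modules(path)
  modules_path = ::File.join(path, "modules")
  return [] unless ::File.directory?(modules_path)
  ::Dir.foreach(modules_path)
       .reject { |item| item == "." || item == ".." }                        # skip relative path ids
       .select { |item| ::File.directory?(::File.join(modules_path, item)) } # directories only
       .map    { |item| [item, ::File.join(modules_path, item, "configuration")] }
end

list_local_modules("/usr/share/logstash").each do |name, config_dir|
  puts "#{name} -> #{config_dir}"
end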
data/lib/logstash/namespace.rb
@@ -11,4 +11,5 @@ module LogStash
    module PluginMixins; end
    module PluginManager; end
    module Api; end
+   module Modules; end
  end # module LogStash
data/lib/logstash/pipeline.rb
@@ -24,19 +24,23 @@ require "logstash/filter_delegator"
  require "logstash/queue_factory"
  require "logstash/compiler"
  require "logstash/execution_context"
+ require "securerandom"

  java_import org.logstash.common.DeadLetterQueueFactory
+ java_import org.logstash.common.SourceWithMetadata
  java_import org.logstash.common.io.DeadLetterQueueWriter

  module LogStash; class BasePipeline
    include LogStash::Util::Loggable

-   attr_reader :settings, :config_str, :config_hash, :inputs, :filters, :outputs, :pipeline_id, :lir, :execution_context
+   attr_reader :settings, :config_str, :config_hash, :inputs, :filters, :outputs, :pipeline_id, :lir, :execution_context, :ephemeral_id
    attr_reader :pipeline_config

    def initialize(pipeline_config, namespaced_metric = nil, agent = nil)
      @logger = self.logger

+     @ephemeral_id = SecureRandom.uuid
+
      @pipeline_config = pipeline_config
      @config_str = pipeline_config.config_string
      @settings = pipeline_config.settings
@@ -57,16 +61,13 @@ module LogStash; class BasePipeline
      @outputs = nil
      @agent = agent

-     if settings.get_value("dead_letter_queue.enable")
-       @dlq_writer = DeadLetterQueueFactory.getWriter(pipeline_id, settings.get_value("path.dead_letter_queue"))
-     else
-       @dlq_writer = LogStash::Util::DummyDeadLetterQueueWriter.new
-     end
+     @dlq_writer = dlq_writer

      grammar = LogStashConfigParser.new
      parsed_config = grammar.parse(config_str)
      raise(ConfigurationError, grammar.failure_reason) if parsed_config.nil?

+     parsed_config.process_escape_sequences = settings.get_value("config.support_escapes")
      config_code = parsed_config.compile

      # config_code = BasePipeline.compileConfig(config_str)
@@ -84,9 +85,26 @@ module LogStash; class BasePipeline
      end
    end

+   def dlq_writer
+     if settings.get_value("dead_letter_queue.enable")
+       @dlq_writer = DeadLetterQueueFactory.getWriter(pipeline_id, settings.get_value("path.dead_letter_queue"), settings.get_value("dead_letter_queue.max_bytes"))
+     else
+       @dlq_writer = LogStash::Util::DummyDeadLetterQueueWriter.new
+     end
+   end
+
+   def close_dlq_writer
+     @dlq_writer.close
+     if settings.get_value("dead_letter_queue.enable")
+       DeadLetterQueueFactory.release(pipeline_id)
+     end
+   end
+
    def compile_lir
-     source_with_metadata = org.logstash.common.SourceWithMetadata.new("str", "pipeline", self.config_str)
-     LogStash::Compiler.compile_sources(source_with_metadata)
+     sources_with_metadata = [
+       SourceWithMetadata.new("str", "pipeline", 0, 0, self.config_str)
+     ]
+     LogStash::Compiler.compile_sources(sources_with_metadata, @settings)
    end

    def plugin(plugin_type, name, *args)
@@ -144,6 +162,12 @@ module LogStash; class BasePipeline
    def non_reloadable_plugins
      (inputs + filters + outputs).select { |plugin| !plugin.reloadable? }
    end
+
+   private
+
+   def default_logging_keys(other_keys = {})
+     { :pipeline_id => pipeline_id }.merge(other_keys)
+   end
  end; end

  module LogStash; class Pipeline < BasePipeline
@@ -172,6 +196,7 @@ module LogStash; class Pipeline < BasePipeline
        Instrument::NullMetric.new
      end

+     @ephemeral_id = SecureRandom.uuid
      @settings = settings
      @reporter = PipelineReporter.new(@logger, self)
      @worker_threads = []
@@ -187,7 +212,7 @@ module LogStash; class Pipeline < BasePipeline

      @input_queue_client = @queue.write_client
      @filter_queue_client = @queue.read_client
-     @signal_queue = Queue.new
+     @signal_queue = java.util.concurrent.LinkedBlockingQueue.new
      # Note that @inflight_batches as a central mechanism for tracking inflight
      # batches will fail if we have multiple read clients here.
      @filter_queue_client.set_events_metric(metric.namespace([:stats, :events]))
@@ -244,6 +269,7 @@ module LogStash; class Pipeline < BasePipeline
      # Since we start lets assume that the metric namespace is cleared
      # this is useful in the context of pipeline reloading
      collect_stats
+     collect_dlq_stats

      @logger.debug("Starting pipeline", default_logging_keys)

@@ -320,7 +346,7 @@ module LogStash; class Pipeline < BasePipeline
    def close
      @filter_queue_client.close
      @queue.close
-     @dlq_writer.close
+     close_dlq_writer
    end

    def transition_to_running
@@ -382,6 +408,9 @@ module LogStash; class Pipeline < BasePipeline
      config_metric.gauge(:batch_delay, batch_delay)
      config_metric.gauge(:config_reload_automatic, @settings.get("config.reload.automatic"))
      config_metric.gauge(:config_reload_interval, @settings.get("config.reload.interval"))
+     config_metric.gauge(:dead_letter_queue_enabled, dlq_enabled?)
+     config_metric.gauge(:dead_letter_queue_path, @dlq_writer.get_path.to_absolute_path.to_s) if dlq_enabled?
+

      @logger.info("Starting pipeline", default_logging_keys(
        "pipeline.workers" => pipeline_workers,
@@ -415,6 +444,10 @@ module LogStash; class Pipeline < BasePipeline
      end
    end

+   def dlq_enabled?
+     @settings.get("dead_letter_queue.enable")
+   end
+
    # Main body of what a worker thread does
    # Repeatedly takes batches off the queue, filters, then outputs them
    def worker_loop(batch_size, batch_delay)
@@ -423,19 +456,22 @@ module LogStash; class Pipeline < BasePipeline
      @filter_queue_client.set_batch_dimensions(batch_size, batch_delay)

      while true
-       signal = @signal_queue.empty? ? NO_SIGNAL : @signal_queue.pop
+       signal = @signal_queue.poll || NO_SIGNAL
        shutdown_requested |= signal.shutdown? # latch on shutdown signal

        batch = @filter_queue_client.read_batch # metrics are started in read_batch
-       @events_consumed.increment(batch.size)
-       filter_batch(batch)
-       flush_filters_to_batch(batch, :final => false) if signal.flush?
-       output_batch(batch)
-       break if @force_shutdown.true? # Do not ack the current batch
-       @filter_queue_client.close_batch(batch)
+       if (batch.size > 0)
+         @events_consumed.increment(batch.size)
+         filter_batch(batch)
+         flush_filters_to_batch(batch, :final => false) if signal.flush?
+         output_batch(batch)
+         unless @force_shutdown.true? # ack the current batch
+           @filter_queue_client.close_batch(batch)
+         end
+       end

        # keep break at end of loop, after the read_batch operation, some pipeline specs rely on this "final read_batch" before shutdown.
-       break if shutdown_requested && !draining_queue?
+       break if (shutdown_requested && !draining_queue?) || @force_shutdown.true?
      end

      # we are shutting down, queue is drained if it was required, now perform a final flush.
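In the worker_loop hunk above, the signal queue switches from a Ruby Queue read with an empty?/pop pair to a java.util.concurrent.LinkedBlockingQueue read with a single non-blocking poll (push becomes put elsewhere in this file). A minimal standalone JRuby sketch of the poll semantics follows; it is not logstash code, and NO_SIGNAL is an illustrative placeholder:

require "java"

NO_SIGNAL = :no_signal # stand-in for the pipeline's no-op signal object

signal_queue = java.util.concurrent.LinkedBlockingQueue.new
p(signal_queue.poll || NO_SIGNAL) # => :no_signal (poll returns nil when the queue is empty)

signal_queue.put(:shutdown)       # put blocks only if the queue is bounded and full
p(signal_queue.poll || NO_SIGNAL) # => :shutdown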
@@ -601,7 +637,7 @@ module LogStash; class Pipeline < BasePipeline
      # Each worker thread will receive this exactly once!
      @worker_threads.each do |t|
        @logger.debug("Pushing shutdown", default_logging_keys(:thread => t.inspect))
-       @signal_queue.push(SHUTDOWN)
+       @signal_queue.put(SHUTDOWN)
      end

      @worker_threads.each do |t|
@@ -652,7 +688,7 @@ module LogStash; class Pipeline < BasePipeline
    def flush
      if @flushing.compare_and_set(false, true)
        @logger.debug? && @logger.debug("Pushing flush onto pipeline", default_logging_keys)
-       @signal_queue.push(FLUSH)
+       @signal_queue.put(FLUSH)
      end
    end

@@ -695,10 +731,16 @@ module LogStash; class Pipeline < BasePipeline
        .each {|t| t.delete("status") }
    end

+   def collect_dlq_stats
+     if dlq_enabled?
+       dlq_metric = @metric.namespace([:stats, :pipelines, pipeline_id.to_s.to_sym, :dlq])
+       dlq_metric.gauge(:queue_size_in_bytes, @dlq_writer.get_current_queue_size)
+     end
+   end
+
    def collect_stats
      pipeline_metric = @metric.namespace([:stats, :pipelines, pipeline_id.to_s.to_sym, :queue])
      pipeline_metric.gauge(:type, settings.get("queue.type"))
-
      if @queue.is_a?(LogStash::Util::WrappedAckedQueue) && @queue.queue.is_a?(LogStash::AckedQueue)
        queue = @queue.queue
        dir_path = queue.dir_path
@@ -750,12 +792,9 @@ module LogStash; class Pipeline < BasePipeline
    private

    def default_logging_keys(other_keys = {})
-     default_options = if thread
-       { :pipeline_id => pipeline_id, :thread => thread.inspect }
-     else
-       { :pipeline_id => pipeline_id }
-     end
-     default_options.merge(other_keys)
+     keys = super
+     keys[:thread] = thread.inspect if thread
+     keys
    end

    def draining_queue?
data/lib/logstash/pipeline_settings.rb
@@ -7,6 +7,7 @@ module LogStash
    # there are settings that the pipeline uses and can be changed per pipeline instance
    SETTINGS_WHITE_LIST = [
      "config.debug",
+     "config.support_escapes",
      "config.reload.automatic",
      "config.reload.interval",
      "config.string",
data/lib/logstash/plugins/registry.rb
@@ -3,6 +3,7 @@ require "rubygems/package"
  require "logstash/util/loggable"
  require "logstash/plugin"
  require "logstash/plugins/hooks_registry"
+ require "logstash/modules/scaffold"

  module LogStash module Plugins
    class Registry
data/lib/logstash/runner.rb
@@ -9,6 +9,7 @@ require "net/http"
  require "logstash/namespace"
  require "logstash-core/logstash-core"
  require "logstash/environment"
+ require "logstash/modules/cli_parser"

  LogStash::Environment.load_locale!

@@ -19,6 +20,7 @@ require "logstash/patches/clamp"
  require "logstash/settings"
  require "logstash/version"
  require "logstash/plugins/registry"
+ require "logstash/modules/util"
  require "logstash/bootstrap_check/default_config"
  require "logstash/bootstrap_check/bad_java"
  require "logstash/bootstrap_check/bad_ruby"
@@ -61,6 +63,17 @@ class LogStash::Runner < Clamp::StrictCommand
      :default => LogStash::SETTINGS.get_default("config.string"),
      :attribute_name => "config.string"

+   # Module settings
+   option ["--modules"], "MODULES",
+     I18n.t("logstash.runner.flag.modules"),
+     :multivalued => true,
+     :attribute_name => "modules_list"
+
+   option ["-M", "--modules.variable"], "MODULES_VARIABLE",
+     I18n.t("logstash.runner.flag.modules_variable"),
+     :multivalued => true,
+     :attribute_name => "modules_variable_list"
+
    # Pipeline settings
    option ["-w", "--pipeline.workers"], "COUNT",
      I18n.t("logstash.runner.flag.pipeline-workers"),
@@ -175,6 +188,7 @@ class LogStash::Runner < Clamp::StrictCommand
      # Default we check local sources: `-e`, `-f` and the logstash.yml options.
      @source_loader = LogStash::Config::SourceLoader.new(@settings)
      @source_loader.add_source(LogStash::Config::Source::Local.new(@settings))
+     @source_loader.add_source(LogStash::Config::Source::Modules.new(@settings))
      @source_loader.add_source(LogStash::Config::Source::MultiLocal.new(@settings))

      super(*args)
@@ -237,6 +251,9 @@ class LogStash::Runner < Clamp::StrictCommand
        return 0
      end

+     # Add local modules to the registry before everything else
+     LogStash::Modules::Util.register_local_modules(LogStash::Environment::LOGSTASH_HOME)
+
      # We configure the registry and load any plugin that can register hooks
      # with logstash, this need to be done before any operation.
      LogStash::PLUGIN_REGISTRY.setup!
@@ -249,6 +266,10 @@ class LogStash::Runner < Clamp::StrictCommand

      return start_shell(setting("interactive"), binding) if setting("interactive")

+     module_parser = LogStash::Modules::CLIParser.new(@modules_list, @modules_variable_list)
+     # Now populate Setting for modules.list with our parsed array.
+     @settings.set("modules.cli", module_parser.output)
+
      begin
        @bootstrap_checks.each { |bootstrap| bootstrap.check(@settings) }
      rescue LogStash::BootstrapCheckError => e
@@ -266,6 +287,29 @@ class LogStash::Runner < Clamp::StrictCommand

      @settings.format_settings.each {|line| logger.debug(line) }

+     # Check for absence of any configuration
+     # not bulletproof because we don't know yet if there
+     # are no pipelines from pipelines.yml
+     sources_without_conflict = []
+     unmatched_sources_conflict_messages = []
+     @source_loader.sources do |source|
+       if source.config_conflict?
+         if source.conflict_messages.any?
+           unmatched_sources_conflict_messages << source.conflict_messages.join(", ")
+         end
+       else
+         sources_without_conflict << source
+       end
+     end
+     if unmatched_sources_conflict_messages.any?
+       # i18n should be done at the sources side
+       signal_usage_error(unmatched_sources_conflict_messages.join(" "))
+       return 1
+     elsif sources_without_conflict.empty?
+       signal_usage_error(I18n.t("logstash.runner.missing-configuration"))
+       return 1
+     end
+
      if setting("config.test_and_exit")
        begin
          results = @source_loader.fetch
@@ -288,7 +332,7 @@ class LogStash::Runner < Clamp::StrictCommand
      end

      # lock path.data before starting the agent
-     @data_path_lock = FileLockFactory.getDefault().obtainLock(setting("path.data"), ".lock");
+     @data_path_lock = FileLockFactory.obtainLock(setting("path.data"), ".lock");

      @dispatcher.fire(:before_agent)
      @agent = create_agent(@settings, @source_loader)
@@ -333,7 +377,7 @@ class LogStash::Runner < Clamp::StrictCommand
      Stud::untrap("INT", sigint_id) unless sigint_id.nil?
      Stud::untrap("TERM", sigterm_id) unless sigterm_id.nil?
      Stud::untrap("HUP", sighup_id) unless sighup_id.nil?
-     FileLockFactory.getDefault().releaseLock(@data_path_lock) if @data_path_lock
+     FileLockFactory.releaseLock(@data_path_lock) if @data_path_lock
      @log_fd.close if @log_fd
    end # def self.main

@@ -456,7 +500,7 @@ class LogStash::Runner < Clamp::StrictCommand
        nil
      end
    end
-
+
    # is the user asking for CLI help subcommand?
    def cli_help?(args)
      # I know, double negative