logstash-core 6.3.2-java → 6.4.0-java

Files changed (139)
  1. checksums.yaml +4 -4
  2. data/lib/logstash/agent.rb +10 -6
  3. data/lib/logstash/api/modules/logging.rb +4 -0
  4. data/lib/logstash/api/service.rb +0 -1
  5. data/lib/logstash/bootstrap_check/default_config.rb +0 -2
  6. data/lib/logstash/bootstrap_check/persisted_queue_config.rb +0 -1
  7. data/lib/logstash/codecs/base.rb +2 -4
  8. data/lib/logstash/compiler.rb +2 -3
  9. data/lib/logstash/compiler/lscl.rb +0 -1
  10. data/lib/logstash/config/config_ast.rb +0 -1
  11. data/lib/logstash/config/cpu_core_strategy.rb +0 -1
  12. data/lib/logstash/config/defaults.rb +0 -1
  13. data/lib/logstash/config/file.rb +0 -2
  14. data/lib/logstash/config/mixin.rb +4 -7
  15. data/lib/logstash/config/modules_common.rb +0 -2
  16. data/lib/logstash/config/source/local.rb +1 -3
  17. data/lib/logstash/config/source/modules.rb +0 -2
  18. data/lib/logstash/config/source/multi_local.rb +0 -1
  19. data/lib/logstash/config/source_loader.rb +1 -2
  20. data/lib/logstash/dependency_report.rb +19 -6
  21. data/lib/logstash/elasticsearch_client.rb +0 -2
  22. data/lib/logstash/environment.rb +3 -2
  23. data/lib/logstash/errors.rb +1 -15
  24. data/lib/logstash/event.rb +0 -1
  25. data/lib/logstash/event_dispatcher.rb +1 -40
  26. data/lib/logstash/execution_context.rb +2 -19
  27. data/lib/logstash/filters/base.rb +0 -2
  28. data/lib/logstash/inputs/base.rb +2 -4
  29. data/lib/logstash/inputs/threadable.rb +2 -3
  30. data/lib/logstash/instrument/collector.rb +0 -2
  31. data/lib/logstash/instrument/metric.rb +1 -105
  32. data/lib/logstash/instrument/namespaced_metric.rb +1 -58
  33. data/lib/logstash/instrument/namespaced_null_metric.rb +1 -58
  34. data/lib/logstash/instrument/null_metric.rb +2 -71
  35. data/lib/logstash/instrument/periodic_poller/base.rb +0 -1
  36. data/lib/logstash/instrument/periodic_poller/cgroup.rb +0 -1
  37. data/lib/logstash/java_pipeline.rb +33 -222
  38. data/lib/logstash/json.rb +0 -1
  39. data/lib/logstash/logging.rb +0 -2
  40. data/lib/logstash/logging/logger.rb +1 -159
  41. data/lib/logstash/modules/cli_parser.rb +0 -4
  42. data/lib/logstash/modules/elasticsearch_config.rb +0 -3
  43. data/lib/logstash/modules/elasticsearch_importer.rb +0 -3
  44. data/lib/logstash/modules/elasticsearch_resource.rb +0 -1
  45. data/lib/logstash/modules/file_reader.rb +0 -2
  46. data/lib/logstash/modules/kibana_base.rb +0 -1
  47. data/lib/logstash/modules/kibana_client.rb +0 -2
  48. data/lib/logstash/modules/kibana_config.rb +0 -3
  49. data/lib/logstash/modules/kibana_dashboards.rb +0 -2
  50. data/lib/logstash/modules/kibana_importer.rb +0 -3
  51. data/lib/logstash/modules/kibana_resource.rb +0 -1
  52. data/lib/logstash/modules/kibana_settings.rb +0 -2
  53. data/lib/logstash/modules/logstash_config.rb +0 -1
  54. data/lib/logstash/modules/resource_base.rb +0 -1
  55. data/lib/logstash/modules/scaffold.rb +0 -3
  56. data/lib/logstash/modules/settings_merger.rb +0 -2
  57. data/lib/logstash/namespace.rb +2 -15
  58. data/lib/logstash/outputs/base.rb +3 -5
  59. data/lib/logstash/patches/clamp.rb +6 -0
  60. data/lib/logstash/pipeline.rb +38 -180
  61. data/lib/logstash/pipeline_action/create.rb +0 -2
  62. data/lib/logstash/pipeline_action/reload.rb +1 -4
  63. data/lib/logstash/pipeline_action/stop.rb +0 -2
  64. data/lib/logstash/pipeline_reporter.rb +2 -108
  65. data/lib/logstash/plugin.rb +4 -7
  66. data/lib/logstash/plugins/hooks_registry.rb +1 -63
  67. data/lib/logstash/plugins/registry.rb +3 -2
  68. data/lib/logstash/runner.rb +6 -7
  69. data/lib/logstash/settings.rb +4 -5
  70. data/lib/logstash/shutdown_watcher.rb +0 -119
  71. data/lib/logstash/universal_plugin.rb +1 -13
  72. data/lib/logstash/util.rb +0 -1
  73. data/lib/logstash/util/buftok.rb +1 -139
  74. data/lib/logstash/util/byte_value.rb +2 -3
  75. data/lib/logstash/util/charset.rb +0 -1
  76. data/lib/logstash/util/cloud_setting_auth.rb +0 -1
  77. data/lib/logstash/util/cloud_setting_id.rb +20 -8
  78. data/lib/logstash/util/dead_letter_queue_manager.rb +2 -61
  79. data/lib/logstash/util/decorators.rb +0 -1
  80. data/lib/logstash/util/loggable.rb +1 -31
  81. data/lib/logstash/util/modules_setting_array.rb +2 -2
  82. data/lib/logstash/util/password.rb +0 -1
  83. data/lib/logstash/util/plugin_version.rb +0 -1
  84. data/lib/logstash/util/safe_uri.rb +7 -8
  85. data/lib/logstash/util/secretstore.rb +1 -38
  86. data/lib/logstash/util/substitution_variables.rb +4 -5
  87. data/lib/logstash/util/worker_threads_default_printer.rb +0 -1
  88. data/locales/en.yml +28 -1
  89. data/spec/logstash/config/mixin_spec.rb +4 -4
  90. data/spec/logstash/converge_result_spec.rb +0 -1
  91. data/spec/logstash/event_dispatcher_spec.rb +0 -2
  92. data/spec/logstash/event_spec.rb +22 -26
  93. data/spec/logstash/execution_context_spec.rb +0 -2
  94. data/spec/logstash/filter_delegator_spec.rb +12 -28
  95. data/spec/logstash/inputs/base_spec.rb +4 -5
  96. data/spec/logstash/instrument/metric_spec.rb +0 -1
  97. data/spec/logstash/instrument/namespaced_metric_spec.rb +0 -2
  98. data/spec/logstash/instrument/namespaced_null_metric_spec.rb +1 -3
  99. data/spec/logstash/instrument/null_metric_spec.rb +1 -4
  100. data/spec/logstash/instrument/periodic_poller/base_spec.rb +0 -1
  101. data/spec/logstash/instrument/periodic_poller/os_spec.rb +0 -1
  102. data/spec/logstash/instrument/wrapped_write_client_spec.rb +0 -1
  103. data/spec/logstash/java_filter_delegator_spec.rb +0 -3
  104. data/spec/logstash/java_integration_spec.rb +0 -1
  105. data/spec/logstash/java_pipeline_spec.rb +1 -4
  106. data/spec/logstash/modules/cli_parser_spec.rb +1 -3
  107. data/spec/logstash/modules/scaffold_spec.rb +0 -1
  108. data/spec/logstash/outputs/base_spec.rb +9 -10
  109. data/spec/logstash/pipeline_action/create_spec.rb +2 -3
  110. data/spec/logstash/pipeline_action/reload_spec.rb +1 -2
  111. data/spec/logstash/pipeline_action/stop_spec.rb +0 -1
  112. data/spec/logstash/pipeline_dlq_commit_spec.rb +0 -6
  113. data/spec/logstash/pipeline_reporter_spec.rb +18 -4
  114. data/spec/logstash/pipeline_spec.rb +2 -6
  115. data/spec/logstash/plugin_spec.rb +1 -2
  116. data/spec/logstash/plugins/hooks_registry_spec.rb +0 -2
  117. data/spec/logstash/queue_factory_spec.rb +0 -1
  118. data/spec/logstash/runner_spec.rb +16 -9
  119. data/spec/logstash/settings/modules_spec.rb +3 -3
  120. data/spec/logstash/shutdown_watcher_spec.rb +0 -27
  121. data/spec/logstash/state_resolver_spec.rb +0 -1
  122. data/spec/logstash/util/buftok_spec.rb +0 -1
  123. data/spec/logstash/util/cloud_setting_id_spec.rb +55 -2
  124. data/spec/logstash/util/secretstore_spec.rb +10 -10
  125. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +2 -2
  126. data/versions-gem-copy.yml +2 -2
  127. metadata +2 -16
  128. data/lib/logstash/bootstrap_check/bad_java.rb +0 -16
  129. data/lib/logstash/bootstrap_check/bad_ruby.rb +0 -12
  130. data/lib/logstash/converge_result.rb +0 -103
  131. data/lib/logstash/instrument/global_metrics.rb +0 -13
  132. data/lib/logstash/instrument/snapshot.rb +0 -15
  133. data/lib/logstash/java_integration.rb +0 -116
  134. data/lib/logstash/logging/json.rb +0 -21
  135. data/lib/logstash/plugins/plugin_factory.rb +0 -107
  136. data/lib/logstash/queue_factory.rb +0 -34
  137. data/lib/logstash/util/retryable.rb +0 -40
  138. data/spec/logstash/output_delegator_spec.rb +0 -201
  139. data/spec/logstash/timestamp_spec.rb +0 -45
@@ -1,7 +1,5 @@
 # encoding: utf-8
-require "logstash/namespace"
 require "logstash/event"
-require "logstash/logging"
 require "logstash/plugin"
 require "logstash/config/mixin"
 require "logstash/util/decorators"
@@ -1,8 +1,6 @@
 # encoding: utf-8
-require "logstash/namespace"
 require "logstash/event"
 require "logstash/plugin"
-require "logstash/logging"
 require "logstash/config/mixin"
 require "logstash/codecs/base"
 require "logstash/util/decorators"
@@ -94,7 +92,7 @@ class LogStash::Inputs::Base < LogStash::Plugin
   def stop?
     @stop_called.value
   end
-
+
   def clone
     cloned = super
     cloned.codec = @codec.clone if @codec
@@ -105,7 +103,7 @@ class LogStash::Inputs::Base < LogStash::Plugin
     super
     # There is no easy way to propage an instance variable into the codec, because the codec
     # are created at the class level
-    # TODO(talevy): Codecs should have their own execution_context, for now they will inherit their
+    # TODO(talevy): Codecs should have their own execution_context, for now they will inherit their
     # parent plugin's
     @codec.execution_context = context
     context
@@ -1,15 +1,14 @@
 # encoding: utf-8
-require "logstash/namespace"
 require "logstash/inputs/base"
 
-# This is the threadable class for logstash inputs.
+# This is the threadable class for logstash inputs.
 # Use this class in your inputs if it can support multiple threads
 class LogStash::Inputs::Threadable < LogStash::Inputs::Base
 
   # Set this to the number of threads you want this input to spawn.
   # This is the same as declaring the input multiple times
   config :threads, :validate => :number, :default => 1
-
+
   def initialize(params)
     super
     @threadable = true
@@ -1,7 +1,5 @@
 # encoding: utf-8
-require "logstash/instrument/snapshot"
 require "logstash/instrument/metric_store"
-require "logstash/util/loggable"
 require "concurrent/timer_task"
 require "observer"
 require "singleton"
@@ -1,105 +1 @@
-# encoding: utf-8
-require "logstash/instrument/collector"
-require "concurrent"
-
-module LogStash module Instrument
-  class MetricException < Exception; end
-  class MetricNoKeyProvided < MetricException; end
-  class MetricNoBlockProvided < MetricException; end
-  class MetricNoNamespaceProvided < MetricException; end
-
-  # This class provide the interface between the code, the collector and the format
-  # of the recorded metric.
-  class Metric
-    attr_reader :collector
-
-    def initialize(collector)
-      @collector = collector
-    end
-
-    def increment(namespace, key, value = 1)
-      self.class.validate_key!(key)
-      collector.push(namespace, key, :counter, :increment, value)
-    end
-
-    def decrement(namespace, key, value = 1)
-      self.class.validate_key!(key)
-      collector.push(namespace, key, :counter, :decrement, value)
-    end
-
-    def gauge(namespace, key, value)
-      self.class.validate_key!(key)
-      collector.push(namespace, key, :gauge, :set, value)
-    end
-
-    def time(namespace, key)
-      self.class.validate_key!(key)
-
-      if block_given?
-        timer = TimedExecution.new(self, namespace, key)
-        content = yield
-        timer.stop
-        return content
-      else
-        TimedExecution.new(self, namespace, key)
-      end
-    end
-
-    def report_time(namespace, key, duration)
-      self.class.validate_key!(key)
-      collector.push(namespace, key, :counter, :increment, duration)
-    end
-
-    # This method return a metric instance tied to a specific namespace
-    # so instead of specifying the namespace on every call.
-    #
-    # Example:
-    # metric.increment(:namespace, :mykey, 200)
-    # metric.increment(:namespace, :mykey_2, 200)
-    #
-    # namespaced_metric = metric.namespace(:namespace)
-    # namespaced_metric.increment(:mykey, 200)
-    # namespaced_metric.increment(:mykey_2, 200)
-    # ```
-    #
-    # @param name [Array<String>] Name of the namespace
-    # @param name [String] Name of the namespace
-    def namespace(name)
-      raise MetricNoNamespaceProvided if name.nil? || name.empty?
-
-      NamespacedMetric.new(self, name)
-    end
-
-    def self.validate_key!(key)
-      raise MetricNoKeyProvided if key.nil? || key.empty?
-    end
-
-    private
-    # Allow to calculate the execution of a block of code.
-    # This class support 2 differents syntax a block or the return of
-    # the object itself, but in the later case the metric won't be recorded
-    # Until we call `#stop`.
-    #
-    # @see LogStash::Instrument::Metric#time
-    class TimedExecution
-      MILLISECONDS = 1_000.0.freeze
-
-      def initialize(metric, namespace, key)
-        @metric = metric
-        @namespace = namespace
-        @key = key
-        start
-      end
-
-      def start
-        @start_time = Time.now
-      end
-
-      def stop
-        execution_time = (MILLISECONDS * (Time.now - @start_time)).to_i
-        @metric.report_time(@namespace, @key, execution_time)
-        execution_time
-      end
-    end
-  end
-end; end
+# This file is kept for backwards compatibility with plugins that include it.
@@ -1,58 +1 @@
-# encoding: utf-8
-require "logstash/instrument/metric"
-
-module LogStash module Instrument
-  # This class acts a a proxy between the metric library and the user calls.
-  #
-  # This is the class that plugins authors will use to interact with the `MetricStore`
-  # It has the same public interface as `Metric` class but doesnt require to send
-  # the namespace on every call.
-  #
-  # @see Logstash::Instrument::Metric
-  class NamespacedMetric
-    attr_reader :namespace_name
-    # Create metric with a specific namespace
-    #
-    # @param metric [LogStash::Instrument::Metric] The metric instance to proxy
-    # @param namespace [Array] The namespace to use
-    def initialize(metric, namespace_name)
-      @metric = metric
-      @namespace_name = Array(namespace_name)
-    end
-
-    def increment(key, value = 1)
-      @metric.increment(namespace_name, key, value)
-    end
-
-    def decrement(key, value = 1)
-      @metric.decrement(namespace_name, key, value)
-    end
-
-    def gauge(key, value)
-      @metric.gauge(namespace_name, key, value)
-    end
-
-    def report_time(key, duration)
-      @metric.report_time(namespace_name, key, duration)
-    end
-
-    def time(key, &block)
-      @metric.time(namespace_name, key, &block)
-    end
-
-    def collector
-      @metric.collector
-    end
-
-    def counter(key)
-      collector.get(@namespace_name, key, :counter)
-    end
-
-    def namespace(name)
-      NamespacedMetric.new(metric, namespace_name + Array(name))
-    end
-
-    private
-    attr_reader :metric
-  end
-end; end
+# This file is kept for backwards compatibility with plugins that include it.
@@ -1,58 +1 @@
-# encoding: utf-8
-require "logstash/instrument/null_metric"
-
-module LogStash module Instrument
-  # This class acts a a proxy between the metric library and the user calls.
-  #
-  # This is the class that plugins authors will use to interact with the `MetricStore`
-  # It has the same public interface as `Metric` class but doesnt require to send
-  # the namespace on every call.
-  #
-  # @see Logstash::Instrument::Metric
-  class NamespacedNullMetric
-    attr_reader :namespace_name
-    # Create metric with a specific namespace
-    #
-    # @param metric [LogStash::Instrument::Metric] The metric instance to proxy
-    # @param namespace [Array] The namespace to use
-    def initialize(metric = nil, namespace_name = :null)
-      @metric = metric
-      @namespace_name = Array(namespace_name)
-    end
-
-    def increment(key, value = 1)
-    end
-
-    def decrement(key, value = 1)
-    end
-
-    def gauge(key, value)
-    end
-
-    def report_time(key, duration)
-    end
-
-    def time(key, &block)
-      if block_given?
-        yield
-      else
-        ::LogStash::Instrument::NullMetric::NullTimedExecution
-      end
-    end
-
-    def collector
-      @metric.collector
-    end
-
-    def counter(_)
-      ::LogStash::Instrument::NullMetric::NullGauge
-    end
-
-    def namespace(name)
-      NamespacedNullMetric.new(metric, namespace_name + Array(name))
-    end
-
-    private
-    attr_reader :metric
-  end
-end; end
+# This file is kept for backwards compatibility with plugins that include it directly.
@@ -1,71 +1,2 @@
-# encoding: utf-8
-require "logstash/instrument/metric"
-
-module LogStash module Instrument
-  # This class is used in the context when we disable the metric collection
-  # for specific plugin to replace the `NamespacedMetric` class with this one
-  # which doesn't produce any metric to the collector.
-  class NullMetric
-    attr_reader :namespace_name, :collector
-
-    def initialize(collector = nil)
-      @collector = collector
-    end
-
-    def increment(namespace, key, value = 1)
-      Metric.validate_key!(key)
-    end
-
-    def decrement(namespace, key, value = 1)
-      Metric.validate_key!(key)
-    end
-
-    def gauge(namespace, key, value)
-      Metric.validate_key!(key)
-    end
-
-    def report_time(namespace, key, duration)
-      Metric.validate_key!(key)
-    end
-
-    # We have to manually redefine this method since it can return an
-    # object this object also has to be implemented as a NullObject
-    def time(namespace, key)
-      Metric.validate_key!(key)
-      if block_given?
-        yield
-      else
-        NullTimedExecution
-      end
-    end
-
-    def counter(_)
-      NullGauge
-    end
-
-    def namespace(name)
-      raise MetricNoNamespaceProvided if name.nil? || name.empty?
-      NamespacedNullMetric.new(self, name)
-    end
-
-    def self.validate_key!(key)
-      raise MetricNoKeyProvided if key.nil? || key.empty?
-    end
-
-    private
-
-    class NullGauge
-      def self.increment(_)
-      end
-    end
-
-    # Null implementation of the internal timer class
-    #
-    # @see LogStash::Instrument::TimedExecution`
-    class NullTimedExecution
-      def self.stop
-        0
-      end
-    end
-  end
-end; end
+# The contents of this file have been ported to Java. It is included for for compatibility
+# with plugins that directly require it.
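
Note: the four instrument files above are reduced to compatibility stubs because their implementations now live in Java, while plugins can still require the old Ruby paths. As a reminder of how plugin code typically drives this interface, here is a minimal Ruby sketch based on the calls documented in the removed classes; the `scoped` variable and `do_work` helper are illustrative only and not part of logstash-core:

  # Scope a metric to a namespace once, then record against it without
  # repeating the namespace on every call (see Metric#namespace above).
  scoped = metric.namespace([:stats, :pipelines, :main, :plugins])
  scoped.increment(:events_in)                    # counter, default increment of 1
  scoped.gauge(:queue_size_in_bytes, 0)           # gauge, sets an absolute value
  scoped.time(:duration_in_millis) { do_work }    # times the block and reports the duration
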
@@ -1,5 +1,4 @@
 # encoding: utf-8
-require "logstash/util/loggable"
 require "logstash/util"
 require "concurrent"
 
@@ -1,6 +1,5 @@
 # encoding: utf-8
 require "pathname"
-require "logstash/util/loggable"
 
 # Logic from elasticsearch/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java
 # Move to ruby to remove any existing dependency
@@ -1,174 +1,30 @@
 # encoding: utf-8
 require "thread"
 require "concurrent"
-require "logstash/namespace"
-require "logstash/errors"
 require "logstash/event"
 require "logstash/filters/base"
 require "logstash/inputs/base"
 require "logstash/outputs/base"
-require "logstash/shutdown_watcher"
-require "logstash/pipeline_reporter"
-require "logstash/instrument/metric"
-require "logstash/instrument/namespaced_metric"
-require "logstash/instrument/null_metric"
-require "logstash/instrument/namespaced_null_metric"
 require "logstash/instrument/collector"
-require "logstash/util/dead_letter_queue_manager"
-require "logstash/queue_factory"
 require "logstash/compiler"
-require "securerandom"
-
-java_import org.logstash.common.DeadLetterQueueFactory
-java_import org.logstash.common.SourceWithMetadata
-java_import org.logstash.common.io.DeadLetterQueueWriter
-java_import org.logstash.config.ir.CompiledPipeline
-java_import org.logstash.config.ir.ConfigCompiler
-
-module LogStash; class JavaBasePipeline
-  include LogStash::Util::Loggable
-
-  attr_reader :settings, :config_str, :config_hash, :inputs, :filters, :outputs, :pipeline_id, :lir, :ephemeral_id
-  attr_reader :pipeline_config
-
-  def initialize(pipeline_config, namespaced_metric = nil, agent = nil)
-    @logger = self.logger
-    @ephemeral_id = SecureRandom.uuid
-
-    @pipeline_config = pipeline_config
-    @config_str = pipeline_config.config_string
-    @settings = pipeline_config.settings
-    @config_hash = Digest::SHA1.hexdigest(@config_str)
-
-    @lir = ConfigCompiler.configToPipelineIR(
-      @config_str, @settings.get_value("config.support_escapes")
-    )
-
-    @pipeline_id = @settings.get_value("pipeline.id") || self.object_id
-    @agent = agent
-    @dlq_writer = dlq_writer
-    @plugin_factory = LogStash::Plugins::PluginFactory.new(
-      # use NullMetric if called in the BasePipeline context otherwise use the @metric value
-      @lir, LogStash::Plugins::PluginMetricFactory.new(pipeline_id, @metric || Instrument::NullMetric.new),
-      LogStash::Plugins::ExecutionContextFactory.new(@agent, self, @dlq_writer),
-      JavaFilterDelegator
-    )
-    @lir_execution = CompiledPipeline.new(@lir, @plugin_factory)
-    if settings.get_value("config.debug") && @logger.debug?
-      @logger.debug("Compiled pipeline code", default_logging_keys(:code => @lir.get_graph.to_string))
-    end
-    @inputs = @lir_execution.inputs
-    @filters = @lir_execution.filters
-    @outputs = @lir_execution.outputs
-  end
-
-  def dlq_writer
-    if settings.get_value("dead_letter_queue.enable")
-      @dlq_writer = DeadLetterQueueFactory.getWriter(pipeline_id, settings.get_value("path.dead_letter_queue"), settings.get_value("dead_letter_queue.max_bytes"))
-    else
-      @dlq_writer = LogStash::Util::DummyDeadLetterQueueWriter.new
-    end
-  end
-
-  def close_dlq_writer
-    @dlq_writer.close
-    if settings.get_value("dead_letter_queue.enable")
-      DeadLetterQueueFactory.release(pipeline_id)
-    end
-  end
-
-  def buildOutput(name, line, column, *args)
-    plugin("output", name, line, column, *args)
-  end
-
-  def buildFilter(name, line, column, *args)
-    plugin("filter", name, line, column, *args)
-  end
-
-  def buildInput(name, line, column, *args)
-    plugin("input", name, line, column, *args)
-  end
-
-  def buildCodec(name, *args)
-    plugin("codec", name, 0, 0, *args)
-  end
-
-  def plugin(plugin_type, name, line, column, *args)
-    @plugin_factory.plugin(plugin_type, name, line, column, *args)
-  end
-
-  def reloadable?
-    configured_as_reloadable? && reloadable_plugins?
-  end
-
-  def configured_as_reloadable?
-    settings.get("pipeline.reloadable")
-  end
-
-  def reloadable_plugins?
-    non_reloadable_plugins.empty?
-  end
-
-  def non_reloadable_plugins
-    (inputs + filters + outputs).select { |plugin| !plugin.reloadable? }
-  end
-
-  private
-
-  def default_logging_keys(other_keys = {})
-    { :pipeline_id => pipeline_id }.merge(other_keys)
-  end
-end; end
 
 module LogStash; class JavaPipeline < JavaBasePipeline
+  include LogStash::Util::Loggable
   attr_reader \
     :worker_threads,
     :events_consumed,
     :events_filtered,
-    :reporter,
     :started_at,
-    :thread,
-    :settings,
-    :metric,
-    :filter_queue_client,
-    :input_queue_client,
-    :queue
+    :thread
 
   MAX_INFLIGHT_WARN_THRESHOLD = 10_000
 
   def initialize(pipeline_config, namespaced_metric = nil, agent = nil)
-    @settings = pipeline_config.settings
-    # This needs to be configured before we call super which will evaluate the code to make
-    # sure the metric instance is correctly send to the plugins to make the namespace scoping work
-    @metric = if namespaced_metric
-      settings.get("metric.collect") ? namespaced_metric : Instrument::NullMetric.new(namespaced_metric.collector)
-    else
-      Instrument::NullMetric.new
-    end
-
-    @ephemeral_id = SecureRandom.uuid
-    @settings = settings
-    @reporter = PipelineReporter.new(@logger, self)
+    @logger = self.logger
+    super pipeline_config, namespaced_metric, @logger, agent
     @worker_threads = []
 
-    super
-
-    begin
-      @queue = LogStash::QueueFactory.create(settings)
-    rescue => e
-      @logger.error("Logstash failed to create queue", default_logging_keys("exception" => e.message, "backtrace" => e.backtrace))
-      raise e
-    end
-
-    @input_queue_client = @queue.write_client
-    @filter_queue_client = @queue.read_client
-    # Note that @inflight_batches as a central mechanism for tracking inflight
-    # batches will fail if we have multiple read clients here.
-    @filter_queue_client.set_events_metric(metric.namespace([:stats, :events]))
-    @filter_queue_client.set_pipeline_metric(
-      metric.namespace([:stats, :pipelines, pipeline_id.to_s.to_sym, :events])
-    )
-    @drain_queue = @settings.get_value("queue.drain") || settings.get("queue.type") == "memory"
+    @drain_queue = settings.get_value("queue.drain") || settings.get("queue.type") == "memory"
 
     @events_filtered = java.util.concurrent.atomic.LongAdder.new
     @events_consumed = java.util.concurrent.atomic.LongAdder.new
@@ -189,14 +45,14 @@ module LogStash; class JavaPipeline < JavaBasePipeline
   end

   def safe_pipeline_worker_count
-    default = @settings.get_default("pipeline.workers")
-    pipeline_workers = @settings.get("pipeline.workers") #override from args "-w 8" or config
-    safe_filters, unsafe_filters = @filters.partition(&:threadsafe?)
+    default = settings.get_default("pipeline.workers")
+    pipeline_workers = settings.get("pipeline.workers") #override from args "-w 8" or config
+    safe_filters, unsafe_filters = filters.partition(&:threadsafe?)
     plugins = unsafe_filters.collect { |f| f.config_name }

     return pipeline_workers if unsafe_filters.empty?

-    if @settings.set?("pipeline.workers")
+    if settings.set?("pipeline.workers")
       if pipeline_workers > 1
         @logger.warn("Warning: Manual override - there are filters that might not work with multiple worker threads", default_logging_keys(:worker_threads => pipeline_workers, :filters => plugins))
       end
@@ -213,7 +69,7 @@ module LogStash; class JavaPipeline < JavaBasePipeline
   end

   def filters?
-    @filters.any?
+    filters.any?
   end

   def start
@@ -271,7 +127,7 @@ module LogStash; class JavaPipeline < JavaBasePipeline

     start_workers

-    @logger.info("Pipeline started", "pipeline.id" => @pipeline_id)
+    @logger.info("Pipeline started", "pipeline.id" => pipeline_id)

     # Block until all inputs have stopped
     # Generally this happens if SIGINT is sent and `shutdown` is called from an external thread
@@ -294,12 +150,6 @@ module LogStash; class JavaPipeline < JavaBasePipeline
     return 0
   end # def run

-  def close
-    @filter_queue_client.close
-    @queue.close
-    close_dlq_writer
-  end
-
   def transition_to_running
     @running.make_true
   end
@@ -316,10 +166,6 @@ module LogStash; class JavaPipeline < JavaBasePipeline
     @running.false?
   end

-  def system?
-    settings.get_value("pipeline.system")
-  end
-
   # register_plugins calls #register_plugin on the plugins list and upon exception will call Plugin#do_close on all registered plugins
   # @param plugins [Array[Plugin]] the list of plugins to register
   def register_plugins(plugins)
@@ -340,8 +186,8 @@ module LogStash; class JavaPipeline < JavaBasePipeline
     maybe_setup_out_plugins

     pipeline_workers = safe_pipeline_worker_count
-    batch_size = @settings.get("pipeline.batch.size")
-    batch_delay = @settings.get("pipeline.batch.delay")
+    batch_size = settings.get("pipeline.batch.size")
+    batch_delay = settings.get("pipeline.batch.delay")

     max_inflight = batch_size * pipeline_workers

@@ -349,10 +195,10 @@ module LogStash; class JavaPipeline < JavaBasePipeline
     config_metric.gauge(:workers, pipeline_workers)
     config_metric.gauge(:batch_size, batch_size)
     config_metric.gauge(:batch_delay, batch_delay)
-    config_metric.gauge(:config_reload_automatic, @settings.get("config.reload.automatic"))
-    config_metric.gauge(:config_reload_interval, @settings.get("config.reload.interval"))
+    config_metric.gauge(:config_reload_automatic, settings.get("config.reload.automatic"))
+    config_metric.gauge(:config_reload_interval, settings.get("config.reload.interval"))
     config_metric.gauge(:dead_letter_queue_enabled, dlq_enabled?)
-    config_metric.gauge(:dead_letter_queue_path, @dlq_writer.get_path.to_absolute_path.to_s) if dlq_enabled?
+    config_metric.gauge(:dead_letter_queue_path, dlq_writer.get_path.to_absolute_path.to_s) if dlq_enabled?


     @logger.info("Starting pipeline", default_logging_keys(
@@ -364,12 +210,12 @@ module LogStash; class JavaPipeline < JavaBasePipeline
       @logger.warn("CAUTION: Recommended inflight events max exceeded! Logstash will run with up to #{max_inflight} events in memory in your current configuration. If your message sizes are large this may cause instability with the default heap size. Please consider setting a non-standard heap size, changing the batch size (currently #{batch_size}), or changing the number of pipeline workers (currently #{pipeline_workers})", default_logging_keys)
     end

-    @filter_queue_client.set_batch_dimensions(batch_size, batch_delay)
+    filter_queue_client.set_batch_dimensions(batch_size, batch_delay)

     pipeline_workers.times do |t|
       thread = Thread.new do
         org.logstash.execution.WorkerLoop.new(
-          @lir_execution, @filter_queue_client, @events_filtered, @events_consumed,
+          lir_execution, filter_queue_client, @events_filtered, @events_consumed,
           @flushRequested, @flushing, @shutdownRequested, @drain_queue).run
       end
       thread.name="[#{pipeline_id}]>worker#{t}"
@@ -392,30 +238,26 @@ module LogStash; class JavaPipeline < JavaBasePipeline
     end
   end

-  def dlq_enabled?
-    @settings.get("dead_letter_queue.enable")
-  end
-
   def wait_inputs
     @input_threads.each(&:join)
   end

   def start_inputs
     moreinputs = []
-    @inputs.each do |input|
+    inputs.each do |input|
       if input.threadable && input.threads > 1
         (input.threads - 1).times do |i|
           moreinputs << input.clone
         end
       end
     end
-    @inputs += moreinputs
+    moreinputs.each {|i| inputs << i}

     # first make sure we can register all input plugins
-    register_plugins(@inputs)
+    register_plugins(inputs)

     # then after all input plugins are successfully registered, start them
-    @inputs.each { |input| start_input(input) }
+    inputs.each { |input| start_input(input) }
   end

   def start_input(plugin)
@@ -425,7 +267,7 @@ module LogStash; class JavaPipeline < JavaBasePipeline
   def inputworker(plugin)
     Util::set_thread_name("[#{pipeline_id}]<#{plugin.class.config_name}")
     begin
-      plugin.run(LogStash::WrappedWriteClient.new(@input_queue_client, @pipeline_id.to_s.to_sym, metric, plugin.id.to_sym))
+      plugin.run(wrapped_write_client(plugin.id.to_sym))
     rescue => e
       if plugin.stop?
         @logger.debug("Input plugin raised exception during shutdown, ignoring it.",
@@ -469,7 +311,7 @@ module LogStash; class JavaPipeline < JavaBasePipeline
     # stopped
     wait_for_workers
     clear_pipeline_metrics
-    @logger.info("Pipeline terminated", "pipeline.id" => @pipeline_id)
+    @logger.info("Pipeline terminated", "pipeline.id" => pipeline_id)
   end # def shutdown

   def wait_for_workers
@@ -480,7 +322,7 @@ module LogStash; class JavaPipeline < JavaBasePipeline

   def stop_inputs
     @logger.debug("Closing inputs", default_logging_keys)
-    @inputs.each(&:do_stop)
+    inputs.each(&:do_stop)
     @logger.debug("Closed inputs", default_logging_keys)
   end

@@ -495,8 +337,8 @@ module LogStash; class JavaPipeline < JavaBasePipeline
       t.join
     end

-    @filters.each(&:do_close)
-    @outputs.each(&:do_close)
+    filters.each(&:do_close)
+    outputs.each(&:do_close)
   end

   # for backward compatibility in devutils for the rspec helpers, this method is not used
@@ -542,41 +384,10 @@ module LogStash; class JavaPipeline < JavaBasePipeline
       .each {|t| t.delete("status") }
   end

-  def collect_dlq_stats
-    if dlq_enabled?
-      dlq_metric = @metric.namespace([:stats, :pipelines, pipeline_id.to_s.to_sym, :dlq])
-      dlq_metric.gauge(:queue_size_in_bytes, @dlq_writer.get_current_queue_size)
-    end
-  end
-
-  def collect_stats
-    pipeline_metric = @metric.namespace([:stats, :pipelines, pipeline_id.to_s.to_sym, :queue])
-    pipeline_metric.gauge(:type, settings.get("queue.type"))
-    if @queue.is_a?(LogStash::WrappedAckedQueue) && @queue.queue.is_a?(LogStash::AckedQueue)
-      queue = @queue.queue
-      dir_path = queue.dir_path
-      file_store = Files.get_file_store(Paths.get(dir_path))
-
-      pipeline_metric.namespace([:capacity]).tap do |n|
-        n.gauge(:page_capacity_in_bytes, queue.page_capacity)
-        n.gauge(:max_queue_size_in_bytes, queue.max_size_in_bytes)
-        n.gauge(:max_unread_events, queue.max_unread_events)
-        n.gauge(:queue_size_in_bytes, queue.persisted_size_in_bytes)
-      end
-      pipeline_metric.namespace([:data]).tap do |n|
-        n.gauge(:free_space_in_bytes, file_store.get_unallocated_space)
-        n.gauge(:storage_type, file_store.type)
-        n.gauge(:path, dir_path)
-      end
-
-      pipeline_metric.gauge(:events, queue.unread_count)
-    end
-  end
-
   def clear_pipeline_metrics
     # TODO(ph): I think the metric should also proxy that call correctly to the collector
     # this will simplify everything since the null metric would simply just do a noop
-    collector = @metric.collector
+    collector = metric.collector

     unless collector.nil?
       # selectively reset metrics we don't wish to keep after reloading
@@ -592,8 +403,8 @@ module LogStash; class JavaPipeline < JavaBasePipeline
   # We want to hide most of what's in here
   def inspect
     {
-      :pipeline_id => @pipeline_id,
-      :settings => @settings.inspect,
+      :pipeline_id => pipeline_id,
+      :settings => settings.inspect,
       :ready => @ready,
       :running => @running,
       :flushing => @flushing
@@ -604,13 +415,13 @@ module LogStash; class JavaPipeline < JavaBasePipeline

   def maybe_setup_out_plugins
     if @outputs_registered.make_true
-      register_plugins(@outputs)
-      register_plugins(@filters)
+      register_plugins(outputs)
+      register_plugins(filters)
     end
   end

   def default_logging_keys(other_keys = {})
-    keys = super
+    keys = {:pipeline_id => pipeline_id}.merge other_keys
     keys[:thread] ||= thread.inspect if thread
     keys
   end
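
The recurring pattern in the java_pipeline.rb hunks above is the switch from direct instance-variable access (@settings, @filters, @inputs, @outputs, @pipeline_id, @metric, @dlq_writer) to the reader methods inherited from the JavaBasePipeline parent, whose state now lives in Java. A minimal Ruby sketch of that pattern, with illustrative class names rather than the actual Java-backed classes:

  # Illustrative only: state owned by the parent class is exposed through readers,
  # so the subclass calls the reader instead of touching an instance variable it
  # no longer owns.
  class BasePipeline
    attr_reader :settings, :inputs, :filters, :outputs

    def initialize(settings)
      @settings = settings
      @inputs, @filters, @outputs = [], [], []
    end
  end

  class Pipeline < BasePipeline
    def filters?
      filters.any?   # inherited reader, mirrors the @filters.any? -> filters.any? change above
    end
  end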