logstash-core 6.0.0.alpha1-java → 6.0.0.alpha2-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core/logstash-core.jar +0 -0
  3. data/lib/logstash-core/version.rb +1 -1
  4. data/lib/logstash/agent.rb +81 -45
  5. data/lib/logstash/api/commands/hot_threads_reporter.rb +3 -3
  6. data/lib/logstash/api/commands/node.rb +13 -6
  7. data/lib/logstash/api/commands/stats.rb +18 -6
  8. data/lib/logstash/api/modules/node.rb +7 -0
  9. data/lib/logstash/api/modules/node_stats.rb +12 -5
  10. data/lib/logstash/bootstrap_check/default_config.rb +3 -7
  11. data/lib/logstash/compiler.rb +33 -15
  12. data/lib/logstash/compiler/lscl.rb +16 -8
  13. data/lib/logstash/config/mixin.rb +5 -42
  14. data/lib/logstash/config/pipeline_config.rb +1 -1
  15. data/lib/logstash/config/source/local.rb +28 -13
  16. data/lib/logstash/config/source/multi_local.rb +72 -0
  17. data/lib/logstash/config/source_loader.rb +1 -2
  18. data/lib/logstash/environment.rb +12 -3
  19. data/lib/logstash/execution_context.rb +7 -3
  20. data/lib/logstash/inputs/base.rb +2 -0
  21. data/lib/logstash/instrument/metric_type.rb +0 -2
  22. data/lib/logstash/instrument/periodic_poller/jvm.rb +5 -5
  23. data/lib/logstash/instrument/periodic_poller/pq.rb +1 -1
  24. data/lib/logstash/outputs/base.rb +2 -0
  25. data/lib/logstash/pipeline.rb +31 -14
  26. data/lib/logstash/pipeline_action/create.rb +1 -2
  27. data/lib/logstash/pipeline_action/reload.rb +2 -1
  28. data/lib/logstash/pipeline_settings.rb +50 -0
  29. data/lib/logstash/plugin.rb +1 -0
  30. data/lib/logstash/runner.rb +7 -5
  31. data/lib/logstash/settings.rb +11 -3
  32. data/lib/logstash/shutdown_watcher.rb +26 -0
  33. data/lib/logstash/state_resolver.rb +1 -3
  34. data/lib/logstash/util/dead_letter_queue_manager.rb +61 -0
  35. data/lib/logstash/util/environment_variables.rb +43 -0
  36. data/lib/logstash/util/thread_dump.rb +3 -1
  37. data/lib/logstash/version.rb +1 -1
  38. data/locales/en.yml +4 -0
  39. data/logstash-core.gemspec +4 -1
  40. data/spec/logstash/agent/converge_spec.rb +36 -35
  41. data/spec/logstash/agent_spec.rb +48 -177
  42. data/spec/{api/lib/commands/stats.rb → logstash/api/commands/stats_spec.rb} +7 -2
  43. data/spec/{api/lib → logstash/api}/errors_spec.rb +1 -1
  44. data/spec/{api/lib/api → logstash/api/modules}/logging_spec.rb +1 -10
  45. data/spec/{api/lib/api → logstash/api/modules}/node_plugins_spec.rb +1 -2
  46. data/spec/{api/lib/api → logstash/api/modules}/node_spec.rb +9 -8
  47. data/spec/{api/lib/api → logstash/api/modules}/node_stats_spec.rb +11 -9
  48. data/spec/{api/lib/api → logstash/api/modules}/plugins_spec.rb +4 -3
  49. data/spec/{api/lib/api → logstash/api/modules}/root_spec.rb +2 -2
  50. data/spec/{api/lib → logstash/api}/rack_app_spec.rb +0 -0
  51. data/spec/logstash/compiler/compiler_spec.rb +72 -9
  52. data/spec/logstash/config/source/local_spec.rb +20 -4
  53. data/spec/logstash/config/source/multi_local_spec.rb +113 -0
  54. data/spec/logstash/execution_context_spec.rb +14 -4
  55. data/spec/logstash/inputs/base_spec.rb +1 -1
  56. data/spec/logstash/instrument/wrapped_write_client_spec.rb +34 -19
  57. data/spec/logstash/output_delegator_spec.rb +1 -1
  58. data/spec/logstash/outputs/base_spec.rb +1 -1
  59. data/spec/logstash/pipeline_action/reload_spec.rb +1 -1
  60. data/spec/logstash/pipeline_action/stop_spec.rb +1 -1
  61. data/spec/logstash/pipeline_dlq_commit_spec.rb +107 -0
  62. data/spec/logstash/pipeline_pq_file_spec.rb +3 -1
  63. data/spec/logstash/pipeline_reporter_spec.rb +2 -1
  64. data/spec/logstash/pipeline_spec.rb +54 -43
  65. data/spec/logstash/runner_spec.rb +27 -36
  66. data/spec/logstash/settings/array_coercible_spec.rb +65 -0
  67. data/spec/logstash/settings_spec.rb +91 -0
  68. data/spec/logstash/shutdown_watcher_spec.rb +10 -16
  69. data/spec/logstash/state_resolver_spec.rb +6 -4
  70. data/spec/support/helpers.rb +16 -3
  71. data/spec/support/shared_contexts.rb +26 -2
  72. metadata +42 -39
  73. data/lib/logstash/instrument/metric_type/mean.rb +0 -33
  74. data/spec/api/lib/api/support/resource_dsl_methods.rb +0 -87
  75. data/spec/api/spec_helper.rb +0 -106
@@ -1,38 +1,56 @@
1
1
  require 'logstash/util/loggable'
2
2
  require 'logstash/compiler/lscl/lscl_grammar'
3
3
 
4
- java_import org.logstash.config.ir.Pipeline
4
+ java_import org.logstash.config.ir.PipelineIR
5
5
  java_import org.logstash.config.ir.graph.Graph;
6
6
  java_import org.logstash.config.ir.graph.PluginVertex;
7
7
 
8
8
  module LogStash; class Compiler
9
9
  include ::LogStash::Util::Loggable
10
10
 
11
- def self.compile_pipeline(config_str, source_file=nil)
12
- graph_sections = self.compile_graph(config_str, source_file)
13
- pipeline = org.logstash.config.ir.Pipeline.new(
14
- graph_sections[:input],
15
- graph_sections[:filter],
16
- graph_sections[:output]
17
- )
11
+ def self.compile_sources(*sources_with_metadata)
12
+ graph_sections = sources_with_metadata.map do |swm|
13
+ self.compile_graph(swm)
14
+ end
15
+
16
+ input_graph = org.logstash.config.ir.graph.Graph.combine(*graph_sections.map {|s| s[:input] }).graph
17
+ output_graph = org.logstash.config.ir.graph.Graph.combine(*graph_sections.map {|s| s[:output] }).graph
18
+
19
+ filter_graph = graph_sections.reduce(nil) do |acc, s|
20
+ filter_section = s[:filter]
21
+
22
+ if acc.nil?
23
+ filter_section
24
+ else
25
+ acc.chain(filter_section)
26
+ end
27
+ end
28
+
29
+ original_source = sources_with_metadata.map(&:text).join("\n")
30
+
31
+ org.logstash.config.ir.PipelineIR.new(input_graph, filter_graph, output_graph, original_source)
18
32
  end
19
33
 
20
- def self.compile_ast(config_str, source_file=nil)
34
+ def self.compile_ast(source_with_metadata)
35
+ if !source_with_metadata.is_a?(org.logstash.common.SourceWithMetadata)
36
+ raise ArgumentError, "Expected 'org.logstash.common.SourceWithMetadata', got #{source_with_metadata.class}"
37
+ end
38
+
21
39
  grammar = LogStashCompilerLSCLGrammarParser.new
22
- config = grammar.parse(config_str)
40
+ config = grammar.parse(source_with_metadata.text)
23
41
 
24
42
  if config.nil?
25
43
  raise ConfigurationError, grammar.failure_reason
26
44
  end
27
45
 
28
- config
46
+ config.compile(source_with_metadata)
29
47
  end
30
48
 
31
- def self.compile_imperative(config_str, source_file=nil)
32
- compile_ast(config_str, source_file).compile(source_file)
49
+ def self.compile_imperative(source_with_metadata)
50
+ compile_ast(source_with_metadata)
33
51
  end
34
52
 
35
- def self.compile_graph(config_str, source_file=nil)
36
- Hash[compile_imperative(config_str, source_file).map {|section,icompiled| [section, icompiled.toGraph]}]
53
+ def self.compile_graph(source_with_metadata)
54
+ Hash[compile_imperative(source_with_metadata).map {|section,icompiled| [section, icompiled.toGraph]}]
37
55
  end
38
56
  end; end
@@ -10,15 +10,23 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
10
10
  module Helpers
11
11
  def source_meta
12
12
  line, column = line_and_column
13
- org.logstash.common.SourceWithMetadata.new(source_file, line, column, self.text_value)
13
+ org.logstash.common.SourceWithMetadata.new(base_protocol, base_id, line, column, self.text_value)
14
14
  end
15
15
 
16
- def source_file=(value)
17
- set_meta(:source_file, value)
16
+ def base_source_with_metadata=(value)
17
+ set_meta(:base_source_with_metadata, value)
18
18
  end
19
19
 
20
- def source_file
21
- get_meta(:source_file)
20
+ def base_source_with_metadata
21
+ get_meta(:base_source_with_metadata)
22
+ end
23
+
24
+ def base_protocol
25
+ self.base_source_with_metadata.protocol
26
+ end
27
+
28
+ def base_id
29
+ self.base_source_with_metadata.id
22
30
  end
23
31
 
24
32
  def compose(*statements)
@@ -39,7 +47,7 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
39
47
  end
40
48
 
41
49
  def empty_source_meta()
42
- org.logstash.common.SourceWithMetadata.new()
50
+ org.logstash.common.SourceWithMetadata.new(base_protocol, base_id, nil)
43
51
  end
44
52
 
45
53
  def jdsl
@@ -70,9 +78,9 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
70
78
  class Config < Node
71
79
  include Helpers
72
80
 
73
- def compile(source_file=nil)
81
+ def compile(base_source_with_metadata=nil)
74
82
  # There is no way to move vars across nodes in treetop :(
75
- self.source_file = source_file
83
+ self.base_source_with_metadata = base_source_with_metadata
76
84
 
77
85
  sections = recursive_select(PluginSection)
78
86
 
@@ -33,37 +33,21 @@ LogStash::Environment.load_locale!
33
33
  # }
34
34
  #
35
35
  module LogStash::Config::Mixin
36
+
37
+ include LogStash::Util::EnvironmentVariables
38
+
36
39
  attr_accessor :config
37
40
  attr_accessor :original_params
38
41
 
39
42
  PLUGIN_VERSION_1_0_0 = LogStash::Util::PluginVersion.new(1, 0, 0)
40
43
  PLUGIN_VERSION_0_9_0 = LogStash::Util::PluginVersion.new(0, 9, 0)
41
-
42
- ENV_PLACEHOLDER_REGEX = /\$\{(?<name>\w+)(\:(?<default>[^}]*))?\}/
43
-
44
+
44
45
  # This method is called when someone does 'include LogStash::Config'
45
46
  def self.included(base)
46
47
  # Add the DSL methods to the 'base' given.
47
48
  base.extend(LogStash::Config::Mixin::DSL)
48
49
  end
49
-
50
- # Recursive method to replace environment variable references in parameters
51
- def deep_replace(value)
52
- if (value.is_a?(Hash))
53
- value.each do |valueHashKey, valueHashValue|
54
- value[valueHashKey.to_s] = deep_replace(valueHashValue)
55
- end
56
- else
57
- if (value.is_a?(Array))
58
- value.each_index do | valueArrayIndex|
59
- value[valueArrayIndex] = deep_replace(value[valueArrayIndex])
60
- end
61
- else
62
- return replace_env_placeholders(value)
63
- end
64
- end
65
- end
66
-
50
+
67
51
  def config_init(params)
68
52
  # Validation will modify the values inside params if necessary.
69
53
  # For example: converting a string to a number, etc.
@@ -158,27 +142,6 @@ module LogStash::Config::Mixin
158
142
  @config = params
159
143
  end # def config_init
160
144
 
161
- # Replace all environment variable references in 'value' param by environment variable value and return updated value
162
- # Process following patterns : $VAR, ${VAR}, ${VAR:defaultValue}
163
- def replace_env_placeholders(value)
164
- return value unless value.is_a?(String)
165
-
166
- value.gsub(ENV_PLACEHOLDER_REGEX) do |placeholder|
167
- # Note: Ruby docs claim[1] Regexp.last_match is thread-local and scoped to
168
- # the call, so this should be thread-safe.
169
- #
170
- # [1] http://ruby-doc.org/core-2.1.1/Regexp.html#method-c-last_match
171
- name = Regexp.last_match(:name)
172
- default = Regexp.last_match(:default)
173
-
174
- replacement = ENV.fetch(name, default)
175
- if replacement.nil?
176
- raise LogStash::ConfigurationError, "Cannot evaluate `#{placeholder}`. Environment variable `#{name}` is not set and there is no default value given."
177
- end
178
- replacement
179
- end
180
- end # def replace_env_placeholders
181
-
182
145
  module DSL
183
146
  attr_accessor :flags
184
147
 
@@ -30,7 +30,7 @@ module LogStash module Config
30
30
  end
31
31
 
32
32
  def ==(other)
33
- config_hash == other.config_hash && pipeline_id == other.pipeline_id
33
+ config_hash == other.config_hash && pipeline_id == other.pipeline_id && settings == other.settings
34
34
  end
35
35
 
36
36
  def display_debug_information
@@ -37,7 +37,11 @@ module LogStash module Config module Source
37
37
  config_parts = []
38
38
  encoding_issue_files = []
39
39
 
40
- get_files.each do |file|
40
+ if logger.debug?
41
+ logger.debug("Skipping the following files while reading config since they don't match the specified glob pattern", :files => get_unmatched_files)
42
+ end
43
+
44
+ get_matched_files.each do |file|
41
45
  next unless ::File.file?(file) # skip directory
42
46
 
43
47
  logger.debug("Reading config file", :config_file => file)
@@ -78,7 +82,7 @@ module LogStash module Config module Source
78
82
  ::File.expand_path(path)
79
83
  end
80
84
 
81
- def get_files
85
+ def get_matched_files
82
86
  Dir.glob(path).sort
83
87
  end
84
88
 
@@ -90,6 +94,13 @@ module LogStash module Config module Source
90
94
  end
91
95
  end
92
96
 
97
+ def get_unmatched_files
98
+ # transform "/var/lib/*.conf" => /var/lib/*
99
+ t = ::File.split(@path)
100
+ all_files = Dir.glob(::File.join(t.first, "*")).sort
101
+ all_files - get_matched_files
102
+ end
103
+
93
104
  def valid_encoding?(content)
94
105
  content.ascii_only? && content.valid_encoding?
95
106
  end
@@ -132,24 +143,28 @@ module LogStash module Config module Source
132
143
  OUTPUT_BLOCK_RE = /output *{/
133
144
 
134
145
  def pipeline_configs
135
- config_parts = []
136
146
 
137
- config_parts.concat(ConfigStringLoader.read(config_string)) if config_string?
138
- if local_config?
139
- local_config_parts = ConfigPathLoader.read(config_path)
140
- config_parts.concat(local_config_parts)
141
- else
142
- local_config_parts = []
147
+ if config_path? && config_string?
148
+ raise ConfigurationError.new("Settings 'config.string' and 'path.config' can't be used simultaneously.")
149
+ elsif !config_path? && !config_string?
150
+ raise ConfigurationError.new("Either 'config.string' or 'path.config' must be set.")
143
151
  end
144
152
 
145
- config_parts.concat(ConfigRemoteLoader.read(config_path)) if remote_config?
153
+ config_parts = if config_string?
154
+ ConfigStringLoader.read(config_string)
155
+ elsif local_config?
156
+ ConfigPathLoader.read(config_path)
157
+ elsif remote_config?
158
+ ConfigRemoteLoader.read(config_path)
159
+ else
160
+ []
161
+ end
146
162
 
147
163
  return if config_parts.empty?
148
- return if config_string? && config_string.strip.empty? && local_config? && local_config_parts.empty?
149
164
 
150
- add_missing_default_inputs_or_outputs(config_parts)
165
+ add_missing_default_inputs_or_outputs(config_parts) if config_string?
151
166
 
152
- [PipelineConfig.new(self.class, PIPELINE_ID, config_parts, @settings)]
167
+ [PipelineConfig.new(self.class, @settings.get("pipeline.id").to_sym, config_parts, @settings)]
153
168
  end
154
169
 
155
170
  def match?
@@ -0,0 +1,72 @@
1
+ # encoding: utf-8
2
+ require "logstash/config/source/local"
3
+ require "logstash/util/loggable"
4
+ require "logstash/pipeline_settings"
5
+
6
+ module LogStash module Config module Source
7
+ class MultiLocal < Local
8
+ include LogStash::Util::Loggable
9
+
10
+ def initialize(settings)
11
+ @original_settings = settings
12
+ super(settings)
13
+ end
14
+
15
+ def pipeline_configs
16
+ pipelines = retrieve_yaml_pipelines()
17
+ pipelines_settings = pipelines.map do |pipeline_settings|
18
+ ::LogStash::PipelineSettings.from_settings(@original_settings.clone).merge(pipeline_settings)
19
+ end
20
+ detect_duplicate_pipelines(pipelines_settings)
21
+ pipelines_settings.map do |pipeline_settings|
22
+ @settings = pipeline_settings
23
+ # this relies on instance variable @settings and the parent class' pipeline_configs
24
+ # method. The alternative is to refactor most of the Local source methods to accept
25
+ # a settings object instead of relying on @settings.
26
+ super # create a PipelineConfig object based on @settings
27
+ end.flatten
28
+ end
29
+
30
+ def match?
31
+ uses_config_string = @original_settings.get_setting("config.string").set?
32
+ uses_path_config = @original_settings.get_setting("path.config").set?
33
+ return true if !uses_config_string && !uses_path_config
34
+ if uses_path_config
35
+ logger.warn("Ignoring the 'pipelines.yml' file because 'path.config' (-f) is being used.")
36
+ elsif uses_config_string
37
+ logger.warn("Ignoring the 'pipelines.yml' file because 'config.string' (-e) is being used.")
38
+ end
39
+ false
40
+ end
41
+
42
+ def retrieve_yaml_pipelines
43
+ result = read_pipelines_from_yaml(pipelines_yaml_location)
44
+ case result
45
+ when Array
46
+ result
47
+ when false
48
+ raise ConfigurationError.new("Pipelines YAML file is empty. Path: #{pipelines_yaml_location}")
49
+ else
50
+ raise ConfigurationError.new("Pipelines YAML file must contain an array of pipeline configs. Found \"#{result.class}\" in #{pipelines_yaml_location}")
51
+ end
52
+ end
53
+
54
+ def read_pipelines_from_yaml(yaml_location)
55
+ logger.debug("Reading pipeline configurations from YAML", :location => pipelines_yaml_location)
56
+ ::YAML.load(IO.read(yaml_location))
57
+ rescue => e
58
+ raise ConfigurationError.new("Failed to read pipelines yaml file. Location: #{yaml_location}, Exception: #{e.inspect}")
59
+ end
60
+
61
+ def pipelines_yaml_location
62
+ ::File.join(@original_settings.get("path.settings"), "pipelines.yml")
63
+ end
64
+
65
+ def detect_duplicate_pipelines(pipelines)
66
+ duplicate_ids = pipelines.group_by {|pipeline| pipeline.get("pipeline.id") }.select {|k, v| v.size > 1 }.map {|k, v| k}
67
+ if duplicate_ids.any?
68
+ raise ConfigurationError.new("Pipelines YAML file contains duplicate pipeline ids: #{duplicate_ids.inspect}. Location: #{pipelines_yaml_location}")
69
+ end
70
+ end
71
+ end
72
+ end end end
@@ -1,5 +1,6 @@
1
1
  # encoding: utf-8
2
2
  require "logstash/config/source/local"
3
+ require "logstash/config/source/multi_local"
3
4
  require "logstash/errors"
4
5
  require "thread"
5
6
  require "set"
@@ -120,6 +121,4 @@ module LogStash module Config
120
121
  .select { |group, pipeline_configs| pipeline_configs.size > 1 }
121
122
  end
122
123
  end
123
-
124
- SOURCE_LOADER = SourceLoader.new
125
124
  end end
@@ -50,6 +50,7 @@ module LogStash
50
50
  Setting::Numeric.new("queue.checkpoint.acks", 1024), # 0 is unlimited
51
51
  Setting::Numeric.new("queue.checkpoint.writes", 1024), # 0 is unlimited
52
52
  Setting::Numeric.new("queue.checkpoint.interval", 1000), # 0 is no time-based checkpointing
53
+ Setting::Boolean.new("dead_letter_queue.enable", false),
53
54
  Setting::TimeValue.new("slowlog.threshold.warn", "-1"),
54
55
  Setting::TimeValue.new("slowlog.threshold.info", "-1"),
55
56
  Setting::TimeValue.new("slowlog.threshold.debug", "-1"),
@@ -59,13 +60,21 @@ module LogStash
59
60
  # Compute the default queue path based on `path.data`
60
61
  default_queue_file_path = ::File.join(SETTINGS.get("path.data"), "queue")
61
62
  SETTINGS.register Setting::WritableDirectory.new("path.queue", default_queue_file_path)
62
-
63
+ # Compute the default dead_letter_queue path based on `path.data`
64
+ default_dlq_file_path = ::File.join(SETTINGS.get("path.data"), "dead_letter_queue")
65
+ SETTINGS.register Setting::WritableDirectory.new("path.dead_letter_queue", default_dlq_file_path)
66
+
63
67
  SETTINGS.on_post_process do |settings|
64
68
  # If the data path is overridden but the queue path isn't recompute the queue path
65
69
  # We need to do this at this stage because of the weird execution order
66
70
  # our monkey-patched Clamp follows
67
- if settings.set?("path.data") && !settings.set?("path.queue")
68
- settings.set_value("path.queue", ::File.join(settings.get("path.data"), "queue"))
71
+ if settings.set?("path.data")
72
+ if !settings.set?("path.queue")
73
+ settings.set_value("path.queue", ::File.join(settings.get("path.data"), "queue"))
74
+ end
75
+ if !settings.set?("path.dead_letter_queue")
76
+ settings.set_value("path.dead_letter_queue", ::File.join(settings.get("path.data"), "dead_letter_queue"))
77
+ end
69
78
  end
70
79
  end
71
80
 
@@ -1,13 +1,17 @@
1
1
  # encoding: utf-8
2
+ require "logstash/util/dead_letter_queue_manager"
2
3
  module LogStash
3
4
  class ExecutionContext
4
- attr_reader :pipeline, :agent
5
+ attr_reader :pipeline, :agent, :dlq_writer
5
6
 
6
- def initialize(pipeline, agent)
7
+ def initialize(pipeline, agent, plugin_id, plugin_type, dlq_writer)
7
8
  @pipeline = pipeline
8
9
  @agent = agent
10
+ @plugin_id = plugin_id
11
+ @plugin_type = plugin_type
12
+ @dlq_writer = LogStash::Util::PluginDeadLetterQueueWriter.new(dlq_writer, @plugin_id, @plugin_type)
9
13
  end
10
-
14
+
11
15
  def pipeline_id
12
16
  @pipeline.pipeline_id
13
17
  end
@@ -105,6 +105,8 @@ class LogStash::Inputs::Base < LogStash::Plugin
105
105
  super
106
106
  # There is no easy way to propage an instance variable into the codec, because the codec
107
107
  # are created at the class level
108
+ # TODO(talevy): Codecs should have their own execution_context, for now they will inherit their
109
+ # parent plugin's
108
110
  @codec.execution_context = context
109
111
  context
110
112
  end
@@ -1,13 +1,11 @@
1
1
  # encoding: utf-8
2
2
  require "logstash/instrument/metric_type/counter"
3
- require "logstash/instrument/metric_type/mean"
4
3
  require "logstash/instrument/metric_type/gauge"
5
4
 
6
5
  module LogStash module Instrument
7
6
  module MetricType
8
7
  METRIC_TYPE_LIST = {
9
8
  :counter => LogStash::Instrument::MetricType::Counter,
10
- :mean => LogStash::Instrument::MetricType::Mean,
11
9
  :gauge => LogStash::Instrument::MetricType::Gauge
12
10
  }.freeze
13
11
 
@@ -2,18 +2,19 @@
2
2
  require "logstash/instrument/periodic_poller/base"
3
3
  require "logstash/instrument/periodic_poller/load_average"
4
4
  require "logstash/environment"
5
- require "jrmonitor"
6
5
  require "set"
7
6
 
7
+ java_import 'com.sun.management.UnixOperatingSystemMXBean'
8
8
  java_import 'java.lang.management.ManagementFactory'
9
9
  java_import 'java.lang.management.OperatingSystemMXBean'
10
10
  java_import 'java.lang.management.GarbageCollectorMXBean'
11
11
  java_import 'java.lang.management.RuntimeMXBean'
12
- java_import 'com.sun.management.UnixOperatingSystemMXBean'
13
12
  java_import 'javax.management.MBeanServer'
14
13
  java_import 'javax.management.ObjectName'
15
14
  java_import 'javax.management.AttributeList'
16
15
  java_import 'javax.naming.directory.Attribute'
16
+ java_import 'org.logstash.instrument.reports.MemoryReport'
17
+ java_import 'org.logstash.instrument.reports.ProcessReport'
17
18
 
18
19
 
19
20
  module LogStash module Instrument module PeriodicPoller
@@ -50,7 +51,7 @@ module LogStash module Instrument module PeriodicPoller
50
51
  end
51
52
 
52
53
  def collect
53
- raw = JRMonitor.memory.generate
54
+ raw = MemoryReport.generate
54
55
  collect_jvm_metrics(raw)
55
56
  collect_pools_metrics(raw)
56
57
  collect_threads_metrics
@@ -81,11 +82,10 @@ module LogStash module Instrument module PeriodicPoller
81
82
  end
82
83
 
83
84
  def collect_process_metrics
84
- process_metrics = JRMonitor.process.generate
85
+ process_metrics = ProcessReport.generate
85
86
 
86
87
  path = [:jvm, :process]
87
88
 
88
-
89
89
  open_fds = process_metrics["open_file_descriptors"]
90
90
  if @peak_open_fds.nil? || open_fds > @peak_open_fds
91
91
  @peak_open_fds = open_fds