logstash-core 6.0.0.beta1-java → 6.0.0.beta2-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core/logstash-core.jar +0 -0
  3. data/lib/logstash-core/version.rb +1 -1
  4. data/lib/logstash/agent.rb +0 -16
  5. data/lib/logstash/compiler/lscl.rb +2 -53
  6. data/lib/logstash/compiler/lscl/helpers.rb +55 -0
  7. data/lib/logstash/config/config_ast.rb +6 -3
  8. data/lib/logstash/config/modules_common.rb +4 -1
  9. data/lib/logstash/elasticsearch_client.rb +4 -1
  10. data/lib/logstash/environment.rb +8 -2
  11. data/lib/logstash/filter_delegator.rb +11 -6
  12. data/lib/logstash/instrument/collector.rb +7 -5
  13. data/lib/logstash/instrument/metric_store.rb +6 -9
  14. data/lib/logstash/instrument/namespaced_metric.rb +4 -0
  15. data/lib/logstash/instrument/namespaced_null_metric.rb +4 -0
  16. data/lib/logstash/instrument/null_metric.rb +10 -0
  17. data/lib/logstash/instrument/wrapped_write_client.rb +33 -24
  18. data/lib/logstash/modules/kibana_client.rb +5 -3
  19. data/lib/logstash/modules/kibana_config.rb +1 -4
  20. data/lib/logstash/modules/scaffold.rb +2 -0
  21. data/lib/logstash/modules/settings_merger.rb +52 -4
  22. data/lib/logstash/output_delegator.rb +7 -5
  23. data/lib/logstash/pipeline.rb +37 -14
  24. data/lib/logstash/pipeline_settings.rb +2 -0
  25. data/lib/logstash/runner.rb +14 -2
  26. data/lib/logstash/settings.rb +26 -0
  27. data/lib/logstash/util/cloud_setting_auth.rb +29 -0
  28. data/lib/logstash/util/cloud_setting_id.rb +41 -0
  29. data/lib/logstash/util/modules_setting_array.rb +28 -0
  30. data/lib/logstash/util/wrapped_acked_queue.rb +5 -6
  31. data/lib/logstash/util/wrapped_synchronous_queue.rb +14 -9
  32. data/lib/logstash/version.rb +1 -1
  33. data/locales/en.yml +16 -0
  34. data/spec/logstash/agent/converge_spec.rb +6 -7
  35. data/spec/logstash/config/source/multi_local_spec.rb +11 -0
  36. data/spec/logstash/filter_delegator_spec.rb +20 -8
  37. data/spec/logstash/legacy_ruby_event_spec.rb +4 -4
  38. data/spec/logstash/modules/scaffold_spec.rb +2 -7
  39. data/spec/logstash/modules/settings_merger_spec.rb +111 -0
  40. data/spec/logstash/output_delegator_spec.rb +15 -5
  41. data/spec/logstash/pipeline_spec.rb +39 -7
  42. data/spec/logstash/runner_spec.rb +4 -1
  43. data/spec/logstash/settings/modules_spec.rb +115 -0
  44. metadata +10 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 46f57cb18b555e501983b0d9d9ae61c3be63fd5925f444a4018adce705fe6b1f
4
- data.tar.gz: 963cbd763704c8c84ca8bb5055412baa6b57d22d3e65ac03cd7f6b2434ec227a
3
+ metadata.gz: 5c353e53b9ba28be27a573084a75d1dc12d1257e2192057121d5e1c41de195bc
4
+ data.tar.gz: 47790a7e594369b8d2c3d1ebfcde2d7fef023aaaf3afb0b747bcc75124c35547
5
5
  SHA512:
6
- metadata.gz: 93a8a8282730e5c5e687b5bf2bc48d0aab82fa5ec8398336114f3e4a3d73d6705dac0854827ab9bfd5c52173177c72db87da5e6bb9a1ca95b14a826c51ec2860
7
- data.tar.gz: 61df6640d1d26a96b014c9eb7399982a9829465472f9f05f5b4882c4a8720a821c6e4eca9548286dcb907e792f32f91ab1ca90fdb3f9c96dfbdb1b45d9e0b6a0
6
+ metadata.gz: d7acc940476d31aca243c82ea0bc693324e8cdbbeeb7c8abbd06210cd5ad82f143499653b0952696a027ae0b60557518164409a36ff7a4b3ff2d6e4c536dcbb6
7
+ data.tar.gz: 8848ec115ba9abfa1f3065e1ee22a63b50e7172e4aea049ebd6a37193e6dd06ee8f4b257863ab625939416a75512dffeaf7f1ad1cd9ef88d8c16db344b1a62a8
@@ -5,4 +5,4 @@
5
5
  # Note to authors: this should not include dashes because 'gem' barfs if
6
6
  # you include a dash in the version string.
7
7
 
8
- LOGSTASH_CORE_VERSION = "6.0.0-beta1"
8
+ LOGSTASH_CORE_VERSION = "6.0.0-beta2"
@@ -190,13 +190,6 @@ class LogStash::Agent
190
190
  converge_result
191
191
  end
192
192
 
193
- def force_shutdown!
194
- stop_collecting_metrics
195
- stop_webserver
196
- transition_to_stopped
197
- force_shutdown_pipelines!
198
- end
199
-
200
193
  def id
201
194
  return @id if @id
202
195
 
@@ -427,15 +420,6 @@ class LogStash::Agent
427
420
  @collect_metric
428
421
  end
429
422
 
430
- def force_shutdown_pipelines!
431
- with_pipelines do |pipelines|
432
- pipelines.each do |_, pipeline|
433
- # TODO(ph): should it be his own action?
434
- pipeline.force_shutdown!
435
- end
436
- end
437
- end
438
-
439
423
  def shutdown_pipelines
440
424
  logger.debug("Shutting down all pipelines", :pipelines_count => pipelines_count)
441
425
 
@@ -2,6 +2,7 @@
2
2
  require 'logstash/errors'
3
3
  require "treetop"
4
4
  require "logstash/compiler/treetop_monkeypatches"
5
+ require "logstash/compiler/lscl/helpers"
5
6
  require "logstash/config/string_escape"
6
7
 
7
8
  java_import org.logstash.config.ir.DSL
@@ -10,59 +11,7 @@ java_import org.logstash.common.SourceWithMetadata
10
11
  module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSCL; module AST
11
12
  PROCESS_ESCAPE_SEQUENCES = :process_escape_sequences
12
13
 
13
- # Helpers for parsing LSCL files
14
- module Helpers
15
- def source_meta
16
- line, column = line_and_column
17
- org.logstash.common.SourceWithMetadata.new(base_protocol, base_id, line, column, self.text_value)
18
- end
19
-
20
- def base_source_with_metadata=(value)
21
- set_meta(:base_source_with_metadata, value)
22
- end
23
-
24
- def base_source_with_metadata
25
- get_meta(:base_source_with_metadata)
26
- end
27
-
28
- def base_protocol
29
- self.base_source_with_metadata.protocol
30
- end
31
-
32
- def base_id
33
- self.base_source_with_metadata.id
34
- end
35
-
36
- def compose(*statements)
37
- compose_for(section_type.to_sym).call(source_meta, *statements)
38
- end
39
-
40
- def compose_for(section_sym)
41
- if section_sym == :filter
42
- jdsl.method(:iComposeSequence)
43
- else
44
- jdsl.method(:iComposeParallel)
45
- end
46
- end
47
-
48
- def line_and_column
49
- start = self.interval.first
50
- [self.input.line_of(start), self.input.column_of(start)]
51
- end
52
-
53
- def jdsl
54
- org.logstash.config.ir.DSL
55
- end
56
-
57
- def self.jdsl
58
- org.logstash.config.ir.DSL
59
- end
60
-
61
- AND_METHOD = jdsl.method(:eAnd)
62
- OR_METHOD = jdsl.method(:eOr)
63
- end
64
-
65
- class Node < Treetop::Runtime::SyntaxNode
14
+ class Node < Treetop::Runtime::SyntaxNode
66
15
  include Helpers
67
16
 
68
17
  def section_type
@@ -0,0 +1,55 @@
1
+ # encoding: utf-8
2
+
3
+ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSCL; module AST
4
+ # Helpers for parsing LSCL files
5
+ module Helpers
6
+ def source_meta
7
+ line, column = line_and_column
8
+ org.logstash.common.SourceWithMetadata.new(base_protocol, base_id, line, column, self.text_value)
9
+ end
10
+
11
+ def base_source_with_metadata=(value)
12
+ set_meta(:base_source_with_metadata, value)
13
+ end
14
+
15
+ def base_source_with_metadata
16
+ get_meta(:base_source_with_metadata)
17
+ end
18
+
19
+ def base_protocol
20
+ self.base_source_with_metadata ? self.base_source_with_metadata.protocol : 'config_ast'
21
+ end
22
+
23
+ def base_id
24
+ self.base_source_with_metadata ? self.base_source_with_metadata.id : 'config_ast'
25
+ end
26
+
27
+ def compose(*statements)
28
+ compose_for(section_type.to_sym).call(source_meta, *statements)
29
+ end
30
+
31
+ def compose_for(section_sym)
32
+ if section_sym == :filter
33
+ jdsl.method(:iComposeSequence)
34
+ else
35
+ jdsl.method(:iComposeParallel)
36
+ end
37
+ end
38
+
39
+ def line_and_column
40
+ start = self.interval.first
41
+ [self.input.line_of(start), self.input.column_of(start)]
42
+ end
43
+
44
+ def jdsl
45
+ org.logstash.config.ir.DSL
46
+ end
47
+
48
+ def self.jdsl
49
+ org.logstash.config.ir.DSL
50
+ end
51
+
52
+ AND_METHOD = jdsl.method(:eAnd)
53
+ OR_METHOD = jdsl.method(:eOr)
54
+ end
55
+ end; end; end; end; end
@@ -1,5 +1,6 @@
1
1
  # encoding: utf-8
2
2
  require 'logstash/errors'
3
+ require "logstash/compiler/lscl/helpers"
3
4
  require "treetop"
4
5
 
5
6
  require "logstash/compiler/treetop_monkeypatches"
@@ -32,6 +33,8 @@ module LogStash; module Config; module AST
32
33
  end
33
34
 
34
35
  class Node < Treetop::Runtime::SyntaxNode
36
+ include LogStashCompilerLSCLGrammar::LogStash::Compiler::LSCL::AST::Helpers
37
+
35
38
  def text_value_for_comments
36
39
  text_value.gsub(/[\r\n]/, " ")
37
40
  end
@@ -189,12 +192,12 @@ module LogStash; module Config; module AST
189
192
  # If any parent is a Plugin, this must be a codec.
190
193
 
191
194
  if attributes.elements.nil?
192
- return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect})" << (plugin_type == "codec" ? "" : "\n")
195
+ return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{source_meta.line}, #{source_meta.column})" << (plugin_type == "codec" ? "" : "\n")
193
196
  else
194
197
  settings = attributes.recursive_select(Attribute).collect(&:compile).reject(&:empty?)
195
198
 
196
199
  attributes_code = "LogStash::Util.hash_merge_many(#{settings.map { |c| "{ #{c} }" }.join(", ")})"
197
- return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{attributes_code})" << (plugin_type == "codec" ? "" : "\n")
200
+ return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{source_meta.line}, #{source_meta.column}, #{attributes_code})" << (plugin_type == "codec" ? "" : "\n")
198
201
  end
199
202
  end
200
203
 
@@ -211,7 +214,7 @@ module LogStash; module Config; module AST
211
214
  when "codec"
212
215
  settings = attributes.recursive_select(Attribute).collect(&:compile).reject(&:empty?)
213
216
  attributes_code = "LogStash::Util.hash_merge_many(#{settings.map { |c| "{ #{c} }" }.join(", ")})"
214
- return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{attributes_code})"
217
+ return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{source_meta.line}, #{source_meta.column}, #{attributes_code})"
215
218
  end
216
219
  end
217
220
 
@@ -64,9 +64,12 @@ module LogStash module Config
64
64
  alt_name = "module-#{module_name}"
65
65
  pipeline_id = alt_name
66
66
  module_settings.set("pipeline.id", pipeline_id)
67
+ LogStash::Modules::SettingsMerger.merge_cloud_settings(module_hash, module_settings)
67
68
  current_module.with_settings(module_hash)
68
69
  config_test = settings.get("config.test_and_exit")
69
- if !config_test
70
+ modul_setup = settings.get("modules_setup")
71
+ # Only import data if it's not a config test and --setup is true
72
+ if !config_test && modul_setup
70
73
  esclient = LogStash::ElasticsearchClient.build(module_hash)
71
74
  kbnclient = LogStash::Modules::KibanaClient.new(module_hash)
72
75
  esconnected = esclient.can_connect?
@@ -39,8 +39,11 @@ module LogStash class ElasticsearchClient
39
39
  @client_args[:ssl] = ssl_options
40
40
 
41
41
  username = @settings["var.elasticsearch.username"]
42
- password = @settings["var.elasticsearch.password"]
43
42
  if username
43
+ password = @settings["var.elasticsearch.password"]
44
+ if password.is_a?(LogStash::Util::Password)
45
+ password = password.value
46
+ end
44
47
  @client_args[:transport_options] = { :headers => { "Authorization" => 'Basic ' + Base64.encode64( "#{username}:#{password}" ).chomp } }
45
48
  end
46
49
 
@@ -3,6 +3,9 @@ require "logstash/errors"
3
3
  require "logstash/java_integration"
4
4
  require "logstash/config/cpu_core_strategy"
5
5
  require "logstash/settings"
6
+ require "logstash/util/cloud_setting_id"
7
+ require "logstash/util/cloud_setting_auth"
8
+ require "logstash/util/modules_setting_array"
6
9
  require "socket"
7
10
  require "stud/temporary"
8
11
 
@@ -20,8 +23,11 @@ module LogStash
20
23
  Setting::NullableString.new("path.config", nil, false),
21
24
  Setting::WritableDirectory.new("path.data", ::File.join(LogStash::Environment::LOGSTASH_HOME, "data")),
22
25
  Setting::NullableString.new("config.string", nil, false),
23
- Setting.new("modules.cli", Array, []),
24
- Setting.new("modules", Array, []),
26
+ Setting::Modules.new("modules.cli", LogStash::Util::ModulesSettingArray, []),
27
+ Setting::Modules.new("modules", LogStash::Util::ModulesSettingArray, []),
28
+ Setting::Modules.new("cloud.id", LogStash::Util::CloudSettingId),
29
+ Setting::Modules.new("cloud.auth",LogStash::Util::CloudSettingAuth),
30
+ Setting::Boolean.new("modules_setup", false),
25
31
  Setting::Boolean.new("config.test_and_exit", false),
26
32
  Setting::Boolean.new("config.reload.automatic", false),
27
33
  Setting::TimeValue.new("config.reload.interval", "3s"), # in seconds
@@ -14,6 +14,8 @@ module LogStash
14
14
  ]
15
15
  def_delegators :@filter, *DELEGATED_METHODS
16
16
 
17
+ attr_reader :id
18
+
17
19
  def initialize(logger, klass, metric, execution_context, plugin_args)
18
20
  @logger = logger
19
21
  @klass = klass
@@ -26,6 +28,9 @@ module LogStash
26
28
  @filter.execution_context = execution_context
27
29
 
28
30
  @metric_events = namespaced_metric.namespace(:events)
31
+ @metric_events_in = @metric_events.counter(:in)
32
+ @metric_events_out = @metric_events.counter(:out)
33
+ @metric_events_time = @metric_events.counter(:duration_in_millis)
29
34
  namespaced_metric.gauge(:name, config_name)
30
35
 
31
36
  # Not all the filters will do bufferings
@@ -37,19 +42,19 @@ module LogStash
37
42
  end
38
43
 
39
44
  def multi_filter(events)
40
- @metric_events.increment(:in, events.size)
45
+ @metric_events_in.increment(events.size)
41
46
 
42
- clock = @metric_events.time(:duration_in_millis)
47
+ start_time = java.lang.System.current_time_millis
43
48
  new_events = @filter.multi_filter(events)
44
- clock.stop
49
+ @metric_events_time.increment(java.lang.System.current_time_millis - start_time)
45
50
 
46
51
  # There is no guarantee in the context of filter
47
52
  # that EVENTS_INT == EVENTS_OUT, see the aggregates and
48
53
  # the split filter
49
54
  c = new_events.count { |event| !event.cancelled? }
50
- @metric_events.increment(:out, c) if c > 0
51
55
 
52
- return new_events
56
+ @metric_events_out.increment(c) if c > 0
57
+ new_events
53
58
  end
54
59
 
55
60
  private
@@ -61,7 +66,7 @@ module LogStash
61
66
 
62
67
  # Filter plugins that does buffering or spooling of events like the
63
68
  # `Logstash-filter-aggregates` can return `NIL` and will flush on the next flush ticks.
64
- @metric_events.increment(:out, new_events.size) if new_events && new_events.size > 0
69
+ @metric_events_out.increment(new_events.size) if new_events && new_events.size > 0
65
70
  new_events
66
71
  end
67
72
  end
@@ -33,11 +33,7 @@ module LogStash module Instrument
33
33
  #
34
34
  def push(namespaces_path, key, type, *metric_type_params)
35
35
  begin
36
- metric = @metric_store.fetch_or_store(namespaces_path, key) do
37
- LogStash::Instrument::MetricType.create(type, namespaces_path, key)
38
- end
39
-
40
- metric.execute(*metric_type_params)
36
+ get(namespaces_path, key, type).execute(*metric_type_params)
41
37
  rescue MetricStore::NamespacesExpectedError => e
42
38
  logger.error("Collector: Cannot record metric", :exception => e)
43
39
  rescue NameError => e
@@ -51,6 +47,12 @@ module LogStash module Instrument
51
47
  end
52
48
  end
53
49
 
50
+ def get(namespaces_path, key, type)
51
+ @metric_store.fetch_or_store(namespaces_path, key) do
52
+ LogStash::Instrument::MetricType.create(type, namespaces_path, key)
53
+ end
54
+ end
55
+
54
56
  # Snapshot the current Metric Store and return it immediately,
55
57
  # This is useful if you want to get access to the current metric store without
56
58
  # waiting for a periodic call.
@@ -51,14 +51,12 @@ module LogStash module Instrument
51
51
  # BUT. If the value is not present in the `@fast_lookup` the value will be inserted and we assume that we don't
52
52
  # have it in the `@metric_store` for structured search so we add it there too.
53
53
 
54
- # array.hash as the key since it is faster then using the array itself, see #7772
55
- fast_lookup_key = (namespaces.dup << key).hash
56
- value = @fast_lookup.get(fast_lookup_key)
54
+ value = @fast_lookup.get(namespaces.dup << key)
57
55
  if value.nil?
58
56
  value = block_given? ? yield(key) : default_value
59
- @fast_lookup.put(fast_lookup_key, value)
57
+ @fast_lookup.put(namespaces.dup << key, value)
60
58
  @structured_lookup_mutex.synchronize do
61
- # If we cannot find the value this mean we need to save it in the store.
59
+ # If we cannot find the value this mean we need to save it in the store.
62
60
  fetch_or_store_namespaces(namespaces).fetch_or_store(key, value)
63
61
  end
64
62
  end
@@ -165,7 +163,7 @@ module LogStash module Instrument
165
163
  end
166
164
 
167
165
  def has_metric?(*path)
168
- @fast_lookup[path.hash]
166
+ @fast_lookup[path]
169
167
  end
170
168
 
171
169
  # Return all the individuals Metric,
@@ -187,9 +185,8 @@ module LogStash module Instrument
187
185
  def prune(path)
188
186
  key_paths = key_paths(path).map(&:to_sym)
189
187
  @structured_lookup_mutex.synchronize do
190
- fetch_or_store_namespaces(key_paths).each do |key, v|
191
- @fast_lookup.delete((key_paths.dup << key).hash)
192
- end
188
+ keys_to_delete = @fast_lookup.keys.select {|namespace| (key_paths - namespace[0..-2]).empty? }
189
+ keys_to_delete.each {|k| @fast_lookup.delete(k) }
193
190
  delete_from_map(@store, key_paths)
194
191
  end
195
192
  end
@@ -43,6 +43,10 @@ module LogStash module Instrument
43
43
  def collector
44
44
  @metric.collector
45
45
  end
46
+
47
+ def counter(key)
48
+ collector.get(@namespace_name, key, :counter)
49
+ end
46
50
 
47
51
  def namespace(name)
48
52
  NamespacedMetric.new(metric, namespace_name + Array(name))
@@ -44,6 +44,10 @@ module LogStash module Instrument
44
44
  @metric.collector
45
45
  end
46
46
 
47
+ def counter(_)
48
+ ::LogStash::Instrument::NullMetric::NullGauge
49
+ end
50
+
47
51
  def namespace(name)
48
52
  NamespacedNullMetric.new(metric, namespace_name + Array(name))
49
53
  end
@@ -39,6 +39,10 @@ module LogStash module Instrument
39
39
  end
40
40
  end
41
41
 
42
+ def counter(_)
43
+ NullGauge
44
+ end
45
+
42
46
  def namespace(name)
43
47
  raise MetricNoNamespaceProvided if name.nil? || name.empty?
44
48
  NamespacedNullMetric.new(self, name)
@@ -49,6 +53,12 @@ module LogStash module Instrument
49
53
  end
50
54
 
51
55
  private
56
+
57
+ class NullGauge
58
+ def self.increment(_)
59
+ end
60
+ end
61
+
52
62
  # Null implementation of the internal timer class
53
63
  #
54
64
  # @see LogStash::Instrument::TimedExecution`
@@ -10,7 +10,12 @@ module LogStash module Instrument
10
10
  @events_metrics = metric.namespace([:stats, :events])
11
11
  @pipeline_metrics = metric.namespace([:stats, :pipelines, pipeline_id, :events])
12
12
  @plugin_events_metrics = metric.namespace([:stats, :pipelines, pipeline_id, :plugins, plugin_type, plugin.id.to_sym, :events])
13
-
13
+ @events_metrics_counter = @events_metrics.counter(:in)
14
+ @events_metrics_time = @events_metrics.counter(:queue_push_duration_in_millis)
15
+ @pipeline_metrics_counter = @pipeline_metrics.counter(:in)
16
+ @pipeline_metrics_time = @pipeline_metrics.counter(:queue_push_duration_in_millis)
17
+ @plugin_events_metrics_counter = @plugin_events_metrics.counter(:out)
18
+ @plugin_events_metrics_time = @plugin_events_metrics.counter(:queue_push_duration_in_millis)
14
19
  define_initial_metrics_values
15
20
  end
16
21
 
@@ -19,41 +24,45 @@ module LogStash module Instrument
19
24
  end
20
25
 
21
26
  def push(event)
22
- record_metric { @write_client.push(event) }
27
+ increment_counters(1)
28
+ start_time = java.lang.System.current_time_millis
29
+ result = @write_client.push(event)
30
+ report_execution_time(start_time)
31
+ result
23
32
  end
33
+
24
34
  alias_method(:<<, :push)
25
35
 
26
36
  def push_batch(batch)
27
- record_metric(batch.size) { @write_client.push_batch(batch) }
37
+ increment_counters(batch.size)
38
+ start_time = java.lang.System.current_time_millis
39
+ result = @write_client.push_batch(batch)
40
+ report_execution_time(start_time)
41
+ result
28
42
  end
29
43
 
30
44
  private
31
- def record_metric(size = 1)
32
- @events_metrics.increment(:in, size)
33
- @pipeline_metrics.increment(:in, size)
34
- @plugin_events_metrics.increment(:out, size)
35
-
36
- clock = @events_metrics.time(:queue_push_duration_in_millis)
37
45
 
38
- result = yield
39
-
40
- # Reuse the same values for all the endpoints to make sure we don't have skew in times.
41
- execution_time = clock.stop
42
-
43
- @pipeline_metrics.report_time(:queue_push_duration_in_millis, execution_time)
44
- @plugin_events_metrics.report_time(:queue_push_duration_in_millis, execution_time)
46
+ def increment_counters(size)
47
+ @events_metrics_counter.increment(size)
48
+ @pipeline_metrics_counter.increment(size)
49
+ @plugin_events_metrics_counter.increment(size)
50
+ end
45
51
 
46
- result
52
+ def report_execution_time(start_time)
53
+ execution_time = java.lang.System.current_time_millis - start_time
54
+ @events_metrics_time.increment(execution_time)
55
+ @pipeline_metrics_time.increment(execution_time)
56
+ @plugin_events_metrics_time.increment(execution_time)
47
57
  end
48
58
 
49
59
  def define_initial_metrics_values
50
- @events_metrics.increment(:in, 0)
51
- @pipeline_metrics.increment(:in, 0)
52
- @plugin_events_metrics.increment(:out, 0)
53
-
54
- @events_metrics.report_time(:queue_push_duration_in_millis, 0)
55
- @pipeline_metrics.report_time(:queue_push_duration_in_millis, 0)
56
- @plugin_events_metrics.report_time(:queue_push_duration_in_millis, 0)
60
+ @events_metrics_counter.increment(0)
61
+ @pipeline_metrics_counter.increment(0)
62
+ @plugin_events_metrics_counter.increment(0)
63
+ @events_metrics_time.increment(0)
64
+ @pipeline_metrics_time.increment(0)
65
+ @plugin_events_metrics_time.increment(0)
57
66
  end
58
67
  end
59
68
  end end