logstash-core 5.6.16-java → 6.0.0.alpha1-java

Files changed (156)
  1. checksums.yaml +4 -4
  2. data/gemspec_jars.rb +4 -7
  3. data/lib/logstash-core/logstash-core.jar +0 -0
  4. data/lib/logstash-core/version.rb +4 -8
  5. data/lib/logstash-core_jars.rb +12 -26
  6. data/lib/logstash/agent.rb +261 -246
  7. data/lib/logstash/api/commands/default_metadata.rb +1 -1
  8. data/lib/logstash/api/commands/hot_threads_reporter.rb +5 -11
  9. data/lib/logstash/api/commands/node.rb +3 -2
  10. data/lib/logstash/api/commands/stats.rb +3 -2
  11. data/lib/logstash/bootstrap_check/bad_java.rb +16 -0
  12. data/lib/logstash/bootstrap_check/bad_ruby.rb +12 -0
  13. data/lib/logstash/bootstrap_check/default_config.rb +17 -0
  14. data/lib/logstash/compiler.rb +38 -0
  15. data/lib/logstash/compiler/lscl.rb +566 -0
  16. data/lib/logstash/compiler/lscl/lscl_grammar.rb +3503 -0
  17. data/lib/logstash/compiler/treetop_monkeypatches.rb +92 -0
  18. data/lib/logstash/config/config_ast.rb +4 -82
  19. data/lib/logstash/config/mixin.rb +73 -41
  20. data/lib/logstash/config/pipeline_config.rb +48 -0
  21. data/lib/logstash/config/source/base.rb +16 -0
  22. data/lib/logstash/config/source/local.rb +215 -0
  23. data/lib/logstash/config/source_loader.rb +125 -0
  24. data/lib/logstash/converge_result.rb +103 -0
  25. data/lib/logstash/environment.rb +6 -19
  26. data/lib/logstash/errors.rb +2 -0
  27. data/lib/logstash/execution_context.rb +4 -7
  28. data/lib/logstash/filter_delegator.rb +6 -9
  29. data/lib/logstash/inputs/base.rb +0 -2
  30. data/lib/logstash/instrument/collector.rb +5 -7
  31. data/lib/logstash/instrument/metric_store.rb +12 -12
  32. data/lib/logstash/instrument/metric_type/mean.rb +0 -5
  33. data/lib/logstash/instrument/namespaced_metric.rb +0 -4
  34. data/lib/logstash/instrument/namespaced_null_metric.rb +0 -4
  35. data/lib/logstash/instrument/null_metric.rb +0 -10
  36. data/lib/logstash/instrument/periodic_poller/cgroup.rb +85 -168
  37. data/lib/logstash/instrument/periodic_poller/jvm.rb +5 -5
  38. data/lib/logstash/instrument/periodic_poller/pq.rb +3 -7
  39. data/lib/logstash/instrument/periodic_pollers.rb +1 -3
  40. data/lib/logstash/instrument/wrapped_write_client.rb +24 -33
  41. data/lib/logstash/logging/logger.rb +15 -47
  42. data/lib/logstash/namespace.rb +0 -1
  43. data/lib/logstash/output_delegator.rb +5 -7
  44. data/lib/logstash/outputs/base.rb +0 -2
  45. data/lib/logstash/pipeline.rb +159 -87
  46. data/lib/logstash/pipeline_action.rb +13 -0
  47. data/lib/logstash/pipeline_action/base.rb +29 -0
  48. data/lib/logstash/pipeline_action/create.rb +47 -0
  49. data/lib/logstash/pipeline_action/reload.rb +48 -0
  50. data/lib/logstash/pipeline_action/stop.rb +23 -0
  51. data/lib/logstash/plugin.rb +0 -1
  52. data/lib/logstash/plugins/hooks_registry.rb +6 -0
  53. data/lib/logstash/plugins/registry.rb +0 -1
  54. data/lib/logstash/program.rb +14 -0
  55. data/lib/logstash/queue_factory.rb +5 -1
  56. data/lib/logstash/runner.rb +58 -80
  57. data/lib/logstash/settings.rb +3 -27
  58. data/lib/logstash/state_resolver.rb +41 -0
  59. data/lib/logstash/util/java_version.rb +6 -0
  60. data/lib/logstash/util/safe_uri.rb +12 -148
  61. data/lib/logstash/util/thread_dump.rb +4 -7
  62. data/lib/logstash/util/wrapped_acked_queue.rb +36 -39
  63. data/lib/logstash/util/wrapped_synchronous_queue.rb +29 -39
  64. data/lib/logstash/version.rb +10 -8
  65. data/locales/en.yml +3 -54
  66. data/logstash-core.gemspec +8 -35
  67. data/spec/{logstash/api/modules → api/lib/api}/logging_spec.rb +10 -1
  68. data/spec/{logstash/api/modules → api/lib/api}/node_plugins_spec.rb +2 -1
  69. data/spec/{logstash/api/modules → api/lib/api}/node_spec.rb +3 -3
  70. data/spec/{logstash/api/modules → api/lib/api}/node_stats_spec.rb +3 -7
  71. data/spec/{logstash/api/modules → api/lib/api}/plugins_spec.rb +3 -4
  72. data/spec/{logstash/api/modules → api/lib/api}/root_spec.rb +2 -2
  73. data/spec/api/lib/api/support/resource_dsl_methods.rb +87 -0
  74. data/spec/{logstash/api/commands/stats_spec.rb → api/lib/commands/stats.rb} +2 -7
  75. data/spec/{logstash/api → api/lib}/errors_spec.rb +1 -1
  76. data/spec/{logstash/api → api/lib}/rack_app_spec.rb +0 -0
  77. data/spec/api/spec_helper.rb +106 -0
  78. data/spec/logstash/agent/converge_spec.rb +286 -0
  79. data/spec/logstash/agent/metrics_spec.rb +244 -0
  80. data/spec/logstash/agent_spec.rb +213 -225
  81. data/spec/logstash/compiler/compiler_spec.rb +584 -0
  82. data/spec/logstash/config/config_ast_spec.rb +8 -47
  83. data/spec/logstash/config/mixin_spec.rb +2 -42
  84. data/spec/logstash/config/pipeline_config_spec.rb +75 -0
  85. data/spec/logstash/config/source/local_spec.rb +395 -0
  86. data/spec/logstash/config/source_loader_spec.rb +122 -0
  87. data/spec/logstash/converge_result_spec.rb +179 -0
  88. data/spec/logstash/event_spec.rb +0 -66
  89. data/spec/logstash/execution_context_spec.rb +8 -12
  90. data/spec/logstash/filter_delegator_spec.rb +12 -24
  91. data/spec/logstash/inputs/base_spec.rb +7 -5
  92. data/spec/logstash/instrument/periodic_poller/cgroup_spec.rb +92 -225
  93. data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +1 -1
  94. data/spec/logstash/instrument/periodic_poller/os_spec.rb +32 -29
  95. data/spec/logstash/instrument/wrapped_write_client_spec.rb +33 -33
  96. data/spec/logstash/legacy_ruby_event_spec.rb +13 -4
  97. data/spec/logstash/output_delegator_spec.rb +11 -20
  98. data/spec/logstash/outputs/base_spec.rb +7 -5
  99. data/spec/logstash/pipeline_action/create_spec.rb +83 -0
  100. data/spec/logstash/pipeline_action/reload_spec.rb +83 -0
  101. data/spec/logstash/pipeline_action/stop_spec.rb +37 -0
  102. data/spec/logstash/pipeline_pq_file_spec.rb +1 -1
  103. data/spec/logstash/pipeline_spec.rb +81 -137
  104. data/spec/logstash/plugin_spec.rb +2 -1
  105. data/spec/logstash/plugins/hooks_registry_spec.rb +6 -0
  106. data/spec/logstash/queue_factory_spec.rb +13 -1
  107. data/spec/logstash/runner_spec.rb +29 -140
  108. data/spec/logstash/settings/writable_directory_spec.rb +10 -13
  109. data/spec/logstash/settings_spec.rb +0 -91
  110. data/spec/logstash/state_resolver_spec.rb +156 -0
  111. data/spec/logstash/timestamp_spec.rb +2 -6
  112. data/spec/logstash/util/java_version_spec.rb +22 -0
  113. data/spec/logstash/util/safe_uri_spec.rb +0 -56
  114. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +22 -0
  115. data/spec/support/helpers.rb +9 -11
  116. data/spec/support/matchers.rb +96 -6
  117. data/spec/support/mocks_classes.rb +80 -0
  118. data/spec/support/shared_contexts.rb +2 -27
  119. metadata +100 -149
  120. data/lib/logstash/config/loader.rb +0 -107
  121. data/lib/logstash/config/modules_common.rb +0 -103
  122. data/lib/logstash/config/source/modules.rb +0 -55
  123. data/lib/logstash/config/string_escape.rb +0 -27
  124. data/lib/logstash/dependency_report.rb +0 -131
  125. data/lib/logstash/dependency_report_runner.rb +0 -17
  126. data/lib/logstash/elasticsearch_client.rb +0 -142
  127. data/lib/logstash/instrument/global_metrics.rb +0 -13
  128. data/lib/logstash/instrument/periodic_poller/dlq.rb +0 -24
  129. data/lib/logstash/modules/cli_parser.rb +0 -74
  130. data/lib/logstash/modules/elasticsearch_config.rb +0 -22
  131. data/lib/logstash/modules/elasticsearch_importer.rb +0 -37
  132. data/lib/logstash/modules/elasticsearch_resource.rb +0 -10
  133. data/lib/logstash/modules/file_reader.rb +0 -36
  134. data/lib/logstash/modules/kibana_base.rb +0 -24
  135. data/lib/logstash/modules/kibana_client.rb +0 -124
  136. data/lib/logstash/modules/kibana_config.rb +0 -105
  137. data/lib/logstash/modules/kibana_dashboards.rb +0 -36
  138. data/lib/logstash/modules/kibana_importer.rb +0 -17
  139. data/lib/logstash/modules/kibana_resource.rb +0 -10
  140. data/lib/logstash/modules/kibana_settings.rb +0 -40
  141. data/lib/logstash/modules/logstash_config.rb +0 -120
  142. data/lib/logstash/modules/resource_base.rb +0 -38
  143. data/lib/logstash/modules/scaffold.rb +0 -52
  144. data/lib/logstash/modules/settings_merger.rb +0 -23
  145. data/lib/logstash/modules/util.rb +0 -17
  146. data/lib/logstash/util/dead_letter_queue_manager.rb +0 -61
  147. data/lib/logstash/util/environment_variables.rb +0 -43
  148. data/spec/logstash/config/loader_spec.rb +0 -38
  149. data/spec/logstash/config/string_escape_spec.rb +0 -24
  150. data/spec/logstash/instrument/periodic_poller/dlq_spec.rb +0 -17
  151. data/spec/logstash/modules/logstash_config_spec.rb +0 -56
  152. data/spec/logstash/modules/scaffold_spec.rb +0 -234
  153. data/spec/logstash/pipeline_dlq_commit_spec.rb +0 -109
  154. data/spec/logstash/settings/splittable_string_array_spec.rb +0 -51
  155. data/spec/logstash/util/wrapped_acked_queue_spec.rb +0 -49
  156. data/versions-gem-copy.yml +0 -12
data/lib/logstash/config/source_loader.rb
@@ -0,0 +1,125 @@
+ # encoding: utf-8
+ require "logstash/config/source/local"
+ require "logstash/errors"
+ require "thread"
+ require "set"
+
+ module LogStash module Config
+ class SourceLoader
+ class SuccessfulFetch
+ attr_reader :response
+
+ def initialize(response)
+ @response = response
+ end
+
+ def success?
+ true
+ end
+ end
+
+ class FailedFetch
+ attr_reader :error
+
+ def initialize(error)
+ @error = error
+ end
+
+ def success?
+ false
+ end
+ end
+
+ include LogStash::Util::Loggable
+
+ def initialize(settings = LogStash::SETTINGS)
+ @sources_lock = Mutex.new
+ @sources = Set.new
+ @settings = settings
+ end
+
+ # This return a ConfigLoader object that will
+ # abstract the call to the different sources and will return multiples pipeline
+ def fetch
+ sources_loaders = []
+
+ sources do |source|
+ sources_loaders << source if source.match?
+ end
+
+ if sources_loaders.empty?
+ # This shouldn't happen with the settings object or with any external plugins.
+ # but lets add a guard so we fail fast.
+ raise LogStash::InvalidSourceLoaderSettingError, "Can't find an appropriate config loader with current settings"
+ else
+ begin
+ pipeline_configs = sources_loaders
+ .collect { |source| source.pipeline_configs }
+ .compact
+ .flatten
+
+ duplicate_ids = find_duplicate_ids(pipeline_configs)
+
+ if duplicate_ids.any?
+ logger.debug("Fetching pipelines with duplicate ids", duplicate_ids.each { |k, v| v.collect(&:pipeline_id) } )
+ return FailedFetch.new("Found duplicate ids in your source: #{duplicate_ids.keys.sort.join(", ")}")
+ end
+
+ if config_debug?
+ pipeline_configs.each { |pipeline_config| pipeline_config.display_debug_information }
+ end
+
+ if pipeline_configs.empty?
+ logger.error("No configuration found in the configured sources.")
+ end
+
+ SuccessfulFetch.new(pipeline_configs)
+ rescue => e
+ logger.error("Could not fetch all the sources", :exception => e.class, :message => e.message, :backtrace => e.backtrace)
+ FailedFetch.new(e.message)
+ end
+ end
+ end
+
+ def sources
+ @sources_lock.synchronize do
+ if block_given?
+ @sources.each do |source|
+ yield source
+ end
+ else
+ @sources
+ end
+ end
+ end
+
+ def remove_source(klass)
+ @sources_lock.synchronize do
+ @sources.delete_if { |source| source == klass || source.is_a?(klass) }
+ end
+ end
+
+ def configure_sources(new_sources)
+ new_sources = Array(new_sources).to_set
+ logger.debug("Configure sources", :sources => new_sources.collect(&:to_s))
+ @sources_lock.synchronize { @sources = new_sources }
+ end
+
+ def add_source(new_source)
+ logger.debug("Adding source", :source => new_source.to_s)
+ @sources_lock.synchronize { @sources << new_source }
+ end
+
+ private
+ def config_debug?
+ @settings.get_value("config.debug") && logger.debug?
+ end
+
+ def find_duplicate_ids(pipeline_configs)
+ pipeline_configs.group_by { |pipeline_config| pipeline_config.pipeline_id }
+ .select { |group, pipeline_configs| pipeline_configs.size > 1 }
+ end
+ end
+
+ SOURCE_LOADER = SourceLoader.new
+ end end
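
The new SourceLoader only expects its sources to answer `match?` and `pipeline_configs`, and `fetch` wraps the outcome in `SuccessfulFetch`/`FailedFetch`. The sketch below is illustrative only: `StaticSource` and `my_pipeline_config` are invented names used to show how a caller such as the agent could drive this API, not code from logstash-core.

    # Hypothetical source used only for this example; logstash-core ships
    # LogStash::Config::Source::Local instead.
    class StaticSource
      def initialize(pipeline_configs)
        @pipeline_configs = pipeline_configs
      end

      # A source opts in when the current settings are relevant to it.
      def match?
        true
      end

      # Returns the pipeline config objects this source knows about.
      attr_reader :pipeline_configs
    end

    loader = LogStash::Config::SourceLoader.new
    loader.configure_sources(StaticSource.new([my_pipeline_config])) # my_pipeline_config is a placeholder

    result = loader.fetch
    if result.success?
      result.response.each { |pipeline_config| puts pipeline_config.pipeline_id }
    else
      warn "Config fetch failed: #{result.error}"
    end
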
data/lib/logstash/converge_result.rb
@@ -0,0 +1,103 @@
+ # encoding: utf-8
+ require "logstash/errors"
+
+ module LogStash
+ # This class allow us to keep track and uniform all the return values from the
+ # action task
+ class ConvergeResult
+ class ActionResult
+ attr_reader :executed_at
+
+ def initialize
+ @executed_at = LogStash::Timestamp.now
+ end
+
+ # Until all the action have more granularity in the validation
+ # or execution we make the ConvergeResult works with primitives and exceptions
+ def self.create(action, action_result)
+ if action_result.is_a?(ActionResult)
+ action_result
+ elsif action_result.is_a?(Exception)
+ FailedAction.from_exception(action_result)
+ elsif action_result == true
+ SuccessfulAction.new
+ elsif action_result == false
+ FailedAction.from_action(action, action_result)
+ else
+ raise LogStash::Error, "Don't know how to handle `#{action_result.class}` for `#{action}`"
+ end
+ end
+ end
+
+ class FailedAction < ActionResult
+ attr_reader :message, :backtrace
+
+ def initialize(message, backtrace = nil)
+ super()
+
+ @message = message
+ @backtrace = backtrace
+ end
+
+ def self.from_exception(exception)
+ FailedAction.new(exception.message, exception.backtrace)
+ end
+
+ def self.from_action(action, action_result)
+ FailedAction.new("Could not execute action: #{action}, action_result: #{action_result}")
+ end
+
+ def successful?
+ false
+ end
+ end
+
+ class SuccessfulAction < ActionResult
+ def successful?
+ true
+ end
+ end
+
+ def initialize(expected_actions_count)
+ @expected_actions_count = expected_actions_count
+ @actions = {}
+ end
+
+ def add(action, action_result)
+ @actions[action] = ActionResult.create(action, action_result)
+ end
+
+ def failed_actions
+ filter_by_successful_state(false)
+ end
+
+ def successful_actions
+ filter_by_successful_state(true)
+ end
+
+ def complete?
+ total == @expected_actions_count
+ end
+
+ def success?
+ failed_actions.empty? && complete?
+ end
+
+ def fails_count
+ failed_actions.size
+ end
+
+ def success_count
+ successful_actions.size
+ end
+
+ def total
+ @actions.size
+ end
+
+ private
+ def filter_by_successful_state(predicate)
+ @actions.select { |action, action_result| action_result.successful? == predicate }
+ end
+ end
+ end
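
To make the intent of the class above concrete, here is a rough, illustrative sketch of how an agent-style caller could aggregate action outcomes; the `actions` collection and its `execute` method are stand-ins, not logstash-core API.

    converge_result = LogStash::ConvergeResult.new(actions.size) # `actions` is a placeholder collection

    actions.each do |action|
      begin
        # `execute` is assumed to return true, false, or an ActionResult
        converge_result.add(action, action.execute)
      rescue => e
        converge_result.add(action, e) # exceptions become FailedAction entries
      end
    end

    converge_result.complete?      # true once every expected action has reported back
    converge_result.success?       # true only when complete and nothing failed
    converge_result.failed_actions # => { action => FailedAction } for error reporting
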
data/lib/logstash/environment.rb
@@ -20,21 +20,18 @@ module LogStash
  Setting::NullableString.new("path.config", nil, false),
  Setting::WritableDirectory.new("path.data", ::File.join(LogStash::Environment::LOGSTASH_HOME, "data")),
  Setting::NullableString.new("config.string", nil, false),
- Setting.new("modules.cli", Array, []),
- Setting.new("modules", Array, []),
- Setting::Boolean.new("modules_setup", false),
  Setting::Boolean.new("config.test_and_exit", false),
  Setting::Boolean.new("config.reload.automatic", false),
- Setting::Boolean.new("config.support_escapes", false),
  Setting::Numeric.new("config.reload.interval", 3), # in seconds
  Setting::Boolean.new("metric.collect", true),
  Setting::String.new("pipeline.id", "main"),
- Setting::Boolean.new("pipeline.system", false),
+ Setting::Boolean.new("pipeline.system", false),
  Setting::PositiveInteger.new("pipeline.workers", LogStash::Config::CpuCoreStrategy.maximum),
  Setting::PositiveInteger.new("pipeline.output.workers", 1),
  Setting::PositiveInteger.new("pipeline.batch.size", 125),
  Setting::Numeric.new("pipeline.batch.delay", 5), # in milliseconds
  Setting::Boolean.new("pipeline.unsafe_shutdown", false),
+ Setting::Boolean.new("pipeline.reloadable", true),
  Setting.new("path.plugins", Array, []),
  Setting::NullableString.new("interactive", nil, false),
  Setting::Boolean.new("config.debug", false),
@@ -45,7 +42,7 @@ module LogStash
  Setting::String.new("http.host", "127.0.0.1"),
  Setting::PortRange.new("http.port", 9600..9700),
  Setting::String.new("http.environment", "production"),
- Setting::String.new("queue.type", "memory", true, ["persisted", "memory"]),
+ Setting::String.new("queue.type", "memory", true, ["persisted", "memory", "memory_acked"]),
  Setting::Boolean.new("queue.drain", false),
  Setting::Bytes.new("queue.page_capacity", "250mb"),
  Setting::Bytes.new("queue.max_bytes", "1024mb"),
@@ -53,8 +50,6 @@ module LogStash
  Setting::Numeric.new("queue.checkpoint.acks", 1024), # 0 is unlimited
  Setting::Numeric.new("queue.checkpoint.writes", 1024), # 0 is unlimited
  Setting::Numeric.new("queue.checkpoint.interval", 1000), # 0 is no time-based checkpointing
- Setting::Boolean.new("dead_letter_queue.enable", false),
- Setting::Bytes.new("dead_letter_queue.max_bytes", "1024mb"),
  Setting::TimeValue.new("slowlog.threshold.warn", "-1"),
  Setting::TimeValue.new("slowlog.threshold.info", "-1"),
  Setting::TimeValue.new("slowlog.threshold.debug", "-1"),
@@ -64,21 +59,13 @@ module LogStash
  # Compute the default queue path based on `path.data`
  default_queue_file_path = ::File.join(SETTINGS.get("path.data"), "queue")
  SETTINGS.register Setting::WritableDirectory.new("path.queue", default_queue_file_path)
- # Compute the default dead_letter_queue path based on `path.data`
- default_dlq_file_path = ::File.join(SETTINGS.get("path.data"), "dead_letter_queue")
- SETTINGS.register Setting::WritableDirectory.new("path.dead_letter_queue", default_dlq_file_path)
-
+
  SETTINGS.on_post_process do |settings|
  # If the data path is overridden but the queue path isn't recompute the queue path
  # We need to do this at this stage because of the weird execution order
  # our monkey-patched Clamp follows
- if settings.set?("path.data")
- if !settings.set?("path.queue")
- settings.set_value("path.queue", ::File.join(settings.get("path.data"), "queue"))
- end
- if !settings.set?("path.dead_letter_queue")
- settings.set_value("path.dead_letter_queue", ::File.join(settings.get("path.data"), "dead_letter_queue"))
- end
+ if settings.set?("path.data") && !settings.set?("path.queue")
+ settings.set_value("path.queue", ::File.join(settings.get("path.data"), "queue"))
  end
  end

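
For orientation, the hunks above tighten the settings registered at boot: the module and dead letter queue settings are absent in this version, `queue.type` additionally accepts `memory_acked`, and `pipeline.reloadable` is new. A small, illustrative sketch against that registry (values chosen arbitrarily; only `get`, `set_value` and `set?` from the hunk above are used):

    settings = LogStash::SETTINGS

    settings.set_value("queue.type", "memory_acked")  # value newly allowed in this version
    settings.set_value("pipeline.reloadable", true)   # setting introduced in this version

    settings.get("queue.type")   # => "memory_acked"
    settings.set?("path.data")   # => true only if the default was overridden
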
data/lib/logstash/errors.rb
@@ -6,8 +6,10 @@ module LogStash
  class PluginLoadingError < Error; end
  class ShutdownSignal < StandardError; end
  class PluginNoVersionError < Error; end
+ class BootstrapCheckError < Error; end

  class Bug < Error; end
  class ThisMethodWasRemoved < Bug; end
  class ConfigLoadingError < Error; end
+ class InvalidSourceLoaderSettingError < Error; end
  end
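
Both new error classes are wired into code added elsewhere in this release: `InvalidSourceLoaderSettingError` is raised by `SourceLoader#fetch` when no registered source matches the settings, and `BootstrapCheckError` is presumably what the new `bootstrap_check` classes raise when a check fails. An illustrative rescue:

    begin
      LogStash::Config::SOURCE_LOADER.fetch
    rescue LogStash::InvalidSourceLoaderSettingError => e
      warn "No config source matched the current settings: #{e.message}"
    end
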
data/lib/logstash/execution_context.rb
@@ -1,16 +1,13 @@
  # encoding: utf-8
- require "logstash/util/dead_letter_queue_manager"
  module LogStash
  class ExecutionContext
- attr_reader :pipeline, :dlq_writer
+ attr_reader :pipeline, :agent

- def initialize(pipeline, plugin_id, plugin_type, dlq_writer)
+ def initialize(pipeline, agent)
  @pipeline = pipeline
- @plugin_id = plugin_id
- @plugin_type = plugin_type
- @dlq_writer = LogStash::Util::PluginDeadLetterQueueWriter.new(dlq_writer, @plugin_id, @plugin_type)
+ @agent = agent
  end
-
+
  def pipeline_id
  @pipeline.pipeline_id
  end
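
The constructor change above mostly affects code (typically specs) that builds an `ExecutionContext` by hand; a before/after sketch with placeholder `pipeline` and `agent` objects:

    # 5.6.x shape, removed by this diff (the context carried per-plugin DLQ state):
    #   context = LogStash::ExecutionContext.new(pipeline, plugin_id, plugin_type, dlq_writer)

    # 6.0.0.alpha1 shape shown above:
    context = LogStash::ExecutionContext.new(pipeline, agent)
    context.pipeline_id # still delegates to the pipeline
    context.agent       # new accessor, replacing #dlq_writer
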
data/lib/logstash/filter_delegator.rb
@@ -26,9 +26,6 @@ module LogStash
  @filter.execution_context = execution_context

  @metric_events = namespaced_metric.namespace(:events)
- @metric_events_in = @metric_events.counter(:in)
- @metric_events_out = @metric_events.counter(:out)
- @metric_events_time = @metric_events.counter(:duration_in_millis)
  namespaced_metric.gauge(:name, config_name)

  # Not all the filters will do bufferings
@@ -40,19 +37,19 @@ module LogStash
  end

  def multi_filter(events)
- @metric_events_in.increment(events.size)
+ @metric_events.increment(:in, events.size)

- start_time = java.lang.System.current_time_millis
+ clock = @metric_events.time(:duration_in_millis)
  new_events = @filter.multi_filter(events)
- @metric_events_time.increment(java.lang.System.current_time_millis - start_time)
+ clock.stop

  # There is no guarantee in the context of filter
  # that EVENTS_INT == EVENTS_OUT, see the aggregates and
  # the split filter
  c = new_events.count { |event| !event.cancelled? }
+ @metric_events.increment(:out, c) if c > 0

- @metric_events_out.increment(c) if c > 0
- new_events
+ return new_events
  end

  private
@@ -64,7 +61,7 @@ module LogStash

  # Filter plugins that does buffering or spooling of events like the
  # `Logstash-filter-aggregates` can return `NIL` and will flush on the next flush ticks.
- @metric_events_out.increment(new_events.size) if new_events && new_events.size > 0
+ @metric_events.increment(:out, new_events.size) if new_events && new_events.size > 0
  new_events
  end
  end
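
Instead of caching counter objects at construction time, the delegator now talks to the `:events` namespace directly and uses a timed-execution clock for the duration metric. A hedged sketch of that pattern, where `events_namespace` stands in for whatever `namespaced_metric.namespace(:events)` returns and `run_filter` is a placeholder:

    events_namespace.increment(:in, events.size)        # events entering the filter

    clock = events_namespace.time(:duration_in_millis)  # start a timed execution
    new_events = run_filter(events)                      # placeholder for @filter.multi_filter(events)
    clock.stop                                           # record the elapsed milliseconds

    emitted = new_events.count { |event| !event.cancelled? }
    events_namespace.increment(:out, emitted) if emitted > 0
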
data/lib/logstash/inputs/base.rb
@@ -105,8 +105,6 @@ class LogStash::Inputs::Base < LogStash::Plugin
  super
  # There is no easy way to propage an instance variable into the codec, because the codec
  # are created at the class level
- # TODO(talevy): Codecs should have their own execution_context, for now they will inherit their
- # parent plugin's
  @codec.execution_context = context
  context
  end
data/lib/logstash/instrument/collector.rb
@@ -33,7 +33,11 @@ module LogStash module Instrument
  #
  def push(namespaces_path, key, type, *metric_type_params)
  begin
- get(namespaces_path, key, type).execute(*metric_type_params)
+ metric = @metric_store.fetch_or_store(namespaces_path, key) do
+ LogStash::Instrument::MetricType.create(type, namespaces_path, key)
+ end
+
+ metric.execute(*metric_type_params)
  rescue MetricStore::NamespacesExpectedError => e
  logger.error("Collector: Cannot record metric", :exception => e)
  rescue NameError => e
@@ -47,12 +51,6 @@ module LogStash module Instrument
  end
  end

- def get(namespaces_path, key, type)
- @metric_store.fetch_or_store(namespaces_path, key) do
- LogStash::Instrument::MetricType.create(type, namespaces_path, key)
- end
- end
-
  # Snapshot the current Metric Store and return it immediately,
  # This is useful if you want to get access to the current metric store without
  # waiting for a periodic call.
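
`push` is the funnel the public metric API goes through; with the inlined `fetch_or_store` above, the first call for a given namespace and key creates the metric and later calls reuse it. A minimal, illustrative call (the namespace, key and value are arbitrary, and it assumes the collector can be instantiated standalone):

    collector = LogStash::Instrument::Collector.new

    # Creates a :counter under [:stats, :events] on first use, then executes :increment on it.
    collector.push([:stats, :events], :in, :counter, :increment, 42)
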
data/lib/logstash/instrument/metric_store.rb
@@ -41,26 +41,26 @@ module LogStash module Instrument
  # @param [Symbol] The metric key
  # @return [Object] Return the new_value of the retrieve object in the tree
  def fetch_or_store(namespaces, key, default_value = nil)
+ provided_value = block_given? ? yield(key) : default_value

  # We first check in the `@fast_lookup` store to see if we have already see that metrics before,
  # This give us a `o(1)` access, which is faster than searching through the structured
  # data store (Which is a `o(n)` operation where `n` is the number of element in the namespace and
- # the value of the key). If the metric is already present in the `@fast_lookup`, then that value is sent
- # back directly to the caller.
+ # the value of the key). If the metric is already present in the `@fast_lookup`, the call to
+ # `#put_if_absent` will return the value. This value is send back directly to the caller.
  #
- # BUT. If the value is not present in the `@fast_lookup` the value will be inserted and we assume that we don't
+ # BUT. If the value is not present in the `@fast_lookup` the value will be inserted and
+ # `#puf_if_absent` will return nil. With this returned value of nil we assume that we don't
  # have it in the `@metric_store` for structured search so we add it there too.
-
- value = @fast_lookup.get(namespaces.dup << key)
- if value.nil?
- value = block_given? ? yield(key) : default_value
- @fast_lookup.put(namespaces.dup << key, value)
+ if found_value = @fast_lookup.put_if_absent(namespaces.dup << key, provided_value)
+ return found_value
+ else
  @structured_lookup_mutex.synchronize do
- # If we cannot find the value this mean we need to save it in the store.
- fetch_or_store_namespaces(namespaces).fetch_or_store(key, value)
+ # If we cannot find the value this mean we need to save it in the store.
+ fetch_or_store_namespaces(namespaces).fetch_or_store(key, provided_value)
  end
+ return provided_value
  end
- return value;
  end

  # This method allow to retrieve values for a specific path,
@@ -222,7 +222,7 @@ module LogStash module Instrument
  key_candidates = extract_filter_keys(key_paths.shift)

  key_candidates.each do |key_candidate|
- raise MetricNotFound, "For path: #{key_candidate}" if map[key_candidate].nil?
+ raise MetricNotFound, "For path: #{key_candidate}. Map keys: #{map.keys}" if map[key_candidate].nil?

  if key_paths.empty? # End of the user requested path
  if map[key_candidate].is_a?(Concurrent::Map)
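
The rewritten `fetch_or_store` in the first hunk above leans on `Concurrent::Map#put_if_absent`, which stores the value and returns `nil` when the key was absent, and returns the existing value (ignoring the new one) when it was already there. A standalone sketch of that primitive, independent of the metric store:

    require "concurrent"

    map = Concurrent::Map.new

    map.put_if_absent(:answer, 42)  # => nil, the value was inserted
    map.put_if_absent(:answer, 99)  # => 42, the existing value wins; 99 is discarded

    map[:answer]                    # => 42
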