logstash-core 6.0.0.beta1-java → 6.0.0.beta2-java
- checksums.yaml +4 -4
- data/lib/logstash-core/logstash-core.jar +0 -0
- data/lib/logstash-core/version.rb +1 -1
- data/lib/logstash/agent.rb +0 -16
- data/lib/logstash/compiler/lscl.rb +2 -53
- data/lib/logstash/compiler/lscl/helpers.rb +55 -0
- data/lib/logstash/config/config_ast.rb +6 -3
- data/lib/logstash/config/modules_common.rb +4 -1
- data/lib/logstash/elasticsearch_client.rb +4 -1
- data/lib/logstash/environment.rb +8 -2
- data/lib/logstash/filter_delegator.rb +11 -6
- data/lib/logstash/instrument/collector.rb +7 -5
- data/lib/logstash/instrument/metric_store.rb +6 -9
- data/lib/logstash/instrument/namespaced_metric.rb +4 -0
- data/lib/logstash/instrument/namespaced_null_metric.rb +4 -0
- data/lib/logstash/instrument/null_metric.rb +10 -0
- data/lib/logstash/instrument/wrapped_write_client.rb +33 -24
- data/lib/logstash/modules/kibana_client.rb +5 -3
- data/lib/logstash/modules/kibana_config.rb +1 -4
- data/lib/logstash/modules/scaffold.rb +2 -0
- data/lib/logstash/modules/settings_merger.rb +52 -4
- data/lib/logstash/output_delegator.rb +7 -5
- data/lib/logstash/pipeline.rb +37 -14
- data/lib/logstash/pipeline_settings.rb +2 -0
- data/lib/logstash/runner.rb +14 -2
- data/lib/logstash/settings.rb +26 -0
- data/lib/logstash/util/cloud_setting_auth.rb +29 -0
- data/lib/logstash/util/cloud_setting_id.rb +41 -0
- data/lib/logstash/util/modules_setting_array.rb +28 -0
- data/lib/logstash/util/wrapped_acked_queue.rb +5 -6
- data/lib/logstash/util/wrapped_synchronous_queue.rb +14 -9
- data/lib/logstash/version.rb +1 -1
- data/locales/en.yml +16 -0
- data/spec/logstash/agent/converge_spec.rb +6 -7
- data/spec/logstash/config/source/multi_local_spec.rb +11 -0
- data/spec/logstash/filter_delegator_spec.rb +20 -8
- data/spec/logstash/legacy_ruby_event_spec.rb +4 -4
- data/spec/logstash/modules/scaffold_spec.rb +2 -7
- data/spec/logstash/modules/settings_merger_spec.rb +111 -0
- data/spec/logstash/output_delegator_spec.rb +15 -5
- data/spec/logstash/pipeline_spec.rb +39 -7
- data/spec/logstash/runner_spec.rb +4 -1
- data/spec/logstash/settings/modules_spec.rb +115 -0
- metadata +10 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5c353e53b9ba28be27a573084a75d1dc12d1257e2192057121d5e1c41de195bc
+  data.tar.gz: 47790a7e594369b8d2c3d1ebfcde2d7fef023aaaf3afb0b747bcc75124c35547
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d7acc940476d31aca243c82ea0bc693324e8cdbbeeb7c8abbd06210cd5ad82f143499653b0952696a027ae0b60557518164409a36ff7a4b3ff2d6e4c536dcbb6
+  data.tar.gz: 8848ec115ba9abfa1f3065e1ee22a63b50e7172e4aea049ebd6a37193e6dd06ee8f4b257863ab625939416a75512dffeaf7f1ad1cd9ef88d8c16db344b1a62a8
data/lib/logstash-core/logstash-core.jar
CHANGED
Binary file
data/lib/logstash/agent.rb
CHANGED
@@ -190,13 +190,6 @@ class LogStash::Agent
     converge_result
   end
 
-  def force_shutdown!
-    stop_collecting_metrics
-    stop_webserver
-    transition_to_stopped
-    force_shutdown_pipelines!
-  end
-
   def id
     return @id if @id
 
@@ -427,15 +420,6 @@ class LogStash::Agent
     @collect_metric
   end
 
-  def force_shutdown_pipelines!
-    with_pipelines do |pipelines|
-      pipelines.each do |_, pipeline|
-        # TODO(ph): should it be his own action?
-        pipeline.force_shutdown!
-      end
-    end
-  end
-
   def shutdown_pipelines
     logger.debug("Shutting down all pipelines", :pipelines_count => pipelines_count)
 
data/lib/logstash/compiler/lscl.rb
CHANGED
@@ -2,6 +2,7 @@
 require 'logstash/errors'
 require "treetop"
 require "logstash/compiler/treetop_monkeypatches"
+require "logstash/compiler/lscl/helpers"
 require "logstash/config/string_escape"
 
 java_import org.logstash.config.ir.DSL
@@ -10,59 +11,7 @@ java_import org.logstash.common.SourceWithMetadata
 module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSCL; module AST
   PROCESS_ESCAPE_SEQUENCES = :process_escape_sequences
 
-
-  module Helpers
-    def source_meta
-      line, column = line_and_column
-      org.logstash.common.SourceWithMetadata.new(base_protocol, base_id, line, column, self.text_value)
-    end
-
-    def base_source_with_metadata=(value)
-      set_meta(:base_source_with_metadata, value)
-    end
-
-    def base_source_with_metadata
-      get_meta(:base_source_with_metadata)
-    end
-
-    def base_protocol
-      self.base_source_with_metadata.protocol
-    end
-
-    def base_id
-      self.base_source_with_metadata.id
-    end
-
-    def compose(*statements)
-      compose_for(section_type.to_sym).call(source_meta, *statements)
-    end
-
-    def compose_for(section_sym)
-      if section_sym == :filter
-        jdsl.method(:iComposeSequence)
-      else
-        jdsl.method(:iComposeParallel)
-      end
-    end
-
-    def line_and_column
-      start = self.interval.first
-      [self.input.line_of(start), self.input.column_of(start)]
-    end
-
-    def jdsl
-      org.logstash.config.ir.DSL
-    end
-
-    def self.jdsl
-      org.logstash.config.ir.DSL
-    end
-
-    AND_METHOD = jdsl.method(:eAnd)
-    OR_METHOD = jdsl.method(:eOr)
-  end
-
-  class Node < Treetop::Runtime::SyntaxNode
+  class Node < Treetop::Runtime::SyntaxNode
     include Helpers
 
     def section_type
data/lib/logstash/compiler/lscl/helpers.rb
ADDED
@@ -0,0 +1,55 @@
+# encoding: utf-8
+
+module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSCL; module AST
+  # Helpers for parsing LSCL files
+  module Helpers
+    def source_meta
+      line, column = line_and_column
+      org.logstash.common.SourceWithMetadata.new(base_protocol, base_id, line, column, self.text_value)
+    end
+
+    def base_source_with_metadata=(value)
+      set_meta(:base_source_with_metadata, value)
+    end
+
+    def base_source_with_metadata
+      get_meta(:base_source_with_metadata)
+    end
+
+    def base_protocol
+      self.base_source_with_metadata ? self.base_source_with_metadata.protocol : 'config_ast'
+    end
+
+    def base_id
+      self.base_source_with_metadata ? self.base_source_with_metadata.id : 'config_ast'
+    end
+
+    def compose(*statements)
+      compose_for(section_type.to_sym).call(source_meta, *statements)
+    end
+
+    def compose_for(section_sym)
+      if section_sym == :filter
+        jdsl.method(:iComposeSequence)
+      else
+        jdsl.method(:iComposeParallel)
+      end
+    end
+
+    def line_and_column
+      start = self.interval.first
+      [self.input.line_of(start), self.input.column_of(start)]
+    end
+
+    def jdsl
+      org.logstash.config.ir.DSL
+    end
+
+    def self.jdsl
+      org.logstash.config.ir.DSL
+    end
+
+    AND_METHOD = jdsl.method(:eAnd)
+    OR_METHOD = jdsl.method(:eOr)
+  end
+end; end; end; end; end
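The extracted Helpers module is what lets both grammars attach source positions to compiled config sections. A hedged sketch of the surface it provides (illustrative only: `node` stands for any parsed Treetop node that includes Helpers and whose base_source_with_metadata has been set by the compiler, so this is not runnable on its own):

# Illustrative only; assumes a JRuby process with logstash-core loaded and
# `node` as described above.
meta = node.source_meta          # an org.logstash.common.SourceWithMetadata
[meta.protocol, meta.id, meta.line, meta.column]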
data/lib/logstash/config/config_ast.rb
CHANGED
@@ -1,5 +1,6 @@
 # encoding: utf-8
 require 'logstash/errors'
+require "logstash/compiler/lscl/helpers"
 require "treetop"
 
 require "logstash/compiler/treetop_monkeypatches"
@@ -32,6 +33,8 @@ module LogStash; module Config; module AST
   end
 
   class Node < Treetop::Runtime::SyntaxNode
+    include LogStashCompilerLSCLGrammar::LogStash::Compiler::LSCL::AST::Helpers
+
     def text_value_for_comments
       text_value.gsub(/[\r\n]/, " ")
     end
@@ -189,12 +192,12 @@ module LogStash; module Config; module AST
       # If any parent is a Plugin, this must be a codec.
 
       if attributes.elements.nil?
-        return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect})" << (plugin_type == "codec" ? "" : "\n")
+        return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{source_meta.line}, #{source_meta.column})" << (plugin_type == "codec" ? "" : "\n")
       else
         settings = attributes.recursive_select(Attribute).collect(&:compile).reject(&:empty?)
 
         attributes_code = "LogStash::Util.hash_merge_many(#{settings.map { |c| "{ #{c} }" }.join(", ")})"
-        return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{attributes_code})" << (plugin_type == "codec" ? "" : "\n")
+        return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{source_meta.line}, #{source_meta.column}, #{attributes_code})" << (plugin_type == "codec" ? "" : "\n")
       end
     end
 
@@ -211,7 +214,7 @@ module LogStash; module Config; module AST
       when "codec"
         settings = attributes.recursive_select(Attribute).collect(&:compile).reject(&:empty?)
         attributes_code = "LogStash::Util.hash_merge_many(#{settings.map { |c| "{ #{c} }" }.join(", ")})"
-        return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{attributes_code})"
+        return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{source_meta.line}, #{source_meta.column}, #{attributes_code})"
       end
     end
 
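The net effect of the config_ast.rb change is that the generated Ruby now carries each plugin's source line and column along with its settings. For a hypothetical config such as filter { mutate { add_tag => ["x"] } }, the emitted call would look roughly like the following (the line/column values and settings are made up for illustration):

# Hypothetical shape of the code emitted by Plugin#compile after this change;
# previously the line and column arguments were absent.
plugin("filter", "mutate", 2, 3,
       LogStash::Util.hash_merge_many({ "add_tag" => ["x"] }))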
data/lib/logstash/config/modules_common.rb
CHANGED
@@ -64,9 +64,12 @@ module LogStash module Config
         alt_name = "module-#{module_name}"
         pipeline_id = alt_name
         module_settings.set("pipeline.id", pipeline_id)
+        LogStash::Modules::SettingsMerger.merge_cloud_settings(module_hash, module_settings)
         current_module.with_settings(module_hash)
         config_test = settings.get("config.test_and_exit")
-
+        modul_setup = settings.get("modules_setup")
+        # Only import data if it's not a config test and --setup is true
+        if !config_test && modul_setup
           esclient = LogStash::ElasticsearchClient.build(module_hash)
           kbnclient = LogStash::Modules::KibanaClient.new(module_hash)
           esconnected = esclient.can_connect?
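In other words, module assets are only imported when the run is not a config test and setup was requested. A minimal sketch of the gate in isolation (the `settings` object is assumed):

# Hedged sketch of the new gating condition, not the actual method body.
config_test  = settings.get("config.test_and_exit")
modul_setup  = settings.get("modules_setup")
import_assets = !config_test && modul_setup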
data/lib/logstash/elasticsearch_client.rb
CHANGED
@@ -39,8 +39,11 @@ module LogStash class ElasticsearchClient
       @client_args[:ssl] = ssl_options
 
       username = @settings["var.elasticsearch.username"]
-      password = @settings["var.elasticsearch.password"]
       if username
+        password = @settings["var.elasticsearch.password"]
+        if password.is_a?(LogStash::Util::Password)
+          password = password.value
+        end
         @client_args[:transport_options] = { :headers => { "Authorization" => 'Basic ' + Base64.encode64( "#{username}:#{password}" ).chomp } }
       end
 
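The unwrapping matters because the password setting may arrive wrapped in LogStash::Util::Password rather than as a plain string (for example when merged from cloud settings), and the Basic auth header needs the raw value. A minimal sketch of the same normalization, with an illustrative helper name:

# Sketch only; assumes logstash-core on the load path for
# LogStash::Util::Password. Mirrors the normalization above.
require "base64"

def basic_auth_header(username, password)
  password = password.value if password.is_a?(LogStash::Util::Password)
  { "Authorization" => "Basic " + Base64.encode64("#{username}:#{password}").chomp }
end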
data/lib/logstash/environment.rb
CHANGED
@@ -3,6 +3,9 @@ require "logstash/errors"
 require "logstash/java_integration"
 require "logstash/config/cpu_core_strategy"
 require "logstash/settings"
+require "logstash/util/cloud_setting_id"
+require "logstash/util/cloud_setting_auth"
+require "logstash/util/modules_setting_array"
 require "socket"
 require "stud/temporary"
 
@@ -20,8 +23,11 @@ module LogStash
            Setting::NullableString.new("path.config", nil, false),
            Setting::WritableDirectory.new("path.data", ::File.join(LogStash::Environment::LOGSTASH_HOME, "data")),
            Setting::NullableString.new("config.string", nil, false),
-
-
+           Setting::Modules.new("modules.cli", LogStash::Util::ModulesSettingArray, []),
+           Setting::Modules.new("modules", LogStash::Util::ModulesSettingArray, []),
+           Setting::Modules.new("cloud.id", LogStash::Util::CloudSettingId),
+           Setting::Modules.new("cloud.auth",LogStash::Util::CloudSettingAuth),
+           Setting::Boolean.new("modules_setup", false),
            Setting::Boolean.new("config.test_and_exit", false),
            Setting::Boolean.new("config.reload.automatic", false),
            Setting::TimeValue.new("config.reload.interval", "3s"), # in seconds
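These registrations make the module and cloud settings first-class members of LogStash::SETTINGS, so they can be read and written like any other setting. A hedged sketch of that surface (values made up, and it assumes a JRuby process with logstash-core loaded):

# Illustrative only.
LogStash::SETTINGS.set("modules_setup", true)
LogStash::SETTINGS.get("modules_setup")   # => true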
data/lib/logstash/filter_delegator.rb
CHANGED
@@ -14,6 +14,8 @@ module LogStash
     ]
     def_delegators :@filter, *DELEGATED_METHODS
 
+    attr_reader :id
+
     def initialize(logger, klass, metric, execution_context, plugin_args)
       @logger = logger
       @klass = klass
@@ -26,6 +28,9 @@ module LogStash
       @filter.execution_context = execution_context
 
       @metric_events = namespaced_metric.namespace(:events)
+      @metric_events_in = @metric_events.counter(:in)
+      @metric_events_out = @metric_events.counter(:out)
+      @metric_events_time = @metric_events.counter(:duration_in_millis)
       namespaced_metric.gauge(:name, config_name)
 
       # Not all the filters will do bufferings
@@ -37,19 +42,19 @@ module LogStash
     end
 
     def multi_filter(events)
-      @
+      @metric_events_in.increment(events.size)
 
-
+      start_time = java.lang.System.current_time_millis
       new_events = @filter.multi_filter(events)
-
+      @metric_events_time.increment(java.lang.System.current_time_millis - start_time)
 
       # There is no guarantee in the context of filter
       # that EVENTS_INT == EVENTS_OUT, see the aggregates and
       # the split filter
       c = new_events.count { |event| !event.cancelled? }
-      @metric_events.increment(:out, c) if c > 0
 
-
+      @metric_events_out.increment(c) if c > 0
+      new_events
     end
 
     private
@@ -61,7 +66,7 @@ module LogStash
 
     # Filter plugins that does buffering or spooling of events like the
     # `Logstash-filter-aggregates` can return `NIL` and will flush on the next flush ticks.
-      @
+      @metric_events_out.increment(new_events.size) if new_events && new_events.size > 0
       new_events
     end
   end
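The pattern behind this change: resolve each metric once in the constructor and keep the counter object, instead of going through the namespaced metric (namespace lookup plus key lookup) on every batch. In isolation, with illustrative names:

# Sketch only; `namespaced_metric` stands for the plugin's metric namespace
# and `events` for a batch of events.
events_in  = namespaced_metric.namespace(:events).counter(:in)
events_out = namespaced_metric.namespace(:events).counter(:out)

events_in.increment(events.size)    # hot path: no namespace lookup per batch
# ... run the filter ...
events_out.increment(events.size)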
data/lib/logstash/instrument/collector.rb
CHANGED
@@ -33,11 +33,7 @@ module LogStash module Instrument
     #
     def push(namespaces_path, key, type, *metric_type_params)
       begin
-
-        LogStash::Instrument::MetricType.create(type, namespaces_path, key)
-      end
-
-      metric.execute(*metric_type_params)
+        get(namespaces_path, key, type).execute(*metric_type_params)
       rescue MetricStore::NamespacesExpectedError => e
         logger.error("Collector: Cannot record metric", :exception => e)
       rescue NameError => e
@@ -51,6 +47,12 @@
       end
     end
 
+    def get(namespaces_path, key, type)
+      @metric_store.fetch_or_store(namespaces_path, key) do
+        LogStash::Instrument::MetricType.create(type, namespaces_path, key)
+      end
+    end
+
     # Snapshot the current Metric Store and return it immediately,
     # This is useful if you want to get access to the current metric store without
     # waiting for a periodic call.
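The new Collector#get is what the counter-returning metric front-ends build on: it hands back the underlying metric object, creating it on first use through the store's fetch_or_store, so callers can hold onto it instead of re-resolving it on every push. A hedged usage sketch (illustrative values):

# Illustrative only; `collector` is assumed to be a
# LogStash::Instrument::Collector instance.
counter = collector.get([:stats, :events], :in, :counter)
counter.increment(5)   # subsequent get calls return the same object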
data/lib/logstash/instrument/metric_store.rb
CHANGED
@@ -51,14 +51,12 @@ module LogStash module Instrument
     # BUT. If the value is not present in the `@fast_lookup` the value will be inserted and we assume that we don't
     # have it in the `@metric_store` for structured search so we add it there too.
 
-
-      fast_lookup_key = (namespaces.dup << key).hash
-      value = @fast_lookup.get(fast_lookup_key)
+      value = @fast_lookup.get(namespaces.dup << key)
       if value.nil?
         value = block_given? ? yield(key) : default_value
-        @fast_lookup.put(
+        @fast_lookup.put(namespaces.dup << key, value)
         @structured_lookup_mutex.synchronize do
-
+          # If we cannot find the value this mean we need to save it in the store.
           fetch_or_store_namespaces(namespaces).fetch_or_store(key, value)
         end
       end
@@ -165,7 +163,7 @@ module LogStash module Instrument
     end
 
     def has_metric?(*path)
-      @fast_lookup[path
+      @fast_lookup[path]
     end
 
     # Return all the individuals Metric,
@@ -187,9 +185,8 @@ module LogStash module Instrument
     def prune(path)
       key_paths = key_paths(path).map(&:to_sym)
       @structured_lookup_mutex.synchronize do
-
-
-      end
+        keys_to_delete = @fast_lookup.keys.select {|namespace| (key_paths - namespace[0..-2]).empty? }
+        keys_to_delete.each {|k| @fast_lookup.delete(k) }
         delete_from_map(@store, key_paths)
       end
     end
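The fast-lookup map now uses the full [namespaces..., key] array as its key instead of the array's #hash. Keeping the path recoverable from the key is what makes the new prune possible. In miniature, assuming a Concurrent::Map-style map and made-up data:

# Illustrative sketch of the keying scheme, not the MetricStore itself.
require "concurrent"

fast_lookup = Concurrent::Map.new
fast_lookup.put([:stats, :events, :in], 42)

to_prune = [:stats, :events]
doomed = fast_lookup.keys.select { |path| (to_prune - path[0..-2]).empty? }
doomed.each { |path| fast_lookup.delete(path) }   # removes [:stats, :events, :in]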
data/lib/logstash/instrument/null_metric.rb
CHANGED
@@ -39,6 +39,10 @@ module LogStash module Instrument
       end
     end
 
+    def counter(_)
+      NullGauge
+    end
+
     def namespace(name)
       raise MetricNoNamespaceProvided if name.nil? || name.empty?
       NamespacedNullMetric.new(self, name)
@@ -49,6 +53,12 @@ module LogStash module Instrument
     end
 
     private
+
+    class NullGauge
+      def self.increment(_)
+      end
+    end
+
     # Null implementation of the internal timer class
     #
     # @see LogStash::Instrument::TimedExecution`
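NullGauge rounds out the null-object pattern used when metrics are disabled: counter(...) returns something that accepts increments and silently discards them, so callers never need a nil check. The pattern in isolation (illustrative class name, not the logstash class itself):

class NoopCounter
  def self.increment(_value)
    # intentionally does nothing
  end
end

NoopCounter.increment(10)   # accepted and ignored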
data/lib/logstash/instrument/wrapped_write_client.rb
CHANGED
@@ -10,7 +10,12 @@ module LogStash module Instrument
     @events_metrics = metric.namespace([:stats, :events])
     @pipeline_metrics = metric.namespace([:stats, :pipelines, pipeline_id, :events])
     @plugin_events_metrics = metric.namespace([:stats, :pipelines, pipeline_id, :plugins, plugin_type, plugin.id.to_sym, :events])
-
+    @events_metrics_counter = @events_metrics.counter(:in)
+    @events_metrics_time = @events_metrics.counter(:queue_push_duration_in_millis)
+    @pipeline_metrics_counter = @pipeline_metrics.counter(:in)
+    @pipeline_metrics_time = @pipeline_metrics.counter(:queue_push_duration_in_millis)
+    @plugin_events_metrics_counter = @plugin_events_metrics.counter(:out)
+    @plugin_events_metrics_time = @plugin_events_metrics.counter(:queue_push_duration_in_millis)
     define_initial_metrics_values
   end
 
@@ -19,41 +24,45 @@
   end
 
   def push(event)
-
+    increment_counters(1)
+    start_time = java.lang.System.current_time_millis
+    result = @write_client.push(event)
+    report_execution_time(start_time)
+    result
   end
+
   alias_method(:<<, :push)
 
   def push_batch(batch)
-
+    increment_counters(batch.size)
+    start_time = java.lang.System.current_time_millis
+    result = @write_client.push_batch(batch)
+    report_execution_time(start_time)
+    result
   end
 
   private
-  def record_metric(size = 1)
-    @events_metrics.increment(:in, size)
-    @pipeline_metrics.increment(:in, size)
-    @plugin_events_metrics.increment(:out, size)
-
-    clock = @events_metrics.time(:queue_push_duration_in_millis)
 
-
-
-
-
-
-    @pipeline_metrics.report_time(:queue_push_duration_in_millis, execution_time)
-    @plugin_events_metrics.report_time(:queue_push_duration_in_millis, execution_time)
+  def increment_counters(size)
+    @events_metrics_counter.increment(size)
+    @pipeline_metrics_counter.increment(size)
+    @plugin_events_metrics_counter.increment(size)
+  end
 
-
+  def report_execution_time(start_time)
+    execution_time = java.lang.System.current_time_millis - start_time
+    @events_metrics_time.increment(execution_time)
+    @pipeline_metrics_time.increment(execution_time)
+    @plugin_events_metrics_time.increment(execution_time)
   end
 
   def define_initial_metrics_values
-    @
-    @
-    @
-
-    @
-    @
-    @plugin_events_metrics.report_time(:queue_push_duration_in_millis, 0)
+    @events_metrics_counter.increment(0)
+    @pipeline_metrics_counter.increment(0)
+    @plugin_events_metrics_counter.increment(0)
+    @events_metrics_time.increment(0)
+    @pipeline_metrics_time.increment(0)
+    @plugin_events_metrics_time.increment(0)
  end
 end
 end end
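The rewritten push and push_batch follow one pattern: bump the pre-created counters, wall-clock the delegated call with java.lang.System.current_time_millis, and feed the elapsed milliseconds into the duration counters. Reduced to its core (JRuby, illustrative names: `inner_client` and `duration_counter` are placeholders, not the actual instance variables):

# Sketch of the timing pattern used above.
def timed_push(inner_client, duration_counter, event)
  start  = java.lang.System.current_time_millis
  result = inner_client.push(event)
  duration_counter.increment(java.lang.System.current_time_millis - start)
  result
end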