logstash-core 6.3.2-java → 6.4.0-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/logstash/agent.rb +10 -6
- data/lib/logstash/api/modules/logging.rb +4 -0
- data/lib/logstash/api/service.rb +0 -1
- data/lib/logstash/bootstrap_check/default_config.rb +0 -2
- data/lib/logstash/bootstrap_check/persisted_queue_config.rb +0 -1
- data/lib/logstash/codecs/base.rb +2 -4
- data/lib/logstash/compiler.rb +2 -3
- data/lib/logstash/compiler/lscl.rb +0 -1
- data/lib/logstash/config/config_ast.rb +0 -1
- data/lib/logstash/config/cpu_core_strategy.rb +0 -1
- data/lib/logstash/config/defaults.rb +0 -1
- data/lib/logstash/config/file.rb +0 -2
- data/lib/logstash/config/mixin.rb +4 -7
- data/lib/logstash/config/modules_common.rb +0 -2
- data/lib/logstash/config/source/local.rb +1 -3
- data/lib/logstash/config/source/modules.rb +0 -2
- data/lib/logstash/config/source/multi_local.rb +0 -1
- data/lib/logstash/config/source_loader.rb +1 -2
- data/lib/logstash/dependency_report.rb +19 -6
- data/lib/logstash/elasticsearch_client.rb +0 -2
- data/lib/logstash/environment.rb +3 -2
- data/lib/logstash/errors.rb +1 -15
- data/lib/logstash/event.rb +0 -1
- data/lib/logstash/event_dispatcher.rb +1 -40
- data/lib/logstash/execution_context.rb +2 -19
- data/lib/logstash/filters/base.rb +0 -2
- data/lib/logstash/inputs/base.rb +2 -4
- data/lib/logstash/inputs/threadable.rb +2 -3
- data/lib/logstash/instrument/collector.rb +0 -2
- data/lib/logstash/instrument/metric.rb +1 -105
- data/lib/logstash/instrument/namespaced_metric.rb +1 -58
- data/lib/logstash/instrument/namespaced_null_metric.rb +1 -58
- data/lib/logstash/instrument/null_metric.rb +2 -71
- data/lib/logstash/instrument/periodic_poller/base.rb +0 -1
- data/lib/logstash/instrument/periodic_poller/cgroup.rb +0 -1
- data/lib/logstash/java_pipeline.rb +33 -222
- data/lib/logstash/json.rb +0 -1
- data/lib/logstash/logging.rb +0 -2
- data/lib/logstash/logging/logger.rb +1 -159
- data/lib/logstash/modules/cli_parser.rb +0 -4
- data/lib/logstash/modules/elasticsearch_config.rb +0 -3
- data/lib/logstash/modules/elasticsearch_importer.rb +0 -3
- data/lib/logstash/modules/elasticsearch_resource.rb +0 -1
- data/lib/logstash/modules/file_reader.rb +0 -2
- data/lib/logstash/modules/kibana_base.rb +0 -1
- data/lib/logstash/modules/kibana_client.rb +0 -2
- data/lib/logstash/modules/kibana_config.rb +0 -3
- data/lib/logstash/modules/kibana_dashboards.rb +0 -2
- data/lib/logstash/modules/kibana_importer.rb +0 -3
- data/lib/logstash/modules/kibana_resource.rb +0 -1
- data/lib/logstash/modules/kibana_settings.rb +0 -2
- data/lib/logstash/modules/logstash_config.rb +0 -1
- data/lib/logstash/modules/resource_base.rb +0 -1
- data/lib/logstash/modules/scaffold.rb +0 -3
- data/lib/logstash/modules/settings_merger.rb +0 -2
- data/lib/logstash/namespace.rb +2 -15
- data/lib/logstash/outputs/base.rb +3 -5
- data/lib/logstash/patches/clamp.rb +6 -0
- data/lib/logstash/pipeline.rb +38 -180
- data/lib/logstash/pipeline_action/create.rb +0 -2
- data/lib/logstash/pipeline_action/reload.rb +1 -4
- data/lib/logstash/pipeline_action/stop.rb +0 -2
- data/lib/logstash/pipeline_reporter.rb +2 -108
- data/lib/logstash/plugin.rb +4 -7
- data/lib/logstash/plugins/hooks_registry.rb +1 -63
- data/lib/logstash/plugins/registry.rb +3 -2
- data/lib/logstash/runner.rb +6 -7
- data/lib/logstash/settings.rb +4 -5
- data/lib/logstash/shutdown_watcher.rb +0 -119
- data/lib/logstash/universal_plugin.rb +1 -13
- data/lib/logstash/util.rb +0 -1
- data/lib/logstash/util/buftok.rb +1 -139
- data/lib/logstash/util/byte_value.rb +2 -3
- data/lib/logstash/util/charset.rb +0 -1
- data/lib/logstash/util/cloud_setting_auth.rb +0 -1
- data/lib/logstash/util/cloud_setting_id.rb +20 -8
- data/lib/logstash/util/dead_letter_queue_manager.rb +2 -61
- data/lib/logstash/util/decorators.rb +0 -1
- data/lib/logstash/util/loggable.rb +1 -31
- data/lib/logstash/util/modules_setting_array.rb +2 -2
- data/lib/logstash/util/password.rb +0 -1
- data/lib/logstash/util/plugin_version.rb +0 -1
- data/lib/logstash/util/safe_uri.rb +7 -8
- data/lib/logstash/util/secretstore.rb +1 -38
- data/lib/logstash/util/substitution_variables.rb +4 -5
- data/lib/logstash/util/worker_threads_default_printer.rb +0 -1
- data/locales/en.yml +28 -1
- data/spec/logstash/config/mixin_spec.rb +4 -4
- data/spec/logstash/converge_result_spec.rb +0 -1
- data/spec/logstash/event_dispatcher_spec.rb +0 -2
- data/spec/logstash/event_spec.rb +22 -26
- data/spec/logstash/execution_context_spec.rb +0 -2
- data/spec/logstash/filter_delegator_spec.rb +12 -28
- data/spec/logstash/inputs/base_spec.rb +4 -5
- data/spec/logstash/instrument/metric_spec.rb +0 -1
- data/spec/logstash/instrument/namespaced_metric_spec.rb +0 -2
- data/spec/logstash/instrument/namespaced_null_metric_spec.rb +1 -3
- data/spec/logstash/instrument/null_metric_spec.rb +1 -4
- data/spec/logstash/instrument/periodic_poller/base_spec.rb +0 -1
- data/spec/logstash/instrument/periodic_poller/os_spec.rb +0 -1
- data/spec/logstash/instrument/wrapped_write_client_spec.rb +0 -1
- data/spec/logstash/java_filter_delegator_spec.rb +0 -3
- data/spec/logstash/java_integration_spec.rb +0 -1
- data/spec/logstash/java_pipeline_spec.rb +1 -4
- data/spec/logstash/modules/cli_parser_spec.rb +1 -3
- data/spec/logstash/modules/scaffold_spec.rb +0 -1
- data/spec/logstash/outputs/base_spec.rb +9 -10
- data/spec/logstash/pipeline_action/create_spec.rb +2 -3
- data/spec/logstash/pipeline_action/reload_spec.rb +1 -2
- data/spec/logstash/pipeline_action/stop_spec.rb +0 -1
- data/spec/logstash/pipeline_dlq_commit_spec.rb +0 -6
- data/spec/logstash/pipeline_reporter_spec.rb +18 -4
- data/spec/logstash/pipeline_spec.rb +2 -6
- data/spec/logstash/plugin_spec.rb +1 -2
- data/spec/logstash/plugins/hooks_registry_spec.rb +0 -2
- data/spec/logstash/queue_factory_spec.rb +0 -1
- data/spec/logstash/runner_spec.rb +16 -9
- data/spec/logstash/settings/modules_spec.rb +3 -3
- data/spec/logstash/shutdown_watcher_spec.rb +0 -27
- data/spec/logstash/state_resolver_spec.rb +0 -1
- data/spec/logstash/util/buftok_spec.rb +0 -1
- data/spec/logstash/util/cloud_setting_id_spec.rb +55 -2
- data/spec/logstash/util/secretstore_spec.rb +10 -10
- data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +2 -2
- data/versions-gem-copy.yml +2 -2
- metadata +2 -16
- data/lib/logstash/bootstrap_check/bad_java.rb +0 -16
- data/lib/logstash/bootstrap_check/bad_ruby.rb +0 -12
- data/lib/logstash/converge_result.rb +0 -103
- data/lib/logstash/instrument/global_metrics.rb +0 -13
- data/lib/logstash/instrument/snapshot.rb +0 -15
- data/lib/logstash/java_integration.rb +0 -116
- data/lib/logstash/logging/json.rb +0 -21
- data/lib/logstash/plugins/plugin_factory.rb +0 -107
- data/lib/logstash/queue_factory.rb +0 -34
- data/lib/logstash/util/retryable.rb +0 -40
- data/spec/logstash/output_delegator_spec.rb +0 -201
- data/spec/logstash/timestamp_spec.rb +0 -45

data/lib/logstash/logging/json.rb (removed)
@@ -1,21 +0,0 @@
-# encoding: utf-8
-require "logstash/namespace"
-require "logstash/logging"
-require "logstash/json"
-
-module LogStash; module Logging; class JSON
-  def initialize(io)
-    raise ArgumentError, "Expected IO, got #{io.class.name}" unless io.is_a?(IO)
-
-    @io = io
-    @lock = Mutex.new
-  end
-
-  def <<(obj)
-    serialized = LogStash::Json.dump(obj)
-    @lock.synchronize do
-      @io.puts(serialized)
-      @io.flush
-    end
-  end
-end; end; end
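
For context, the removed LogStash::Logging::JSON class above was a small thread-safe JSON-lines writer around an IO object. A minimal usage sketch based on the removed code (the STDERR target and the log hash are illustrative, not taken from the diff):

    require "logstash/logging/json"   # load path as it existed in 6.3.2

    writer = LogStash::Logging::JSON.new(STDERR)   # anything that is not an IO raises ArgumentError
    # each << serializes the object with LogStash::Json.dump and writes one JSON document per line
    writer << { "level" => "warn", "message" => "disk usage high" }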

data/lib/logstash/plugins/plugin_factory.rb (removed)
@@ -1,107 +0,0 @@
-# encoding: utf-8
-
-module LogStash
-  module Plugins
-
-    class ExecutionContextFactory
-
-      def initialize(agent, pipeline, dlq_writer)
-        @agent = agent
-        @pipeline = pipeline
-        @dlq_writer = dlq_writer
-      end
-
-      def create(id, klass_cfg_name)
-        ExecutionContext.new(@pipeline, @agent, id, klass_cfg_name, @dlq_writer)
-      end
-    end
-
-    class PluginMetricFactory
-
-      def initialize(pipeline_id, metric)
-        @pipeline_id = pipeline_id.to_s.to_sym
-        @metric = metric
-      end
-
-      def create(plugin_type)
-        @metric.namespace([:stats, :pipelines, @pipeline_id, :plugins])
-          .namespace("#{plugin_type}s".to_sym)
-      end
-    end
-
-    class PluginFactory
-      include org.logstash.config.ir.compiler.RubyIntegration::PluginFactory
-
-      def self.filter_delegator(wrapper_class, filter_class, args, filter_metrics, execution_context)
-        filter_instance = filter_class.new(args)
-        id = args["id"]
-        filter_instance.metric = filter_metrics.namespace(id.to_sym)
-        filter_instance.execution_context = execution_context
-        wrapper_class.new(filter_instance, id)
-      end
-
-      def initialize(lir, metric_factory, exec_factory, filter_class)
-        @lir = lir
-        @plugins_by_id = {}
-        @metric_factory = metric_factory
-        @exec_factory = exec_factory
-        @filter_class = filter_class
-      end
-
-      def buildOutput(name, line, column, *args)
-        plugin("output", name, line, column, *args)
-      end
-
-      def buildFilter(name, line, column, *args)
-        plugin("filter", name, line, column, *args)
-      end
-
-      def buildInput(name, line, column, *args)
-        plugin("input", name, line, column, *args)
-      end
-
-      def buildCodec(name, *args)
-        plugin("codec", name, 0, 0, *args)
-      end
-
-      def plugin(plugin_type, name, line, column, *args)
-        # Collapse the array of arguments into a single merged hash
-        args = args.reduce({}, &:merge)
-
-        if plugin_type == "codec"
-          id = SecureRandom.uuid # codecs don't really use their IDs for metrics, so we can use anything here
-        else
-          # Pull the ID from LIR to keep IDs consistent between the two representations
-          id = @lir.graph.vertices.filter do |v|
-            v.source_with_metadata &&
-              v.source_with_metadata.line == line &&
-              v.source_with_metadata.column == column
-          end.findFirst.get.id
-        end
-        args["id"] = id # some code pulls the id out of the args
-
-        raise ConfigurationError, "Could not determine ID for #{plugin_type}/#{plugin_name}" unless id
-        raise ConfigurationError, "Two plugins have the id '#{id}', please fix this conflict" if @plugins_by_id[id]
-
-        @plugins_by_id[id] = true
-        # Scope plugins of type 'input' to 'inputs'
-        type_scoped_metric = @metric_factory.create(plugin_type)
-        klass = Plugin.lookup(plugin_type, name)
-        execution_context = @exec_factory.create(id, klass.config_name)
-
-        if plugin_type == "output"
-          OutputDelegator.new(klass, type_scoped_metric, execution_context, OutputDelegatorStrategyRegistry.instance, args)
-        elsif plugin_type == "filter"
-          self.class.filter_delegator(@filter_class, klass, args, type_scoped_metric, execution_context)
-        else # input or codec plugin
-          plugin_instance = klass.new(args)
-          scoped_metric = type_scoped_metric.namespace(id.to_sym)
-          scoped_metric.gauge(:name, plugin_instance.config_name)
-          plugin_instance.metric = scoped_metric
-          plugin_instance.execution_context = execution_context
-          plugin_instance
-        end
-      end
-    end
-  end
-end

data/lib/logstash/queue_factory.rb (removed)
@@ -1,34 +0,0 @@
-# encoding: utf-8
-require "fileutils"
-require "logstash/event"
-require "logstash/namespace"
-
-module LogStash
-  class QueueFactory
-    def self.create(settings)
-      queue_type = settings.get("queue.type")
-      queue_page_capacity = settings.get("queue.page_capacity")
-      queue_max_bytes = settings.get("queue.max_bytes")
-      queue_max_events = settings.get("queue.max_events")
-      checkpoint_max_acks = settings.get("queue.checkpoint.acks")
-      checkpoint_max_writes = settings.get("queue.checkpoint.writes")
-      checkpoint_max_interval = settings.get("queue.checkpoint.interval")
-
-      queue_path = ::File.join(settings.get("path.queue"), settings.get("pipeline.id"))
-
-      case queue_type
-      when "persisted"
-        # persisted is the disk based acked queue
-        FileUtils.mkdir_p(queue_path)
-        LogStash::WrappedAckedQueue.new(queue_path, queue_page_capacity, queue_max_events, checkpoint_max_writes, checkpoint_max_acks, checkpoint_max_interval, queue_max_bytes)
-      when "memory"
-        # memory is the legacy and default setting
-        LogStash::WrappedSynchronousQueue.new(
-          settings.get("pipeline.batch.size") * settings.get("pipeline.workers") * 2
-        )
-      else
-        raise ConfigurationError, "Invalid setting `#{queue_type}` for `queue.type`, supported types are: 'memory' or 'persisted'"
-      end
-    end
-  end
-end
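
The removed LogStash::QueueFactory.create above chose the queue implementation from the `queue.type` setting. A hedged sketch of how it was typically driven, assuming the usual LogStash::SETTINGS registry from logstash-core (the pipeline id and setting values are illustrative):

    require "logstash/queue_factory"

    settings = LogStash::SETTINGS.clone
    settings.set("queue.type", "memory")      # "persisted" selects the disk-based acked queue instead
    settings.set("pipeline.id", "main")

    # memory    => WrappedSynchronousQueue sized at pipeline.batch.size * pipeline.workers * 2
    # persisted => WrappedAckedQueue created under path.queue/<pipeline.id>
    queue = LogStash::QueueFactory.create(settings)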

data/lib/logstash/util/retryable.rb (removed)
@@ -1,40 +0,0 @@
-# encoding: utf-8
-module LogStash
-  module Retryable
-    # execute retryable code block
-    # @param [Hash] options retryable options
-    # @option options [Fixnum] :tries retries to perform, default 1, set to 0 for infinite retries. 1 means that upon exception the block will be retried once
-    # @option options [Fixnum] :base_sleep seconds to sleep on first retry, default 1
-    # @option options [Fixnum] :max_sleep max seconds to sleep upon exponential backoff, default 1
-    # @option options [Exception] :rescue exception class list to retry on, defaults is Exception, which retries on any Exception.
-    # @option options [Proc] :on_retry call the given Proc/lambda before each retry with the raised exception as parameter
-    def retryable(options = {}, &block)
-      options = {
-        :tries => 1,
-        :rescue => Exception,
-        :on_retry => nil,
-        :base_sleep => 1,
-        :max_sleep => 1,
-      }.merge(options)
-
-      rescue_classes = Array(options[:rescue])
-      max_sleep_retry = Math.log2(options[:max_sleep] / options[:base_sleep])
-      retry_count = 0
-
-      begin
-        return yield(retry_count)
-      rescue *rescue_classes => e
-        raise e if options[:tries] > 0 && retry_count >= options[:tries]
-
-        options[:on_retry].call(retry_count + 1, e) if options[:on_retry]
-
-        # dont compute and maybe overflow exponent on too big a retry count
-        seconds = retry_count < max_sleep_retry ? options[:base_sleep] * (2 ** retry_count) : options[:max_sleep]
-        sleep(seconds)
-
-        retry_count += 1
-        retry
-      end
-    end
-  end
-end
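
The removed LogStash::Retryable mixin above wrapped a block in bounded exponential backoff. A minimal usage sketch based on the options documented in the removed file (the FlakyClient class and read_from_socket call are hypothetical):

    class FlakyClient
      include LogStash::Retryable

      def fetch
        # retried up to 3 times on IOError, sleeping 1s, 2s, then 4s between attempts;
        # :on_retry receives the attempt number and the raised exception
        retryable(:tries => 3, :base_sleep => 1, :max_sleep => 4, :rescue => IOError,
                  :on_retry => lambda { |attempt, e| warn("retry ##{attempt}: #{e.message}") }) do
          read_from_socket
        end
      end
    end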

data/spec/logstash/output_delegator_spec.rb (removed)
@@ -1,201 +0,0 @@
-# encoding: utf-8
-require "logstash/execution_context"
-require "spec_helper"
-require "support/shared_contexts"
-
-describe LogStash::OutputDelegator do
-
-  let(:events) { 7.times.map { LogStash::Event.new }}
-  let(:plugin_args) { {"id" => "foo", "arg1" => "val1"} }
-  let(:metric) {
-    LogStash::Instrument::NamespacedMetric.new(
-      LogStash::Instrument::Metric.new(LogStash::Instrument::Collector.new), [:output]
-    )
-  }
-  let(:counter_in) {
-    counter = metric.counter(:in)
-    counter.increment(0)
-    counter
-  }
-  let(:counter_out) {
-    counter = metric.counter(:out)
-    counter.increment(0)
-    counter
-  }
-  let(:counter_time) {
-    counter = metric.counter(:duration_in_millis)
-    counter.increment(0)
-    counter
-  }
-
-  include_context "execution_context"
-
-  class FakeOutClass
-
-    def self.set_out_strategy(out_strategy)
-      @@out_strategy = out_strategy
-    end
-
-    def self.set_out_inst(out_inst)
-      @@out_inst = out_inst
-    end
-
-    def self.name
-      "example"
-    end
-
-    def self.concurrency
-      @@out_strategy
-    end
-
-    def self.config_name
-      "dummy_plugin"
-    end
-
-    class << self
-      def new(args)
-        if args == {"id" => "foo", "arg1" => "val1"}
-          @@out_inst
-        else
-          raise "unexpected plugin arguments"
-        end
-      end
-    end
-  end
-
-  let(:out_klass) {FakeOutClass}
-
-  subject { described_class.new(out_klass, metric, execution_context, ::LogStash::OutputDelegatorStrategyRegistry.instance, plugin_args) }
-
-  context "with a plain output plugin" do
-    let(:out_inst) { double("output instance") }
-
-    before(:each) do
-      # use the same metric instance
-      allow(out_inst).to receive(:register)
-      allow(out_inst).to receive(:multi_receive)
-      allow(out_inst).to receive(:metric=).with(any_args)
-      allow(out_inst).to receive(:execution_context=).with(execution_context)
-      allow(out_inst).to receive(:id).and_return("a-simple-plugin")
-      FakeOutClass.set_out_inst(out_inst)
-      FakeOutClass.set_out_strategy(:single)
-    end
-
-    it "should initialize cleanly" do
-      expect { subject }.not_to raise_error
-    end
-
-    it "should push the name of the plugin to the metric" do
-      described_class.new(out_klass, metric, execution_context, ::LogStash::OutputDelegatorStrategyRegistry.instance, plugin_args)
-      expect(metric.collector.snapshot_metric.metric_store.get_with_path("output/foo")[:output][:foo][:name].value).to eq(out_klass.config_name)
-    end
-
-    context "after having received a batch of events" do
-      before do
-        subject.register
-      end
-
-      it "should pass the events through" do
-        expect(out_inst).to receive(:multi_receive).with(events)
-        subject.multi_receive(events)
-      end
-
-      it "should increment the number of events received" do
-        subject.multi_receive(events)
-        store = metric.collector.snapshot_metric.metric_store.get_with_path("output/foo")[:output][:foo][:events]
-        number_of_events = events.length
-        expect(store[:in].value).to eq(number_of_events)
-        expect(store[:out].value).to eq(number_of_events)
-      end
-
-      it "should record the `duration_in_millis`" do
-        value = 0
-        while value == 0
-          subject.multi_receive(events)
-          store = metric.collector.snapshot_metric.metric_store.get_with_path("output/foo")[:output][:foo][:events]
-          value = store[:duration_in_millis].value
-        end
-        expect(value).to be > 0
-      end
-    end
-
-    describe "closing" do
-      before do
-        subject.register
-      end
-
-      it "should register the output plugin instance on register" do
-        expect(out_inst).to have_received(:register)
-      end
-
-      it "should close the output plugin instance when closing" do
-        expect(out_inst).to receive(:do_close)
-        subject.do_close
-      end
-    end
-
-    describe "concurrency strategies" do
-      it "should have :single as the default" do
-        expect(subject.concurrency).to eq :single
-      end
-
-      [
-        [:shared, ::LogStash::OutputDelegatorStrategies::Shared],
-        [:single, ::LogStash::OutputDelegatorStrategies::Single],
-        [:legacy, ::LogStash::OutputDelegatorStrategies::Legacy],
-      ].each do |strategy_concurrency,klass|
-        context "with strategy #{strategy_concurrency}" do
-          let(:concurrency) { strategy_concurrency }
-
-          before(:each) do
-            FakeOutClass.set_out_strategy(strategy_concurrency)
-          end
-
-          it "should find the correct concurrency type for the output" do
-            expect(subject.concurrency).to eq(strategy_concurrency)
-          end
-
-          it "should find the correct Strategy class for the worker" do
-            expect(subject.strategy).to be_a(klass)
-          end
-
-          it "should set the metric on the instance" do
-            expect(out_inst).to have_received(:metric=).with(subject.namespaced_metric)
-          end
-
-          [[:register], [:do_close], [:multi_receive, [[]] ] ].each do |method, args|
-            context "strategy objects" do
-              before do
-                allow(out_inst).to receive(method)
-              end
-
-              it "should delegate #{method} to the strategy" do
-                subject.send(method, *args)
-                if args
-                  expect(out_inst).to have_received(method).with(*args)
-                else
-                  expect(out_inst).to have_received(method).with(no_args)
-                end
-              end
-            end
-
-            context "strategy output instances" do
-              before do
-                allow(out_inst).to receive(method)
-              end
-
-              it "should delegate #{method} to the strategy" do
-                subject.send(method, *args)
-                if args
-                  expect(out_inst).to have_received(method).with(*args)
-                else
-                  expect(out_inst).to have_received(method).with(no_args)
-                end
-              end
-            end
-          end
-        end
-      end
-    end
-  end
-end

data/spec/logstash/timestamp_spec.rb (removed)
@@ -1,45 +0,0 @@
-# encoding: utf-8
-
-require "spec_helper"
-
-describe LogStash::Timestamp do
-  context "constructors" do
-    # Via JRuby 9k time see logstash/issues/7463
-    # JRuby 9k now uses Java 8 Time with nanosecond precision but
-    # our Timestamp use Joda with millisecond precision
-    # expected: 2017-06-15 10:34:08.389999999 +0000
-    # got: 2017-06-15 10:34:08.389000000 +0000
-    # we may need to use `be_within(0.000999999).of()` in other places too
-    it "should work" do
-      t = LogStash::Timestamp.new
-      expect(t.time.to_i).to be_within(2).of Time.now.to_i
-
-      t = LogStash::Timestamp.now
-      expect(t.time.to_i).to be_within(2).of Time.now.to_i
-
-      now = DateTime.now.to_time.utc
-      t = LogStash::Timestamp.new(now)
-      expect(t.time.to_f).to be_within(0.000999999).of(now.to_f)
-
-      t = LogStash::Timestamp.at(now.to_i)
-      expect(t.time.to_i).to eq(now.to_i)
-    end
-
-    it "should have consistent behaviour across == and .eql?" do
-      its_xmas = Time.utc(2015, 12, 25, 0, 0, 0)
-      expect(LogStash::Timestamp.new(its_xmas)).to eql(LogStash::Timestamp.new(its_xmas))
-      expect(LogStash::Timestamp.new(its_xmas)).to be ==(LogStash::Timestamp.new(its_xmas))
-    end
-
-    it "should raise exception on invalid format" do
-      expect{LogStash::Timestamp.new("foobar")}.to raise_error
-    end
-
-    it "compares to any type" do
-      t = LogStash::Timestamp.new
-      expect(t == '-').to be_falsey
-    end
-
-  end
-
-end