logstash-core 5.3.3-java → 5.4.0-java
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/gemspec_jars.rb +2 -0
- data/lib/logstash-core/logstash-core.jar +0 -0
- data/lib/logstash-core/version.rb +1 -1
- data/lib/logstash-core_jars.rb +4 -0
- data/lib/logstash/agent.rb +15 -6
- data/lib/logstash/api/modules/base.rb +1 -1
- data/lib/logstash/api/rack_app.rb +1 -1
- data/lib/logstash/config/config_ast.rb +13 -13
- data/lib/logstash/config/mixin.rb +33 -28
- data/lib/logstash/environment.rb +11 -0
- data/lib/logstash/event.rb +56 -0
- data/lib/logstash/event_dispatcher.rb +2 -2
- data/lib/logstash/execution_context.rb +10 -0
- data/lib/logstash/filter_delegator.rb +3 -2
- data/lib/logstash/inputs/base.rb +15 -1
- data/lib/logstash/instrument/collector.rb +1 -1
- data/lib/logstash/instrument/metric.rb +4 -2
- data/lib/logstash/instrument/metric_store.rb +9 -5
- data/lib/logstash/instrument/null_metric.rb +1 -0
- data/lib/logstash/instrument/periodic_poller/cgroup.rb +3 -3
- data/lib/logstash/instrument/periodic_poller/jvm.rb +11 -8
- data/lib/logstash/instrument/periodic_poller/load_average.rb +4 -2
- data/lib/logstash/instrument/wrapped_write_client.rb +59 -0
- data/lib/logstash/java_integration.rb +2 -2
- data/lib/logstash/output_delegator.rb +2 -2
- data/lib/logstash/output_delegator_strategies/legacy.rb +5 -2
- data/lib/logstash/output_delegator_strategies/shared.rb +2 -1
- data/lib/logstash/output_delegator_strategies/single.rb +2 -1
- data/lib/logstash/outputs/base.rb +8 -0
- data/lib/logstash/patches/cabin.rb +1 -1
- data/lib/logstash/patches/stronger_openssl_defaults.rb +1 -1
- data/lib/logstash/pipeline.rb +47 -19
- data/lib/logstash/plugin.rb +3 -1
- data/lib/logstash/plugins/hooks_registry.rb +6 -6
- data/lib/logstash/plugins/registry.rb +2 -2
- data/lib/logstash/queue_factory.rb +7 -5
- data/lib/logstash/runner.rb +15 -1
- data/lib/logstash/settings.rb +14 -2
- data/lib/logstash/string_interpolation.rb +18 -0
- data/lib/logstash/timestamp.rb +27 -0
- data/lib/logstash/util.rb +1 -1
- data/lib/logstash/util/prctl.rb +1 -1
- data/lib/logstash/util/retryable.rb +1 -1
- data/lib/logstash/util/wrapped_acked_queue.rb +53 -22
- data/lib/logstash/util/wrapped_synchronous_queue.rb +51 -33
- data/lib/logstash/version.rb +1 -1
- data/locales/en.yml +4 -2
- data/logstash-core.gemspec +0 -3
- data/spec/api/lib/api/node_stats_spec.rb +2 -1
- data/spec/api/spec_helper.rb +1 -1
- data/spec/logstash/acked_queue_concurrent_stress_spec.rb +291 -0
- data/spec/logstash/agent_spec.rb +24 -0
- data/spec/logstash/config/mixin_spec.rb +11 -2
- data/spec/logstash/event_dispatcher_spec.rb +8 -1
- data/spec/logstash/event_spec.rb +346 -0
- data/spec/logstash/execution_context_spec.rb +13 -0
- data/spec/logstash/filter_delegator_spec.rb +4 -2
- data/spec/logstash/inputs/base_spec.rb +41 -0
- data/spec/logstash/instrument/metric_spec.rb +2 -1
- data/spec/logstash/instrument/metric_store_spec.rb +14 -0
- data/spec/logstash/instrument/namespaced_metric_spec.rb +2 -1
- data/spec/logstash/instrument/periodic_poller/cgroup_spec.rb +1 -1
- data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +35 -0
- data/spec/logstash/instrument/periodic_poller/load_average_spec.rb +1 -5
- data/spec/logstash/instrument/wrapped_write_client_spec.rb +113 -0
- data/spec/logstash/json_spec.rb +1 -1
- data/spec/logstash/legacy_ruby_event_spec.rb +636 -0
- data/spec/logstash/legacy_ruby_timestamp_spec.rb +170 -0
- data/spec/logstash/output_delegator_spec.rb +6 -3
- data/spec/logstash/outputs/base_spec.rb +23 -0
- data/spec/logstash/pipeline_pq_file_spec.rb +18 -8
- data/spec/logstash/pipeline_spec.rb +41 -5
- data/spec/logstash/plugin_spec.rb +15 -3
- data/spec/logstash/plugins/hooks_registry_spec.rb +2 -2
- data/spec/logstash/runner_spec.rb +33 -2
- data/spec/logstash/settings/port_range_spec.rb +1 -1
- data/spec/logstash/settings_spec.rb +21 -0
- data/spec/logstash/timestamp_spec.rb +29 -0
- data/spec/logstash/util/accessors_spec.rb +179 -0
- data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +4 -11
- data/spec/logstash/util_spec.rb +1 -1
- data/spec/logstash/webserver_spec.rb +1 -1
- data/spec/support/mocks_classes.rb +65 -53
- metadata +25 -30
@@ -0,0 +1,13 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "spec_helper"
|
3
|
+
require "logstash/execution_context"
|
4
|
+
|
5
|
+
describe LogStash::ExecutionContext do
|
6
|
+
let(:pipeline_id) { :main }
|
7
|
+
|
8
|
+
subject { described_class.new(pipeline_id) }
|
9
|
+
|
10
|
+
it "returns the `pipeline_id`" do
|
11
|
+
expect(subject.pipeline_id).to eq(pipeline_id)
|
12
|
+
end
|
13
|
+
end
|
@@ -3,6 +3,7 @@ require "spec_helper"
|
|
3
3
|
require "logstash/filter_delegator"
|
4
4
|
require "logstash/instrument/null_metric"
|
5
5
|
require "logstash/event"
|
6
|
+
require "logstash/execution_context"
|
6
7
|
|
7
8
|
describe LogStash::FilterDelegator do
|
8
9
|
let(:logger) { double(:logger) }
|
@@ -13,6 +14,7 @@ describe LogStash::FilterDelegator do
|
|
13
14
|
let(:collector) { [] }
|
14
15
|
let(:metric) { LogStash::Instrument::NamespacedNullMetric.new(collector, :null) }
|
15
16
|
let(:events) { [LogStash::Event.new, LogStash::Event.new] }
|
17
|
+
let(:default_execution_context) { LogStash::ExecutionContext.new(:main) }
|
16
18
|
|
17
19
|
before :each do
|
18
20
|
allow(metric).to receive(:namespace).with(anything).and_return(metric)
|
@@ -26,11 +28,11 @@ describe LogStash::FilterDelegator do
|
|
26
28
|
end
|
27
29
|
end
|
28
30
|
|
29
|
-
subject { described_class.new(logger, plugin_klass, metric, config) }
|
31
|
+
subject { described_class.new(logger, plugin_klass, metric, default_execution_context, config) }
|
30
32
|
|
31
33
|
it "create a plugin with the passed options" do
|
32
34
|
expect(plugin_klass).to receive(:new).with(config).and_return(plugin_klass.new(config))
|
33
|
-
described_class.new(logger, plugin_klass, metric, config)
|
35
|
+
described_class.new(logger, plugin_klass, metric, default_execution_context, config)
|
34
36
|
end
|
35
37
|
|
36
38
|
context "when the plugin support flush" do
|
@@ -1,5 +1,7 @@
|
|
1
1
|
# encoding: utf-8
|
2
2
|
require "spec_helper"
|
3
|
+
require "logstash/execution_context"
|
4
|
+
require "logstash/inputs/base"
|
3
5
|
|
4
6
|
# use a dummy NOOP input to test Inputs::Base
|
5
7
|
class LogStash::Inputs::NOOP < LogStash::Inputs::Base
|
@@ -60,6 +62,45 @@ describe "LogStash::Inputs::Base#decorate" do
|
|
60
62
|
expect(evt.get("field")).to eq(["value1","value2"])
|
61
63
|
expect(evt.get("field2")).to eq("value")
|
62
64
|
end
|
65
|
+
|
66
|
+
context "execution context" do
|
67
|
+
let(:default_execution_context) { LogStash::ExecutionContext.new(:main) }
|
68
|
+
let(:klass) { LogStash::Inputs::NOOP }
|
69
|
+
|
70
|
+
subject(:instance) { klass.new({}) }
|
71
|
+
|
72
|
+
it "allow to set the context" do
|
73
|
+
expect(instance.execution_context).to be_nil
|
74
|
+
instance.execution_context = default_execution_context
|
75
|
+
|
76
|
+
expect(instance.execution_context).to eq(default_execution_context)
|
77
|
+
end
|
78
|
+
|
79
|
+
it "propagate the context to the codec" do
|
80
|
+
expect(instance.codec.execution_context).to be_nil
|
81
|
+
instance.execution_context = default_execution_context
|
82
|
+
|
83
|
+
expect(instance.codec.execution_context).to eq(default_execution_context)
|
84
|
+
end
|
85
|
+
end
|
86
|
+
|
87
|
+
describe "cloning" do
|
88
|
+
let(:input) do
|
89
|
+
LogStash::Inputs::NOOP.new("add_field" => {"field" => ["value1", "value2"], "field2" => "value"})
|
90
|
+
end
|
91
|
+
|
92
|
+
let(:cloned) do
|
93
|
+
input.clone
|
94
|
+
end
|
95
|
+
|
96
|
+
it "should clone the codec when cloned" do
|
97
|
+
expect(input.codec).not_to eq(cloned.codec)
|
98
|
+
end
|
99
|
+
|
100
|
+
it "should preserve codec params" do
|
101
|
+
expect(input.codec.params).to eq(cloned.codec.params)
|
102
|
+
end
|
103
|
+
end
|
63
104
|
end
|
64
105
|
|
65
106
|
describe "LogStash::Inputs::Base#fix_streaming_codecs" do
|
@@ -85,8 +85,9 @@ describe LogStash::Instrument::Metric do
|
|
85
85
|
it "return a TimedExecution" do
|
86
86
|
execution = subject.time(:root, :duration_ms)
|
87
87
|
sleep(sleep_time)
|
88
|
-
execution.stop
|
88
|
+
execution_time = execution.stop
|
89
89
|
|
90
|
+
expect(execution_time).to eq(collector.last)
|
90
91
|
expect(collector.last).to be_within(sleep_time_ms).of(sleep_time_ms + 0.1)
|
91
92
|
expect(collector[0]).to match(:root)
|
92
93
|
expect(collector[1]).to be(:duration_ms)
|
@@ -53,6 +53,20 @@ describe LogStash::Instrument::MetricStore do
|
|
53
53
|
end
|
54
54
|
end
|
55
55
|
|
56
|
+
context "#has_metric?" do
|
57
|
+
context "when the path exist" do
|
58
|
+
it "returns true" do
|
59
|
+
expect(subject.has_metric?(:node, :sashimi, :pipelines, :pipeline01, :plugins, :"logstash-output-elasticsearch", :event_in)).to be_truthy
|
60
|
+
end
|
61
|
+
end
|
62
|
+
|
63
|
+
context "when the path doesn't exist" do
|
64
|
+
it "returns false" do
|
65
|
+
expect(subject.has_metric?(:node, :sashimi, :pipelines, :pipeline01, :plugins, :"logstash-input-nothing")).to be_falsey
|
66
|
+
end
|
67
|
+
end
|
68
|
+
end
|
69
|
+
|
56
70
|
describe "#get" do
|
57
71
|
context "when the path exist" do
|
58
72
|
it "retrieves end of of a branch" do
|
@@ -78,8 +78,9 @@ describe LogStash::Instrument::NamespacedMetric do
|
|
78
78
|
it "return a TimedExecution" do
|
79
79
|
execution = subject.time(:duration_ms)
|
80
80
|
sleep(sleep_time)
|
81
|
-
execution.stop
|
81
|
+
execution_time = execution.stop
|
82
82
|
|
83
|
+
expect(execution_time).to eq(collector.last)
|
83
84
|
expect(collector.last).to be_within(sleep_time_ms).of(sleep_time_ms + 0.1)
|
84
85
|
expect(collector[0]).to match([:root])
|
85
86
|
expect(collector[1]).to be(:duration_ms)
|
@@ -74,7 +74,7 @@ describe LogStash::Instrument::PeriodicPoller::Cgroup do
|
|
74
74
|
end
|
75
75
|
|
76
76
|
context ".get_all" do
|
77
|
-
context "when we can
|
77
|
+
context "when we can retrieve the stats" do
|
78
78
|
let(:cpuacct_control_group) { "/docker/a10687343f90e97bbb1f7181bd065a42de96c40c4aa91764a9d526ea30475f61" }
|
79
79
|
let(:cpuacct_usage) { 1982 }
|
80
80
|
let(:cpu_control_group) { "/docker/a10687343f90e97bbb1f7181bd065a42de96c40c4aa91764a9d526ea30475f61" }
|
@@ -54,6 +54,41 @@ describe LogStash::Instrument::PeriodicPoller::JVM do
|
|
54
54
|
end
|
55
55
|
end
|
56
56
|
|
57
|
+
describe "aggregate heap information" do
|
58
|
+
shared_examples "heap_information" do
|
59
|
+
let(:data_set) do
|
60
|
+
{
|
61
|
+
"usage.used" => 5,
|
62
|
+
"usage.committed" => 11,
|
63
|
+
"usage.max" => 21,
|
64
|
+
"peak.max" => 51,
|
65
|
+
"peak.used" => 61
|
66
|
+
}
|
67
|
+
end
|
68
|
+
let(:collection) { [data_set] }
|
69
|
+
|
70
|
+
it "return the right values" do
|
71
|
+
expect(subject.aggregate_information_for(collection)).to match({
|
72
|
+
:used_in_bytes => 5 * collection.size,
|
73
|
+
:committed_in_bytes => 11 * collection.size,
|
74
|
+
:max_in_bytes => 21 * collection.size,
|
75
|
+
:peak_max_in_bytes => 51 * collection.size,
|
76
|
+
:peak_used_in_bytes => 61 * collection.size
|
77
|
+
})
|
78
|
+
end
|
79
|
+
end
|
80
|
+
|
81
|
+
context "with only one data set in a collection" do
|
82
|
+
include_examples "heap_information"
|
83
|
+
end
|
84
|
+
|
85
|
+
context "with multiples data set in a collection" do
|
86
|
+
include_examples "heap_information" do
|
87
|
+
let(:collection) { ar = []; ar << data_set; ar << data_set; ar }
|
88
|
+
end
|
89
|
+
end
|
90
|
+
end
|
91
|
+
|
57
92
|
describe "collections" do
|
58
93
|
subject(:collection) { jvm.collect }
|
59
94
|
it "should run cleanly" do
|
@@ -13,14 +13,10 @@ describe LogStash::Instrument::PeriodicPoller::LoadAverage do
|
|
13
13
|
context "when it can read the file" do
|
14
14
|
let(:proc_loadavg) { "0.00 0.01 0.05 3/180 29727" }
|
15
15
|
|
16
|
-
before do
|
17
|
-
expect(::File).to receive(:read).with("/proc/loadavg").and_return(proc_loadavg)
|
18
|
-
end
|
19
|
-
|
20
16
|
it "return the 3 load average from `/proc/loadavg`" do
|
21
17
|
avg_1m, avg_5m, avg_15m = proc_loadavg.chomp.split(" ")
|
22
18
|
|
23
|
-
expect(subject.get).to include(:"1m" => avg_1m.to_f, :"5m" => avg_5m.to_f, :"15m" => avg_15m.to_f)
|
19
|
+
expect(subject.get(proc_loadavg)).to include(:"1m" => avg_1m.to_f, :"5m" => avg_5m.to_f, :"15m" => avg_15m.to_f)
|
24
20
|
end
|
25
21
|
end
|
26
22
|
end
|
@@ -0,0 +1,113 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "logstash/instrument/metric"
|
3
|
+
require "logstash/instrument/wrapped_write_client"
|
4
|
+
require "logstash/util/wrapped_synchronous_queue"
|
5
|
+
require "logstash/event"
|
6
|
+
require_relative "../../support/mocks_classes"
|
7
|
+
require "spec_helper"
|
8
|
+
|
9
|
+
describe LogStash::Instrument::WrappedWriteClient do
|
10
|
+
let(:write_client) { queue.write_client }
|
11
|
+
let(:read_client) { queue.read_client }
|
12
|
+
let(:pipeline) { double("pipeline", :pipeline_id => :main) }
|
13
|
+
let(:collector) { LogStash::Instrument::Collector.new }
|
14
|
+
let(:metric) { LogStash::Instrument::Metric.new(collector) }
|
15
|
+
let(:plugin) { LogStash::Inputs::DummyInput.new({ "id" => myid }) }
|
16
|
+
let(:event) { LogStash::Event.new }
|
17
|
+
let(:myid) { "1234myid" }
|
18
|
+
|
19
|
+
subject { described_class.new(write_client, pipeline, metric, plugin) }
|
20
|
+
|
21
|
+
|
22
|
+
shared_examples "queue tests" do
|
23
|
+
it "pushes single event to the `WriteClient`" do
|
24
|
+
t = Thread.new do
|
25
|
+
subject.push(event)
|
26
|
+
end
|
27
|
+
sleep(0.01) while !t.status
|
28
|
+
expect(read_client.read_batch.size).to eq(1)
|
29
|
+
t.kill rescue nil
|
30
|
+
end
|
31
|
+
|
32
|
+
it "pushes batch to the `WriteClient`" do
|
33
|
+
batch = write_client.get_new_batch
|
34
|
+
batch << event
|
35
|
+
|
36
|
+
t = Thread.new do
|
37
|
+
subject.push_batch(batch)
|
38
|
+
end
|
39
|
+
|
40
|
+
sleep(0.01) while !t.status
|
41
|
+
expect(read_client.read_batch.size).to eq(1)
|
42
|
+
t.kill rescue nil
|
43
|
+
end
|
44
|
+
|
45
|
+
context "recorded metrics" do
|
46
|
+
before do
|
47
|
+
t = Thread.new do
|
48
|
+
subject.push(event)
|
49
|
+
end
|
50
|
+
sleep(0.01) while !t.status
|
51
|
+
sleep(0.250) # make it block for some time, so duration isn't 0
|
52
|
+
read_client.read_batch.size
|
53
|
+
t.kill rescue nil
|
54
|
+
end
|
55
|
+
|
56
|
+
let(:snapshot_store) { collector.snapshot_metric.metric_store }
|
57
|
+
|
58
|
+
let(:snapshot_metric) { snapshot_store.get_shallow(:stats) }
|
59
|
+
|
60
|
+
it "records instance level events `in`" do
|
61
|
+
expect(snapshot_metric[:events][:in].value).to eq(1)
|
62
|
+
end
|
63
|
+
|
64
|
+
it "records pipeline level `in`" do
|
65
|
+
expect(snapshot_metric[:pipelines][:main][:events][:in].value).to eq(1)
|
66
|
+
end
|
67
|
+
|
68
|
+
it "record input `out`" do
|
69
|
+
expect(snapshot_metric[:pipelines][:main][:plugins][:inputs][myid.to_sym][:events][:out].value).to eq(1)
|
70
|
+
end
|
71
|
+
|
72
|
+
context "recording of the duration of pushing to the queue" do
|
73
|
+
it "records at the `global events` level" do
|
74
|
+
expect(snapshot_metric[:events][:queue_push_duration_in_millis].value).to be_kind_of(Integer)
|
75
|
+
end
|
76
|
+
|
77
|
+
it "records at the `pipeline` level" do
|
78
|
+
expect(snapshot_metric[:pipelines][:main][:events][:queue_push_duration_in_millis].value).to be_kind_of(Integer)
|
79
|
+
end
|
80
|
+
|
81
|
+
it "records at the `plugin level" do
|
82
|
+
expect(snapshot_metric[:pipelines][:main][:plugins][:inputs][myid.to_sym][:events][:queue_push_duration_in_millis].value).to be_kind_of(Integer)
|
83
|
+
end
|
84
|
+
end
|
85
|
+
end
|
86
|
+
end
|
87
|
+
|
88
|
+
context "WrappedSynchronousQueue" do
|
89
|
+
let(:queue) { LogStash::Util::WrappedSynchronousQueue.new }
|
90
|
+
|
91
|
+
before do
|
92
|
+
read_client.set_events_metric(metric.namespace([:stats, :events]))
|
93
|
+
read_client.set_pipeline_metric(metric.namespace([:stats, :pipelines, :main, :events]))
|
94
|
+
end
|
95
|
+
|
96
|
+
include_examples "queue tests"
|
97
|
+
end
|
98
|
+
|
99
|
+
context "AckedMemoryQueue" do
|
100
|
+
let(:queue) { LogStash::Util::WrappedAckedQueue.create_memory_based("", 1024, 10, 1024) }
|
101
|
+
|
102
|
+
before do
|
103
|
+
read_client.set_events_metric(metric.namespace([:stats, :events]))
|
104
|
+
read_client.set_pipeline_metric(metric.namespace([:stats, :pipelines, :main, :events]))
|
105
|
+
end
|
106
|
+
|
107
|
+
after do
|
108
|
+
queue.close
|
109
|
+
end
|
110
|
+
|
111
|
+
include_examples "queue tests"
|
112
|
+
end
|
113
|
+
end
|
data/spec/logstash/json_spec.rb
CHANGED
@@ -37,7 +37,7 @@ describe "LogStash::Json" do
|
|
37
37
|
|
38
38
|
### JRuby specific
|
39
39
|
# Former expectation in this code were removed because of https://github.com/rspec/rspec-mocks/issues/964
|
40
|
-
# as soon as is fix we can re introduce them if
|
40
|
+
# as soon as is fix we can re introduce them if desired, however for now the completeness of the test
|
41
41
|
# is also not affected as the conversion would not work if the expectation where not meet.
|
42
42
|
###
|
43
43
|
context "jruby deserialize" do
|
@@ -0,0 +1,636 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "spec_helper"
|
3
|
+
require "logstash/util/decorators"
|
4
|
+
require "json"
|
5
|
+
|
6
|
+
describe LogStash::Event do
|
7
|
+
|
8
|
+
shared_examples "all event tests" do
|
9
|
+
context "[]=" do
|
10
|
+
it "should raise an exception if you attempt to set @timestamp to a value type other than a Time object" do
|
11
|
+
expect{subject.set("@timestamp", "crash!")}.to raise_error(TypeError)
|
12
|
+
end
|
13
|
+
|
14
|
+
it "should assign simple fields" do
|
15
|
+
expect(subject.get("foo")).to be_nil
|
16
|
+
expect(subject.set("foo", "bar")).to eq("bar")
|
17
|
+
expect(subject.get("foo")).to eq("bar")
|
18
|
+
end
|
19
|
+
|
20
|
+
it "should overwrite simple fields" do
|
21
|
+
expect(subject.get("foo")).to be_nil
|
22
|
+
expect(subject.set("foo", "bar")).to eq("bar")
|
23
|
+
expect(subject.get("foo")).to eq("bar")
|
24
|
+
|
25
|
+
expect(subject.set("foo", "baz")).to eq("baz")
|
26
|
+
expect(subject.get("foo")).to eq("baz")
|
27
|
+
end
|
28
|
+
|
29
|
+
it "should assign deep fields" do
|
30
|
+
expect(subject.get("[foo][bar]")).to be_nil
|
31
|
+
expect(subject.set("[foo][bar]", "baz")).to eq("baz")
|
32
|
+
expect(subject.get("[foo][bar]")).to eq("baz")
|
33
|
+
end
|
34
|
+
|
35
|
+
it "should overwrite deep fields" do
|
36
|
+
expect(subject.get("[foo][bar]")).to be_nil
|
37
|
+
expect(subject.set("[foo][bar]", "baz")).to eq("baz")
|
38
|
+
expect(subject.get("[foo][bar]")).to eq("baz")
|
39
|
+
|
40
|
+
expect(subject.set("[foo][bar]", "zab")).to eq("zab")
|
41
|
+
expect(subject.get("[foo][bar]")).to eq("zab")
|
42
|
+
end
|
43
|
+
|
44
|
+
it "allow to set the @metadata key to a hash" do
|
45
|
+
subject.set("@metadata", { "action" => "index" })
|
46
|
+
expect(subject.get("[@metadata][action]")).to eq("index")
|
47
|
+
end
|
48
|
+
|
49
|
+
it "should add key when setting nil value" do
|
50
|
+
subject.set("[baz]", nil)
|
51
|
+
expect(subject.to_hash).to include("baz" => nil)
|
52
|
+
end
|
53
|
+
|
54
|
+
it "should set nil element within existing array value" do
|
55
|
+
subject.set("[foo]", ["bar", "baz"])
|
56
|
+
|
57
|
+
expect(subject.set("[foo][0]", nil)).to eq(nil)
|
58
|
+
expect(subject.get("[foo]")).to eq([nil, "baz"])
|
59
|
+
end
|
60
|
+
|
61
|
+
it "should set nil in first element within empty array" do
|
62
|
+
subject.set("[foo]", [])
|
63
|
+
|
64
|
+
expect(subject.set("[foo][0]", nil)).to eq(nil)
|
65
|
+
expect(subject.get("[foo]")).to eq([nil])
|
66
|
+
end
|
67
|
+
|
68
|
+
it "should set nil in second element within empty array" do
|
69
|
+
subject.set("[foo]", [])
|
70
|
+
|
71
|
+
expect(subject.set("[foo][1]", nil)).to eq(nil)
|
72
|
+
expect(subject.get("[foo]")).to eq([nil, nil])
|
73
|
+
end
|
74
|
+
end
|
75
|
+
|
76
|
+
context "#sprintf" do
|
77
|
+
it "should not return a String reference" do
|
78
|
+
data = "NOT-A-REFERENCE"
|
79
|
+
event = LogStash::Event.new({ "reference" => data })
|
80
|
+
LogStash::Util::Decorators.add_fields({"reference_test" => "%{reference}"}, event, "dummy-plugin")
|
81
|
+
data.downcase!
|
82
|
+
expect(event.get("reference_test")).not_to eq(data)
|
83
|
+
end
|
84
|
+
|
85
|
+
it "should not return a Fixnum reference" do
|
86
|
+
data = 1
|
87
|
+
event = LogStash::Event.new({ "reference" => data })
|
88
|
+
LogStash::Util::Decorators.add_fields({"reference_test" => "%{reference}"}, event, "dummy-plugin")
|
89
|
+
data += 41
|
90
|
+
expect(event.get("reference_test")).to eq("1")
|
91
|
+
end
|
92
|
+
|
93
|
+
it "should report a unix timestamp for %{+%s}" do
|
94
|
+
expect(subject.sprintf("%{+%s}")).to eq("1356998400")
|
95
|
+
end
|
96
|
+
|
97
|
+
it "should work if there is no fieldref in the string" do
|
98
|
+
expect(subject.sprintf("bonjour")).to eq("bonjour")
|
99
|
+
end
|
100
|
+
|
101
|
+
it "should raise error when formatting %{+%s} when @timestamp field is missing" do
|
102
|
+
str = "hello-%{+%s}"
|
103
|
+
subj = subject.clone
|
104
|
+
subj.remove("[@timestamp]")
|
105
|
+
expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
|
106
|
+
end
|
107
|
+
|
108
|
+
it "should report a time with %{+format} syntax", :if => RUBY_ENGINE == "jruby" do
|
109
|
+
expect(subject.sprintf("%{+YYYY}")).to eq("2013")
|
110
|
+
expect(subject.sprintf("%{+MM}")).to eq("01")
|
111
|
+
expect(subject.sprintf("%{+HH}")).to eq("00")
|
112
|
+
end
|
113
|
+
|
114
|
+
it "should support mixed string" do
|
115
|
+
expect(subject.sprintf("foo %{+YYYY-MM-dd} %{type}")).to eq("foo 2013-01-01 sprintf")
|
116
|
+
end
|
117
|
+
|
118
|
+
it "should raise error with %{+format} syntax when @timestamp field is missing", :if => RUBY_ENGINE == "jruby" do
|
119
|
+
str = "logstash-%{+YYYY}"
|
120
|
+
subj = subject.clone
|
121
|
+
subj.remove("[@timestamp]")
|
122
|
+
expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
|
123
|
+
end
|
124
|
+
|
125
|
+
it "should report fields with %{field} syntax" do
|
126
|
+
expect(subject.sprintf("%{type}")).to eq("sprintf")
|
127
|
+
expect(subject.sprintf("%{message}")).to eq(subject.get("message"))
|
128
|
+
end
|
129
|
+
|
130
|
+
it "should print deep fields" do
|
131
|
+
expect(subject.sprintf("%{[j][k1]}")).to eq("v")
|
132
|
+
expect(subject.sprintf("%{[j][k2][0]}")).to eq("w")
|
133
|
+
end
|
134
|
+
|
135
|
+
it "should be able to take a non-string for the format" do
|
136
|
+
expect(subject.sprintf(2)).to eq("2")
|
137
|
+
end
|
138
|
+
|
139
|
+
it "should allow to use the metadata when calling #sprintf" do
|
140
|
+
expect(subject.sprintf("super-%{[@metadata][fancy]}")).to eq("super-pants")
|
141
|
+
end
|
142
|
+
|
143
|
+
it "should allow to use nested hash from the metadata field" do
|
144
|
+
expect(subject.sprintf("%{[@metadata][have-to-go][deeper]}")).to eq("inception")
|
145
|
+
end
|
146
|
+
|
147
|
+
it "should return a json string if the key is a hash" do
|
148
|
+
expect(subject.sprintf("%{[j][k3]}")).to eq("{\"4\":\"m\"}")
|
149
|
+
end
|
150
|
+
|
151
|
+
it "should not strip last character" do
|
152
|
+
expect(subject.sprintf("%{type}%{message}|")).to eq("sprintfhello world|")
|
153
|
+
end
|
154
|
+
|
155
|
+
it "should render nil array values as leading empty string" do
|
156
|
+
expect(subject.set("foo", [nil, "baz"])).to eq([nil, "baz"])
|
157
|
+
|
158
|
+
expect(subject.get("[foo][0]")).to be_nil
|
159
|
+
expect(subject.get("[foo][1]")).to eq("baz")
|
160
|
+
|
161
|
+
expect(subject.sprintf("%{[foo]}")).to eq(",baz")
|
162
|
+
end
|
163
|
+
|
164
|
+
it "should render nil array values as middle empty string" do
|
165
|
+
expect(subject.set("foo", ["bar", nil, "baz"])).to eq(["bar", nil, "baz"])
|
166
|
+
|
167
|
+
expect(subject.get("[foo][0]")).to eq("bar")
|
168
|
+
expect(subject.get("[foo][1]")).to be_nil
|
169
|
+
expect(subject.get("[foo][2]")).to eq("baz")
|
170
|
+
|
171
|
+
expect(subject.sprintf("%{[foo]}")).to eq("bar,,baz")
|
172
|
+
end
|
173
|
+
|
174
|
+
it "should render nil array values as trailing empty string" do
|
175
|
+
expect(subject.set("foo", ["bar", nil])).to eq(["bar", nil])
|
176
|
+
|
177
|
+
expect(subject.get("[foo][0]")).to eq("bar")
|
178
|
+
expect(subject.get("[foo][1]")).to be_nil
|
179
|
+
|
180
|
+
expect(subject.sprintf("%{[foo]}")).to eq("bar,")
|
181
|
+
end
|
182
|
+
|
183
|
+
it "should render deep arrays with nil value" do
|
184
|
+
subject.set("[foo]", [[12, nil], 56])
|
185
|
+
expect(subject.sprintf("%{[foo]}")).to eq("12,,56")
|
186
|
+
end
|
187
|
+
|
188
|
+
context "#encoding" do
|
189
|
+
it "should return known patterns as UTF-8" do
|
190
|
+
expect(subject.sprintf("%{message}").encoding).to eq(Encoding::UTF_8)
|
191
|
+
end
|
192
|
+
|
193
|
+
it "should return unknown patterns as UTF-8" do
|
194
|
+
expect(subject.sprintf("%{unknown_pattern}").encoding).to eq(Encoding::UTF_8)
|
195
|
+
end
|
196
|
+
end
|
197
|
+
end
|
198
|
+
|
199
|
+
context "#[]" do
|
200
|
+
it "should fetch data" do
|
201
|
+
expect(subject.get("type")).to eq("sprintf")
|
202
|
+
end
|
203
|
+
it "should fetch fields" do
|
204
|
+
expect(subject.get("a")).to eq("b")
|
205
|
+
expect(subject.get('c')['d']).to eq("f")
|
206
|
+
end
|
207
|
+
it "should fetch deep fields" do
|
208
|
+
expect(subject.get("[j][k1]")).to eq("v")
|
209
|
+
expect(subject.get("[c][d]")).to eq("f")
|
210
|
+
expect(subject.get('[f][g][h]')).to eq("i")
|
211
|
+
expect(subject.get('[j][k3][4]')).to eq("m")
|
212
|
+
expect(subject.get('[j][5]')).to eq(7)
|
213
|
+
|
214
|
+
end
|
215
|
+
|
216
|
+
it "should be fast?", :performance => true do
|
217
|
+
count = 1000000
|
218
|
+
2.times do
|
219
|
+
start = Time.now
|
220
|
+
count.times { subject.get("[j][k1]") }
|
221
|
+
duration = Time.now - start
|
222
|
+
puts "event #[] rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
|
223
|
+
end
|
224
|
+
end
|
225
|
+
end
|
226
|
+
|
227
|
+
context "#include?" do
|
228
|
+
it "should include existing fields" do
|
229
|
+
expect(subject.include?("c")).to eq(true)
|
230
|
+
expect(subject.include?("[c][d]")).to eq(true)
|
231
|
+
expect(subject.include?("[j][k4][0][nested]")).to eq(true)
|
232
|
+
end
|
233
|
+
|
234
|
+
it "should include field with nil value" do
|
235
|
+
expect(subject.include?("nilfield")).to eq(true)
|
236
|
+
end
|
237
|
+
|
238
|
+
it "should include @metadata field" do
|
239
|
+
expect(subject.include?("@metadata")).to eq(true)
|
240
|
+
end
|
241
|
+
|
242
|
+
it "should include field within @metadata" do
|
243
|
+
expect(subject.include?("[@metadata][fancy]")).to eq(true)
|
244
|
+
end
|
245
|
+
|
246
|
+
it "should not include non-existing fields" do
|
247
|
+
expect(subject.include?("doesnotexist")).to eq(false)
|
248
|
+
expect(subject.include?("[j][doesnotexist]")).to eq(false)
|
249
|
+
expect(subject.include?("[tag][0][hello][yes]")).to eq(false)
|
250
|
+
end
|
251
|
+
|
252
|
+
it "should include within arrays" do
|
253
|
+
expect(subject.include?("[tags][0]")).to eq(true)
|
254
|
+
expect(subject.include?("[tags][1]")).to eq(false)
|
255
|
+
end
|
256
|
+
end
|
257
|
+
|
258
|
+
context "#overwrite" do
|
259
|
+
it "should swap data with new content" do
|
260
|
+
new_event = LogStash::Event.new(
|
261
|
+
"type" => "new",
|
262
|
+
"message" => "foo bar",
|
263
|
+
)
|
264
|
+
subject.overwrite(new_event)
|
265
|
+
|
266
|
+
expect(subject.get("message")).to eq("foo bar")
|
267
|
+
expect(subject.get("type")).to eq("new")
|
268
|
+
|
269
|
+
["tags", "source", "a", "c", "f", "j"].each do |field|
|
270
|
+
expect(subject.get(field)).to be_nil
|
271
|
+
end
|
272
|
+
end
|
273
|
+
end
|
274
|
+
|
275
|
+
context "#append" do
|
276
|
+
it "should append strings to an array" do
|
277
|
+
subject.append(LogStash::Event.new("message" => "another thing"))
|
278
|
+
expect(subject.get("message")).to eq([ "hello world", "another thing" ])
|
279
|
+
end
|
280
|
+
|
281
|
+
it "should concatenate tags" do
|
282
|
+
subject.append(LogStash::Event.new("tags" => [ "tag2" ]))
|
283
|
+
# added to_a for when array is a Java Collection when produced from json input
|
284
|
+
# TODO: we have to find a better way to handle this in tests. maybe override
|
285
|
+
# rspec eq or == to do an explicit to_a when comparing arrays?
|
286
|
+
expect(subject.get("tags").to_a).to eq([ "tag1", "tag2" ])
|
287
|
+
end
|
288
|
+
|
289
|
+
context "when event field is nil" do
|
290
|
+
it "should add single value as string" do
|
291
|
+
subject.append(LogStash::Event.new({"field1" => "append1"}))
|
292
|
+
expect(subject.get("field1")).to eq("append1")
|
293
|
+
end
|
294
|
+
it "should add multi values as array" do
|
295
|
+
subject.append(LogStash::Event.new({"field1" => [ "append1","append2" ]}))
|
296
|
+
expect(subject.get("field1")).to eq([ "append1","append2" ])
|
297
|
+
end
|
298
|
+
end
|
299
|
+
|
300
|
+
context "when event field is a string" do
|
301
|
+
before { subject.set("field1", "original1") }
|
302
|
+
|
303
|
+
it "should append string to values, if different from current" do
|
304
|
+
subject.append(LogStash::Event.new({"field1" => "append1"}))
|
305
|
+
expect(subject.get("field1")).to eq([ "original1", "append1" ])
|
306
|
+
end
|
307
|
+
it "should not change value, if appended value is equal current" do
|
308
|
+
subject.append(LogStash::Event.new({"field1" => "original1"}))
|
309
|
+
expect(subject.get("field1")).to eq("original1")
|
310
|
+
end
|
311
|
+
it "should concatenate values in an array" do
|
312
|
+
subject.append(LogStash::Event.new({"field1" => [ "append1" ]}))
|
313
|
+
expect(subject.get("field1")).to eq([ "original1", "append1" ])
|
314
|
+
end
|
315
|
+
it "should join array, removing duplicates" do
|
316
|
+
subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
|
317
|
+
expect(subject.get("field1")).to eq([ "original1", "append1" ])
|
318
|
+
end
|
319
|
+
end
|
320
|
+
context "when event field is an array" do
|
321
|
+
before { subject.set("field1", [ "original1", "original2" ] )}
|
322
|
+
|
323
|
+
it "should append string values to array, if not present in array" do
|
324
|
+
subject.append(LogStash::Event.new({"field1" => "append1"}))
|
325
|
+
expect(subject.get("field1")).to eq([ "original1", "original2", "append1" ])
|
326
|
+
end
|
327
|
+
it "should not append string values, if the array already contains it" do
|
328
|
+
subject.append(LogStash::Event.new({"field1" => "original1"}))
|
329
|
+
expect(subject.get("field1")).to eq([ "original1", "original2" ])
|
330
|
+
end
|
331
|
+
it "should join array, removing duplicates" do
|
332
|
+
subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
|
333
|
+
expect(subject.get("field1")).to eq([ "original1", "original2", "append1" ])
|
334
|
+
end
|
335
|
+
end
|
336
|
+
|
337
|
+
end
|
338
|
+
|
339
|
+
it "timestamp parsing speed", :performance => true do
|
340
|
+
warmup = 10000
|
341
|
+
count = 1000000
|
342
|
+
|
343
|
+
data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
|
344
|
+
event = LogStash::Event.new(data)
|
345
|
+
expect(event.get("@timestamp")).to be_a(LogStash::Timestamp)
|
346
|
+
|
347
|
+
duration = 0
|
348
|
+
[warmup, count].each do |i|
|
349
|
+
start = Time.now
|
350
|
+
i.times do
|
351
|
+
data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
|
352
|
+
LogStash::Event.new(data.clone)
|
353
|
+
end
|
354
|
+
duration = Time.now - start
|
355
|
+
end
|
356
|
+
puts "event @timestamp parse rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
|
357
|
+
end
|
358
|
+
|
359
|
+
# Every format listed here must be accepted by the Event constructor when
# used as the @timestamp value; two regression contexts pin historical
# parser bugs (LOGSTASH-1738 / LOGSTASH-1732).
context "acceptable @timestamp formats" do
  subject { LogStash::Event.new }

  formats = %w[
    YYYY-MM-dd'T'HH:mm:ss.SSSZ
    YYYY-MM-dd'T'HH:mm:ss.SSSSSSZ
    YYYY-MM-dd'T'HH:mm:ss.SSS
    YYYY-MM-dd'T'HH:mm:ss
    YYYY-MM-dd'T'HH:mm:ssZ
  ]
  formats.each do |format|
    it "includes #{format}" do
      # Render "now" through the candidate format, then feed the result
      # back into the constructor; a parse failure surfaces as an error.
      time = subject.sprintf("%{+#{format}}")
      begin
        LogStash::Event.new("@timestamp" => time)
      rescue => e
        raise StandardError, "Time '#{time}' was rejected. #{e.class}: #{e.to_s}"
      end
    end
  end

  context "from LOGSTASH-1738" do
    it "does not error" do
      LogStash::Event.new("@timestamp" => "2013-12-29T23:12:52.371240+02:00")
    end
  end

  context "from LOGSTASH-1732" do
    it "does not error" do
      LogStash::Event.new("@timestamp" => "2013-12-27T11:07:25+00:00")
    end
  end
end
|
392
|
+
|
393
|
+
# Covers how Event#initialize handles the @timestamp key: coercion of
# Time / LogStash::Timestamp / ISO8601-string inputs, defaulting to "now"
# when absent, and tagging of unparseable values.
context "timestamp initialization" do
  it "should coerce timestamp" do
    t = Time.iso8601("2014-06-12T00:12:17.114Z")
    expect(LogStash::Event.new("@timestamp" => t).timestamp.to_i).to eq(t.to_i)
    expect(LogStash::Event.new("@timestamp" => LogStash::Timestamp.new(t)).timestamp.to_i).to eq(t.to_i)
    expect(LogStash::Event.new("@timestamp" => "2014-06-12T00:12:17.114Z").timestamp.to_i).to eq(t.to_i)
  end

  it "should assign current time when no timestamp" do
    expect(LogStash::Event.new({}).timestamp.to_i).to be_within(1).of (Time.now.to_i)
  end

  # On parse failure the event keeps "now" as its timestamp, gains the
  # failure tag, and preserves the rejected value in the failure field.
  # NOTE(review): the "foo" assertions here are duplicated verbatim in
  # "should tag for invalid string format" below — consider consolidating.
  it "should tag for invalid value" do
    event = LogStash::Event.new("@timestamp" => "foo")
    expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i
    expect(event.get("tags")).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
    expect(event.get(LogStash::Event::TIMESTAMP_FAILURE_FIELD)).to eq("foo")

    event = LogStash::Event.new("@timestamp" => 666)
    expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i
    expect(event.get("tags")).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
    expect(event.get(LogStash::Event::TIMESTAMP_FAILURE_FIELD)).to eq(666)
  end

  # NOTE(review): no expectation is set on any logger here — this example
  # only proves construction does not raise; the advertised "warn"
  # behavior itself is never asserted.
  it "should warn for invalid value" do
    LogStash::Event.new("@timestamp" => :foo)
    LogStash::Event.new("@timestamp" => 666)
  end

  it "should tag for invalid string format" do
    event = LogStash::Event.new("@timestamp" => "foo")
    expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i
    expect(event.get("tags")).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
    expect(event.get(LogStash::Event::TIMESTAMP_FAILURE_FIELD)).to eq("foo")
  end

  # NOTE(review): like the warn example above, this carries no assertion.
  it "should warn for invalid string format" do
    LogStash::Event.new("@timestamp" => "foo")
  end
end
|
433
|
+
|
434
|
+
# Serialization contract: #to_json emits the ISO8601 @timestamp, the user
# fields, and the injected @version — and any arguments passed to it must
# not change the output.
context "to_json" do
  it "should support to_json" do
    new_event = LogStash::Event.new(
      "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
      "message" => "foo bar",
    )
    json = new_event.to_json

    expected = '{"@timestamp":"2014-09-23T19:26:15.832Z","message":"foo bar","@version":"1"}'
    # Compare parsed structures, not raw strings, so key order is irrelevant.
    expect(JSON.parse(json)).to eq(JSON.parse(expected))
  end

  it "should support to_json and ignore arguments" do
    new_event = LogStash::Event.new(
      "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
      "message" => "foo bar",
    )
    json = new_event.to_json(:foo => 1, :bar => "baz")

    expected = '{"@timestamp":"2014-09-23T19:26:15.832Z","message":"foo bar","@version":"1"}'
    expect(JSON.parse(json)).to eq(JSON.parse(expected))
  end
end
|
455
|
+
|
456
|
+
# Behavior of the reserved @metadata field: values stored under @metadata
# travel with the event and remain readable via field references, but are
# excluded from to_hash / to_json serialization.
context "metadata" do
  context "with existing metadata" do
    subject { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => "pants" }) }

    it "should not include metadata in to_hash" do
      expect(subject.to_hash.keys).not_to include("@metadata")

      # 'hello', '@timestamp', and '@version'
      expect(subject.to_hash.keys.count).to eq(3)
    end

    it "should still allow normal field access" do
      expect(subject.get("hello")).to eq("world")
    end
  end

  context "with set metadata" do
    let(:fieldref) { "[@metadata][foo][bar]" }
    let(:value) { "bar" }
    subject { LogStash::Event.new("normal" => "normal") }
    before do
      # Verify the test is configured correctly.
      expect(fieldref).to start_with("[@metadata]")

      # Set it.
      subject.set(fieldref, value)
    end

    it "should still allow normal field access" do
      expect(subject.get("normal")).to eq("normal")
    end

    it "should allow getting" do
      expect(subject.get(fieldref)).to eq(value)
    end

    it "should be hidden from .to_json" do
      require "json"
      obj = JSON.parse(subject.to_json)
      expect(obj).not_to include("@metadata")
    end

    it "should be hidden from .to_hash" do
      expect(subject.to_hash).not_to include("@metadata")
    end

    # to_hash_with_metadata is the only dump that exposes the tree.
    it "should be accessible through #to_hash_with_metadata" do
      obj = subject.to_hash_with_metadata
      expect(obj).to include("@metadata")
      expect(obj["@metadata"]["foo"]["bar"]).to eq(value)
    end
  end

  context "with no metadata" do
    subject { LogStash::Event.new("foo" => "bar") }
    it "should have no metadata" do
      expect(subject.get("@metadata")).to be_empty
    end
    it "should still allow normal field access" do
      expect(subject.get("foo")).to eq("bar")
    end

    # @metadata was never populated, so even the metadata-inclusive dump
    # omits the key entirely.
    it "should not include the @metadata key" do
      expect(subject.to_hash_with_metadata).not_to include("@metadata")
    end
  end
end
|
523
|
+
|
524
|
+
# The SHUTDOWN / FLUSH constants are in-band control events; their classes
# and predicate methods (flush?, shutdown?) are part of the plugin API.
context "signal events" do
  it "should define the shutdown and flush event constants" do
    # the SHUTDOWN and FLUSH constants are part of the plugin API contract
    # if they are changed, all plugins must be updated
    expect(LogStash::SHUTDOWN).to be_a(LogStash::ShutdownEvent)
    expect(LogStash::FLUSH).to be_a(LogStash::FlushEvent)
  end

  it "should define the shutdown event with SignalEvent as parent class" do
    [LogStash::SHUTDOWN, LogStash::FLUSH].each do |signal|
      expect(signal).to be_a(LogStash::SignalEvent)
    end
  end

  it "should define the flush? method" do
    expect(LogStash::FLUSH.flush?).to be_truthy
    expect(LogStash::SHUTDOWN.flush?).to be_falsey
  end

  it "should define the shutdown? method" do
    expect(LogStash::FLUSH.shutdown?).to be_falsey
    expect(LogStash::SHUTDOWN.shutdown?).to be_truthy
  end
end
|
547
|
+
end
|
548
|
+
|
549
|
+
# Fixture fed to the "all event tests" shared examples: deliberately mixes
# nested hashes, arrays, a nil value, an integer key alongside its string
# twin, and an @metadata subtree so field-reference access is exercised
# across every value shape the Event API supports.
let(:event_hash) do
  {
    "@timestamp" => "2013-01-01T00:00:00.000Z",
    "type" => "sprintf",
    "message" => "hello world",
    "tags" => [ "tag1" ],
    "source" => "/home/foo",
    "a" => "b",
    "c" => {
      "d" => "f",
      "e" => {"f" => "g"}
    },
    "f" => { "g" => { "h" => "i" } },
    "j" => {
      "k1" => "v",
      "k2" => [ "w", "x" ],
      "k3" => {"4" => "m"},
      "k4" => [ {"nested" => "cool"} ],
      # Integer key 5 and string key "5" coexist on purpose: lookups must
      # not conflate the two key types.
      5 => 6,
      "5" => 7
    },
    "nilfield" => nil,
    "@metadata" => { "fancy" => "pants", "have-to-go" => { "deeper" => "inception" } }
  }
end
|
574
|
+
|
575
|
+
# Runs the full shared-example suite against an event built from a plain
# Ruby hash.
describe "using normal hash input" do
  it_behaves_like "all event tests" do
    subject{LogStash::Event.new(event_hash)}
  end
end
|
580
|
+
|
581
|
+
# Runs the same shared-example suite, but on a hash round-tripped through
# LogStash::Json so the input mirrors what JSON deserialization produces.
describe "using hash input from deserialized json" do
  # this is to test the case when JrJackson deserializes Json and produces
  # native Java Collections objects for efficiency
  it_behaves_like "all event tests" do
    subject{LogStash::Event.new(LogStash::Json.load(LogStash::Json.dump(event_hash)))}
  end
end
|
588
|
+
|
589
|
+
|
590
|
+
# Event#to_s renders "<iso8601 timestamp> <host> <message>" through the
# StringInterpolation template cache; both events share one template.
describe "#to_s" do
  let(:timestamp) { LogStash::Timestamp.new }
  let(:event1) { LogStash::Event.new({ "@timestamp" => timestamp, "host" => "foo", "message" => "bar"}) }
  let(:event2) { LogStash::Event.new({ "host" => "bar", "message" => "foo"}) }

  it "should cache only one template" do
    LogStash::StringInterpolation.clear_cache
    # Two distinct events rendered — but identical template, so the cache
    # grows by exactly one entry.
    expect {
      event1.to_s
      event2.to_s
    }.to change { LogStash::StringInterpolation.cache_size }.by(1)
  end

  it "return the string containing the timestamp, the host and the message" do
    expected = "#{timestamp.to_iso8601} #{event1.get("host")} #{event1.get("message")}"
    expect(event1.to_s).to eq(expected)
  end
end
|
607
|
+
|
608
|
+
# Regression coverage for the field-reference target cache: overwriting a
# parent path must invalidate cached child lookups (and vice versa), so a
# get issued after any set always reflects the fresh structure. The
# assertion ORDER is the point of this spec — do not reorder.
describe "Event accessors" do
  let(:event) { LogStash::Event.new({ "message" => "foo" }) }

  it "should invalidate target caching" do
    # Prime the cache with a miss on a nested reference.
    expect(event.get("[a][0]")).to be_nil

    # Setting [a][0] auto-creates "a" as a hash keyed by the STRING "0",
    # not as an array.
    expect(event.set("[a][0]", 42)).to eq(42)
    expect(event.get("[a][0]")).to eq(42)
    expect(event.get("[a]")).to eq({"0" => 42})

    # Replacing "a" wholesale with an array must drop the cached hash
    # target: [a][0] now resolves by array index.
    expect(event.set("[a]", [42, 24])).to eq([42, 24])
    expect(event.get("[a]")).to eq([42, 24])

    expect(event.get("[a][0]")).to eq(42)

    # Replace again; the cached index-0 lookup must see the new element.
    expect(event.set("[a]", [24, 42])).to eq([24, 42])
    expect(event.get("[a][0]")).to eq(24)

    # Writing a hash into an array slot, then reading back through both
    # the slot and the whole array.
    expect(event.set("[a][0]", {"a "=> 99, "b" => 98})).to eq({"a "=> 99, "b" => 98})
    expect(event.get("[a][0]")).to eq({"a "=> 99, "b" => 98})

    expect(event.get("[a]")).to eq([{"a "=> 99, "b" => 98}, 42])
    expect(event.get("[a][0]")).to eq({"a "=> 99, "b" => 98})
    expect(event.get("[a][1]")).to eq(42)
    expect(event.get("[a][0][b]")).to eq(98)
  end
end
|
635
|
+
end
|
636
|
+
|