logstash-core 1.5.0.rc3.snapshot6-java → 1.5.0.rc4-java


This version of logstash-core has been flagged as a potentially problematic release.

Files changed (46)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core.rb +2 -0
  3. data/lib/logstash/agent.rb +0 -33
  4. data/lib/logstash/config/config_ast.rb +1 -1
  5. data/lib/logstash/environment.rb +8 -30
  6. data/lib/logstash/filters/base.rb +19 -0
  7. data/lib/logstash/namespace.rb +0 -1
  8. data/lib/logstash/runner.rb +3 -51
  9. data/lib/logstash/version.rb +1 -1
  10. data/logstash-core.gemspec +54 -0
  11. data/spec/core/conditionals_spec.rb +428 -0
  12. data/spec/core/config_mixin_spec.rb +99 -0
  13. data/spec/core/config_spec.rb +108 -0
  14. data/spec/core/environment_spec.rb +44 -0
  15. data/spec/core/event_spec.rb +468 -0
  16. data/spec/core/pipeline_spec.rb +198 -0
  17. data/spec/core/plugin_spec.rb +106 -0
  18. data/spec/core/runner_spec.rb +39 -0
  19. data/spec/core/timestamp_spec.rb +83 -0
  20. data/spec/filters/base_spec.rb +318 -0
  21. data/spec/inputs/base_spec.rb +13 -0
  22. data/spec/lib/logstash/bundler_spec.rb +120 -0
  23. data/spec/lib/logstash/java_integration_spec.rb +257 -0
  24. data/spec/logstash/agent_spec.rb +37 -0
  25. data/spec/outputs/base_spec.rb +47 -0
  26. data/spec/spec_helper.rb +1 -0
  27. data/spec/util/accessors_spec.rb +215 -0
  28. data/spec/util/charset_spec.rb +74 -0
  29. data/spec/util/fieldeval_spec.rb +96 -0
  30. data/spec/util/gemfile_spec.rb +212 -0
  31. data/spec/util/json_spec.rb +97 -0
  32. data/spec/util/plugin_version_spec.rb +48 -0
  33. data/spec/util_spec.rb +34 -0
  34. metadata +84 -160
  35. data/lib/logstash-event.rb +0 -2
  36. data/lib/logstash.rb +0 -4
  37. data/lib/logstash/bundler.rb +0 -156
  38. data/lib/logstash/gemfile.rb +0 -193
  39. data/lib/logstash/pluginmanager.rb +0 -17
  40. data/lib/logstash/pluginmanager/command.rb +0 -38
  41. data/lib/logstash/pluginmanager/install.rb +0 -141
  42. data/lib/logstash/pluginmanager/list.rb +0 -44
  43. data/lib/logstash/pluginmanager/main.rb +0 -21
  44. data/lib/logstash/pluginmanager/uninstall.rb +0 -43
  45. data/lib/logstash/pluginmanager/update.rb +0 -105
  46. data/lib/logstash/pluginmanager/util.rb +0 -89
data/spec/core/config_mixin_spec.rb
@@ -0,0 +1,99 @@
+ require "spec_helper"
+ require "logstash/config/mixin"
+
+ describe LogStash::Config::Mixin do
+   context "when validating :bytes successfully" do
+     subject do
+       local_num_bytes = num_bytes # needs to be locally scoped :(
+       Class.new(LogStash::Filters::Base) do
+         include LogStash::Config::Mixin
+         config_name "test"
+         milestone 1
+         config :size_bytes, :validate => :bytes
+         config :size_default, :validate => :bytes, :default => "#{local_num_bytes}"
+         config :size_upcase, :validate => :bytes
+         config :size_downcase, :validate => :bytes
+         config :size_space, :validate => :bytes
+       end.new({
+         "size_bytes" => "#{local_num_bytes}",
+         "size_upcase" => "#{local_num_bytes}KiB".upcase,
+         "size_downcase" => "#{local_num_bytes}KiB".downcase,
+         "size_space" => "#{local_num_bytes} KiB"
+       })
+     end
+
+     let!(:num_bytes) { rand(1000) }
+     let!(:num_kbytes) { num_bytes * 1024 }
+
+     it "should validate :bytes successfully with no units" do
+       expect(subject.size_bytes).to eq(num_bytes)
+     end
+
+     it "should allow setting valid default" do
+       expect(subject.size_default).to eq(num_bytes)
+     end
+
+     it "should be case-insensitive when parsing units" do
+       expect(subject.size_upcase).to eq(num_kbytes)
+       expect(subject.size_downcase).to eq(num_kbytes)
+     end
+
+     it "should accept one space between num_bytes and unit suffix" do
+       expect(subject.size_space).to eq(num_kbytes)
+     end
+   end
+
+   context "when raising configuration errors while validating" do
+     it "should raise configuration error when provided with invalid units" do
+       expect {
+         Class.new(LogStash::Filters::Base) do
+           include LogStash::Config::Mixin
+           config_name "test"
+           milestone 1
+           config :size_file, :validate => :bytes
+         end.new({"size_file" => "10 yolobytes"})
+       }.to raise_error(LogStash::ConfigurationError)
+     end
+
+     it "should raise configuration error when provided with too many spaces" do
+       expect {
+         Class.new(LogStash::Filters::Base) do
+           include LogStash::Config::Mixin
+           config_name "test"
+           milestone 1
+           config :size_file, :validate => :bytes
+ end.new({"size_file" => "10 kib"})
+       }.to raise_error(LogStash::ConfigurationError)
+     end
+   end
+
+   context "when validating :password" do
+     let(:klass) do
+       Class.new(LogStash::Filters::Base) do
+         config_name "fake"
+         config :password, :validate => :password
+       end
+     end
+
+     let(:secret) { "fancy pants" }
+     subject { klass.new("password" => secret) }
+
+     it "should be a Password object" do
+       expect(subject.password).to(be_a(LogStash::Util::Password))
+     end
+
+     it "should make password values hidden" do
+       expect(subject.password.to_s).to(be == "<password>")
+       expect(subject.password.inspect).to(be == "<password>")
+     end
+
+     it "should show password values via #value" do
+       expect(subject.password.value).to(be == secret)
+     end
+
+     it "should correctly copy password types" do
+       clone = subject.class.new(subject.params)
+       expect(clone.password.value).to(be == secret)
+     end
+   end
+ end
data/spec/core/config_spec.rb
@@ -0,0 +1,108 @@
+ # encoding: utf-8
+ # config syntax tests
+ #
+ require "spec_helper"
+ require "logstash/config/grammar"
+ require "logstash/config/config_ast"
+
+ describe LogStashConfigParser do
+   context '#parse' do
+     context "valid configuration" do
+       it "should permit single-quoted attribute names" do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           input {
+             example {
+               'foo' => 'bar'
+               test => { 'bar' => 'baz' }
+             }
+           }
+         ))
+
+         expect(config).not_to be_nil
+       end
+
+       it "should permit empty plugin sections" do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           filter {
+           }
+         ))
+
+         expect(config).not_to be_nil
+       end
+
+       it 'permits hash to contains array' do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           input{
+             example {
+               match => {
+                 "message"=> ["pattern1", "pattern2", "pattern3"]
+               }
+             }
+           }))
+         expect(config).not_to be_nil
+       end
+     end
+   end
+
+   context "#compile" do
+     context "invalid configuration" do
+       it "rejects duplicate hash key" do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           input {
+             example {
+               match => {
+                 "message"=> "pattern1"
+                 "message"=> "pattern2"
+                 "message"=> "pattern3"
+               }
+             }
+           }
+         ))
+
+         expect { config.compile }.to raise_error(LogStash::ConfigurationError, /Duplicate keys found in your configuration: \["message"\]/)
+       end
+
+       it "rejects duplicate keys in nested hash" do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           input {
+             example {
+               match => {
+                 "message"=> "pattern1"
+                 "more" => {
+                   "cool" => true
+                   "cool" => true
+                 }
+               }
+             }
+           }
+         ))
+
+         expect { config.compile }.to raise_error(LogStash::ConfigurationError, /Duplicate keys found in your configuration: \["cool"\]/)
+       end
+
+       it "rejects a key with multiple double quotes" do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           input {
+             example {
+               match => {
+                 "message"=> "pattern1"
+                 ""more"" => {
+                   "cool" => true
+                   "cool" => true
+                 }
+               }
+             }
+           }
+         ))
+
+         expect(config).to be_nil
+       end
+     end
+   end
+ end
data/spec/core/environment_spec.rb
@@ -0,0 +1,44 @@
+ require "spec_helper"
+ require "logstash/environment"
+
+ describe LogStash::Environment do
+
+   context "when loading jars dependencies" do
+
+     let(:default_jars_location) { File.join("vendor", "jar-dependencies") }
+     let(:default_runtime_location) { File.join(default_jars_location,"runtime-jars","*.jar") }
+     let(:default_test_location) { File.join(default_jars_location,"test-jars","*.jar") }
+
+     it "raises an exception if jruby is not available" do
+       expect(subject).to receive(:jruby?).and_return(false)
+       expect { subject.load_runtime_jars! }.to raise_error
+     end
+
+     it "find runtime jars in the default location" do
+       expect(subject).to receive(:find_jars).with(default_runtime_location).and_return([])
+       subject.load_runtime_jars!
+     end
+
+     it "find test jars in the default location" do
+       expect(subject).to receive(:find_jars).with(default_test_location).and_return([])
+       subject.load_test_jars!
+     end
+
+     context "when loading a jar file" do
+
+       let(:dummy_jar_file) { File.join(default_jars_location,"runtime-jars","elasticsearch.jar") }
+
+       it "requires the jar files if there are jars to load" do
+         expect(subject).to receive(:find_jars).with(default_runtime_location).and_return([dummy_jar_file])
+         expect(subject).to receive(:require).with(dummy_jar_file)
+         subject.load_runtime_jars!
+       end
+
+       it "raises an exception if there are no jars to load" do
+         allow(Dir).to receive(:glob).and_return([])
+         expect { subject.load_runtime_jars! }.to raise_error
+       end
+
+     end
+   end
+ end
data/spec/core/event_spec.rb
@@ -0,0 +1,468 @@
+ # encoding: utf-8
+ require "spec_helper"
+
+ describe LogStash::Event do
+
+   shared_examples "all event tests" do
+     context "[]=" do
+       it "should raise an exception if you attempt to set @timestamp to a value type other than a Time object" do
+         expect{subject["@timestamp"] = "crash!"}.to raise_error(TypeError)
+       end
+
+       it "should assign simple fields" do
+         expect(subject["foo"]).to be_nil
+         expect(subject["foo"] = "bar").to eq("bar")
+         expect(subject["foo"]).to eq("bar")
+       end
+
+       it "should overwrite simple fields" do
+         expect(subject["foo"]).to be_nil
+         expect(subject["foo"] = "bar").to eq("bar")
+         expect(subject["foo"]).to eq("bar")
+
+         expect(subject["foo"] = "baz").to eq("baz")
+         expect(subject["foo"]).to eq("baz")
+       end
+
+       it "should assign deep fields" do
+         expect(subject["[foo][bar]"]).to be_nil
+         expect(subject["[foo][bar]"] = "baz").to eq("baz")
+         expect(subject["[foo][bar]"]).to eq("baz")
+       end
+
+       it "should overwrite deep fields" do
+         expect(subject["[foo][bar]"]).to be_nil
+         expect(subject["[foo][bar]"] = "baz").to eq("baz")
+         expect(subject["[foo][bar]"]).to eq("baz")
+
+         expect(subject["[foo][bar]"] = "zab").to eq("zab")
+         expect(subject["[foo][bar]"]).to eq("zab")
+       end
+     end
+
+     context "#sprintf" do
+       it "should report a unix timestamp for %{+%s}" do
+         expect(subject.sprintf("%{+%s}")).to eq("1356998400")
+       end
+
+       it "should raise error when formatting %{+%s} when @timestamp field is missing" do
+         str = "hello-%{+%s}"
+         subj = subject.clone
+         subj.remove("[@timestamp]")
+         expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
+       end
+
+       it "should report a time with %{+format} syntax", :if => RUBY_ENGINE == "jruby" do
+         expect(subject.sprintf("%{+YYYY}")).to eq("2013")
+         expect(subject.sprintf("%{+MM}")).to eq("01")
+         expect(subject.sprintf("%{+HH}")).to eq("00")
+       end
+
+       it "should raise error with %{+format} syntax when @timestamp field is missing", :if => RUBY_ENGINE == "jruby" do
+         str = "logstash-%{+YYYY}"
+         subj = subject.clone
+         subj.remove("[@timestamp]")
+         expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
+       end
+
+       it "should report fields with %{field} syntax" do
+         expect(subject.sprintf("%{type}")).to eq("sprintf")
+         expect(subject.sprintf("%{message}")).to eq(subject["message"])
+       end
+
+       it "should print deep fields" do
+         expect(subject.sprintf("%{[j][k1]}")).to eq("v")
+         expect(subject.sprintf("%{[j][k2][0]}")).to eq("w")
+       end
+
+       it "should be able to take a non-string for the format" do
+         expect(subject.sprintf(2)).to eq("2")
+       end
+
+       it "should allow to use the metadata when calling #sprintf" do
+         expect(subject.sprintf("super-%{[@metadata][fancy]}")).to eq("super-pants")
+       end
+
+       it "should allow to use nested hash from the metadata field" do
+         expect(subject.sprintf("%{[@metadata][have-to-go][deeper]}")).to eq("inception")
+       end
+     end
+
+     context "#[]" do
+       it "should fetch data" do
+         expect(subject["type"]).to eq("sprintf")
+       end
+       it "should fetch fields" do
+         expect(subject["a"]).to eq("b")
+         expect(subject['c']['d']).to eq("f")
+       end
+       it "should fetch deep fields" do
+         expect(subject["[j][k1]"]).to eq("v")
+         expect(subject["[c][d]"]).to eq("f")
+         expect(subject['[f][g][h]']).to eq("i")
+         expect(subject['[j][k3][4]']).to eq("m")
+         expect(subject['[j][5]']).to eq(7)
+
+       end
+
+       it "should be fast?", :performance => true do
+         count = 1000000
+         2.times do
+           start = Time.now
+           count.times { subject["[j][k1]"] }
+           duration = Time.now - start
+           puts "event #[] rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
+         end
+       end
+     end
+
+     context "#include?" do
+       it "should include existing fields" do
+         expect(subject.include?("c")).to be_true
+         expect(subject.include?("[c][d]")).to be_true
+         expect(subject.include?("[j][k4][0][nested]")).to be_true
+       end
+
+       it "should include field with nil value" do
+         expect(subject.include?("nilfield")).to be_true
+       end
+
+       it "should include @metadata field" do
+         expect(subject.include?("@metadata")).to be_true
+       end
+
+       it "should include field within @metadata" do
+         expect(subject.include?("[@metadata][fancy]")).to be_true
+       end
+
+       it "should not include non-existing fields" do
+         expect(subject.include?("doesnotexist")).to be_false
+         expect(subject.include?("[j][doesnotexist]")).to be_false
+         expect(subject.include?("[tag][0][hello][yes]")).to be_false
+       end
+
+       it "should include within arrays" do
+         expect(subject.include?("[tags][0]")).to be_true
+         expect(subject.include?("[tags][1]")).to be_false
+       end
+     end
+
+     context "#overwrite" do
+       it "should swap data with new content" do
+         new_event = LogStash::Event.new(
+           "type" => "new",
+           "message" => "foo bar",
+         )
+         subject.overwrite(new_event)
+
+         expect(subject["message"]).to eq("foo bar")
+         expect(subject["type"]).to eq("new")
+
+         ["tags", "source", "a", "c", "f", "j"].each do |field|
+           expect(subject[field]).to be_nil
+         end
+       end
+     end
+
+     context "#append" do
+       it "should append strings to an array" do
+         subject.append(LogStash::Event.new("message" => "another thing"))
+         expect(subject["message"]).to eq([ "hello world", "another thing" ])
+       end
+
+       it "should concatenate tags" do
+         subject.append(LogStash::Event.new("tags" => [ "tag2" ]))
+         # added to_a for when array is a Java Collection when produced from json input
+         # TODO: we have to find a better way to handle this in tests. maybe override
+         # rspec eq or == to do an explicit to_a when comparing arrays?
+         expect(subject["tags"].to_a).to eq([ "tag1", "tag2" ])
+       end
+
+       context "when event field is nil" do
+         it "should add single value as string" do
+           subject.append(LogStash::Event.new({"field1" => "append1"}))
+           expect(subject[ "field1" ]).to eq("append1")
+         end
+         it "should add multi values as array" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1","append2" ]}))
+           expect(subject[ "field1" ]).to eq([ "append1","append2" ])
+         end
+       end
+
+       context "when event field is a string" do
+         before { subject[ "field1" ] = "original1" }
+
+         it "should append string to values, if different from current" do
+           subject.append(LogStash::Event.new({"field1" => "append1"}))
+           expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+         end
+         it "should not change value, if appended value is equal current" do
+           subject.append(LogStash::Event.new({"field1" => "original1"}))
+           expect(subject[ "field1" ]).to eq("original1")
+         end
+         it "should concatenate values in an array" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1" ]}))
+           expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+         end
+         it "should join array, removing duplicates" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
+           expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+         end
+       end
+       context "when event field is an array" do
+         before { subject[ "field1" ] = [ "original1", "original2" ] }
+
+         it "should append string values to array, if not present in array" do
+           subject.append(LogStash::Event.new({"field1" => "append1"}))
+           expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
+         end
+         it "should not append string values, if the array already contains it" do
+           subject.append(LogStash::Event.new({"field1" => "original1"}))
+           expect(subject[ "field1" ]).to eq([ "original1", "original2" ])
+         end
+         it "should join array, removing duplicates" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
+           expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
+         end
+       end
+     end
+
+     it "timestamp parsing speed", :performance => true do
+       warmup = 10000
+       count = 1000000
+
+       data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
+       event = LogStash::Event.new(data)
+       expect(event["@timestamp"]).to be_a(LogStash::Timestamp)
+
+       duration = 0
+       [warmup, count].each do |i|
+         start = Time.now
+         i.times do
+           data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
+           LogStash::Event.new(data.clone)
+         end
+         duration = Time.now - start
+       end
+       puts "event @timestamp parse rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
+     end
+
+     context "acceptable @timestamp formats" do
+       subject { LogStash::Event.new }
+
+       formats = [
+         "YYYY-MM-dd'T'HH:mm:ss.SSSZ",
+         "YYYY-MM-dd'T'HH:mm:ss.SSSSSSZ",
+         "YYYY-MM-dd'T'HH:mm:ss.SSS",
+         "YYYY-MM-dd'T'HH:mm:ss",
+         "YYYY-MM-dd'T'HH:mm:ssZ",
+       ]
+       formats.each do |format|
+         it "includes #{format}" do
+           time = subject.sprintf("%{+#{format}}")
+           begin
+             LogStash::Event.new("@timestamp" => time)
+           rescue => e
+             raise StandardError, "Time '#{time}' was rejected. #{e.class}: #{e.to_s}"
+           end
+         end
+       end
+
+       context "from LOGSTASH-1738" do
+         it "does not error" do
+           LogStash::Event.new("@timestamp" => "2013-12-29T23:12:52.371240+02:00")
+         end
+       end
+
+       context "from LOGSTASH-1732" do
+         it "does not error" do
+           LogStash::Event.new("@timestamp" => "2013-12-27T11:07:25+00:00")
+         end
+       end
+     end
+
+     context "timestamp initialization" do
+       let(:logger) { double("logger") }
+
+       it "should coerce timestamp" do
+         t = Time.iso8601("2014-06-12T00:12:17.114Z")
+         expect(LogStash::Timestamp).to receive(:coerce).exactly(3).times.and_call_original
+         expect(LogStash::Event.new("@timestamp" => t).timestamp.to_i).to eq(t.to_i)
+         expect(LogStash::Event.new("@timestamp" => LogStash::Timestamp.new(t)).timestamp.to_i).to eq(t.to_i)
+         expect(LogStash::Event.new("@timestamp" => "2014-06-12T00:12:17.114Z").timestamp.to_i).to eq(t.to_i)
+       end
+
+       it "should assign current time when no timestamp" do
+         ts = LogStash::Timestamp.now
+         expect(LogStash::Timestamp).to receive(:now).and_return(ts)
+         expect(LogStash::Event.new({}).timestamp.to_i).to eq(ts.to_i)
+       end
+
+       it "should tag and warn for invalid value" do
+         ts = LogStash::Timestamp.now
+         expect(LogStash::Timestamp).to receive(:now).twice.and_return(ts)
+         expect(LogStash::Event::LOGGER).to receive(:warn).twice
+
+         event = LogStash::Event.new("@timestamp" => :foo)
+         expect(event.timestamp.to_i).to eq(ts.to_i)
+         expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+         expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(:foo)
+
+         event = LogStash::Event.new("@timestamp" => 666)
+         expect(event.timestamp.to_i).to eq(ts.to_i)
+         expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+         expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(666)
+       end
+
+       it "should tag and warn for invalid string format" do
+         ts = LogStash::Timestamp.now
+         expect(LogStash::Timestamp).to receive(:now).and_return(ts)
+         expect(LogStash::Event::LOGGER).to receive(:warn)
+
+         event = LogStash::Event.new("@timestamp" => "foo")
+         expect(event.timestamp.to_i).to eq(ts.to_i)
+         expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+         expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq("foo")
+       end
+     end
+
+     context "to_json" do
+       it "should support to_json" do
+         new_event = LogStash::Event.new(
+           "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
+           "message" => "foo bar",
+         )
+         json = new_event.to_json
+
+         expect(json).to eq( "{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}")
+       end
+
+       it "should support to_json and ignore arguments" do
+         new_event = LogStash::Event.new(
+           "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
+           "message" => "foo bar",
+         )
+         json = new_event.to_json(:foo => 1, :bar => "baz")
+
+         expect(json).to eq( "{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}")
+       end
+     end
+
+     context "metadata" do
+       context "with existing metadata" do
+         subject { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => "pants" }) }
+
+         it "should not include metadata in to_hash" do
+           expect(subject.to_hash.keys).not_to include("@metadata")
+
+           # 'hello', '@timestamp', and '@version'
+           expect(subject.to_hash.keys.count).to eq(3)
+         end
+
+         it "should still allow normal field access" do
+           expect(subject["hello"]).to eq("world")
+         end
+       end
+
+       context "with set metadata" do
+         let(:fieldref) { "[@metadata][foo][bar]" }
+         let(:value) { "bar" }
+         subject { LogStash::Event.new("normal" => "normal") }
+         before do
+           # Verify the test is configured correctly.
+           expect(fieldref).to start_with("[@metadata]")
+
+           # Set it.
+           subject[fieldref] = value
+         end
+
+         it "should still allow normal field access" do
+           expect(subject["normal"]).to eq("normal")
+         end
+
+         it "should allow getting" do
+           expect(subject[fieldref]).to eq(value)
+         end
+
+         it "should be hidden from .to_json" do
+           require "json"
+           obj = JSON.parse(subject.to_json)
+           expect(obj).not_to include("@metadata")
+         end
+
+         it "should be hidden from .to_hash" do
+           expect(subject.to_hash).not_to include("@metadata")
+         end
+
+         it "should be accessible through #to_hash_with_metadata" do
+           obj = subject.to_hash_with_metadata
+           expect(obj).to include("@metadata")
+           expect(obj["@metadata"]["foo"]["bar"]).to eq(value)
+         end
+       end
+
+       context "with no metadata" do
+         subject { LogStash::Event.new("foo" => "bar") }
+         it "should have no metadata" do
+           expect(subject["@metadata"]).to be_empty
+         end
+         it "should still allow normal field access" do
+           expect(subject["foo"]).to eq("bar")
+         end
+
+         it "should not include the @metadata key" do
+           expect(subject.to_hash_with_metadata).not_to include("@metadata")
+         end
+       end
+     end
+   end
+
+   context "signal events" do
+     it "should define the shutdown event" do
+       # the SHUTDOWN and FLUSH constants are part of the plugin API contract
+       # if they are changed, all plugins must be updated
+       expect(LogStash::SHUTDOWN).to be_a(LogStash::ShutdownEvent)
+       expect(LogStash::FLUSH).to be_a(LogStash::FlushEvent)
+     end
+   end
+
+   let(:event_hash) do
+     {
+       "@timestamp" => "2013-01-01T00:00:00.000Z",
+       "type" => "sprintf",
+       "message" => "hello world",
+       "tags" => [ "tag1" ],
+       "source" => "/home/foo",
+       "a" => "b",
+       "c" => {
+         "d" => "f",
+         "e" => {"f" => "g"}
+       },
+       "f" => { "g" => { "h" => "i" } },
+       "j" => {
+         "k1" => "v",
+         "k2" => [ "w", "x" ],
+         "k3" => {"4" => "m"},
+         "k4" => [ {"nested" => "cool"} ],
+         5 => 6,
+         "5" => 7
+       },
+       "nilfield" => nil,
+       "@metadata" => { "fancy" => "pants", "have-to-go" => { "deeper" => "inception" } }
+     }
+   end
+
+   describe "using normal hash input" do
+     it_behaves_like "all event tests" do
+       subject{LogStash::Event.new(event_hash)}
+     end
+   end
+
+   describe "using hash input from deserialized json" do
+     # this is to test the case when JrJackson deserialises Json and produces
+     # native Java Collections objects for efficiency
+     it_behaves_like "all event tests" do
+       subject{LogStash::Event.new(LogStash::Json.load(LogStash::Json.dump(event_hash)))}
+     end
+   end
+ end