logstash-core 1.5.0.beta2-java → 1.5.0-java

This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of logstash-core might be problematic.
Files changed (58)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core.rb +2 -0
  3. data/lib/logstash/agent.rb +0 -41
  4. data/lib/logstash/config/config_ast.rb +62 -29
  5. data/lib/logstash/config/mixin.rb +3 -3
  6. data/lib/logstash/environment.rb +37 -100
  7. data/lib/logstash/event.rb +32 -20
  8. data/lib/logstash/filters/base.rb +20 -0
  9. data/lib/logstash/java_integration.rb +72 -18
  10. data/lib/logstash/namespace.rb +0 -3
  11. data/lib/logstash/outputs/base.rb +1 -1
  12. data/lib/logstash/patches/bundler.rb +20 -0
  13. data/lib/logstash/patches/rubygems.rb +37 -0
  14. data/lib/logstash/pipeline.rb +59 -39
  15. data/lib/logstash/runner.rb +4 -50
  16. data/lib/logstash/util.rb +0 -1
  17. data/lib/logstash/util/accessors.rb +6 -0
  18. data/lib/logstash/version.rb +1 -1
  19. data/locales/en.yml +5 -0
  20. data/logstash-core.gemspec +51 -0
  21. data/spec/core/conditionals_spec.rb +428 -0
  22. data/spec/core/config_mixin_spec.rb +99 -0
  23. data/spec/core/config_spec.rb +108 -0
  24. data/spec/core/environment_spec.rb +44 -0
  25. data/spec/core/event_spec.rb +473 -0
  26. data/spec/core/pipeline_spec.rb +198 -0
  27. data/spec/core/plugin_spec.rb +106 -0
  28. data/spec/core/runner_spec.rb +39 -0
  29. data/spec/core/timestamp_spec.rb +83 -0
  30. data/spec/filters/base_spec.rb +318 -0
  31. data/spec/inputs/base_spec.rb +13 -0
  32. data/spec/lib/logstash/bundler_spec.rb +120 -0
  33. data/spec/lib/logstash/java_integration_spec.rb +257 -0
  34. data/spec/logstash/agent_spec.rb +37 -0
  35. data/spec/outputs/base_spec.rb +47 -0
  36. data/spec/spec_helper.rb +1 -0
  37. data/spec/util/accessors_spec.rb +215 -0
  38. data/spec/util/charset_spec.rb +74 -0
  39. data/spec/util/fieldeval_spec.rb +96 -0
  40. data/spec/util/gemfile_spec.rb +212 -0
  41. data/spec/util/json_spec.rb +97 -0
  42. data/spec/util/plugin_version_spec.rb +48 -0
  43. data/spec/util/retryable_spec.rb +145 -0
  44. data/spec/util_spec.rb +34 -0
  45. metadata +96 -253
  46. data/lib/logstash-event.rb +0 -2
  47. data/lib/logstash.rb +0 -4
  48. data/lib/logstash/JRUBY-PR1448.rb +0 -32
  49. data/lib/logstash/bundler.rb +0 -124
  50. data/lib/logstash/gemfile.rb +0 -175
  51. data/lib/logstash/pluginmanager.rb +0 -17
  52. data/lib/logstash/pluginmanager/install.rb +0 -112
  53. data/lib/logstash/pluginmanager/list.rb +0 -38
  54. data/lib/logstash/pluginmanager/main.rb +0 -22
  55. data/lib/logstash/pluginmanager/maven_tools_patch.rb +0 -12
  56. data/lib/logstash/pluginmanager/uninstall.rb +0 -49
  57. data/lib/logstash/pluginmanager/update.rb +0 -50
  58. data/lib/logstash/pluginmanager/util.rb +0 -88
data/spec/core/config_mixin_spec.rb
@@ -0,0 +1,99 @@
+ require "spec_helper"
+ require "logstash/config/mixin"
+
+ describe LogStash::Config::Mixin do
+   context "when validating :bytes successfully" do
+     subject do
+       local_num_bytes = num_bytes # needs to be locally scoped :(
+       Class.new(LogStash::Filters::Base) do
+         include LogStash::Config::Mixin
+         config_name "test"
+         milestone 1
+         config :size_bytes, :validate => :bytes
+         config :size_default, :validate => :bytes, :default => "#{local_num_bytes}"
+         config :size_upcase, :validate => :bytes
+         config :size_downcase, :validate => :bytes
+         config :size_space, :validate => :bytes
+       end.new({
+         "size_bytes" => "#{local_num_bytes}",
+         "size_upcase" => "#{local_num_bytes}KiB".upcase,
+         "size_downcase" => "#{local_num_bytes}KiB".downcase,
+         "size_space" => "#{local_num_bytes} KiB"
+       })
+     end
+
+     let!(:num_bytes) { rand(1000) }
+     let!(:num_kbytes) { num_bytes * 1024 }
+
+     it "should validate :bytes successfully with no units" do
+       expect(subject.size_bytes).to eq(num_bytes)
+     end
+
+     it "should allow setting valid default" do
+       expect(subject.size_default).to eq(num_bytes)
+     end
+
+     it "should be case-insensitive when parsing units" do
+       expect(subject.size_upcase).to eq(num_kbytes)
+       expect(subject.size_downcase).to eq(num_kbytes)
+     end
+
+     it "should accept one space between num_bytes and unit suffix" do
+       expect(subject.size_space).to eq(num_kbytes)
+     end
+   end
+
+   context "when raising configuration errors while validating" do
+     it "should raise configuration error when provided with invalid units" do
+       expect {
+         Class.new(LogStash::Filters::Base) do
+           include LogStash::Config::Mixin
+           config_name "test"
+           milestone 1
+           config :size_file, :validate => :bytes
+         end.new({"size_file" => "10 yolobytes"})
+       }.to raise_error(LogStash::ConfigurationError)
+     end
+
+     it "should raise configuration error when provided with too many spaces" do
+       expect {
+         Class.new(LogStash::Filters::Base) do
+           include LogStash::Config::Mixin
+           config_name "test"
+           milestone 1
+           config :size_file, :validate => :bytes
+         end.new({"size_file" => "10 kib"})
+       }.to raise_error(LogStash::ConfigurationError)
+     end
+   end
+
+   context "when validating :password" do
+     let(:klass) do
+       Class.new(LogStash::Filters::Base) do
+         config_name "fake"
+         config :password, :validate => :password
+       end
+     end
+
+     let(:secret) { "fancy pants" }
+     subject { klass.new("password" => secret) }
+
+     it "should be a Password object" do
+       expect(subject.password).to(be_a(LogStash::Util::Password))
+     end
+
+     it "should make password values hidden" do
+       expect(subject.password.to_s).to(be == "<password>")
+       expect(subject.password.inspect).to(be == "<password>")
+     end
+
+     it "should show password values via #value" do
+       expect(subject.password.value).to(be == secret)
+     end
+
+     it "should correctly copy password types" do
+       clone = subject.class.new(subject.params)
+       expect(clone.password.value).to(be == secret)
+     end
+   end
+ end
data/spec/core/config_spec.rb
@@ -0,0 +1,108 @@
+ # encoding: utf-8
+ # config syntax tests
+ #
+ require "spec_helper"
+ require "logstash/config/grammar"
+ require "logstash/config/config_ast"
+
+ describe LogStashConfigParser do
+   context '#parse' do
+     context "valid configuration" do
+       it "should permit single-quoted attribute names" do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           input {
+             example {
+               'foo' => 'bar'
+               test => { 'bar' => 'baz' }
+             }
+           }
+         ))
+
+         expect(config).not_to be_nil
+       end
+
+       it "should permit empty plugin sections" do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           filter {
+           }
+         ))
+
+         expect(config).not_to be_nil
+       end
+
+       it 'permits hash to contains array' do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           input{
+             example {
+               match => {
+                 "message"=> ["pattern1", "pattern2", "pattern3"]
+               }
+             }
+           }))
+         expect(config).not_to be_nil
+       end
+     end
+   end
+
+   context "#compile" do
+     context "invalid configuration" do
+       it "rejects duplicate hash key" do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           input {
+             example {
+               match => {
+                 "message"=> "pattern1"
+                 "message"=> "pattern2"
+                 "message"=> "pattern3"
+               }
+             }
+           }
+         ))
+
+         expect { config.compile }.to raise_error(LogStash::ConfigurationError, /Duplicate keys found in your configuration: \["message"\]/)
+       end
+
+       it "rejects duplicate keys in nested hash" do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           input {
+             example {
+               match => {
+                 "message"=> "pattern1"
+                 "more" => {
+                   "cool" => true
+                   "cool" => true
+                 }
+               }
+             }
+           }
+         ))
+
+         expect { config.compile }.to raise_error(LogStash::ConfigurationError, /Duplicate keys found in your configuration: \["cool"\]/)
+       end
+
+       it "rejects a key with multiple double quotes" do
+         parser = LogStashConfigParser.new
+         config = parser.parse(%q(
+           input {
+             example {
+               match => {
+                 "message"=> "pattern1"
+                 ""more"" => {
+                   "cool" => true
+                   "cool" => true
+                 }
+               }
+             }
+           }
+         ))
+
+         expect(config).to be_nil
+       end
+     end
+   end
+ end
data/spec/core/environment_spec.rb
@@ -0,0 +1,44 @@
+ require "spec_helper"
+ require "logstash/environment"
+
+ describe LogStash::Environment do
+
+   context "when loading jars dependencies" do
+
+     let(:default_jars_location) { File.join("vendor", "jar-dependencies") }
+     let(:default_runtime_location) { File.join(default_jars_location,"runtime-jars","*.jar") }
+     let(:default_test_location) { File.join(default_jars_location,"test-jars","*.jar") }
+
+     it "raises an exception if jruby is not available" do
+       expect(subject).to receive(:jruby?).and_return(false)
+       expect { subject.load_runtime_jars! }.to raise_error
+     end
+
+     it "find runtime jars in the default location" do
+       expect(subject).to receive(:find_jars).with(default_runtime_location).and_return([])
+       subject.load_runtime_jars!
+     end
+
+     it "find test jars in the default location" do
+       expect(subject).to receive(:find_jars).with(default_test_location).and_return([])
+       subject.load_test_jars!
+     end
+
+     context "when loading a jar file" do
+
+       let(:dummy_jar_file) { File.join(default_jars_location,"runtime-jars","elasticsearch.jar") }
+
+       it "requires the jar files if there are jars to load" do
+         expect(subject).to receive(:find_jars).with(default_runtime_location).and_return([dummy_jar_file])
+         expect(subject).to receive(:require).with(dummy_jar_file)
+         subject.load_runtime_jars!
+       end
+
+       it "raises an exception if there are no jars to load" do
+         allow(Dir).to receive(:glob).and_return([])
+         expect { subject.load_runtime_jars! }.to raise_error
+       end
+
+     end
+   end
+ end
data/spec/core/event_spec.rb
@@ -0,0 +1,473 @@
+ # encoding: utf-8
+ require "spec_helper"
+
+ describe LogStash::Event do
+
+   shared_examples "all event tests" do
+     context "[]=" do
+       it "should raise an exception if you attempt to set @timestamp to a value type other than a Time object" do
+         expect{subject["@timestamp"] = "crash!"}.to raise_error(TypeError)
+       end
+
+       it "should assign simple fields" do
+         expect(subject["foo"]).to be_nil
+         expect(subject["foo"] = "bar").to eq("bar")
+         expect(subject["foo"]).to eq("bar")
+       end
+
+       it "should overwrite simple fields" do
+         expect(subject["foo"]).to be_nil
+         expect(subject["foo"] = "bar").to eq("bar")
+         expect(subject["foo"]).to eq("bar")
+
+         expect(subject["foo"] = "baz").to eq("baz")
+         expect(subject["foo"]).to eq("baz")
+       end
+
+       it "should assign deep fields" do
+         expect(subject["[foo][bar]"]).to be_nil
+         expect(subject["[foo][bar]"] = "baz").to eq("baz")
+         expect(subject["[foo][bar]"]).to eq("baz")
+       end
+
+       it "should overwrite deep fields" do
+         expect(subject["[foo][bar]"]).to be_nil
+         expect(subject["[foo][bar]"] = "baz").to eq("baz")
+         expect(subject["[foo][bar]"]).to eq("baz")
+
+         expect(subject["[foo][bar]"] = "zab").to eq("zab")
+         expect(subject["[foo][bar]"]).to eq("zab")
+       end
+
+       it "allow to set the @metadata key to a hash" do
+         subject["@metadata"] = { "action" => "index" }
+         expect(subject["[@metadata][action]"]).to eq("index")
+       end
+     end
+
+     context "#sprintf" do
+       it "should report a unix timestamp for %{+%s}" do
+         expect(subject.sprintf("%{+%s}")).to eq("1356998400")
+       end
+
+       it "should raise error when formatting %{+%s} when @timestamp field is missing" do
+         str = "hello-%{+%s}"
+         subj = subject.clone
+         subj.remove("[@timestamp]")
+         expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
+       end
+
+       it "should report a time with %{+format} syntax", :if => RUBY_ENGINE == "jruby" do
+         expect(subject.sprintf("%{+YYYY}")).to eq("2013")
+         expect(subject.sprintf("%{+MM}")).to eq("01")
+         expect(subject.sprintf("%{+HH}")).to eq("00")
+       end
+
+       it "should raise error with %{+format} syntax when @timestamp field is missing", :if => RUBY_ENGINE == "jruby" do
+         str = "logstash-%{+YYYY}"
+         subj = subject.clone
+         subj.remove("[@timestamp]")
+         expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
+       end
+
+       it "should report fields with %{field} syntax" do
+         expect(subject.sprintf("%{type}")).to eq("sprintf")
+         expect(subject.sprintf("%{message}")).to eq(subject["message"])
+       end
+
+       it "should print deep fields" do
+         expect(subject.sprintf("%{[j][k1]}")).to eq("v")
+         expect(subject.sprintf("%{[j][k2][0]}")).to eq("w")
+       end
+
+       it "should be able to take a non-string for the format" do
+         expect(subject.sprintf(2)).to eq("2")
+       end
+
+       it "should allow to use the metadata when calling #sprintf" do
+         expect(subject.sprintf("super-%{[@metadata][fancy]}")).to eq("super-pants")
+       end
+
+       it "should allow to use nested hash from the metadata field" do
+         expect(subject.sprintf("%{[@metadata][have-to-go][deeper]}")).to eq("inception")
+       end
+     end
+
+     context "#[]" do
+       it "should fetch data" do
+         expect(subject["type"]).to eq("sprintf")
+       end
+       it "should fetch fields" do
+         expect(subject["a"]).to eq("b")
+         expect(subject['c']['d']).to eq("f")
+       end
+       it "should fetch deep fields" do
+         expect(subject["[j][k1]"]).to eq("v")
+         expect(subject["[c][d]"]).to eq("f")
+         expect(subject['[f][g][h]']).to eq("i")
+         expect(subject['[j][k3][4]']).to eq("m")
+         expect(subject['[j][5]']).to eq(7)
+
+       end
+
+       it "should be fast?", :performance => true do
+         count = 1000000
+         2.times do
+           start = Time.now
+           count.times { subject["[j][k1]"] }
+           duration = Time.now - start
+           puts "event #[] rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
+         end
+       end
+     end
+
+     context "#include?" do
+       it "should include existing fields" do
+         expect(subject.include?("c")).to be_true
+         expect(subject.include?("[c][d]")).to be_true
+         expect(subject.include?("[j][k4][0][nested]")).to be_true
+       end
+
+       it "should include field with nil value" do
+         expect(subject.include?("nilfield")).to be_true
+       end
+
+       it "should include @metadata field" do
+         expect(subject.include?("@metadata")).to be_true
+       end
+
+       it "should include field within @metadata" do
+         expect(subject.include?("[@metadata][fancy]")).to be_true
+       end
+
+       it "should not include non-existing fields" do
+         expect(subject.include?("doesnotexist")).to be_false
+         expect(subject.include?("[j][doesnotexist]")).to be_false
+         expect(subject.include?("[tag][0][hello][yes]")).to be_false
+       end
+
+       it "should include within arrays" do
+         expect(subject.include?("[tags][0]")).to be_true
+         expect(subject.include?("[tags][1]")).to be_false
+       end
+     end
+
+     context "#overwrite" do
+       it "should swap data with new content" do
+         new_event = LogStash::Event.new(
+           "type" => "new",
+           "message" => "foo bar",
+         )
+         subject.overwrite(new_event)
+
+         expect(subject["message"]).to eq("foo bar")
+         expect(subject["type"]).to eq("new")
+
+         ["tags", "source", "a", "c", "f", "j"].each do |field|
+           expect(subject[field]).to be_nil
+         end
+       end
+     end
+
+     context "#append" do
+       it "should append strings to an array" do
+         subject.append(LogStash::Event.new("message" => "another thing"))
+         expect(subject["message"]).to eq([ "hello world", "another thing" ])
+       end
+
+       it "should concatenate tags" do
+         subject.append(LogStash::Event.new("tags" => [ "tag2" ]))
+         # added to_a for when array is a Java Collection when produced from json input
+         # TODO: we have to find a better way to handle this in tests. maybe override
+         # rspec eq or == to do an explicit to_a when comparing arrays?
+         expect(subject["tags"].to_a).to eq([ "tag1", "tag2" ])
+       end
+
+       context "when event field is nil" do
+         it "should add single value as string" do
+           subject.append(LogStash::Event.new({"field1" => "append1"}))
+           expect(subject[ "field1" ]).to eq("append1")
+         end
+         it "should add multi values as array" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1","append2" ]}))
+           expect(subject[ "field1" ]).to eq([ "append1","append2" ])
+         end
+       end
+
+       context "when event field is a string" do
+         before { subject[ "field1" ] = "original1" }
+
+         it "should append string to values, if different from current" do
+           subject.append(LogStash::Event.new({"field1" => "append1"}))
+           expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+         end
+         it "should not change value, if appended value is equal current" do
+           subject.append(LogStash::Event.new({"field1" => "original1"}))
+           expect(subject[ "field1" ]).to eq("original1")
+         end
+         it "should concatenate values in an array" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1" ]}))
+           expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+         end
+         it "should join array, removing duplicates" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
+           expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+         end
+       end
+       context "when event field is an array" do
+         before { subject[ "field1" ] = [ "original1", "original2" ] }
+
+         it "should append string values to array, if not present in array" do
+           subject.append(LogStash::Event.new({"field1" => "append1"}))
+           expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
+         end
+         it "should not append string values, if the array already contains it" do
+           subject.append(LogStash::Event.new({"field1" => "original1"}))
+           expect(subject[ "field1" ]).to eq([ "original1", "original2" ])
+         end
+         it "should join array, removing duplicates" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
+           expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
+         end
+       end
+     end
+
+     it "timestamp parsing speed", :performance => true do
+       warmup = 10000
+       count = 1000000
+
+       data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
+       event = LogStash::Event.new(data)
+       expect(event["@timestamp"]).to be_a(LogStash::Timestamp)
+
+       duration = 0
+       [warmup, count].each do |i|
+         start = Time.now
+         i.times do
+           data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
+           LogStash::Event.new(data.clone)
+         end
+         duration = Time.now - start
+       end
+       puts "event @timestamp parse rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
+     end
+
+     context "acceptable @timestamp formats" do
+       subject { LogStash::Event.new }
+
+       formats = [
+         "YYYY-MM-dd'T'HH:mm:ss.SSSZ",
+         "YYYY-MM-dd'T'HH:mm:ss.SSSSSSZ",
+         "YYYY-MM-dd'T'HH:mm:ss.SSS",
+         "YYYY-MM-dd'T'HH:mm:ss",
+         "YYYY-MM-dd'T'HH:mm:ssZ",
+       ]
+       formats.each do |format|
+         it "includes #{format}" do
+           time = subject.sprintf("%{+#{format}}")
+           begin
+             LogStash::Event.new("@timestamp" => time)
+           rescue => e
+             raise StandardError, "Time '#{time}' was rejected. #{e.class}: #{e.to_s}"
+           end
+         end
+       end
+
+       context "from LOGSTASH-1738" do
+         it "does not error" do
+           LogStash::Event.new("@timestamp" => "2013-12-29T23:12:52.371240+02:00")
+         end
+       end
+
+       context "from LOGSTASH-1732" do
+         it "does not error" do
+           LogStash::Event.new("@timestamp" => "2013-12-27T11:07:25+00:00")
+         end
+       end
+     end
+
+     context "timestamp initialization" do
+       let(:logger) { double("logger") }
+
+       it "should coerce timestamp" do
+         t = Time.iso8601("2014-06-12T00:12:17.114Z")
+         expect(LogStash::Timestamp).to receive(:coerce).exactly(3).times.and_call_original
+         expect(LogStash::Event.new("@timestamp" => t).timestamp.to_i).to eq(t.to_i)
+         expect(LogStash::Event.new("@timestamp" => LogStash::Timestamp.new(t)).timestamp.to_i).to eq(t.to_i)
+         expect(LogStash::Event.new("@timestamp" => "2014-06-12T00:12:17.114Z").timestamp.to_i).to eq(t.to_i)
+       end
+
+       it "should assign current time when no timestamp" do
+         ts = LogStash::Timestamp.now
+         expect(LogStash::Timestamp).to receive(:now).and_return(ts)
+         expect(LogStash::Event.new({}).timestamp.to_i).to eq(ts.to_i)
+       end
+
+       it "should tag and warn for invalid value" do
+         ts = LogStash::Timestamp.now
+         expect(LogStash::Timestamp).to receive(:now).twice.and_return(ts)
+         expect(LogStash::Event::LOGGER).to receive(:warn).twice
+
+         event = LogStash::Event.new("@timestamp" => :foo)
+         expect(event.timestamp.to_i).to eq(ts.to_i)
+         expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+         expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(:foo)
+
+         event = LogStash::Event.new("@timestamp" => 666)
+         expect(event.timestamp.to_i).to eq(ts.to_i)
+         expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+         expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(666)
+       end
+
+       it "should tag and warn for invalid string format" do
+         ts = LogStash::Timestamp.now
+         expect(LogStash::Timestamp).to receive(:now).and_return(ts)
+         expect(LogStash::Event::LOGGER).to receive(:warn)
+
+         event = LogStash::Event.new("@timestamp" => "foo")
+         expect(event.timestamp.to_i).to eq(ts.to_i)
+         expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+         expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq("foo")
+       end
+     end
+
+     context "to_json" do
+       it "should support to_json" do
+         new_event = LogStash::Event.new(
+           "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
+           "message" => "foo bar",
+         )
+         json = new_event.to_json
+
+         expect(json).to eq( "{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}")
+       end
+
+       it "should support to_json and ignore arguments" do
+         new_event = LogStash::Event.new(
+           "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
+           "message" => "foo bar",
+         )
+         json = new_event.to_json(:foo => 1, :bar => "baz")
+
+         expect(json).to eq( "{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}")
+       end
+     end
+
+     context "metadata" do
+       context "with existing metadata" do
+         subject { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => "pants" }) }
+
+         it "should not include metadata in to_hash" do
+           expect(subject.to_hash.keys).not_to include("@metadata")
+
+           # 'hello', '@timestamp', and '@version'
+           expect(subject.to_hash.keys.count).to eq(3)
+         end
+
+         it "should still allow normal field access" do
+           expect(subject["hello"]).to eq("world")
+         end
+       end
+
+       context "with set metadata" do
+         let(:fieldref) { "[@metadata][foo][bar]" }
+         let(:value) { "bar" }
+         subject { LogStash::Event.new("normal" => "normal") }
+         before do
+           # Verify the test is configured correctly.
+           expect(fieldref).to start_with("[@metadata]")
+
+           # Set it.
+           subject[fieldref] = value
+         end
+
+         it "should still allow normal field access" do
+           expect(subject["normal"]).to eq("normal")
+         end
+
+         it "should allow getting" do
+           expect(subject[fieldref]).to eq(value)
+         end
+
+         it "should be hidden from .to_json" do
+           require "json"
+           obj = JSON.parse(subject.to_json)
+           expect(obj).not_to include("@metadata")
+         end
+
+         it "should be hidden from .to_hash" do
+           expect(subject.to_hash).not_to include("@metadata")
+         end
+
+         it "should be accessible through #to_hash_with_metadata" do
+           obj = subject.to_hash_with_metadata
+           expect(obj).to include("@metadata")
+           expect(obj["@metadata"]["foo"]["bar"]).to eq(value)
+         end
+       end
+
+       context "with no metadata" do
+         subject { LogStash::Event.new("foo" => "bar") }
+         it "should have no metadata" do
+           expect(subject["@metadata"]).to be_empty
+         end
+         it "should still allow normal field access" do
+           expect(subject["foo"]).to eq("bar")
+         end
+
+         it "should not include the @metadata key" do
+           expect(subject.to_hash_with_metadata).not_to include("@metadata")
+         end
+       end
+     end
+   end
+
+   context "signal events" do
+     it "should define the shutdown event" do
+       # the SHUTDOWN and FLUSH constants are part of the plugin API contract
+       # if they are changed, all plugins must be updated
+       expect(LogStash::SHUTDOWN).to be_a(LogStash::ShutdownEvent)
+       expect(LogStash::FLUSH).to be_a(LogStash::FlushEvent)
+     end
+   end
+
+   let(:event_hash) do
+     {
+       "@timestamp" => "2013-01-01T00:00:00.000Z",
+       "type" => "sprintf",
+       "message" => "hello world",
+       "tags" => [ "tag1" ],
+       "source" => "/home/foo",
+       "a" => "b",
+       "c" => {
+         "d" => "f",
+         "e" => {"f" => "g"}
+       },
+       "f" => { "g" => { "h" => "i" } },
+       "j" => {
+         "k1" => "v",
+         "k2" => [ "w", "x" ],
+         "k3" => {"4" => "m"},
+         "k4" => [ {"nested" => "cool"} ],
+         5 => 6,
+         "5" => 7
+       },
+       "nilfield" => nil,
+       "@metadata" => { "fancy" => "pants", "have-to-go" => { "deeper" => "inception" } }
+     }
+   end
+
+   describe "using normal hash input" do
+     it_behaves_like "all event tests" do
+       subject{LogStash::Event.new(event_hash)}
+     end
+   end
+
+   describe "using hash input from deserialized json" do
+     # this is to test the case when JrJackson deserialises Json and produces
+     # native Java Collections objects for efficiency
+     it_behaves_like "all event tests" do
+       subject{LogStash::Event.new(LogStash::Json.load(LogStash::Json.dump(event_hash)))}
+     end
+   end
+ end