logstash-core-event 2.2.0-java

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
@@ -0,0 +1,97 @@
+ # encoding: utf-8
+ require "logstash/environment"
+ require "logstash/json"
+ require "forwardable"
+ require "date"
+ require "time"
+
+ module LogStash
+   class TimestampParserError < StandardError; end
+
+   class Timestamp
+     extend Forwardable
+     include Comparable
+
+     def_delegators :@time, :tv_usec, :usec, :year, :iso8601, :to_i, :tv_sec, :to_f, :to_edn, :<=>, :+
+
+     attr_reader :time
+
+     ISO8601_STRFTIME = "%04d-%02d-%02dT%02d:%02d:%02d.%06d%+03d:00".freeze
+     ISO8601_PRECISION = 3
+
+     def initialize(time = Time.new)
+       @time = time.utc
+     end
+
+     def self.at(*args)
+       Timestamp.new(::Time.at(*args))
+     end
+
+     def self.parse(*args)
+       Timestamp.new(::Time.parse(*args))
+     end
+
+     def self.now
+       Timestamp.new(::Time.now)
+     end
+
+     # coerce tries different strategies, based on the time object's class, to convert it into a Timestamp.
+     # @param [String, Time, Timestamp] time the time object to try to coerce
+     # @return [Timestamp, nil] a Timestamp if successful, otherwise nil
+     # @raise [TimestampParserError] on a String with an invalid format
+     def self.coerce(time)
+       case time
+       when String
+         LogStash::Timestamp.parse_iso8601(time)
+       when LogStash::Timestamp
+         time
+       when Time
+         LogStash::Timestamp.new(time)
+       else
+         nil
+       end
+     end
+
+     if LogStash::Environment.jruby?
+       JODA_ISO8601_PARSER = org.joda.time.format.ISODateTimeFormat.dateTimeParser
+       UTC = org.joda.time.DateTimeZone.forID("UTC")
+
+       def self.parse_iso8601(t)
+         millis = JODA_ISO8601_PARSER.parseMillis(t)
+         LogStash::Timestamp.at(millis / 1000, (millis % 1000) * 1000)
+       rescue => e
+         raise(TimestampParserError, "invalid timestamp string #{t.inspect}, error=#{e.inspect}")
+       end
+
+     else
+
+       def self.parse_iso8601(t)
+         # warning: ruby's Time.parse is *really* terrible and slow.
+         LogStash::Timestamp.new(::Time.parse(t))
+       rescue => e
+         raise(TimestampParserError, "invalid timestamp string #{t.inspect}, error=#{e.inspect}")
+       end
+     end
+
+     def utc
+       @time.utc # modifies the receiver
+       self
+     end
+     alias_method :gmtime, :utc
+
+     def to_json(*args)
+       # ignore arguments to respect the accepted to_json method signature
+       "\"" + to_iso8601 + "\""
+     end
+     alias_method :inspect, :to_json
+
+     def to_iso8601
+       @iso8601 ||= @time.iso8601(ISO8601_PRECISION)
+     end
+     alias_method :to_s, :to_iso8601
+
+     def -(value)
+       @time - (value.is_a?(Timestamp) ? value.time : value)
+     end
+   end
+ end
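For orientation, here is a short, illustrative sketch of how this Timestamp class behaves, based only on the code above. The require path is an assumption, since the diff does not name the file; this is a usage sketch, not part of the release:

    # minimal usage sketch, assuming the file lands at lib/logstash/timestamp.rb
    require "logstash/timestamp"

    LogStash::Timestamp.coerce("2013-01-01T00:00:00.000Z") # String -> parsed via parse_iso8601
    LogStash::Timestamp.coerce(Time.now)                   # Time   -> wrapped in a new Timestamp
    LogStash::Timestamp.coerce(:not_a_time)                # other  -> nil

    ts = LogStash::Timestamp.now
    ts.to_iso8601 # ISO8601 with 3 fractional digits (ISO8601_PRECISION), memoized in @iso8601

    begin
      LogStash::Timestamp.coerce("not a timestamp")
    rescue LogStash::TimestampParserError => e
      # both the JRuby (Joda) and MRI (Time.parse) branches wrap failures in this error
      e.message # => "invalid timestamp string ..."
    end

Note the design split: under JRuby the Joda-Time parser is used for speed, while the plain-Ruby branch falls back to Time.parse; both surface the same TimestampParserError.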
@@ -0,0 +1,123 @@
+ # encoding: utf-8
+ require "logstash/namespace"
+ require "logstash/util"
+ require "thread_safe"
+
+ module LogStash::Util
+
+   # PathCache is a singleton which globally caches the relation between a field reference and its
+   # decomposition into a [key, path array] tuple. For example, the field reference [foo][bar][baz]
+   # is decomposed into ["baz", ["foo", "bar"]].
+   module PathCache
+     extend self
+
+     # requiring libraries and defining constants is thread safe in JRuby, so
+     # PathCache::CACHE will be correctly initialized, once, when accessors.rb
+     # is first required
+     CACHE = ThreadSafe::Cache.new
+
+     def get(field_reference)
+       # the "get_or_default(x, nil) || put(x, parse(x))" idiom is ~2x faster than "get || put" because the get call is
+       # proxied through the JRuby JavaProxy op_aref method. the correct idiom here would be to use
+       # "compute_if_absent(x){parse(x)}" but because of the closure creation, it is ~1.5x slower than
+       # "get_or_default || put".
+       # this "get_or_default || put" is obviously non-atomic, which is not really important here
+       # since all threads will set the same value and this cache will stabilize very quickly after the first
+       # few events.
+       CACHE.get_or_default(field_reference, nil) || CACHE.put(field_reference, parse(field_reference))
+     end
+
+     def parse(field_reference)
+       path = field_reference.split(/[\[\]]/).select{|s| !s.empty?}
+       [path.pop, path]
+     end
+   end
+
+   # Accessors uses a lookup table to speed up access of a field reference of the form
+   # "[hello][world]" into the underlying store hash, e.g. {"hello" => {"world" => "foo"}}
+   class Accessors
+
+     # @param store [Hash] the backing data store that field references point into
+     def initialize(store)
+       @store = store
+
+       # @lut is a lookup table between a field reference and a [target, key] tuple
+       # where target is the containing Hash or Array for key in @store.
+       # this allows us to directly access the containing object for key instead of
+       # walking the field reference path into the inner @store objects
+       @lut = {}
+     end
+
+     # @param field_reference [String] the field reference
+     # @return [Object] the value in @store for this field reference
+     def get(field_reference)
+       target, key = lookup(field_reference)
+       return nil unless target
+       target.is_a?(Array) ? target[key.to_i] : target[key]
+     end
+
+     # @param field_reference [String] the field reference
+     # @param value [Object] the value to set in @store for this field reference
+     # @return [Object] the value set
+     def set(field_reference, value)
+       target, key = lookup_or_create(field_reference)
+       target[target.is_a?(Array) ? key.to_i : key] = value
+     end
+
+     # @param field_reference [String] the field reference to remove
+     # @return [Object] the removed value in @store for this field reference
+     def del(field_reference)
+       target, key = lookup(field_reference)
+       return nil unless target
+       target.is_a?(Array) ? target.delete_at(key.to_i) : target.delete(key)
+     end
+
+     # @param field_reference [String] the field reference to test for inclusion in the store
+     # @return [Boolean] true if the store contains a value for this field reference
+     def include?(field_reference)
+       target, key = lookup(field_reference)
+       return false unless target
+
+       target.is_a?(Array) ? !target[key.to_i].nil? : target.include?(key)
+     end
+
+     private
+
+     # retrieve the [target, key] tuple associated with this field reference
+     # @param field_reference [String] the field reference
+     # @return [[Object, String]] the [target, key] tuple associated with this field reference
+     def lookup(field_reference)
+       @lut[field_reference] ||= find_target(field_reference)
+     end
+
+     # retrieve the [target, key] tuple associated with this field reference and create inner
+     # container objects if they do not exist
+     # @param field_reference [String] the field reference
+     # @return [[Object, String]] the [target, key] tuple associated with this field reference
+     def lookup_or_create(field_reference)
+       @lut[field_reference] ||= find_or_create_target(field_reference)
+     end
+
+     # find the target container object in store for this field reference
+     # @param field_reference [String] the field reference
+     # @return [Object] the target container object in store associated with this field reference
+     def find_target(field_reference)
+       key, path = PathCache.get(field_reference)
+       target = path.inject(@store) do |r, k|
+         return nil unless r
+         r[r.is_a?(Array) ? k.to_i : k]
+       end
+       target ? [target, key] : nil
+     end
+
+     # find the target container object in store for this field reference and create inner
+     # container objects if they do not exist
+     # @param field_reference [String] the field reference
+     # @return [Object] the target container object in store associated with this field reference
+     def find_or_create_target(accessor)
+       key, path = PathCache.get(accessor)
+       target = path.inject(@store) {|r, k| r[r.is_a?(Array) ? k.to_i : k] ||= {}}
+       [target, key]
+     end
+   end # class Accessors
+ end # module LogStash::Util
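To make the decomposition concrete, here is a small illustrative sketch of PathCache and Accessors against a plain Hash store. The require path is an assumption; the behavior shown follows directly from the code above:

    require "logstash/util/accessors" # assumed path for this file

    LogStash::Util::PathCache.parse("[foo][bar][baz]") # => ["baz", ["foo", "bar"]]

    store = {}
    accessors = LogStash::Util::Accessors.new(store)

    accessors.set("[foo][bar]", "baz")  # find_or_create_target builds {"foo" => {}} on the way in
    accessors.get("[foo][bar]")         # => "baz", via the cached [target, key] tuple in @lut
    accessors.include?("[foo][qux]")    # => false
    accessors.del("[foo][bar]")         # => "baz"
    store                               # => {"foo" => {}}

Note the integer coercion: when the target container is an Array, keys are converted with to_i, which is how references like [tags][0] in the specs below resolve into array indices.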
@@ -0,0 +1,23 @@
+ # -*- encoding: utf-8 -*-
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'logstash-core-event/version'
+
+ Gem::Specification.new do |gem|
+   gem.authors = ["Jordan Sissel", "Pete Fritchman", "Elasticsearch"]
+   gem.email = ["jls@semicomplete.com", "petef@databits.net", "info@elasticsearch.com"]
+   gem.description = %q{The core event component of logstash, the scalable log and event management tool}
+   gem.summary = %q{logstash-core-event - The core event component of logstash}
+   gem.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
+   gem.license = "Apache License (2.0)"
+
+   gem.files = Dir.glob(["logstash-core-event.gemspec", "lib/**/*.rb", "spec/**/*.rb"])
+   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
+   gem.name = "logstash-core-event"
+   gem.require_paths = ["lib"]
+   gem.version = LOGSTASH_CORE_EVENT_VERSION
+
+   if RUBY_PLATFORM == 'java'
+     gem.platform = RUBY_PLATFORM
+   end
+ end
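A side note on the "-java" suffix in the version header at the top of this page: the trailing conditional sets gem.platform when the gemspec is evaluated under JRuby, where RUBY_PLATFORM is "java". A hedged sketch of the effect, assuming RubyGems' usual name-version-platform artifact naming:

    # under JRuby, RUBY_PLATFORM == "java", so the built gem is platform-tagged
    suffix = RUBY_PLATFORM == 'java' ? "-java" : ""
    puts "logstash-core-event-2.2.0#{suffix}.gem" # illustrative artifact name only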
@@ -0,0 +1,533 @@
+ # encoding: utf-8
+ require "spec_helper"
+ require "json"
+
+ describe LogStash::Event do
+
+   shared_examples "all event tests" do
+     context "[]=" do
+       it "should raise an exception if you attempt to set @timestamp to a value type other than a Time object" do
+         expect{subject["@timestamp"] = "crash!"}.to raise_error(TypeError)
+       end
+
+       it "should assign simple fields" do
+         expect(subject["foo"]).to be_nil
+         expect(subject["foo"] = "bar").to eq("bar")
+         expect(subject["foo"]).to eq("bar")
+       end
+
+       it "should overwrite simple fields" do
+         expect(subject["foo"]).to be_nil
+         expect(subject["foo"] = "bar").to eq("bar")
+         expect(subject["foo"]).to eq("bar")
+
+         expect(subject["foo"] = "baz").to eq("baz")
+         expect(subject["foo"]).to eq("baz")
+       end
+
+       it "should assign deep fields" do
+         expect(subject["[foo][bar]"]).to be_nil
+         expect(subject["[foo][bar]"] = "baz").to eq("baz")
+         expect(subject["[foo][bar]"]).to eq("baz")
+       end
+
+       it "should overwrite deep fields" do
+         expect(subject["[foo][bar]"]).to be_nil
+         expect(subject["[foo][bar]"] = "baz").to eq("baz")
+         expect(subject["[foo][bar]"]).to eq("baz")
+
+         expect(subject["[foo][bar]"] = "zab").to eq("zab")
+         expect(subject["[foo][bar]"]).to eq("zab")
+       end
+
+       it "allows setting the @metadata key to a hash" do
+         subject["@metadata"] = { "action" => "index" }
+         expect(subject["[@metadata][action]"]).to eq("index")
+       end
+
+       it "should add the key when setting a nil value" do
+         subject["[baz]"] = nil
+         expect(subject.to_hash).to include("baz" => nil)
+       end
+     end
+
+     context "#sprintf" do
+       it "should report a unix timestamp for %{+%s}" do
+         expect(subject.sprintf("%{+%s}")).to eq("1356998400")
+       end
+
+       it "should work if there is no fieldref in the string" do
+         expect(subject.sprintf("bonjour")).to eq("bonjour")
+       end
+
+       it "should raise an error when formatting %{+%s} if the @timestamp field is missing" do
+         str = "hello-%{+%s}"
+         subj = subject.clone
+         subj.remove("[@timestamp]")
+         expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
+       end
+
+       it "should report a time with %{+format} syntax", :if => RUBY_ENGINE == "jruby" do
+         expect(subject.sprintf("%{+YYYY}")).to eq("2013")
+         expect(subject.sprintf("%{+MM}")).to eq("01")
+         expect(subject.sprintf("%{+HH}")).to eq("00")
+       end
+
+       it "should support mixed strings" do
+         expect(subject.sprintf("foo %{+YYYY-MM-dd} %{type}")).to eq("foo 2013-01-01 sprintf")
+       end
+
+       it "should raise an error with %{+format} syntax if the @timestamp field is missing", :if => RUBY_ENGINE == "jruby" do
+         str = "logstash-%{+YYYY}"
+         subj = subject.clone
+         subj.remove("[@timestamp]")
+         expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
+       end
+
+       it "should report fields with %{field} syntax" do
+         expect(subject.sprintf("%{type}")).to eq("sprintf")
+         expect(subject.sprintf("%{message}")).to eq(subject["message"])
+       end
+
+       it "should print deep fields" do
+         expect(subject.sprintf("%{[j][k1]}")).to eq("v")
+         expect(subject.sprintf("%{[j][k2][0]}")).to eq("w")
+       end
+
+       it "should be able to take a non-string for the format" do
+         expect(subject.sprintf(2)).to eq("2")
+       end
+
+       it "should allow using the metadata when calling #sprintf" do
+         expect(subject.sprintf("super-%{[@metadata][fancy]}")).to eq("super-pants")
+       end
+
+       it "should allow using a nested hash from the metadata field" do
+         expect(subject.sprintf("%{[@metadata][have-to-go][deeper]}")).to eq("inception")
+       end
+
+       it "should return a json string if the key is a hash" do
+         expect(subject.sprintf("%{[j][k3]}")).to eq("{\"4\":\"m\"}")
+       end
+
+       it "should not strip the last character" do
+         expect(subject.sprintf("%{type}%{message}|")).to eq("sprintfhello world|")
+       end
+
+       context "#encoding" do
+         it "should return known patterns as UTF-8" do
+           expect(subject.sprintf("%{message}").encoding).to eq(Encoding::UTF_8)
+         end
+
+         it "should return unknown patterns as UTF-8" do
+           expect(subject.sprintf("%{unkown_pattern}").encoding).to eq(Encoding::UTF_8)
+         end
+       end
+     end
+
+     context "#[]" do
+       it "should fetch data" do
+         expect(subject["type"]).to eq("sprintf")
+       end
+       it "should fetch fields" do
+         expect(subject["a"]).to eq("b")
+         expect(subject['c']['d']).to eq("f")
+       end
+       it "should fetch deep fields" do
+         expect(subject["[j][k1]"]).to eq("v")
+         expect(subject["[c][d]"]).to eq("f")
+         expect(subject['[f][g][h]']).to eq("i")
+         expect(subject['[j][k3][4]']).to eq("m")
+         expect(subject['[j][5]']).to eq(7)
+       end
+
+       it "should be fast?", :performance => true do
+         count = 1000000
+         2.times do
+           start = Time.now
+           count.times { subject["[j][k1]"] }
+           duration = Time.now - start
+           puts "event #[] rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
+         end
+       end
+     end
+
+     context "#include?" do
+       it "should include existing fields" do
+         expect(subject.include?("c")).to eq(true)
+         expect(subject.include?("[c][d]")).to eq(true)
+         expect(subject.include?("[j][k4][0][nested]")).to eq(true)
+       end
+
+       it "should include fields with a nil value" do
+         expect(subject.include?("nilfield")).to eq(true)
+       end
+
+       it "should include the @metadata field" do
+         expect(subject.include?("@metadata")).to eq(true)
+       end
+
+       it "should include fields within @metadata" do
+         expect(subject.include?("[@metadata][fancy]")).to eq(true)
+       end
+
+       it "should not include non-existing fields" do
+         expect(subject.include?("doesnotexist")).to eq(false)
+         expect(subject.include?("[j][doesnotexist]")).to eq(false)
+         expect(subject.include?("[tag][0][hello][yes]")).to eq(false)
+       end
+
+       it "should include within arrays" do
+         expect(subject.include?("[tags][0]")).to eq(true)
+         expect(subject.include?("[tags][1]")).to eq(false)
+       end
+     end
+
+     context "#overwrite" do
+       it "should swap data with new content" do
+         new_event = LogStash::Event.new(
+           "type" => "new",
+           "message" => "foo bar",
+         )
+         subject.overwrite(new_event)
+
+         expect(subject["message"]).to eq("foo bar")
+         expect(subject["type"]).to eq("new")
+
+         ["tags", "source", "a", "c", "f", "j"].each do |field|
+           expect(subject[field]).to be_nil
+         end
+       end
+     end
+
+     context "#append" do
+       it "should append strings to an array" do
+         subject.append(LogStash::Event.new("message" => "another thing"))
+         expect(subject["message"]).to eq([ "hello world", "another thing" ])
+       end
+
+       it "should concatenate tags" do
+         subject.append(LogStash::Event.new("tags" => [ "tag2" ]))
+         # to_a is needed because the array may be a Java Collection when produced from json input
+         # TODO: we have to find a better way to handle this in tests. maybe override
+         # rspec eq or == to do an explicit to_a when comparing arrays?
+         expect(subject["tags"].to_a).to eq([ "tag1", "tag2" ])
+       end
+
+       context "when the event field is nil" do
+         it "should add a single value as a string" do
+           subject.append(LogStash::Event.new({"field1" => "append1"}))
+           expect(subject[ "field1" ]).to eq("append1")
+         end
+         it "should add multiple values as an array" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1","append2" ]}))
+           expect(subject[ "field1" ]).to eq([ "append1","append2" ])
+         end
+       end
+
+       context "when the event field is a string" do
+         before { subject[ "field1" ] = "original1" }
+
+         it "should append the string to the values, if different from current" do
+           subject.append(LogStash::Event.new({"field1" => "append1"}))
+           expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+         end
+         it "should not change the value if the appended value equals the current one" do
+           subject.append(LogStash::Event.new({"field1" => "original1"}))
+           expect(subject[ "field1" ]).to eq("original1")
+         end
+         it "should concatenate values in an array" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1" ]}))
+           expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+         end
+         it "should join arrays, removing duplicates" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
+           expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+         end
+       end
+       context "when the event field is an array" do
+         before { subject[ "field1" ] = [ "original1", "original2" ] }
+
+         it "should append string values to the array, if not present in the array" do
+           subject.append(LogStash::Event.new({"field1" => "append1"}))
+           expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
+         end
+         it "should not append string values if the array already contains them" do
+           subject.append(LogStash::Event.new({"field1" => "original1"}))
+           expect(subject[ "field1" ]).to eq([ "original1", "original2" ])
+         end
+         it "should join arrays, removing duplicates" do
+           subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
+           expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
+         end
+       end
+     end
+
+     it "timestamp parsing speed", :performance => true do
+       warmup = 10000
+       count = 1000000
+
+       data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
+       event = LogStash::Event.new(data)
+       expect(event["@timestamp"]).to be_a(LogStash::Timestamp)
+
+       duration = 0
+       [warmup, count].each do |i|
+         start = Time.now
+         i.times do
+           data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
+           LogStash::Event.new(data.clone)
+         end
+         duration = Time.now - start
+       end
+       puts "event @timestamp parse rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
+     end
+
+     context "acceptable @timestamp formats" do
+       subject { LogStash::Event.new }
+
+       formats = [
+         "YYYY-MM-dd'T'HH:mm:ss.SSSZ",
+         "YYYY-MM-dd'T'HH:mm:ss.SSSSSSZ",
+         "YYYY-MM-dd'T'HH:mm:ss.SSS",
+         "YYYY-MM-dd'T'HH:mm:ss",
+         "YYYY-MM-dd'T'HH:mm:ssZ",
+       ]
+       formats.each do |format|
+         it "includes #{format}" do
+           time = subject.sprintf("%{+#{format}}")
+           begin
+             LogStash::Event.new("@timestamp" => time)
+           rescue => e
+             raise StandardError, "Time '#{time}' was rejected. #{e.class}: #{e.to_s}"
+           end
+         end
+       end
+
+       context "from LOGSTASH-1738" do
+         it "does not error" do
+           LogStash::Event.new("@timestamp" => "2013-12-29T23:12:52.371240+02:00")
+         end
+       end
+
+       context "from LOGSTASH-1732" do
+         it "does not error" do
+           LogStash::Event.new("@timestamp" => "2013-12-27T11:07:25+00:00")
+         end
+       end
+     end
+
+     context "timestamp initialization" do
+       let(:logger_mock) { double("logger") }
+
+       after(:each) do
+         LogStash::Event.logger = LogStash::Event::DEFAULT_LOGGER
+       end
+
+       it "should coerce the timestamp" do
+         t = Time.iso8601("2014-06-12T00:12:17.114Z")
+         expect(LogStash::Event.new("@timestamp" => t).timestamp.to_i).to eq(t.to_i)
+         expect(LogStash::Event.new("@timestamp" => LogStash::Timestamp.new(t)).timestamp.to_i).to eq(t.to_i)
+         expect(LogStash::Event.new("@timestamp" => "2014-06-12T00:12:17.114Z").timestamp.to_i).to eq(t.to_i)
+       end
+
+       it "should assign the current time when there is no timestamp" do
+         expect(LogStash::Event.new({}).timestamp.to_i).to be_within(1).of(Time.now.to_i)
+       end
+
+       it "should tag for an invalid value" do
+         event = LogStash::Event.new("@timestamp" => "foo")
+         expect(event.timestamp.to_i).to be_within(1).of(Time.now.to_i)
+         expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+         expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq("foo")
+
+         event = LogStash::Event.new("@timestamp" => 666)
+         expect(event.timestamp.to_i).to be_within(1).of(Time.now.to_i)
+         expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+         expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(666)
+       end
+
+       it "should warn for an invalid value" do
+         LogStash::Event.logger = logger_mock
+
+         expect(logger_mock).to receive(:warn).twice
+
+         LogStash::Event.new("@timestamp" => :foo)
+         LogStash::Event.new("@timestamp" => 666)
+       end
+
+       it "should tag for an invalid string format" do
+         event = LogStash::Event.new("@timestamp" => "foo")
+         expect(event.timestamp.to_i).to be_within(1).of(Time.now.to_i)
+         expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+         expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq("foo")
+       end
+
+       it "should warn for an invalid string format" do
+         LogStash::Event.logger = logger_mock
+
+         expect(logger_mock).to receive(:warn)
+         LogStash::Event.new("@timestamp" => "foo")
+       end
+     end
+
+     context "to_json" do
+       it "should support to_json" do
+         new_event = LogStash::Event.new(
+           "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
+           "message" => "foo bar",
+         )
+         json = new_event.to_json
+
+         expect(JSON.parse(json)).to eq(JSON.parse("{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}"))
+       end
+
+       it "should support to_json and ignore arguments" do
+         new_event = LogStash::Event.new(
+           "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
+           "message" => "foo bar",
+         )
+         json = new_event.to_json(:foo => 1, :bar => "baz")
+
+         expect(JSON.parse(json)).to eq(JSON.parse("{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}"))
+       end
+     end
+
+     context "metadata" do
+       context "with existing metadata" do
+         subject { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => "pants" }) }
+
+         it "should not include metadata in to_hash" do
+           expect(subject.to_hash.keys).not_to include("@metadata")
+
+           # 'hello', '@timestamp', and '@version'
+           expect(subject.to_hash.keys.count).to eq(3)
+         end
+
+         it "should still allow normal field access" do
+           expect(subject["hello"]).to eq("world")
+         end
+       end
+
+       context "with set metadata" do
+         let(:fieldref) { "[@metadata][foo][bar]" }
+         let(:value) { "bar" }
+         subject { LogStash::Event.new("normal" => "normal") }
+         before do
+           # Verify the test is configured correctly.
+           expect(fieldref).to start_with("[@metadata]")
+
+           # Set it.
+           subject[fieldref] = value
+         end
+
+         it "should still allow normal field access" do
+           expect(subject["normal"]).to eq("normal")
+         end
+
+         it "should allow getting" do
+           expect(subject[fieldref]).to eq(value)
+         end
+
+         it "should be hidden from .to_json" do
+           require "json"
+           obj = JSON.parse(subject.to_json)
+           expect(obj).not_to include("@metadata")
+         end
+
+         it "should be hidden from .to_hash" do
+           expect(subject.to_hash).not_to include("@metadata")
+         end
+
+         it "should be accessible through #to_hash_with_metadata" do
+           obj = subject.to_hash_with_metadata
+           expect(obj).to include("@metadata")
+           expect(obj["@metadata"]["foo"]["bar"]).to eq(value)
+         end
+       end
+
+       context "with no metadata" do
+         subject { LogStash::Event.new("foo" => "bar") }
+         it "should have no metadata" do
+           expect(subject["@metadata"]).to be_empty
+         end
+         it "should still allow normal field access" do
+           expect(subject["foo"]).to eq("bar")
+         end
+
+         it "should not include the @metadata key" do
+           expect(subject.to_hash_with_metadata).not_to include("@metadata")
+         end
+       end
+     end
+
+     context "signal events" do
+       it "should define the shutdown event" do
+         # the SHUTDOWN and FLUSH constants are part of the plugin API contract;
+         # if they are changed, all plugins must be updated
+         expect(LogStash::SHUTDOWN).to be_a(LogStash::ShutdownEvent)
+         expect(LogStash::FLUSH).to be_a(LogStash::FlushEvent)
+       end
+     end
+   end
+
+   let(:event_hash) do
+     {
+       "@timestamp" => "2013-01-01T00:00:00.000Z",
+       "type" => "sprintf",
+       "message" => "hello world",
+       "tags" => [ "tag1" ],
+       "source" => "/home/foo",
+       "a" => "b",
+       "c" => {
+         "d" => "f",
+         "e" => {"f" => "g"}
+       },
+       "f" => { "g" => { "h" => "i" } },
+       "j" => {
+         "k1" => "v",
+         "k2" => [ "w", "x" ],
+         "k3" => {"4" => "m"},
+         "k4" => [ {"nested" => "cool"} ],
+         5 => 6,
+         "5" => 7
+       },
+       "nilfield" => nil,
+       "@metadata" => { "fancy" => "pants", "have-to-go" => { "deeper" => "inception" } }
+     }
+   end
+
+   describe "using normal hash input" do
+     it_behaves_like "all event tests" do
+       subject{LogStash::Event.new(event_hash)}
+     end
+   end
+
+   describe "using hash input from deserialized json" do
+     # this is to test the case where JrJackson deserializes JSON and produces
+     # native Java Collections objects for efficiency
+     it_behaves_like "all event tests" do
+       subject{LogStash::Event.new(LogStash::Json.load(LogStash::Json.dump(event_hash)))}
+     end
+   end
+
+   describe "#to_s" do
+     let(:timestamp) { LogStash::Timestamp.new }
+     let(:event1) { LogStash::Event.new({ "@timestamp" => timestamp, "host" => "foo", "message" => "bar"}) }
+     let(:event2) { LogStash::Event.new({ "host" => "bar", "message" => "foo"}) }
+
+     it "should cache only one template" do
+       LogStash::StringInterpolation.clear_cache
+       expect {
+         event1.to_s
+         event2.to_s
+       }.to change { LogStash::StringInterpolation.cache_size }.by(1)
+     end
+
+     it "returns the string containing the timestamp, the host and the message" do
+       expect(event1.to_s).to eq("#{timestamp.to_iso8601} #{event1["host"]} #{event1["message"]}")
+     end
+   end
+ end
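Finally, a compact, illustrative recap of the event behaviors these specs exercise. This is hypothetical usage, assuming the gem is installed and LogStash::Event is loadable from the assumed entry point:

    require "logstash/event" # assumed load path

    event = LogStash::Event.new(
      "@timestamp" => "2013-01-01T00:00:00.000Z", # string is coerced to a LogStash::Timestamp
      "type"       => "sprintf",
      "j"          => { "k1" => "v" }
    )

    event["[j][k1]"]                 # => "v" (deep field reference, resolved via Accessors)
    event["[foo][bar]"] = "baz"      # deep write creates the intermediate hashes
    event.sprintf("%{type}")         # => "sprintf"
    event.sprintf("%{+YYYY-MM-dd}")  # => "2013-01-01" (Joda-style time format, under JRuby)
    event.to_json                    # excludes @metadata; use to_hash_with_metadata to keep it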