logstash-core-event 2.2.4.snapshot1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,103 @@
1
+ # encoding: utf-8
2
+ require "logstash/environment"
3
+ require "logstash/json"
4
+ require "forwardable"
5
+ require "date"
6
+ require "time"
7
+
8
+ module LogStash
9
+ class TimestampParserError < StandardError; end
10
+
11
+ class Timestamp
12
+ extend Forwardable
13
+ include Comparable
14
+
15
+ def_delegators :@time, :tv_usec, :usec, :year, :iso8601, :to_i, :tv_sec, :to_f, :to_edn, :<=>, :+
16
+
17
+ attr_reader :time
18
+
19
+ ISO8601_STRFTIME = "%04d-%02d-%02dT%02d:%02d:%02d.%06d%+03d:00".freeze
20
+ ISO8601_PRECISION = 3
21
+
22
+ def initialize(time = Time.new)
23
+ @time = time.utc
24
+ end
25
+
26
+ def self.at(*args)
27
+ epoch = args.first
28
+ if epoch.is_a?(BigDecimal)
29
+ # bug in JRuby prevents correctly parsing a BigDecimal fractional part, see https://github.com/elastic/logstash/issues/4565
30
+ Timestamp.new(::Time.at(epoch.to_i, epoch.frac.to_f * 1000000))
31
+ else
32
+ Timestamp.new(::Time.at(*args))
33
+ end
34
+ end
35
+
36
+ def self.parse(*args)
37
+ Timestamp.new(::Time.parse(*args))
38
+ end
39
+
40
+ def self.now
41
+ Timestamp.new(::Time.now)
42
+ end
43
+
44
+ # coerce tries different strategies based on the time object class to convert into a Timestamp.
45
+ # @param [String, Time, Timestamp] time the time object to try coerce
46
+ # @return [Timestamp, nil] Timestamp will be returned if successful otherwise nil
47
+ # @raise [TimestampParserError] on String with invalid format
48
+ def self.coerce(time)
49
+ case time
50
+ when String
51
+ LogStash::Timestamp.parse_iso8601(time)
52
+ when LogStash::Timestamp
53
+ time
54
+ when Time
55
+ LogStash::Timestamp.new(time)
56
+ else
57
+ nil
58
+ end
59
+ end
60
+
61
+ if LogStash::Environment.jruby?
62
+ JODA_ISO8601_PARSER = org.joda.time.format.ISODateTimeFormat.dateTimeParser
63
+ UTC = org.joda.time.DateTimeZone.forID("UTC")
64
+
65
+ def self.parse_iso8601(t)
66
+ millis = JODA_ISO8601_PARSER.parseMillis(t)
67
+ LogStash::Timestamp.at(millis / 1000, (millis % 1000) * 1000)
68
+ rescue => e
69
+ raise(TimestampParserError, "invalid timestamp string #{t.inspect}, error=#{e.inspect}")
70
+ end
71
+
72
+ else
73
+
74
+ def self.parse_iso8601(t)
75
+ # warning, ruby's Time.parse is *really* terrible and slow.
76
+ LogStash::Timestamp.new(::Time.parse(t))
77
+ rescue => e
78
+ raise(TimestampParserError, "invalid timestamp string #{t.inspect}, error=#{e.inspect}")
79
+ end
80
+ end
81
+
82
+ def utc
83
+ @time.utc # modifies the receiver
84
+ self
85
+ end
86
+ alias_method :gmtime, :utc
87
+
88
+ def to_json(*args)
89
+ # ignore arguments to respect accepted to_json method signature
90
+ "\"" + to_iso8601 + "\""
91
+ end
92
+ alias_method :inspect, :to_json
93
+
94
+ def to_iso8601
95
+ @iso8601 ||= @time.iso8601(ISO8601_PRECISION)
96
+ end
97
+ alias_method :to_s, :to_iso8601
98
+
99
+ def -(value)
100
+ @time - (value.is_a?(Timestamp) ? value.time : value)
101
+ end
102
+ end
103
+ end
@@ -0,0 +1,123 @@
1
+ # encoding: utf-8
2
+ require "logstash/namespace"
3
+ require "logstash/util"
4
+ require "thread_safe"
5
+
6
+ module LogStash::Util
7
+
8
+ # PathCache is a singleton which globally caches the relation between a field reference and its
9
+ # decomposition into a [key, path array] tuple. For example the field reference [foo][bar][baz]
10
+ # is decomposed into ["baz", ["foo", "bar"]].
11
+ module PathCache
12
+ extend self
13
+
14
+ # requiring libraries and defining constants is thread safe in JRuby so
15
+ # PathCache::CACHE will be correctly initialized, once, when accessors.rb
16
+ # will be first required
17
+ CACHE = ThreadSafe::Cache.new
18
+
19
+ def get(field_reference)
20
+ # the "get_or_default(x, nil) || put(x, parse(x))" is ~2x faster than "get || put" because the get call is
21
+ # proxied through the JRuby JavaProxy op_aref method. the correct idiom here would be to use
22
+ # "compute_if_absent(x){parse(x)}" but because of the closure creation, it is ~1.5x slower than
23
+ # "get_or_default || put".
24
+ # this "get_or_default || put" is obviously non-atomic which is not really important here
25
+ # since all threads will set the same value and this cache will stabilize very quickly after the first
26
+ # few events.
27
+ CACHE.get_or_default(field_reference, nil) || CACHE.put(field_reference, parse(field_reference))
28
+ end
29
+
30
+ def parse(field_reference)
31
+ path = field_reference.split(/[\[\]]/).select{|s| !s.empty?}
32
+ [path.pop, path]
33
+ end
34
+ end
35
+
36
+ # Accessors uses a lookup table to speedup access of a field reference of the form
37
+ # "[hello][world]" to the underlying store hash into {"hello" => {"world" => "foo"}}
38
+ class Accessors
39
+
40
+ # @param store [Hash] the backing data store field references point to
41
+ def initialize(store)
42
+ @store = store
43
+
44
+ # @lut is a lookup table between a field reference and a [target, key] tuple
45
+ # where target is the containing Hash or Array for key in @store.
46
+ # this allows us to directly access the containing object for key instead of
47
+ # walking the field reference path into the inner @store objects
48
+ @lut = {}
49
+ end
50
+
51
+ # @param field_reference [String] the field reference
52
+ # @return [Object] the value in @store for this field reference
53
+ def get(field_reference)
54
+ target, key = lookup(field_reference)
55
+ return nil unless target
56
+ target.is_a?(Array) ? target[key.to_i] : target[key]
57
+ end
58
+
59
+ # @param field_reference [String] the field reference
60
+ # @param value [Object] the value to set in @store for this field reference
61
+ # @return [Object] the value set
62
+ def set(field_reference, value)
63
+ target, key = lookup_or_create(field_reference)
64
+ target[target.is_a?(Array) ? key.to_i : key] = value
65
+ end
66
+
67
+ # @param field_reference [String] the field reference to remove
68
+ # @return [Object] the removed value in @store for this field reference
69
+ def del(field_reference)
70
+ target, key = lookup(field_reference)
71
+ return nil unless target
72
+ target.is_a?(Array) ? target.delete_at(key.to_i) : target.delete(key)
73
+ end
74
+
75
+ # @param field_reference [String] the field reference to test for inclusion in the store
76
+ # @return [Boolean] true if the store contains a value for this field reference
77
+ def include?(field_reference)
78
+ target, key = lookup(field_reference)
79
+ return false unless target
80
+
81
+ target.is_a?(Array) ? !target[key.to_i].nil? : target.include?(key)
82
+ end
83
+
84
+ private
85
+
86
+ # retrieve the [target, key] tuple associated with this field reference
87
+ # @param field_reference [String] the field reference
88
+ # @return [[Object, String]] the [target, key] tuple associated with this field reference
89
+ def lookup(field_reference)
90
+ @lut[field_reference] ||= find_target(field_reference)
91
+ end
92
+
93
+ # retrieve the [target, key] tuple associated with this field reference and create inner
94
+ # container objects if they do not exist
95
+ # @param field_reference [String] the field reference
96
+ # @return [[Object, String]] the [target, key] tuple associated with this field reference
97
+ def lookup_or_create(field_reference)
98
+ @lut[field_reference] ||= find_or_create_target(field_reference)
99
+ end
100
+
101
+ # find the target container object in store for this field reference
102
+ # @param field_reference [String] the field reference
103
+ # @return [Object] the target container object in store associated with this field reference
104
+ def find_target(field_reference)
105
+ key, path = PathCache.get(field_reference)
106
+ target = path.inject(@store) do |r, k|
107
+ return nil unless r
108
+ r[r.is_a?(Array) ? k.to_i : k]
109
+ end
110
+ target ? [target, key] : nil
111
+ end
112
+
113
+ # find the target container object in store for this field reference and create inner
114
+ # container objects if they do not exist
115
+ # @param field_reference [String] the field reference
116
+ # @return [Object] the target container object in store associated with this field reference
117
+ def find_or_create_target(accessor)
118
+ key, path = PathCache.get(accessor)
119
+ target = path.inject(@store) {|r, k| r[r.is_a?(Array) ? k.to_i : k] ||= {}}
120
+ [target, key]
121
+ end
122
+ end # class Accessors
123
+ end # module LogStash::Util
@@ -0,0 +1,23 @@
1
+ # -*- encoding: utf-8 -*-
2
+ lib = File.expand_path('../lib', __FILE__)
3
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
4
+ require 'logstash-core-event/version'
5
+
6
+ Gem::Specification.new do |gem|
7
+ gem.authors = ["Elastic"]
8
+ gem.email = ["info@elastic.co"]
9
+ gem.description = %q{The core event component of logstash, the scalable log and event management tool}
10
+ gem.summary = %q{logstash-core-event - The core event component of logstash}
11
+ gem.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
12
+ gem.license = "Apache License (2.0)"
13
+
14
+ gem.files = Dir.glob(["logstash-core-event.gemspec", "lib/**/*.rb", "spec/**/*.rb"])
15
+ gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
16
+ gem.name = "logstash-core-event"
17
+ gem.require_paths = ["lib"]
18
+ gem.version = LOGSTASH_CORE_EVENT_VERSION
19
+
20
+ if RUBY_PLATFORM == 'java'
21
+ gem.platform = RUBY_PLATFORM
22
+ end
23
+ end
@@ -0,0 +1,534 @@
1
+ # encoding: utf-8
2
+ require "spec_helper"
3
+ require "json"
4
+
5
+ describe LogStash::Event do
6
+
7
+ shared_examples "all event tests" do
8
+ context "[]=" do
9
+ it "should raise an exception if you attempt to set @timestamp to a value type other than a Time object" do
10
+ expect{subject["@timestamp"] = "crash!"}.to raise_error(TypeError)
11
+ end
12
+
13
+ it "should assign simple fields" do
14
+ expect(subject["foo"]).to be_nil
15
+ expect(subject["foo"] = "bar").to eq("bar")
16
+ expect(subject["foo"]).to eq("bar")
17
+ end
18
+
19
+ it "should overwrite simple fields" do
20
+ expect(subject["foo"]).to be_nil
21
+ expect(subject["foo"] = "bar").to eq("bar")
22
+ expect(subject["foo"]).to eq("bar")
23
+
24
+ expect(subject["foo"] = "baz").to eq("baz")
25
+ expect(subject["foo"]).to eq("baz")
26
+ end
27
+
28
+ it "should assign deep fields" do
29
+ expect(subject["[foo][bar]"]).to be_nil
30
+ expect(subject["[foo][bar]"] = "baz").to eq("baz")
31
+ expect(subject["[foo][bar]"]).to eq("baz")
32
+ end
33
+
34
+ it "should overwrite deep fields" do
35
+ expect(subject["[foo][bar]"]).to be_nil
36
+ expect(subject["[foo][bar]"] = "baz").to eq("baz")
37
+ expect(subject["[foo][bar]"]).to eq("baz")
38
+
39
+ expect(subject["[foo][bar]"] = "zab").to eq("zab")
40
+ expect(subject["[foo][bar]"]).to eq("zab")
41
+ end
42
+
43
+ it "allow to set the @metadata key to a hash" do
44
+ subject["@metadata"] = { "action" => "index" }
45
+ expect(subject["[@metadata][action]"]).to eq("index")
46
+ end
47
+
48
+ it "should add key when setting nil value" do
49
+ subject["[baz]"] = nil
50
+ expect(subject.to_hash).to include("baz" => nil)
51
+ end
52
+ end
53
+
54
+ context "#sprintf" do
55
+ it "should report a unix timestamp for %{+%s}" do
56
+ expect(subject.sprintf("%{+%s}")).to eq("1356998400")
57
+ end
58
+
59
+ it "should work if there is no fieldref in the string" do
60
+ expect(subject.sprintf("bonjour")).to eq("bonjour")
61
+ end
62
+
63
+ it "should raise error when formatting %{+%s} when @timestamp field is missing" do
64
+ str = "hello-%{+%s}"
65
+ subj = subject.clone
66
+ subj.remove("[@timestamp]")
67
+ expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
68
+ end
69
+
70
+ it "should report a time with %{+format} syntax", :if => RUBY_ENGINE == "jruby" do
71
+ expect(subject.sprintf("%{+YYYY}")).to eq("2013")
72
+ expect(subject.sprintf("%{+MM}")).to eq("01")
73
+ expect(subject.sprintf("%{+HH}")).to eq("00")
74
+ end
75
+
76
+ it "should support mixed string" do
77
+ expect(subject.sprintf("foo %{+YYYY-MM-dd} %{type}")).to eq("foo 2013-01-01 sprintf")
78
+ end
79
+
80
+ it "should raise error with %{+format} syntax when @timestamp field is missing", :if => RUBY_ENGINE == "jruby" do
81
+ str = "logstash-%{+YYYY}"
82
+ subj = subject.clone
83
+ subj.remove("[@timestamp]")
84
+ expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
85
+ end
86
+
87
+ it "should report fields with %{field} syntax" do
88
+ expect(subject.sprintf("%{type}")).to eq("sprintf")
89
+ expect(subject.sprintf("%{message}")).to eq(subject["message"])
90
+ end
91
+
92
+ it "should print deep fields" do
93
+ expect(subject.sprintf("%{[j][k1]}")).to eq("v")
94
+ expect(subject.sprintf("%{[j][k2][0]}")).to eq("w")
95
+ end
96
+
97
+ it "should be able to take a non-string for the format" do
98
+ expect(subject.sprintf(2)).to eq("2")
99
+ end
100
+
101
+ it "should allow to use the metadata when calling #sprintf" do
102
+ expect(subject.sprintf("super-%{[@metadata][fancy]}")).to eq("super-pants")
103
+ end
104
+
105
+ it "should allow to use nested hash from the metadata field" do
106
+ expect(subject.sprintf("%{[@metadata][have-to-go][deeper]}")).to eq("inception")
107
+ end
108
+
109
+ it "should return a json string if the key is a hash" do
110
+ expect(subject.sprintf("%{[j][k3]}")).to eq("{\"4\":\"m\"}")
111
+ end
112
+
113
+ it "should not strip last character" do
114
+ expect(subject.sprintf("%{type}%{message}|")).to eq("sprintfhello world|")
115
+ end
116
+
117
+ context "#encoding" do
118
+ it "should return known patterns as UTF-8" do
119
+ expect(subject.sprintf("%{message}").encoding).to eq(Encoding::UTF_8)
120
+ end
121
+
122
+ it "should return unknown patterns as UTF-8" do
123
+ expect(subject.sprintf("%{unkown_pattern}").encoding).to eq(Encoding::UTF_8)
124
+ end
125
+ end
126
+ end
127
+
128
+ context "#[]" do
129
+ it "should fetch data" do
130
+ expect(subject["type"]).to eq("sprintf")
131
+ end
132
+ it "should fetch fields" do
133
+ expect(subject["a"]).to eq("b")
134
+ expect(subject['c']['d']).to eq("f")
135
+ end
136
+ it "should fetch deep fields" do
137
+ expect(subject["[j][k1]"]).to eq("v")
138
+ expect(subject["[c][d]"]).to eq("f")
139
+ expect(subject['[f][g][h]']).to eq("i")
140
+ expect(subject['[j][k3][4]']).to eq("m")
141
+ expect(subject['[j][5]']).to eq(7)
142
+
143
+ end
144
+
145
+ it "should be fast?", :performance => true do
146
+ count = 1000000
147
+ 2.times do
148
+ start = Time.now
149
+ count.times { subject["[j][k1]"] }
150
+ duration = Time.now - start
151
+ puts "event #[] rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
152
+ end
153
+ end
154
+ end
155
+
156
+ context "#include?" do
157
+ it "should include existing fields" do
158
+ expect(subject.include?("c")).to eq(true)
159
+ expect(subject.include?("[c][d]")).to eq(true)
160
+ expect(subject.include?("[j][k4][0][nested]")).to eq(true)
161
+ end
162
+
163
+ it "should include field with nil value" do
164
+ expect(subject.include?("nilfield")).to eq(true)
165
+ end
166
+
167
+ it "should include @metadata field" do
168
+ expect(subject.include?("@metadata")).to eq(true)
169
+ end
170
+
171
+ it "should include field within @metadata" do
172
+ expect(subject.include?("[@metadata][fancy]")).to eq(true)
173
+ end
174
+
175
+ it "should not include non-existing fields" do
176
+ expect(subject.include?("doesnotexist")).to eq(false)
177
+ expect(subject.include?("[j][doesnotexist]")).to eq(false)
178
+ expect(subject.include?("[tag][0][hello][yes]")).to eq(false)
179
+ end
180
+
181
+ it "should include within arrays" do
182
+ expect(subject.include?("[tags][0]")).to eq(true)
183
+ expect(subject.include?("[tags][1]")).to eq(false)
184
+ end
185
+ end
186
+
187
+ context "#overwrite" do
188
+ it "should swap data with new content" do
189
+ new_event = LogStash::Event.new(
190
+ "type" => "new",
191
+ "message" => "foo bar",
192
+ )
193
+ subject.overwrite(new_event)
194
+
195
+ expect(subject["message"]).to eq("foo bar")
196
+ expect(subject["type"]).to eq("new")
197
+
198
+ ["tags", "source", "a", "c", "f", "j"].each do |field|
199
+ expect(subject[field]).to be_nil
200
+ end
201
+ end
202
+ end
203
+
204
+ context "#append" do
205
+ it "should append strings to an array" do
206
+ subject.append(LogStash::Event.new("message" => "another thing"))
207
+ expect(subject["message"]).to eq([ "hello world", "another thing" ])
208
+ end
209
+
210
+ it "should concatenate tags" do
211
+ subject.append(LogStash::Event.new("tags" => [ "tag2" ]))
212
+ # added to_a for when array is a Java Collection when produced from json input
213
+ # TODO: we have to find a better way to handle this in tests. maybe override
214
+ # rspec eq or == to do an explicit to_a when comparing arrays?
215
+ expect(subject["tags"].to_a).to eq([ "tag1", "tag2" ])
216
+ end
217
+
218
+ context "when event field is nil" do
219
+ it "should add single value as string" do
220
+ subject.append(LogStash::Event.new({"field1" => "append1"}))
221
+ expect(subject[ "field1" ]).to eq("append1")
222
+ end
223
+ it "should add multi values as array" do
224
+ subject.append(LogStash::Event.new({"field1" => [ "append1","append2" ]}))
225
+ expect(subject[ "field1" ]).to eq([ "append1","append2" ])
226
+ end
227
+ end
228
+
229
+ context "when event field is a string" do
230
+ before { subject[ "field1" ] = "original1" }
231
+
232
+ it "should append string to values, if different from current" do
233
+ subject.append(LogStash::Event.new({"field1" => "append1"}))
234
+ expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
235
+ end
236
+ it "should not change value, if appended value is equal current" do
237
+ subject.append(LogStash::Event.new({"field1" => "original1"}))
238
+ expect(subject[ "field1" ]).to eq("original1")
239
+ end
240
+ it "should concatenate values in an array" do
241
+ subject.append(LogStash::Event.new({"field1" => [ "append1" ]}))
242
+ expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
243
+ end
244
+ it "should join array, removing duplicates" do
245
+ subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
246
+ expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
247
+ end
248
+ end
249
+ context "when event field is an array" do
250
+ before { subject[ "field1" ] = [ "original1", "original2" ] }
251
+
252
+ it "should append string values to array, if not present in array" do
253
+ subject.append(LogStash::Event.new({"field1" => "append1"}))
254
+ expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
255
+ end
256
+ it "should not append string values, if the array already contains it" do
257
+ subject.append(LogStash::Event.new({"field1" => "original1"}))
258
+ expect(subject[ "field1" ]).to eq([ "original1", "original2" ])
259
+ end
260
+ it "should join array, removing duplicates" do
261
+ subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
262
+ expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
263
+ end
264
+ end
265
+
266
+ end
267
+
268
+ it "timestamp parsing speed", :performance => true do
269
+ warmup = 10000
270
+ count = 1000000
271
+
272
+ data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
273
+ event = LogStash::Event.new(data)
274
+ expect(event["@timestamp"]).to be_a(LogStash::Timestamp)
275
+
276
+ duration = 0
277
+ [warmup, count].each do |i|
278
+ start = Time.now
279
+ i.times do
280
+ data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
281
+ LogStash::Event.new(data.clone)
282
+ end
283
+ duration = Time.now - start
284
+ end
285
+ puts "event @timestamp parse rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
286
+ end
287
+
288
+ context "acceptable @timestamp formats" do
289
+ subject { LogStash::Event.new }
290
+
291
+ formats = [
292
+ "YYYY-MM-dd'T'HH:mm:ss.SSSZ",
293
+ "YYYY-MM-dd'T'HH:mm:ss.SSSSSSZ",
294
+ "YYYY-MM-dd'T'HH:mm:ss.SSS",
295
+ "YYYY-MM-dd'T'HH:mm:ss",
296
+ "YYYY-MM-dd'T'HH:mm:ssZ",
297
+ ]
298
+ formats.each do |format|
299
+ it "includes #{format}" do
300
+ time = subject.sprintf("%{+#{format}}")
301
+ begin
302
+ LogStash::Event.new("@timestamp" => time)
303
+ rescue => e
304
+ raise StandardError, "Time '#{time}' was rejected. #{e.class}: #{e.to_s}"
305
+ end
306
+ end
307
+ end
308
+
309
+ context "from LOGSTASH-1738" do
310
+ it "does not error" do
311
+ LogStash::Event.new("@timestamp" => "2013-12-29T23:12:52.371240+02:00")
312
+ end
313
+ end
314
+
315
+ context "from LOGSTASH-1732" do
316
+ it "does not error" do
317
+ LogStash::Event.new("@timestamp" => "2013-12-27T11:07:25+00:00")
318
+ end
319
+ end
320
+ end
321
+
322
+ context "timestamp initialization" do
323
+ let(:logger_mock) { double("logger") }
324
+
325
+ after(:each) do
326
+ LogStash::Event.logger = LogStash::Event::DEFAULT_LOGGER
327
+ end
328
+
329
+ it "should coerce timestamp" do
330
+ t = Time.iso8601("2014-06-12T00:12:17.114Z")
331
+ expect(LogStash::Event.new("@timestamp" => t).timestamp.to_i).to eq(t.to_i)
332
+ expect(LogStash::Event.new("@timestamp" => LogStash::Timestamp.new(t)).timestamp.to_i).to eq(t.to_i)
333
+ expect(LogStash::Event.new("@timestamp" => "2014-06-12T00:12:17.114Z").timestamp.to_i).to eq(t.to_i)
334
+ end
335
+
336
+ it "should assign current time when no timestamp" do
337
+ expect(LogStash::Event.new({}).timestamp.to_i).to be_within(1).of (Time.now.to_i)
338
+ end
339
+
340
+ it "should tag for invalid value" do
341
+ event = LogStash::Event.new("@timestamp" => "foo")
342
+ expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i
343
+ expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
344
+ expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq("foo")
345
+
346
+ event = LogStash::Event.new("@timestamp" => 666)
347
+ expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i
348
+ expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
349
+ expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(666)
350
+ end
351
+
352
+ it "should warn for invalid value" do
353
+ LogStash::Event.logger = logger_mock
354
+
355
+ expect(logger_mock).to receive(:warn).twice
356
+
357
+ LogStash::Event.new("@timestamp" => :foo)
358
+ LogStash::Event.new("@timestamp" => 666)
359
+ end
360
+
361
+ it "should tag for invalid string format" do
362
+ event = LogStash::Event.new("@timestamp" => "foo")
363
+ expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i
364
+ expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
365
+ expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq("foo")
366
+ end
367
+
368
+ it "should warn for invalid string format" do
369
+ LogStash::Event.logger = logger_mock
370
+
371
+ expect(logger_mock).to receive(:warn)
372
+ LogStash::Event.new("@timestamp" => "foo")
373
+ end
374
+ end
375
+
376
+ context "to_json" do
377
+ it "should support to_json" do
378
+ new_event = LogStash::Event.new(
379
+ "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
380
+ "message" => "foo bar",
381
+ )
382
+ json = new_event.to_json
383
+
384
+ expect(JSON.parse(json)).to eq( JSON.parse("{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}"))
385
+ end
386
+
387
+ it "should support to_json and ignore arguments" do
388
+ new_event = LogStash::Event.new(
389
+ "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
390
+ "message" => "foo bar",
391
+ )
392
+ json = new_event.to_json(:foo => 1, :bar => "baz")
393
+
394
+ expect(JSON.parse(json)).to eq( JSON.parse("{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}"))
395
+ end
396
+ end
397
+
398
+ context "metadata" do
399
+ context "with existing metadata" do
400
+ subject { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => "pants" }) }
401
+
402
+ it "should not include metadata in to_hash" do
403
+ expect(subject.to_hash.keys).not_to include("@metadata")
404
+
405
+ # 'hello', '@timestamp', and '@version'
406
+ expect(subject.to_hash.keys.count).to eq(3)
407
+ end
408
+
409
+ it "should still allow normal field access" do
410
+ expect(subject["hello"]).to eq("world")
411
+ end
412
+ end
413
+
414
+ context "with set metadata" do
415
+ let(:fieldref) { "[@metadata][foo][bar]" }
416
+ let(:value) { "bar" }
417
+ subject { LogStash::Event.new("normal" => "normal") }
418
+ before do
419
+ # Verify the test is configured correctly.
420
+ expect(fieldref).to start_with("[@metadata]")
421
+
422
+ # Set it.
423
+ subject[fieldref] = value
424
+ end
425
+
426
+ it "should still allow normal field access" do
427
+ expect(subject["normal"]).to eq("normal")
428
+ end
429
+
430
+ it "should allow getting" do
431
+ expect(subject[fieldref]).to eq(value)
432
+ end
433
+
434
+ it "should be hidden from .to_json" do
435
+ require "json"
436
+ obj = JSON.parse(subject.to_json)
437
+ expect(obj).not_to include("@metadata")
438
+ end
439
+
440
+ it "should be hidden from .to_hash" do
441
+ expect(subject.to_hash).not_to include("@metadata")
442
+ end
443
+
444
+ it "should be accessible through #to_hash_with_metadata" do
445
+ obj = subject.to_hash_with_metadata
446
+ expect(obj).to include("@metadata")
447
+ expect(obj["@metadata"]["foo"]["bar"]).to eq(value)
448
+ end
449
+ end
450
+
451
+ context "with no metadata" do
452
+ subject { LogStash::Event.new("foo" => "bar") }
453
+ it "should have no metadata" do
454
+ expect(subject["@metadata"]).to be_empty
455
+ end
456
+ it "should still allow normal field access" do
457
+ expect(subject["foo"]).to eq("bar")
458
+ end
459
+
460
+ it "should not include the @metadata key" do
461
+ expect(subject.to_hash_with_metadata).not_to include("@metadata")
462
+ end
463
+ end
464
+ end
465
+
466
+ context "signal events" do
467
+ it "should define the shutdown event" do
468
+ # the SHUTDOWN and FLUSH constants are part of the plugin API contract
469
+ # if they are changed, all plugins must be updated
470
+ expect(LogStash::SHUTDOWN).to be_a(LogStash::ShutdownEvent)
471
+ expect(LogStash::FLUSH).to be_a(LogStash::FlushEvent)
472
+ end
473
+ end
474
+ end
475
+
476
+ let(:event_hash) do
477
+ {
478
+ "@timestamp" => "2013-01-01T00:00:00.000Z",
479
+ "type" => "sprintf",
480
+ "message" => "hello world",
481
+ "tags" => [ "tag1" ],
482
+ "source" => "/home/foo",
483
+ "a" => "b",
484
+ "c" => {
485
+ "d" => "f",
486
+ "e" => {"f" => "g"}
487
+ },
488
+ "f" => { "g" => { "h" => "i" } },
489
+ "j" => {
490
+ "k1" => "v",
491
+ "k2" => [ "w", "x" ],
492
+ "k3" => {"4" => "m"},
493
+ "k4" => [ {"nested" => "cool"} ],
494
+ 5 => 6,
495
+ "5" => 7
496
+ },
497
+ "nilfield" => nil,
498
+ "@metadata" => { "fancy" => "pants", "have-to-go" => { "deeper" => "inception" } }
499
+ }
500
+ end
501
+
502
+ describe "using normal hash input" do
503
+ it_behaves_like "all event tests" do
504
+ subject{LogStash::Event.new(event_hash)}
505
+ end
506
+ end
507
+
508
+ describe "using hash input from deserialized json" do
509
+ # this is to test the case when JrJackson deserialises Json and produces
510
+ # native Java Collections objects for efficiency
511
+ it_behaves_like "all event tests" do
512
+ subject{LogStash::Event.new(LogStash::Json.load(LogStash::Json.dump(event_hash)))}
513
+ end
514
+ end
515
+
516
+
517
+ describe "#to_s" do
518
+ let(:timestamp) { LogStash::Timestamp.new }
519
+ let(:event1) { LogStash::Event.new({ "@timestamp" => timestamp, "host" => "foo", "message" => "bar"}) }
520
+ let(:event2) { LogStash::Event.new({ "host" => "bar", "message" => "foo"}) }
521
+
522
+ it "should cache only one template" do
523
+ LogStash::StringInterpolation.clear_cache
524
+ expect {
525
+ event1.to_s
526
+ event2.to_s
527
+ }.to change { LogStash::StringInterpolation.cache_size }.by(1)
528
+ end
529
+
530
+ it "return the string containing the timestamp, the host and the message" do
531
+ expect(event1.to_s).to eq("#{timestamp.to_iso8601} #{event1["host"]} #{event1["message"]}")
532
+ end
533
+ end
534
+ end