logstash-core-event 2.2.4.snapshot1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+ metadata.gz: 55afe526e0375965caa5d655bd0d15cefb96b7de
+ data.tar.gz: b5604ec9b6181049186cd4976d804a81fa147dcf
+ SHA512:
+ metadata.gz: f008f7a9f74fbbfc0e40db9fcee97e06e73e4e4e8ea091bdc368e5f267b398859be1375a3ed5a162e992f354381211e68a6bb2e70da4e0e562f14098631299ae
+ data.tar.gz: f76d2ce6b55a825fb0578a34c53d296bde2274fa34e8ea9b7c48b70187300e5ca8f338cda511fc92ad9d13093b04f3bb5d6926b18df9b0bf9cbf1d214e2348ca
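
The digests above cover the gem's two internal archives, metadata.gz and data.tar.gz. The following sketch is illustrative only and not part of this diff; it recomputes those digests from a locally downloaded copy of the gem (the local file name is an assumption).

# Illustrative only -- not part of the package. Recompute the digests shown
# above from a locally downloaded gem file (path below is an assumption).
require "digest"
require "rubygems/package"

gem_path = "logstash-core-event-2.2.4.snapshot1.gem"

File.open(gem_path, "rb") do |io|
  Gem::Package::TarReader.new(io) do |tar|
    tar.each do |entry|
      next unless %w(metadata.gz data.tar.gz).include?(entry.full_name)
      bytes = entry.read
      puts "#{entry.full_name} SHA1:   #{Digest::SHA1.hexdigest(bytes)}"
      puts "#{entry.full_name} SHA512: #{Digest::SHA512.hexdigest(bytes)}"
    end
  end
end
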
@@ -0,0 +1 @@
+ require "logstash-core-event/logstash-core-event"
@@ -0,0 +1,5 @@
+ # encoding: utf-8
+ module LogStash
+ end
+
+ require "logstash/event"
@@ -0,0 +1,8 @@
+ # encoding: utf-8
+
+ # The version of the logstash core event gem.
+ #
+ # Note to authors: this should not include dashes because 'gem' barfs if
+ # you include a dash in the version string.
+
+ LOGSTASH_CORE_EVENT_VERSION = "2.2.4.snapshot1"
@@ -0,0 +1,278 @@
+ # encoding: utf-8
+ require "time"
+ require "date"
+ require "cabin"
+ require "logstash/namespace"
+ require "logstash/util/accessors"
+ require "logstash/timestamp"
+ require "logstash/json"
+ require "logstash/string_interpolation"
+
+ # transient pipeline events for normal in-flow signaling, as opposed to
+ # flow-altering exceptions. for now having base classes is adequate and
+ # in the future it might be necessary to refactor using something like a BaseEvent
+ # class to have a common interface for all pipeline events to support
+ # eventual queueing persistence, for example. TBD.
+ class LogStash::ShutdownEvent; end
+ class LogStash::FlushEvent; end
+
+ module LogStash
+   FLUSH = LogStash::FlushEvent.new
+
+   # LogStash::SHUTDOWN is used by plugins
+   SHUTDOWN = LogStash::ShutdownEvent.new
+ end
+
+ # the logstash event object.
+ #
+ # An event is simply a tuple of (timestamp, data).
+ # The 'timestamp' is an ISO8601 timestamp. Data is anything - any message,
+ # context, references, etc that are relevant to this event.
+ #
+ # Internally, this is represented as a hash with only two guaranteed fields.
+ #
+ # * "@timestamp" - an ISO8601 timestamp representing the time the event
+ #   occurred at.
+ # * "@version" - the version of the schema. Currently "1"
+ #
+ # They are prefixed with an "@" symbol to avoid clashing with your
+ # own custom fields.
+ #
+ # When serialized, this is represented in JSON. For example:
+ #
+ #   {
+ #     "@timestamp": "2013-02-09T20:39:26.234Z",
+ #     "@version": "1",
+ #     message: "hello world"
+ #   }
+ class LogStash::Event
+   class DeprecatedMethod < StandardError; end
+
+   CHAR_PLUS = "+"
+   TIMESTAMP = "@timestamp"
+   VERSION = "@version"
+   VERSION_ONE = "1"
+   TIMESTAMP_FAILURE_TAG = "_timestampparsefailure"
+   TIMESTAMP_FAILURE_FIELD = "_@timestamp"
+
+   METADATA = "@metadata".freeze
+   METADATA_BRACKETS = "[#{METADATA}]".freeze
+
+   # Floats outside of these upper and lower bounds are forcibly converted
+   # to scientific notation by Float#to_s
+   MIN_FLOAT_BEFORE_SCI_NOT = 0.0001
+   MAX_FLOAT_BEFORE_SCI_NOT = 1000000000000000.0
+
+   DEFAULT_LOGGER = Cabin::Channel.get(LogStash)
+   @@logger = DEFAULT_LOGGER
+
+   def initialize(data = {})
+     @cancelled = false
+     @data = data
+     @accessors = LogStash::Util::Accessors.new(data)
+     @data[VERSION] ||= VERSION_ONE
+     ts = @data[TIMESTAMP]
+     @data[TIMESTAMP] = ts ? init_timestamp(ts) : LogStash::Timestamp.now
+
+     @metadata = @data.delete(METADATA) || {}
+     @metadata_accessors = LogStash::Util::Accessors.new(@metadata)
+   end
+
+   def cancel
+     @cancelled = true
+   end
+
+   def uncancel
+     @cancelled = false
+   end
+
+   def cancelled?
+     @cancelled
+   end
+
+   # Create a deep-ish copy of this event.
+   def clone
+     copy = {}
+     @data.each do |k,v|
+       # TODO(sissel): Recurse if this is a hash/array?
+       copy[k] = begin v.clone rescue v end
+     end
+
+     self.class.new(copy)
+   end
+
+   def to_s
+     "#{timestamp.to_iso8601} #{self.sprintf("%{host} %{message}")}"
+   end
+
+   def timestamp
+     @data[TIMESTAMP]
+   end
+
+   def timestamp=(val)
+     @data[TIMESTAMP] = val
+   end
+
+   def [](fieldref)
+     if fieldref.start_with?(METADATA_BRACKETS)
+       @metadata_accessors.get(fieldref[METADATA_BRACKETS.length .. -1])
+     elsif fieldref == METADATA
+       @metadata
+     else
+       @accessors.get(fieldref)
+     end
+   end
+
+   def []=(fieldref, value)
+     if fieldref == TIMESTAMP && !value.is_a?(LogStash::Timestamp)
+       raise TypeError, "The field '@timestamp' must be a LogStash::Timestamp, not a #{value.class} (#{value})"
+     end
+     if fieldref.start_with?(METADATA_BRACKETS)
+       @metadata_accessors.set(fieldref[METADATA_BRACKETS.length .. -1], value)
+     elsif fieldref == METADATA
+       @metadata = value
+       @metadata_accessors = LogStash::Util::Accessors.new(@metadata)
+     else
+       @accessors.set(fieldref, value)
+     end
+   end
+
+   def to_json(*args)
+     # ignore arguments to respect accepted to_json method signature
+     LogStash::Json.dump(@data)
+   end
+
+   def to_hash
+     @data
+   end
+
+   def overwrite(event)
+     # pick up the new event @data and also pick up @accessors,
+     # otherwise they will be pointing at the previous data
+     @data = event.instance_variable_get(:@data)
+     @accessors = event.instance_variable_get(:@accessors)
+
+     # convert timestamp if it is a String
+     if @data[TIMESTAMP].is_a?(String)
+       @data[TIMESTAMP] = LogStash::Timestamp.parse_iso8601(@data[TIMESTAMP])
+     end
+   end
+
+   def include?(fieldref)
+     if fieldref.start_with?(METADATA_BRACKETS)
+       @metadata_accessors.include?(fieldref[METADATA_BRACKETS.length .. -1])
+     elsif fieldref == METADATA
+       true
+     else
+       @accessors.include?(fieldref)
+     end
+   end
+
+   # Append an event to this one.
+   def append(event)
+     # non-destructively merge that event with ourselves.
+
+     # no need to reset @accessors here because merging will not disrupt any existing field paths
+     # and if new ones are created they will be picked up.
+     LogStash::Util.hash_merge(@data, event.to_hash)
+   end
+
+   # Remove a field or field reference. Returns the value of that field when deleted.
+   def remove(fieldref)
+     @accessors.del(fieldref)
+   end
+
+   # sprintf. This could use a better method name.
+   # The idea is to take an event and convert it to a string based on
+   # any format values, delimited by %{foo} where 'foo' is a field or
+   # metadata member.
+   #
+   # For example, if the event has type == "foo" and host == "bar"
+   # then this string:
+   #   "type is %{type} and source is %{host}"
+   # will return
+   #   "type is foo and source is bar"
+   #
+   # If a %{name} value is an array, then we will join by ','
+   # If a %{name} value does not exist, then no substitution occurs.
+   def sprintf(format)
+     LogStash::StringInterpolation.evaluate(self, format)
+   end
+
+   def tag(value)
+     # Generalize this method for more usability
+     self["tags"] ||= []
+     self["tags"] << value unless self["tags"].include?(value)
+   end
+
+   def to_hash_with_metadata
+     @metadata.empty? ? to_hash : to_hash.merge(METADATA => @metadata)
+   end
+
+   def to_json_with_metadata(*args)
+     # ignore arguments to respect accepted to_json method signature
+     LogStash::Json.dump(to_hash_with_metadata)
+   end
+
+   # this is used by logstash-devutils spec_helper.rb to monkey patch the Event field setter []=
+   # and add systematic encoding validation on every field set in specs.
+   # TODO: (colin) this should be moved, probably to logstash-devutils?
+   def self.validate_value(value)
+     case value
+     when String
+       raise("expected UTF-8 encoding for value=#{value}, encoding=#{value.encoding.inspect}") unless value.encoding == Encoding::UTF_8
+       raise("invalid UTF-8 encoding for value=#{value}, encoding=#{value.encoding.inspect}") unless value.valid_encoding?
+       value
+     when Array
+       value.each{|v| validate_value(v)} # don't map, return original object
+       value
+     else
+       value
+     end
+   end
+
+   # deprecated public methods
+   # TODO: (colin) since these deprecated methods are still exposed in 2.x we should remove them in 3.0
+
+   def unix_timestamp
+     raise DeprecatedMethod
+   end
+
+   def ruby_timestamp
+     raise DeprecatedMethod
+   end
+
+   def fields
+     raise DeprecatedMethod
+   end
+
+   # set a new logger for all Event instances
+   # there is no point in changing it at runtime for reasons other than in tests/specs.
+   # @param logger [Cabin::Channel] logger instance that will be used by all Event instances
+   def self.logger=(logger)
+     @@logger = logger
+   end
+
+   private
+
+   def logger
+     @@logger
+   end
+
+   def init_timestamp(o)
+     begin
+       timestamp = LogStash::Timestamp.coerce(o)
+       return timestamp if timestamp
+
+       logger.warn("Unrecognized #{TIMESTAMP} value, setting current time to #{TIMESTAMP}, original in #{TIMESTAMP_FAILURE_FIELD} field", :value => o.inspect)
+     rescue LogStash::TimestampParserError => e
+       logger.warn("Error parsing #{TIMESTAMP} string, setting current time to #{TIMESTAMP}, original in #{TIMESTAMP_FAILURE_FIELD} field", :value => o.inspect, :exception => e.message)
+     end
+
+     @data["tags"] ||= []
+     @data["tags"] << TIMESTAMP_FAILURE_TAG unless @data["tags"].include?(TIMESTAMP_FAILURE_TAG)
+     @data[TIMESTAMP_FAILURE_FIELD] = o
+
+     LogStash::Timestamp.now
+   end
+ end
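
The hunk above defines the gem's public Event API: field-reference accessors, @metadata handling, sprintf interpolation, tagging, and JSON serialization. The short sketch below is illustrative only and not part of the package source; it assumes the gem is on the load path and simply exercises the methods defined above.

# Illustrative usage only -- not part of the package source.
require "logstash-core-event"

event = LogStash::Event.new("message" => "hello world", "host" => "bar")
event["type"] = "foo"                     # plain top-level field
event["[nested][field]"] = 42             # field-reference syntax via Accessors
event["[@metadata][trace_id]"] = "abc"    # kept out of the serialized hash

event.include?("type")                    # => true
event.remove("[nested][field]")           # => 42
event.tag("_example")                     # appends to the "tags" array once

event.sprintf("type is %{type} and source is %{host}")  # => "type is foo and source is bar"
event.to_json                   # serializes @data only, @metadata excluded
event.to_json_with_metadata     # includes "@metadata" when it is non-empty
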
@@ -0,0 +1,150 @@
+ # encoding: utf-8
+ require "thread_safe"
+ require "forwardable"
+
+ module LogStash
+   module StringInterpolation
+     extend self
+
+     # Floats outside of these upper and lower bounds are forcibly converted
+     # to scientific notation by Float#to_s
+     MIN_FLOAT_BEFORE_SCI_NOT = 0.0001
+     MAX_FLOAT_BEFORE_SCI_NOT = 1000000000000000.0
+
+     CACHE = ThreadSafe::Cache.new
+     TEMPLATE_TAG_REGEXP = /%\{[^}]+\}/
+
+     def evaluate(event, template)
+       if template.is_a?(Float) && (template < MIN_FLOAT_BEFORE_SCI_NOT || template >= MAX_FLOAT_BEFORE_SCI_NOT)
+         return ("%.15f" % template).sub(/0*$/,"")
+       end
+
+       template = template.to_s
+
+       return template if not_cachable?(template)
+
+       compiled = CACHE.get_or_default(template, nil) || CACHE.put(template, compile_template(template))
+       compiled.evaluate(event)
+     end
+
+     # clear the global compiled templates cache
+     def clear_cache
+       CACHE.clear
+     end
+
+     # @return [Fixnum] the compiled templates cache size
+     def cache_size
+       CACHE.size
+     end
+
+     private
+     def not_cachable?(template)
+       template.index("%").nil?
+     end
+
+     def compile_template(template)
+       nodes = Template.new
+
+       position = 0
+       matches = template.to_enum(:scan, TEMPLATE_TAG_REGEXP).map { |m| $~ }
+
+       matches.each do |match|
+         tag = match[0][2..-2]
+         start = match.offset(0).first
+         nodes << StaticNode.new(template[position..(start-1)]) if start > 0
+         nodes << identify(tag)
+         position = match.offset(0).last
+       end
+
+       if position < template.size
+         nodes << StaticNode.new(template[position..-1])
+       end
+
+       optimize(nodes)
+     end
+
+     def optimize(nodes)
+       nodes.size == 1 ? nodes.first : nodes
+     end
+
+     def identify(tag)
+       if tag == "+%s"
+         EpocNode.new
+       elsif tag[0, 1] == "+"
+         DateNode.new(tag[1..-1])
+       else
+         KeyNode.new(tag)
+       end
+     end
+   end
+
+   class Template
+     extend Forwardable
+     def_delegators :@nodes, :<<, :push, :size, :first
+
+     def initialize
+       @nodes = []
+     end
+
+     def evaluate(event)
+       @nodes.collect { |node| node.evaluate(event) }.join
+     end
+   end
+
+   class EpocNode
+     def evaluate(event)
+       t = event.timestamp
+       raise LogStash::Error, "Unable to format in string \"#{@format}\", #{LogStash::Event::TIMESTAMP} field not found" unless t
+       t.to_i.to_s
+     end
+   end
+
+   class StaticNode
+     def initialize(content)
+       @content = content
+     end
+
+     def evaluate(event)
+       @content
+     end
+   end
+
+   class KeyNode
+     def initialize(key)
+       @key = key
+     end
+
+     def evaluate(event)
+       value = event[@key]
+
+       case value
+       when nil
+         "%{#{@key}}"
+       when Array
+         value.join(",")
+       when Hash
+         LogStash::Json.dump(value)
+       else
+         value
+       end
+     end
+   end
+
+   class DateNode
+     def initialize(format)
+       @format = format
+       @formatter = org.joda.time.format.DateTimeFormat.forPattern(@format)
+         .withZone(org.joda.time.DateTimeZone::UTC)
+     end
+
+     def evaluate(event)
+       t = event.timestamp
+
+       raise LogStash::Error, "Unable to format in string \"#{@format}\", #{LogStash::Event::TIMESTAMP} field not found" unless t
+
+       org.joda.time.Instant.java_class.constructor(Java::long).new_instance(
+         t.tv_sec * 1000 + t.tv_usec / 1000
+       ).to_java.toDateTime.toString(@formatter)
+     end
+   end
+ end
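
The string interpolation hunk above compiles each %{...} template into static, key, epoch ("+%s"), and Joda-Time date nodes, and caches the compiled form per template string. The sketch below is illustrative only and not part of the package; since DateNode relies on Joda-Time, the date example assumes JRuby.

# Illustrative only -- not part of the package source. Assumes JRuby for the
# Joda-Time-backed %{+...} date formats.
require "logstash-core-event"

event = LogStash::Event.new("message" => "hello", "count" => 3)

event.sprintf("plain text")            # no "%" at all => returned as-is, never cached
event.sprintf("%{message}: %{count}")  # KeyNode lookups => "hello: 3"
event.sprintf("%{missing}")            # unknown field => "%{missing}" (no substitution)
event.sprintf("%{+%s}")                # EpocNode => @timestamp as epoch seconds
event.sprintf("%{+YYYY.MM.dd}")        # DateNode => @timestamp formatted with a Joda pattern, UTC

LogStash::StringInterpolation.cache_size  # compiled templates cached per format string
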