logstash-logger-yajl 0.27.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +21 -0
- data/.rspec +3 -0
- data/.rubocop.yml +1156 -0
- data/.travis.yml +26 -0
- data/Appraisals +23 -0
- data/CHANGELOG.md +203 -0
- data/Gemfile +6 -0
- data/LICENSE.txt +22 -0
- data/README.md +880 -0
- data/Rakefile +23 -0
- data/gemfiles/rails_3.2.gemfile +9 -0
- data/gemfiles/rails_4.0.gemfile +9 -0
- data/gemfiles/rails_4.1.gemfile +9 -0
- data/gemfiles/rails_4.2.gemfile +9 -0
- data/gemfiles/rails_5.0.gemfile +9 -0
- data/gemfiles/rails_5.1.gemfile +9 -0
- data/lib/logstash-event.rb +1 -0
- data/lib/logstash-logger.rb +11 -0
- data/lib/logstash-logger/buffer.rb +336 -0
- data/lib/logstash-logger/configuration.rb +29 -0
- data/lib/logstash-logger/device.rb +67 -0
- data/lib/logstash-logger/device/aws_stream.rb +94 -0
- data/lib/logstash-logger/device/balancer.rb +40 -0
- data/lib/logstash-logger/device/base.rb +73 -0
- data/lib/logstash-logger/device/connectable.rb +131 -0
- data/lib/logstash-logger/device/file.rb +23 -0
- data/lib/logstash-logger/device/firehose.rb +42 -0
- data/lib/logstash-logger/device/io.rb +11 -0
- data/lib/logstash-logger/device/kafka.rb +57 -0
- data/lib/logstash-logger/device/kinesis.rb +44 -0
- data/lib/logstash-logger/device/multi_delegator.rb +36 -0
- data/lib/logstash-logger/device/redis.rb +69 -0
- data/lib/logstash-logger/device/socket.rb +21 -0
- data/lib/logstash-logger/device/stderr.rb +13 -0
- data/lib/logstash-logger/device/stdout.rb +14 -0
- data/lib/logstash-logger/device/tcp.rb +86 -0
- data/lib/logstash-logger/device/udp.rb +12 -0
- data/lib/logstash-logger/device/unix.rb +18 -0
- data/lib/logstash-logger/formatter.rb +51 -0
- data/lib/logstash-logger/formatter/base.rb +73 -0
- data/lib/logstash-logger/formatter/cee.rb +11 -0
- data/lib/logstash-logger/formatter/cee_syslog.rb +22 -0
- data/lib/logstash-logger/formatter/json.rb +11 -0
- data/lib/logstash-logger/formatter/json_lines.rb +11 -0
- data/lib/logstash-logger/formatter/logstash_event.rb +6 -0
- data/lib/logstash-logger/logger.rb +106 -0
- data/lib/logstash-logger/multi_logger.rb +153 -0
- data/lib/logstash-logger/railtie.rb +51 -0
- data/lib/logstash-logger/silenced_logging.rb +83 -0
- data/lib/logstash-logger/tagged_logging.rb +40 -0
- data/lib/logstash-logger/version.rb +3 -0
- data/lib/logstash/event.rb +272 -0
- data/lib/logstash/namespace.rb +15 -0
- data/lib/logstash/util.rb +105 -0
- data/lib/logstash/util/fieldreference.rb +49 -0
- data/logstash-logger.gemspec +42 -0
- data/samples/example.crt +16 -0
- data/samples/example.key +15 -0
- data/samples/file.conf +11 -0
- data/samples/redis.conf +12 -0
- data/samples/ssl.conf +15 -0
- data/samples/syslog.conf +10 -0
- data/samples/tcp.conf +11 -0
- data/samples/udp.conf +11 -0
- data/samples/unix.conf +11 -0
- data/spec/configuration_spec.rb +27 -0
- data/spec/constructor_spec.rb +30 -0
- data/spec/device/balancer_spec.rb +31 -0
- data/spec/device/connectable_spec.rb +74 -0
- data/spec/device/file_spec.rb +15 -0
- data/spec/device/firehose_spec.rb +41 -0
- data/spec/device/io_spec.rb +13 -0
- data/spec/device/kafka_spec.rb +32 -0
- data/spec/device/kinesis_spec.rb +41 -0
- data/spec/device/multi_delegator_spec.rb +31 -0
- data/spec/device/redis_spec.rb +52 -0
- data/spec/device/socket_spec.rb +15 -0
- data/spec/device/stderr_spec.rb +16 -0
- data/spec/device/stdout_spec.rb +31 -0
- data/spec/device/tcp_spec.rb +120 -0
- data/spec/device/udp_spec.rb +9 -0
- data/spec/device/unix_spec.rb +23 -0
- data/spec/device_spec.rb +97 -0
- data/spec/formatter/base_spec.rb +125 -0
- data/spec/formatter/cee_spec.rb +15 -0
- data/spec/formatter/cee_syslog_spec.rb +43 -0
- data/spec/formatter/json_lines_spec.rb +14 -0
- data/spec/formatter/json_spec.rb +10 -0
- data/spec/formatter/logstash_event_spec.rb +10 -0
- data/spec/formatter_spec.rb +79 -0
- data/spec/logger_spec.rb +128 -0
- data/spec/logstash_event_spec.rb +139 -0
- data/spec/multi_logger_spec.rb +59 -0
- data/spec/rails_spec.rb +91 -0
- data/spec/silenced_logging_spec.rb +31 -0
- data/spec/spec_helper.rb +111 -0
- data/spec/syslog_spec.rb +32 -0
- data/spec/tagged_logging_spec.rb +32 -0
- metadata +385 -0
@@ -0,0 +1,83 @@
|
|
1
|
+
# Adapted from:
# https://github.com/rails/activerecord-session_store/blob/master/lib/active_record/session_store/extension/logger_silencer.rb
# https://github.com/rails/rails/pull/16885

require 'thread'

# Add support for Rails-style logger silencing. Thread-safe and no dependencies.
#
# Setup:
#   logger = Logger.new(STDOUT)
#   logger.extend(LogStashLogger::SilencedLogging)
#
# Usage:
#
#   logger.silence do
#     ...
#   end
#
module LogStashLogger
  module SilencedLogging
    # Extension hook: rewires the extended logger so that +level+ and +add+
    # consult a per-thread level override before falling back to the logger's
    # real level, and redefines the severity predicates in terms of the
    # (now thread-aware) level.
    def self.extended(logger)
      class << logger
        attr_accessor :silencer
        alias_method :level_without_thread_safety, :level
        alias_method :level, :level_with_thread_safety
        alias_method :add_without_thread_safety, :add
        alias_method :add, :add_with_thread_safety

        Logger::Severity.constants.each do |severity|
          instance_eval <<-EOT, __FILE__, __LINE__ + 1
            def #{severity.downcase}?  # def debug?
              Logger::#{severity} >= level  # DEBUG >= level
            end  # end
          EOT
        end
      end

      logger.instance_eval do
        self.silencer = true
      end
    end

    # The level override for the current thread, or nil when not silenced.
    def thread_level
      Thread.current[thread_hash_level_key]
    end

    def thread_level=(level)
      Thread.current[thread_hash_level_key] = level
    end

    # Effective level: the per-thread override wins over the global level.
    def level_with_thread_safety
      thread_level || level_without_thread_safety
    end

    # Thread-aware replacement for Logger#add. A nil severity is treated as
    # UNKNOWN, matching Logger#add. The constant must be written as
    # Logger::UNKNOWN here: this module is not nested under Logger, so a bare
    # UNKNOWN would raise NameError when add is called with a nil severity.
    def add_with_thread_safety(severity, message = nil, progname = nil, &block)
      if (defined?(@logdev) && @logdev.nil?) || (severity || Logger::UNKNOWN) < level
        true
      else
        add_without_thread_safety(severity, message, progname, &block)
      end
    end

    # Silences the logger for the duration of the block. Only messages at or
    # above +temporary_level+ get through; the override is always cleared,
    # even if the block raises.
    def silence(temporary_level = Logger::ERROR)
      if silencer
        begin
          self.thread_level = temporary_level
          yield self
        ensure
          self.thread_level = nil
        end
      else
        yield self
      end
    end

    private

    # Per-logger thread-local key, so multiple loggers can be silenced
    # independently on the same thread.
    def thread_hash_level_key
      @thread_hash_level_key ||= :"ThreadSafeLogger##{object_id}@level"
    end
  end
end
|
@@ -0,0 +1,40 @@
|
|
1
|
+
module LogStashLogger
  # Rails-style tagged logging, mirroring ActiveSupport::TaggedLogging.
  # Mix into a logger whose +formatter+ responds to the Formatter API below.
  module TaggedLogging
    # Adds the given tags for the duration of the block, yielding the logger.
    def tagged(*tags)
      formatter.tagged(*tags) { yield self }
    end

    # Drops all current tags before delegating to any parent flush.
    def flush
      formatter.clear_tags!
      super if defined?(super)
    end

    # Tag-stack behavior for a log formatter. Tags live in a thread-local
    # array, so concurrent threads keep independent stacks.
    module Formatter
      # Pushes the tags, runs the block, and always pops what was pushed.
      def tagged(*tags)
        pushed = push_tags(*tags)
        yield self
      ensure
        pop_tags(pushed.size)
      end

      # Flattens the arguments, discards nil/empty entries, appends the rest
      # to the current stack, and returns the tags actually added.
      def push_tags(*tags)
        added = tags.flatten.reject { |tag| tag.nil? || tag.empty? }
        current_tags.concat(added)
        added
      end

      # Removes the +size+ most recently pushed tags.
      def pop_tags(size = 1)
        current_tags.pop(size)
      end

      # Empties the tag stack for this thread.
      def clear_tags!
        current_tags.clear
      end

      # The thread-local tag stack (lazily created).
      def current_tags
        Thread.current[:logstash_logger_tags] ||= []
      end
    end
  end
end
|
@@ -0,0 +1,272 @@
|
|
1
|
+
require "yajl"
|
2
|
+
require "time"
|
3
|
+
require "date"
|
4
|
+
require "logstash/namespace"
|
5
|
+
require "logstash/util/fieldreference"
|
6
|
+
|
7
|
+
# Use a custom serialization for jsonifying Time objects.
# Serializes as an ISO8601 string with millisecond precision, e.g.
# "2013-02-09T20:39:26.234Z".
# TODO(sissel): Put this in a separate file.
class Time
  def to_json(*args)
    iso8601(3).to_json(*args)
  end

  # Make inspect output match the JSON wire format.
  def inspect
    to_json
  end
end
|
18
|
+
|
19
|
+
# the logstash event object.
|
20
|
+
#
|
21
|
+
# An event is simply a tuple of (timestamp, data).
|
22
|
+
# The 'timestamp' is an ISO8601 timestamp. Data is anything - any message,
|
23
|
+
# context, references, etc that are relevant to this event.
|
24
|
+
#
|
25
|
+
# Internally, this is represented as a hash with only two guaranteed fields.
|
26
|
+
#
|
27
|
+
# * "@timestamp" - an ISO8601 timestamp representing the time the event
|
28
|
+
# occurred at.
|
29
|
+
# * "@version" - the version of the schema. Currently "1"
|
30
|
+
#
|
31
|
+
# They are prefixed with an "@" symbol to avoid clashing with your
|
32
|
+
# own custom fields.
|
33
|
+
#
|
34
|
+
# When serialized, this is represented in JSON. For example:
|
35
|
+
#
|
36
|
+
# {
|
37
|
+
# "@timestamp": "2013-02-09T20:39:26.234Z",
|
38
|
+
# "@version": "1",
|
39
|
+
# message: "hello world"
|
40
|
+
# }
|
41
|
+
class LogStash::Event
  # Raised by methods kept only as tombstones from the pre-v1 event API.
  class DeprecatedMethod < StandardError; end

  public
  # data - Hash of initial fields. The hash is stored (and mutated) directly:
  # "@timestamp" is parsed to UTC Time when given as a String, defaulted to
  # Time.now.utc when absent, and "@version" is defaulted to "1".
  def initialize(data={})
    @cancelled = false

    @data = data
    if data.include?("@timestamp")
      t = data["@timestamp"]
      if t.is_a?(String)
        data["@timestamp"] = Time.parse(t).gmtime
      end
    else
      data["@timestamp"] = ::Time.now.utc
    end
    data["@version"] = "1" if !@data.include?("@version")
  end # def initialize

  # Add class methods on inclusion.
  public
  def self.included(klass)
    klass.extend(ClassMethods)
  end # def included

  module ClassMethods
    public
    # Builds an event by parsing a JSON string with Yajl.
    def from_json(json)
      return self.new(Yajl.load(json))
    end # def from_json
  end

  public
  # Marks this event as cancelled (e.g. dropped by a filter).
  def cancel
    @cancelled = true
  end # def cancel

  public
  def uncancel
    @cancelled = false
  end # def uncancel

  public
  def cancelled?
    return @cancelled
  end # def cancelled?

  # Create a deep-ish copy of this event.
  # Each top-level value is cloned, but nested structures inside those values
  # are still shared with the original.
  public
  def clone
    copy = {}
    @data.each do |k,v|
      # TODO(sissel): Recurse if this is a hash/array?
      copy[k] = v.clone
    end
    return self.class.new(copy)
  end # def clone

  if RUBY_ENGINE == "jruby"
    public
    def to_s
      return self.sprintf("%{+yyyy-MM-dd'T'HH:mm:ss.SSSZ} %{host} %{message}")
    end # def to_s
  else
    public
    def to_s
      return self.sprintf("#{self["@timestamp"].iso8601} %{host} %{message}")
    end # def to_s
  end

  public
  def timestamp; return @data["@timestamp"]; end # def timestamp
  def timestamp=(val); return @data["@timestamp"] = val; end # def timestamp=

  def unix_timestamp
    raise DeprecatedMethod
  end # def unix_timestamp

  def ruby_timestamp
    raise DeprecatedMethod
  end # def unix_timestamp

  # field-related access
  public
  # Fetches a field by name or "[a][b]"-style field reference.
  # NOTE(review): keys starting with "+" intentionally fall through the empty
  # branch and return nil (they are date-format tokens, not fields).
  def [](str)
    if str[0,1] == "+"
    else
      return LogStash::Util::FieldReference.exec(str, @data)
    end
  end # def []

  public
  # Sets a field by name or "[a][b]"-style field reference, creating
  # intermediate hashes as needed. Returns the assigned value.
  def []=(str, value)
    r = LogStash::Util::FieldReference.exec(str, @data) do |obj, key|
      obj[key] = value
    end

    # The assignment can fail if the given field reference (str) does not exist
    # In this case, we'll want to set the value manually.
    if r.nil?
      # TODO(sissel): Implement this in LogStash::Util::FieldReference
      if str[0,1] != "["
        return @data[str] = value
      end

      # No existing element was found, so let's set one.
      *parents, key = str.scan(/(?<=\[)[^\]]+(?=\])/)
      obj = @data
      parents.each do |p|
        if obj.include?(p)
          obj = obj[p]
        else
          obj[p] = {}
          obj = obj[p]
        end
      end
      obj[key] = value
    end
    return value
  end # def []=

  public
  # NOTE(review): dead code — this definition is overridden by the shim
  # `def fields; return self.to_hash; end` further down this class body.
  def fields
    raise DeprecatedMethod
  end

  public
  def to_json(*args)
    return Yajl.dump(@data, *args)
  end # def to_json

  # Returns the underlying data hash itself (not a copy).
  def to_hash
    return @data
  end # def to_hash

  public
  # Replaces this event's data with the other event's hash (shared reference).
  def overwrite(event)
    @data = event.to_hash
  end

  public
  # True when the field exists and is non-nil (a field holding nil is
  # indistinguishable from a missing field here).
  def include?(key)
    return !self[key].nil?
  end # def include?

  # Append an event to this one.
  public
  def append(event)
    # non-destructively merge that event with ourselves.
    LogStash::Util.hash_merge(@data, event.to_hash)
  end # append

  # Remove a field or field reference. Returns the value of that field when
  # deleted
  public
  def remove(str)
    return LogStash::Util::FieldReference.exec(str, @data) do |obj, key|
      next obj.delete(key)
    end
  end # def remove

  # sprintf. This could use a better method name.
  # The idea is to take an event and convert it to a string based on
  # any format values, delimited by %{foo} where 'foo' is a field or
  # metadata member.
  #
  # For example, if the event has type == "foo" and source == "bar"
  # then this string:
  #   "type is %{type} and source is %{host}"
  # will return
  #   "type is foo and source is bar"
  #
  # If a %{name} value is an array, then we will join by ','
  # If a %{name} value does not exist, then no substitution occurs.
  #
  # TODO(sissel): It is not clear what the value of a field that
  # is an array (or hash?) should be. Join by comma? Something else?
  public
  def sprintf(format)
    format = format.to_s
    if format.index("%").nil?
      return format
    end

    return format.gsub(/%\{[^}]+\}/) do |tok|
      # Take the inside of the %{ ... }
      key = tok[2 ... -1]

      if key == "+%s"
        # Got %{+%s}, support for unix epoch time
        next @data["@timestamp"].to_i
      elsif key[0,1] == "+"
        # NOTE(review): this branch uses joda-time via the `org.` Java
        # namespace, so %{+FORMAT} tokens only work on JRuby — confirm before
        # relying on them under MRI.
        t = @data["@timestamp"]
        formatter = org.joda.time.format.DateTimeFormat.forPattern(key[1 .. -1])\
          .withZone(org.joda.time.DateTimeZone::UTC)
        #next org.joda.time.Instant.new(t.tv_sec * 1000 + t.tv_usec / 1000).toDateTime.toString(formatter)
        # Invoke a specific Instant constructor to avoid this warning in JRuby
        # > ambiguous Java methods found, using org.joda.time.Instant(long)
        org.joda.time.Instant.java_class.constructor(Java::long).new_instance(
          t.tv_sec * 1000 + t.tv_usec / 1000
        ).to_java.toDateTime.toString(formatter)
      else
        value = self[key]
        case value
        when nil
          tok # leave the %{foo} if this field does not exist in this event.
        when Array
          value.join(",") # Join by ',' if value is an array
        when Hash
          value.to_json # Convert hashes to json
        else
          value # otherwise return the value
        end # case value
      end # 'key' checking
    end # format.gsub...
  end # def sprintf

  # Shims to remove after event v1 is the default.
  def tags=(value); self["tags"] = value; end
  def tags; return self["tags"]; end
  def message=(value); self["message"] = value; end
  def source=(value); self["source"] = value; end
  def type=(value); self["type"] = value; end
  def type; return self["type"]; end
  def fields; return self.to_hash; end

  # Adds a tag to the "tags" field unless already present.
  def tag(value)
    # Generalize this method for more usability
    self["tags"] ||= []
    self["tags"] << value unless self["tags"].include?(value)
  end
end # class LogStash::Event
|
@@ -0,0 +1,15 @@
|
|
1
|
+
#$: << File.join(File.dirname(__FILE__), "..", "..", "vendor", "bundle")
|
2
|
+
|
3
|
+
module LogStash
  # Pre-declare the plugin namespaces so other files can reopen them
  # (e.g. `module LogStash::Inputs`) without worrying about load order.
  %w[Inputs Outputs Filters Search Config File Web Util PluginMixins].each do |name|
    const_set(name, Module.new)
  end

  # Sentinel pushed through pipelines to request shutdown.
  SHUTDOWN = :shutdown
end # module LogStash
|
@@ -0,0 +1,105 @@
|
|
1
|
+
require "logstash/namespace"
|
2
|
+
|
3
|
+
module LogStash
  module Util
    # Normalized host OS: "linux" on Linux, otherwise the raw
    # RbConfig host_os string (e.g. "darwin21").
    UNAME = case RbConfig::CONFIG["host_os"]
            when /^linux/ then "linux"
            else RbConfig::CONFIG["host_os"]
            end

    # prctl(2) operation code for setting the calling thread's name.
    PR_SET_NAME = 15

    # Names the current thread, keeping the Ruby, Java (JRuby), and OS
    # (Linux, via prctl) views of the name in sync.
    def self.set_thread_name(name)
      if RUBY_ENGINE == "jruby"
        # Keep java and ruby thread names in sync.
        Java::java.lang.Thread.currentThread.setName(name)
      end
      Thread.current[:name] = name

      if UNAME == "linux"
        require "logstash/util/prctl"
        # prctl PR_SET_NAME allows up to 16 bytes for a process name
        # since MRI 1.9, JRuby, and Rubinius use system threads for this.
        # Slice at most 16 characters (was name[0..16], which took 17 —
        # off by one against the stated 16-byte limit).
        LibC.prctl(PR_SET_NAME, name[0...16], 0, 0, 0)
      end
    end # def set_thread_name

    # Merge hash 'src' into 'dst' nondestructively
    #
    # Duplicate keys will become array values
    #
    # [ src["foo"], dst["foo"] ]
    #
    # Nested hashes are merged recursively; arrays are unioned (duplicates
    # removed); colliding scalars become a two-element array unless equal.
    # Returns dst (which is mutated in place).
    def self.hash_merge(dst, src)
      src.each do |name, svalue|
        if dst.include?(name)
          dvalue = dst[name]
          if dvalue.is_a?(Hash) && svalue.is_a?(Hash)
            dvalue = hash_merge(dvalue, svalue)
          elsif svalue.is_a?(Array)
            if dvalue.is_a?(Array)
              # merge arrays without duplicates.
              dvalue |= svalue
            else
              dvalue = [dvalue] | svalue
            end
          else
            if dvalue.is_a?(Array)
              dvalue << svalue unless dvalue.include?(svalue)
            else
              dvalue = [dvalue, svalue] unless dvalue == svalue
            end
          end

          dst[name] = dvalue
        else
          # dst doesn't have this key, just set it.
          dst[name] = svalue
        end
      end

      return dst
    end # def self.hash_merge

    # Merge hash 'src' into 'dst' nondestructively
    #
    # Duplicate keys will become array values
    # Arrays merged will simply be appended.
    #
    # [ src["foo"], dst["foo"] ]
    #
    # Like hash_merge, but colliding arrays are concatenated, so duplicate
    # elements are preserved. Returns dst (mutated in place).
    def self.hash_merge_with_dups(dst, src)
      src.each do |name, svalue|
        if dst.include?(name)
          dvalue = dst[name]
          if dvalue.is_a?(Hash) && svalue.is_a?(Hash)
            # NOTE(review): recurses into hash_merge (the dedup variant), so
            # nested hashes do NOT keep duplicates — confirm this asymmetry
            # is intended before changing it.
            dvalue = hash_merge(dvalue, svalue)
          elsif svalue.is_a?(Array)
            if dvalue.is_a?(Array)
              # Concatenate arrays, keeping duplicates.
              dvalue += svalue
            else
              dvalue = [dvalue] + svalue
            end
          else
            if dvalue.is_a?(Array)
              dvalue << svalue unless dvalue.include?(svalue)
            else
              dvalue = [dvalue, svalue] unless dvalue == svalue
            end
          end

          dst[name] = dvalue
        else
          # dst doesn't have this key, just set it.
          dst[name] = svalue
        end
      end

      return dst
    end # def self.hash_merge

    # Folds any number of hashes into a single new hash using
    # hash_merge_with_dups. The inputs are not mutated.
    def self.hash_merge_many(*hashes)
      dst = {}
      hashes.each do |hash|
        hash_merge_with_dups(dst, hash)
      end
      return dst
    end # def hash_merge_many
  end
end # module LogStash::Util
|