logstash-core 2.1.3-java → 2.2.0-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of logstash-core might be problematic. Click here for more details.

Files changed (71) hide show
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core.rb +1 -3
  3. data/lib/logstash-core/logstash-core.rb +3 -0
  4. data/lib/logstash-core/version.rb +8 -0
  5. data/lib/logstash/agent.rb +48 -20
  6. data/lib/logstash/codecs/base.rb +2 -2
  7. data/lib/logstash/config/config_ast.rb +8 -3
  8. data/lib/logstash/environment.rb +0 -16
  9. data/lib/logstash/filters/base.rb +9 -5
  10. data/lib/logstash/inputs/base.rb +1 -1
  11. data/lib/logstash/output_delegator.rb +150 -0
  12. data/lib/logstash/outputs/base.rb +37 -40
  13. data/lib/logstash/pipeline.rb +259 -178
  14. data/lib/logstash/pipeline_reporter.rb +114 -0
  15. data/lib/logstash/plugin.rb +1 -1
  16. data/lib/logstash/{shutdown_controller.rb → shutdown_watcher.rb} +10 -37
  17. data/lib/logstash/util.rb +17 -0
  18. data/lib/logstash/util/decorators.rb +14 -7
  19. data/lib/logstash/util/worker_threads_default_printer.rb +4 -4
  20. data/lib/logstash/util/wrapped_synchronous_queue.rb +41 -0
  21. data/lib/logstash/version.rb +10 -2
  22. data/locales/en.yml +8 -3
  23. data/logstash-core.gemspec +5 -3
  24. data/spec/{core/conditionals_spec.rb → conditionals_spec.rb} +0 -0
  25. data/spec/{core/config_spec.rb → logstash/config/config_ast_spec.rb} +0 -0
  26. data/spec/{core/config_cpu_core_strategy_spec.rb → logstash/config/cpu_core_strategy_spec.rb} +0 -0
  27. data/spec/{core/config_defaults_spec.rb → logstash/config/defaults_spec.rb} +0 -0
  28. data/spec/{core/config_mixin_spec.rb → logstash/config/mixin_spec.rb} +0 -0
  29. data/spec/{core → logstash}/environment_spec.rb +0 -0
  30. data/spec/{filters → logstash/filters}/base_spec.rb +0 -0
  31. data/spec/{inputs → logstash/inputs}/base_spec.rb +0 -0
  32. data/spec/{lib/logstash → logstash}/java_integration_spec.rb +0 -0
  33. data/spec/{util → logstash}/json_spec.rb +0 -0
  34. data/spec/logstash/output_delegator_spec.rb +126 -0
  35. data/spec/logstash/outputs/base_spec.rb +40 -0
  36. data/spec/logstash/pipeline_reporter_spec.rb +85 -0
  37. data/spec/{core → logstash}/pipeline_spec.rb +128 -16
  38. data/spec/{core → logstash}/plugin_spec.rb +47 -1
  39. data/spec/logstash/runner_spec.rb +68 -0
  40. data/spec/{core/shutdown_controller_spec.rb → logstash/shutdown_watcher_spec.rb} +17 -11
  41. data/spec/{util → logstash/util}/buftok_spec.rb +0 -0
  42. data/spec/{util → logstash/util}/charset_spec.rb +0 -0
  43. data/spec/{util → logstash/util}/defaults_printer_spec.rb +4 -4
  44. data/spec/{util → logstash/util}/java_version_spec.rb +0 -0
  45. data/spec/{util → logstash/util}/plugin_version_spec.rb +0 -0
  46. data/spec/{util → logstash/util}/unicode_trimmer_spec.rb +0 -0
  47. data/spec/{util → logstash/util}/worker_threads_default_printer_spec.rb +8 -8
  48. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +28 -0
  49. data/spec/{util_spec.rb → logstash/util_spec.rb} +0 -0
  50. metadata +74 -81
  51. data/lib/logstash/event.rb +0 -275
  52. data/lib/logstash/patches/bundler.rb +0 -36
  53. data/lib/logstash/sized_queue.rb +0 -8
  54. data/lib/logstash/string_interpolation.rb +0 -140
  55. data/lib/logstash/timestamp.rb +0 -97
  56. data/lib/logstash/util/accessors.rb +0 -123
  57. data/spec/core/event_spec.rb +0 -518
  58. data/spec/core/runner_spec.rb +0 -40
  59. data/spec/core/timestamp_spec.rb +0 -84
  60. data/spec/coverage_helper.rb +0 -24
  61. data/spec/lib/logstash/bundler_spec.rb +0 -121
  62. data/spec/license_spec.rb +0 -67
  63. data/spec/outputs/base_spec.rb +0 -26
  64. data/spec/plugin_manager/install_spec.rb +0 -28
  65. data/spec/plugin_manager/update_spec.rb +0 -39
  66. data/spec/plugin_manager/util_spec.rb +0 -71
  67. data/spec/spec_helper.rb +0 -11
  68. data/spec/util/accessors_spec.rb +0 -170
  69. data/spec/util/compress_spec.rb +0 -121
  70. data/spec/util/gemfile_spec.rb +0 -212
  71. data/spec/util/retryable_spec.rb +0 -139
@@ -1,36 +0,0 @@
1
- # encoding: utf-8
2
- # Bundler monkey patches
3
- module ::Bundler
4
- # Patch bundler to write a .lock file specific to the version of ruby.
5
- # This keeps MRI/JRuby/RBX from conflicting over the Gemfile.lock updates
6
- module SharedHelpers
7
- def default_lockfile
8
- ruby = "#{LogStash::Environment.ruby_engine}-#{LogStash::Environment.ruby_abi_version}"
9
- Pathname.new("#{default_gemfile}.#{ruby}.lock")
10
- end
11
- end
12
-
13
- # Patch to prevent Bundler to save a .bundle/config file in the root
14
- # of the application
15
- class Settings
16
- def set_key(key, value, hash, file)
17
- key = key_for(key)
18
-
19
- unless hash[key] == value
20
- hash[key] = value
21
- hash.delete(key) if value.nil?
22
- end
23
-
24
- value
25
- end
26
- end
27
-
28
- # Add the Bundler.reset! method which has been added in master but is not in 1.7.9.
29
- class << self
30
- unless self.method_defined?("reset!")
31
- def reset!
32
- @definition = nil
33
- end
34
- end
35
- end
36
- end
@@ -1,8 +0,0 @@
1
- # encoding: utf-8
2
- require "logstash/namespace"
3
- require "logstash/logging"
4
-
5
- require "thread" # for SizedQueue
6
- class LogStash::SizedQueue < SizedQueue
7
- # TODO(sissel): Soon will implement push/pop stats, etc
8
- end
@@ -1,140 +0,0 @@
1
- # encoding: utf-8
2
- require "thread_safe"
3
- require "forwardable"
4
-
5
- module LogStash
6
- module StringInterpolation
7
- extend self
8
-
9
- # Floats outside of these upper and lower bounds are forcibly converted
10
- # to scientific notation by Float#to_s
11
- MIN_FLOAT_BEFORE_SCI_NOT = 0.0001
12
- MAX_FLOAT_BEFORE_SCI_NOT = 1000000000000000.0
13
-
14
- CACHE = ThreadSafe::Cache.new
15
- TEMPLATE_TAG_REGEXP = /%\{[^}]+\}/
16
-
17
- def evaluate(event, template)
18
- if template.is_a?(Float) && (template < MIN_FLOAT_BEFORE_SCI_NOT || template >= MAX_FLOAT_BEFORE_SCI_NOT)
19
- return ("%.15f" % template).sub(/0*$/,"")
20
- end
21
-
22
- template = template.to_s
23
-
24
- return template if not_cachable?(template)
25
-
26
- compiled = CACHE.get_or_default(template, nil) || CACHE.put(template, compile_template(template))
27
- compiled.evaluate(event)
28
- end
29
-
30
- private
31
- def not_cachable?(template)
32
- template.index("%").nil?
33
- end
34
-
35
- def compile_template(template)
36
- nodes = Template.new
37
-
38
- position = 0
39
- matches = template.to_enum(:scan, TEMPLATE_TAG_REGEXP).map { |m| $~ }
40
-
41
- matches.each do |match|
42
- tag = match[0][2..-2]
43
- start = match.offset(0).first
44
- nodes << StaticNode.new(template[position..(start-1)]) if start > 0
45
- nodes << identify(tag)
46
- position = match.offset(0).last
47
- end
48
-
49
- if position < template.size
50
- nodes << StaticNode.new(template[position..-1])
51
- end
52
-
53
- optimize(nodes)
54
- end
55
-
56
- def optimize(nodes)
57
- nodes.size == 1 ? nodes.first : nodes
58
- end
59
-
60
- def identify(tag)
61
- if tag == "+%s"
62
- EpocNode.new
63
- elsif tag[0, 1] == "+"
64
- DateNode.new(tag[1..-1])
65
- else
66
- KeyNode.new(tag)
67
- end
68
- end
69
- end
70
-
71
- class Template
72
- extend Forwardable
73
- def_delegators :@nodes, :<<, :push, :size, :first
74
-
75
- def initialize
76
- @nodes = []
77
- end
78
-
79
- def evaluate(event)
80
- @nodes.collect { |node| node.evaluate(event) }.join
81
- end
82
- end
83
-
84
- class EpocNode
85
- def evaluate(event)
86
- t = event.timestamp
87
- raise LogStash::Error, "Unable to format in string \"#{@format}\", #{LogStash::Event::TIMESTAMP} field not found" unless t
88
- t.to_i.to_s
89
- end
90
- end
91
-
92
- class StaticNode
93
- def initialize(content)
94
- @content = content
95
- end
96
-
97
- def evaluate(event)
98
- @content
99
- end
100
- end
101
-
102
- class KeyNode
103
- def initialize(key)
104
- @key = key
105
- end
106
-
107
- def evaluate(event)
108
- value = event[@key]
109
-
110
- case value
111
- when nil
112
- "%{#{@key}}"
113
- when Array
114
- value.join(",")
115
- when Hash
116
- LogStash::Json.dump(value)
117
- else
118
- value
119
- end
120
- end
121
- end
122
-
123
- class DateNode
124
- def initialize(format)
125
- @format = format
126
- @formatter = org.joda.time.format.DateTimeFormat.forPattern(@format)
127
- .withZone(org.joda.time.DateTimeZone::UTC)
128
- end
129
-
130
- def evaluate(event)
131
- t = event.timestamp
132
-
133
- raise LogStash::Error, "Unable to format in string \"#{@format}\", #{LogStash::Event::TIMESTAMP} field not found" unless t
134
-
135
- org.joda.time.Instant.java_class.constructor(Java::long).new_instance(
136
- t.tv_sec * 1000 + t.tv_usec / 1000
137
- ).to_java.toDateTime.toString(@formatter)
138
- end
139
- end
140
- end
@@ -1,97 +0,0 @@
1
- # encoding: utf-8
2
- require "logstash/environment"
3
- require "logstash/json"
4
- require "forwardable"
5
- require "date"
6
- require "time"
7
-
8
- module LogStash
9
- class TimestampParserError < StandardError; end
10
-
11
- class Timestamp
12
- extend Forwardable
13
- include Comparable
14
-
15
- def_delegators :@time, :tv_usec, :usec, :year, :iso8601, :to_i, :tv_sec, :to_f, :to_edn, :<=>, :+
16
-
17
- attr_reader :time
18
-
19
- ISO8601_STRFTIME = "%04d-%02d-%02dT%02d:%02d:%02d.%06d%+03d:00".freeze
20
- ISO8601_PRECISION = 3
21
-
22
- def initialize(time = Time.new)
23
- @time = time.utc
24
- end
25
-
26
- def self.at(*args)
27
- Timestamp.new(::Time.at(*args))
28
- end
29
-
30
- def self.parse(*args)
31
- Timestamp.new(::Time.parse(*args))
32
- end
33
-
34
- def self.now
35
- Timestamp.new(::Time.now)
36
- end
37
-
38
- # coerce tries different strategies based on the time object class to convert into a Timestamp.
39
- # @param [String, Time, Timestamp] time the time object to try coerce
40
- # @return [Timestamp, nil] Timestamp will be returned if successful otherwise nil
41
- # @raise [TimestampParserError] on String with invalid format
42
- def self.coerce(time)
43
- case time
44
- when String
45
- LogStash::Timestamp.parse_iso8601(time)
46
- when LogStash::Timestamp
47
- time
48
- when Time
49
- LogStash::Timestamp.new(time)
50
- else
51
- nil
52
- end
53
- end
54
-
55
- if LogStash::Environment.jruby?
56
- JODA_ISO8601_PARSER = org.joda.time.format.ISODateTimeFormat.dateTimeParser
57
- UTC = org.joda.time.DateTimeZone.forID("UTC")
58
-
59
- def self.parse_iso8601(t)
60
- millis = JODA_ISO8601_PARSER.parseMillis(t)
61
- LogStash::Timestamp.at(millis / 1000, (millis % 1000) * 1000)
62
- rescue => e
63
- raise(TimestampParserError, "invalid timestamp string #{t.inspect}, error=#{e.inspect}")
64
- end
65
-
66
- else
67
-
68
- def self.parse_iso8601(t)
69
- # warning, ruby's Time.parse is *really* terrible and slow.
70
- LogStash::Timestamp.new(::Time.parse(t))
71
- rescue => e
72
- raise(TimestampParserError, "invalid timestamp string #{t.inspect}, error=#{e.inspect}")
73
- end
74
- end
75
-
76
- def utc
77
- @time.utc # modifies the receiver
78
- self
79
- end
80
- alias_method :gmtime, :utc
81
-
82
- def to_json(*args)
83
- # ignore arguments to respect accepted to_json method signature
84
- "\"" + to_iso8601 + "\""
85
- end
86
- alias_method :inspect, :to_json
87
-
88
- def to_iso8601
89
- @iso8601 ||= @time.iso8601(ISO8601_PRECISION)
90
- end
91
- alias_method :to_s, :to_iso8601
92
-
93
- def -(value)
94
- @time - (value.is_a?(Timestamp) ? value.time : value)
95
- end
96
- end
97
- end
@@ -1,123 +0,0 @@
1
- # encoding: utf-8
2
- require "logstash/namespace"
3
- require "logstash/util"
4
- require "thread_safe"
5
-
6
- module LogStash::Util
7
-
8
- # PathCache is a singleton which globally caches the relation between a field reference and its
9
- # decomposition into a [key, path array] tuple. For example the field reference [foo][bar][baz]
10
- # is decomposed into ["baz", ["foo", "bar"]].
11
- module PathCache
12
- extend self
13
-
14
- # requiring libraries and defining constants is thread safe in JRuby so
15
- # PathCache::CACHE will be correctly initialized, once, when accessors.rb
16
- # will be first required
17
- CACHE = ThreadSafe::Cache.new
18
-
19
- def get(field_reference)
20
- # the "get_or_default(x, nil) || put(x, parse(x))" is ~2x faster than "get || put" because the get call is
21
- # proxied through the JRuby JavaProxy op_aref method. the correct idiom here would be to use
22
- # "compute_if_absent(x){parse(x)}" but because of the closure creation, it is ~1.5x slower than
23
- # "get_or_default || put".
24
- # this "get_or_default || put" is obviously non-atomic which is not really important here
25
- # since all threads will set the same value and this cache will stabilize very quickly after the first
26
- # few events.
27
- CACHE.get_or_default(field_reference, nil) || CACHE.put(field_reference, parse(field_reference))
28
- end
29
-
30
- def parse(field_reference)
31
- path = field_reference.split(/[\[\]]/).select{|s| !s.empty?}
32
- [path.pop, path]
33
- end
34
- end
35
-
36
- # Accessors uses a lookup table to speedup access of a field reference of the form
37
- # "[hello][world]" to the underlying store hash into {"hello" => {"world" => "foo"}}
38
- class Accessors
39
-
40
- # @param store [Hash] the backing data store field references point to
41
- def initialize(store)
42
- @store = store
43
-
44
- # @lut is a lookup table between a field reference and a [target, key] tuple
45
- # where target is the containing Hash or Array for key in @store.
46
- # this allows us to directly access the containing object for key instead of
47
- # walking the field reference path into the inner @store objects
48
- @lut = {}
49
- end
50
-
51
- # @param field_reference [String] the field reference
52
- # @return [Object] the value in @store for this field reference
53
- def get(field_reference)
54
- target, key = lookup(field_reference)
55
- return nil unless target
56
- target.is_a?(Array) ? target[key.to_i] : target[key]
57
- end
58
-
59
- # @param field_reference [String] the field reference
60
- # @param value [Object] the value to set in @store for this field reference
61
- # @return [Object] the value set
62
- def set(field_reference, value)
63
- target, key = lookup_or_create(field_reference)
64
- target[target.is_a?(Array) ? key.to_i : key] = value
65
- end
66
-
67
- # @param field_reference [String] the field reference to remove
68
- # @return [Object] the removed value in @store for this field reference
69
- def del(field_reference)
70
- target, key = lookup(field_reference)
71
- return nil unless target
72
- target.is_a?(Array) ? target.delete_at(key.to_i) : target.delete(key)
73
- end
74
-
75
- # @param field_reference [String] the field reference to test for inclusion in the store
76
- # @return [Boolean] true if the store contains a value for this field reference
77
- def include?(field_reference)
78
- target, key = lookup(field_reference)
79
- return false unless target
80
-
81
- target.is_a?(Array) ? !target[key.to_i].nil? : target.include?(key)
82
- end
83
-
84
- private
85
-
86
- # retrieve the [target, key] tuple associated with this field reference
87
- # @param field_reference [String] the field reference
88
- # @return [[Object, String]] the [target, key] tuple associated with this field reference
89
- def lookup(field_reference)
90
- @lut[field_reference] ||= find_target(field_reference)
91
- end
92
-
93
- # retrieve the [target, key] tuple associated with this field reference and create inner
94
- # container objects if they do not exist
95
- # @param field_reference [String] the field reference
96
- # @return [[Object, String]] the [target, key] tuple associated with this field reference
97
- def lookup_or_create(field_reference)
98
- @lut[field_reference] ||= find_or_create_target(field_reference)
99
- end
100
-
101
- # find the target container object in store for this field reference
102
- # @param field_reference [String] the field reference
103
- # @return [Object] the target container object in store associated with this field reference
104
- def find_target(field_reference)
105
- key, path = PathCache.get(field_reference)
106
- target = path.inject(@store) do |r, k|
107
- return nil unless r
108
- r[r.is_a?(Array) ? k.to_i : k]
109
- end
110
- target ? [target, key] : nil
111
- end
112
-
113
- # find the target container object in store for this field reference and create inner
114
- # container objects if they do not exist
115
- # @param field_reference [String] the field reference
116
- # @return [Object] the target container object in store associated with this field reference
117
- def find_or_create_target(accessor)
118
- key, path = PathCache.get(accessor)
119
- target = path.inject(@store) {|r, k| r[r.is_a?(Array) ? k.to_i : k] ||= {}}
120
- [target, key]
121
- end
122
- end # class Accessors
123
- end # module LogStash::Util
@@ -1,518 +0,0 @@
1
- # encoding: utf-8
2
- require "spec_helper"
3
-
4
- describe LogStash::Event do
5
-
6
- shared_examples "all event tests" do
7
- context "[]=" do
8
- it "should raise an exception if you attempt to set @timestamp to a value type other than a Time object" do
9
- expect{subject["@timestamp"] = "crash!"}.to raise_error(TypeError)
10
- end
11
-
12
- it "should assign simple fields" do
13
- expect(subject["foo"]).to be_nil
14
- expect(subject["foo"] = "bar").to eq("bar")
15
- expect(subject["foo"]).to eq("bar")
16
- end
17
-
18
- it "should overwrite simple fields" do
19
- expect(subject["foo"]).to be_nil
20
- expect(subject["foo"] = "bar").to eq("bar")
21
- expect(subject["foo"]).to eq("bar")
22
-
23
- expect(subject["foo"] = "baz").to eq("baz")
24
- expect(subject["foo"]).to eq("baz")
25
- end
26
-
27
- it "should assign deep fields" do
28
- expect(subject["[foo][bar]"]).to be_nil
29
- expect(subject["[foo][bar]"] = "baz").to eq("baz")
30
- expect(subject["[foo][bar]"]).to eq("baz")
31
- end
32
-
33
- it "should overwrite deep fields" do
34
- expect(subject["[foo][bar]"]).to be_nil
35
- expect(subject["[foo][bar]"] = "baz").to eq("baz")
36
- expect(subject["[foo][bar]"]).to eq("baz")
37
-
38
- expect(subject["[foo][bar]"] = "zab").to eq("zab")
39
- expect(subject["[foo][bar]"]).to eq("zab")
40
- end
41
-
42
- it "allow to set the @metadata key to a hash" do
43
- subject["@metadata"] = { "action" => "index" }
44
- expect(subject["[@metadata][action]"]).to eq("index")
45
- end
46
- end
47
-
48
- context "#sprintf" do
49
- it "should report a unix timestamp for %{+%s}" do
50
- expect(subject.sprintf("%{+%s}")).to eq("1356998400")
51
- end
52
-
53
- it "should work if there is no fieldref in the string" do
54
- expect(subject.sprintf("bonjour")).to eq("bonjour")
55
- end
56
-
57
- it "should raise error when formatting %{+%s} when @timestamp field is missing" do
58
- str = "hello-%{+%s}"
59
- subj = subject.clone
60
- subj.remove("[@timestamp]")
61
- expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
62
- end
63
-
64
- it "should report a time with %{+format} syntax", :if => RUBY_ENGINE == "jruby" do
65
- expect(subject.sprintf("%{+YYYY}")).to eq("2013")
66
- expect(subject.sprintf("%{+MM}")).to eq("01")
67
- expect(subject.sprintf("%{+HH}")).to eq("00")
68
- end
69
-
70
- it "should support mixed string" do
71
- expect(subject.sprintf("foo %{+YYYY-MM-dd} %{type}")).to eq("foo 2013-01-01 sprintf")
72
- end
73
-
74
- it "should raise error with %{+format} syntax when @timestamp field is missing", :if => RUBY_ENGINE == "jruby" do
75
- str = "logstash-%{+YYYY}"
76
- subj = subject.clone
77
- subj.remove("[@timestamp]")
78
- expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
79
- end
80
-
81
- it "should report fields with %{field} syntax" do
82
- expect(subject.sprintf("%{type}")).to eq("sprintf")
83
- expect(subject.sprintf("%{message}")).to eq(subject["message"])
84
- end
85
-
86
- it "should print deep fields" do
87
- expect(subject.sprintf("%{[j][k1]}")).to eq("v")
88
- expect(subject.sprintf("%{[j][k2][0]}")).to eq("w")
89
- end
90
-
91
- it "should be able to take a non-string for the format" do
92
- expect(subject.sprintf(2)).to eq("2")
93
- end
94
-
95
- it "should allow to use the metadata when calling #sprintf" do
96
- expect(subject.sprintf("super-%{[@metadata][fancy]}")).to eq("super-pants")
97
- end
98
-
99
- it "should allow to use nested hash from the metadata field" do
100
- expect(subject.sprintf("%{[@metadata][have-to-go][deeper]}")).to eq("inception")
101
- end
102
-
103
- it "should return a json string if the key is a hash" do
104
- expect(subject.sprintf("%{[j][k3]}")).to eq("{\"4\":\"m\"}")
105
- end
106
-
107
- it "should not strip last character" do
108
- expect(subject.sprintf("%{type}%{message}|")).to eq("sprintfhello world|")
109
- end
110
-
111
- context "#encoding" do
112
- it "should return known patterns as UTF-8" do
113
- expect(subject.sprintf("%{message}").encoding).to eq(Encoding::UTF_8)
114
- end
115
-
116
- it "should return unknown patterns as UTF-8" do
117
- expect(subject.sprintf("%{unkown_pattern}").encoding).to eq(Encoding::UTF_8)
118
- end
119
- end
120
- end
121
-
122
- context "#[]" do
123
- it "should fetch data" do
124
- expect(subject["type"]).to eq("sprintf")
125
- end
126
- it "should fetch fields" do
127
- expect(subject["a"]).to eq("b")
128
- expect(subject['c']['d']).to eq("f")
129
- end
130
- it "should fetch deep fields" do
131
- expect(subject["[j][k1]"]).to eq("v")
132
- expect(subject["[c][d]"]).to eq("f")
133
- expect(subject['[f][g][h]']).to eq("i")
134
- expect(subject['[j][k3][4]']).to eq("m")
135
- expect(subject['[j][5]']).to eq(7)
136
-
137
- end
138
-
139
- it "should be fast?", :performance => true do
140
- count = 1000000
141
- 2.times do
142
- start = Time.now
143
- count.times { subject["[j][k1]"] }
144
- duration = Time.now - start
145
- puts "event #[] rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
146
- end
147
- end
148
- end
149
-
150
- context "#include?" do
151
- it "should include existing fields" do
152
- expect(subject.include?("c")).to eq(true)
153
- expect(subject.include?("[c][d]")).to eq(true)
154
- expect(subject.include?("[j][k4][0][nested]")).to eq(true)
155
- end
156
-
157
- it "should include field with nil value" do
158
- expect(subject.include?("nilfield")).to eq(true)
159
- end
160
-
161
- it "should include @metadata field" do
162
- expect(subject.include?("@metadata")).to eq(true)
163
- end
164
-
165
- it "should include field within @metadata" do
166
- expect(subject.include?("[@metadata][fancy]")).to eq(true)
167
- end
168
-
169
- it "should not include non-existing fields" do
170
- expect(subject.include?("doesnotexist")).to eq(false)
171
- expect(subject.include?("[j][doesnotexist]")).to eq(false)
172
- expect(subject.include?("[tag][0][hello][yes]")).to eq(false)
173
- end
174
-
175
- it "should include within arrays" do
176
- expect(subject.include?("[tags][0]")).to eq(true)
177
- expect(subject.include?("[tags][1]")).to eq(false)
178
- end
179
- end
180
-
181
- context "#overwrite" do
182
- it "should swap data with new content" do
183
- new_event = LogStash::Event.new(
184
- "type" => "new",
185
- "message" => "foo bar",
186
- )
187
- subject.overwrite(new_event)
188
-
189
- expect(subject["message"]).to eq("foo bar")
190
- expect(subject["type"]).to eq("new")
191
-
192
- ["tags", "source", "a", "c", "f", "j"].each do |field|
193
- expect(subject[field]).to be_nil
194
- end
195
- end
196
- end
197
-
198
- context "#append" do
199
- it "should append strings to an array" do
200
- subject.append(LogStash::Event.new("message" => "another thing"))
201
- expect(subject["message"]).to eq([ "hello world", "another thing" ])
202
- end
203
-
204
- it "should concatenate tags" do
205
- subject.append(LogStash::Event.new("tags" => [ "tag2" ]))
206
- # added to_a for when array is a Java Collection when produced from json input
207
- # TODO: we have to find a better way to handle this in tests. maybe override
208
- # rspec eq or == to do an explicit to_a when comparing arrays?
209
- expect(subject["tags"].to_a).to eq([ "tag1", "tag2" ])
210
- end
211
-
212
- context "when event field is nil" do
213
- it "should add single value as string" do
214
- subject.append(LogStash::Event.new({"field1" => "append1"}))
215
- expect(subject[ "field1" ]).to eq("append1")
216
- end
217
- it "should add multi values as array" do
218
- subject.append(LogStash::Event.new({"field1" => [ "append1","append2" ]}))
219
- expect(subject[ "field1" ]).to eq([ "append1","append2" ])
220
- end
221
- end
222
-
223
- context "when event field is a string" do
224
- before { subject[ "field1" ] = "original1" }
225
-
226
- it "should append string to values, if different from current" do
227
- subject.append(LogStash::Event.new({"field1" => "append1"}))
228
- expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
229
- end
230
- it "should not change value, if appended value is equal current" do
231
- subject.append(LogStash::Event.new({"field1" => "original1"}))
232
- expect(subject[ "field1" ]).to eq("original1")
233
- end
234
- it "should concatenate values in an array" do
235
- subject.append(LogStash::Event.new({"field1" => [ "append1" ]}))
236
- expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
237
- end
238
- it "should join array, removing duplicates" do
239
- subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
240
- expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
241
- end
242
- end
243
- context "when event field is an array" do
244
- before { subject[ "field1" ] = [ "original1", "original2" ] }
245
-
246
- it "should append string values to array, if not present in array" do
247
- subject.append(LogStash::Event.new({"field1" => "append1"}))
248
- expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
249
- end
250
- it "should not append string values, if the array already contains it" do
251
- subject.append(LogStash::Event.new({"field1" => "original1"}))
252
- expect(subject[ "field1" ]).to eq([ "original1", "original2" ])
253
- end
254
- it "should join array, removing duplicates" do
255
- subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
256
- expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
257
- end
258
- end
259
- end
260
-
261
- it "timestamp parsing speed", :performance => true do
262
- warmup = 10000
263
- count = 1000000
264
-
265
- data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
266
- event = LogStash::Event.new(data)
267
- expect(event["@timestamp"]).to be_a(LogStash::Timestamp)
268
-
269
- duration = 0
270
- [warmup, count].each do |i|
271
- start = Time.now
272
- i.times do
273
- data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
274
- LogStash::Event.new(data.clone)
275
- end
276
- duration = Time.now - start
277
- end
278
- puts "event @timestamp parse rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
279
- end
280
-
281
- context "acceptable @timestamp formats" do
282
- subject { LogStash::Event.new }
283
-
284
- formats = [
285
- "YYYY-MM-dd'T'HH:mm:ss.SSSZ",
286
- "YYYY-MM-dd'T'HH:mm:ss.SSSSSSZ",
287
- "YYYY-MM-dd'T'HH:mm:ss.SSS",
288
- "YYYY-MM-dd'T'HH:mm:ss",
289
- "YYYY-MM-dd'T'HH:mm:ssZ",
290
- ]
291
- formats.each do |format|
292
- it "includes #{format}" do
293
- time = subject.sprintf("%{+#{format}}")
294
- begin
295
- LogStash::Event.new("@timestamp" => time)
296
- rescue => e
297
- raise StandardError, "Time '#{time}' was rejected. #{e.class}: #{e.to_s}"
298
- end
299
- end
300
- end
301
-
302
- context "from LOGSTASH-1738" do
303
- it "does not error" do
304
- LogStash::Event.new("@timestamp" => "2013-12-29T23:12:52.371240+02:00")
305
- end
306
- end
307
-
308
- context "from LOGSTASH-1732" do
309
- it "does not error" do
310
- LogStash::Event.new("@timestamp" => "2013-12-27T11:07:25+00:00")
311
- end
312
- end
313
- end
314
-
315
- context "timestamp initialization" do
316
- let(:logger) { double("logger") }
317
-
318
- it "should coerce timestamp" do
319
- t = Time.iso8601("2014-06-12T00:12:17.114Z")
320
- expect(LogStash::Timestamp).to receive(:coerce).exactly(3).times.and_call_original
321
- expect(LogStash::Event.new("@timestamp" => t).timestamp.to_i).to eq(t.to_i)
322
- expect(LogStash::Event.new("@timestamp" => LogStash::Timestamp.new(t)).timestamp.to_i).to eq(t.to_i)
323
- expect(LogStash::Event.new("@timestamp" => "2014-06-12T00:12:17.114Z").timestamp.to_i).to eq(t.to_i)
324
- end
325
-
326
- it "should assign current time when no timestamp" do
327
- ts = LogStash::Timestamp.now
328
- expect(LogStash::Timestamp).to receive(:now).and_return(ts)
329
- expect(LogStash::Event.new({}).timestamp.to_i).to eq(ts.to_i)
330
- end
331
-
332
- it "should tag and warn for invalid value" do
333
- ts = LogStash::Timestamp.now
334
- expect(LogStash::Timestamp).to receive(:now).twice.and_return(ts)
335
- expect(LogStash::Event::LOGGER).to receive(:warn).twice
336
-
337
- event = LogStash::Event.new("@timestamp" => :foo)
338
- expect(event.timestamp.to_i).to eq(ts.to_i)
339
- expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
340
- expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(:foo)
341
-
342
- event = LogStash::Event.new("@timestamp" => 666)
343
- expect(event.timestamp.to_i).to eq(ts.to_i)
344
- expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
345
- expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(666)
346
- end
347
-
348
- it "should tag and warn for invalid string format" do
349
- ts = LogStash::Timestamp.now
350
- expect(LogStash::Timestamp).to receive(:now).and_return(ts)
351
- expect(LogStash::Event::LOGGER).to receive(:warn)
352
-
353
- event = LogStash::Event.new("@timestamp" => "foo")
354
- expect(event.timestamp.to_i).to eq(ts.to_i)
355
- expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
356
- expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq("foo")
357
- end
358
- end
359
-
360
- context "to_json" do
361
- it "should support to_json" do
362
- new_event = LogStash::Event.new(
363
- "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
364
- "message" => "foo bar",
365
- )
366
- json = new_event.to_json
367
-
368
- expect(json).to eq( "{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}")
369
- end
370
-
371
- it "should support to_json and ignore arguments" do
372
- new_event = LogStash::Event.new(
373
- "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
374
- "message" => "foo bar",
375
- )
376
- json = new_event.to_json(:foo => 1, :bar => "baz")
377
-
378
- expect(json).to eq( "{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}")
379
- end
380
- end
381
-
382
- context "metadata" do
383
- context "with existing metadata" do
384
- subject { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => "pants" }) }
385
-
386
- it "should not include metadata in to_hash" do
387
- expect(subject.to_hash.keys).not_to include("@metadata")
388
-
389
- # 'hello', '@timestamp', and '@version'
390
- expect(subject.to_hash.keys.count).to eq(3)
391
- end
392
-
393
- it "should still allow normal field access" do
394
- expect(subject["hello"]).to eq("world")
395
- end
396
- end
397
-
398
- context "with set metadata" do
399
- let(:fieldref) { "[@metadata][foo][bar]" }
400
- let(:value) { "bar" }
401
- subject { LogStash::Event.new("normal" => "normal") }
402
- before do
403
- # Verify the test is configured correctly.
404
- expect(fieldref).to start_with("[@metadata]")
405
-
406
- # Set it.
407
- subject[fieldref] = value
408
- end
409
-
410
- it "should still allow normal field access" do
411
- expect(subject["normal"]).to eq("normal")
412
- end
413
-
414
- it "should allow getting" do
415
- expect(subject[fieldref]).to eq(value)
416
- end
417
-
418
- it "should be hidden from .to_json" do
419
- require "json"
420
- obj = JSON.parse(subject.to_json)
421
- expect(obj).not_to include("@metadata")
422
- end
423
-
424
- it "should be hidden from .to_hash" do
425
- expect(subject.to_hash).not_to include("@metadata")
426
- end
427
-
428
- it "should be accessible through #to_hash_with_metadata" do
429
- obj = subject.to_hash_with_metadata
430
- expect(obj).to include("@metadata")
431
- expect(obj["@metadata"]["foo"]["bar"]).to eq(value)
432
- end
433
- end
434
-
435
- context "with no metadata" do
436
- subject { LogStash::Event.new("foo" => "bar") }
437
- it "should have no metadata" do
438
- expect(subject["@metadata"]).to be_empty
439
- end
440
- it "should still allow normal field access" do
441
- expect(subject["foo"]).to eq("bar")
442
- end
443
-
444
- it "should not include the @metadata key" do
445
- expect(subject.to_hash_with_metadata).not_to include("@metadata")
446
- end
447
- end
448
- end
449
-
450
- context "signal events" do
451
- it "should define the shutdown event" do
452
- # the SHUTDOWN and FLUSH constants are part of the plugin API contract
453
- # if they are changed, all plugins must be updated
454
- expect(LogStash::SHUTDOWN).to be_a(LogStash::ShutdownEvent)
455
- expect(LogStash::FLUSH).to be_a(LogStash::FlushEvent)
456
- end
457
- end
458
- end
459
-
460
- let(:event_hash) do
461
- {
462
- "@timestamp" => "2013-01-01T00:00:00.000Z",
463
- "type" => "sprintf",
464
- "message" => "hello world",
465
- "tags" => [ "tag1" ],
466
- "source" => "/home/foo",
467
- "a" => "b",
468
- "c" => {
469
- "d" => "f",
470
- "e" => {"f" => "g"}
471
- },
472
- "f" => { "g" => { "h" => "i" } },
473
- "j" => {
474
- "k1" => "v",
475
- "k2" => [ "w", "x" ],
476
- "k3" => {"4" => "m"},
477
- "k4" => [ {"nested" => "cool"} ],
478
- 5 => 6,
479
- "5" => 7
480
- },
481
- "nilfield" => nil,
482
- "@metadata" => { "fancy" => "pants", "have-to-go" => { "deeper" => "inception" } }
483
- }
484
- end
485
-
486
- describe "using normal hash input" do
487
- it_behaves_like "all event tests" do
488
- subject{LogStash::Event.new(event_hash)}
489
- end
490
- end
491
-
492
- describe "using hash input from deserialized json" do
493
- # this is to test the case when JrJackson deserialises Json and produces
494
- # native Java Collections objects for efficiency
495
- it_behaves_like "all event tests" do
496
- subject{LogStash::Event.new(LogStash::Json.load(LogStash::Json.dump(event_hash)))}
497
- end
498
- end
499
-
500
-
501
- describe "#to_s" do
502
- let(:timestamp) { LogStash::Timestamp.new }
503
- let(:event1) { LogStash::Event.new({ "@timestamp" => timestamp, "host" => "foo", "message" => "bar"}) }
504
- let(:event2) { LogStash::Event.new({ "host" => "bar", "message" => "foo"}) }
505
-
506
- it "should cache only one template" do
507
- LogStash::StringInterpolation::CACHE.clear
508
- expect {
509
- event1.to_s
510
- event2.to_s
511
- }.to change { LogStash::StringInterpolation::CACHE.size }.by(1)
512
- end
513
-
514
- it "return the string containing the timestamp, the host and the message" do
515
- expect(event1.to_s).to eq("#{timestamp.to_iso8601} #{event1["host"]} #{event1["message"]}")
516
- end
517
- end
518
- end