logstash-core 5.3.3-java → 5.4.0-java

Sign up to get free protection for your applications and to get access to all the features.
Files changed (85) hide show
  1. checksums.yaml +4 -4
  2. data/gemspec_jars.rb +2 -0
  3. data/lib/logstash-core/logstash-core.jar +0 -0
  4. data/lib/logstash-core/version.rb +1 -1
  5. data/lib/logstash-core_jars.rb +4 -0
  6. data/lib/logstash/agent.rb +15 -6
  7. data/lib/logstash/api/modules/base.rb +1 -1
  8. data/lib/logstash/api/rack_app.rb +1 -1
  9. data/lib/logstash/config/config_ast.rb +13 -13
  10. data/lib/logstash/config/mixin.rb +33 -28
  11. data/lib/logstash/environment.rb +11 -0
  12. data/lib/logstash/event.rb +56 -0
  13. data/lib/logstash/event_dispatcher.rb +2 -2
  14. data/lib/logstash/execution_context.rb +10 -0
  15. data/lib/logstash/filter_delegator.rb +3 -2
  16. data/lib/logstash/inputs/base.rb +15 -1
  17. data/lib/logstash/instrument/collector.rb +1 -1
  18. data/lib/logstash/instrument/metric.rb +4 -2
  19. data/lib/logstash/instrument/metric_store.rb +9 -5
  20. data/lib/logstash/instrument/null_metric.rb +1 -0
  21. data/lib/logstash/instrument/periodic_poller/cgroup.rb +3 -3
  22. data/lib/logstash/instrument/periodic_poller/jvm.rb +11 -8
  23. data/lib/logstash/instrument/periodic_poller/load_average.rb +4 -2
  24. data/lib/logstash/instrument/wrapped_write_client.rb +59 -0
  25. data/lib/logstash/java_integration.rb +2 -2
  26. data/lib/logstash/output_delegator.rb +2 -2
  27. data/lib/logstash/output_delegator_strategies/legacy.rb +5 -2
  28. data/lib/logstash/output_delegator_strategies/shared.rb +2 -1
  29. data/lib/logstash/output_delegator_strategies/single.rb +2 -1
  30. data/lib/logstash/outputs/base.rb +8 -0
  31. data/lib/logstash/patches/cabin.rb +1 -1
  32. data/lib/logstash/patches/stronger_openssl_defaults.rb +1 -1
  33. data/lib/logstash/pipeline.rb +47 -19
  34. data/lib/logstash/plugin.rb +3 -1
  35. data/lib/logstash/plugins/hooks_registry.rb +6 -6
  36. data/lib/logstash/plugins/registry.rb +2 -2
  37. data/lib/logstash/queue_factory.rb +7 -5
  38. data/lib/logstash/runner.rb +15 -1
  39. data/lib/logstash/settings.rb +14 -2
  40. data/lib/logstash/string_interpolation.rb +18 -0
  41. data/lib/logstash/timestamp.rb +27 -0
  42. data/lib/logstash/util.rb +1 -1
  43. data/lib/logstash/util/prctl.rb +1 -1
  44. data/lib/logstash/util/retryable.rb +1 -1
  45. data/lib/logstash/util/wrapped_acked_queue.rb +53 -22
  46. data/lib/logstash/util/wrapped_synchronous_queue.rb +51 -33
  47. data/lib/logstash/version.rb +1 -1
  48. data/locales/en.yml +4 -2
  49. data/logstash-core.gemspec +0 -3
  50. data/spec/api/lib/api/node_stats_spec.rb +2 -1
  51. data/spec/api/spec_helper.rb +1 -1
  52. data/spec/logstash/acked_queue_concurrent_stress_spec.rb +291 -0
  53. data/spec/logstash/agent_spec.rb +24 -0
  54. data/spec/logstash/config/mixin_spec.rb +11 -2
  55. data/spec/logstash/event_dispatcher_spec.rb +8 -1
  56. data/spec/logstash/event_spec.rb +346 -0
  57. data/spec/logstash/execution_context_spec.rb +13 -0
  58. data/spec/logstash/filter_delegator_spec.rb +4 -2
  59. data/spec/logstash/inputs/base_spec.rb +41 -0
  60. data/spec/logstash/instrument/metric_spec.rb +2 -1
  61. data/spec/logstash/instrument/metric_store_spec.rb +14 -0
  62. data/spec/logstash/instrument/namespaced_metric_spec.rb +2 -1
  63. data/spec/logstash/instrument/periodic_poller/cgroup_spec.rb +1 -1
  64. data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +35 -0
  65. data/spec/logstash/instrument/periodic_poller/load_average_spec.rb +1 -5
  66. data/spec/logstash/instrument/wrapped_write_client_spec.rb +113 -0
  67. data/spec/logstash/json_spec.rb +1 -1
  68. data/spec/logstash/legacy_ruby_event_spec.rb +636 -0
  69. data/spec/logstash/legacy_ruby_timestamp_spec.rb +170 -0
  70. data/spec/logstash/output_delegator_spec.rb +6 -3
  71. data/spec/logstash/outputs/base_spec.rb +23 -0
  72. data/spec/logstash/pipeline_pq_file_spec.rb +18 -8
  73. data/spec/logstash/pipeline_spec.rb +41 -5
  74. data/spec/logstash/plugin_spec.rb +15 -3
  75. data/spec/logstash/plugins/hooks_registry_spec.rb +2 -2
  76. data/spec/logstash/runner_spec.rb +33 -2
  77. data/spec/logstash/settings/port_range_spec.rb +1 -1
  78. data/spec/logstash/settings_spec.rb +21 -0
  79. data/spec/logstash/timestamp_spec.rb +29 -0
  80. data/spec/logstash/util/accessors_spec.rb +179 -0
  81. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +4 -11
  82. data/spec/logstash/util_spec.rb +1 -1
  83. data/spec/logstash/webserver_spec.rb +1 -1
  84. data/spec/support/mocks_classes.rb +65 -53
  85. metadata +25 -30
@@ -11,4 +11,4 @@
11
11
  # eventually this file should be in the root logstash lib dir and dependencies in logstash-core should be
12
12
  # fixed.
13
13
 
14
- LOGSTASH_VERSION = "5.3.3"
14
+ LOGSTASH_VERSION = "5.4.0"
@@ -71,9 +71,9 @@ en:
71
71
  Logstash is not able to start since configuration auto reloading was enabled but the configuration contains plugins that don't support it. Quitting...
72
72
  web_api:
73
73
  cant_bind_to_port: |-
74
- Logstash tried to bind to port %{port}, but the port is already in use. You can specify a new port by launching logtash with the --http.port option."
74
+ Logstash tried to bind to port %{port}, but the port is already in use. You can specify a new port by launching logstash with the --http.port option."
75
75
  cant_bind_to_port_in_range: |-
76
- Logstash tried to bind to port range %{http_ports}, but all the ports are already in use. You can specify a new port by launching logtash with the --http.port option."
76
+ Logstash tried to bind to port range %{http_ports}, but all the ports are already in use. You can specify a new port by launching logstash with the --http.port option."
77
77
  hot_threads:
78
78
  title: |-
79
79
  ::: {%{hostname}}
@@ -98,6 +98,8 @@ en:
98
98
  the '-f yourlogstash.conf' flag?
99
99
  reload-without-config-path: >-
100
100
  Configuration reloading also requires passing a configuration path with '-f yourlogstash.conf'
101
+ locked-data-path: >-
102
+ Logstash could not be started because there is already another instance using the configured data directory. If you wish to run multiple instances, you must change the "path.data" setting.
101
103
  invalid-shell: >-
102
104
  Invalid option for interactive Ruby shell. Use either "irb" or "pry"
103
105
  configtest-flag-information: |-
@@ -19,9 +19,6 @@ Gem::Specification.new do |gem|
19
19
 
20
20
  gem.platform = "java"
21
21
 
22
- gem.add_runtime_dependency "logstash-core-event-java", LOGSTASH_CORE_VERSION
23
- gem.add_runtime_dependency "logstash-core-queue-jruby", LOGSTASH_CORE_VERSION
24
-
25
22
  gem.add_runtime_dependency "pry", "~> 0.10.1" #(Ruby license)
26
23
  gem.add_runtime_dependency "stud", "~> 0.0.19" #(Apache 2.0 license)
27
24
  gem.add_runtime_dependency "clamp", "~> 0.6.5" #(MIT license) for command line args/flags
@@ -78,7 +78,8 @@ describe LogStash::Api::Modules::NodeStats do
78
78
  "duration_in_millis" => Numeric,
79
79
  "in" => Numeric,
80
80
  "filtered" => Numeric,
81
- "out" => Numeric
81
+ "out" => Numeric,
82
+ "queue_push_duration_in_millis" => Numeric
82
83
  }
83
84
  },
84
85
  "reloads" => {
@@ -31,7 +31,7 @@ end
31
31
  ##
32
32
  # Class used to wrap and manage the execution of an agent for test,
33
33
  # this helps a lot in order to have a more integrated test for the
34
- # web api, could be also used for other use cases if generalized enought
34
+ # web api, could be also used for other use cases if generalized enough
35
35
  ##
36
36
  class LogStashRunner
37
37
 
@@ -0,0 +1,291 @@
1
+ # encoding: utf-8
2
+ require "logstash/util/wrapped_acked_queue"
3
+ require "logstash/event"
4
+ require "logstash/instrument/namespaced_metric"
5
+
6
+ describe LogStash::Util::WrappedAckedQueue, :stress_test => true do
7
+ let(:path) { Stud::Temporary.directory }
8
+
9
+ context "with multiple writers" do
10
+ let(:items) { expected_count / writers }
11
+ let(:page_capacity) { 1 << page_capacity_multiplier }
12
+ let(:queue_capacity) { page_capacity * queue_capacity_multiplier }
13
+
14
+ let(:output_strings) { [] }
15
+ let(:reject_memo_keys) { [:reject_memo_keys, :path, :queue, :writer_threads, :collector, :metric, :reader_threads, :output_strings] }
16
+
17
+ let(:queue) do
18
+ described_class.create_file_based(path, page_capacity, 0, queue_checkpoint_acks, queue_checkpoint_writes, queue_checkpoint_interval, queue_capacity)
19
+ end
20
+
21
+ let(:writer_threads) do
22
+ writer = queue.write_client
23
+ writers.times.map do |i|
24
+ Thread.new(i, items, writer) do |_i, _items, _writer|
25
+ publisher(_items, _writer)
26
+ end
27
+ end
28
+ end
29
+
30
+ let(:writers_finished) { Concurrent::AtomicBoolean.new(false) }
31
+
32
+ let(:reader_threads) do
33
+ reader = queue.read_client
34
+ reader.set_batch_dimensions(batch_size, batch_wait)
35
+ reader.set_events_metric(metric.namespace([:stats, :events]))
36
+ reader.set_pipeline_metric(metric.namespace([:stats, :pipelines, :main, :events]))
37
+
38
+ readers.times.map do |i|
39
+ Thread.new(i, reader, counts) do |_i, _reader, _counts|
40
+ begin
41
+ tally = 0
42
+ while true
43
+ batch = _reader.read_batch
44
+ break if batch.size.zero? && writers_finished.value == true && queue.queue.is_fully_acked?
45
+ sleep(rand * 0.01) if simulate_work
46
+ tally += batch.size
47
+ batch.close
48
+ end
49
+ _counts[_i] = tally
50
+ # puts("reader #{_i}, tally=#{tally}, _counts=#{_counts.inspect}")
51
+ rescue => e
52
+ p :reader_error => e
53
+ end
54
+ end
55
+ end
56
+ end
57
+
58
+ def publisher(items, writer)
59
+ items.times.each do |i|
60
+ event = LogStash::Event.new("sequence" => "#{i}".ljust(string_size))
61
+ writer.push(event)
62
+ end
63
+ rescue => e
64
+ p :publisher_error => e
65
+ end
66
+
67
+ let(:collector) { LogStash::Instrument::Collector.new }
68
+ let(:metric) { LogStash::Instrument::Metric.new(collector) }
69
+
70
+ shared_examples "a well behaved queue" do
71
+ it "writes, reads, closes and reopens" do
72
+ Thread.abort_on_exception = true
73
+
74
+ # force lazy initialization to avoid concurrency issues within threads
75
+ counts
76
+ queue
77
+
78
+ # Start the threads
79
+ writer_threads
80
+ reader_threads
81
+
82
+ writer_threads.each(&:join)
83
+ writers_finished.make_true
84
+
85
+ reader_threads.each(&:join)
86
+
87
+ enqueued = queue.queue.unread_count
88
+
89
+ if enqueued != 0
90
+ output_strings << "unread events in queue: #{enqueued}"
91
+ end
92
+
93
+ got = counts.reduce(&:+)
94
+
95
+ if got != expected_count
96
+ # puts("count=#{counts.inspect}")
97
+ output_strings << "events read: #{got}"
98
+ end
99
+
100
+ sleep 0.1
101
+ expect { queue.close }.not_to raise_error
102
+ sleep 0.1
103
+ files = Dir.glob(path + '/*').map{|f| f.sub("#{path}/", '')}
104
+ if files.count != 2
105
+ output_strings << "File count after close mismatch expected: 2 got: #{files.count}"
106
+ output_strings.concat files
107
+ end
108
+
109
+ begin
110
+ queue.queue.open
111
+ rescue Exception => e
112
+ output_strings << e.message
113
+ end
114
+
115
+ queue.queue.close
116
+
117
+ if output_strings.any?
118
+ output_strings << __memoized.reject{|k,v| reject_memo_keys.include?(k)}.inspect
119
+ end
120
+
121
+ expect(output_strings).to eq([])
122
+ end
123
+ end
124
+
125
+ let(:writers) { 3 }
126
+ let(:readers) { 3 }
127
+ let(:simulate_work) { true }
128
+ let(:counts) { Concurrent::Array.new([0, 0, 0, 0, 0, 0, 0, 0]) }
129
+ let(:page_capacity_multiplier) { 20 }
130
+ let(:queue_capacity_multiplier) { 128 }
131
+ let(:queue_checkpoint_acks) { 1024 }
132
+ let(:queue_checkpoint_writes) { 1024 }
133
+ let(:queue_checkpoint_interval) { 1000 }
134
+ let(:batch_size) { 500 }
135
+ let(:batch_wait) { 1000 }
136
+ let(:expected_count) { 60000 }
137
+ let(:string_size) { 256 }
138
+
139
+ describe "with simulate_work ON" do
140
+ let(:simulate_work) { true }
141
+
142
+ context "> more writers than readers <" do
143
+ let(:writers) { 4 }
144
+ let(:readers) { 2 }
145
+ it_behaves_like "a well behaved queue"
146
+ end
147
+
148
+ context "> less writers than readers <" do
149
+ let(:writers) { 2 }
150
+ let(:readers) { 4 }
151
+ it_behaves_like "a well behaved queue"
152
+ end
153
+
154
+ context "> larger checkpoint acks <" do
155
+ let(:queue_checkpoint_acks) { 3000 }
156
+ it_behaves_like "a well behaved queue"
157
+ end
158
+
159
+ context "> smaller checkpoint acks <" do
160
+ let(:queue_checkpoint_acks) { 500 }
161
+ it_behaves_like "a well behaved queue"
162
+ end
163
+
164
+ context "> larger checkpoint writes <" do
165
+ let(:queue_checkpoint_writes) { 3000 }
166
+ it_behaves_like "a well behaved queue"
167
+ end
168
+
169
+ context "> smaller checkpoint writes <" do
170
+ let(:queue_checkpoint_writes) { 500 }
171
+ it_behaves_like "a well behaved queue"
172
+ end
173
+
174
+ context "> larger checkpoint interval <" do
175
+ let(:queue_checkpoint_interval) { 3000 }
176
+ it_behaves_like "a well behaved queue"
177
+ end
178
+
179
+ context "> smaller checkpoint interval <" do
180
+ let(:queue_checkpoint_interval) { 500 }
181
+ it_behaves_like "a well behaved queue"
182
+ end
183
+
184
+ context "> smaller batch wait <" do
185
+ let(:batch_wait) { 125 }
186
+ it_behaves_like "a well behaved queue"
187
+ end
188
+
189
+ context "> larger batch wait <" do
190
+ let(:batch_wait) { 5000 }
191
+ it_behaves_like "a well behaved queue"
192
+ end
193
+
194
+ context "> smaller event size <" do
195
+ let(:string_size) { 8 }
196
+ it_behaves_like "a well behaved queue"
197
+ end
198
+
199
+ context "> larger event size <" do
200
+ let(:string_size) { 8192 }
201
+ it_behaves_like "a well behaved queue"
202
+ end
203
+
204
+ context "> small queue size limit <" do
205
+ let(:queue_capacity_multiplier) { 10 }
206
+ it_behaves_like "a well behaved queue"
207
+ end
208
+
209
+ context "> very large queue size limit <" do
210
+ let(:queue_capacity_multiplier) { 512 }
211
+ it_behaves_like "a well behaved queue"
212
+ end
213
+ end
214
+
215
+ describe "with simulate_work OFF" do
216
+ let(:simulate_work) { false }
217
+
218
+ context "> more writers than readers <" do
219
+ let(:writers) { 4 }
220
+ let(:readers) { 2 }
221
+ it_behaves_like "a well behaved queue"
222
+ end
223
+
224
+ context "> less writers than readers <" do
225
+ let(:writers) { 2 }
226
+ let(:readers) { 4 }
227
+ it_behaves_like "a well behaved queue"
228
+ end
229
+
230
+ context "> larger checkpoint acks <" do
231
+ let(:queue_checkpoint_acks) { 3000 }
232
+ it_behaves_like "a well behaved queue"
233
+ end
234
+
235
+ context "> smaller checkpoint acks <" do
236
+ let(:queue_checkpoint_acks) { 500 }
237
+ it_behaves_like "a well behaved queue"
238
+ end
239
+
240
+ context "> larger checkpoint writes <" do
241
+ let(:queue_checkpoint_writes) { 3000 }
242
+ it_behaves_like "a well behaved queue"
243
+ end
244
+
245
+ context "> smaller checkpoint writes <" do
246
+ let(:queue_checkpoint_writes) { 500 }
247
+ it_behaves_like "a well behaved queue"
248
+ end
249
+
250
+ context "> larger checkpoint interval <" do
251
+ let(:queue_checkpoint_interval) { 3000 }
252
+ it_behaves_like "a well behaved queue"
253
+ end
254
+
255
+ context "> smaller checkpoint interval <" do
256
+ let(:queue_checkpoint_interval) { 500 }
257
+ it_behaves_like "a well behaved queue"
258
+ end
259
+
260
+ context "> smaller batch wait <" do
261
+ let(:batch_wait) { 125 }
262
+ it_behaves_like "a well behaved queue"
263
+ end
264
+
265
+ context "> larger batch wait <" do
266
+ let(:batch_wait) { 5000 }
267
+ it_behaves_like "a well behaved queue"
268
+ end
269
+
270
+ context "> smaller event size <" do
271
+ let(:string_size) { 8 }
272
+ it_behaves_like "a well behaved queue"
273
+ end
274
+
275
+ context "> larger event size <" do
276
+ let(:string_size) { 8192 }
277
+ it_behaves_like "a well behaved queue"
278
+ end
279
+
280
+ context "> small queue size limit <" do
281
+ let(:queue_capacity_multiplier) { 10 }
282
+ it_behaves_like "a well behaved queue"
283
+ end
284
+
285
+ context "> very large queue size limit <" do
286
+ let(:queue_capacity_multiplier) { 512 }
287
+ it_behaves_like "a well behaved queue"
288
+ end
289
+ end
290
+ end
291
+ end
@@ -99,6 +99,30 @@ describe LogStash::Agent do
99
99
  subject.register_pipeline(pipeline_settings)
100
100
  end
101
101
 
102
+ context "when a system pipeline is running" do
103
+ context "when one pipeline is finite" do
104
+ let(:pipeline_args) {
105
+ {
106
+ "path.config" => "a",
107
+ "config.string" => "input { generator { count => 1000 }} output { null {} }"
108
+ }
109
+ }
110
+ let(:system_pipeline_settings) do
111
+ s = agent_settings.clone
112
+ s.set("path.config", "")
113
+ s.set("config.string", "input { generator {}} output { null {} }")
114
+ s.set("pipeline.id", ".monitoring")
115
+ s.set("pipeline.system", true)
116
+ s
117
+ end
118
+
119
+ it "stops logstash at the end of the execution of the finite pipeline" do
120
+ subject.register_pipeline(system_pipeline_settings)
121
+ expect(subject.execute).to be_nil
122
+ end
123
+ end
124
+ end
125
+
102
126
  context "if state is clean" do
103
127
  before :each do
104
128
  allow(subject).to receive(:running_pipelines?).and_return(true)
@@ -192,7 +192,7 @@ describe LogStash::Config::Mixin do
192
192
  expect(clone.uri.to_s).to eql(uri_hidden)
193
193
  end
194
194
 
195
- it "should make the real URI object availale under #uri" do
195
+ it "should make the real URI object available under #uri" do
196
196
  expect(subject.uri.uri).to be_a(::URI)
197
197
  end
198
198
 
@@ -322,6 +322,9 @@ describe LogStash::Config::Mixin do
322
322
  config :oneNumber, :validate => :number, :required => false
323
323
  config :oneArray, :validate => :array, :required => false
324
324
  config :oneHash, :validate => :hash, :required => false
325
+ config :nestedHash, :validate => :hash, :required => false
326
+ config :nestedArray, :validate => :hash, :required => false
327
+ config :deepHash, :validate => :hash, :required => false
325
328
 
326
329
  def initialize(params)
327
330
  super(params)
@@ -378,7 +381,10 @@ describe LogStash::Config::Mixin do
378
381
  "oneString" => "${FunString:foo}",
379
382
  "oneBoolean" => "${FunBool:false}",
380
383
  "oneArray" => [ "first array value", "${FunString:foo}" ],
381
- "oneHash" => { "key1" => "${FunString:foo}", "key2" => "${FunString} is ${FunBool}", "key3" => "${FunBool:false} or ${funbool:false}" }
384
+ "oneHash" => { "key1" => "${FunString:foo}", "key2" => "${FunString} is ${FunBool}", "key3" => "${FunBool:false} or ${funbool:false}" },
385
+ "nestedHash" => { "level1" => { "key1" => "http://${FunString}:8080/blah.txt" } },
386
+ "nestedArray" => { "level1" => [{ "key1" => "http://${FunString}:8080/blah.txt" }, { "key2" => "http://${FunString}:8080/foo.txt" }] },
387
+ "deepHash" => { "level1" => { "level2" => {"level3" => { "key1" => "http://${FunString}:8080/blah.txt" } } } }
382
388
  )
383
389
  end
384
390
 
@@ -387,6 +393,9 @@ describe LogStash::Config::Mixin do
387
393
  expect(subject.oneBoolean).to(be_truthy)
388
394
  expect(subject.oneArray).to(be == [ "first array value", "fancy" ])
389
395
  expect(subject.oneHash).to(be == { "key1" => "fancy", "key2" => "fancy is true", "key3" => "true or false" })
396
+ expect(subject.nestedHash).to(be == { "level1" => { "key1" => "http://fancy:8080/blah.txt" } })
397
+ expect(subject.nestedArray).to(be == { "level1" => [{ "key1" => "http://fancy:8080/blah.txt" }, { "key2" => "http://fancy:8080/foo.txt" }] })
398
+ expect(subject.deepHash).to(be == { "level1" => { "level2" => { "level3" => { "key1" => "http://fancy:8080/blah.txt" } } } })
390
399
  end
391
400
  end
392
401
 
@@ -34,6 +34,13 @@ describe LogStash::EventDispatcher do
34
34
  let(:listener) { CustomSpy }
35
35
  subject(:emitter) { DummyEmitter.new }
36
36
 
37
+ it "ignores duplicate listener" do
38
+ emitter.dispatcher.add_listener(listener)
39
+ emitter.dispatcher.add_listener(listener)
40
+ expect(listener).to receive(:method_exists).with(emitter).once
41
+ emitter.method_exists
42
+ end
43
+
37
44
  describe "Emits events" do
38
45
  before do
39
46
  emitter.dispatcher.add_listener(listener)
@@ -65,7 +72,7 @@ describe LogStash::EventDispatcher do
65
72
  emitter.method_exists
66
73
  end
67
74
 
68
- it "allows to remove a listner to an emitter" do
75
+ it "allows to remove a listener to an emitter" do
69
76
  expect(listener).to receive(:method_exists).with(emitter).once
70
77
  emitter.dispatcher.add_listener(listener)
71
78
  emitter.method_exists
@@ -0,0 +1,346 @@
1
+ # encoding: utf-8
2
+
3
+ require "spec_helper"
4
+ require "logstash/util"
5
+ require "logstash/event"
6
+ require "json"
7
+ require "java"
8
+
9
+ TIMESTAMP = "@timestamp"
10
+
11
+ describe LogStash::Event do
12
+ context "to_json" do
13
+ it "should serialize simple values" do
14
+ e = LogStash::Event.new({"foo" => "bar", "bar" => 1, "baz" => 1.0, TIMESTAMP => "2015-05-28T23:02:05.350Z"})
15
+ expect(JSON.parse(e.to_json)).to eq(JSON.parse("{\"foo\":\"bar\",\"bar\":1,\"baz\":1.0,\"@timestamp\":\"2015-05-28T23:02:05.350Z\",\"@version\":\"1\"}"))
16
+ end
17
+
18
+ it "should serialize deep hash values" do
19
+ e = LogStash::Event.new({"foo" => {"bar" => 1, "baz" => 1.0, "biz" => "boz"}, TIMESTAMP => "2015-05-28T23:02:05.350Z"})
20
+ expect(JSON.parse(e.to_json)).to eq(JSON.parse("{\"foo\":{\"bar\":1,\"baz\":1.0,\"biz\":\"boz\"},\"@timestamp\":\"2015-05-28T23:02:05.350Z\",\"@version\":\"1\"}"))
21
+ end
22
+
23
+ it "should serialize deep array values" do
24
+ e = LogStash::Event.new({"foo" => ["bar", 1, 1.0], TIMESTAMP => "2015-05-28T23:02:05.350Z"})
25
+ expect(JSON.parse(e.to_json)).to eq(JSON.parse("{\"foo\":[\"bar\",1,1.0],\"@timestamp\":\"2015-05-28T23:02:05.350Z\",\"@version\":\"1\"}"))
26
+ end
27
+
28
+ it "should serialize deep hash from field reference assignments" do
29
+ e = LogStash::Event.new({TIMESTAMP => "2015-05-28T23:02:05.350Z"})
30
+ e.set("foo", "bar")
31
+ e.set("bar", 1)
32
+ e.set("baz", 1.0)
33
+ e.set("[fancy][pants][socks]", "shoes")
34
+ expect(JSON.parse(e.to_json)).to eq(JSON.parse("{\"@timestamp\":\"2015-05-28T23:02:05.350Z\",\"@version\":\"1\",\"foo\":\"bar\",\"bar\":1,\"baz\":1.0,\"fancy\":{\"pants\":{\"socks\":\"shoes\"}}}"))
35
+ end
36
+ end
37
+
38
+ context "#get" do
39
+ it "should get simple values" do
40
+ e = LogStash::Event.new({"foo" => "bar", "bar" => 1, "baz" => 1.0, TIMESTAMP => "2015-05-28T23:02:05.350Z"})
41
+ expect(e.get("foo")).to eq("bar")
42
+ expect(e.get("[foo]")).to eq("bar")
43
+ expect(e.get("bar")).to eq(1)
44
+ expect(e.get("[bar]")).to eq(1)
45
+ expect(e.get("baz")).to eq(1.0)
46
+ expect(e.get("[baz]")).to eq(1.0)
47
+ expect(e.get(TIMESTAMP).to_s).to eq("2015-05-28T23:02:05.350Z")
48
+ expect(e.get("[#{TIMESTAMP}]").to_s).to eq("2015-05-28T23:02:05.350Z")
49
+ end
50
+
51
+ it "should get deep hash values" do
52
+ e = LogStash::Event.new({"foo" => {"bar" => 1, "baz" => 1.0}})
53
+ expect(e.get("[foo][bar]")).to eq(1)
54
+ expect(e.get("[foo][baz]")).to eq(1.0)
55
+ end
56
+
57
+ it "should get deep array values" do
58
+ e = LogStash::Event.new({"foo" => ["bar", 1, 1.0]})
59
+ expect(e.get("[foo][0]")).to eq("bar")
60
+ expect(e.get("[foo][1]")).to eq(1)
61
+ expect(e.get("[foo][2]")).to eq(1.0)
62
+ expect(e.get("[foo][3]")).to be_nil
63
+ end
64
+
65
+ context "negative array values" do
66
+ it "should index from the end of the array" do
67
+ list = ["bar", 1, 1.0]
68
+ e = LogStash::Event.new({"foo" => list})
69
+ expect(e.get("[foo][-3]")).to eq(list[-3])
70
+ expect(e.get("[foo][-2]")).to eq(list[-2])
71
+ expect(e.get("[foo][-1]")).to eq(list[-1])
72
+ end
73
+ end
74
+ end
75
+
76
+ context "#set" do
77
+ it "should set simple values" do
78
+ e = LogStash::Event.new()
79
+ expect(e.set("foo", "bar")).to eq("bar")
80
+ expect(e.get("foo")).to eq("bar")
81
+
82
+ e = LogStash::Event.new({"foo" => "test"})
83
+ expect(e.set("foo", "bar")).to eq("bar")
84
+ expect(e.get("foo")).to eq("bar")
85
+ end
86
+
87
+ it "should set deep hash values" do
88
+ e = LogStash::Event.new()
89
+ expect(e.set("[foo][bar]", "baz")).to eq("baz")
90
+ expect(e.get("[foo][bar]")).to eq("baz")
91
+ expect(e.get("[foo][baz]")).to be_nil
92
+ end
93
+
94
+ it "should set deep array values" do
95
+ e = LogStash::Event.new()
96
+ expect(e.set("[foo][0]", "bar")).to eq("bar")
97
+ expect(e.get("[foo][0]")).to eq("bar")
98
+ expect(e.set("[foo][1]", 1)).to eq(1)
99
+ expect(e.get("[foo][1]")).to eq(1)
100
+ expect(e.set("[foo][2]", 1.0)).to eq(1.0)
101
+ expect(e.get("[foo][2]")).to eq(1.0)
102
+ expect(e.get("[foo][3]")).to be_nil
103
+ end
104
+
105
+ it "should add key when setting nil value" do
106
+ e = LogStash::Event.new()
107
+ e.set("[foo]", nil)
108
+ expect(e.to_hash).to include("foo" => nil)
109
+ end
110
+
111
+ # BigDecimal is now natively converted by JRuby, see https://github.com/elastic/logstash/pull/4838
112
+ it "should set BigDecimal" do
113
+ e = LogStash::Event.new()
114
+ e.set("[foo]", BigDecimal.new(1))
115
+ expect(e.get("foo")).to be_kind_of(BigDecimal)
116
+ expect(e.get("foo")).to eq(BigDecimal.new(1))
117
+ end
118
+
119
+ it "should set RubyBignum" do
120
+ e = LogStash::Event.new()
121
+ e.set("[foo]", -9223372036854776000)
122
+ expect(e.get("foo")).to be_kind_of(Bignum)
123
+ expect(e.get("foo")).to eq(-9223372036854776000)
124
+ end
125
+
126
+ it "should convert Time to Timestamp" do
127
+ e = LogStash::Event.new()
128
+ time = Time.now
129
+ e.set("[foo]", Time.at(time.to_f))
130
+ expect(e.get("foo")).to be_kind_of(LogStash::Timestamp)
131
+ expect(e.get("foo").to_f).to be_within(0.1).of(time.to_f)
132
+ end
133
+
134
+ it "should set XXJavaProxy Jackson crafted" do
135
+ proxy = org.logstash.Util.getMapFixtureJackson()
136
+ # proxy is {"string": "foo", "int": 42, "float": 42.42, "array": ["bar","baz"], "hash": {"string":"quux"} }
137
+ e = LogStash::Event.new()
138
+ e.set("[proxy]", proxy)
139
+ expect(e.get("[proxy][string]")).to eql("foo")
140
+ expect(e.get("[proxy][int]")).to eql(42)
141
+ expect(e.get("[proxy][float]")).to eql(42.42)
142
+ expect(e.get("[proxy][array][0]")).to eql("bar")
143
+ expect(e.get("[proxy][array][1]")).to eql("baz")
144
+ expect(e.get("[proxy][hash][string]")).to eql("quux")
145
+ end
146
+
147
+ it "should set XXJavaProxy hand crafted" do
148
+ proxy = org.logstash.Util.getMapFixtureHandcrafted()
149
+ # proxy is {"string": "foo", "int": 42, "float": 42.42, "array": ["bar","baz"], "hash": {"string":"quux"} }
150
+ e = LogStash::Event.new()
151
+ e.set("[proxy]", proxy)
152
+ expect(e.get("[proxy][string]")).to eql("foo")
153
+ expect(e.get("[proxy][int]")).to eql(42)
154
+ expect(e.get("[proxy][float]")).to eql(42.42)
155
+ expect(e.get("[proxy][array][0]")).to eql("bar")
156
+ expect(e.get("[proxy][array][1]")).to eql("baz")
157
+ expect(e.get("[proxy][hash][string]")).to eql("quux")
158
+ end
159
+
160
+ it "should fail on non UTF-8 encoding" do
161
+ # e = LogStash::Event.new
162
+ # s1 = "\xE0 Montr\xE9al".force_encoding("ISO-8859-1")
163
+ # expect(s1.encoding.name).to eq("ISO-8859-1")
164
+ # expect(s1.valid_encoding?).to eq(true)
165
+ # e.set("test", s1)
166
+ # s2 = e.get("test")
167
+ # expect(s2.encoding.name).to eq("UTF-8")
168
+ # expect(s2.valid_encoding?).to eq(true)
169
+ end
170
+ end
171
+
172
+ context "timestamp" do
173
+ it "getters should present a Ruby LogStash::Timestamp" do
174
+ e = LogStash::Event.new()
175
+ expect(e.timestamp.class).to eq(LogStash::Timestamp)
176
+ expect(e.get(TIMESTAMP).class).to eq(LogStash::Timestamp)
177
+ end
178
+
179
+ it "to_hash should inject a Ruby LogStash::Timestamp" do
180
+ e = LogStash::Event.new()
181
+
182
+ expect(e.to_java).to be_kind_of(Java::OrgLogstash::Event)
183
+ expect(e.to_java.get_field(TIMESTAMP)).to be_kind_of(Java::OrgLogstash::Timestamp)
184
+
185
+ expect(e.to_hash[TIMESTAMP]).to be_kind_of(LogStash::Timestamp)
186
+ # now make sure the original map was not touched
187
+ expect(e.to_java.get_field(TIMESTAMP)).to be_kind_of(Java::OrgLogstash::Timestamp)
188
+ end
189
+
190
+ it "should set timestamp" do
191
+ e = LogStash::Event.new
192
+ now = Time.now
193
+ e.set("@timestamp", LogStash::Timestamp.at(now.to_i))
194
+ expect(e.timestamp.to_i).to eq(now.to_i)
195
+ expect(e.get("@timestamp").to_i).to eq(now.to_i)
196
+ end
197
+ end
198
+
199
+ context "append" do
200
+ it "should append" do
201
+ event = LogStash::Event.new("message" => "hello world")
202
+ event.append(LogStash::Event.new("message" => "another thing"))
203
+ expect(event.get("message")).to eq(["hello world", "another thing"])
204
+ end
205
+ end
206
+
207
+ context "tags" do
208
+ it "should tag" do
209
+ event = LogStash::Event.new("message" => "hello world")
210
+ expect(event.get("tags")).to be_nil
211
+ event.tag("foo")
212
+ expect(event.get("tags")).to eq(["foo"])
213
+ end
214
+ end
215
+
216
+
217
+ # TODO(talevy): migrate tests to Java. no reason to test logging logic in ruby when it is being
218
+ # done in java land.
219
+
220
+ # context "logger" do
221
+
222
+ # let(:logger) { double("Logger") }
223
+
224
+ # before(:each) do
225
+ # allow(LogStash::Event).to receive(:logger).and_return(logger)
226
+ # end
227
+
228
+ # it "should set logger using a module" do
229
+ # expect(logger).to receive(:warn).once
230
+ # LogStash::Event.new(TIMESTAMP => "invalid timestamp")
231
+ # end
232
+
233
+ # it "should warn on invalid timestamp object" do
234
+ # expect(logger).to receive(:warn).once.with(/^Unrecognized/)
235
+ # LogStash::Event.new(TIMESTAMP => Array.new)
236
+ # end
237
+ # end
238
+
239
+ context "to_hash" do
240
+ let (:source_hash) { {"a" => 1, "b" => [1, 2, 3, {"h" => 1, "i" => "baz"}], "c" => {"d" => "foo", "e" => "bar", "f" => [4, 5, "six"]}} }
241
+ let (:source_hash_with_metadata) { source_hash.merge({"@metadata" => {"a" => 1, "b" => 2}}) }
242
+ subject { LogStash::Event.new(source_hash_with_metadata) }
243
+
244
+ it "should include @timestamp and @version" do
245
+ h = subject.to_hash
246
+ expect(h).to include("@timestamp")
247
+ expect(h).to include("@version")
248
+ expect(h).not_to include("@metadata")
249
+ end
250
+
251
+ it "should include @timestamp and @version and @metadata" do
252
+ h = subject.to_hash_with_metadata
253
+ expect(h).to include("@timestamp")
254
+ expect(h).to include("@version")
255
+ expect(h).to include("@metadata")
256
+ end
257
+
258
+ it "should produce valid deep Ruby hash without metadata" do
259
+ h = subject.to_hash
260
+ h.delete("@timestamp")
261
+ h.delete("@version")
262
+ expect(h).to eq(source_hash)
263
+ end
264
+
265
+ it "should produce valid deep Ruby hash with metadata" do
266
+ h = subject.to_hash_with_metadata
267
+ h.delete("@timestamp")
268
+ h.delete("@version")
269
+ expect(h).to eq(source_hash_with_metadata)
270
+ end
271
+ end
272
+
273
+ context "from_json" do
274
+ let (:source_json) { "{\"foo\":1, \"bar\":\"baz\"}" }
275
+ let (:blank_strings) {["", " ", " "]}
276
+ let (:bare_strings) {["aa", " aa", "aa "]}
277
+
278
+ it "should produce a new event from json" do
279
+ expect(LogStash::Event.from_json(source_json).size).to eq(1)
280
+
281
+ event = LogStash::Event.from_json(source_json)[0]
282
+ expect(event.get("[foo]")).to eq(1)
283
+ expect(event.get("[bar]")).to eq("baz")
284
+ end
285
+
286
+ it "should ignore blank strings" do
287
+ blank_strings.each do |s|
288
+ expect(LogStash::Event.from_json(s).size).to eq(0)
289
+ end
290
+ end
291
+
292
+ it "should raise TypeError on nil string" do
293
+ expect{LogStash::Event.from_json(nil)}.to raise_error TypeError
294
+ end
295
+
296
+ it "should consistently handle nil" do
297
+ blank_strings.each do |s|
298
+ expect{LogStash::Event.from_json(nil)}.to raise_error
299
+ expect{LogStash::Event.new(LogStash::Json.load(nil))}.to raise_error
300
+ end
301
+ end
302
+
303
+ it "should consistently handle bare string" do
304
+ bare_strings.each do |s|
305
+ expect{LogStash::Event.from_json(s)}.to raise_error LogStash::Json::ParserError
306
+ expect{LogStash::Event.new(LogStash::Json.load(s))}.to raise_error LogStash::Json::ParserError
307
+ end
308
+ end
309
+ end
310
+
311
+ context "initialize" do
312
+
313
+ it "should accept Ruby Hash" do
314
+ e = LogStash::Event.new({"foo" => 1, TIMESTAMP => "2015-05-28T23:02:05.350Z"})
315
+ expect(e.get("foo")).to eq(1)
316
+ expect(e.timestamp.to_iso8601).to eq("2015-05-28T23:02:05.350Z")
317
+ end
318
+
319
+ it "should accept Java Map" do
320
+ h = Java::JavaUtil::HashMap.new
321
+ h.put("foo", 2);
322
+ h.put(TIMESTAMP, "2016-05-28T23:02:05.350Z");
323
+ e = LogStash::Event.new(h)
324
+
325
+ expect(e.get("foo")).to eq(2)
326
+ expect(e.timestamp.to_iso8601).to eq("2016-05-28T23:02:05.350Z")
327
+ end
328
+
329
+ end
330
+
331
+ context "method missing exception messages" do
332
+ subject { LogStash::Event.new({"foo" => "bar"}) }
333
+
334
+ it "#[] method raises a better exception message" do
335
+ expect { subject["foo"] }.to raise_error(NoMethodError, /Direct event field references \(i\.e\. event\['field'\]\)/)
336
+ end
337
+
338
+ it "#[]= method raises a better exception message" do
339
+ expect { subject["foo"] = "baz" }.to raise_error(NoMethodError, /Direct event field references \(i\.e\. event\['field'\] = 'value'\)/)
340
+ end
341
+
342
+ it "other missing method raises normal exception message" do
343
+ expect { subject.baz() }.to raise_error(NoMethodError, /undefined method `baz' for/)
344
+ end
345
+ end
346
+ end