logstash-core 6.0.0.beta2-java → 6.0.0-java

Files changed (51)
  1. checksums.yaml +4 -4
  2. data/gemspec_jars.rb +5 -5
  3. data/lib/logstash-core/logstash-core.jar +0 -0
  4. data/lib/logstash-core/version.rb +8 -4
  5. data/lib/logstash-core_jars.rb +10 -10
  6. data/lib/logstash/agent.rb +3 -2
  7. data/lib/logstash/compiler/lscl.rb +15 -3
  8. data/lib/logstash/config/config_ast.rb +3 -2
  9. data/lib/logstash/config/modules_common.rb +1 -0
  10. data/lib/logstash/config/source/local.rb +2 -1
  11. data/lib/logstash/instrument/periodic_poller/dlq.rb +8 -3
  12. data/lib/logstash/instrument/periodic_poller/pq.rb +7 -3
  13. data/lib/logstash/logging/logger.rb +4 -1
  14. data/lib/logstash/modules/kibana_client.rb +35 -8
  15. data/lib/logstash/modules/logstash_config.rb +1 -1
  16. data/lib/logstash/modules/settings_merger.rb +8 -2
  17. data/lib/logstash/pipeline.rb +10 -19
  18. data/lib/logstash/pipeline_action/stop.rb +1 -0
  19. data/lib/logstash/runner.rb +4 -1
  20. data/lib/logstash/util/cloud_setting_id.rb +46 -12
  21. data/lib/logstash/util/modules_setting_array.rb +1 -1
  22. data/lib/logstash/util/password.rb +2 -4
  23. data/lib/logstash/util/wrapped_acked_queue.rb +6 -0
  24. data/lib/logstash/util/wrapped_synchronous_queue.rb +6 -0
  25. data/lib/logstash/version.rb +8 -10
  26. data/logstash-core.gemspec +25 -3
  27. data/spec/logstash/agent/converge_spec.rb +23 -10
  28. data/spec/logstash/agent_spec.rb +35 -15
  29. data/spec/logstash/api/modules/node_stats_spec.rb +5 -1
  30. data/spec/logstash/compiler/compiler_spec.rb +29 -0
  31. data/spec/logstash/config/source/local_spec.rb +3 -2
  32. data/spec/logstash/event_spec.rb +57 -0
  33. data/spec/logstash/modules/kibana_client_spec.rb +60 -0
  34. data/spec/logstash/modules/logstash_config_spec.rb +7 -1
  35. data/spec/logstash/modules/scaffold_spec.rb +1 -1
  36. data/spec/logstash/modules/settings_merger_spec.rb +32 -2
  37. data/spec/logstash/pipeline_action/create_spec.rb +4 -1
  38. data/spec/logstash/pipeline_action/reload_spec.rb +4 -1
  39. data/spec/logstash/pipeline_dlq_commit_spec.rb +3 -1
  40. data/spec/logstash/pipeline_pq_file_spec.rb +5 -7
  41. data/spec/logstash/pipeline_spec.rb +26 -38
  42. data/spec/logstash/runner_spec.rb +1 -5
  43. data/spec/logstash/settings/modules_spec.rb +13 -2
  44. data/spec/logstash/settings/writable_directory_spec.rb +13 -10
  45. data/spec/logstash/timestamp_spec.rb +2 -2
  46. data/spec/logstash/util/cloud_setting_id_spec.rb +93 -0
  47. data/spec/support/helpers.rb +1 -1
  48. data/spec/support/mocks_classes.rb +14 -0
  49. data/spec/support/shared_contexts.rb +9 -0
  50. data/versions-gem-copy.yml +23 -0
  51. metadata +19 -14
data/lib/logstash/util/modules_setting_array.rb
@@ -16,7 +16,7 @@ module LogStash module Util class ModulesSettingArray
     @original = value
     # wrap passwords
     @original.each do |hash|
-      hash.keys.select{|key| key.to_s.end_with?('password')}.each do |key|
+      hash.keys.select{|key| key.to_s.end_with?('password') && !hash[key].is_a?(LogStash::Util::Password)}.each do |key|
        hash[key] = LogStash::Util::Password.new(hash[key])
      end
    end

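Note: the added `is_a?(LogStash::Util::Password)` guard prevents values that are already wrapped from being wrapped a second time when the settings are processed more than once. A minimal sketch of the idea, using an illustrative setting key rather than real module code:

    require "logstash/util/password"

    settings = [{ "var.kibana.password" => "secret" }]

    2.times do
      settings.each do |hash|
        hash.keys
            .select { |k| k.to_s.end_with?('password') && !hash[k].is_a?(LogStash::Util::Password) }
            .each { |k| hash[k] = LogStash::Util::Password.new(hash[k]) }
      end
    end

    settings.first["var.kibana.password"].value  # => "secret"
    # without the guard, the second pass would wrap the Password object itself,
    # and #value would no longer return the original string
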
data/lib/logstash/util/password.rb
@@ -1,10 +1,9 @@
 # encoding: utf-8
 require "logstash/namespace"
-require "logstash/util"
 
 # This class exists to quietly wrap a password string so that, when printed or
 # logged, you don't accidentally print the password itself.
-class LogStash::Util::Password
+module LogStash module Util class Password
   attr_reader :value
 
   public
@@ -21,5 +20,4 @@ class LogStash::Util::Password
   def inspect
     return to_s
   end # def inspect
-end # class LogStash::Util::Password
-
+end end end # class LogStash::Util::Password

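Note: the class is now opened with explicit module nesting instead of the compact `class LogStash::Util::Password` form, so it no longer depends on `logstash/util` having been required first. A short usage sketch of the wrapper's behaviour (assuming the usual masking in `#to_s`):

    pw = LogStash::Util::Password.new("s3cr3t")
    pw.value       # => "s3cr3t"  -- only explicit access exposes the secret
    pw.inspect     # delegates to #to_s (see the diff above), so inspecting prints a masked placeholder
    "key=#{pw}"    # string interpolation and logging likewise go through #to_s
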
data/lib/logstash/util/wrapped_acked_queue.rb
@@ -287,6 +287,12 @@ module LogStash; module Util
       # @cancelled[event] = true
     end
 
+    def to_a
+      events = []
+      each {|e| events << e}
+      events
+    end
+
     def each(&blk)
       # take care not to cause @originals or @generated to change during iteration
 
data/lib/logstash/util/wrapped_synchronous_queue.rb
@@ -218,6 +218,12 @@ module LogStash; module Util
       # @cancelled[event] = true
     end
 
+    def to_a
+      events = []
+      each {|e| events << e}
+      events
+    end
+
     def each(&blk)
       # take care not to cause @originals or @generated to change during iteration
       @iterating = true

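Note: both queue wrappers gain the same `to_a` helper on their batch classes; it simply drains the batch through `each` into a plain Array. A tiny usage sketch with an illustrative `batch` variable:

    # `batch` stands for a read batch from either queue implementation
    events = batch.to_a          # collects every event yielded by #each into an Array
    events.each_with_index do |event, i|
      puts "#{i}: #{event.get("message")}"
    end
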
data/lib/logstash/version.rb
@@ -2,13 +2,11 @@
 
 # The version of the logstash package (not the logstash-core gem version).
 #
-# Note to authors: this should not include dashes because 'gem' barfs if
-# you include a dash in the version string.
-
-# TODO: (colin) the logstash-core gem uses it's own version number in logstash-core/lib/logstash-core/version.rb
-# there are some dependencies in logstash-core on the LOGSTASH_VERSION constant this is why
-# the logstash version is currently defined here in logstash-core/lib/logstash/version.rb but
-# eventually this file should be in the root logstash lib fir and dependencies in logstash-core should be
-# fixed.
-
-LOGSTASH_VERSION = "6.0.0-beta2"
+# sourced from a copy of the master versions.yml file, see logstash-core/logstash-core.gemspec
+if !defined?(ALL_VERSIONS)
+  require 'yaml'
+  ALL_VERSIONS = YAML.load_file(File.expand_path("../../versions-gem-copy.yml", File.dirname(__FILE__)))
+end
+if !defined?(LOGSTASH_VERSION)
+  LOGSTASH_VERSION = ALL_VERSIONS.fetch("logstash")
+end

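Note: `LOGSTASH_VERSION` is no longer hard-coded; it is read from `versions-gem-copy.yml`, which the gemspec (next hunk) copies from the project-level `versions.yml` when the gem is built. A rough sketch of what the lookup resolves against; the `logstash` and `jrjackson` keys are the ones this diff actually reads, the file contents shown are illustrative only:

    # versions-gem-copy.yml (illustrative contents):
    #   logstash: 6.0.0
    #   logstash-core: 6.0.0
    #   jrjackson: 0.4.4
    require 'yaml'
    ALL_VERSIONS = YAML.load_file("versions-gem-copy.yml")
    ALL_VERSIONS.fetch("logstash")    # => "6.0.0", becomes LOGSTASH_VERSION
    ALL_VERSIONS.fetch("jrjackson")   # used by the gemspec for the jrjackson dependency
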
data/logstash-core.gemspec
@@ -1,6 +1,25 @@
 # -*- encoding: utf-8 -*-
 lib = File.expand_path('../lib', __FILE__)
 $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+
+project_versions_yaml_path = File.expand_path("../versions.yml", File.dirname(__FILE__))
+if File.exist?(project_versions_yaml_path)
+  # we need to copy the project level versions.yml into the gem root
+  # to be able to package it into the gems file structure
+  # as the require 'logstash-core/version' loads the yaml file from within the gem root.
+  #
+  # we ignore the copy in git and we overwrite an existing file
+  # each time we build the logstash-core gem
+  original_lines = IO.readlines(project_versions_yaml_path)
+  original_lines << ""
+  original_lines << "# This is a copy the project level versions.yml into this gem's root and it is created when the gemspec is evaluated."
+  gem_versions_yaml_path = File.expand_path("./versions-gem-copy.yml", File.dirname(__FILE__))
+  File.open(gem_versions_yaml_path, 'w') do |new_file|
+    # create or overwrite
+    new_file.puts(original_lines)
+  end
+end
+
 require 'logstash-core/version'
 
 Gem::Specification.new do |gem|
@@ -11,7 +30,10 @@ Gem::Specification.new do |gem|
   gem.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
   gem.license = "Apache License (2.0)"
 
-  gem.files = Dir.glob(["logstash-core.gemspec", "gemspec_jars.rb", "lib/**/*.rb", "spec/**/*.rb", "locales/*", "lib/logstash/api/init.ru", "lib/logstash-core/logstash-core.jar"])
+  gem.files = Dir.glob(
+    %w(versions-gem-copy.yml logstash-core.gemspec gemspec_jars.rb lib/**/*.rb spec/**/*.rb locales/*
+       lib/logstash/api/init.ru lib/logstash-core/logstash-core.jar)
+  )
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "logstash-core"
   gem.require_paths = ["lib"]
@@ -31,7 +53,7 @@ Gem::Specification.new do |gem|
 
   gem.add_runtime_dependency "sinatra", '~> 1.4', '>= 1.4.6'
   gem.add_runtime_dependency 'puma', '~> 2.16'
-  gem.add_runtime_dependency "jruby-openssl", "0.9.20" # >= 0.9.13 Required to support TLSv1.2
+  gem.add_runtime_dependency "jruby-openssl", ">= 0.9.20" # >= 0.9.13 Required to support TLSv1.2
   gem.add_runtime_dependency "chronic_duration", "0.10.6"
 
   # TODO(sissel): Treetop 1.5.x doesn't seem to work well, but I haven't
@@ -46,7 +68,7 @@ Gem::Specification.new do |gem|
   gem.add_runtime_dependency "rubyzip", "~> 1.2.1"
   gem.add_runtime_dependency "thread_safe", "~> 0.3.5" #(Apache 2.0 license)
 
-  gem.add_runtime_dependency "jrjackson", "~> 0.4.2" #(Apache 2.0 license)
+  gem.add_runtime_dependency "jrjackson", "~> #{ALL_VERSIONS.fetch('jrjackson')}" #(Apache 2.0 license)
 
   gem.add_runtime_dependency "jar-dependencies"
   # as of Feb 3rd 2016, the ruby-maven gem is resolved to version 3.3.3 and that version

data/spec/logstash/agent/converge_spec.rb
@@ -71,9 +71,16 @@ describe LogStash::Agent do
       TestSourceLoader.new(infinite_pipeline_config, system_pipeline_config)
     end
 
+    before(:each) do
+      @agent_task = start_agent(subject)
+    end
+
+    after(:each) do
+      @agent_task.stop!
+    end
+
     describe "#running_user_defined_pipelines" do
       it "returns the user defined pipelines" do
-        start_agent(subject)
         wait_for do
           subject.with_running_user_defined_pipelines {|pipelines| pipelines.keys }
         end.to eq([:main])
@@ -82,7 +89,6 @@ describe LogStash::Agent do
 
     describe "#running_user_defined_pipelines?" do
       it "returns true" do
-        start_agent(subject)
         wait_for do
           subject.running_user_defined_pipelines?
         end.to be_truthy
@@ -102,14 +108,19 @@ describe LogStash::Agent do
     context "and successfully load the config" do
       let(:agent_settings) { mock_settings("config.reload.automatic" => false) }
 
-      it "converge only once" do
-        agent_task = start_agent(subject)
+      before(:each) do
+        @agent_task = start_agent(subject)
+      end
 
+      after(:each) do
+        @agent_task.stop!
+      end
+
+      it "converge only once" do
         expect(source_loader.fetch_count).to eq(1)
         expect(subject).to have_running_pipeline?(pipeline_config)
 
         subject.shutdown
-        agent_task.stop!
       end
     end
 
@@ -137,11 +148,16 @@ describe LogStash::Agent do
         "config.reload.interval" => interval
       )
     end
+    before(:each) do
+      @agent_task = start_agent(subject)
+    end
+
+    after(:each) do
+      @agent_task.stop!
+    end
 
     context "and successfully load the config" do
       it "converges periodically the pipelines from the configs source" do
-        agent_task = start_agent(subject)
-
         sleep(2) # let the interval reload a few times
         expect(subject).to have_running_pipeline?(pipeline_config)
 
@@ -152,7 +168,6 @@ describe LogStash::Agent do
         end
 
         subject.shutdown
-        agent_task.stop!
       end
     end
 
@@ -162,14 +177,12 @@ describe LogStash::Agent do
       end
 
      it "it will keep trying to converge" do
-        agent_task = start_agent(subject)
 
        sleep(agent_settings.get("config.reload.interval") / 1_000_000_000.0 * 20) # let the interval reload a few times
        expect(subject.pipelines_count).to eq(0)
        expect(source_loader.fetch_count).to be > 1
 
        subject.shutdown
-        agent_task.stop!
      end
    end
  end

data/spec/logstash/agent_spec.rb
@@ -26,6 +26,7 @@ describe LogStash::Agent do
     sl
   end
   let(:logger) { double("logger") }
+  let(:timeout) {120} #seconds
 
   subject { LogStash::Agent.new(agent_settings, default_source_loader) }
 
@@ -33,7 +34,7 @@ describe LogStash::Agent do
     # This MUST run first, before `subject` is invoked to ensure clean state
     clear_data_dir
 
-    File.open(config_file, "w") { |f| f.puts config_file_txt }
+    File.open(config_file, "w") { |f| f.puts(config_file_txt) }
 
     agent_args.each do |key, value|
       agent_settings.set(key, value)
@@ -50,8 +51,9 @@ describe LogStash::Agent do
   after :each do
     subject.shutdown
     LogStash::SETTINGS.reset
-    File.unlink(config_file)
-    File.unlink(subject.id_path)
+
+    FileUtils.rm(config_file)
+    FileUtils.rm_rf(subject.id_path)
   end
 
   it "fallback to hostname when no name is provided" do
@@ -314,8 +316,10 @@ describe LogStash::Agent do
   context "metrics after config reloading" do
 
     let(:initial_generator_threshold) { 1000 }
-    let(:temporary_file) { Stud::Temporary.file.path }
-    let(:config_file_txt) { "input { generator { count => #{initial_generator_threshold*2} } } output { file { path => '#{temporary_file}'} }" }
+    let(:original_config_output) { Stud::Temporary.pathname }
+    let(:new_config_output) { Stud::Temporary.pathname }
+
+    let(:config_file_txt) { "input { generator { count => #{initial_generator_threshold*2} } } output { file { path => '#{original_config_output}'} }" }
 
     let(:agent_args) do
       {
@@ -326,14 +330,25 @@ describe LogStash::Agent do
 
     subject { described_class.new(agent_settings, default_source_loader) }
 
+    let(:agent_thread) do
+      # subject has to be called for the first time outside the thread because it could create a race condition
+      # with subsequent subject calls
+      s = subject
+      Thread.new { s.execute }
+    end
+
     before(:each) do
       @abort_on_exception = Thread.abort_on_exception
       Thread.abort_on_exception = true
 
-      @t = Thread.new { subject.execute }
+      agent_thread
 
       # wait for some events to reach the dummy_output
-      sleep(0.01) until IO.readlines(temporary_file).size > initial_generator_threshold
+      Timeout.timeout(timeout) do
+        # wait for file existence otherwise it will raise exception on Windows
+        sleep(0.1) until ::File.exist?(original_config_output)
+        sleep(0.1) until IO.readlines(original_config_output).size > initial_generator_threshold
+      end
 
       # write new config
       File.open(config_file, "w") { |f| f.write(new_config) }
@@ -341,10 +356,14 @@ describe LogStash::Agent do
 
     after :each do
       begin
+        Stud.stop!(agent_thread) rescue nil # it may be dead already
+        agent_thread.join
         subject.shutdown
-        Stud.stop!(@t) rescue nil # it may be dead already
-        @t.join
-        File.unlink(temporary_file)
+
+        FileUtils.rm(original_config_output)
+        FileUtils.rm(new_config_output) if File.exist?(new_config_output)
+      rescue
+        #don't care about errors here.
       ensure
         Thread.abort_on_exception = @abort_on_exception
       end
@@ -352,19 +371,20 @@ describe LogStash::Agent do
 
     context "when reloading a good config" do
       let(:new_config_generator_counter) { 500 }
-      let(:new_file) { Stud::Temporary.file.path }
-      let(:new_config) { "input { generator { count => #{new_config_generator_counter} } } output { file { path => '#{new_file}'} }" }
+      let(:new_config) { "input { generator { count => #{new_config_generator_counter} } } output { file { path => '#{new_config_output}'} }" }
 
       before :each do
        subject.converge_state_and_update
-        sleep(0.01) while ::File.read(new_file).chomp.empty?
+        Timeout.timeout(timeout) do
+          # wait for file existence otherwise it will raise exception on Windows
+          sleep(0.1) until ::File.exist?(new_config_output)
+          sleep(0.1) while ::File.read(new_config_output).chomp.empty?
+        end
        # ensure the converge_state_and_update method has updated metrics by
        # invoking the mutex
        subject.running_pipelines?
      end
 
-      after(:each) { File.unlink(new_file) }
-
      it "resets the pipeline metric collector" do
        snapshot = subject.metric.collector.snapshot_metric
        value = snapshot.metric_store.get_with_path("/stats/pipelines")[:stats][:pipelines][:main][:events][:in].value

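Note: the spec now wraps its polling loops in `Timeout.timeout` and waits for the output file to exist before reading it, since `IO.readlines` on a missing path raises, notably on Windows CI. A minimal standalone sketch of that wait pattern (the helper name is illustrative, not part of the spec):

    require "timeout"

    # Wait until `path` exists and has more than `minimum` lines, giving up after `seconds`.
    def wait_for_lines(path, minimum, seconds = 120)
      Timeout.timeout(seconds) do
        sleep(0.1) until ::File.exist?(path)              # avoid Errno::ENOENT before the pipeline opens the file
        sleep(0.1) until IO.readlines(path).size > minimum
      end
    end
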
data/spec/logstash/api/modules/node_stats_spec.rb
@@ -69,7 +69,7 @@ describe LogStash::Api::Modules::NodeStats do
       "cpu"=>{
         "total_in_millis"=>Numeric,
         "percent"=>Numeric,
-        "load_average" => { "1m" => Numeric }
+        # load_average is not supported on Windows, set it below
       }
     },
     "pipelines" => {
@@ -89,5 +89,9 @@ describe LogStash::Api::Modules::NodeStats do
     }
   }
 
+  unless LogStash::Environment.windows?
+    root_structure["process"]["cpu"]["load_average"] = { "1m" => Numeric }
+  end
+
   test_api_and_resources(root_structure)
 end

data/spec/logstash/compiler/compiler_spec.rb
@@ -193,6 +193,35 @@ describe LogStash::Compiler do
         expect(c_plugin).to ir_eql(j.iPlugin(INPUT, "generator", expected_plugin_args))
       end
     end
+
+    describe "a filter plugin that repeats a Hash directive" do
+      let(:source) { "input { } filter { #{plugin_source} } output { } " }
+      subject(:c_plugin) { compiled[:filter] }
+
+      let(:plugin_source) do
+        %q[
+          grok {
+            match => { "message" => "%{WORD:word}" }
+            match => { "examplefield" => "%{NUMBER:num}" }
+            break_on_match => false
+          }
+        ]
+      end
+
+      let(:expected_plugin_args) do
+        {
+          "match" => {
+              "message" => "%{WORD:word}",
+              "examplefield" => "%{NUMBER:num}"
+          },
+          "break_on_match" => "false"
+        }
+      end
+
+      it "should merge the contents of the individual directives" do
+        expect(c_plugin).to ir_eql(j.iPlugin(FILTER, "grok", expected_plugin_args))
+      end
+    end
   end
 
   context "inputs" do
data/spec/logstash/config/source/local_spec.rb
@@ -79,7 +79,7 @@ describe LogStash::Config::Source::Local::ConfigPathLoader do
 
       parts.each do |part|
         basename = ::File.basename(part.id)
-        file_path = ::File.join(directory, basename)
+        file_path = ::File.expand_path(::File.join(directory, basename))
         content = files[basename]
         expect(part).to be_a_source_with_metadata("file", file_path, content)
       end
@@ -99,7 +99,8 @@ describe LogStash::Config::Source::Local::ConfigPathLoader do
       end
 
       it "raises an exception" do
-        expect { subject.read(file_path) }.to raise_error LogStash::ConfigLoadingError, /#{file_path}/
+        # check against base name because on Windows long paths are shrinked in the exception message
+        expect { subject.read(file_path) }.to raise_error LogStash::ConfigLoadingError, /.+#{::File.basename(file_path)}/
       end
     end
 
data/spec/logstash/event_spec.rb
@@ -352,4 +352,61 @@ describe LogStash::Event do
       expect { subject.baz() }.to raise_error(NoMethodError, /undefined method `baz' for/)
     end
   end
+
+  describe "#clone" do
+    let(:fieldref) { "[@metadata][fancy]" }
+    let(:event1) { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => "pants" }) }
+    let(:event2) { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => {"fancy2" => "pants2"} }) }
+    let(:event3) { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => {"fancy2" => {"fancy3" => "pants2"}} }) }
+    let(:event4) { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => {"fancy2" => ["pants1", "pants2"]} }) }
+    let(:event5) { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => "pants", "smarty" => "pants2" }) }
+
+    it "should clone metadata fields" do
+      cloned = event1.clone
+      expect(cloned.get(fieldref)).to eq("pants")
+      expect(cloned.to_hash_with_metadata).to include("@metadata")
+    end
+
+    it "should clone metadata fields with nested json" do
+      cloned = event2.clone
+      expect(cloned.get(fieldref)).to eq({"fancy2" => "pants2"})
+      expect(cloned.get("hello")).to eq("world")
+      expect(cloned.to_hash).not_to include("@metadata")
+      expect(cloned.to_hash_with_metadata).to include("@metadata")
+    end
+
+    it "should clone metadata fields with 2-level nested json" do
+      cloned = event3.clone
+      expect(cloned.get(fieldref)).to eq({"fancy2" => {"fancy3" => "pants2"}})
+      expect(cloned.to_hash).not_to include("@metadata")
+      expect(cloned.to_hash_with_metadata).to include("@metadata")
+    end
+
+    it "should clone metadata fields with nested json and array value" do
+      cloned = event4.clone
+      expect(cloned.get(fieldref)).to eq({"fancy2" => ["pants1", "pants2"]})
+      expect(cloned.to_hash_with_metadata).to include("@metadata")
+    end
+
+    it "should clone metadata fields with multiple keys" do
+      cloned = event5.clone
+      expect(cloned.get(fieldref)).to eq("pants")
+      expect(cloned.get("[@metadata][smarty]")).to eq("pants2")
+      expect(cloned.to_hash_with_metadata).to include("@metadata")
+    end
+
+    it "mutating cloned event should not affect the original event" do
+      cloned = event1.clone
+      cloned.set("hello", "foobar")
+      expect(cloned.get("hello")).to eq("foobar")
+      expect(event1.get("hello")).to eq("world")
+    end
+
+    it "mutating cloned event's metadata should not affect the original event metadata" do
+      cloned = event1.clone
+      cloned.set("[@metadata][fancy]", "foobar")
+      expect(cloned.get("[@metadata][fancy]")).to eq("foobar")
+      expect(event1.get("[@metadata][fancy]")).to eq("pants")
+    end
+  end
 end
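
Note: taken together, these specs assert that `Event#clone` copies `@metadata` (including nested structures) and that the clone is independent of the original. Condensed from the examples above:

    event = LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => "pants" })
    copy  = event.clone

    copy.set("[@metadata][fancy]", "foobar")
    copy.get("[@metadata][fancy]")    # => "foobar"
    event.get("[@metadata][fancy]")   # => "pants"   -- the original event is unchanged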