logstash-filter-translate 3.1.0 → 3.2.0

@@ -0,0 +1,33 @@
+ # encoding: utf-8
+
+ module LogStash module Filters
+   class SingleValueUpdate
+     def initialize(field, destination, fallback, lookup)
+       @field = field
+       @destination = destination
+       @fallback = fallback
+       @use_fallback = !fallback.nil? # fallback is not nil, the user set a value in the config
+       @lookup = lookup
+     end
+
+     def test_for_inclusion(event, override)
+       # Skip translation in case @destination field already exists and @override is disabled.
+       return false if event.include?(@destination) && !override
+       event.include?(@field)
+     end
+
+     def update(event)
+       # If source field is array use first value and make sure source value is string
+       source = Array(event.get(@field)).first.to_s
+       matched = [true, nil]
+       @lookup.fetch_strategy.fetch(source, matched)
+       if matched.first
+         event.set(@destination, matched.last)
+       elsif @use_fallback
+         event.set(@destination, event.sprintf(@fallback))
+         matched[0] = true
+       end
+       return matched.first
+     end
+   end
+ end end
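
For orientation, here is a minimal sketch of the contract that `SingleValueUpdate#update` relies on: `@lookup.fetch_strategy` must respond to `fetch(source, result)` and mutate the two-element `[matched, value]` array in place. The stub classes below are hypothetical stand-ins for the real strategies under `logstash/filters/dictionary/`, which are not shown in this diff; only the result-array convention is taken from the code above.

```ruby
# Hypothetical stub of the fetch-strategy protocol used by SingleValueUpdate.
# The class names and Hash-backed lookup are invented for illustration.
class StubFetchStrategy
  def initialize(dictionary)
    @dictionary = dictionary
  end

  # result is a two-element array: result[0] => matched?, result[1] => value.
  # The strategy mutates it in place rather than returning anything.
  def fetch(source, result)
    if @dictionary.key?(source)
      result[0] = true
      result[1] = @dictionary[source]
    else
      result[0] = false
      result[1] = nil
    end
  end
end

class StubLookup
  attr_reader :fetch_strategy

  def initialize(dictionary)
    @fetch_strategy = StubFetchStrategy.new(dictionary)
  end
end

# SingleValueUpdate.new("status", "translation", nil, StubLookup.new("200" => "OK"))
# would then set event's "translation" field to "OK" for an event whose "status" is "200".
```
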
@@ -1,14 +1,18 @@
  # encoding: utf-8
  require "logstash/filters/base"
  require "logstash/namespace"
- require "json"
- require "csv"
-
- java_import 'java.util.concurrent.locks.ReentrantReadWriteLock'

+ require "logstash/filters/dictionary/memory"
+ require "logstash/filters/dictionary/file"
+ require "logstash/filters/dictionary/csv_file"
+ require "logstash/filters/dictionary/yaml_file"
+ require "logstash/filters/dictionary/json_file"

+ require_relative "single_value_update"
+ require_relative "array_of_values_update"
+ require_relative "array_of_maps_value_update"
  # A general search and replace tool that uses a configured hash
- # and/or a file to determine replacement values. Currently supported are
+ # and/or a file to determine replacement values. Currently supported are
  # YAML, JSON, and CSV files.
  #
  # The dictionary entries can be specified in one of two ways: First,
@@ -22,21 +26,23 @@ java_import 'java.util.concurrent.locks.ReentrantReadWriteLock'
  # `regex` configuration item has been enabled), the field's value will be substituted
  # with the matched key's value from the dictionary.
  #
- # By default, the translate filter will replace the contents of the
+ # By default, the translate filter will replace the contents of the
  # maching event field (in-place). However, by using the `destination`
  # configuration item, you may also specify a target event field to
  # populate with the new translated value.
- #
+ #
  # Alternatively, for simple string search and replacements for just a few values
  # you might consider using the gsub function of the mutate filter.
-
- class LogStash::Filters::Translate < LogStash::Filters::Base
+ module LogStash module Filters
+ class Translate < LogStash::Filters::Base
    config_name "translate"

    # The name of the logstash event field containing the value to be compared for a
-   # match by the translate filter (e.g. `message`, `host`, `response_code`).
-   #
-   # If this field is an array, only the first value will be used.
+   # match by the translate filter (e.g. `message`, `host`, `response_code`).
+   #
+   # If this field is an array, only the first value will be used, unless
+   # you specify `iterate_on`. See below. If you want to use another element
+   # in the array then use `"[some_field][2]"`
    config :field, :validate => :string, :required => true

    # If the destination (or target) field already exists, this configuration item specifies
@@ -63,8 +69,8 @@ class LogStash::Filters::Translate < LogStash::Filters::Base

    # The full path of the external dictionary file. The format of the table should
    # be a standard YAML, JSON or CSV with filenames ending in `.yaml`, `.yml`,
-   #`.json` or `.csv` to be read. Make sure you specify any integer-based keys in
-   # quotes. For example, the YAML file (`.yaml` or `.yml` should look something like
+   #`.json` or `.csv` to be read. Make sure you specify any integer-based keys in
+   # quotes. For example, the YAML file (`.yaml` or `.yml` should look something like
    # this:
    # [source,ruby]
    #     "100": Continue
@@ -88,7 +94,7 @@ class LogStash::Filters::Translate < LogStash::Filters::Base
    # The destination field you wish to populate with the translated code. The default
    # is a field named `translation`. Set this to the same value as source if you want
    # to do a substitution, in this case filter will allways succeed. This will clobber
-   # the old value of the source field!
+   # the old value of the source field!
    config :destination, :validate => :string, :default => "translation"

    # When `exact => true`, the translate filter will populate the destination field
@@ -108,10 +114,10 @@ class LogStash::Filters::Translate < LogStash::Filters::Base
    # set to `foofing`, the destination field will be set to `barfing`.
    #
    # Set both `exact => true` AND `regex => `true` if you would like to match using dictionary
-   # keys as regular expressions. A large dictionary could be expensive to match in this case.
+   # keys as regular expressions. A large dictionary could be expensive to match in this case.
    config :exact, :validate => :boolean, :default => true

-   # If you'd like to treat dictionary keys as regular expressions, set `exact => true`.
+   # If you'd like to treat dictionary keys as regular expressions, set `regex => true`.
    # Note: this is activated only when `exact => true`.
    config :regex, :validate => :boolean, :default => false

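The doc fix above (`regex => true`) is easier to follow with a concrete picture of the three matching modes. The sketch below is illustrative only and works against a plain Ruby Hash; it mirrors the in-filter lookup logic that this release removes (visible further down in this diff), not the new fetch-strategy classes.

```ruby
# Illustrative sketch of the three matching modes, using a plain Hash.
dictionary = { "200" => "OK", "5\\d\\d" => "server error" }

# exact => true, regex => false: the source value must equal a key.
dictionary["200"]                                        # => "OK"

# exact => true, regex => true: keys are treated as regular expressions.
key = dictionary.keys.detect { |k| "503".match(Regexp.new(k)) }
dictionary[key]                                          # => "server error"

# exact => false: every literal key occurrence inside the source string is replaced.
"status=200 retried".gsub(Regexp.union(dictionary.keys), dictionary)
# => "status=OK retried"
```
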
@@ -133,11 +139,20 @@ class LogStash::Filters::Translate < LogStash::Filters::Base
    # deletes old entries on update.
    config :refresh_behaviour, :validate => ['merge', 'replace'], :default => 'merge'

+   # When the value that you need to perform enrichment on is a variable sized array then specify
+   # the field name in this setting. This setting introduces two modes, 1) when the value is an
+   # array of strings and 2) when the value is an array of objects (as in JSON object).
+   # In the first mode, you should have the same field name in both `field` and `iterate_on`, the
+   # result will be an array added to the field specified in the `destination` setting. This array
+   # will have the looked up value (or the `fallback` value or nil) in same ordinal position
+   # as each sought value. In the second mode, specify the field that has the array of objects
+   # then specify the field in each object that provides the sought value with `field` and
+   # the field to write the looked up value (or the `fallback` value) to with `destination`
+   config :iterate_on, :validate => :string
+
+   attr_reader :lookup # for testing reloading
+
    def register
-     rw_lock = java.util.concurrent.locks.ReentrantReadWriteLock.new
-     @read_lock = rw_lock.readLock
-     @write_lock = rw_lock.writeLock
-
      if @dictionary_path && !@dictionary.empty?
        raise LogStash::ConfigurationError, I18n.t(
          "logstash.agent.configuration.invalid_plugin_register",
@@ -148,153 +163,37 @@ class LogStash::Filters::Translate < LogStash::Filters::Base
      end

      if @dictionary_path
-       @next_refresh = Time.now + @refresh_interval
-       raise_exception = true
-       lock_for_write { load_dictionary(raise_exception) }
+       @lookup = Dictionary::File.create(@dictionary_path, @refresh_interval, @refresh_behaviour, @exact, @regex)
+     else
+       @lookup = Dictionary::Memory.new(@dictionary, @exact, @regex)
+     end
+     if @iterate_on.nil?
+       @updater = SingleValueUpdate.new(@field, @destination, @fallback, @lookup)
+     elsif @iterate_on == @field
+       @updater = ArrayOfValuesUpdate.new(@iterate_on, @destination, @fallback, @lookup)
+     else
+       @updater = ArrayOfMapsValueUpdate.new(@iterate_on, @field, @destination, @fallback, @lookup)
      end

-     @logger.debug? and @logger.debug("#{self.class.name}: Dictionary - ", :dictionary => @dictionary)
+     @logger.debug? && @logger.debug("#{self.class.name}: Dictionary - ", :dictionary => @lookup.dictionary)
      if @exact
-       @logger.debug? and @logger.debug("#{self.class.name}: Dictionary translation method - Exact")
+       @logger.debug? && @logger.debug("#{self.class.name}: Dictionary translation method - Exact")
      else
-       @logger.debug? and @logger.debug("#{self.class.name}: Dictionary translation method - Fuzzy")
+       @logger.debug? && @logger.debug("#{self.class.name}: Dictionary translation method - Fuzzy")
      end
    end # def register

-   def lock_for_read
-     @read_lock.lock
-     begin
-       yield
-     ensure
-       @read_lock.unlock
-     end
-   end
-
-   def lock_for_write
-     @write_lock.lock
-     begin
-       yield
-     ensure
-       @write_lock.unlock
-     end
+   def close
+     @lookup.stop_scheduler
    end

    def filter(event)
-     if @dictionary_path
-       if needs_refresh?
-         lock_for_write do
-           if needs_refresh?
-             load_dictionary
-             @next_refresh = Time.now + @refresh_interval
-             @logger.info("refreshing dictionary file")
-           end
-         end
-       end
-     end
-
-     return unless event.include?(@field) # Skip translation in case event does not have @event field.
-     return if event.include?(@destination) and not @override # Skip translation in case @destination field already exists and @override is disabled.
-
+     return unless @updater.test_for_inclusion(event, @override)
      begin
-       #If source field is array use first value and make sure source value is string
-       source = event.get(@field).is_a?(Array) ? event.get(@field).first.to_s : event.get(@field).to_s
-       matched = false
-       if @exact
-         if @regex
-           key = @dictionary.keys.detect{|k| source.match(Regexp.new(k))}
-           if key
-             event.set(@destination, lock_for_read { @dictionary[key] })
-             matched = true
-           end
-         elsif @dictionary.include?(source)
-           event.set(@destination, lock_for_read { @dictionary[source] })
-           matched = true
-         end
-       else
-         translation = lock_for_read { source.gsub(Regexp.union(@dictionary.keys), @dictionary) }
-
-         if source != translation
-           event.set(@destination, translation.force_encoding(Encoding::UTF_8))
-           matched = true
-         end
-       end
-
-       if not matched and @fallback
-         event.set(@destination, event.sprintf(@fallback))
-         matched = true
-       end
-       filter_matched(event) if matched or @field == @destination
+       filter_matched(event) if @updater.update(event) || @field == @destination
      rescue Exception => e
        @logger.error("Something went wrong when attempting to translate from dictionary", :exception => e, :field => @field, :event => event)
      end
    end # def filter
-
-   private
-
-   def load_dictionary(raise_exception=false)
-     if /.y[a]?ml$/.match(@dictionary_path)
-       load_yaml(raise_exception)
-     elsif @dictionary_path.end_with?(".json")
-       load_json(raise_exception)
-     elsif @dictionary_path.end_with?(".csv")
-       load_csv(raise_exception)
-     else
-       raise "#{self.class.name}: Dictionary #{@dictionary_path} have a non valid format"
-     end
-   rescue => e
-     loading_exception(e, raise_exception)
-   end
-
-   def load_yaml(raise_exception=false)
-     if !File.exists?(@dictionary_path)
-       @logger.warn("dictionary file read failure, continuing with old dictionary", :path => @dictionary_path)
-       return
-     end
-     refresh_dictionary!(YAML.load_file(@dictionary_path))
-   end
-
-   def load_json(raise_exception=false)
-     if !File.exists?(@dictionary_path)
-       @logger.warn("dictionary file read failure, continuing with old dictionary", :path => @dictionary_path)
-       return
-     end
-     refresh_dictionary!(JSON.parse(File.read(@dictionary_path)))
-   end
-
-   def load_csv(raise_exception=false)
-     if !File.exists?(@dictionary_path)
-       @logger.warn("dictionary file read failure, continuing with old dictionary", :path => @dictionary_path)
-       return
-     end
-     data = CSV.read(@dictionary_path).inject(Hash.new) do |acc, v|
-       acc[v[0]] = v[1]
-       acc
-     end
-     refresh_dictionary!(data)
-   end
-
-   def refresh_dictionary!(data)
-     case @refresh_behaviour
-     when 'merge'
-       @dictionary.merge!(data)
-     when 'replace'
-       @dictionary = data
-     else
-       # we really should never get here
-       raise(LogStash::ConfigurationError, "Unknown value for refresh_behaviour=#{@refresh_behaviour.to_s}")
-     end
-   end
-
-   def loading_exception(e, raise_exception=false)
-     msg = "#{self.class.name}: #{e.message} when loading dictionary file at #{@dictionary_path}"
-     if raise_exception
-       raise RuntimeError.new(msg)
-     else
-       @logger.warn("#{msg}, continuing with old dictionary", :dictionary_path => @dictionary_path)
-     end
-   end
-
-   def needs_refresh?
-     @next_refresh < Time.now
-   end
  end # class LogStash::Filters::Translate
+ end end
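
As an aside on the new `iterate_on` option documented in the hunks above: the two modes are easier to visualise with an example. The sketch below is purely illustrative; the field names, dictionary contents and event shapes are invented, and the config is written as a Ruby hash in the same style the new specs use.

```ruby
# Hypothetical illustration of the two iterate_on modes (field names invented).

# Mode 1: iterate_on names the same field as `field`, i.e. an array of strings.
config_array_of_strings = {
  "field"       => "[tags]",
  "iterate_on"  => "[tags]",
  "destination" => "[tag_codes]",
  "dictionary"  => { "a" => "1", "b" => "2" },
  "fallback"    => "unknown"
}
# An event with   tags => ["a", "x", "b"]
# should end with tag_codes => ["1", "unknown", "2"]
# (one looked-up value, or the fallback, per ordinal position).

# Mode 2: iterate_on names an array of objects; `field` and `destination`
# name the keys read from and written to inside each object.
config_array_of_maps = {
  "iterate_on"  => "[answers]",
  "field"       => "code",
  "destination" => "label",
  "dictionary"  => { "200" => "OK", "404" => "Not Found" }
}
# An event with   answers => [{ "code" => "200" }, { "code" => "404" }]
# should end with answers => [{ "code" => "200", "label" => "OK" },
#                             { "code" => "404", "label" => "Not Found" }]
```
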
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|

    s.name = 'logstash-filter-translate'
-   s.version = '3.1.0'
+   s.version = '3.2.0'
    s.licenses = ['Apache License (2.0)']
    s.summary = "Replaces field contents based on a hash or YAML file"
    s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -21,7 +21,11 @@ Gem::Specification.new do |s|

    # Gem dependencies
    s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+   s.add_runtime_dependency 'rufus-scheduler'

    s.add_development_dependency 'logstash-devutils'
+   s.add_development_dependency 'rspec-sequencing'
+   s.add_development_dependency "rspec-wait"
+   s.add_development_dependency "benchmark-ips"
  end

@@ -0,0 +1,69 @@
+ # encoding: utf-8
+ require "logstash/devutils/rspec/spec_helper"
+
+ require "logstash/filters/translate"
+ require "benchmark/ips"
+
+ module BenchmarkingFileBuilder
+   def self.create_huge_csv_dictionary(directory, name, size)
+     tmppath = directory.join("temp_big.csv")
+     tmppath.open("w") do |file|
+       file.puts("foo,#{SecureRandom.hex(4)}")
+       file.puts("bar,#{SecureRandom.hex(4)}")
+       size.times do |i|
+         file.puts("#{SecureRandom.hex(12)},#{1000000 + i}")
+       end
+       file.puts("baz,quux")
+     end
+     tmppath.rename(directory.join(name))
+   end
+ end
+
+ describe LogStash::Filters::Translate do
+   let(:directory) { Pathname.new(Stud::Temporary.directory) }
+   let(:dictionary_name) { "dict-h.csv" }
+   let(:dictionary_path) { directory.join(dictionary_name) }
+   let(:dictionary_size) { 100000 }
+   let(:config) do
+     {
+       "field" => "[status]",
+       "destination" => "[translation]",
+       "dictionary_path" => dictionary_path.to_path,
+       "exact" => true,
+       "regex" => false,
+       "refresh_interval" => 0,
+       "override" => true,
+       "refresh_behaviour" => "merge"
+     }
+   end
+   before do
+     directory
+     BenchmarkingFileBuilder.create_huge_csv_dictionary(directory, dictionary_name, dictionary_size)
+   end
+
+   it 'how fast is the filter method?' do
+     plugin = described_class.new(config)
+     plugin.register
+     event = LogStash::Event.new("status" => "baz", "translation" => "foo")
+
+     Benchmark.ips do |x|
+       x.config(:time => 20, :warmup => 120)
+       x.report("filter(event)") { plugin.filter(event) }
+     end
+     expect(event.get("[translation]")).to eq("quux")
+   end
+
+   it 'how fast is the new, register then filter method?' do
+     event = LogStash::Event.new("status" => "baz", "translation" => "foo")
+
+     Benchmark.ips do |x|
+       x.config(:time => 10, :warmup => 120)
+       x.report("new, register, filter(event)") do
+         plugin = described_class.new(config)
+         plugin.register
+         plugin.filter(event)
+       end
+     end
+     expect(event.get("[translation]")).to eq("quux")
+   end
+ end
@@ -0,0 +1,200 @@
+ # encoding: utf-8
+ require "logstash/devutils/rspec/spec_helper"
+ require "support/rspec_wait_handler_helper"
+ require "support/build_huge_dictionaries"
+
+ require "rspec_sequencing"
+
+ require "logstash/filters/translate"
+
+ describe LogStash::Filters::Translate do
+   let(:directory) { Pathname.new(Stud::Temporary.directory) }
+   describe "scheduled reloading" do
+     subject { described_class.new(config) }
+
+     let(:config) do
+       {
+         "field" => "[status]",
+         "destination" => "[translation]",
+         "dictionary_path" => dictionary_path.to_path,
+         "exact" => true,
+         "regex" => false,
+         "refresh_interval" => 1,
+         "override" => true,
+         "refresh_behaviour" => refresh_behaviour
+       }
+     end
+
+     let(:event) { LogStash::Event.new("status" => "b") }
+
+     before do
+       directory
+       wait(1.0).for{Dir.exist?(directory)}.to eq(true)
+       dictionary_path.open("wb") do |file|
+         file.puts("a,1\nb,2\nc,3\n")
+       end
+       subject.register
+     end
+
+     after do
+       FileUtils.rm_rf(directory)
+       wait(1.0).for{Dir.exist?(directory)}.to eq(false)
+     end
+
+     context "replace" do
+       let(:dictionary_path) { directory.join("dict-r.csv") }
+       let(:refresh_behaviour) { "replace" }
+       let(:actions) do
+         RSpec::Sequencing
+           .run("translate") do
+             subject.filter(event)
+             wait(0.1).for{event.get("[translation]")}.to eq("2"), "field [translation] did not eq '2'"
+           end
+           .then_after(1,"modify file") do
+             dictionary_path.open("w") do |file|
+               file.puts("a,11\nb,12\nc,13\n")
+             end
+           end
+           .then_after(1.2, "wait then translate again") do
+             subject.filter(event)
+             wait(0.1).for{event.get("[translation]")}.to eq("12"), "field [translation] did not eq '12'"
+           end
+           .then("stop") do
+             subject.close
+           end
+       end
+
+       it "updates the event after scheduled reload" do
+         actions.activate_quietly
+         actions.assert_no_errors
+       end
+     end
+
+     context "merge" do
+       let(:dictionary_path) { directory.join("dict-m.csv") }
+       let(:refresh_behaviour) { "merge" }
+       let(:actions) do
+         RSpec::Sequencing
+           .run("translate") do
+             subject.filter(event)
+             wait(0.1).for{event.get("[translation]")}.to eq("2"), "field [translation] did not eq '2'"
+           end
+           .then_after(1,"modify file") do
+             dictionary_path.open("w") do |file|
+               file.puts("a,21\nb,22\nc,23\n")
+             end
+           end
+           .then_after(1.2, "wait then translate again") do
+             subject.filter(event)
+             wait(0.1).for{event.get("[translation]")}.to eq("22"), "field [translation] did not eq '22'"
+           end
+           .then("stop") do
+             subject.close
+           end
+       end
+
+       it "updates the event after scheduled reload" do
+         actions.activate_quietly
+         actions.assert_no_errors
+       end
+     end
+   end
+
+   describe "huge json file merge" do
+     let(:dictionary_path) { directory.join("dict-h.json") }
+     let(:dictionary_size) { 100000 }
+     let(:config) do
+       {
+         "field" => "[status]",
+         "destination" => "[translation]",
+         "dictionary_path" => dictionary_path.to_path,
+         "exact" => true,
+         "regex" => false,
+         "refresh_interval" => 1,
+         "override" => true,
+         "refresh_behaviour" => "merge"
+       }
+     end
+     let(:event) { LogStash::Event.new("status" => "baz", "translation" => "foo") }
+     subject { described_class.new(config) }
+
+     before do
+       directory
+       wait(1.0).for{Dir.exist?(directory)}.to eq(true)
+       LogStash::Filters::Dictionary.create_huge_json_dictionary(directory, "dict-h.json", dictionary_size)
+       subject.register
+     end
+
+     let(:actions) do
+       RSpec::Sequencing
+         .run("translate") do
+           subject.filter(event)
+           wait(0.1).for{event.get("[translation]")}.not_to eq("foo"), "field [translation] should not be 'foo'"
+         end
+         .then_after(0.1,"modify file") do
+           LogStash::Filters::Dictionary.create_huge_json_dictionary(directory, "dict-h.json", dictionary_size)
+         end
+         .then_after(1.8, "wait then translate again") do
+           subject.filter(event)
+           wait(0.1).for{event.get("[translation]")}.not_to eq("foo"), "field [translation] should not be 'foo'"
+         end
+         .then("stop") do
+           subject.close
+         end
+     end
+
+     it "updates the event after scheduled reload" do
+       actions.activate_quietly
+       actions.assert_no_errors
+     end
+   end
+
+   describe "huge csv file merge" do
+     let(:dictionary_path) { directory.join("dict-h.csv") }
+     let(:dictionary_size) { 100000 }
+     let(:config) do
+       {
+         "field" => "[status]",
+         "destination" => "[translation]",
+         "dictionary_path" => dictionary_path.to_path,
+         "exact" => true,
+         "regex" => false,
+         "refresh_interval" => 1,
+         "override" => true,
+         "refresh_behaviour" => "merge"
+       }
+     end
+     let(:event) { LogStash::Event.new("status" => "bar", "translation" => "foo") }
+     subject { described_class.new(config) }
+
+     before do
+       directory
+       wait(1.0).for{Dir.exist?(directory)}.to eq(true)
+       LogStash::Filters::Dictionary.create_huge_csv_dictionary(directory, "dict-h.csv", dictionary_size)
+       subject.register
+     end
+
+     let(:actions) do
+       RSpec::Sequencing
+         .run("translate") do
+           subject.filter(event)
+           wait(0.1).for{event.get("[translation]")}.not_to eq("foo"), "field [translation] should not be 'foo'"
+         end
+         .then_after(0.1,"modify file") do
+           LogStash::Filters::Dictionary.create_huge_csv_dictionary(directory, "dict-h.csv", dictionary_size)
+         end
+         .then_after(1.8, "wait then translate again") do
+           subject.filter(event)
+           wait(0.1).for{event.get("[translation]")}.not_to eq("foo"), "field [translation] should not be 'foo'"
+         end
+         .then("stop") do
+           subject.close
+         end
+     end
+
+     it "updates the event after scheduled reload" do
+       actions.activate_quietly
+       actions.assert_no_errors
+     end
+   end
+ end