logstash-input-file 4.1.18 → 4.2.4

Files changed (37)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +16 -0
  3. data/README.md +1 -1
  4. data/lib/filewatch/discoverer.rb +9 -8
  5. data/lib/filewatch/observing_base.rb +1 -12
  6. data/lib/filewatch/processor.rb +55 -0
  7. data/lib/filewatch/read_mode/handlers/base.rb +12 -11
  8. data/lib/filewatch/read_mode/handlers/read_file.rb +23 -8
  9. data/lib/filewatch/read_mode/handlers/read_zip_file.rb +8 -6
  10. data/lib/filewatch/read_mode/processor.rb +22 -36
  11. data/lib/filewatch/settings.rb +1 -2
  12. data/lib/filewatch/sincedb_collection.rb +40 -41
  13. data/lib/filewatch/sincedb_record_serializer.rb +5 -11
  14. data/lib/filewatch/stat/generic.rb +8 -13
  15. data/lib/filewatch/stat/windows_path.rb +7 -9
  16. data/lib/filewatch/tail_mode/handlers/base.rb +32 -23
  17. data/lib/filewatch/tail_mode/handlers/delete.rb +2 -4
  18. data/lib/filewatch/tail_mode/handlers/shrink.rb +2 -3
  19. data/lib/filewatch/tail_mode/handlers/unignore.rb +4 -4
  20. data/lib/filewatch/tail_mode/processor.rb +47 -54
  21. data/lib/filewatch/watch.rb +12 -14
  22. data/lib/filewatch/watched_file.rb +25 -14
  23. data/lib/filewatch/watched_files_collection.rb +11 -78
  24. data/lib/jars/filewatch-1.0.1.jar +0 -0
  25. data/lib/logstash/inputs/file.rb +4 -3
  26. data/lib/logstash/inputs/file_listener.rb +3 -14
  27. data/logstash-input-file.gemspec +2 -1
  28. data/spec/filewatch/reading_spec.rb +63 -12
  29. data/spec/filewatch/rotate_spec.rb +4 -4
  30. data/spec/filewatch/settings_spec.rb +3 -0
  31. data/spec/filewatch/sincedb_record_serializer_spec.rb +6 -2
  32. data/spec/filewatch/spec_helper.rb +12 -14
  33. data/spec/filewatch/tailing_spec.rb +24 -22
  34. data/spec/filewatch/watched_file_spec.rb +30 -0
  35. data/spec/filewatch/watched_files_collection_spec.rb +62 -8
  36. data/spec/inputs/file_read_spec.rb +58 -14
  37. metadata +17 -2
data/lib/jars/filewatch-1.0.1.jar: binary file (no textual diff shown)

data/lib/logstash/inputs/file.rb

@@ -332,8 +332,9 @@ class File < LogStash::Inputs::Base
     @completely_stopped.true?
   end
 
+  # The WatchedFile calls back here as `observer.listener_for(@path)`
+  # @param [String] path the identity
   def listener_for(path)
-    # path is the identity
     FileListener.new(path, self)
   end
 
@@ -376,12 +377,12 @@ class File < LogStash::Inputs::Base
   def handle_deletable_path(path)
     return if tail_mode?
     return if @completed_file_handlers.empty?
+    @logger.debug? && @logger.debug(__method__.to_s, :path => path)
     @completed_file_handlers.each { |handler| handler.handle(path) }
   end
 
   def log_line_received(path, line)
-    return unless @logger.debug?
-    @logger.debug("Received line", :path => path, :text => line)
+    @logger.debug? && @logger.debug("Received line", :path => path, :text => line)
   end
 
   def stop
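
The logging change above folds the guard and the call into one expression: because && short-circuits, the debug call and its argument construction are skipped entirely whenever debug logging is disabled, matching the old "return unless @logger.debug?" guard in a single line. A minimal standalone sketch of the pattern, using Ruby's stdlib Logger as a stand-in for the plugin's logger:

    # Short-circuit debug guard: the right-hand side never runs at INFO level.
    require 'logger'

    logger = Logger.new($stdout)
    logger.level = Logger::INFO

    def expensive_payload
      sleep(0.1)                                   # stands in for costly string building
      { :path => "/var/log/example.log" }.inspect
    end

    # expensive_payload is only evaluated when logger.debug? is true.
    logger.debug? && logger.debug("Received line #{expensive_payload}")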

data/lib/logstash/inputs/file_listener.rb

@@ -7,9 +7,9 @@ module LogStash module Inputs
   class FileListener
     attr_reader :input, :path, :data
     # construct with link back to the input plugin instance.
-    def initialize(path, input)
+    def initialize(path, input, data = nil)
       @path, @input = path, input
-      @data = nil
+      @data = data
     end
 
     def opened
@@ -36,7 +36,7 @@ module LogStash module Inputs
     def accept(data)
       # and push transient data filled dup listener downstream
       input.log_line_received(path, data)
-      input.codec.accept(dup_adding_state(data))
+      input.codec.accept(self.class.new(path, input, data))
     end
 
     def process_event(event)
@@ -45,17 +45,6 @@ module LogStash module Inputs
       input.post_process_this(event)
     end
 
-    def add_state(data)
-      @data = data
-      self
-    end
-
-    private
-
-    # duplicate and add state for downstream
-    def dup_adding_state(line)
-      self.class.new(path, input).add_state(line)
-    end
   end
 
   class FlushableListener < FileListener
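
With the listener taking its per-line data through the constructor, the accept path no longer needs the duplicate-then-mutate helpers (dup_adding_state / add_state): each object handed to the codec is fully formed when it is created. A simplified before/after sketch with stand-in classes, not the plugin's real FileListener:

    # Stand-in classes illustrating the refactor, not the actual plugin code.
    class OldListener
      attr_reader :path, :data
      def initialize(path)
        @path = path
        @data = nil
      end

      # old shape: duplicate, then mutate the copy
      def add_state(data)
        @data = data
        self
      end
    end

    class NewListener
      attr_reader :path, :data
      # new shape: state is injected at construction, no later mutation
      def initialize(path, data = nil)
        @path = path
        @data = data
      end
    end

    old_style = OldListener.new("/var/log/app.log").add_state("a line")
    new_style = NewListener.new("/var/log/app.log", "a line")
    puts old_style.data == new_style.data # => true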

data/logstash-input-file.gemspec

@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-input-file'
-  s.version = '4.1.18'
+  s.version = '4.2.4'
   s.licenses = ['Apache-2.0']
   s.summary = "Streams events from files"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -31,6 +31,7 @@ Gem::Specification.new do |s|
     s.add_runtime_dependency 'addressable'
   end
 
+  s.add_runtime_dependency 'concurrent-ruby', '~> 1.0'
   s.add_runtime_dependency 'logstash-codec-multiline', ['~> 3.0']
 
   s.add_development_dependency 'stud', ['~> 0.0.19']
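
concurrent-ruby was already in use by this code (for example Concurrent::Array and Concurrent::Hash in the spec helper further down), presumably arriving transitively via Logstash core; the gemspec now declares it explicitly. The '~> 1.0' requirement is RubyGems' pessimistic constraint: any 1.x release qualifies, 2.0 does not. A quick check of how such a constraint resolves:

    # Pessimistic version constraint: '~> 1.0' means >= 1.0 and < 2.0.
    require 'rubygems'

    requirement = Gem::Requirement.new('~> 1.0')
    puts requirement.satisfied_by?(Gem::Version.new('1.1.10')) # => true
    puts requirement.satisfied_by?(Gem::Version.new('2.0.0'))  # => false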

data/spec/filewatch/reading_spec.rb

@@ -23,9 +23,12 @@ module FileWatch
     let(:start_new_files_at) { :end } # should be irrelevant for read mode
     let(:opts) do
       {
-        :stat_interval => stat_interval, :start_new_files_at => start_new_files_at,
-        :delimiter => "\n", :discover_interval => discover_interval,
-        :ignore_older => 3600, :sincedb_path => sincedb_path
+        :stat_interval => stat_interval,
+        :start_new_files_at => start_new_files_at,
+        :delimiter => "\n",
+        :discover_interval => discover_interval,
+        :ignore_older => 3600,
+        :sincedb_path => sincedb_path
       }
     end
     let(:observer) { TestObserver.new }
@@ -88,7 +91,7 @@ module FileWatch
     context "when watching a directory with files using striped reading" do
       let(:file_path2) { ::File.join(directory, "2.log") }
       # use a chunk size that does not align with the line boundaries
-      let(:opts) { super.merge(:file_chunk_size => 10, :file_chunk_count => 1, :file_sort_by => "path")}
+      let(:opts) { super().merge(:file_chunk_size => 10, :file_chunk_count => 1, :file_sort_by => "path")}
       let(:lines) { [] }
       let(:observer) { TestObserver.new(lines) }
       let(:listener2) { observer.listener_for(file_path2) }
@@ -118,7 +121,7 @@ module FileWatch
     end
 
     context "when a non default delimiter is specified and it is not in the content" do
-      let(:opts) { super.merge(:delimiter => "\nø") }
+      let(:opts) { super().merge(:delimiter => "\nø") }
       let(:actions) do
         RSpec::Sequencing.run("create file") do
           File.open(file_path, "wb") { |file| file.write("line1\nline2") }
@@ -147,8 +150,52 @@ module FileWatch
       end
     end
 
+    context "when watching directory with files and adding a new file" do
+      let(:file_path2) { ::File.join(directory, "2.log") }
+      let(:file_path3) { ::File.join(directory, "3.log") }
+
+      let(:opts) { super().merge(:file_sort_by => "last_modified") }
+      let(:lines) { [] }
+      let(:observer) { TestObserver.new(lines) }
+
+
+      let(:listener2) { observer.listener_for(file_path2) }
+      let(:listener3) { observer.listener_for(file_path3) }
+
+      let(:actions) do
+        RSpec::Sequencing.run("create12") do
+          File.open(file_path, "w") { |file| file.write("string11\nstring12") }
+          File.open(file_path2, "w") { |file| file.write("string21\nstring22") }
+        end
+        .then("watch") do
+          reading.watch_this(watch_dir)
+        end
+        .then("wait12") do
+          wait(2).for { listener1.calls.last == :delete && listener2.calls.last == :delete }.to eq(true)
+        end
+        .then_after(2, "create3") do
+          File.open(file_path3, "w") { |file| file.write("string31\nstring32") }
+        end
+        .then("wait3") do
+          wait(2).for { listener3.calls.last == :delete }.to eq(true)
+        end
+        .then("quit") do
+          reading.quit
+        end
+      end
+
+      it "reads all (3) files" do
+        actions.activate_quietly
+        reading.subscribe(observer)
+        actions.assert_no_errors
+        expect(lines.last).to eq 'string32'
+        expect(lines.sort).to eq %w(string11 string12 string21 string22 string31 string32)
+        expect( reading.watch.watched_files_collection.paths ).to eq [ file_path, file_path2, file_path3 ]
+      end
+    end
+
     context "when watching a directory with files using exit_after_read" do
-      let(:opts) { super.merge(:exit_after_read => true, :max_open_files => 2) }
+      let(:opts) { super().merge(:exit_after_read => true, :max_open_files => 2) }
       let(:file_path3) { ::File.join(directory, "3.log") }
       let(:file_path4) { ::File.join(directory, "4.log") }
       let(:file_path5) { ::File.join(directory, "5.log") }
@@ -159,40 +206,45 @@ module FileWatch
       let(:listener6) { observer.listener_for(file_path6) }
 
       it "the file is read" do
-         File.open(file_path3, "w") { |file| file.write("line1\nline2\n") }
+        File.open(file_path3, "w") { |file| file.write("line1\nline2\n") }
        reading.watch_this(watch_dir)
        reading.subscribe(observer)
        expect(listener3.lines).to eq(["line1", "line2"])
      end
+
      it "multiple files are read" do
-         File.open(file_path3, "w") { |file| file.write("line1\nline2\n") }
+        File.open(file_path3, "w") { |file| file.write("line1\nline2\n") }
        File.open(file_path4, "w") { |file| file.write("line3\nline4\n") }
        reading.watch_this(watch_dir)
        reading.subscribe(observer)
        expect(listener3.lines.sort).to eq(["line1", "line2", "line3", "line4"])
      end
+
      it "multiple files are read even if max_open_files is smaller then number of files" do
-         File.open(file_path3, "w") { |file| file.write("line1\nline2\n") }
+        File.open(file_path3, "w") { |file| file.write("line1\nline2\n") }
        File.open(file_path4, "w") { |file| file.write("line3\nline4\n") }
        File.open(file_path5, "w") { |file| file.write("line5\nline6\n") }
        reading.watch_this(watch_dir)
        reading.subscribe(observer)
        expect(listener3.lines.sort).to eq(["line1", "line2", "line3", "line4", "line5", "line6"])
      end
+
      it "file as marked as reading_completed" do
-         File.open(file_path3, "w") { |file| file.write("line1\nline2\n") }
+        File.open(file_path3, "w") { |file| file.write("line1\nline2\n") }
        reading.watch_this(watch_dir)
        reading.subscribe(observer)
        expect(listener3.calls).to eq([:open, :accept, :accept, :eof, :delete, :reading_completed])
      end
+
      it "sincedb works correctly" do
-         File.open(file_path3, "w") { |file| file.write("line1\nline2\n") }
+        File.open(file_path3, "w") { |file| file.write("line1\nline2\n") }
        reading.watch_this(watch_dir)
        reading.subscribe(observer)
        sincedb_record_fields = File.read(sincedb_path).split(" ")
        position_field_index = 3
        expect(sincedb_record_fields[position_field_index]).to eq("12")
      end
+
      it "does not include new files added after start" do
        File.open(file_path3, "w") { |file| file.write("line1\nline2\n") }
        reading.watch_this(watch_dir)
@@ -201,7 +253,6 @@ module FileWatch
        expect(listener3.lines).to eq(["line1", "line2"])
        expect(listener3.calls).to eq([:open, :accept, :accept, :eof, :delete, :reading_completed])
        expect(listener6.calls).to eq([])
-
      end
 
     end
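
The recurring super -> super() edit in this and the following spec files is required, not stylistic: RSpec implements let with define_method, and Ruby does not support implicit (bare) super inside methods created that way ("implicit argument passing of super from method defined by define_method() is not supported"), so the explicit zero-argument form super() must be used to reach the opts definition of the enclosing group. A standalone sketch of the pattern:

    # Overriding a let and merging onto the outer definition with super().
    require 'rspec/autorun'

    RSpec.describe 'overriding let with super()' do
      let(:opts) { { :delimiter => "\n" } }

      context 'with chunked reading options' do
        # bare `super` here would raise a RuntimeError when the example runs
        let(:opts) { super().merge(:file_chunk_size => 10, :file_chunk_count => 1) }

        it 'merges onto the outer options' do
          expect(opts).to eq(:delimiter => "\n", :file_chunk_size => 10, :file_chunk_count => 1)
        end
      end
    end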

data/spec/filewatch/rotate_spec.rb

@@ -219,7 +219,7 @@ module FileWatch
     end
 
     context "create + rename rotation: when a new logfile is renamed to a path we have seen before but not all content from the previous the file is read" do
-      let(:opts) { super.merge(
+      let(:opts) { super().merge(
           :file_chunk_size => line1.bytesize.succ,
           :file_chunk_count => 1
       ) }
@@ -296,7 +296,7 @@ module FileWatch
     end
 
     context "copy + truncate rotation: when a logfile is copied to a new path and truncated before the open file is fully read" do
-      let(:opts) { super.merge(
+      let(:opts) { super().merge(
           :file_chunk_size => line1.bytesize.succ,
           :file_chunk_count => 1
      ) }
@@ -370,7 +370,7 @@ module FileWatch
     end
 
     context "? rotation: when an active file is renamed inside the glob and the reading lags behind" do
-      let(:opts) { super.merge(
+      let(:opts) { super().merge(
           :file_chunk_size => line1.bytesize.succ,
           :file_chunk_count => 2
      ) }
@@ -409,7 +409,7 @@ module FileWatch
     end
 
     context "? rotation: when a not active file is rotated outside the glob before the file is read" do
-      let(:opts) { super.merge(
+      let(:opts) { super().merge(
           :close_older => 3600,
           :max_open_files => 1,
           :file_sort_by => "path"

data/spec/filewatch/settings_spec.rb

@@ -1,3 +1,6 @@
+require 'logstash/devutils/rspec/spec_helper'
+require 'logstash/inputs/friendly_durations'
+
 describe FileWatch::Settings do
 
   context "when create from options" do

data/spec/filewatch/sincedb_record_serializer_spec.rb

@@ -9,7 +9,9 @@ module FileWatch
     let(:io) { StringIO.new }
     let(:db) { Hash.new }
 
-    subject { SincedbRecordSerializer.new(SincedbRecordSerializer.days_to_seconds(14)) }
+    let(:sincedb_value_expiry) { SincedbRecordSerializer.days_to_seconds(14) }
+
+    subject { SincedbRecordSerializer.new(sincedb_value_expiry) }
 
     context "deserialize from IO" do
       it 'reads V1 records' do
@@ -82,8 +84,10 @@ module FileWatch
     end
 
     context "given a non default `sincedb_clean_after`" do
+
+      let(:sincedb_value_expiry) { SincedbRecordSerializer.days_to_seconds(2) }
+
       it "does not write expired db entries to an IO object" do
-        subject.update_sincedb_value_expiry_from_days(2)
         one_day_ago = Time.now.to_f - (1.0*24*3600)
         three_days_ago = one_day_ago - (2.0*24*3600)
         db[InodeStruct.new("42424242", 2, 5)] = SincedbValue.new(42, one_day_ago)
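
Instead of mutating the serializer inside the example (subject.update_sincedb_value_expiry_from_days(2)), the expiry is now a plain let that the shared subject consumes, so a nested context only overrides the value while the subject is always built the same way. This is ordinary RSpec let/subject resolution; a compact sketch with a hypothetical class standing in for SincedbRecordSerializer:

    # Hypothetical serializer used only to illustrate the let-driven subject.
    require 'rspec/autorun'

    class ExpirySerializer
      attr_reader :expiry
      def initialize(expiry)
        @expiry = expiry
      end
    end

    RSpec.describe ExpirySerializer do
      let(:expiry) { 14 * 24 * 3600 }               # default, like days_to_seconds(14)
      subject      { ExpirySerializer.new(expiry) } # always constructed from the let

      context 'given a shorter expiry' do
        let(:expiry) { 2 * 24 * 3600 }              # override only the value

        it 'builds the subject with the overridden expiry' do
          expect(subject.expiry).to eq(172_800)
        end
      end
    end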

data/spec/filewatch/spec_helper.rb

@@ -117,17 +117,12 @@ module FileWatch
   class Listener
     attr_reader :path, :lines, :calls
 
-    def initialize(path)
+    def initialize(path, lines)
       @path = path
-      @lines = Concurrent::Array.new
+      @lines = lines || Concurrent::Array.new
       @calls = Concurrent::Array.new
     end
 
-    def add_lines(lines)
-      @lines = lines
-      self
-    end
-
     def accept(line)
       @lines << line
       @calls << :accept
@@ -161,12 +156,7 @@ module FileWatch
     attr_reader :listeners
 
     def initialize(combined_lines = nil)
-      listener_proc = if combined_lines.nil?
-        lambda{|k| Listener.new(k) }
-      else
-        lambda{|k| Listener.new(k).add_lines(combined_lines) }
-      end
-      @listeners = Concurrent::Hash.new {|hash, key| hash[key] = listener_proc.call(key) }
+      @listeners = Concurrent::Hash.new { |hash, key| hash[key] = new_listener(key, combined_lines) }
     end
 
     def listener_for(path)
@@ -174,6 +164,14 @@
     end
 
     def clear
-      @listeners.clear; end
+      @listeners.clear
+    end
+
+    private
+
+    def new_listener(path, lines = nil)
+      Listener.new(path, lines)
+    end
+
   end
 end
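
The TestObserver now leans on Concurrent::Hash's default-value block to build a listener the first time a path is looked up, replacing the conditional listener_proc; it is the same auto-vivification idiom as Hash.new { |h, k| ... }, backed by concurrent-ruby's thread-safe collections. A small standalone example of the idiom:

    # Lazy per-key construction with concurrent-ruby's thread-safe collections.
    require 'concurrent'

    lines_by_path = Concurrent::Hash.new { |hash, key| hash[key] = Concurrent::Array.new }

    lines_by_path["/var/log/a.log"] << "line1"   # first access creates the array
    lines_by_path["/var/log/a.log"] << "line2"
    puts lines_by_path["/var/log/a.log"].size    # => 2
    puts lines_by_path.keys.inspect              # => ["/var/log/a.log"]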

data/spec/filewatch/tailing_spec.rb

@@ -10,15 +10,19 @@ module FileWatch
     let(:file_path) { ::File.join(directory, "1#{suffix}.log") }
     let(:file_path2) { ::File.join(directory, "2#{suffix}.log") }
     let(:file_path3) { ::File.join(directory, "3#{suffix}.log") }
-    let(:max) { 4095 }
+    let(:max)   { 4095 }
     let(:stat_interval) { 0.1 }
     let(:discover_interval) { 4 }
     let(:start_new_files_at) { :end }
     let(:sincedb_path) { ::File.join(directory, "tailing.sdb") }
     let(:opts) do
       {
-        :stat_interval => stat_interval, :start_new_files_at => start_new_files_at, :max_open_files => max,
-        :delimiter => "\n", :discover_interval => discover_interval, :sincedb_path => sincedb_path,
+        :stat_interval => stat_interval,
+        :start_new_files_at => start_new_files_at,
+        :max_open_files => max,
+        :delimiter => "\n",
+        :discover_interval => discover_interval,
+        :sincedb_path => sincedb_path,
         :file_sort_by => "path"
       }
     end
@@ -30,12 +34,11 @@ module FileWatch
 
     before do
       directory
-      wait(1.0).for{Dir.exist?(directory)}.to eq(true)
+      wait(1.0).for { Dir.exist?(directory) }.to eq(true)
     end
 
     after do
       FileUtils.rm_rf(directory)
-      wait(1.0).for{Dir.exist?(directory)}.to eq(false)
     end
 
     describe "max open files (set to 1)" do
@@ -74,7 +77,7 @@
 
     context "when close_older is set" do
       let(:wait_before_quit) { 0.8 }
-      let(:opts) { super.merge(:close_older => 0.1, :max_open_files => 1, :stat_interval => 0.1) }
+      let(:opts) { super().merge(:close_older => 0.1, :max_open_files => 1, :stat_interval => 0.1) }
       let(:suffix) { "B" }
       it "opens both files" do
         actions.activate_quietly
@@ -95,16 +98,16 @@
       let(:actions) do
         RSpec::Sequencing
           .run("create file") do
-             File.open(file_path, "wb") { |file| file.write("lineA\nlineB\n") }
+            File.open(file_path, "wb") { |file| file.write("lineA\nlineB\n") }
          end
          .then_after(0.1, "begin watching") do
            tailing.watch_this(watch_dir)
          end
-          .then_after(2, "add content") do
-             File.open(file_path, "ab") { |file| file.write("line1\nline2\n") }
+          .then_after(1.0, "add content") do
+            File.open(file_path, "ab") { |file| file.write("line1\nline2\n") }
          end
          .then("wait") do
-            wait(0.75).for{listener1.lines}.to eq(["line1", "line2"])
+            wait(0.75).for { listener1.lines }.to_not be_empty
          end
          .then("quit") do
            tailing.quit
@@ -113,7 +116,6 @@
 
       it "only the new content is read" do
         actions.activate_quietly
-        tailing.watch_this(watch_dir)
        tailing.subscribe(observer)
        actions.assert_no_errors
        expect(listener1.calls).to eq([:open, :accept, :accept])
@@ -132,7 +134,7 @@
            File.open(file_path, "wb") { |file| file.write("line1\nline2\n") }
          end
          .then("wait") do
-            wait(0.75).for{listener1.lines.size}.to eq(2)
+            wait(0.75).for { listener1.lines }.to_not be_empty
          end
          .then("quit") do
            tailing.quit
@@ -154,7 +156,7 @@
       # so when a stat is taken on the file an error is raised
       let(:suffix) { "E" }
       let(:quit_after) { 0.2 }
-      let(:stat) { double("stat", :size => 100, :modified_at => Time.now.to_f, :identifier => nil, :inode => 234567, :inode_struct => InodeStruct.new("234567", 1, 5)) }
+      let(:stat) { double("stat", :size => 100, :modified_at => Time.now.to_f, :inode => 234567, :inode_struct => InodeStruct.new("234567", 1, 5)) }
       let(:watched_file) { WatchedFile.new(file_path, stat, tailing.settings) }
       before do
         allow(stat).to receive(:restat).and_raise(Errno::ENOENT)
@@ -276,7 +278,7 @@
 
     context "when watching a directory with files and a file is renamed to match glob", :unix => true do
       let(:suffix) { "H" }
-      let(:opts) { super.merge(:close_older => 0) }
+      let(:opts) { super().merge(:close_older => 0) }
       let(:listener2) { observer.listener_for(file_path2) }
       let(:actions) do
         RSpec::Sequencing
@@ -344,7 +346,7 @@
     end
 
     context "when close older expiry is enabled" do
-      let(:opts) { super.merge(:close_older => 1) }
+      let(:opts) { super().merge(:close_older => 1) }
       let(:suffix) { "J" }
       let(:actions) do
         RSpec::Sequencing.run("create file") do
@@ -368,7 +370,7 @@
     end
 
     context "when close older expiry is enabled and after timeout the file is appended-to" do
-      let(:opts) { super.merge(:close_older => 0.5) }
+      let(:opts) { super().merge(:close_older => 0.5) }
       let(:suffix) { "K" }
       let(:actions) do
         RSpec::Sequencing
@@ -404,7 +406,7 @@
     end
 
     context "when ignore older expiry is enabled and all files are already expired" do
-      let(:opts) { super.merge(:ignore_older => 1) }
+      let(:opts) { super().merge(:ignore_older => 1) }
       let(:suffix) { "L" }
       let(:actions) do
         RSpec::Sequencing
@@ -428,7 +430,7 @@
 
     context "when a file is renamed before it gets activated", :unix => true do
       let(:max) { 1 }
-      let(:opts) { super.merge(:file_chunk_count => 8, :file_chunk_size => 6, :close_older => 0.1, :discover_interval => 6) }
+      let(:opts) { super().merge(:file_chunk_count => 8, :file_chunk_size => 6, :close_older => 0.1, :discover_interval => 6) }
       let(:suffix) { "M" }
       let(:start_new_files_at) { :beginning } # we are creating files and sincedb record before hand
       let(:actions) do
@@ -467,7 +469,7 @@
     end
 
     context "when ignore_older is less than close_older and all files are not expired" do
-      let(:opts) { super.merge(:ignore_older => 1, :close_older => 1.1) }
+      let(:opts) { super().merge(:ignore_older => 1, :close_older => 1.1) }
       let(:suffix) { "N" }
       let(:start_new_files_at) { :beginning }
       let(:actions) do
@@ -495,7 +497,7 @@
     end
 
     context "when ignore_older is less than close_older and all files are expired" do
-      let(:opts) { super.merge(:ignore_older => 10, :close_older => 1) }
+      let(:opts) { super().merge(:ignore_older => 10, :close_older => 1) }
      let(:suffix) { "P" }
       let(:actions) do
         RSpec::Sequencing
@@ -520,7 +522,7 @@
     end
 
     context "when ignore older and close older expiry is enabled and after timeout the file is appended-to" do
-      let(:opts) { super.merge(:ignore_older => 20, :close_older => 0.5) }
+      let(:opts) { super().merge(:ignore_older => 20, :close_older => 0.5) }
       let(:suffix) { "Q" }
       let(:actions) do
         RSpec::Sequencing
@@ -549,7 +551,7 @@
     end
 
     context "when a non default delimiter is specified and it is not in the content" do
-      let(:opts) { super.merge(:ignore_older => 20, :close_older => 1, :delimiter => "\nø") }
+      let(:opts) { super().merge(:ignore_older => 20, :close_older => 1, :delimiter => "\nø") }
       let(:suffix) { "R" }
       let(:actions) do
         RSpec::Sequencing
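
A pattern worth noting in the tailing spec changes above: intermediate waits are relaxed from exact matches (for example wait(0.75).for { listener1.lines }.to eq(["line1", "line2"])) to to_not be_empty, since the wait only has to establish that content started arriving before the sequence quits, while the final expectations still assert the exact lines; this makes the timing-driven sequences less flaky. The wait(...).for { } helper retries the block until the matcher passes or the timeout elapses (these specs appear to get it from the rspec-wait gem); a minimal standalone sketch with a hypothetical background producer:

    # Poll an asynchronously filled collection until it is non-empty.
    require 'rspec/autorun'
    require 'rspec/wait'
    require 'concurrent'

    RSpec.describe 'waiting for asynchronous output' do
      it 'sees lines once the producer has written them' do
        lines = Concurrent::Array.new
        producer = Thread.new { sleep 0.2; lines << 'line1' }

        # re-evaluates the block until the matcher passes or 1 second elapses
        wait(1).for { lines }.to_not be_empty
        expect(lines.to_a).to eq(['line1'])
        producer.join
      end
    end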