logstash-input-file 4.4.0 → 4.4.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 667711732af65c8b27c1bf3e455d78748d0b0c9365cc612df7aa29cc65fbbb58
-  data.tar.gz: 3f25446f513d8cb7a5619bd7e00333bcade1dbb6ccd8aeb061b991b14e91cb1c
+  metadata.gz: 26577b41ecc7118c3f4ec2dba011fe730e44d4a74c0f45b19875ba331922bb24
+  data.tar.gz: e799e9a5e149662f3e1ffa4e902bb8c789d8d5eb7f1b90c1776445571b358a1b
 SHA512:
-  metadata.gz: 371c56fb328a940b11047b88ded84ef2c17427ae706999dcdb15a674109be7515c952b67aadb692400e76fae9edf0fd5bfb6984a9b23f9fe00762ec618d88b25
-  data.tar.gz: b980a4595e55b90783f72f9f36240c0ac9f4a0aea998ab27ae6e2b5024474961f100349f6a044aee29826d7c94789829b1f3f1598bd204ba1b4edb0d90295fa8
+  metadata.gz: b8ce5e67051bb9d04947d3b3e72475f2929cecf475a5aa1196556fb63fcd37c44e931ebdff1e47fa21f01030bf2a5b15a6301658dbc249633b613c681b942fd9
+  data.tar.gz: 44dc2403a6ca1dcc5c0660404e4419d5acd1f020d376cd3be8fc7d46533335c10ba10e6c0510eb9c4afa3685b281e5a4a8cfb201ac2882c500f42b3bd54681ef
data/CHANGELOG.md CHANGED
@@ -1,3 +1,13 @@
+## 4.4.3
+  - Fixes read mode to restart the read from reference stored in sincedb in case the file wasn't completely consumed. [#307](https://github.com/logstash-plugins/logstash-input-file/pull/307)
+
+## 4.4.2
+  - Doc: Fix attribute by removing extra character [#310](https://github.com/logstash-plugins/logstash-input-file/pull/310)
+
+## 4.4.1
+  - Fix: update to Gradle 7 [#305](https://github.com/logstash-plugins/logstash-input-file/pull/305)
+  - [DOC] Add version attributes to doc source file [#308](https://github.com/logstash-plugins/logstash-input-file/pull/308)
+
 ## 4.4.0
   - Add support for ECS v8 [#301](https://github.com/logstash-plugins/logstash-input-file/pull/301)
 
data/docs/index.asciidoc CHANGED
@@ -9,6 +9,11 @@ START - GENERATED VARIABLES, DO NOT EDIT!
 :release_date: %RELEASE_DATE%
 :changelog_url: %CHANGELOG_URL%
 :include_path: ../../../../logstash/docs/include
+
+ifeval::["{versioned_docs}"=="true"]
+:branch: %BRANCH%
+:ecs_version: %ECS_VERSION%
+endif::[]
 ///////////////////////////////////////////
 END - GENERATED VARIABLES, DO NOT EDIT!
 ///////////////////////////////////////////
@@ -82,7 +87,7 @@ was not ideal and a dedicated Read mode is an improvement.
 ==== Compatibility with the Elastic Common Schema (ECS)
 
 This plugin adds metadata about event's source, and can be configured to do so
-in an {ecs-ref}[ECS-compatible] way with <<plugins-{type}s-{plugin}-ecs_compatibility>>.
+in an https://www.elastic.co/guide/en/ecs/{ecs_version}/index.html[ECS-compatible] way with <<plugins-{type}s-{plugin}-ecs_compatibility>>.
 This metadata is added after the event has been decoded by the appropriate codec,
 and will never overwrite existing values.
 
@@ -270,7 +275,7 @@ In practice, this will be the best case because the time taken to read new content
 ** Otherwise, the default value is `disabled`.
 
 Controls this plugin's compatibility with the
-{ecs-ref}[Elastic Common Schema (ECS)].
+https://www.elastic.co/guide/en/ecs/{ecs_version}/index.html[Elastic Common Schema (ECS)].
 
 [id="plugins-{type}s-{plugin}-exclude"]
 ===== `exclude`
@@ -426,7 +431,7 @@ of `/var/log` will be done for all `*.log` files.
 Paths must be absolute and cannot be relative.
 
 You may also configure multiple paths. See an example
-on the {logstash-ref}/configuration-file-structure.html#array[Logstash configuration page].
+on the https://www.elastic.co/guide/en/logstash/{branch}/configuration-file-structure.html#array[Logstash configuration page].
 
 [id="plugins-{type}s-{plugin}-sincedb_clean_after"]
 ===== `sincedb_clean_after`
@@ -439,7 +444,7 @@ The sincedb record now has a last active timestamp associated with it.
 If no changes are detected in a tracked file in the last N days its sincedb
 tracking record expires and will not be persisted.
 This option helps protect against the inode recycling problem.
-Filebeat has a {filebeat-ref}/inode-reuse-issue.html[FAQ about inode recycling].
+Filebeat has an https://www.elastic.co/guide/en/beats/filebeat/{branch}/inode-reuse-issue.html[FAQ about inode recycling].
 
 [id="plugins-{type}s-{plugin}-sincedb_path"]
 ===== `sincedb_path`
@@ -533,4 +538,9 @@ Supported values: `us` `usec` `usecs`, e.g. "600 us", "800 usec", "900 usecs"
 [NOTE]
 `micro` `micros` and `microseconds` are not supported
 
+ifeval::["{versioned_docs}"=="true"]
+:branch: current
+:ecs_version: current
+endif::[]
+
 :default_codec!:
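
A note on the ECS-related doc edits above: with `ecs_compatibility => disabled` this input writes its source metadata to `path` and `host`, while in ECS mode it targets `[log][file][path]` and `[host][name]` — exactly the field pairs the reworked specs further down select per mode. A minimal sketch of that selection, using a hypothetical helper rather than the plugin's internal ECS machinery:

```ruby
# Hypothetical illustration of the per-mode target fields; the plugin
# resolves these via its ECS-compatibility helpers, not this function.
def source_metadata_fields(ecs_compatibility)
  if ecs_compatibility == :disabled
    { file_path: "path", host_name: "host" }
  else # :v1 and :v8 behave the same for these fields
    { file_path: "[log][file][path]", host_name: "[host][name]" }
  end
end

source_metadata_fields(:disabled) # => {:file_path=>"path", :host_name=>"host"}
source_metadata_fields(:v8)       # => {:file_path=>"[log][file][path]", :host_name=>"[host][name]"}
```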
data/lib/filewatch/read_mode/handlers/read_file.rb CHANGED
@@ -2,9 +2,19 @@
 
 module FileWatch module ReadMode module Handlers
   class ReadFile < Base
+
+    # seek file to which ever is furthest: either current bytes read or sincedb position
+    private
+    def seek_to_furthest_position(watched_file)
+      previous_pos = sincedb_collection.find(watched_file).position
+      watched_file.file_seek([watched_file.bytes_read, previous_pos].max)
+    end
+
+    public
     def handle_specifically(watched_file)
       if open_file(watched_file)
         add_or_update_sincedb_collection(watched_file) unless sincedb_collection.member?(watched_file.sincedb_key)
+        seek_to_furthest_position(watched_file)
         loop do
           break if quit?
           loop_control = watched_file.loop_control_adjusted_for_stat_size
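
This hunk is the substance of the 4.4.3 fix noted in the changelog: before entering the read loop, the handler seeks to whichever offset is further along — the bytes already read in this session or the position persisted in the sincedb — so a partially consumed file resumes instead of being re-read from the start. A standalone sketch of the same idea in plain Ruby IO (hypothetical names, not the plugin's FileWatch types):

```ruby
# Sketch: resume reading from the furthest known offset. Both arguments
# are byte offsets; sincedb_position survives restarts, bytes_read does not.
def read_remainder(path, bytes_read, sincedb_position)
  File.open(path, "r") do |io|
    io.seek([bytes_read, sincedb_position].max, IO::SEEK_SET)
    io.read # only the not-yet-consumed tail of the file
  end
end

# After a restart bytes_read is 0; without the seek the whole file
# would be emitted again, duplicating already-processed lines.
read_remainder("/var/log/example.log", 0, 1024) # hypothetical path/offset
```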
Binary file
logstash-input-file.gemspec CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-input-file'
-  s.version = '4.4.0'
+  s.version = '4.4.3'
   s.licenses = ['Apache-2.0']
   s.summary = "Streams events from files"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -36,5 +36,45 @@ module FileWatch
         processor.read_file(watched_file)
       end
     end
+
+    context "when restart from existing sincedb" do
+      let(:settings) do
+        Settings.from_options(
+          :sincedb_write_interval => 0,
+          :sincedb_path => File::NULL,
+          :file_chunk_size => 10
+        )
+      end
+
+      let(:processor) { double("fake processor") }
+      let(:observer) { TestObserver.new }
+      let(:watch) { double("watch") }
+
+      before(:each) {
+        allow(watch).to receive(:quit?).and_return(false)#.and_return(false).and_return(true)
+        allow(processor).to receive(:watch).and_return(watch)
+      }
+
+      it "read from where it left" do
+        listener = observer.listener_for(Pathname.new(pathname).to_path)
+        sut = ReadMode::Handlers::ReadFile.new(processor, sdb_collection, observer, settings)
+
+        # simulate a previous partial read of the file
+        sincedb_value = SincedbValue.new(0)
+        sincedb_value.set_watched_file(watched_file)
+        sdb_collection.set(watched_file.sincedb_key, sincedb_value)
+
+
+        # simulate a consumption of first line, (size + newline) bytes
+        sdb_collection.increment(watched_file.sincedb_key, File.readlines(pathname)[0].size + 2)
+
+        # exercise
+        sut.handle(watched_file)
+
+        # verify
+        expect(listener.lines.size).to eq(1)
+        expect(listener.lines[0]).to start_with("2010-03-12 23:51:21 SEA4 192.0.2.222 play 3914 OK")
+      end
+    end
   end
 end
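
The new spec seeds the sincedb with an offset just past the fixture's first line and asserts that only the remainder is emitted. Outside the suite, the same offset arithmetic looks like this in plain Ruby (hypothetical file, assuming `\n` line endings; `readlines` keeps the terminator, so a line's bytesize is the byte offset of the next line):

```ruby
path = "/tmp/example.log" # hypothetical fixture
File.write(path, "first line\nsecond line\n")

# Byte offset of the second line: the first line's size including "\n".
offset = File.readlines(path).first.bytesize

File.open(path) do |io|
  io.seek(offset, IO::SEEK_SET)
  io.read # => "second line\n"
end
```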
@@ -80,6 +80,8 @@ module FileWatch
       multiplier = amount / string.length
       string * multiplier
     end
+    def sysseek(offset, whence)
+    end
   end
 
   FIXTURE_DIR = File.join('spec', 'fixtures')
@@ -31,7 +31,13 @@ module FileInput
 
     def trace_for(symbol)
       params = @tracer.map {|k,v| k == symbol ? v : nil}.compact
-      params.empty? ? false : params
+      if params.empty?
+        false
+      else
+        # merge all params with same key
+        # there could be multiple instances of same call, e.g. [[:accept, true], [:auto_flush, true], [:close, true], [:auto_flush, true]]
+        params.reduce {|b1, b2| b1 and b2}
+      end
    end
 
     def clear
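
The rewritten `trace_for` collapses repeated trace entries for one symbol into a single boolean rather than returning the raw array: `reduce {|b1, b2| b1 and b2}` is a logical AND over every recorded value, so the later `eq(true)` expectations hold even when a call was traced more than once. For example:

```ruby
# Two :auto_flush traces yield params == [true, true]; the reduce
# folds them into one value.
[true, true].reduce { |b1, b2| b1 and b2 }  # => true
[true, false].reduce { |b1, b2| b1 and b2 } # => false
```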
@@ -57,7 +63,7 @@ module FileInput
       @tracer.push [:close, true]
     end
     def clone
-      self.class.new
+      self
     end
   end
 end
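
Having `clone` return `self` keeps every per-file codec in the identity map backed by the same tracing instance, so calls recorded after cloning remain visible to the spec's assertions; the old `self.class.new` handed back a fresh object with an empty trace. A minimal illustration (hypothetical `Tracer`, not the spec's codec):

```ruby
class Tracer
  def initialize; @calls = []; end
  def record(name); @calls << name; end
  attr_reader :calls
  def clone; self; end # share state instead of starting empty
end

t = Tracer.new
copy = t.clone
t.record(:accept)
copy.calls # => [:accept]; with `self.class.new` this would be []
```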
@@ -157,39 +157,49 @@ describe LogStash::Inputs::File do
     end
   end
 
-  context "running the input twice" do
-    let(:name) { "D" }
-    it "should read old files" do
-      conf = <<-CONFIG
-        input {
-          file {
-            type => "blah"
-            path => "#{path_path}"
-            start_position => "beginning"
-            codec => "json"
-          }
-        }
-      CONFIG
+  context "running the input twice", :ecs_compatibility_support do
+    ecs_compatibility_matrix(:disabled, :v1, :v8 => :v1) do |ecs_select|
 
-      File.open(tmpfile_path, "w") do |fd|
-        fd.puts('{"path": "my_path", "host": "my_host"}')
-        fd.puts('{"my_field": "my_val"}')
-        fd.fsync
+      before(:each) do
+        allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
       end
-      # arbitrary old file (2 days)
-      FileInput.make_file_older(tmpfile_path, 48 * 60 * 60)
+
+      let(:file_path_target_field ) { ecs_select[disabled: "path", v1: '[log][file][path]'] }
+      let(:source_host_target_field) { ecs_select[disabled: "host", v1: '[host][name]'] }
+
+      let(:name) { "D" }
+      it "should read old files" do
+        conf = <<-CONFIG
+          input {
+            file {
+              type => "blah"
+              path => "#{path_path}"
+              start_position => "beginning"
+              codec => "json"
+            }
+          }
+        CONFIG
 
-      events = input(conf) do |pipeline, queue|
-        2.times.collect { queue.pop }
+        File.open(tmpfile_path, "w") do |fd|
+          fd.puts('{"path": "my_path", "host": "my_host"}')
+          fd.puts('{"my_field": "my_val"}')
+          fd.fsync
+        end
+        # arbitrary old file (2 days)
+        FileInput.make_file_older(tmpfile_path, 48 * 60 * 60)
+
+        events = input(conf) do |pipeline, queue|
+          2.times.collect { queue.pop }
+        end
+        existing_path_index, added_path_index = "my_val" == events[0].get("my_field") ? [1,0] : [0,1]
+        expect(events[existing_path_index].get("path")).to eq "my_path"
+        expect(events[existing_path_index].get("host")).to eq "my_host"
+        expect(events[existing_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
+
+        expect(events[added_path_index].get(file_path_target_field)).to eq "#{tmpfile_path}"
+        expect(events[added_path_index].get(source_host_target_field)).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
+        expect(events[added_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
      end
-      existing_path_index, added_path_index = "my_val" == events[0].get("my_field") ? [1,0] : [0,1]
-      expect(events[existing_path_index].get("path")).to eq "my_path"
-      expect(events[existing_path_index].get("host")).to eq "my_host"
-      expect(events[existing_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
-
-      expect(events[added_path_index].get("path")).to eq "#{tmpfile_path}"
-      expect(events[added_path_index].get("host")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
-      expect(events[added_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
    end
   end
 
@@ -233,54 +243,62 @@ describe LogStash::Inputs::File do
       FileUtils.rm_rf(sincedb_path)
     end
 
-    context "when data exists and then more data is appended" do
-      subject { described_class.new(conf) }
+    context "when data exists and then more data is appended", :ecs_compatibility_support do
+      ecs_compatibility_matrix(:disabled, :v1, :v8 => :v1) do |ecs_select|
 
-      before do
-        File.open(tmpfile_path, "w") do |fd|
-          fd.puts("ignore me 1")
-          fd.puts("ignore me 2")
-          fd.fsync
+        before(:each) do
+          allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
        end
-        mlconf.update("pattern" => "^\s", "what" => "previous")
-        conf.update("type" => "blah",
-                    "path" => path_path,
-                    "sincedb_path" => sincedb_path,
-                    "stat_interval" => 0.1,
-                    "codec" => mlcodec,
-                    "delimiter" => TEST_FILE_DELIMITER)
-      end
 
-      it "reads the appended data only" do
-        subject.register
-        actions = RSpec::Sequencing
-          .run_after(1, "append two lines after delay") do
-            File.open(tmpfile_path, "a") { |fd| fd.puts("hello"); fd.puts("world") }
-          end
-          .then("wait for one event") do
-            wait(0.75).for{events.size}.to eq(1)
-          end
-          .then("quit") do
-            subject.stop
-          end
-          .then("wait for flushed event") do
-            wait(0.75).for{events.size}.to eq(2)
+        let(:file_path_target_field ) { ecs_select[disabled: "path", v1: '[log][file][path]'] }
+        subject { described_class.new(conf) }
+
+        before do
+          File.open(tmpfile_path, "w") do |fd|
+            fd.puts("ignore me 1")
+            fd.puts("ignore me 2")
+            fd.fsync
          end
+          mlconf.update("pattern" => "^\s", "what" => "previous")
+          conf.update("type" => "blah",
+                      "path" => path_path,
+                      "sincedb_path" => sincedb_path,
+                      "stat_interval" => 0.1,
+                      "codec" => mlcodec,
+                      "delimiter" => TEST_FILE_DELIMITER)
+        end
 
-        subject.run(events)
-        actions.assert_no_errors
+        it "reads the appended data only" do
+          subject.register
+          actions = RSpec::Sequencing
+            .run_after(1, "append two lines after delay") do
+              File.open(tmpfile_path, "a") { |fd| fd.puts("hello"); fd.puts("world") }
+            end
+            .then("wait for one event") do
+              wait(0.75).for{events.size}.to eq(1)
+            end
+            .then("quit") do
+              subject.stop
+            end
+            .then("wait for flushed event") do
+              wait(0.75).for{events.size}.to eq(2)
+            end
 
-        event1 = events[0]
-        expect(event1).not_to be_nil
-        expect(event1.get("path")).to eq tmpfile_path
-        expect(event1.get("[@metadata][path]")).to eq tmpfile_path
-        expect(event1.get("message")).to eq "hello"
-
-        event2 = events[1]
-        expect(event2).not_to be_nil
-        expect(event2.get("path")).to eq tmpfile_path
-        expect(event2.get("[@metadata][path]")).to eq tmpfile_path
-        expect(event2.get("message")).to eq "world"
+          subject.run(events)
+          actions.assert_no_errors
+
+          event1 = events[0]
+          expect(event1).not_to be_nil
+          expect(event1.get(file_path_target_field)).to eq tmpfile_path
+          expect(event1.get("[@metadata][path]")).to eq tmpfile_path
+          expect(event1.get("message")).to eq "hello"
+
+          event2 = events[1]
+          expect(event2).not_to be_nil
+          expect(event2.get(file_path_target_field)).to eq tmpfile_path
+          expect(event2.get("[@metadata][path]")).to eq tmpfile_path
+          expect(event2.get("message")).to eq "world"
+        end
      end
    end
 
@@ -311,15 +329,23 @@ describe LogStash::Inputs::File do
        .then_after(0.1, "identity is mapped") do
          wait(0.75).for{subject.codec.identity_map[tmpfile_path]}.not_to be_nil, "identity is not mapped"
        end
-        .then("wait for auto_flush") do
-          wait(0.75).for{subject.codec.identity_map[tmpfile_path].codec.trace_for(:auto_flush)}.to eq([true]), "autoflush didn't"
+        .then("wait accept") do
+          wait(0.75).for {
+            subject.codec.identity_map[tmpfile_path].codec.trace_for(:accept)
+          }.to eq(true), "accept didn't"
        end
-        .then("quit") do
+        .then("request a stop") do
+          # without this the subject.run doesn't invokes the #exit_flush which is the only @codec.flush_mapped invocation
          subject.stop
        end
+        .then("wait for auto_flush") do
+          wait(2).for {
+            subject.codec.identity_map[tmpfile_path].codec.trace_for(:auto_flush)
+          }.to eq(true), "autoflush didn't"
+        end
      subject.run(events)
      actions.assert_no_errors
-      expect(subject.codec.identity_map[tmpfile_path].codec.trace_for(:accept)).to eq([true])
+      expect(subject.codec.identity_map[tmpfile_path].codec.trace_for(:accept)).to eq(true)
    end
  end
 
@@ -356,74 +382,50 @@ describe LogStash::Inputs::File do
      end
    end
 
-    context "when wildcard path and a multiline codec is specified" do
-      subject { described_class.new(conf) }
-      let(:suffix) { "J" }
-      let(:tmpfile_path2) { ::File.join(tmpdir_path, "K.txt") }
-      before do
-        mlconf.update("pattern" => "^\s", "what" => "previous")
-        conf.update(
-              "type" => "blah",
-              "path" => path_path,
-              "start_position" => "beginning",
-              "sincedb_path" => sincedb_path,
-              "stat_interval" => 0.05,
-              "codec" => mlcodec,
-              "file_sort_by" => "path",
-              "delimiter" => TEST_FILE_DELIMITER)
+    context "when wildcard path and a multiline codec is specified", :ecs_compatibility_support do
+      ecs_compatibility_matrix(:disabled, :v1, :v8 => :v1) do |ecs_select|
 
-        subject.register
-      end
+        before(:each) do
+          allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
+        end
 
-      it "collects separate multiple line events from each file" do
-        subject
-        actions = RSpec::Sequencing
-          .run_after(0.1, "create files") do
-            File.open(tmpfile_path, "wb") do |fd|
-              fd.puts("line1.1-of-J")
-              fd.puts(" line1.2-of-J")
-              fd.puts(" line1.3-of-J")
-            end
-            File.open(tmpfile_path2, "wb") do |fd|
-              fd.puts("line1.1-of-K")
-              fd.puts(" line1.2-of-K")
-              fd.puts(" line1.3-of-K")
-            end
-          end
-          .then("assert both files are mapped as identities and stop") do
-            wait(2).for {subject.codec.identity_count}.to eq(2), "both files are not mapped as identities"
-          end
-          .then("stop") do
-            subject.stop
-          end
-        subject.run(events)
-        # wait for actions to complete
-        actions.assert_no_errors
-        expect(events.size).to eq(2)
-        e1, e2 = events
-        e1_message = e1.get("message")
-        e2_message = e2.get("message")
-
-        expect(e1.get("path")).to match(/J.txt/)
-        expect(e2.get("path")).to match(/K.txt/)
-        expect(e1_message).to eq("line1.1-of-J#{TEST_FILE_DELIMITER} line1.2-of-J#{TEST_FILE_DELIMITER} line1.3-of-J")
-        expect(e2_message).to eq("line1.1-of-K#{TEST_FILE_DELIMITER} line1.2-of-K#{TEST_FILE_DELIMITER} line1.3-of-K")
-      end
+        let(:file_path_target_field ) { ecs_select[disabled: "path", v1: '[log][file][path]'] }
 
-      context "if auto_flush is enabled on the multiline codec" do
-        let(:mlconf) { { "auto_flush_interval" => 0.5 } }
-        let(:suffix) { "M" }
-        it "an event is generated via auto_flush" do
+        subject { described_class.new(conf) }
+        let(:suffix) { "J" }
+        let(:tmpfile_path2) { ::File.join(tmpdir_path, "K.txt") }
+        before do
+          mlconf.update("pattern" => "^\s", "what" => "previous")
+          conf.update(
+                "type" => "blah",
+                "path" => path_path,
+                "start_position" => "beginning",
+                "sincedb_path" => sincedb_path,
+                "stat_interval" => 0.05,
+                "codec" => mlcodec,
+                "file_sort_by" => "path",
+                "delimiter" => TEST_FILE_DELIMITER)
+
+          subject.register
+        end
+
+        it "collects separate multiple line events from each file" do
+          subject
         actions = RSpec::Sequencing
          .run_after(0.1, "create files") do
            File.open(tmpfile_path, "wb") do |fd|
-              fd.puts("line1.1-of-a")
-              fd.puts(" line1.2-of-a")
-              fd.puts(" line1.3-of-a")
+              fd.puts("line1.1-of-J")
+              fd.puts(" line1.2-of-J")
+              fd.puts(" line1.3-of-J")
+            end
+            File.open(tmpfile_path2, "wb") do |fd|
+              fd.puts("line1.1-of-K")
+              fd.puts(" line1.2-of-K")
+              fd.puts(" line1.3-of-K")
            end
          end
-          .then("wait for auto_flush") do
-            wait(2).for{events.size}.to eq(1), "events size is not 1"
+          .then("assert both files are mapped as identities and stop") do
+            wait(2).for {subject.codec.identity_count}.to eq(2), "both files are not mapped as identities"
          end
          .then("stop") do
            subject.stop
@@ -431,10 +433,43 @@
        subject.run(events)
        # wait for actions to complete
        actions.assert_no_errors
-        e1 = events.first
+        expect(events.size).to eq(2)
+        e1, e2 = events
        e1_message = e1.get("message")
-        expect(e1_message).to eq("line1.1-of-a#{TEST_FILE_DELIMITER} line1.2-of-a#{TEST_FILE_DELIMITER} line1.3-of-a")
-        expect(e1.get("path")).to match(/M.txt$/)
+        e2_message = e2.get("message")
+
+        expect(e1.get(file_path_target_field)).to match(/J.txt/)
+        expect(e2.get(file_path_target_field)).to match(/K.txt/)
+        expect(e1_message).to eq("line1.1-of-J#{TEST_FILE_DELIMITER} line1.2-of-J#{TEST_FILE_DELIMITER} line1.3-of-J")
+        expect(e2_message).to eq("line1.1-of-K#{TEST_FILE_DELIMITER} line1.2-of-K#{TEST_FILE_DELIMITER} line1.3-of-K")
+      end
+
+      context "if auto_flush is enabled on the multiline codec" do
+        let(:mlconf) { { "auto_flush_interval" => 0.5 } }
+        let(:suffix) { "M" }
+        it "an event is generated via auto_flush" do
+          actions = RSpec::Sequencing
+            .run_after(0.1, "create files") do
+              File.open(tmpfile_path, "wb") do |fd|
+                fd.puts("line1.1-of-a")
+                fd.puts(" line1.2-of-a")
+                fd.puts(" line1.3-of-a")
+              end
+            end
+            .then("wait for auto_flush") do
+              wait(2).for{events.size}.to eq(1), "events size is not 1"
+            end
+            .then("stop") do
+              subject.stop
+            end
+          subject.run(events)
+          # wait for actions to complete
+          actions.assert_no_errors
+          e1 = events.first
+          e1_message = e1.get("message")
+          expect(e1_message).to eq("line1.1-of-a#{TEST_FILE_DELIMITER} line1.2-of-a#{TEST_FILE_DELIMITER} line1.3-of-a")
+          expect(e1.get(file_path_target_field)).to match(/M.txt$/)
+        end
      end
    end
  end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-file
 version: !ruby/object:Gem::Version
-  version: 4.4.0
+  version: 4.4.3
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-08-04 00:00:00.000000000 Z
+date: 2022-06-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement