logstash-input-file 4.4.0 → 4.4.1

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 667711732af65c8b27c1bf3e455d78748d0b0c9365cc612df7aa29cc65fbbb58
4
- data.tar.gz: 3f25446f513d8cb7a5619bd7e00333bcade1dbb6ccd8aeb061b991b14e91cb1c
3
+ metadata.gz: ff1f9254faa01ec5630d380861c674edbf1d3e4b4e3a95a86e0305513206dc94
4
+ data.tar.gz: 62783c5f3bd97b0736650625649a3c2264c38b8ae0397c0c48c4f92d6d4e5445
5
5
  SHA512:
6
- metadata.gz: 371c56fb328a940b11047b88ded84ef2c17427ae706999dcdb15a674109be7515c952b67aadb692400e76fae9edf0fd5bfb6984a9b23f9fe00762ec618d88b25
7
- data.tar.gz: b980a4595e55b90783f72f9f36240c0ac9f4a0aea998ab27ae6e2b5024474961f100349f6a044aee29826d7c94789829b1f3f1598bd204ba1b4edb0d90295fa8
6
+ metadata.gz: 3327cfa95a7208246874f72bf21ecd104c626c840477bd25309ac94795d3699fc20e55847110bff41c9a09b30faea19069b5bf6e46c8f13494cee015a8c1230f
7
+ data.tar.gz: 38821490e5415889a4fa40c9cdb5723dcbaf660157dd2e84bf93160686f5a772a59293b379bc34b544dc6f635c5367020bb13a264da0c8e5e5517ddf79108685
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
1
+ ## 4.4.1
2
+ - Fix: update to Gradle 7 [#305](https://github.com/logstash-plugins/logstash-input-file/pull/305)
3
+ - [DOC] Add version attributes to doc source file [#308](https://github.com/logstash-plugins/logstash-input-file/pull/308)
4
+
1
5
  ## 4.4.0
2
6
  - Add support for ECS v8 [#301](https://github.com/logstash-plugins/logstash-input-file/pull/301)
3
7
 
data/docs/index.asciidoc CHANGED
@@ -9,6 +9,11 @@ START - GENERATED VARIABLES, DO NOT EDIT!
9
9
  :release_date: %RELEASE_DATE%
10
10
  :changelog_url: %CHANGELOG_URL%
11
11
  :include_path: ../../../../logstash/docs/include
12
+
13
+ ifeval::["{versioned_docs}"=="true"]
14
+ :branch: %BRANCH%
15
+ :ecs_version: %ECS_VERSION%
16
+ endif::[]
12
17
  ///////////////////////////////////////////
13
18
  END - GENERATED VARIABLES, DO NOT EDIT!
14
19
  ///////////////////////////////////////////
@@ -82,7 +87,7 @@ was not ideal and a dedicated Read mode is an improvement.
82
87
  ==== Compatibility with the Elastic Common Schema (ECS)
83
88
 
84
89
  This plugin adds metadata about event's source, and can be configured to do so
85
- in an {ecs-ref}[ECS-compatible] way with <<plugins-{type}s-{plugin}-ecs_compatibility>>.
90
+ in an https://www.elastic.co/guide/en/ecs/{ecs_version}/index.html[ECS-compatible] way with <<plugins-{type}s-{plugin}-ecs_compatibility>>.
86
91
  This metadata is added after the event has been decoded by the appropriate codec,
87
92
  and will never overwrite existing values.
88
93
 
@@ -270,7 +275,7 @@ In practice, this will be the best case because the time taken to read new conte
270
275
  ** Otherwise, the default value is `disabled`.
271
276
 
272
277
  Controls this plugin's compatibility with the
273
- {ecs-ref}[Elastic Common Schema (ECS)].
278
+ https://www.elastic.co/guide/en/ecs/{ecs_version}/index.html[Elastic Common Schema (ECS)].
274
279
 
275
280
  [id="plugins-{type}s-{plugin}-exclude"]
276
281
  ===== `exclude`
@@ -426,7 +431,7 @@ of `/var/log` will be done for all `*.log` files.
426
431
  Paths must be absolute and cannot be relative.
427
432
 
428
433
  You may also configure multiple paths. See an example
429
- on the {logstash-ref}/configuration-file-structure.html#array[Logstash configuration page].
434
+ on the https://www.elastic.co/guide/en/logstash/{branch}/configuration-file-structure.html#array[Logstash configuration page].
430
435
 
431
436
  [id="plugins-{type}s-{plugin}-sincedb_clean_after"]
432
437
  ===== `sincedb_clean_after`
@@ -439,7 +444,7 @@ The sincedb record now has a last active timestamp associated with it.
439
444
  If no changes are detected in a tracked file in the last N days its sincedb
440
445
  tracking record expires and will not be persisted.
441
446
  This option helps protect against the inode recycling problem.
442
- Filebeat has a {filebeat-ref}/inode-reuse-issue.html[FAQ about inode recycling].
447
+ Filebeat has an https://www.elastic.co/guide/en/beats/filebeat/{branch}/inode-reuse-issue.html[FAQ about inode recycling].
443
448
 
444
449
  [id="plugins-{type}s-{plugin}-sincedb_path"]
445
450
  ===== `sincedb_path`
@@ -533,4 +538,9 @@ Supported values: `us` `usec` `usecs`, e.g. "600 us", "800 usec", "900 usecs"
533
538
  [NOTE]
534
539
  `micro` `micros` and `microseconds` are not supported
535
540
 
541
+ ifeval::["{versioned_docs}"=="true"]
542
+ :branch: current
543
+ :ecs_version: current
544
+ endif::[]
545
+
536
546
  :default_codec!:
Binary file
@@ -1,7 +1,7 @@
1
1
  Gem::Specification.new do |s|
2
2
 
3
3
  s.name = 'logstash-input-file'
4
- s.version = '4.4.0'
4
+ s.version = '4.4.1'
5
5
  s.licenses = ['Apache-2.0']
6
6
  s.summary = "Streams events from files"
7
7
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -57,7 +57,7 @@ module FileInput
57
57
  @tracer.push [:close, true]
58
58
  end
59
59
  def clone
60
- self.class.new
60
+ self
61
61
  end
62
62
  end
63
63
  end
@@ -157,39 +157,49 @@ describe LogStash::Inputs::File do
157
157
  end
158
158
  end
159
159
 
160
- context "running the input twice" do
161
- let(:name) { "D" }
162
- it "should read old files" do
163
- conf = <<-CONFIG
164
- input {
165
- file {
166
- type => "blah"
167
- path => "#{path_path}"
168
- start_position => "beginning"
169
- codec => "json"
170
- }
171
- }
172
- CONFIG
160
+ context "running the input twice", :ecs_compatibility_support do
161
+ ecs_compatibility_matrix(:disabled, :v1, :v8 => :v1) do |ecs_select|
173
162
 
174
- File.open(tmpfile_path, "w") do |fd|
175
- fd.puts('{"path": "my_path", "host": "my_host"}')
176
- fd.puts('{"my_field": "my_val"}')
177
- fd.fsync
163
+ before(:each) do
164
+ allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
178
165
  end
179
- # arbitrary old file (2 days)
180
- FileInput.make_file_older(tmpfile_path, 48 * 60 * 60)
166
+
167
+ let(:file_path_target_field ) { ecs_select[disabled: "path", v1: '[log][file][path]'] }
168
+ let(:source_host_target_field) { ecs_select[disabled: "host", v1: '[host][name]'] }
169
+
170
+ let(:name) { "D" }
171
+ it "should read old files" do
172
+ conf = <<-CONFIG
173
+ input {
174
+ file {
175
+ type => "blah"
176
+ path => "#{path_path}"
177
+ start_position => "beginning"
178
+ codec => "json"
179
+ }
180
+ }
181
+ CONFIG
181
182
 
182
- events = input(conf) do |pipeline, queue|
183
- 2.times.collect { queue.pop }
183
+ File.open(tmpfile_path, "w") do |fd|
184
+ fd.puts('{"path": "my_path", "host": "my_host"}')
185
+ fd.puts('{"my_field": "my_val"}')
186
+ fd.fsync
187
+ end
188
+ # arbitrary old file (2 days)
189
+ FileInput.make_file_older(tmpfile_path, 48 * 60 * 60)
190
+
191
+ events = input(conf) do |pipeline, queue|
192
+ 2.times.collect { queue.pop }
193
+ end
194
+ existing_path_index, added_path_index = "my_val" == events[0].get("my_field") ? [1,0] : [0,1]
195
+ expect(events[existing_path_index].get("path")).to eq "my_path"
196
+ expect(events[existing_path_index].get("host")).to eq "my_host"
197
+ expect(events[existing_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
198
+
199
+ expect(events[added_path_index].get(file_path_target_field)).to eq "#{tmpfile_path}"
200
+ expect(events[added_path_index].get(source_host_target_field)).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
201
+ expect(events[added_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
184
202
  end
185
- existing_path_index, added_path_index = "my_val" == events[0].get("my_field") ? [1,0] : [0,1]
186
- expect(events[existing_path_index].get("path")).to eq "my_path"
187
- expect(events[existing_path_index].get("host")).to eq "my_host"
188
- expect(events[existing_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
189
-
190
- expect(events[added_path_index].get("path")).to eq "#{tmpfile_path}"
191
- expect(events[added_path_index].get("host")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
192
- expect(events[added_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
193
203
  end
194
204
  end
195
205
 
@@ -233,54 +243,62 @@ describe LogStash::Inputs::File do
233
243
  FileUtils.rm_rf(sincedb_path)
234
244
  end
235
245
 
236
- context "when data exists and then more data is appended" do
237
- subject { described_class.new(conf) }
246
+ context "when data exists and then more data is appended", :ecs_compatibility_support do
247
+ ecs_compatibility_matrix(:disabled, :v1, :v8 => :v1) do |ecs_select|
238
248
 
239
- before do
240
- File.open(tmpfile_path, "w") do |fd|
241
- fd.puts("ignore me 1")
242
- fd.puts("ignore me 2")
243
- fd.fsync
249
+ before(:each) do
250
+ allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
244
251
  end
245
- mlconf.update("pattern" => "^\s", "what" => "previous")
246
- conf.update("type" => "blah",
247
- "path" => path_path,
248
- "sincedb_path" => sincedb_path,
249
- "stat_interval" => 0.1,
250
- "codec" => mlcodec,
251
- "delimiter" => TEST_FILE_DELIMITER)
252
- end
253
252
 
254
- it "reads the appended data only" do
255
- subject.register
256
- actions = RSpec::Sequencing
257
- .run_after(1, "append two lines after delay") do
258
- File.open(tmpfile_path, "a") { |fd| fd.puts("hello"); fd.puts("world") }
259
- end
260
- .then("wait for one event") do
261
- wait(0.75).for{events.size}.to eq(1)
262
- end
263
- .then("quit") do
264
- subject.stop
265
- end
266
- .then("wait for flushed event") do
267
- wait(0.75).for{events.size}.to eq(2)
253
+ let(:file_path_target_field ) { ecs_select[disabled: "path", v1: '[log][file][path]'] }
254
+ subject { described_class.new(conf) }
255
+
256
+ before do
257
+ File.open(tmpfile_path, "w") do |fd|
258
+ fd.puts("ignore me 1")
259
+ fd.puts("ignore me 2")
260
+ fd.fsync
268
261
  end
262
+ mlconf.update("pattern" => "^\s", "what" => "previous")
263
+ conf.update("type" => "blah",
264
+ "path" => path_path,
265
+ "sincedb_path" => sincedb_path,
266
+ "stat_interval" => 0.1,
267
+ "codec" => mlcodec,
268
+ "delimiter" => TEST_FILE_DELIMITER)
269
+ end
269
270
 
270
- subject.run(events)
271
- actions.assert_no_errors
271
+ it "reads the appended data only" do
272
+ subject.register
273
+ actions = RSpec::Sequencing
274
+ .run_after(1, "append two lines after delay") do
275
+ File.open(tmpfile_path, "a") { |fd| fd.puts("hello"); fd.puts("world") }
276
+ end
277
+ .then("wait for one event") do
278
+ wait(0.75).for{events.size}.to eq(1)
279
+ end
280
+ .then("quit") do
281
+ subject.stop
282
+ end
283
+ .then("wait for flushed event") do
284
+ wait(0.75).for{events.size}.to eq(2)
285
+ end
272
286
 
273
- event1 = events[0]
274
- expect(event1).not_to be_nil
275
- expect(event1.get("path")).to eq tmpfile_path
276
- expect(event1.get("[@metadata][path]")).to eq tmpfile_path
277
- expect(event1.get("message")).to eq "hello"
278
-
279
- event2 = events[1]
280
- expect(event2).not_to be_nil
281
- expect(event2.get("path")).to eq tmpfile_path
282
- expect(event2.get("[@metadata][path]")).to eq tmpfile_path
283
- expect(event2.get("message")).to eq "world"
287
+ subject.run(events)
288
+ actions.assert_no_errors
289
+
290
+ event1 = events[0]
291
+ expect(event1).not_to be_nil
292
+ expect(event1.get(file_path_target_field)).to eq tmpfile_path
293
+ expect(event1.get("[@metadata][path]")).to eq tmpfile_path
294
+ expect(event1.get("message")).to eq "hello"
295
+
296
+ event2 = events[1]
297
+ expect(event2).not_to be_nil
298
+ expect(event2.get(file_path_target_field)).to eq tmpfile_path
299
+ expect(event2.get("[@metadata][path]")).to eq tmpfile_path
300
+ expect(event2.get("message")).to eq "world"
301
+ end
284
302
  end
285
303
  end
286
304
 
@@ -311,12 +329,21 @@ describe LogStash::Inputs::File do
311
329
  .then_after(0.1, "identity is mapped") do
312
330
  wait(0.75).for{subject.codec.identity_map[tmpfile_path]}.not_to be_nil, "identity is not mapped"
313
331
  end
314
- .then("wait for auto_flush") do
315
- wait(0.75).for{subject.codec.identity_map[tmpfile_path].codec.trace_for(:auto_flush)}.to eq([true]), "autoflush didn't"
332
+ .then("wait accept") do
333
+ wait(0.75).for {
334
+ subject.codec.identity_map[tmpfile_path].codec.trace_for(:accept)
335
+ }.to eq([true]), "accept didn't"
316
336
  end
317
- .then("quit") do
337
+ .then("request a stop") do
338
+ # without this, subject.run doesn't invoke #exit_flush, which is the only @codec.flush_mapped invocation
318
339
  subject.stop
319
340
  end
341
+ .then("wait for auto_flush") do
342
+ wait(2).for {
343
+ subject.codec.identity_map[tmpfile_path].codec.trace_for(:auto_flush)
344
+ .reduce {|b1, b2| b1 and b2} # there could be multiple instances of same call, e.g. [[:accept, true], [:auto_flush, true], [:close, true], [:auto_flush, true]]
345
+ }.to eq(true), "autoflush didn't"
346
+ end
320
347
  subject.run(events)
321
348
  actions.assert_no_errors
322
349
  expect(subject.codec.identity_map[tmpfile_path].codec.trace_for(:accept)).to eq([true])
@@ -356,74 +383,50 @@ describe LogStash::Inputs::File do
356
383
  end
357
384
  end
358
385
 
359
- context "when wildcard path and a multiline codec is specified" do
360
- subject { described_class.new(conf) }
361
- let(:suffix) { "J" }
362
- let(:tmpfile_path2) { ::File.join(tmpdir_path, "K.txt") }
363
- before do
364
- mlconf.update("pattern" => "^\s", "what" => "previous")
365
- conf.update(
366
- "type" => "blah",
367
- "path" => path_path,
368
- "start_position" => "beginning",
369
- "sincedb_path" => sincedb_path,
370
- "stat_interval" => 0.05,
371
- "codec" => mlcodec,
372
- "file_sort_by" => "path",
373
- "delimiter" => TEST_FILE_DELIMITER)
386
+ context "when wildcard path and a multiline codec is specified", :ecs_compatibility_support do
387
+ ecs_compatibility_matrix(:disabled, :v1, :v8 => :v1) do |ecs_select|
374
388
 
375
- subject.register
376
- end
389
+ before(:each) do
390
+ allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
391
+ end
377
392
 
378
- it "collects separate multiple line events from each file" do
379
- subject
380
- actions = RSpec::Sequencing
381
- .run_after(0.1, "create files") do
382
- File.open(tmpfile_path, "wb") do |fd|
383
- fd.puts("line1.1-of-J")
384
- fd.puts(" line1.2-of-J")
385
- fd.puts(" line1.3-of-J")
386
- end
387
- File.open(tmpfile_path2, "wb") do |fd|
388
- fd.puts("line1.1-of-K")
389
- fd.puts(" line1.2-of-K")
390
- fd.puts(" line1.3-of-K")
391
- end
392
- end
393
- .then("assert both files are mapped as identities and stop") do
394
- wait(2).for {subject.codec.identity_count}.to eq(2), "both files are not mapped as identities"
395
- end
396
- .then("stop") do
397
- subject.stop
398
- end
399
- subject.run(events)
400
- # wait for actions to complete
401
- actions.assert_no_errors
402
- expect(events.size).to eq(2)
403
- e1, e2 = events
404
- e1_message = e1.get("message")
405
- e2_message = e2.get("message")
406
-
407
- expect(e1.get("path")).to match(/J.txt/)
408
- expect(e2.get("path")).to match(/K.txt/)
409
- expect(e1_message).to eq("line1.1-of-J#{TEST_FILE_DELIMITER} line1.2-of-J#{TEST_FILE_DELIMITER} line1.3-of-J")
410
- expect(e2_message).to eq("line1.1-of-K#{TEST_FILE_DELIMITER} line1.2-of-K#{TEST_FILE_DELIMITER} line1.3-of-K")
411
- end
393
+ let(:file_path_target_field ) { ecs_select[disabled: "path", v1: '[log][file][path]'] }
412
394
 
413
- context "if auto_flush is enabled on the multiline codec" do
414
- let(:mlconf) { { "auto_flush_interval" => 0.5 } }
415
- let(:suffix) { "M" }
416
- it "an event is generated via auto_flush" do
395
+ subject { described_class.new(conf) }
396
+ let(:suffix) { "J" }
397
+ let(:tmpfile_path2) { ::File.join(tmpdir_path, "K.txt") }
398
+ before do
399
+ mlconf.update("pattern" => "^\s", "what" => "previous")
400
+ conf.update(
401
+ "type" => "blah",
402
+ "path" => path_path,
403
+ "start_position" => "beginning",
404
+ "sincedb_path" => sincedb_path,
405
+ "stat_interval" => 0.05,
406
+ "codec" => mlcodec,
407
+ "file_sort_by" => "path",
408
+ "delimiter" => TEST_FILE_DELIMITER)
409
+
410
+ subject.register
411
+ end
412
+
413
+ it "collects separate multiple line events from each file" do
414
+ subject
417
415
  actions = RSpec::Sequencing
418
416
  .run_after(0.1, "create files") do
419
417
  File.open(tmpfile_path, "wb") do |fd|
420
- fd.puts("line1.1-of-a")
421
- fd.puts(" line1.2-of-a")
422
- fd.puts(" line1.3-of-a")
418
+ fd.puts("line1.1-of-J")
419
+ fd.puts(" line1.2-of-J")
420
+ fd.puts(" line1.3-of-J")
421
+ end
422
+ File.open(tmpfile_path2, "wb") do |fd|
423
+ fd.puts("line1.1-of-K")
424
+ fd.puts(" line1.2-of-K")
425
+ fd.puts(" line1.3-of-K")
423
426
  end
424
427
  end
425
- .then("wait for auto_flush") do
426
- wait(2).for{events.size}.to eq(1), "events size is not 1"
428
+ .then("assert both files are mapped as identities and stop") do
429
+ wait(2).for {subject.codec.identity_count}.to eq(2), "both files are not mapped as identities"
427
430
  end
428
431
  .then("stop") do
429
432
  subject.stop
@@ -431,10 +434,43 @@ describe LogStash::Inputs::File do
431
434
  subject.run(events)
432
435
  # wait for actions to complete
433
436
  actions.assert_no_errors
434
- e1 = events.first
437
+ expect(events.size).to eq(2)
438
+ e1, e2 = events
435
439
  e1_message = e1.get("message")
436
- expect(e1_message).to eq("line1.1-of-a#{TEST_FILE_DELIMITER} line1.2-of-a#{TEST_FILE_DELIMITER} line1.3-of-a")
437
- expect(e1.get("path")).to match(/M.txt$/)
440
+ e2_message = e2.get("message")
441
+
442
+ expect(e1.get(file_path_target_field)).to match(/J.txt/)
443
+ expect(e2.get(file_path_target_field)).to match(/K.txt/)
444
+ expect(e1_message).to eq("line1.1-of-J#{TEST_FILE_DELIMITER} line1.2-of-J#{TEST_FILE_DELIMITER} line1.3-of-J")
445
+ expect(e2_message).to eq("line1.1-of-K#{TEST_FILE_DELIMITER} line1.2-of-K#{TEST_FILE_DELIMITER} line1.3-of-K")
446
+ end
447
+
448
+ context "if auto_flush is enabled on the multiline codec" do
449
+ let(:mlconf) { { "auto_flush_interval" => 0.5 } }
450
+ let(:suffix) { "M" }
451
+ it "an event is generated via auto_flush" do
452
+ actions = RSpec::Sequencing
453
+ .run_after(0.1, "create files") do
454
+ File.open(tmpfile_path, "wb") do |fd|
455
+ fd.puts("line1.1-of-a")
456
+ fd.puts(" line1.2-of-a")
457
+ fd.puts(" line1.3-of-a")
458
+ end
459
+ end
460
+ .then("wait for auto_flush") do
461
+ wait(2).for{events.size}.to eq(1), "events size is not 1"
462
+ end
463
+ .then("stop") do
464
+ subject.stop
465
+ end
466
+ subject.run(events)
467
+ # wait for actions to complete
468
+ actions.assert_no_errors
469
+ e1 = events.first
470
+ e1_message = e1.get("message")
471
+ expect(e1_message).to eq("line1.1-of-a#{TEST_FILE_DELIMITER} line1.2-of-a#{TEST_FILE_DELIMITER} line1.3-of-a")
472
+ expect(e1.get(file_path_target_field)).to match(/M.txt$/)
473
+ end
438
474
  end
439
475
  end
440
476
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-input-file
3
3
  version: !ruby/object:Gem::Version
4
- version: 4.4.0
4
+ version: 4.4.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Elastic
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2021-08-04 00:00:00.000000000 Z
11
+ date: 2022-05-11 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement