logstash-input-file 4.0.5 → 4.1.0
This diff compares the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +25 -3
- data/JAR_VERSION +1 -0
- data/docs/index.asciidoc +195 -37
- data/lib/filewatch/bootstrap.rb +74 -0
- data/lib/filewatch/discoverer.rb +94 -0
- data/lib/filewatch/helper.rb +65 -0
- data/lib/filewatch/observing_base.rb +97 -0
- data/lib/filewatch/observing_read.rb +23 -0
- data/lib/filewatch/observing_tail.rb +22 -0
- data/lib/filewatch/read_mode/handlers/base.rb +81 -0
- data/lib/filewatch/read_mode/handlers/read_file.rb +47 -0
- data/lib/filewatch/read_mode/handlers/read_zip_file.rb +57 -0
- data/lib/filewatch/read_mode/processor.rb +117 -0
- data/lib/filewatch/settings.rb +67 -0
- data/lib/filewatch/sincedb_collection.rb +215 -0
- data/lib/filewatch/sincedb_record_serializer.rb +70 -0
- data/lib/filewatch/sincedb_value.rb +87 -0
- data/lib/filewatch/tail_mode/handlers/base.rb +124 -0
- data/lib/filewatch/tail_mode/handlers/create.rb +17 -0
- data/lib/filewatch/tail_mode/handlers/create_initial.rb +21 -0
- data/lib/filewatch/tail_mode/handlers/delete.rb +11 -0
- data/lib/filewatch/tail_mode/handlers/grow.rb +11 -0
- data/lib/filewatch/tail_mode/handlers/shrink.rb +20 -0
- data/lib/filewatch/tail_mode/handlers/timeout.rb +10 -0
- data/lib/filewatch/tail_mode/handlers/unignore.rb +37 -0
- data/lib/filewatch/tail_mode/processor.rb +209 -0
- data/lib/filewatch/watch.rb +107 -0
- data/lib/filewatch/watched_file.rb +226 -0
- data/lib/filewatch/watched_files_collection.rb +84 -0
- data/lib/filewatch/winhelper.rb +65 -0
- data/lib/jars/filewatch-1.0.0.jar +0 -0
- data/lib/logstash/inputs/delete_completed_file_handler.rb +9 -0
- data/lib/logstash/inputs/file.rb +162 -107
- data/lib/logstash/inputs/file_listener.rb +61 -0
- data/lib/logstash/inputs/log_completed_file_handler.rb +13 -0
- data/logstash-input-file.gemspec +5 -4
- data/spec/filewatch/buftok_spec.rb +24 -0
- data/spec/filewatch/reading_spec.rb +128 -0
- data/spec/filewatch/sincedb_record_serializer_spec.rb +71 -0
- data/spec/filewatch/spec_helper.rb +120 -0
- data/spec/filewatch/tailing_spec.rb +440 -0
- data/spec/filewatch/watched_file_spec.rb +38 -0
- data/spec/filewatch/watched_files_collection_spec.rb +73 -0
- data/spec/filewatch/winhelper_spec.rb +22 -0
- data/spec/fixtures/compressed.log.gz +0 -0
- data/spec/fixtures/compressed.log.gzip +0 -0
- data/spec/fixtures/invalid_utf8.gbk.log +2 -0
- data/spec/fixtures/no-final-newline.log +2 -0
- data/spec/fixtures/uncompressed.log +2 -0
- data/spec/{spec_helper.rb → helpers/spec_helper.rb} +14 -41
- data/spec/inputs/file_read_spec.rb +155 -0
- data/spec/inputs/{file_spec.rb → file_tail_spec.rb} +55 -52
- metadata +96 -28
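Most of the added files vendor the filewatch library into the plugin itself (bootstrap, discoverer, sincedb handling, and separate handler sets for tail and read modes), and the plugin gains a second operating mode. As a rough orientation before the spec diffs below, a minimal read-mode pipeline built from the options those specs exercise would look like this (the paths are illustrative, not taken from the diff):

input {
  file {
    path => "/var/log/archive/*.log"            # illustrative glob
    mode => "read"                              # new in 4.1.0; "tail" remains the default
    file_completed_action => "log"              # the specs also exercise "delete"
    file_completed_log_path => "/var/log/archive/completed.log"
    sincedb_path => "/var/lib/logstash/sincedb-archive"
  }
}

In read mode the plugin treats each discovered file as finite content to be consumed to EOF (including gzipped files, per the compressed-fixture spec) and then applies file_completed_action, instead of watching for appended lines as tail mode does.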
data/spec/filewatch/watched_file_spec.rb
@@ -0,0 +1,38 @@
+require 'stud/temporary'
+require_relative 'spec_helper'
+
+module FileWatch
+  describe WatchedFile do
+    let(:pathname) { Pathname.new(__FILE__) }
+
+    context 'Given two instances of the same file' do
+      it 'their sincedb_keys should equate' do
+        wf_key1 = WatchedFile.new(pathname, pathname.stat, Settings.new).sincedb_key
+        hash_db = { wf_key1 => 42 }
+        wf_key2 = WatchedFile.new(pathname, pathname.stat, Settings.new).sincedb_key
+        expect(wf_key1).to eq(wf_key2)
+        expect(wf_key1).to eql(wf_key2)
+        expect(wf_key1.hash).to eq(wf_key2.hash)
+        expect(hash_db[wf_key2]).to eq(42)
+      end
+    end
+
+    context 'Given a barrage of state changes' do
+      it 'only the previous N state changes are remembered' do
+        watched_file = WatchedFile.new(pathname, pathname.stat, Settings.new)
+        watched_file.ignore
+        watched_file.watch
+        watched_file.activate
+        watched_file.watch
+        watched_file.close
+        watched_file.watch
+        watched_file.activate
+        watched_file.unwatch
+        watched_file.activate
+        watched_file.close
+        expect(watched_file.closed?).to be_truthy
+        expect(watched_file.recent_states).to eq([:watched, :active, :watched, :closed, :watched, :active, :unwatched, :active])
+      end
+    end
+  end
+end
data/spec/filewatch/watched_files_collection_spec.rb
@@ -0,0 +1,73 @@
+require_relative 'spec_helper'
+
+module FileWatch
+  describe WatchedFilesCollection do
+    let(:time) { Time.now }
+    let(:stat1) { double("stat1", :size => 98, :ctime => time - 30, :mtime => time - 30, :ino => 234567, :dev_major => 3, :dev_minor => 2) }
+    let(:stat2) { double("stat2", :size => 99, :ctime => time - 20, :mtime => time - 20, :ino => 234568, :dev_major => 3, :dev_minor => 2) }
+    let(:stat3) { double("stat3", :size => 100, :ctime => time, :mtime => time, :ino => 234569, :dev_major => 3, :dev_minor => 2) }
+    let(:wf1) { WatchedFile.new("/var/log/z.log", stat1, Settings.new) }
+    let(:wf2) { WatchedFile.new("/var/log/m.log", stat2, Settings.new) }
+    let(:wf3) { WatchedFile.new("/var/log/a.log", stat3, Settings.new) }
+
+    context "sort by last_modified in ascending order" do
+      let(:sort_by) { "last_modified" }
+      let(:sort_direction) { "asc" }
+
+      it "sorts earliest modified first" do
+        collection = described_class.new(Settings.from_options(:file_sort_by => sort_by, :file_sort_direction => sort_direction))
+        collection.add(wf2)
+        expect(collection.values).to eq([wf2])
+        collection.add(wf3)
+        expect(collection.values).to eq([wf2, wf3])
+        collection.add(wf1)
+        expect(collection.values).to eq([wf1, wf2, wf3])
+      end
+    end
+
+    context "sort by path in ascending order" do
+      let(:sort_by) { "path" }
+      let(:sort_direction) { "asc" }
+
+      it "sorts path A-Z" do
+        collection = described_class.new(Settings.from_options(:file_sort_by => sort_by, :file_sort_direction => sort_direction))
+        collection.add(wf2)
+        expect(collection.values).to eq([wf2])
+        collection.add(wf1)
+        expect(collection.values).to eq([wf2, wf1])
+        collection.add(wf3)
+        expect(collection.values).to eq([wf3, wf2, wf1])
+      end
+    end
+
+    context "sort by last_modified in descending order" do
+      let(:sort_by) { "last_modified" }
+      let(:sort_direction) { "desc" }
+
+      it "sorts latest modified first" do
+        collection = described_class.new(Settings.from_options(:file_sort_by => sort_by, :file_sort_direction => sort_direction))
+        collection.add(wf2)
+        expect(collection.values).to eq([wf2])
+        collection.add(wf1)
+        expect(collection.values).to eq([wf2, wf1])
+        collection.add(wf3)
+        expect(collection.values).to eq([wf3, wf2, wf1])
+      end
+    end
+
+    context "sort by path in descending order" do
+      let(:sort_by) { "path" }
+      let(:sort_direction) { "desc" }
+
+      it "sorts path Z-A" do
+        collection = described_class.new(Settings.from_options(:file_sort_by => sort_by, :file_sort_direction => sort_direction))
+        collection.add(wf2)
+        expect(collection.values).to eq([wf2])
+        collection.add(wf1)
+        expect(collection.values).to eq([wf1, wf2])
+        collection.add(wf3)
+        expect(collection.values).to eq([wf1, wf2, wf3])
+      end
+    end
+  end
+end
data/spec/filewatch/winhelper_spec.rb
@@ -0,0 +1,22 @@
+require "stud/temporary"
+require "fileutils"
+
+if Gem.win_platform?
+  require "lib/filewatch/winhelper"
+
+  describe Winhelper do
+    let(:path) { Stud::Temporary.file.path }
+
+    after do
+      FileUtils.rm_rf(path)
+    end
+
+    it "return a unique file identifier" do
+      volume_serial, file_index_low, file_index_high = Winhelper.GetWindowsUniqueFileIdentifier(path).split("-").map(&:to_i)
+
+      expect(volume_serial).not_to eq(0)
+      expect(file_index_low).not_to eq(0)
+      expect(file_index_high).not_to eq(0)
+    end
+  end
+end
data/spec/fixtures/compressed.log.gz
Binary file
data/spec/fixtures/compressed.log.gzip
Binary file
data/spec/fixtures/no-final-newline.log
@@ -0,0 +1,2 @@
+2010-03-12 23:51:20 SEA4 192.0.2.147 connect 2014 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 - - - -
+2010-03-12 23:51:21 SEA4 192.0.2.222 play 3914 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 myvideo p=2&q=4 flv 1
data/spec/fixtures/uncompressed.log
@@ -0,0 +1,2 @@
+2010-03-12 23:51:20 SEA4 192.0.2.147 connect 2014 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 - - - -
+2010-03-12 23:51:21 SEA4 192.0.2.222 play 3914 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 myvideo p=2&q=4 flv 1
data/spec/{spec_helper.rb → helpers/spec_helper.rb}
@@ -4,41 +4,33 @@ require "logstash/devutils/rspec/spec_helper"
 require "rspec_sequencing"
 
 module FileInput
+
+  FIXTURE_DIR = File.join('spec', 'fixtures')
+
   def self.make_file_older(path, seconds)
     time = Time.now.to_f - seconds
-    File.utime(time, time, path)
+    ::File.utime(time, time, path)
+  end
+
+  def self.make_fixture_current(path, time = Time.now)
+    ::File.utime(time, time, path)
   end
-
+
   class TracerBase
-    def initialize() @tracer = []; end
+    def initialize
+      @tracer = []
+    end
 
     def trace_for(symbol)
      params = @tracer.map {|k,v| k == symbol ? v : nil}.compact
      params.empty? ? false : params
    end
 
-    def clear
-      @tracer.clear
+    def clear
+      @tracer.clear
     end
   end
 
-  class FileLogTracer < TracerBase
-    def warn(*args) @tracer.push [:warn, args]; end
-    def error(*args) @tracer.push [:error, args]; end
-    def debug(*args) @tracer.push [:debug, args]; end
-    def info(*args) @tracer.push [:info, args]; end
-
-    def info?() true; end
-    def debug?() true; end
-    def warn?() true; end
-    def error?() true; end
-  end
-
-  class ComponentTracer < TracerBase
-    def accept(*args) @tracer.push [:accept, args]; end
-    def deliver(*args) @tracer.push [:deliver, args]; end
-  end
-
   class CodecTracer < TracerBase
     def decode_accept(ctx, data, listener)
       @tracer.push [:decode_accept, [ctx, data]]
@@ -62,25 +54,6 @@ module FileInput
     end
   end
 
-  unless Kernel.method_defined?(:pause_until)
-    module Kernel
-      def pause_until(nap = 5, &block)
-        sq = SizedQueue.new(1)
-        th1 = Thread.new(sq) {|q| sleep nap; q.push(false) }
-        th2 = Thread.new(sq) do |q|
-          success = false
-          iters = nap * 5 + 1
-          iters.times do
-            break if !!(success = block.call)
-            sleep(0.2)
-          end
-          q.push(success)
-        end
-        sq.pop
-      end
-    end
-  end
-
 unless RSpec::Matchers.method_defined?(:receive_call_and_args)
   RSpec::Matchers.define(:receive_call_and_args) do |m, args|
     match do |actual|
data/spec/inputs/file_read_spec.rb
@@ -0,0 +1,155 @@
+# encoding: utf-8
+
+require "helpers/spec_helper"
+require "logstash/inputs/file"
+
+# LogStash::Logging::Logger::configure_logging("DEBUG")
+
+require "tempfile"
+require "stud/temporary"
+require "logstash/codecs/multiline"
+
+FILE_DELIMITER = LogStash::Environment.windows? ? "\r\n" : "\n"
+
+describe LogStash::Inputs::File do
+  describe "'read' mode testing with input(conf) do |pipeline, queue|" do
+    it "should start at the beginning of an existing file and delete the file when done" do
+      tmpfile_path = Stud::Temporary.pathname
+      sincedb_path = Stud::Temporary.pathname
+
+      conf = <<-CONFIG
+        input {
+          file {
+            type => "blah"
+            path => "#{tmpfile_path}"
+            sincedb_path => "#{sincedb_path}"
+            delimiter => "#{FILE_DELIMITER}"
+            mode => "read"
+            file_completed_action => "delete"
+          }
+        }
+      CONFIG
+
+      File.open(tmpfile_path, "a") do |fd|
+        fd.puts("hello")
+        fd.puts("world")
+        fd.fsync
+      end
+
+      events = input(conf) do |pipeline, queue|
+        2.times.collect { queue.pop }
+      end
+
+      expect(events.map{|e| e.get("message")}).to contain_exactly("hello", "world")
+      expect(File.exist?(tmpfile_path)).to be_falsey
+    end
+  end
+
+  describe "reading fixtures" do
+    let(:fixture_dir) { Pathname.new(FileInput::FIXTURE_DIR).expand_path }
+
+    context "for a file without a final newline character" do
+      let(:file_path) { fixture_dir.join('no-final-newline.log') }
+
+      it "the file is read and the path is logged to the `file_completed_log_path` file" do
+        tmpfile_path = fixture_dir.join("no-f*.log")
+        sincedb_path = Stud::Temporary.pathname
+        FileInput.make_fixture_current(file_path.to_path)
+        log_completed_path = Stud::Temporary.pathname
+
+        conf = <<-CONFIG
+          input {
+            file {
+              type => "blah"
+              path => "#{tmpfile_path}"
+              sincedb_path => "#{sincedb_path}"
+              delimiter => "#{FILE_DELIMITER}"
+              mode => "read"
+              file_completed_action => "log"
+              file_completed_log_path => "#{log_completed_path}"
+            }
+          }
+        CONFIG
+
+        events = input(conf) do |pipeline, queue|
+          2.times.collect { queue.pop }
+        end
+
+        expect(events[0].get("message")).to start_with("2010-03-12 23:51")
+        expect(events[1].get("message")).to start_with("2010-03-12 23:51")
+        expect(IO.read(log_completed_path)).to eq(file_path.to_s + "\n")
+      end
+
+    end
+
+    context "for an uncompressed file" do
+      let(:file_path) { fixture_dir.join('uncompressed.log') }
+
+      it "the file is read and the path is logged to the `file_completed_log_path` file" do
+        tmpfile_path = fixture_dir.join("unc*.log")
+        sincedb_path = Stud::Temporary.pathname
+        FileInput.make_fixture_current(file_path.to_path)
+        log_completed_path = Stud::Temporary.pathname
+
+        conf = <<-CONFIG
+          input {
+            file {
+              type => "blah"
+              path => "#{tmpfile_path}"
+              sincedb_path => "#{sincedb_path}"
+              delimiter => "#{FILE_DELIMITER}"
+              mode => "read"
+              file_completed_action => "log"
+              file_completed_log_path => "#{log_completed_path}"
+            }
+          }
+        CONFIG
+
+        events = input(conf) do |pipeline, queue|
+          2.times.collect { queue.pop }
+        end
+
+        expect(events[0].get("message")).to start_with("2010-03-12 23:51")
+        expect(events[1].get("message")).to start_with("2010-03-12 23:51")
+        expect(IO.read(log_completed_path)).to eq(file_path.to_s + "\n")
+      end
+    end
+
+    context "for a compressed file" do
+      it "the file is read" do
+        tmpfile_path = fixture_dir.join("compressed.*.*")
+        sincedb_path = Stud::Temporary.pathname
+        file_path = fixture_dir.join('compressed.log.gz')
+        file_path2 = fixture_dir.join('compressed.log.gzip')
+        FileInput.make_fixture_current(file_path.to_path)
+        log_completed_path = Stud::Temporary.pathname
+
+        conf = <<-CONFIG
+          input {
+            file {
+              type => "blah"
+              path => "#{tmpfile_path}"
+              sincedb_path => "#{sincedb_path}"
+              delimiter => "#{FILE_DELIMITER}"
+              mode => "read"
+              file_completed_action => "log"
+              file_completed_log_path => "#{log_completed_path}"
+            }
+          }
+        CONFIG
+
+        events = input(conf) do |pipeline, queue|
+          4.times.collect { queue.pop }
+        end
+
+        expect(events[0].get("message")).to start_with("2010-03-12 23:51")
+        expect(events[1].get("message")).to start_with("2010-03-12 23:51")
+        expect(events[2].get("message")).to start_with("2010-03-12 23:51")
+        expect(events[3].get("message")).to start_with("2010-03-12 23:51")
+        logged_completions = IO.read(log_completed_path).split
+        expect(logged_completions.first).to match(/compressed\.log\.(gzip|gz)$/)
+        expect(logged_completions.last).to match(/compressed\.log\.(gzip|gz)$/)
+      end
+    end
+  end
+end
data/spec/inputs/{file_spec.rb → file_tail_spec.rb}
@@ -1,14 +1,18 @@
 # encoding: utf-8
-
+
+require "helpers/spec_helper"
 require "logstash/inputs/file"
+
 require "tempfile"
 require "stud/temporary"
 require "logstash/codecs/multiline"
 
-
+# LogStash::Logging::Logger::configure_logging("DEBUG")
+
+TEST_FILE_DELIMITER = LogStash::Environment.windows? ? "\r\n" : "\n"
 
 describe LogStash::Inputs::File do
-  describe "testing with input(conf) do |pipeline, queue|" do
+  describe "'tail' mode testing with input(conf) do |pipeline, queue|" do
     it_behaves_like "an interruptible input plugin" do
       let(:config) do
         {
@@ -29,7 +33,7 @@ describe LogStash::Inputs::File do
          path => "#{tmpfile_path}"
          start_position => "beginning"
          sincedb_path => "#{sincedb_path}"
-         delimiter => "#{FILE_DELIMITER}"
+         delimiter => "#{TEST_FILE_DELIMITER}"
        }
      }
    CONFIG
@@ -43,12 +47,10 @@ describe LogStash::Inputs::File do
      events = input(conf) do |pipeline, queue|
        2.times.collect { queue.pop }
      end
-
-      insist { events[0].get("message") } == "hello"
-      insist { events[1].get("message") } == "world"
+      expect(events.map{|e| e.get("message")}).to contain_exactly("hello", "world")
    end
 
-    it "should
+    it "should restart at the sincedb value" do
      tmpfile_path = Stud::Temporary.pathname
      sincedb_path = Stud::Temporary.pathname
 
@@ -59,7 +61,7 @@ describe LogStash::Inputs::File do
          path => "#{tmpfile_path}"
          start_position => "beginning"
          sincedb_path => "#{sincedb_path}"
-         delimiter => "#{FILE_DELIMITER}"
+         delimiter => "#{TEST_FILE_DELIMITER}"
        }
      }
    CONFIG
@@ -73,8 +75,7 @@ describe LogStash::Inputs::File do
        2.times.collect { queue.pop }
      end
 
-
-      insist { events[1].get("message") } == "world3"
+      expect(events.map{|e| e.get("message")}).to contain_exactly("hello3", "world3")
 
      File.open(tmpfile_path, "a") do |fd|
        fd.puts("foo")
@@ -86,10 +87,8 @@ describe LogStash::Inputs::File do
      events = input(conf) do |pipeline, queue|
        3.times.collect { queue.pop }
      end
-
-
-      insist { events[1].get("message") } == "bar"
-      insist { events[2].get("message") } == "baz"
+      messages = events.map{|e| e.get("message")}
+      expect(messages).to contain_exactly("foo", "bar", "baz")
    end
 
    it "should not overwrite existing path and host fields" do
@@ -103,7 +102,7 @@ describe LogStash::Inputs::File do
          path => "#{tmpfile_path}"
          start_position => "beginning"
          sincedb_path => "#{sincedb_path}"
-         delimiter => "#{FILE_DELIMITER}"
+         delimiter => "#{TEST_FILE_DELIMITER}"
          codec => "json"
        }
      }
@@ -119,13 +118,15 @@ describe LogStash::Inputs::File do
        2.times.collect { queue.pop }
      end
 
-
-      insist { events[0].get("host") } == "my_host"
-      insist { events[0].get("[@metadata][host]") } == "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
+      existing_path_index, added_path_index = "my_val" == events[0].get("my_field") ? [1,0] : [0,1]
 
-
-
-
+      expect(events[existing_path_index].get("path")).to eq "my_path"
+      expect(events[existing_path_index].get("host")).to eq "my_host"
+      expect(events[existing_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
+
+      expect(events[added_path_index].get("path")).to eq "#{tmpfile_path}"
+      expect(events[added_path_index].get("host")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
+      expect(events[added_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
    end
 
    it "should read old files" do
@@ -153,14 +154,14 @@ describe LogStash::Inputs::File do
      events = input(conf) do |pipeline, queue|
        2.times.collect { queue.pop }
      end
-
-
-
-
-
-
-
-
+      existing_path_index, added_path_index = "my_val" == events[0].get("my_field") ? [1,0] : [0,1]
+      expect(events[existing_path_index].get("path")).to eq "my_path"
+      expect(events[existing_path_index].get("host")).to eq "my_host"
+      expect(events[existing_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
+
+      expect(events[added_path_index].get("path")).to eq "#{tmpfile_path}"
+      expect(events[added_path_index].get("host")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
+      expect(events[added_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
    end
 
    context "when sincedb_path is an existing directory" do
@@ -207,17 +208,17 @@ describe LogStash::Inputs::File do
        "sincedb_path" => sincedb_path,
        "stat_interval" => 0.1,
        "codec" => mlcodec,
-        "delimiter" => FILE_DELIMITER)
-      subject.register
+        "delimiter" => TEST_FILE_DELIMITER)
    end
 
    it "reads the appended data only" do
+      subject.register
      RSpec::Sequencing
-        .run_after(0.
+        .run_after(0.2, "assert zero events then append two lines") do
          expect(events.size).to eq(0)
          File.open(tmpfile_path, "a") { |fd| fd.puts("hello"); fd.puts("world") }
        end
-        .then_after(0.
+        .then_after(0.4, "quit") do
          subject.stop
        end
@@ -250,7 +251,7 @@ describe LogStash::Inputs::File do
        "stat_interval" => 0.02,
        "codec" => codec,
        "close_older" => 0.5,
-        "delimiter" => FILE_DELIMITER)
+        "delimiter" => TEST_FILE_DELIMITER)
 
      subject.register
    end
@@ -294,7 +295,7 @@ describe LogStash::Inputs::File do
        "stat_interval" => 0.02,
        "codec" => codec,
        "ignore_older" => 1,
-        "delimiter" => FILE_DELIMITER)
+        "delimiter" => TEST_FILE_DELIMITER)
 
      subject.register
      Thread.new { subject.run(events) }
@@ -320,7 +321,7 @@ describe LogStash::Inputs::File do
        "sincedb_path" => sincedb_path,
        "stat_interval" => 0.05,
        "codec" => mlcodec,
-        "delimiter" => FILE_DELIMITER)
+        "delimiter" => TEST_FILE_DELIMITER)
 
      subject.register
    end
@@ -355,13 +356,13 @@ describe LogStash::Inputs::File do
          if e1_message.start_with?('line1.1-of-z')
            expect(e1.get("path")).to match(/z.log/)
            expect(e2.get("path")).to match(/A.log/)
-            expect(e1_message).to eq("line1.1-of-z#{FILE_DELIMITER} line1.2-of-z#{FILE_DELIMITER} line1.3-of-z")
-            expect(e2_message).to eq("line1.1-of-a#{FILE_DELIMITER} line1.2-of-a#{FILE_DELIMITER} line1.3-of-a")
+            expect(e1_message).to eq("line1.1-of-z#{TEST_FILE_DELIMITER} line1.2-of-z#{TEST_FILE_DELIMITER} line1.3-of-z")
+            expect(e2_message).to eq("line1.1-of-a#{TEST_FILE_DELIMITER} line1.2-of-a#{TEST_FILE_DELIMITER} line1.3-of-a")
          else
            expect(e1.get("path")).to match(/A.log/)
            expect(e2.get("path")).to match(/z.log/)
-            expect(e1_message).to eq("line1.1-of-a#{FILE_DELIMITER} line1.2-of-a#{FILE_DELIMITER} line1.3-of-a")
-            expect(e2_message).to eq("line1.1-of-z#{FILE_DELIMITER} line1.2-of-z#{FILE_DELIMITER} line1.3-of-z")
+            expect(e1_message).to eq("line1.1-of-a#{TEST_FILE_DELIMITER} line1.2-of-a#{TEST_FILE_DELIMITER} line1.3-of-a")
+            expect(e2_message).to eq("line1.1-of-z#{TEST_FILE_DELIMITER} line1.2-of-z#{TEST_FILE_DELIMITER} line1.3-of-z")
          end
        end
        subject.run(events)
@@ -385,7 +386,7 @@ describe LogStash::Inputs::File do
          e1 = events.first
          e1_message = e1.get("message")
          expect(e1["path"]).to match(/a.log/)
-          expect(e1_message).to eq("line1.1-of-a#{FILE_DELIMITER} line1.2-of-a#{FILE_DELIMITER} line1.3-of-a")
+          expect(e1_message).to eq("line1.1-of-a#{TEST_FILE_DELIMITER} line1.2-of-a#{TEST_FILE_DELIMITER} line1.3-of-a")
        end
        .then("stop") do
          subject.stop
@@ -400,7 +401,6 @@ describe LogStash::Inputs::File do
    context "when #run is called multiple times", :unix => true do
      let(:file_path) { "#{tmpdir_path}/a.log" }
      let(:buffer) { [] }
-      let(:lsof) { [] }
      let(:run_thread_proc) do
        lambda { Thread.new { subject.run(buffer) } }
      end
@@ -424,17 +424,20 @@ describe LogStash::Inputs::File do
        end
      end
 
-      it "should only
+      it "should only actually open files when content changes are detected" do
        subject.register
        expect(lsof_proc.call).to eq("")
+        # first run processes the file and records sincedb progress
        run_thread_proc.call
-
-
-
+        wait(1).for{lsof_proc.call.scan(file_path).size}.to eq(1)
+        # second run quits the first run
+        # sees the file has not changed size and does not open it
        run_thread_proc.call
-
-
-
+        wait(1).for{lsof_proc.call.scan(file_path).size}.to eq(0)
+        # truncate and write less than before
+        File.open(file_path, "w"){ |fd| fd.puts('baz'); fd.fsync }
+        # sees the file has changed size and does open it
+        wait(1).for{lsof_proc.call.scan(file_path).size}.to eq(1)
      end
    end
 
@@ -463,7 +466,7 @@ describe LogStash::Inputs::File do
        "stat_interval" => 0.1,
        "max_open_files" => 1,
        "start_position" => "beginning",
-        "delimiter" => FILE_DELIMITER)
+        "delimiter" => TEST_FILE_DELIMITER)
      subject.register
    end
    it "collects line events from only one file" do
@@ -502,7 +505,7 @@ describe LogStash::Inputs::File do
        "max_open_files" => 1,
        "close_older" => 0.5,
        "start_position" => "beginning",
-        "delimiter" => FILE_DELIMITER)
+        "delimiter" => TEST_FILE_DELIMITER)
      subject.register
    end
 