logstash-input-file 4.1.3 → 4.1.4
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +10 -0
- data/JAR_VERSION +1 -1
- data/README.md +0 -3
- data/docs/index.asciidoc +26 -16
- data/lib/filewatch/bootstrap.rb +10 -21
- data/lib/filewatch/discoverer.rb +35 -28
- data/lib/filewatch/observing_base.rb +2 -1
- data/lib/filewatch/read_mode/handlers/base.rb +19 -6
- data/lib/filewatch/read_mode/handlers/read_file.rb +43 -32
- data/lib/filewatch/read_mode/handlers/read_zip_file.rb +8 -3
- data/lib/filewatch/read_mode/processor.rb +8 -8
- data/lib/filewatch/settings.rb +3 -3
- data/lib/filewatch/sincedb_collection.rb +56 -42
- data/lib/filewatch/sincedb_value.rb +6 -0
- data/lib/filewatch/stat/generic.rb +34 -0
- data/lib/filewatch/stat/windows_path.rb +32 -0
- data/lib/filewatch/tail_mode/handlers/base.rb +40 -22
- data/lib/filewatch/tail_mode/handlers/create.rb +1 -2
- data/lib/filewatch/tail_mode/handlers/create_initial.rb +2 -1
- data/lib/filewatch/tail_mode/handlers/delete.rb +13 -1
- data/lib/filewatch/tail_mode/handlers/grow.rb +5 -2
- data/lib/filewatch/tail_mode/handlers/shrink.rb +7 -4
- data/lib/filewatch/tail_mode/handlers/unignore.rb +4 -2
- data/lib/filewatch/tail_mode/processor.rb +147 -58
- data/lib/filewatch/watch.rb +15 -35
- data/lib/filewatch/watched_file.rb +237 -41
- data/lib/filewatch/watched_files_collection.rb +2 -2
- data/lib/filewatch/winhelper.rb +167 -25
- data/lib/jars/filewatch-1.0.1.jar +0 -0
- data/lib/logstash/inputs/file.rb +9 -2
- data/logstash-input-file.gemspec +9 -2
- data/spec/file_ext/file_ext_windows_spec.rb +36 -0
- data/spec/filewatch/read_mode_handlers_read_file_spec.rb +2 -2
- data/spec/filewatch/reading_spec.rb +100 -57
- data/spec/filewatch/rotate_spec.rb +451 -0
- data/spec/filewatch/spec_helper.rb +33 -10
- data/spec/filewatch/tailing_spec.rb +273 -153
- data/spec/filewatch/watched_file_spec.rb +3 -3
- data/spec/filewatch/watched_files_collection_spec.rb +3 -3
- data/spec/filewatch/winhelper_spec.rb +4 -5
- data/spec/helpers/logging_level_helper.rb +8 -0
- data/spec/helpers/rspec_wait_handler_helper.rb +38 -0
- data/spec/helpers/spec_helper.rb +7 -1
- data/spec/inputs/file_read_spec.rb +54 -24
- data/spec/inputs/file_tail_spec.rb +244 -284
- metadata +13 -3
- data/lib/jars/filewatch-1.0.0.jar +0 -0
data/spec/filewatch/watched_file_spec.rb
CHANGED
@@ -8,9 +8,9 @@ module FileWatch
 
   context 'Given two instances of the same file' do
     it 'their sincedb_keys should equate' do
-      wf_key1 = WatchedFile.new(pathname, pathname
+      wf_key1 = WatchedFile.new(pathname, PathStatClass.new(pathname), Settings.new).sincedb_key
       hash_db = { wf_key1 => 42 }
-      wf_key2 = WatchedFile.new(pathname, pathname
+      wf_key2 = WatchedFile.new(pathname, PathStatClass.new(pathname), Settings.new).sincedb_key
       expect(wf_key1).to eq(wf_key2)
       expect(wf_key1).to eql(wf_key2)
       expect(wf_key1.hash).to eq(wf_key2.hash)
@@ -20,7 +20,7 @@ module FileWatch
 
   context 'Given a barrage of state changes' do
     it 'only the previous N state changes are remembered' do
-      watched_file = WatchedFile.new(pathname, pathname
+      watched_file = WatchedFile.new(pathname, PathStatClass.new(pathname), Settings.new)
       watched_file.ignore
       watched_file.watch
       watched_file.activate
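Both hunks show the same constructor change: WatchedFile.new now receives a stat wrapper (PathStatClass) and a Settings instance instead of a raw stat derived from the pathname. A minimal sketch of the new call shape, using only names visible in these hunks (the pathname setup is assumed from the spec's let blocks):

    # Sketch only; PathStatClass and Settings are the names shown in the hunks above.
    pathname = Pathname.new("/var/log/a.log")  # assumed spec setup
    wf = WatchedFile.new(pathname, PathStatClass.new(pathname), Settings.new)
    wf.sincedb_key  # two instances built from the same path yield eq/eql keys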
data/spec/filewatch/watched_files_collection_spec.rb
CHANGED
@@ -4,9 +4,9 @@ require_relative 'spec_helper'
 module FileWatch
   describe WatchedFilesCollection do
     let(:time) { Time.now }
-    let(:stat1) { double("stat1", :size => 98, :
-    let(:stat2) { double("stat2", :size => 99, :
-    let(:stat3) { double("stat3", :size => 100, :
+    let(:stat1) { double("stat1", :size => 98, :modified_at => time - 30, :identifier => nil, :inode => 234567, :inode_struct => InodeStruct.new("234567", 3, 2)) }
+    let(:stat2) { double("stat2", :size => 99, :modified_at => time - 20, :identifier => nil, :inode => 234568, :inode_struct => InodeStruct.new("234568", 3, 2)) }
+    let(:stat3) { double("stat3", :size => 100, :modified_at => time, :identifier => nil, :inode => 234569, :inode_struct => InodeStruct.new("234569", 3, 2)) }
     let(:wf1) { WatchedFile.new("/var/log/z.log", stat1, Settings.new) }
     let(:wf2) { WatchedFile.new("/var/log/m.log", stat2, Settings.new) }
     let(:wf3) { WatchedFile.new("/var/log/a.log", stat3, Settings.new) }
data/spec/filewatch/winhelper_spec.rb
CHANGED
@@ -3,7 +3,7 @@ require "stud/temporary"
 require "fileutils"
 
 if Gem.win_platform?
-  require "
+  require "filewatch/winhelper"
 
   describe Winhelper do
     let(:path) { Stud::Temporary.file.path }
@@ -13,11 +13,10 @@ if Gem.win_platform?
     end
 
     it "return a unique file identifier" do
-
+      identifier = Winhelper.identifier_from_path(path)
 
-      expect(
-      expect(
-      expect(file_index_high).not_to eq(0)
+      expect(identifier).not_to eq("unknown")
+      expect(identifier.count("-")).to eq(2)
     end
   end
 end
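The rewritten assertions imply Winhelper.identifier_from_path now returns a single dash-joined identifier string, with "unknown" as the failure fallback, instead of exposing separate file-index fields. A sketch of what a caller can rely on, inferred only from the assertions above (the meaning and order of the three fields is not pinned down by this spec):

    identifier = Winhelper.identifier_from_path(path)
    # The spec asserts exactly two dashes, so a non-fallback identifier
    # splits into three fields; their semantics are an assumption here.
    parts = identifier.split("-") unless identifier == "unknown"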
data/spec/helpers/rspec_wait_handler_helper.rb
ADDED
@@ -0,0 +1,38 @@
+# encoding: utf-8
+
+module RSpec
+  module Wait
+    module Handler
+      def handle_matcher(target, *args, &block)
+        # there is a similar patch in the rspec-wait repo since Nov, 19 2017
+        # it does not look like the author is interested in the change.
+        # - do not use Ruby Timeout
+        count = RSpec.configuration.wait_timeout.fdiv(RSpec.configuration.wait_delay).ceil
+        failure = nil
+        count.times do
+          begin
+            actual = target.respond_to?(:call) ? target.call : target
+            super(actual, *args, &block)
+            failure = nil
+          rescue RSpec::Expectations::ExpectationNotMetError => failure
+            sleep RSpec.configuration.wait_delay
+          end
+          break if failure.nil?
+        end
+        raise failure unless failure.nil?
+      end
+    end
+
+    # From: https://github.com/rspec/rspec-expectations/blob/v3.0.0/lib/rspec/expectations/handler.rb#L44-L63
+    class PositiveHandler < RSpec::Expectations::PositiveExpectationHandler
+      extend Handler
+    end
+
+    # From: https://github.com/rspec/rspec-expectations/blob/v3.0.0/lib/rspec/expectations/handler.rb#L66-L93
+    class NegativeHandler < RSpec::Expectations::NegativeExpectationHandler
+      extend Handler
+    end
+  end
+end
+
+RSPEC_WAIT_HANDLER_PATCHED = true
data/spec/helpers/spec_helper.rb
CHANGED
@@ -18,7 +18,7 @@ module FileInput
 
   class TracerBase
     def initialize
-      @tracer =
+      @tracer = Concurrent::Array.new
     end
 
     def trace_for(symbol)
@@ -54,6 +54,9 @@ module FileInput
   end
 end
 
+require_relative "rspec_wait_handler_helper" unless defined? RSPEC_WAIT_HANDLER_PATCHED
+require_relative "logging_level_helper" unless defined? LOG_AT_HANDLED
+
 unless RSpec::Matchers.method_defined?(:receive_call_and_args)
   RSpec::Matchers.define(:receive_call_and_args) do |m, args|
     match do |actual|
@@ -66,3 +69,6 @@ unless RSpec::Matchers.method_defined?(:receive_call_and_args)
   end
 end
 
+ENV["LOG_AT"].tap do |level|
+  LogStash::Logging::Logger::configure_logging(level) unless level.nil?
+end
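Together with the logging_level_helper required above, this hook lets a spec run raise the plugin's log level from the environment, e.g. `LOG_AT=DEBUG bundle exec rspec` (invocation form assumed); when LOG_AT is unset, logging configuration is left untouched.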
data/spec/inputs/file_read_spec.rb
CHANGED
@@ -9,21 +9,21 @@ require "tempfile"
 require "stud/temporary"
 require "logstash/codecs/multiline"
 
-FILE_DELIMITER = LogStash::Environment.windows? ? "\r\n" : "\n"
-
 describe LogStash::Inputs::File do
   describe "'read' mode testing with input(conf) do |pipeline, queue|" do
     it "should start at the beginning of an existing file and delete the file when done" do
-
-
+      directory = Stud::Temporary.directory
+      tmpfile_path = ::File.join(directory, "A.log")
+      sincedb_path = ::File.join(directory, "readmode_A_sincedb.txt")
+      path_path = ::File.join(directory, "*.log")
 
       conf = <<-CONFIG
         input {
           file {
-
-            path => "#{
+            id => "blah"
+            path => "#{path_path}"
             sincedb_path => "#{sincedb_path}"
-            delimiter => "
+            delimiter => "|"
             mode => "read"
             file_completed_action => "delete"
           }
@@ -31,17 +31,49 @@ describe LogStash::Inputs::File do
       CONFIG
 
       File.open(tmpfile_path, "a") do |fd|
-        fd.
-        fd.puts("world")
+        fd.write("hello|world")
         fd.fsync
       end
 
       events = input(conf) do |pipeline, queue|
+        wait(0.5).for{File.exist?(tmpfile_path)}.to be_falsey
         2.times.collect { queue.pop }
       end
 
       expect(events.map{|e| e.get("message")}).to contain_exactly("hello", "world")
-
+    end
+
+    it "should start at the beginning of an existing file and log the file when done" do
+      directory = Stud::Temporary.directory
+      tmpfile_path = ::File.join(directory, "A.log")
+      sincedb_path = ::File.join(directory, "readmode_A_sincedb.txt")
+      path_path = ::File.join(directory, "*.log")
+      log_completed_path = ::File.join(directory, "A_completed.txt")
+
+      conf = <<-CONFIG
+        input {
+          file {
+            id => "blah"
+            path => "#{path_path}"
+            sincedb_path => "#{sincedb_path}"
+            delimiter => "|"
+            mode => "read"
+            file_completed_action => "log"
+            file_completed_log_path => "#{log_completed_path}"
+          }
+        }
+      CONFIG
+
+      File.open(tmpfile_path, "a") do |fd|
+        fd.write("hello|world")
+        fd.fsync
+      end
+
+      events = input(conf) do |pipeline, queue|
+        wait(0.5).for{IO.read(log_completed_path)}.to match(/A\.log/)
+        2.times.collect { queue.pop }
+      end
+      expect(events.map{|e| e.get("message")}).to contain_exactly("hello", "world")
     end
   end
 
@@ -63,7 +95,6 @@ describe LogStash::Inputs::File do
            type => "blah"
            path => "#{tmpfile_path}"
            sincedb_path => "#{sincedb_path}"
-            delimiter => "#{FILE_DELIMITER}"
            mode => "read"
            file_completed_action => "log"
            file_completed_log_path => "#{log_completed_path}"
@@ -72,12 +103,12 @@ describe LogStash::Inputs::File do
       CONFIG
 
       events = input(conf) do |pipeline, queue|
+        wait(0.5).for{IO.read(log_completed_path)}.to match(/#{file_path.to_s}/)
         2.times.collect { queue.pop }
       end
 
       expect(events[0].get("message")).to start_with("2010-03-12 23:51")
       expect(events[1].get("message")).to start_with("2010-03-12 23:51")
-      expect(IO.read(log_completed_path)).to eq(file_path.to_s + "\n")
     end
 
   end
@@ -86,10 +117,11 @@ describe LogStash::Inputs::File do
     let(:file_path) { fixture_dir.join('uncompressed.log') }
 
     it "the file is read and the path is logged to the `file_completed_log_path` file" do
-      tmpfile_path = fixture_dir.join("unc*.log")
-      sincedb_path = Stud::Temporary.pathname
       FileInput.make_fixture_current(file_path.to_path)
-
+      tmpfile_path = fixture_dir.join("unc*.log")
+      directory = Stud::Temporary.directory
+      sincedb_path = ::File.join(directory, "readmode_B_sincedb.txt")
+      log_completed_path = ::File.join(directory, "B_completed.txt")
 
       conf = <<-CONFIG
         input {
@@ -97,7 +129,6 @@ describe LogStash::Inputs::File do
            type => "blah"
            path => "#{tmpfile_path}"
            sincedb_path => "#{sincedb_path}"
-            delimiter => "#{FILE_DELIMITER}"
            mode => "read"
            file_completed_action => "log"
            file_completed_log_path => "#{log_completed_path}"
@@ -106,23 +137,25 @@ describe LogStash::Inputs::File do
       CONFIG
 
       events = input(conf) do |pipeline, queue|
+        wait(0.5).for{IO.read(log_completed_path)}.to match(/uncompressed\.log/)
         2.times.collect { queue.pop }
       end
 
       expect(events[0].get("message")).to start_with("2010-03-12 23:51")
       expect(events[1].get("message")).to start_with("2010-03-12 23:51")
-      expect(IO.read(log_completed_path)).to eq(file_path.to_s + "\n")
     end
   end
 
   context "for a compressed file" do
     it "the file is read" do
-      tmpfile_path = fixture_dir.join("compressed.*.*")
-      sincedb_path = Stud::Temporary.pathname
       file_path = fixture_dir.join('compressed.log.gz')
       file_path2 = fixture_dir.join('compressed.log.gzip')
       FileInput.make_fixture_current(file_path.to_path)
-
+      FileInput.make_fixture_current(file_path2.to_path)
+      tmpfile_path = fixture_dir.join("compressed.*.*")
+      directory = Stud::Temporary.directory
+      sincedb_path = ::File.join(directory, "readmode_C_sincedb.txt")
+      log_completed_path = ::File.join(directory, "C_completed.txt")
 
       conf = <<-CONFIG
         input {
@@ -130,7 +163,6 @@ describe LogStash::Inputs::File do
            type => "blah"
            path => "#{tmpfile_path}"
            sincedb_path => "#{sincedb_path}"
-            delimiter => "#{FILE_DELIMITER}"
            mode => "read"
            file_completed_action => "log"
            file_completed_log_path => "#{log_completed_path}"
@@ -139,6 +171,7 @@ describe LogStash::Inputs::File do
       CONFIG
 
       events = input(conf) do |pipeline, queue|
+        wait(0.5).for{IO.read(log_completed_path).scan(/compressed\.log\.gz(ip)?/).size}.to eq(2)
         4.times.collect { queue.pop }
       end
 
@@ -146,9 +179,6 @@ describe LogStash::Inputs::File do
       expect(events[1].get("message")).to start_with("2010-03-12 23:51")
       expect(events[2].get("message")).to start_with("2010-03-12 23:51")
       expect(events[3].get("message")).to start_with("2010-03-12 23:51")
-      logged_completions = IO.read(log_completed_path).split
-      expect(logged_completions.first).to match(/compressed\.log\.(gzip|gz)$/)
-      expect(logged_completions.last).to match(/compressed\.log\.(gzip|gz)$/)
     end
   end
 end
data/spec/inputs/file_tail_spec.rb
CHANGED
@@ -9,7 +9,7 @@ require "logstash/codecs/multiline"
 
 # LogStash::Logging::Logger::configure_logging("DEBUG")
 
-TEST_FILE_DELIMITER =
+TEST_FILE_DELIMITER = $/
 
 describe LogStash::Inputs::File do
   describe "'tail' mode testing with input(conf) do |pipeline, queue|" do
@@ -22,152 +22,159 @@ describe LogStash::Inputs::File do
       end
     end
 
-
-
-
-
-
-
-
-
-
-
-
-
+    let(:directory) { Stud::Temporary.directory }
+    let(:sincedb_dir) { Stud::Temporary.directory }
+    let(:tmpfile_path) { ::File.join(directory, "#{name}.txt") }
+    let(:sincedb_path) { ::File.join(sincedb_dir, "readmode_#{name}_sincedb.txt") }
+    let(:path_path) { ::File.join(directory, "*.txt") }
+
+    context "for an existing file" do
+      let(:name) { "A" }
+      it "should start at the beginning" do
+        conf = <<-CONFIG
+          input {
+            file {
+              type => "blah"
+              path => "#{path_path}"
+              start_position => "beginning"
+              sincedb_path => "#{sincedb_path}"
+              delimiter => "#{TEST_FILE_DELIMITER}"
+            }
           }
-
-        CONFIG
+        CONFIG
 
-
-
-
-
-
+        File.open(tmpfile_path, "a") do |fd|
+          fd.puts("hello")
+          fd.puts("world")
+          fd.fsync
+        end
 
-
-
+        events = input(conf) do |pipeline, queue|
+          2.times.collect { queue.pop }
+        end
+        expect(events.map{|e| e.get("message")}).to contain_exactly("hello", "world")
       end
-      expect(events.map{|e| e.get("message")}).to contain_exactly("hello", "world")
     end
 
-
-
-
-
-
-
-
-
-
-
-
-
+    context "running the input twice" do
+      let(:name) { "B" }
+      it "should restart at the sincedb value" do
+        conf = <<-CONFIG
+          input {
+            file {
+              type => "blah"
+              path => "#{path_path}"
+              start_position => "beginning"
+              sincedb_path => "#{sincedb_path}"
+              "file_sort_by" => "path"
+              delimiter => "#{TEST_FILE_DELIMITER}"
+            }
          }
-
-        CONFIG
+        CONFIG
 
-
-
-
-
+        File.open(tmpfile_path, "w") do |fd|
+          fd.puts("hello3")
+          fd.puts("world3")
+        end
 
-
-
-
+        events = input(conf) do |pipeline, queue|
+          2.times.collect { queue.pop }
+        end
 
-
+        expect(events.map{|e| e.get("message")}).to contain_exactly("hello3", "world3")
 
-
-
-
-
-
-
+        File.open(tmpfile_path, "a") do |fd|
+          fd.puts("foo")
+          fd.puts("bar")
+          fd.puts("baz")
+          fd.fsync
+        end
 
-
-
+        events = input(conf) do |pipeline, queue|
+          3.times.collect { queue.pop }
+        end
+        messages = events.map{|e| e.get("message")}
+        expect(messages).to contain_exactly("foo", "bar", "baz")
      end
-      messages = events.map{|e| e.get("message")}
-      expect(messages).to contain_exactly("foo", "bar", "baz")
     end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+    context "when path and host fields exist" do
+      let(:name) { "C" }
+      it "should not overwrite them" do
+        conf = <<-CONFIG
+          input {
+            file {
+              type => "blah"
+              path => "#{path_path}"
+              start_position => "beginning"
+              sincedb_path => "#{sincedb_path}"
+              delimiter => "#{TEST_FILE_DELIMITER}"
+              codec => "json"
+            }
          }
-
-        CONFIG
+        CONFIG
 
-
-
-
-
-
+        File.open(tmpfile_path, "w") do |fd|
+          fd.puts('{"path": "my_path", "host": "my_host"}')
+          fd.puts('{"my_field": "my_val"}')
+          fd.fsync
+        end
 
-
-
-
+        events = input(conf) do |pipeline, queue|
+          2.times.collect { queue.pop }
+        end
 
-
+        existing_path_index, added_path_index = "my_val" == events[0].get("my_field") ? [1,0] : [0,1]
 
-
-
-
+        expect(events[existing_path_index].get("path")).to eq "my_path"
+        expect(events[existing_path_index].get("host")).to eq "my_host"
+        expect(events[existing_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
 
-
-
-
+        expect(events[added_path_index].get("path")).to eq "#{tmpfile_path}"
+        expect(events[added_path_index].get("host")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
+        expect(events[added_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
+      end
     end
 
-
-
-
-
-
-
-
-
-
-
+    context "running the input twice" do
+      let(:name) { "D" }
+      it "should read old files" do
+        conf = <<-CONFIG
+          input {
+            file {
+              type => "blah"
+              path => "#{path_path}"
+              start_position => "beginning"
+              codec => "json"
+            }
          }
-
-        CONFIG
+        CONFIG
 
-
-
-
-
-
-
-
+        File.open(tmpfile_path, "w") do |fd|
+          fd.puts('{"path": "my_path", "host": "my_host"}')
+          fd.puts('{"my_field": "my_val"}')
+          fd.fsync
+        end
+        # arbitrary old file (2 days)
+        FileInput.make_file_older(tmpfile_path, 48 * 60 * 60)
 
-
-
+        events = input(conf) do |pipeline, queue|
+          2.times.collect { queue.pop }
+        end
+        existing_path_index, added_path_index = "my_val" == events[0].get("my_field") ? [1,0] : [0,1]
+        expect(events[existing_path_index].get("path")).to eq "my_path"
+        expect(events[existing_path_index].get("host")).to eq "my_host"
+        expect(events[existing_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
+
+        expect(events[added_path_index].get("path")).to eq "#{tmpfile_path}"
+        expect(events[added_path_index].get("host")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
+        expect(events[added_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
       end
-      existing_path_index, added_path_index = "my_val" == events[0].get("my_field") ? [1,0] : [0,1]
-      expect(events[existing_path_index].get("path")).to eq "my_path"
-      expect(events[existing_path_index].get("host")).to eq "my_host"
-      expect(events[existing_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
-
-      expect(events[added_path_index].get("path")).to eq "#{tmpfile_path}"
-      expect(events[added_path_index].get("host")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
-      expect(events[added_path_index].get("[@metadata][host]")).to eq "#{Socket.gethostname.force_encoding(Encoding::UTF_8)}"
     end
 
-    context "when sincedb_path is
-    let(:
-
-    subject { LogStash::Inputs::File.new("path" => tmpfile_path, "sincedb_path" => sincedb_path) }
+    context "when sincedb_path is a directory" do
+      let(:name) { "E" }
+      subject { LogStash::Inputs::File.new("path" => path_path, "sincedb_path" => directory) }
 
       after :each do
         FileUtils.rm_rf(sincedb_path)
@@ -180,16 +187,19 @@ describe LogStash::Inputs::File do
   end
 
   describe "testing with new, register, run and stop" do
+    let(:suffix) { "A" }
     let(:conf) { Hash.new }
     let(:mlconf) { Hash.new }
     let(:events) { Array.new }
     let(:mlcodec) { LogStash::Codecs::Multiline.new(mlconf) }
-    let(:
-    let(:tmpfile_path) { Stud::Temporary.pathname }
-    let(:sincedb_path) { Stud::Temporary.pathname }
+    let(:tracer_codec) { FileInput::CodecTracer.new }
     let(:tmpdir_path) { Stud::Temporary.directory }
+    let(:tmpfile_path) { ::File.join(tmpdir_path, "#{suffix}.txt") }
+    let(:path_path) { ::File.join(tmpdir_path, "*.txt") }
+    let(:sincedb_path) { ::File.join(tmpdir_path, "sincedb-#{suffix}") }
 
     after :each do
+      sleep(0.1) until subject.completely_stopped?
      FileUtils.rm_rf(sincedb_path)
    end
 
@@ -204,7 +214,7 @@ describe LogStash::Inputs::File do
       end
       mlconf.update("pattern" => "^\s", "what" => "previous")
       conf.update("type" => "blah",
-                  "path" =>
+                  "path" => path_path,
                   "sincedb_path" => sincedb_path,
                   "stat_interval" => 0.1,
                   "codec" => mlcodec,
@@ -213,16 +223,22 @@ describe LogStash::Inputs::File do
 
     it "reads the appended data only" do
       subject.register
-      RSpec::Sequencing
-        .run_after(
-          expect(events.size).to eq(0)
+      actions = RSpec::Sequencing
+        .run_after(1, "append two lines after delay") do
           File.open(tmpfile_path, "a") { |fd| fd.puts("hello"); fd.puts("world") }
         end
-        .
+        .then("wait for one event") do
+          wait(0.75).for{events.size}.to eq(1)
+        end
+        .then("quit") do
           subject.stop
         end
+        .then("wait for flushed event") do
+          wait(0.75).for{events.size}.to eq(2)
+        end
 
       subject.run(events)
+      actions.assert_no_errors
 
       event1 = events[0]
       expect(event1).not_to be_nil
@@ -240,218 +256,172 @@ describe LogStash::Inputs::File do
 
     context "when close_older config is specified" do
       let(:line) { "line1.1-of-a" }
-
+      let(:suffix) { "X" }
       subject { described_class.new(conf) }
 
       before do
         conf.update(
           "type" => "blah",
-          "path" =>
+          "path" => path_path,
           "sincedb_path" => sincedb_path,
           "stat_interval" => 0.02,
-          "codec" =>
-          "close_older" =>
+          "codec" => tracer_codec,
+          "close_older" => "100 ms",
+          "start_position" => "beginning",
           "delimiter" => TEST_FILE_DELIMITER)
 
         subject.register
       end
 
-      it "having timed_out, the
-        RSpec::Sequencing
+      it "having timed_out, the codec is auto flushed" do
+        actions = RSpec::Sequencing
          .run("create file") do
-            File.open(
+            File.open(tmpfile_path, "wb") { |file| file.puts(line) }
          end
-          .then_after(0.
-
-            expect(subject.codec.identity_count).to eq(1)
+          .then_after(0.1, "identity is mapped") do
+            wait(0.75).for{subject.codec.identity_map[tmpfile_path]}.not_to be_nil, "identity is not mapped"
          end
-          .
-
-            expect(subject.codec.identity_count).to eq(0)
+          .then("wait for auto_flush") do
+            wait(0.75).for{subject.codec.identity_map[tmpfile_path].codec.trace_for(:auto_flush)}.to eq([true]), "autoflush didn't"
          end
-          .
+          .then("quit") do
            subject.stop
          end
        subject.run(events)
+        actions.assert_no_errors
+        expect(subject.codec.identity_map[tmpfile_path].codec.trace_for(:accept)).to eq([true])
      end
    end
 
     context "when ignore_older config is specified" do
-      let(:
-      let(:tmp_dir_file) { "#{tmpdir_path}/a.log" }
-
-      subject { described_class.new(conf) }
-
+      let(:suffix) { "Y" }
       before do
-        File.open(tmp_dir_file, "a") do |fd|
-          fd.puts(line)
-          fd.fsync
-        end
-        FileInput.make_file_older(tmp_dir_file, 2)
         conf.update(
           "type" => "blah",
-          "path" =>
+          "path" => path_path,
           "sincedb_path" => sincedb_path,
           "stat_interval" => 0.02,
-          "codec" =>
-          "ignore_older" =>
+          "codec" => tracer_codec,
+          "ignore_older" => "500 ms",
           "delimiter" => TEST_FILE_DELIMITER)
-
-        subject.register
-        Thread.new { subject.run(events) }
       end
+      subject { described_class.new(conf) }
+      let(:line) { "line1.1-of-a" }
 
      it "the file is not read" do
-
-
-
-
-
+        subject.register
+        RSpec::Sequencing
+          .run("create file") do
+            File.open(tmp_dir_file, "a") do |fd|
+              fd.puts(line)
+              fd.fsync
+            end
+            FileInput.make_file_older(tmp_dir_file, 2)
+          end
+          .then_after(0.5, "stop") do
+            subject.stop
+          end
+        subject.run(events)
+        expect(subject.codec.identity_map[tmpfile_path].codec.trace_for(:accept)).to be_falsey
      end
    end
 
     context "when wildcard path and a multiline codec is specified" do
       subject { described_class.new(conf) }
-
+      let(:suffix) { "J" }
+      let(:tmpfile_path2) { ::File.join(tmpdir_path, "K.txt") }
       before do
         mlconf.update("pattern" => "^\s", "what" => "previous")
         conf.update(
           "type" => "blah",
-          "path" =>
+          "path" => path_path,
+          "start_position" => "beginning",
           "sincedb_path" => sincedb_path,
           "stat_interval" => 0.05,
           "codec" => mlcodec,
+          "file_sort_by" => "path",
           "delimiter" => TEST_FILE_DELIMITER)
 
         subject.register
       end
 
       it "collects separate multiple line events from each file" do
+        subject
         actions = RSpec::Sequencing
           .run_after(0.1, "create files") do
-            File.open(
-              fd.puts("line1.1-of-
-              fd.puts(" line1.2-of-
-              fd.puts(" line1.3-of-
+            File.open(tmpfile_path, "wb") do |fd|
+              fd.puts("line1.1-of-J")
+              fd.puts(" line1.2-of-J")
+              fd.puts(" line1.3-of-J")
            end
-            File.open(
-              fd.puts("line1.1-of-
-              fd.puts(" line1.2-of-
-              fd.puts(" line1.3-of-
+            File.open(tmpfile_path2, "wb") do |fd|
+              fd.puts("line1.1-of-K")
+              fd.puts(" line1.2-of-K")
+              fd.puts(" line1.3-of-K")
            end
          end
-          .
-
+          .then("assert both files are mapped as identities and stop") do
+            wait(2).for {subject.codec.identity_count}.to eq(2), "both files are not mapped as identities"
          end
-          .
+          .then("stop") do
            subject.stop
          end
-          .then_after(0.2 , "stop flushes both events") do
-            expect(events.size).to eq(2)
-            e1, e2 = events
-            e1_message = e1.get("message")
-            e2_message = e2.get("message")
-
-            # can't assume File A will be read first
-            if e1_message.start_with?('line1.1-of-z')
-              expect(e1.get("path")).to match(/z.log/)
-              expect(e2.get("path")).to match(/A.log/)
-              expect(e1_message).to eq("line1.1-of-z#{TEST_FILE_DELIMITER} line1.2-of-z#{TEST_FILE_DELIMITER} line1.3-of-z")
-              expect(e2_message).to eq("line1.1-of-a#{TEST_FILE_DELIMITER} line1.2-of-a#{TEST_FILE_DELIMITER} line1.3-of-a")
-            else
-              expect(e1.get("path")).to match(/A.log/)
-              expect(e2.get("path")).to match(/z.log/)
-              expect(e1_message).to eq("line1.1-of-a#{TEST_FILE_DELIMITER} line1.2-of-a#{TEST_FILE_DELIMITER} line1.3-of-a")
-              expect(e2_message).to eq("line1.1-of-z#{TEST_FILE_DELIMITER} line1.2-of-z#{TEST_FILE_DELIMITER} line1.3-of-z")
-            end
-          end
        subject.run(events)
        # wait for actions to complete
-        actions.
+        actions.assert_no_errors
+        expect(events.size).to eq(2)
+        e1, e2 = events
+        e1_message = e1.get("message")
+        e2_message = e2.get("message")
+
+        expect(e1.get("path")).to match(/J.txt/)
+        expect(e2.get("path")).to match(/K.txt/)
+        expect(e1_message).to eq("line1.1-of-J#{TEST_FILE_DELIMITER} line1.2-of-J#{TEST_FILE_DELIMITER} line1.3-of-J")
+        expect(e2_message).to eq("line1.1-of-K#{TEST_FILE_DELIMITER} line1.2-of-K#{TEST_FILE_DELIMITER} line1.3-of-K")
      end
 
       context "if auto_flush is enabled on the multiline codec" do
         let(:mlconf) { { "auto_flush_interval" => 0.5 } }
-
+        let(:suffix) { "M" }
         it "an event is generated via auto_flush" do
           actions = RSpec::Sequencing
             .run_after(0.1, "create files") do
-              File.open(
+              File.open(tmpfile_path, "wb") do |fd|
                fd.puts("line1.1-of-a")
                fd.puts(" line1.2-of-a")
                fd.puts(" line1.3-of-a")
              end
            end
-            .
-
-              e1_message = e1.get("message")
-              expect(e1["path"]).to match(/a.log/)
-              expect(e1_message).to eq("line1.1-of-a#{TEST_FILE_DELIMITER} line1.2-of-a#{TEST_FILE_DELIMITER} line1.3-of-a")
+            .then("wait for auto_flush") do
+              wait(2).for{events.size}.to eq(1), "events size is not 1"
            end
            .then("stop") do
              subject.stop
            end
          subject.run(events)
          # wait for actions to complete
-          actions.
+          actions.assert_no_errors
+          e1 = events.first
+          e1_message = e1.get("message")
+          expect(e1_message).to eq("line1.1-of-a#{TEST_FILE_DELIMITER} line1.2-of-a#{TEST_FILE_DELIMITER} line1.3-of-a")
+          expect(e1.get("path")).to match(/M.txt$/)
        end
      end
    end
 
-    context "when #run is called multiple times", :unix => true do
-      let(:file_path) { "#{tmpdir_path}/a.log" }
-      let(:buffer) { [] }
-      let(:run_thread_proc) do
-        lambda { Thread.new { subject.run(buffer) } }
-      end
-      let(:lsof_proc) do
-        lambda { `lsof -p #{Process.pid} | grep #{file_path}` }
-      end
-
-      subject { described_class.new(conf) }
-
-      before do
-        conf.update(
-          "path" => tmpdir_path + "/*.log",
-          "start_position" => "beginning",
-          "stat_interval" => 0.1,
-          "sincedb_path" => sincedb_path)
-
-        File.open(file_path, "w") do |fd|
-          fd.puts('foo')
-          fd.puts('bar')
-          fd.fsync
-        end
-      end
-
-      it "should only actually open files when content changes are detected" do
-        subject.register
-        expect(lsof_proc.call).to eq("")
-        # first run processes the file and records sincedb progress
-        run_thread_proc.call
-        wait(1).for{lsof_proc.call.scan(file_path).size}.to eq(1)
-        # second run quits the first run
-        # sees the file has not changed size and does not open it
-        run_thread_proc.call
-        wait(1).for{lsof_proc.call.scan(file_path).size}.to eq(0)
-        # truncate and write less than before
-        File.open(file_path, "w"){ |fd| fd.puts('baz'); fd.fsync }
-        # sees the file has changed size and does open it
-        wait(1).for{lsof_proc.call.scan(file_path).size}.to eq(1)
-      end
-    end
-
     describe "specifying max_open_files" do
+      let(:suffix) { "P" }
+      let(:tmpfile_path2) { ::File.join(tmpdir_path, "Q.txt") }
       subject { described_class.new(conf) }
       before do
-        File.open(
-          fd.puts("line1-of-
-          fd.puts("line2-of-
+        File.open(tmpfile_path, "w") do |fd|
+          fd.puts("line1-of-P")
+          fd.puts("line2-of-P")
          fd.fsync
        end
-        File.open(
-          fd.puts("line1-of-
-          fd.puts("line2-of-
+        File.open(tmpfile_path2, "w") do |fd|
+          fd.puts("line1-of-Q")
+          fd.puts("line2-of-Q")
          fd.fsync
        end
      end
@@ -461,37 +431,32 @@ describe LogStash::Inputs::File do
         conf.clear
         conf.update(
           "type" => "blah",
-          "path" =>
+          "path" => path_path,
           "sincedb_path" => sincedb_path,
           "stat_interval" => 0.1,
           "max_open_files" => 1,
           "start_position" => "beginning",
+          "file_sort_by" => "path",
           "delimiter" => TEST_FILE_DELIMITER)
         subject.register
       end
       it "collects line events from only one file" do
         actions = RSpec::Sequencing
-          .
-
+          .run("assert one identity is mapped") do
+            wait(0.4).for{subject.codec.identity_count}.to be > 0, "no identity is mapped"
          end
-          .
+          .then("stop") do
            subject.stop
          end
-          .
-
-            e1, e2 = events
-            if Dir.glob("#{tmpdir_path}/*.log").first =~ %r{a\.log}
-              #linux and OSX have different retrieval order
-              expect(e1.get("message")).to eq("line1-of-a")
-              expect(e2.get("message")).to eq("line2-of-a")
-            else
-              expect(e1.get("message")).to eq("line1-of-z")
-              expect(e2.get("message")).to eq("line2-of-z")
-            end
+          .then("stop flushes last event") do
+            wait(0.4).for{events.size}.to eq(2), "events size does not equal 2"
          end
        subject.run(events)
        # wait for actions future value
-        actions.
+        actions.assert_no_errors
+        e1, e2 = events
+        expect(e1.get("message")).to eq("line1-of-P")
+        expect(e2.get("message")).to eq("line2-of-P")
      end
    end
 
@@ -499,41 +464,36 @@ describe LogStash::Inputs::File do
       before do
         conf.update(
           "type" => "blah",
-          "path" =>
+          "path" => path_path,
           "sincedb_path" => sincedb_path,
           "stat_interval" => 0.1,
           "max_open_files" => 1,
           "close_older" => 0.5,
           "start_position" => "beginning",
+          "file_sort_by" => "path",
           "delimiter" => TEST_FILE_DELIMITER)
         subject.register
       end
 
       it "collects line events from both files" do
         actions = RSpec::Sequencing
-          .
-
-            expect(events.size).to eq(2)
+          .run("assert both identities are mapped and the first two events are built") do
+            wait(0.4).for{subject.codec.identity_count == 1 && events.size == 2}.to eq(true), "both identities are not mapped and the first two events are not built"
          end
-          .
-
-            if Dir.glob("#{tmpdir_path}/*.log").first =~ %r{a\.log}
-              #linux and OSX have different retrieval order
-              e1, e2, e3, e4 = events
-            else
-              e3, e4, e1, e2 = events
-            end
-            expect(e1.get("message")).to eq("line1-of-a")
-            expect(e2.get("message")).to eq("line2-of-a")
-            expect(e3.get("message")).to eq("line1-of-z")
-            expect(e4.get("message")).to eq("line2-of-z")
+          .then("wait for close to flush last event of each identity") do
+            wait(0.8).for{events.size}.to eq(4), "close does not flush last event of each identity"
          end
          .then_after(0.1, "stop") do
            subject.stop
          end
        subject.run(events)
        # wait for actions future value
-        actions.
+        actions.assert_no_errors
+        e1, e2, e3, e4 = events
+        expect(e1.get("message")).to eq("line1-of-P")
+        expect(e2.get("message")).to eq("line2-of-P")
+        expect(e3.get("message")).to eq("line1-of-Q")
+        expect(e4.get("message")).to eq("line2-of-Q")
      end
    end
  end
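A pattern repeated across these tail-mode hunks: wherever a glob can match several files, the updated specs add "file_sort_by" => "path" and then assert a fixed event order (P before Q, J before K), where the old specs had to branch on whichever file the OS happened to list first. A minimal sketch of the option in the specs' own conf.update style, using only settings that appear in this diff (the glob value is hypothetical):

    conf.update(
      "path" => "/var/log/*.txt",      # hypothetical glob matching several files
      "file_sort_by" => "path",        # discover files in path order, not OS order
      "start_position" => "beginning")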