logstash-input-file 4.0.0 → 4.0.2
- checksums.yaml +5 -5
- data/CHANGELOG.md +4 -0
- data/Gemfile +8 -1
- data/docs/index.asciidoc +256 -0
- data/lib/logstash/inputs/file.rb +8 -16
- data/lib/logstash/inputs/file/patch.rb +16 -0
- data/logstash-input-file.gemspec +2 -2
- data/spec/inputs/file_spec.rb +1 -2
- metadata +8 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
-
-  metadata.gz:
-  data.tar.gz:
+SHA256:
+  metadata.gz: 4bfa2200c86a94ed11e3691e5769dabafed7b0794f6fbba92cd1b8d68af828a9
+  data.tar.gz: f5c9d5dc49814dd1040cc6af8e7fca6c9c1ef5d149cea96a0152a05fcb4e11de
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 44d10ae77862a89089f48088420c67e5430623b46b2bc9d3d65f9ef5c5d90abdfd759112a3c763d4db6425f0c61ad8b6031f5e466de690b1b378f7080b1f3141
+  data.tar.gz: c85ca0e8e74cefa9a89dc33cc6d3163b1ab866bd59c9aca5fb1b44cbdc523e880a7c01a18d9de70fc02307a88d07b8bfad7fd19c74dd028d41bcdb854fdd6b21
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,7 @@
+## 4.0.1
+- Docs: Fix the description with the logstash documentation generator
+- Fix an issue with the rspec suite not finding log4j
+
 ## 4.0.0
 - Breaking: `ignore_older` settings is disabled by default. Previously if the file was older than
 24 hours (the default for ignore_older), it would be ignored. This confused new users a lot, specially
data/Gemfile
CHANGED
@@ -1,4 +1,11 @@
 source 'https://rubygems.org'
 
-# Specify your gem's dependencies in logstash-mass_effect.gemspec
 gemspec
+
+logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
+use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
+
+if Dir.exist?(logstash_path) && use_logstash_source
+  gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+  gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+end
data/docs/index.asciidoc
ADDED
@@ -0,0 +1,256 @@
+:plugin: file
+:type: input
+
+///////////////////////////////////////////
+START - GENERATED VARIABLES, DO NOT EDIT!
+///////////////////////////////////////////
+:version: %VERSION%
+:release_date: %RELEASE_DATE%
+:changelog_url: %CHANGELOG_URL%
+:include_path: ../../../../logstash/docs/include
+///////////////////////////////////////////
+END - GENERATED VARIABLES, DO NOT EDIT!
+///////////////////////////////////////////
+
+[id="plugins-{type}-{plugin}"]
+
+=== File input plugin
+
+include::{include_path}/plugin_header.asciidoc[]
+
+==== Description
+
+Stream events from files, normally by tailing them in a manner
+similar to `tail -0F` but optionally reading them from the
+beginning.
+
+By default, each event is assumed to be one line and a line is
+taken to be the text before a newline character.
+Normally, logging will add a newline to the end of each line written.
+If you would like to join multiple log lines into one event,
+you'll want to use the multiline codec or filter.
+
+The plugin aims to track changing files and emit new content as it's
+appended to each file. It's not well-suited for reading a file from
+beginning to end and storing all of it in a single event (not even
+with the multiline codec or filter).
+
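For illustration, a minimal pipeline that tails one log file with this plugin might look like the following; the path and the stdout output are hypothetical examples, not part of the plugin's documentation.

[source,ruby]
----
input {
  file {
    path => "/var/log/myapp/app.log"   # hypothetical file to tail
  }
}

output {
  stdout { codec => rubydebug }        # print each event for inspection
}
----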
+==== Reading from remote network volumes
+
+The file input is not tested on remote filesystems such as NFS, Samba, s3fs-fuse, etc. These
+remote filesystems typically have behaviors that are very different from local filesystems and
+are therefore unlikely to work correctly when used with the file input.
+
+==== Tracking of current position in watched files
+
+The plugin keeps track of the current position in each file by
+recording it in a separate file named sincedb. This makes it
+possible to stop and restart Logstash and have it pick up where it
+left off without missing the lines that were added to the file while
+Logstash was stopped.
+
+By default, the sincedb file is placed in the home directory of the
+user running Logstash with a filename based on the filename patterns
+being watched (i.e. the `path` option). Thus, changing the filename
+patterns will result in a new sincedb file being used and any
+existing current position state will be lost. If you change your
+patterns with any frequency it might make sense to explicitly choose
+a sincedb path with the `sincedb_path` option.
+
+A different `sincedb_path` must be used for each input. Using the same
+path will cause issues. The read checkpoints for each input must be
+stored in a different path so the information does not override.
+
+Sincedb files are text files with four columns:
+
+. The inode number (or equivalent).
+. The major device number of the file system (or equivalent).
+. The minor device number of the file system (or equivalent).
+. The current byte offset within the file.
+
+On non-Windows systems you can obtain the inode number of a file
+with e.g. `ls -li`.
+
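As a sketch of the advice above, each file input can be pinned to its own sincedb file so that changing the `path` patterns does not lose the recorded positions; the paths below are hypothetical.

[source,ruby]
----
input {
  file {
    path         => "/var/log/app-a/*.log"
    sincedb_path => "/var/lib/logstash/sincedb-app-a"   # one sincedb per input
  }
  file {
    path         => "/var/log/app-b/*.log"
    sincedb_path => "/var/lib/logstash/sincedb-app-b"   # a different path for the second input
  }
}
# A sincedb entry holds the four columns described above, e.g.
#   271045 0 64768 1631   (inode, major dev, minor dev, byte offset) -- illustrative values only
----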
+==== File rotation
+
+File rotation is detected and handled by this input, regardless of
+whether the file is rotated via a rename or a copy operation. To
+support programs that write to the rotated file for some time after
+the rotation has taken place, include both the original filename and
+the rotated filename (e.g. /var/log/syslog and /var/log/syslog.1) in
+the filename patterns to watch (the `path` option). Note that the
+rotated filename will be treated as a new file so if
+`start_position` is set to 'beginning' the rotated file will be
+reprocessed.
+
+With the default value of `start_position` ('end') any messages
+written to the end of the file between the last read operation prior
+to the rotation and its reopening under the new name (an interval
+determined by the `stat_interval` and `discover_interval` options)
+will not get picked up.
+
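For example, a watch list covering both the live file and its most recent rotated name (as suggested above) could be written as follows; whether the rotated file is reprocessed depends on `start_position` as described.

[source,ruby]
----
input {
  file {
    path => [ "/var/log/syslog", "/var/log/syslog.1" ]   # original and rotated filenames
  }
}
----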
+[id="plugins-{type}s-{plugin}-options"]
+==== File Input Configuration Options
+
+This plugin supports the following configuration options plus the <<plugins-{type}s-{plugin}-common-options>> described later.
+
+[cols="<,<,<",options="header",]
+|=======================================================================
+|Setting |Input type|Required
+| <<plugins-{type}s-{plugin}-close_older>> |<<number,number>>|No
+| <<plugins-{type}s-{plugin}-delimiter>> |<<string,string>>|No
+| <<plugins-{type}s-{plugin}-discover_interval>> |<<number,number>>|No
+| <<plugins-{type}s-{plugin}-exclude>> |<<array,array>>|No
+| <<plugins-{type}s-{plugin}-ignore_older>> |<<number,number>>|No
+| <<plugins-{type}s-{plugin}-max_open_files>> |<<number,number>>|No
+| <<plugins-{type}s-{plugin}-path>> |<<array,array>>|Yes
+| <<plugins-{type}s-{plugin}-sincedb_path>> |<<string,string>>|No
+| <<plugins-{type}s-{plugin}-sincedb_write_interval>> |<<number,number>>|No
+| <<plugins-{type}s-{plugin}-start_position>> |<<string,string>>, one of `["beginning", "end"]`|No
+| <<plugins-{type}s-{plugin}-stat_interval>> |<<number,number>>|No
+|=======================================================================
+
+Also see <<plugins-{type}s-{plugin}-common-options>> for a list of options supported by all
+input plugins.
+
+
+
+[id="plugins-{type}s-{plugin}-close_older"]
+===== `close_older`
+
+* Value type is <<number,number>>
+* Default value is `3600`
+
+The file input closes any files that were last read the specified
+timespan in seconds ago.
+This has different implications depending on if a file is being tailed or
+read. If tailing, and there is a large time gap in incoming data the file
+can be closed (allowing other files to be opened) but will be queued for
+reopening when new data is detected. If reading, the file will be closed
+after closed_older seconds from when the last bytes were read.
+The default is 1 hour
+
+[id="plugins-{type}s-{plugin}-delimiter"]
+===== `delimiter`
+
+* Value type is <<string,string>>
+* Default value is `"\n"`
+
+set the new line delimiter, defaults to "\n"
+
+[id="plugins-{type}s-{plugin}-discover_interval"]
+===== `discover_interval`
+
+* Value type is <<number,number>>
+* Default value is `15`
+
+How often (in seconds) we expand the filename patterns in the
+`path` option to discover new files to watch.
+
+[id="plugins-{type}s-{plugin}-exclude"]
+===== `exclude`
+
+* Value type is <<array,array>>
+* There is no default value for this setting.
+
+Exclusions (matched against the filename, not full path). Filename
+patterns are valid here, too. For example, if you have
+[source,ruby]
+    path => "/var/log/*"
+
+You might want to exclude gzipped files:
+[source,ruby]
+    exclude => "*.gz"
+
+[id="plugins-{type}s-{plugin}-ignore_older"]
+===== `ignore_older`
+
+* Value type is <<number,number>>
+* There is no default value for this setting.
+
+When the file input discovers a file that was last modified
+before the specified timespan in seconds, the file is ignored.
+After it's discovery, if an ignored file is modified it is no
+longer ignored and any new data is read. By default, this option is
+disabled. Note this unit is in seconds.
+
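A sketch of this option: ignore files whose last modification is older than one day (86400 seconds); the path and the value are only examples.

[source,ruby]
----
input {
  file {
    path         => "/var/log/batch/*.log"   # hypothetical path
    ignore_older => 86400                    # skip files not modified in the last 24 hours
  }
}
----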
+[id="plugins-{type}s-{plugin}-max_open_files"]
+===== `max_open_files`
+
+* Value type is <<number,number>>
+* There is no default value for this setting.
+
+What is the maximum number of file_handles that this input consumes
+at any one time. Use close_older to close some files if you need to
+process more files than this number. This should not be set to the
+maximum the OS can do because file handles are needed for other
+LS plugins and OS processes.
+The default of 4095 is set in filewatch.
+
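A hedged sketch combining `max_open_files` with `close_older`, as the text above suggests when many files must be processed; the path and numbers are illustrative, not recommendations.

[source,ruby]
----
input {
  file {
    path           => "/var/log/containers/*.log"   # hypothetical directory with many files
    max_open_files => 4095                          # stay at the filewatch default cap
    close_older    => 1800                          # release idle file handles after 30 minutes
  }
}
----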
+[id="plugins-{type}s-{plugin}-path"]
+===== `path`
+
+* This is a required setting.
+* Value type is <<array,array>>
+* There is no default value for this setting.
+
+The path(s) to the file(s) to use as an input.
+You can use filename patterns here, such as `/var/log/*.log`.
+If you use a pattern like `/var/log/**/*.log`, a recursive search
+of `/var/log` will be done for all `*.log` files.
+Paths must be absolute and cannot be relative.
+
+You may also configure multiple paths. See an example
+on the <<array,Logstash configuration page>>.
+
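For example, multiple absolute paths and glob patterns can be given as an array; the values below are hypothetical.

[source,ruby]
----
input {
  file {
    path => [
      "/var/log/messages",
      "/var/log/*.log",
      "/opt/myapp/**/*.log"    # recursive search under /opt/myapp
    ]
  }
}
----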
+[id="plugins-{type}s-{plugin}-sincedb_path"]
+===== `sincedb_path`
+
+* Value type is <<string,string>>
+* There is no default value for this setting.
+
+Path of the sincedb database file (keeps track of the current
+position of monitored log files) that will be written to disk.
+The default will write sincedb files to `<path.data>/plugins/inputs/file`
+NOTE: it must be a file path and not a directory path
+
+[id="plugins-{type}s-{plugin}-sincedb_write_interval"]
+===== `sincedb_write_interval`
+
+* Value type is <<number,number>>
+* Default value is `15`
+
+How often (in seconds) to write a since database with the current position of
+monitored log files.
+
+[id="plugins-{type}s-{plugin}-start_position"]
+===== `start_position`
+
+* Value can be any of: `beginning`, `end`
+* Default value is `"end"`
+
+Choose where Logstash starts initially reading files: at the beginning or
+at the end. The default behavior treats files like live streams and thus
+starts at the end. If you have old data you want to import, set this
+to 'beginning'.
+
+This option only modifies "first contact" situations where a file
+is new and not seen before, i.e. files that don't have a current
+position recorded in a sincedb file read by Logstash. If a file
+has already been seen before, this option has no effect and the
+position recorded in the sincedb file will be used.
+
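A minimal sketch of importing existing data from the start of a file that Logstash has not seen before; the path is hypothetical.

[source,ruby]
----
input {
  file {
    path           => "/var/log/archive/old-app.log"
    start_position => "beginning"    # only applies on first contact with the file
  }
}
----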
+[id="plugins-{type}s-{plugin}-stat_interval"]
+===== `stat_interval`
+
+* Value type is <<number,number>>
+* Default value is `1`
+
+How often (in seconds) we stat files to see if they have been modified.
+Increasing this interval will decrease the number of system calls we make,
+but increase the time to detect new log lines.
+
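As a tuning sketch, both polling intervals can be relaxed on quiet systems at the cost of slower detection of new lines and new files; the path and values are illustrative only.

[source,ruby]
----
input {
  file {
    path              => "/var/log/quiet/*.log"   # hypothetical path
    stat_interval     => 5                        # stat watched files every 5 seconds
    discover_interval => 30                       # expand the path patterns for new files every 30 seconds (per the description above)
  }
}
----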
+
+
+[id="plugins-{type}s-{plugin}-common-options"]
+include::{include_path}/{type}.asciidoc[]
data/lib/logstash/inputs/file.rb
CHANGED
@@ -6,6 +6,7 @@ require "logstash/codecs/identity_map_codec"
 require "pathname"
 require "socket" # for Socket.gethostname
 require "fileutils"
+require_relative "file/patch"
 
 # Stream events from files, normally by tailing them in a manner
 # similar to `tail -0F` but optionally reading them from the
@@ -23,6 +24,12 @@ require "fileutils"
 # beginning to end and storing all of it in a single event (not even
 # with the multiline codec or filter).
 #
+# ==== Reading from remote network volumes
+#
+# The file input is not tested on remote filesystems such as NFS, Samba, s3fs-fuse, etc. These
+# remote filesystems typically have behaviors that are very different from local filesystems and
+# are therefore unlikely to work correctly when used with the file input.
+#
 # ==== Tracking of current position in watched files
 #
 # The plugin keeps track of the current position in each file by
@@ -71,21 +78,6 @@ require "fileutils"
 # determined by the `stat_interval` and `discover_interval` options)
 # will not get picked up.
 
-class LogStash::Codecs::Base
-  # TODO - move this to core
-  if !method_defined?(:accept)
-    def accept(listener)
-      decode(listener.data) do |event|
-        listener.process_event(event)
-      end
-    end
-  end
-  if !method_defined?(:auto_flush)
-    def auto_flush(*)
-    end
-  end
-end
-
 class LogStash::Inputs::File < LogStash::Inputs::Base
   config_name "file"
 
@@ -120,7 +112,7 @@ class LogStash::Inputs::File < LogStash::Inputs::Base
 
   # Path of the sincedb database file (keeps track of the current
   # position of monitored log files) that will be written to disk.
-  # The default will write sincedb files to
+  # The default will write sincedb files to `<path.data>/plugins/inputs/file`
   # NOTE: it must be a file path and not a directory path
   config :sincedb_path, :validate => :string
 

data/lib/logstash/inputs/file/patch.rb
ADDED
@@ -0,0 +1,16 @@
+# encoding: utf-8
+class LogStash::Codecs::Base
+  # TODO - move this to core
+  if !method_defined?(:accept)
+    def accept(listener)
+      decode(listener.data) do |event|
+        listener.process_event(event)
+      end
+    end
+  end
+  if !method_defined?(:auto_flush)
+    def auto_flush(*)
+    end
+  end
+end
+
data/logstash-input-file.gemspec
CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-input-file'
-  s.version = '4.0.
+  s.version = '4.0.2'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Stream events from files."
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -11,7 +11,7 @@ Gem::Specification.new do |s|
   s.require_paths = ["lib"]
 
   # Files
-  s.files = Dir[
+  s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]
 
   # Tests
   s.test_files = s.files.grep(%r{^(test|spec|features)/})
data/spec/inputs/file_spec.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-file
 version: !ruby/object:Gem::Version
-  version: 4.0.
+  version: 4.0.2
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2017-06-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -148,7 +148,9 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-description: This gem is a Logstash plugin required to be installed on top of the
+description: This gem is a Logstash plugin required to be installed on top of the
+  Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
+  gem is not a stand-alone program
 email: info@elastic.co
 executables: []
 extensions: []
@@ -160,7 +162,9 @@ files:
 - LICENSE
 - NOTICE.TXT
 - README.md
+- docs/index.asciidoc
 - lib/logstash/inputs/file.rb
+- lib/logstash/inputs/file/patch.rb
 - logstash-input-file.gemspec
 - spec/inputs/file_spec.rb
 - spec/spec_helper.rb
@@ -186,7 +190,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.
+rubygems_version: 2.6.11
 signing_key:
 specification_version: 4
 summary: Stream events from files.