logstash-input-file 0.1.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,15 @@
  ---
- SHA1:
-   metadata.gz: f76d793c2d582581dcc968bcdc951fd0f1caf922
-   data.tar.gz: 0b4bb47dc61603f1a263bdd0b9e7220848239bdc
+ !binary "U0hBMQ==":
+   metadata.gz: !binary |-
+     NmU2Y2QwMmI0NTNkNDgzNTAzMjQwMDRhZDMxZjNiYzViZDFlMzZjNA==
+   data.tar.gz: !binary |-
+     NTFkZDZjMWNmZjJiYTE0Yjk3MTNjZTgzYTRlMzdkNjQzNDYzM2NlNQ==
  SHA512:
-   metadata.gz: c14ea082099e079659159fae2ddf5210f32678fa4a20fed455fabd13f7860420c5a5759b55efbe2712fd26eee687bdeb5b37f01a972572b24184a9bc652e6676
-   data.tar.gz: 51ea23e1dda2985782258c19a5df8d89fb16b1d16ac1ccd3f57e8e16ef886d56b0f7e51d2ef078bd98a3794ba254583235c283b37eefe00a6cd4d0f69054854b
+   metadata.gz: !binary |-
+     MzhkMGE2YjkwMDE4OGFhZDcwZGZlMDM2MTA0YmZhMmE1Y2YwNmFmZjhkMzg0
+     Mjc0MDdiZGRhZmI3YTc3ZGNlZmIzN2MxYTE1ODhjY2RlZDY4N2ZkNGQ4ZTE0
+     MTNiZmM0YmUyMTIyYjQwNzk2ZTNmNTRlOGIwNDIxZTQ1NTY5MjA=
+   data.tar.gz: !binary |-
+     YjFmY2VlYjI1NGFiMDMyYWNlZjMyMTdkMjA2ZTVlYTg3OGYwZDUyZjUzNDgw
+     NWUzNWM1NTMxYWZhNjMwMWZmNTcyMzA4MjQwNjM3MDQ2OTkwY2UyZmE0NTMy
+     ZTY1MzIwMmM3NzQwOWQ1MzkzNGY2OGFhMWNlMzZiZDYwMjhiZGI=
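The new checksum entries carry the same information as before: each !binary scalar is simply a base64-encoded value instead of a bare hex string. A minimal Ruby sketch of reading one back (only the key is decoded here; the digest scalars decode the same way):

    require "base64"

    # The quoted key decodes back to the algorithm name.
    Base64.decode64("U0hBMQ==")  # => "SHA1"
    # The metadata.gz and data.tar.gz scalars decode to the usual
    # 40-character SHA1 / 128-character SHA512 hex digest strings.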
data/.gitignore ADDED
@@ -0,0 +1,4 @@
+ *.gem
+ Gemfile.lock
+ .bundle
+ vendor
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+ source 'http://rubygems.org'
+ gem 'rake'
+ gem 'gem_publisher'
+ gem 'archive-tar-minitar'
data/LICENSE ADDED
@@ -0,0 +1,13 @@
+ Copyright (c) 2012-2014 Elasticsearch <http://www.elasticsearch.org>
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
data/Rakefile ADDED
@@ -0,0 +1,6 @@
+ @files=[]
+
+ task :default do
+   system("rake -T")
+ end
+
data/lib/logstash/inputs/file.rb ADDED
@@ -0,0 +1,150 @@
+ # encoding: utf-8
+ require "logstash/inputs/base"
+ require "logstash/namespace"
+
+ require "pathname"
+ require "socket" # for Socket.gethostname
+
+ # Stream events from files.
+ #
+ # By default, each event is assumed to be one line. If you would like
+ # to join multiple log lines into one event, you'll want to use the
+ # multiline codec.
+ #
+ # Files are followed in a manner similar to "tail -0F". File rotation
+ # is detected and handled by this input.
+ class LogStash::Inputs::File < LogStash::Inputs::Base
+   config_name "file"
+   milestone 2
+
+   # TODO(sissel): This should switch to use the 'line' codec by default
+   # once file following
+   default :codec, "plain"
+
+   # The path(s) to the file(s) to use as an input.
+   # You can use globs here, such as `/var/log/*.log`
+   # Paths must be absolute and cannot be relative.
+   #
+   # You may also configure multiple paths. See an example
+   # on the [Logstash configuration page](configuration#array).
+   config :path, :validate => :array, :required => true
+
+   # Exclusions (matched against the filename, not full path). Globs
+   # are valid here, too. For example, if you have
+   #
+   #     path => "/var/log/*"
+   #
+   # You might want to exclude gzipped files:
+   #
+   #     exclude => "*.gz"
+   config :exclude, :validate => :array
+
+   # How often we stat files to see if they have been modified. Increasing
+   # this interval will decrease the number of system calls we make, but
+   # increase the time to detect new log lines.
+   config :stat_interval, :validate => :number, :default => 1
+
+   # How often we expand globs to discover new files to watch.
+   config :discover_interval, :validate => :number, :default => 15
+
+   # Where to write the sincedb database (keeps track of the current
+   # position of monitored log files). The default will write
+   # sincedb files to some path matching "$HOME/.sincedb*"
+   config :sincedb_path, :validate => :string
+
+   # How often (in seconds) to write a since database with the current position of
+   # monitored log files.
+   config :sincedb_write_interval, :validate => :number, :default => 15
+
+   # Choose where Logstash starts initially reading files: at the beginning or
+   # at the end. The default behavior treats files like live streams and thus
+   # starts at the end. If you have old data you want to import, set this
+   # to 'beginning'
+   #
+   # This option only modifies "first contact" situations where a file is new
+   # and not seen before. If a file has already been seen before, this option
+   # has no effect.
+   config :start_position, :validate => [ "beginning", "end"], :default => "end"
+
+   public
+   def register
+     require "addressable/uri"
+     require "filewatch/tail"
+     require "digest/md5"
+     @logger.info("Registering file input", :path => @path)
+
+     @tail_config = {
+       :exclude => @exclude,
+       :stat_interval => @stat_interval,
+       :discover_interval => @discover_interval,
+       :sincedb_write_interval => @sincedb_write_interval,
+       :logger => @logger,
+     }
+
+     @path.each do |path|
+       if Pathname.new(path).relative?
+         raise ArgumentError.new("File paths must be absolute, relative path specified: #{path}")
+       end
+     end
+
+     if @sincedb_path.nil?
+       if ENV["SINCEDB_DIR"].nil? && ENV["HOME"].nil?
+         @logger.error("No SINCEDB_DIR or HOME environment variable set, I don't know where " \
+                       "to keep track of the files I'm watching. Either set " \
+                       "HOME or SINCEDB_DIR in your environment, or set sincedb_path in " \
+                       "in your Logstash config for the file input with " \
+                       "path '#{@path.inspect}'")
+         raise # TODO(sissel): HOW DO I FAIL PROPERLY YO
+       end
+
+       #pick SINCEDB_DIR if available, otherwise use HOME
+       sincedb_dir = ENV["SINCEDB_DIR"] || ENV["HOME"]
+
+       # Join by ',' to make it easy for folks to know their own sincedb
+       # generated path (vs, say, inspecting the @path array)
+       @sincedb_path = File.join(sincedb_dir, ".sincedb_" + Digest::MD5.hexdigest(@path.join(",")))
+
+       # Migrate any old .sincedb to the new file (this is for version <=1.1.1 compatibility)
+       old_sincedb = File.join(sincedb_dir, ".sincedb")
+       if File.exists?(old_sincedb)
+         @logger.info("Renaming old ~/.sincedb to new one", :old => old_sincedb,
+                      :new => @sincedb_path)
+         File.rename(old_sincedb, @sincedb_path)
+       end
+
+       @logger.info("No sincedb_path set, generating one based on the file path",
+                    :sincedb_path => @sincedb_path, :path => @path)
+     end
+
+     @tail_config[:sincedb_path] = @sincedb_path
+
+     if @start_position == "beginning"
+       @tail_config[:start_new_files_at] = :beginning
+     end
+   end # def register
+
+   public
+   def run(queue)
+     @tail = FileWatch::Tail.new(@tail_config)
+     @tail.logger = @logger
+     @path.each { |path| @tail.tail(path) }
+     hostname = Socket.gethostname
+
+     @tail.subscribe do |path, line|
+       @logger.debug? && @logger.debug("Received line", :path => path, :text => line)
+       @codec.decode(line) do |event|
+         decorate(event)
+         event["host"] = hostname if !event.include?("host")
+         event["path"] = path
+         queue << event
+       end
+     end
+     finished
+   end # def run
+
+   public
+   def teardown
+     @tail.sincedb_write
+     @tail.quit
+   end # def teardown
+ end # class LogStash::Inputs::File
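When sincedb_path is left unset, register above derives a per-path-set filename under SINCEDB_DIR or HOME. A minimal standalone sketch of that derivation (the watched paths here are hypothetical):

    require "digest/md5"

    paths = ["/var/log/syslog", "/var/log/*.log"]    # hypothetical path array
    sincedb_dir = ENV["SINCEDB_DIR"] || ENV["HOME"]  # same fallback order as register
    sincedb = File.join(sincedb_dir, ".sincedb_" + Digest::MD5.hexdigest(paths.join(",")))
    # => something like "$HOME/.sincedb_<md5 of the comma-joined paths>"

Because the digest is computed from the comma-joined path list, changing the path setting produces a new sincedb file.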
data/logstash-input-file.gemspec ADDED
@@ -0,0 +1,30 @@
+ Gem::Specification.new do |s|
+
+   s.name = 'logstash-input-file'
+   s.version = '0.1.1'
+   s.licenses = ['Apache License (2.0)']
+   s.summary = "Stream events from files."
+   s.description = "Stream events from files."
+   s.authors = ["Elasticsearch"]
+   s.email = 'richard.pijnenburg@elasticsearch.com'
+   s.homepage = "http://logstash.net/"
+   s.require_paths = ["lib"]
+
+   # Files
+   s.files = `git ls-files`.split($\)+::Dir.glob('vendor/*')
+
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "group" => "input" }
+
+   # Gem dependencies
+   s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
+
+   s.add_runtime_dependency 'logstash-codec-plain'
+   s.add_runtime_dependency 'addressable'
+   s.add_runtime_dependency 'filewatch', ['0.5.1']
+
+ end
+
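The logstash_plugin and group metadata entries are what mark the gem as a Logstash plugin and identify it as an input (per the comment in the gemspec). A small sketch, assuming the gem is installed locally, of reading them back through RubyGems:

    require "rubygems"

    spec = Gem::Specification.find_by_name("logstash-input-file")
    spec.metadata["logstash_plugin"]  # => "true"
    spec.metadata["group"]            # => "input"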
data/rakelib/publish.rake ADDED
@@ -0,0 +1,9 @@
+ require "gem_publisher"
+
+ desc "Publish gem to RubyGems.org"
+ task :publish_gem do |t|
+   gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
+   gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
+   puts "Published #{gem}" if gem
+ end
+
data/rakelib/vendor.rake ADDED
@@ -0,0 +1,169 @@
+ require "net/http"
+ require "uri"
+ require "digest/sha1"
+
+ def vendor(*args)
+   return File.join("vendor", *args)
+ end
+
+ directory "vendor/" => ["vendor"] do |task, args|
+   mkdir task.name
+ end
+
+ def fetch(url, sha1, output)
+
+   puts "Downloading #{url}"
+   actual_sha1 = download(url, output)
+
+   if actual_sha1 != sha1
+     fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
+   end
+ end # def fetch
+
+ def file_fetch(url, sha1)
+   filename = File.basename( URI(url).path )
+   output = "vendor/#{filename}"
+   task output => [ "vendor/" ] do
+     begin
+       actual_sha1 = file_sha1(output)
+       if actual_sha1 != sha1
+         fetch(url, sha1, output)
+       end
+     rescue Errno::ENOENT
+       fetch(url, sha1, output)
+     end
+   end.invoke
+
+   return output
+ end
+
+ def file_sha1(path)
+   digest = Digest::SHA1.new
+   fd = File.new(path, "r")
+   while true
+     begin
+       digest << fd.sysread(16384)
+     rescue EOFError
+       break
+     end
+   end
+   return digest.hexdigest
+ ensure
+   fd.close if fd
+ end
+
+ def download(url, output)
+   uri = URI(url)
+   digest = Digest::SHA1.new
+   tmp = "#{output}.tmp"
+   Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
+     request = Net::HTTP::Get.new(uri.path)
+     http.request(request) do |response|
+       fail "HTTP fetch failed for #{url}. #{response}" if [200, 301].include?(response.code)
+       size = (response["content-length"].to_i || -1).to_f
+       count = 0
+       File.open(tmp, "w") do |fd|
+         response.read_body do |chunk|
+           fd.write(chunk)
+           digest << chunk
+           if size > 0 && $stdout.tty?
+             count += chunk.bytesize
+             $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
+           end
+         end
+       end
+       $stdout.write("\r \r") if $stdout.tty?
+     end
+   end
+
+   File.rename(tmp, output)
+
+   return digest.hexdigest
+ rescue SocketError => e
+   puts "Failure while downloading #{url}: #{e}"
+   raise
+ ensure
+   File.unlink(tmp) if File.exist?(tmp)
+ end # def download
+
+ def untar(tarball, &block)
+   require "archive/tar/minitar"
+   tgz = Zlib::GzipReader.new(File.open(tarball))
+   # Pull out typesdb
+   tar = Archive::Tar::Minitar::Input.open(tgz)
+   tar.each do |entry|
+     path = block.call(entry)
+     next if path.nil?
+     parent = File.dirname(path)
+
+     mkdir_p parent unless File.directory?(parent)
+
+     # Skip this file if the output file is the same size
+     if entry.directory?
+       mkdir path unless File.directory?(path)
+     else
+       entry_mode = entry.instance_eval { @mode } & 0777
+       if File.exists?(path)
+         stat = File.stat(path)
+         # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
+         # expose headers in the entry.
+         entry_size = entry.instance_eval { @size }
+         # If file sizes are same, skip writing.
+         next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
+       end
+       puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
+       File.open(path, "w") do |fd|
+         # eof? check lets us skip empty files. Necessary because the API provided by
+         # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
+         # IO object. Something about empty files in this EntryStream causes
+         # IO.copy_stream to throw "can't convert nil into String" on JRuby
+         # TODO(sissel): File a bug about this.
+         while !entry.eof?
+           chunk = entry.read(16384)
+           fd.write(chunk)
+         end
+         #IO.copy_stream(entry, fd)
+       end
+       File.chmod(entry_mode, path)
+     end
+   end
+   tar.close
+   File.unlink(tarball) if File.file?(tarball)
+ end # def untar
+
+ def ungz(file)
+
+   outpath = file.gsub('.gz', '')
+   tgz = Zlib::GzipReader.new(File.open(file))
+   begin
+     File.open(outpath, "w") do |out|
+       IO::copy_stream(tgz, out)
+     end
+     File.unlink(file)
+   rescue
+     File.unlink(outpath) if File.file?(outpath)
+     raise
+   end
+   tgz.close
+ end
+
+ desc "Process any vendor files required for this plugin"
+ task "vendor" do |task, args|
+
+   @files.each do |file|
+     download = file_fetch(file['url'], file['sha1'])
+     if download =~ /.tar.gz/
+       prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
+       untar(download) do |entry|
+         if !file['files'].nil?
+           next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
+           out = entry.full_name.split("/").last
+         end
+         File.join('vendor', out)
+       end
+     elsif download =~ /.gz/
+       ungz(download)
+     end
+   end
+
+ end
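The vendor task above iterates @files, which this plugin's Rakefile leaves as an empty array, so the task is a no-op here. For a plugin that does vendor artifacts, an entry would look roughly like this (URL, checksum, and file list are hypothetical placeholders):

    @files = [
      {
        'url'   => 'https://example.org/some-dependency-1.0.tar.gz',   # hypothetical URL
        'sha1'  => '0000000000000000000000000000000000000000',          # placeholder digest
        # Optional: extract only these entries (paths relative to the tarball prefix).
        'files' => ['/data/types.db']
      }
    ]

Each entry is fetched into vendor/, its SHA1 is verified, and .tar.gz downloads are unpacked by untar with the optional 'files' filter applied.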
data/spec/inputs/file_spec.rb ADDED
@@ -0,0 +1,132 @@
+ # encoding: utf-8
+
+ require "spec_helper"
+ require "tempfile"
+
+ describe "inputs/file" do
+
+
+   describe "starts at the end of an existing file" do
+     tmp_file = Tempfile.new('logstash-spec-input-file')
+
+     config <<-CONFIG
+       input {
+         file {
+           type => "blah"
+           path => "#{tmp_file.path}"
+           sincedb_path => "/dev/null"
+         }
+       }
+     CONFIG
+
+     input do |pipeline, queue|
+       File.open(tmp_file, "w") do |fd|
+         fd.puts("ignore me 1")
+         fd.puts("ignore me 2")
+       end
+
+       Thread.new { pipeline.run }
+       sleep 0.1 while !pipeline.ready?
+
+       # at this point even if pipeline.ready? == true the plugins
+       # threads might still be initializing so we cannot know when the
+       # file plugin will have seen the original file, it could see it
+       # after the first(s) hello world appends below, hence the
+       # retry logic.
+
+       retries = 0
+       loop do
+         insist { retries } < 20 # 2 secs should be plenty?
+
+         File.open(tmp_file, "a") do |fd|
+           fd.puts("hello")
+           fd.puts("world")
+         end
+
+         if queue.size >= 2
+           events = 2.times.collect { queue.pop }
+           insist { events[0]["message"] } == "hello"
+           insist { events[1]["message"] } == "world"
+           break
+         end
+
+         sleep(0.1)
+         retries += 1
+       end
+     end
+   end
+
+   describe "can start at the beginning of an existing file" do
+     tmp_file = Tempfile.new('logstash-spec-input-file')
+
+     config <<-CONFIG
+       input {
+         file {
+           type => "blah"
+           path => "#{tmp_file.path}"
+           start_position => "beginning"
+           sincedb_path => "/dev/null"
+         }
+       }
+     CONFIG
+
+     input do |pipeline, queue|
+       File.open(tmp_file, "a") do |fd|
+         fd.puts("hello")
+         fd.puts("world")
+       end
+
+       Thread.new { pipeline.run }
+       sleep 0.1 while !pipeline.ready?
+
+       events = 2.times.collect { queue.pop }
+       insist { events[0]["message"] } == "hello"
+       insist { events[1]["message"] } == "world"
+     end
+   end
+
+   describe "restarts at the sincedb value" do
+     tmp_file = Tempfile.new('logstash-spec-input-file')
+     tmp_sincedb = Tempfile.new('logstash-spec-input-file-sincedb')
+
+     config <<-CONFIG
+       input {
+         file {
+           type => "blah"
+           path => "#{tmp_file.path}"
+           start_position => "beginning"
+           sincedb_path => "#{tmp_sincedb.path}"
+         }
+       }
+     CONFIG
+
+     input do |pipeline, queue|
+       File.open(tmp_file, "w") do |fd|
+         fd.puts("hello")
+         fd.puts("world")
+       end
+
+       t = Thread.new { pipeline.run }
+       sleep 0.1 while !pipeline.ready?
+
+       events = 2.times.collect { queue.pop }
+       pipeline.shutdown
+       t.join
+
+       File.open(tmp_file, "a") do |fd|
+         fd.puts("foo")
+         fd.puts("bar")
+         fd.puts("baz")
+       end
+
+       Thread.new { pipeline.run }
+       sleep 0.1 while !pipeline.ready?
+
+       events = 3.times.collect { queue.pop }
+
+       insist { events[0]["message"] } == "foo"
+       insist { events[1]["message"] } == "bar"
+       insist { events[2]["message"] } == "baz"
+     end
+   end
+ end
metadata CHANGED
@@ -1,92 +1,117 @@
  --- !ruby/object:Gem::Specification
  name: logstash-input-file
  version: !ruby/object:Gem::Version
-   version: 0.1.0
+   version: 0.1.1
  platform: ruby
  authors:
  - Elasticsearch
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-07-15 00:00:00.000000000 Z
+ date: 2014-11-05 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
-   name: filewatch
+   name: logstash
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - '='
+     - - ! '>='
        - !ruby/object:Gem::Version
-         version: 0.5.1
+         version: 1.4.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: 2.0.0
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - '='
+     - - ! '>='
        - !ruby/object:Gem::Version
-         version: 0.5.1
+         version: 1.4.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: 2.0.0
  - !ruby/object:Gem::Dependency
-   name: addressable
+   name: logstash-codec-plain
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - ! '>='
        - !ruby/object:Gem::Version
          version: '0'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - ! '>='
        - !ruby/object:Gem::Version
          version: '0'
  - !ruby/object:Gem::Dependency
-   name: logstash
+   name: addressable
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
-       - !ruby/object:Gem::Version
-         version: 1.4.0
-     - - "<"
+     - - ! '>='
        - !ruby/object:Gem::Version
-         version: 2.0.0
+         version: '0'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - ! '>='
        - !ruby/object:Gem::Version
-         version: 1.4.0
-     - - "<"
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: filewatch
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
        - !ruby/object:Gem::Version
-         version: 2.0.0
+         version: 0.5.1
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 0.5.1
  description: Stream events from files.
- email: rubycoder@example.com
+ email: richard.pijnenburg@elasticsearch.com
  executables: []
  extensions: []
  extra_rdoc_files: []
- files: []
+ files:
+ - .gitignore
+ - Gemfile
+ - LICENSE
+ - Rakefile
+ - lib/logstash/inputs/file.rb
+ - logstash-input-file.gemspec
+ - rakelib/publish.rake
+ - rakelib/vendor.rake
+ - spec/inputs/file_spec.rb
  homepage: http://logstash.net/
  licenses:
  - Apache License (2.0)
  metadata:
    logstash_plugin: 'true'
+   group: input
  post_install_message:
  rdoc_options: []
  require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
    requirements:
-   - - ">="
+   - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
-   - - ">="
+   - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.3.0
+ rubygems_version: 2.4.1
  signing_key:
  specification_version: 4
- summary: Logstash File input
- test_files: []
+ summary: Stream events from files.
+ test_files:
+ - spec/inputs/file_spec.rb