logstash-output-file 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml ADDED
@@ -0,0 +1,15 @@
1
+ ---
2
+ !binary "U0hBMQ==":
3
+ metadata.gz: !binary |-
4
+ MjllMjc2MzAwYzI3NGE1MzMxNjU4MzY4NWUxNDI4MTU5ZjZhNDM5OA==
5
+ data.tar.gz: !binary |-
6
+ NDNlNjY4N2E5NzZhYWZiNzU2ZTFhNDk3MDllOWUxOTY0MWFkN2FjYw==
7
+ SHA512:
8
+ metadata.gz: !binary |-
9
+ NzllNWZkMTAwN2RhNjAwZTUxYjA2YzUyYzRkMjIzOWM4OWMzZTQ3NWRjZjk3
10
+ OGUxOWQyZjk4NmY5ZmMzMWM3MGFkZjdjYTU2YWVmNmNlMzQzOTE4YjQzMmE2
11
+ OGMwMWUyODRjMGVmMThiNjk1Mzc3OWY4MjAwYTdjMmVjY2JmYjg=
12
+ data.tar.gz: !binary |-
13
+ NWIyNGE1MTI0MDEwOTMzNzRhY2JmN2E2Y2E0MDE2ODlkMTA4MTNiMmU5MmUx
14
+ MGZiODAwOGVjODczODJlNDMxOTZiZmU0MjBhNTM3OWQ0NWY2MDg4ZjcyMWNl
15
+ OWFiZGUyM2MzZGNhMDhjYjIxZjYxYmI0YWU4M2UwOWFmNzI5Yzg=
data/.gitignore ADDED
@@ -0,0 +1,4 @@
1
+ *.gem
2
+ Gemfile.lock
3
+ .bundle
4
+ vendor
data/Gemfile ADDED
@@ -0,0 +1,4 @@
1
+ source 'http://rubygems.org'
2
+ gem 'rake'
3
+ gem 'gem_publisher'
4
+ gem 'archive-tar-minitar'
data/LICENSE ADDED
@@ -0,0 +1,13 @@
1
+ Copyright (c) 2012-2014 Elasticsearch <http://www.elasticsearch.org>
2
+
3
+ Licensed under the Apache License, Version 2.0 (the "License");
4
+ you may not use this file except in compliance with the License.
5
+ You may obtain a copy of the License at
6
+
7
+ http://www.apache.org/licenses/LICENSE-2.0
8
+
9
+ Unless required by applicable law or agreed to in writing, software
10
+ distributed under the License is distributed on an "AS IS" BASIS,
11
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ See the License for the specific language governing permissions and
13
+ limitations under the License.
data/Rakefile ADDED
@@ -0,0 +1,6 @@
1
+ @files=[]
2
+
3
+ task :default do
4
+ system("rake -T")
5
+ end
6
+
data/lib/logstash/outputs/file.rb ADDED
@@ -0,0 +1,243 @@
1
+ # encoding: utf-8
2
+ require "logstash/namespace"
3
+ require "logstash/outputs/base"
4
+ require "logstash/errors"
5
+ require "zlib"
6
+
7
+ # This output will write events to files on disk. You can use fields
8
+ # from the event as parts of the filename and/or path.
9
+ class LogStash::Outputs::File < LogStash::Outputs::Base
10
+
11
+ config_name "file"
12
+ milestone 2
13
+
14
+ # The path to the file to write. Event fields can be used here,
15
+ # like "/var/log/logstash/%{host}/%{application}"
16
+ # One may also utilize the path option for date-based log
17
+ # rotation via the joda time format. This will use the event
18
+ # timestamp.
19
+ # E.g.: path => "./test-%{+YYYY-MM-dd}.txt" to create
20
+ # ./test-2013-05-29.txt
21
+ #
22
+ # If you use an absolute path you cannot start with a dynamic string.
23
+ # E.g: /%{myfield}/, /test-%{myfield}/ are not valid paths
24
+ config :path, :validate => :string, :required => true
25
+
26
+ # The maximum size of file to write. When the file exceeds this
27
+ # threshold, it will be rotated to the current filename + ".1"
28
+ # If that file already exists, the previous .1 will shift to .2
29
+ # and so forth.
30
+ #
31
+ # NOT YET SUPPORTED
32
+ config :max_size, :validate => :string
33
+
34
+ # The format to use when writing events to the file. This value
35
+ # supports any string and can include %{name} and other dynamic
36
+ # strings.
37
+ #
38
+ # If this setting is omitted, the full json representation of the
39
+ # event will be written as a single line.
40
+ config :message_format, :validate => :string
41
+
42
+ # Flush interval (in seconds) for flushing writes to log files.
43
+ # 0 will flush on every message.
44
+ config :flush_interval, :validate => :number, :default => 2
45
+
46
+ # Gzip the output stream before writing to disk.
47
+ config :gzip, :validate => :boolean, :default => false
48
+
49
+ # If the generated path is invalid, the events will be saved
50
+ # into this file and inside the defined path.
51
+ config :filename_failure, :validate => :string, :default => '_filepath_failures'
52
+
53
+ public
54
+ def register
55
+ require "fileutils" # For mkdir_p
56
+
57
+ workers_not_supported
58
+
59
+ @files = {}
60
+
61
+ @path = File.expand_path(path)
62
+
63
+ validate_path
64
+
65
+ if path_with_field_ref?
66
+ @file_root = extract_file_root
67
+ @failure_path = File.join(@file_root, @filename_failure)
68
+ end
69
+
70
+ now = Time.now
71
+ @last_flush_cycle = now
72
+ @last_stale_cleanup_cycle = now
73
+ @flush_interval = @flush_interval.to_i
74
+ @stale_cleanup_interval = 10
75
+ end # def register
76
+
77
+ private
78
+ def validate_path
79
+ root_directory = @path.split(File::SEPARATOR).select { |item| !item.empty? }.shift
80
+
81
+ if (root_directory =~ /%\{[^}]+\}/) != nil
82
+ @logger.error("File: The starting part of the path should not be dynamic.", :path => @path)
83
+ raise LogStash::ConfigurationError.new("The starting part of the path should not be dynamic.")
84
+ end
85
+ end
86
+
87
+ public
88
+ def receive(event)
89
+ return unless output?(event)
90
+
91
+ file_output_path = generate_filepath(event)
92
+
93
+ if path_with_field_ref? && !inside_file_root?(file_output_path)
94
+ @logger.warn("File: the event tried to write outside the files root, writing the event to the failure file", :event => event, :filename => @failure_path)
95
+ file_output_path = @failure_path
96
+ end
97
+
98
+ output = format_message(event)
99
+ write_event(file_output_path, output)
100
+ end # def receive
101
+
102
+ private
103
+ def inside_file_root?(log_path)
104
+ target_file = File.expand_path(log_path)
105
+ return target_file.start_with?("#{@file_root.to_s}/")
106
+ end
107
+
108
+ private
109
+ def write_event(log_path, event)
110
+ @logger.debug("File, writing event to file.", :filename => log_path)
111
+ fd = open(log_path)
112
+
113
+ # TODO(sissel): Check if we should rotate the file.
114
+
115
+ fd.write(event)
116
+ fd.write("\n")
117
+
118
+ flush(fd)
119
+ close_stale_files
120
+ end
121
+
122
+ private
123
+ def generate_filepath(event)
124
+ event.sprintf(@path)
125
+ end
126
+
127
+ private
128
+ def path_with_field_ref?
129
+ path =~ /%\{[^}]+\}/
130
+ end
131
+
132
+ def format_message(event)
133
+ if @message_format
134
+ event.sprintf(@message_format)
135
+ else
136
+ event.to_json
137
+ end
138
+ end
139
+
140
+ def extract_file_root
141
+ extracted_path = File.expand_path(path.gsub(/%{.+/, ''))
142
+ Pathname.new(extracted_path).expand_path
143
+ end
144
+
145
+ def teardown
146
+ @logger.debug("Teardown: closing files")
147
+ @files.each do |path, fd|
148
+ begin
149
+ fd.close
150
+ @logger.debug("Closed file #{path}", :fd => fd)
151
+ rescue Exception => e
152
+ @logger.error("Exception while flushing and closing files.", :exception => e)
153
+ end
154
+ end
155
+ finished
156
+ end
157
+
158
+ private
159
+ def flush(fd)
160
+ if flush_interval > 0
161
+ flush_pending_files
162
+ else
163
+ fd.flush
164
+ end
165
+ end
166
+
167
+ # every flush_interval seconds or so (triggered by events, but if there are no events there's no point flushing files anyway)
168
+ def flush_pending_files
169
+ return unless Time.now - @last_flush_cycle >= flush_interval
170
+ @logger.debug("Starting flush cycle")
171
+ @files.each do |path, fd|
172
+ @logger.debug("Flushing file", :path => path, :fd => fd)
173
+ fd.flush
174
+ end
175
+ @last_flush_cycle = Time.now
176
+ end
177
+
178
+ # every 10 seconds or so (triggered by events, but if there are no events there's no point closing files anyway)
179
+ def close_stale_files
180
+ now = Time.now
181
+ return unless now - @last_stale_cleanup_cycle >= @stale_cleanup_interval
182
+ @logger.info("Starting stale files cleanup cycle", :files => @files)
183
+ inactive_files = @files.select { |path, fd| not fd.active }
184
+ @logger.debug("%d stale files found" % inactive_files.count, :inactive_files => inactive_files)
185
+ inactive_files.each do |path, fd|
186
+ @logger.info("Closing file %s" % path)
187
+ fd.close
188
+ @files.delete(path)
189
+ end
190
+ # mark all files as inactive, a call to write will mark them as active again
191
+ @files.each { |path, fd| fd.active = false }
192
+ @last_stale_cleanup_cycle = now
193
+ end
194
+
195
+ def open(path)
196
+ return @files[path] if @files.include?(path) and not @files[path].nil?
197
+
198
+ @logger.info("Opening file", :path => path)
199
+
200
+ dir = File.dirname(path)
201
+ if !Dir.exists?(dir)
202
+ @logger.info("Creating directory", :directory => dir)
203
+ FileUtils.mkdir_p(dir)
204
+ end
205
+
206
+ # work around a bug opening fifos (bug JRUBY-6280)
207
+ stat = File.stat(path) rescue nil
208
+ if stat and stat.ftype == "fifo" and RUBY_PLATFORM == "java"
209
+ fd = java.io.FileWriter.new(java.io.File.new(path))
210
+ else
211
+ fd = File.new(path, "a")
212
+ end
213
+ if gzip
214
+ fd = Zlib::GzipWriter.new(fd)
215
+ end
216
+ @files[path] = IOWriter.new(fd)
217
+ end
218
+ end # class LogStash::Outputs::File
219
+
220
+ # wrapper class
221
+ class IOWriter
222
+ def initialize(io)
223
+ @io = io
224
+ end
225
+ def write(*args)
226
+ @io.write(*args)
227
+ @active = true
228
+ end
229
+ def flush
230
+ @io.flush
231
+ if @io.class == Zlib::GzipWriter
232
+ @io.to_io.flush
233
+ end
234
+ end
235
+ def method_missing(method_name, *args, &block)
236
+ if @io.respond_to?(method_name)
237
+ @io.send(method_name, *args, &block)
238
+ else
239
+ super
240
+ end
241
+ end
242
+ attr_accessor :active
243
+ end
data/logstash-output-file.gemspec ADDED
@@ -0,0 +1,27 @@
1
+ Gem::Specification.new do |s|
2
+
3
+ s.name = 'logstash-output-file'
4
+ s.version = '0.1.0'
5
+ s.licenses = ['Apache License (2.0)']
6
+ s.summary = "This output will write events to files on disk"
7
+ s.description = "This output will write events to files on disk"
8
+ s.authors = ["Elasticsearch"]
9
+ s.email = 'richard.pijnenburg@elasticsearch.com'
10
+ s.homepage = "http://logstash.net/"
11
+ s.require_paths = ["lib"]
12
+
13
+ # Files
14
+ s.files = `git ls-files`.split($\)+::Dir.glob('vendor/*')
15
+
16
+ # Tests
17
+ s.test_files = s.files.grep(%r{^(test|spec|features)/})
18
+
19
+ # Special flag to let us know this is actually a logstash plugin
20
+ s.metadata = { "logstash_plugin" => "true", "group" => "output" }
21
+
22
+ # Gem dependencies
23
+ s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
24
+ s.add_runtime_dependency 'logstash-input-generator'
25
+
26
+ end
27
+
data/rakelib/publish.rake ADDED
@@ -0,0 +1,9 @@
1
+ require "gem_publisher"
2
+
3
+ desc "Publish gem to RubyGems.org"
4
+ task :publish_gem do |t|
5
+ gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
6
+ gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
7
+ puts "Published #{gem}" if gem
8
+ end
9
+
data/rakelib/vendor.rake ADDED
@@ -0,0 +1,169 @@
1
+ require "net/http"
2
+ require "uri"
3
+ require "digest/sha1"
4
+
5
+ def vendor(*args)
6
+ return File.join("vendor", *args)
7
+ end
8
+
9
+ directory "vendor/" => ["vendor"] do |task, args|
10
+ mkdir task.name
11
+ end
12
+
13
+ def fetch(url, sha1, output)
14
+
15
+ puts "Downloading #{url}"
16
+ actual_sha1 = download(url, output)
17
+
18
+ if actual_sha1 != sha1
19
+ fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
20
+ end
21
+ end # def fetch
22
+
23
+ def file_fetch(url, sha1)
24
+ filename = File.basename( URI(url).path )
25
+ output = "vendor/#{filename}"
26
+ task output => [ "vendor/" ] do
27
+ begin
28
+ actual_sha1 = file_sha1(output)
29
+ if actual_sha1 != sha1
30
+ fetch(url, sha1, output)
31
+ end
32
+ rescue Errno::ENOENT
33
+ fetch(url, sha1, output)
34
+ end
35
+ end.invoke
36
+
37
+ return output
38
+ end
39
+
40
+ def file_sha1(path)
41
+ digest = Digest::SHA1.new
42
+ fd = File.new(path, "r")
43
+ while true
44
+ begin
45
+ digest << fd.sysread(16384)
46
+ rescue EOFError
47
+ break
48
+ end
49
+ end
50
+ return digest.hexdigest
51
+ ensure
52
+ fd.close if fd
53
+ end
54
+
55
+ def download(url, output)
56
+ uri = URI(url)
57
+ digest = Digest::SHA1.new
58
+ tmp = "#{output}.tmp"
59
+ Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
60
+ request = Net::HTTP::Get.new(uri.path)
61
+ http.request(request) do |response|
62
+ fail "HTTP fetch failed for #{url}. #{response}" if [200, 301].include?(response.code)
63
+ size = (response["content-length"].to_i || -1).to_f
64
+ count = 0
65
+ File.open(tmp, "w") do |fd|
66
+ response.read_body do |chunk|
67
+ fd.write(chunk)
68
+ digest << chunk
69
+ if size > 0 && $stdout.tty?
70
+ count += chunk.bytesize
71
+ $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
72
+ end
73
+ end
74
+ end
75
+ $stdout.write("\r \r") if $stdout.tty?
76
+ end
77
+ end
78
+
79
+ File.rename(tmp, output)
80
+
81
+ return digest.hexdigest
82
+ rescue SocketError => e
83
+ puts "Failure while downloading #{url}: #{e}"
84
+ raise
85
+ ensure
86
+ File.unlink(tmp) if File.exist?(tmp)
87
+ end # def download
88
+
89
+ def untar(tarball, &block)
90
+ require "archive/tar/minitar"
91
+ tgz = Zlib::GzipReader.new(File.open(tarball))
92
+ # Pull out typesdb
93
+ tar = Archive::Tar::Minitar::Input.open(tgz)
94
+ tar.each do |entry|
95
+ path = block.call(entry)
96
+ next if path.nil?
97
+ parent = File.dirname(path)
98
+
99
+ mkdir_p parent unless File.directory?(parent)
100
+
101
+ # Skip this file if the output file is the same size
102
+ if entry.directory?
103
+ mkdir path unless File.directory?(path)
104
+ else
105
+ entry_mode = entry.instance_eval { @mode } & 0777
106
+ if File.exists?(path)
107
+ stat = File.stat(path)
108
+ # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
109
+ # expose headers in the entry.
110
+ entry_size = entry.instance_eval { @size }
111
+ # If file sizes are same, skip writing.
112
+ next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
113
+ end
114
+ puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
115
+ File.open(path, "w") do |fd|
116
+ # eof? check lets us skip empty files. Necessary because the API provided by
117
+ # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
118
+ # IO object. Something about empty files in this EntryStream causes
119
+ # IO.copy_stream to throw "can't convert nil into String" on JRuby
120
+ # TODO(sissel): File a bug about this.
121
+ while !entry.eof?
122
+ chunk = entry.read(16384)
123
+ fd.write(chunk)
124
+ end
125
+ #IO.copy_stream(entry, fd)
126
+ end
127
+ File.chmod(entry_mode, path)
128
+ end
129
+ end
130
+ tar.close
131
+ File.unlink(tarball) if File.file?(tarball)
132
+ end # def untar
133
+
134
+ def ungz(file)
135
+
136
+ outpath = file.gsub('.gz', '')
137
+ tgz = Zlib::GzipReader.new(File.open(file))
138
+ begin
139
+ File.open(outpath, "w") do |out|
140
+ IO::copy_stream(tgz, out)
141
+ end
142
+ File.unlink(file)
143
+ rescue
144
+ File.unlink(outpath) if File.file?(outpath)
145
+ raise
146
+ end
147
+ tgz.close
148
+ end
149
+
150
+ desc "Process any vendor files required for this plugin"
151
+ task "vendor" do |task, args|
152
+
153
+ @files.each do |file|
154
+ download = file_fetch(file['url'], file['sha1'])
155
+ if download =~ /.tar.gz/
156
+ prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
157
+ untar(download) do |entry|
158
+ if !file['files'].nil?
159
+ next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
160
+ out = entry.full_name.split("/").last
161
+ end
162
+ File.join('vendor', out)
163
+ end
164
+ elsif download =~ /.gz/
165
+ ungz(download)
166
+ end
167
+ end
168
+
169
+ end
data/spec/outputs/file_spec.rb ADDED
@@ -0,0 +1,206 @@
1
+ # encoding: UTF-8
2
+ require "spec_helper"
3
+ require "logstash/outputs/file"
4
+ require "logstash/event"
5
+ require "logstash/json"
6
+ require "stud/temporary"
7
+ require "tempfile"
8
+ require "uri"
9
+
10
+ describe LogStash::Outputs::File do
11
+ describe "ship lots of events to a file" do
12
+ tmp_file = Tempfile.new('logstash-spec-output-file')
13
+ event_count = 10000 + rand(500)
14
+
15
+ config <<-CONFIG
16
+ input {
17
+ generator {
18
+ message => "hello world"
19
+ count => #{event_count}
20
+ type => "generator"
21
+ }
22
+ }
23
+ output {
24
+ file {
25
+ path => "#{tmp_file.path}"
26
+ }
27
+ }
28
+ CONFIG
29
+
30
+ agent do
31
+ line_num = 0
32
+
33
+ # Now check all events for order and correctness.
34
+ tmp_file.each_line do |line|
35
+ event = LogStash::Event.new(LogStash::Json.load(line))
36
+ insist {event["message"]} == "hello world"
37
+ insist {event["sequence"]} == line_num
38
+ line_num += 1
39
+ end
40
+
41
+ insist {line_num} == event_count
42
+ end # agent
43
+ end
44
+
45
+ describe "ship lots of events to a file gzipped" do
46
+ Stud::Temporary.file('logstash-spec-output-file') do |tmp_file|
47
+ event_count = 10000 + rand(500)
48
+
49
+ config <<-CONFIG
50
+ input {
51
+ generator {
52
+ message => "hello world"
53
+ count => #{event_count}
54
+ type => "generator"
55
+ }
56
+ }
57
+ output {
58
+ file {
59
+ path => "#{tmp_file.path}"
60
+ gzip => true
61
+ }
62
+ }
63
+ CONFIG
64
+
65
+ agent do
66
+ line_num = 0
67
+ # Now check all events for order and correctness.
68
+ Zlib::GzipReader.open(tmp_file.path).each_line do |line|
69
+ event = LogStash::Event.new(LogStash::Json.load(line))
70
+ insist {event["message"]} == "hello world"
71
+ insist {event["sequence"]} == line_num
72
+ line_num += 1
73
+ end
74
+ insist {line_num} == event_count
75
+ end # agent
76
+ end
77
+ end
78
+
79
+ describe "#register" do
80
+ it 'doesnt allow the path to start with a dynamic string' do
81
+ path = '/%{name}'
82
+ output = LogStash::Outputs::File.new({ "path" => path })
83
+ expect { output.register }.to raise_error(LogStash::ConfigurationError)
84
+ end
85
+
86
+ it 'doesnt allow the root directory to have some dynamic part' do
87
+ path = '/a%{name}/'
88
+ output = LogStash::Outputs::File.new({ "path" => path })
89
+ expect { output.register }.to raise_error(LogStash::ConfigurationError)
90
+
91
+ path = '/a %{name}/'
92
+ output = LogStash::Outputs::File.new({ "path" => path })
93
+ expect { output.register }.to raise_error(LogStash::ConfigurationError)
94
+
95
+ path = '/a- %{name}/'
96
+ output = LogStash::Outputs::File.new({ "path" => path })
97
+ expect { output.register }.to raise_error(LogStash::ConfigurationError)
98
+
99
+ path = '/a- %{name}'
100
+ output = LogStash::Outputs::File.new({ "path" => path })
101
+ expect { output.register }.to raise_error(LogStash::ConfigurationError)
102
+ end
103
+
104
+ it 'allow to have dynamic part after the file root' do
105
+ path = '/tmp/%{name}'
106
+ output = LogStash::Outputs::File.new({ "path" => path })
107
+ expect { output.register }.not_to raise_error
108
+ end
109
+ end
110
+
111
+ describe "receiving events" do
112
+ context "when using an interpolated path" do
113
+ context "when trying to write outside the files root directory" do
114
+ let(:bad_event) do
115
+ event = LogStash::Event.new
116
+ event['error'] = '../uncool/directory'
117
+ event
118
+ end
119
+
120
+ it 'writes the bad event in the specified error file' do
121
+ Stud::Temporary.directory('filepath_error') do |path|
122
+ config = {
123
+ "path" => "#{path}/%{error}",
124
+ "filename_failure" => "_error"
125
+ }
126
+
127
+ # Trying to write outside the file root
128
+ outside_path = "#{'../' * path.split(File::SEPARATOR).size}notcool"
129
+ bad_event["error"] = outside_path
130
+
131
+
132
+ output = LogStash::Outputs::File.new(config)
133
+ output.register
134
+ output.receive(bad_event)
135
+
136
+ error_file = File.join(path, config["filename_failure"])
137
+
138
+ expect(File.exist?(error_file)).to eq(true)
139
+ end
140
+ end
141
+
142
+ it 'doesnt decode relatives paths urlencoded' do
143
+ Stud::Temporary.directory('filepath_error') do |path|
144
+ encoded_once = "%2E%2E%2ftest" # ../test
145
+ encoded_twice = "%252E%252E%252F%252E%252E%252Ftest" # ../../test
146
+
147
+ output = LogStash::Outputs::File.new({ "path" => "/#{path}/%{error}"})
148
+ output.register
149
+
150
+ bad_event['error'] = encoded_once
151
+ output.receive(bad_event)
152
+
153
+ bad_event['error'] = encoded_twice
154
+ output.receive(bad_event)
155
+
156
+ expect(Dir.glob(File.join(path, "*")).size).to eq(2)
157
+ end
158
+ end
159
+
160
+ it 'doesnt write outside the file if the path is double escaped' do
161
+ Stud::Temporary.directory('filepath_error') do |path|
162
+ output = LogStash::Outputs::File.new({ "path" => "/#{path}/%{error}"})
163
+ output.register
164
+
165
+ bad_event['error'] = '../..//test'
166
+ output.receive(bad_event)
167
+
168
+ expect(Dir.glob(File.join(path, "*")).size).to eq(1)
169
+ end
170
+ end
171
+ end
172
+
173
+ context 'when trying to write inside the file root directory' do
174
+ it 'write the event to the generated filename' do
175
+ good_event = LogStash::Event.new
176
+ good_event['error'] = '42.txt'
177
+
178
+ Stud::Temporary.directory do |path|
179
+ config = { "path" => "#{path}/%{error}" }
180
+ output = LogStash::Outputs::File.new(config)
181
+ output.register
182
+ output.receive(good_event)
183
+
184
+ good_file = File.join(path, good_event['error'])
185
+ expect(File.exist?(good_file)).to eq(true)
186
+ end
187
+ end
188
+
189
+ it 'write the event to the generated filename with multiple deep' do
190
+ good_event = LogStash::Event.new
191
+ good_event['error'] = '/inside/errors/42.txt'
192
+
193
+ Stud::Temporary.directory do |path|
194
+ config = { "path" => "#{path}/%{error}" }
195
+ output = LogStash::Outputs::File.new(config)
196
+ output.register
197
+ output.receive(good_event)
198
+
199
+ good_file = File.join(path, good_event['error'])
200
+ expect(File.exist?(good_file)).to eq(true)
201
+ end
202
+ end
203
+ end
204
+ end
205
+ end
206
+ end
metadata ADDED
@@ -0,0 +1,89 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: logstash-output-file
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.1.0
5
+ platform: ruby
6
+ authors:
7
+ - Elasticsearch
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2014-11-06 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: logstash
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - ! '>='
18
+ - !ruby/object:Gem::Version
19
+ version: 1.4.0
20
+ - - <
21
+ - !ruby/object:Gem::Version
22
+ version: 2.0.0
23
+ type: :runtime
24
+ prerelease: false
25
+ version_requirements: !ruby/object:Gem::Requirement
26
+ requirements:
27
+ - - ! '>='
28
+ - !ruby/object:Gem::Version
29
+ version: 1.4.0
30
+ - - <
31
+ - !ruby/object:Gem::Version
32
+ version: 2.0.0
33
+ - !ruby/object:Gem::Dependency
34
+ name: logstash-input-generator
35
+ requirement: !ruby/object:Gem::Requirement
36
+ requirements:
37
+ - - ! '>='
38
+ - !ruby/object:Gem::Version
39
+ version: '0'
40
+ type: :runtime
41
+ prerelease: false
42
+ version_requirements: !ruby/object:Gem::Requirement
43
+ requirements:
44
+ - - ! '>='
45
+ - !ruby/object:Gem::Version
46
+ version: '0'
47
+ description: This output will write events to files on disk
48
+ email: richard.pijnenburg@elasticsearch.com
49
+ executables: []
50
+ extensions: []
51
+ extra_rdoc_files: []
52
+ files:
53
+ - .gitignore
54
+ - Gemfile
55
+ - LICENSE
56
+ - Rakefile
57
+ - lib/logstash/outputs/file.rb
58
+ - logstash-output-file.gemspec
59
+ - rakelib/publish.rake
60
+ - rakelib/vendor.rake
61
+ - spec/outputs/file_spec.rb
62
+ homepage: http://logstash.net/
63
+ licenses:
64
+ - Apache License (2.0)
65
+ metadata:
66
+ logstash_plugin: 'true'
67
+ group: output
68
+ post_install_message:
69
+ rdoc_options: []
70
+ require_paths:
71
+ - lib
72
+ required_ruby_version: !ruby/object:Gem::Requirement
73
+ requirements:
74
+ - - ! '>='
75
+ - !ruby/object:Gem::Version
76
+ version: '0'
77
+ required_rubygems_version: !ruby/object:Gem::Requirement
78
+ requirements:
79
+ - - ! '>='
80
+ - !ruby/object:Gem::Version
81
+ version: '0'
82
+ requirements: []
83
+ rubyforge_project:
84
+ rubygems_version: 2.4.1
85
+ signing_key:
86
+ specification_version: 4
87
+ summary: This output will write events to files on disk
88
+ test_files:
89
+ - spec/outputs/file_spec.rb