logstash-input-s3 0.1.5 → 0.1.6

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 7cca9ff1627bc43568a938921989e908cd471d7a
- data.tar.gz: 75cbc3f6cae6d8e98f2165193d04f10d3877b343
+ metadata.gz: aa4fc81331c80d28e2f60a1b25272416d75a5164
+ data.tar.gz: 6cdab7b995fad5a9568e968c81a8b27874e394e3
  SHA512:
- metadata.gz: dfaf185985c5b89b26b923528772ebe0f12216db2002289f8e8a31470868f8e16e248c82bbf72b66052a1c6dcd0cde385a6ee6247b7651191c0c3e4f10196141
- data.tar.gz: b88072c54d0fc0eaf6466eb293e1c646e14ba937dac3886774876b90a00aad6768f161b94786d65d9fc815194016f3e24797bd50063a67daebaa5ebb2c60d383
+ metadata.gz: 63b24fc3c5fab419ccfa85a8d181afb0813665b676e1d3388e45669efd71c686483af57bbe8949f6019a6beda0517f442ee9cd230465db1fec32d6dac9c266e3
+ data.tar.gz: d0edf178a0d260107e85eab199950fc4bf8a78ef58b26e963f944a4a54a1fe39280ec5a1ff526c7c052e9407941890d66ea2e4cf7fc4e8c1225304216d72c955
data/.gitignore CHANGED
@@ -2,3 +2,4 @@
  Gemfile.lock
  .bundle
  vendor
+ coverage/
data/CONTRIBUTORS CHANGED
@@ -11,6 +11,7 @@ Contributors:
  * Richard Pijnenburg (electrical)
  * Suyog Rao (suyograo)
  * Ted Timmons (tedder)
+ * Ryan O'Keeffe (danielredoak)
 
  Note: If you've sent us patches, bug reports, or otherwise contributed to
  Logstash, and you aren't on the list above and want to be, please let us know
data/lib/logstash/inputs/s3.rb CHANGED
@@ -17,7 +17,7 @@ class LogStash::Inputs::S3 < LogStash::Inputs::Base
 
  config_name "s3"
 
- default :codec, "line"
+ default :codec, "plain"
 
  # DEPRECATED: The credentials of the AWS account used to access the bucket.
  # Credentials can be specified:
@@ -64,6 +64,10 @@ class LogStash::Inputs::S3 < LogStash::Inputs::Base
  # Ruby style regexp of keys to exclude from the bucket
  config :exclude_pattern, :validate => :string, :default => nil
 
+ # Set the directory where logstash will store the tmp files before processing them.
+ # default to the current OS temporary directory in linux /tmp/logstash
+ config :temporary_directory, :validate => :string, :default => File.join(Dir.tmpdir, "logstash")
+
  public
  def register
  require "digest/md5"
@@ -89,7 +93,6 @@ class LogStash::Inputs::S3 < LogStash::Inputs::Base
  end
  end # def register
 
-
  public
  def run(queue)
  Stud.interval(@interval) do
@@ -114,7 +117,6 @@ class LogStash::Inputs::S3 < LogStash::Inputs::Base
  return objects.keys.sort {|a,b| objects[a] <=> objects[b]}
  end # def fetch_new_files
 
-
  public
  def backup_to_bucket(object, key)
  unless @backup_to_bucket.nil?
@@ -134,13 +136,116 @@ class LogStash::Inputs::S3 < LogStash::Inputs::Base
  end
  end
 
+ public
+ def process_files(queue)
+ objects = list_new_files
+
+ objects.each do |key|
+ @logger.debug("S3 input processing", :bucket => @bucket, :key => key)
+
+ lastmod = @s3bucket.objects[key].last_modified
+
+ process_log(queue, key)
+
+ sincedb.write(lastmod)
+ end
+ end # def process_files
+
+
  private
  def process_local_log(queue, filename)
- @codec.decode(File.open(filename, 'rb')) do |event|
- decorate(event)
- queue << event
+ @logger.debug('Processing file', :filename => filename)
+
+ metadata = {}
+ # Currently codecs operates on bytes instead of stream.
+ # So all IO stuff: decompression, reading need to be done in the actual
+ # input and send as bytes to the codecs.
+ read_file(filename) do |line|
+ @codec.decode(line) do |event|
+ # We are making an assumption concerning cloudfront
+ # log format, the user will use the plain or the line codec
+ # and the message key will represent the actual line content.
+ # If the event is only metadata the event will be drop.
+ # This was the behavior of the pre 1.5 plugin.
+ #
+ # The line need to go through the codecs to replace
+ # unknown bytes in the log stream before doing a regexp match or
+ # you will get a `Error: invalid byte sequence in UTF-8'
+ if event_is_metadata?(event)
+ @logger.debug('Event is metadata, updating the current cloudfront metadata', :event => event)
+ update_metadata(metadata, event)
+ else
+ decorate(event)
+
+ event["cloudfront_version"] = metadata[:cloudfront_version] unless metadata[:cloudfront_version].nil?
+ event["cloudfront_fields"] = metadata[:cloudfront_fields] unless metadata[:cloudfront_fields].nil?
+
+ queue << event
+ end
+ end
  end
  end # def process_local_log
+
+ private
+ def event_is_metadata?(event)
+ line = event['message']
+ version_metadata?(line) || fields_metadata?(line)
+ end
+
+ private
+ def version_metadata?(line)
+ line.start_with?('#Version: ')
+ end
+
+ private
+ def fields_metadata?(line)
+ line.start_with?('#Fields: ')
+ end
+
+ private
+ def update_metadata(metadata, event)
+ line = event['message'].strip
+
+ if version_metadata?(line)
+ metadata[:cloudfront_version] = line.split(/#Version: (.+)/).last
+ end
+
+ if fields_metadata?(line)
+ metadata[:cloudfront_fields] = line.split(/#Fields: (.+)/).last
+ end
+ end
+
+ private
+ def read_file(filename, &block)
+ if gzip?(filename)
+ read_gzip_file(filename, block)
+ else
+ read_plain_file(filename, block)
+ end
+ end
+
+ def read_plain_file(filename, block)
+ File.open(filename, 'rb') do |file|
+ file.each(&block)
+ end
+ end
+
+ private
+ def read_gzip_file(filename, block)
+ begin
+ Zlib::GzipReader.open(filename) do |decoder|
+ decoder.each_line { |line| block.call(line) }
+ end
+ rescue Zlib::Error, Zlib::GzipFile::Error => e
+ @logger.error("Gzip codec: We cannot uncompress the gzip file", :filename => filename)
+ raise e
+ end
+ end
+
+ private
+ def gzip?(filename)
+ filename.end_with?('.gz')
+ end
 
  private
  def sincedb
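The CloudFront support above hinges on the literal '#Version: ' and '#Fields: ' header lines: update_metadata captures their values and process_local_log copies them onto every following event. A self-contained sketch of that parsing step, reusing the same regexp split; the sample header values mirror the cloudfront.log fixture added later in this diff:

    # Stand-alone illustration of the header parsing done by update_metadata.
    metadata = {}

    [
      "#Version: 1.0",
      "#Fields: date time x-edge-location c-ip x-event sc-bytes"
    ].each do |line|
      if line.start_with?('#Version: ')
        # splitting on a capturing group returns ["", "1.0"], so .last is the value
        metadata[:cloudfront_version] = line.split(/#Version: (.+)/).last
      elsif line.start_with?('#Fields: ')
        metadata[:cloudfront_fields] = line.split(/#Fields: (.+)/).last
      end
    end

    puts metadata[:cloudfront_version] # => "1.0"
    puts metadata[:cloudfront_fields]  # => "date time x-edge-location c-ip x-event sc-bytes"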
@@ -148,7 +253,8 @@ class LogStash::Inputs::S3 < LogStash::Inputs::Base
  @logger.info("Using default generated file for the sincedb", :filename => sincedb_file)
  SinceDB::File.new(sincedb_file)
  else
- @logger.error("S3 input: Configuration error, no HOME or sincedb_path set")
+ @logger.info("Using the provided sincedb_path",
+ :sincedb_path => @sincedb_path)
  SinceDB::File.new(@sincedb_path)
  end
  end
@@ -158,23 +264,11 @@ class LogStash::Inputs::S3 < LogStash::Inputs::Base
  File.join(ENV["HOME"], ".sincedb_" + Digest::MD5.hexdigest("#{@bucket}+#{@prefix}"))
  end
 
- private
- def process_files(queue, since=nil)
- objects = list_new_files
- objects.each do |key|
- @logger.debug("S3 input processing", :bucket => @bucket, :key => key)
-
- lastmod = @s3bucket.objects[key].last_modified
-
- process_log(queue, key)
-
- sincedb.write(lastmod)
- end
- end # def process_files
-
  private
  def ignore_filename?(filename)
- if (@backup_add_prefix && @backup_to_bucket == @bucket && filename =~ /^#{backup_add_prefix}/)
+ if @prefix == filename
+ return true
+ elsif (@backup_add_prefix && @backup_to_bucket == @bucket && filename =~ /^#{backup_add_prefix}/)
  return true
  elsif @exclude_pattern.nil?
  return false
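The new first branch in ignore_filename? is there because an S3 listing scoped to a prefix such as 'mysource/' can include the prefix placeholder key itself, which has no log content to process. A tiny sketch of the effect, using plain arrays rather than AWS calls; the key names are hypothetical:

    # Hypothetical listing under prefix 'mysource/': the placeholder key equals
    # the prefix and is skipped; real objects are kept for processing.
    prefix = 'mysource/'
    keys   = ['mysource/', 'mysource/access.log']

    processable = keys.reject { |key| key == prefix }
    puts processable.inspect # => ["mysource/access.log"]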
@@ -189,9 +283,7 @@ class LogStash::Inputs::S3 < LogStash::Inputs::Base
  def process_log(queue, key)
  object = @s3bucket.objects[key]
 
- tmp = Stud::Temporary.directory("logstash-")
-
- filename = File.join(tmp, File.basename(key))
+ filename = File.join(temporary_directory, File.basename(key))
 
  download_remote_file(object, filename)
 
data/logstash-input-s3.gemspec CHANGED
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|
 
  s.name = 'logstash-input-s3'
- s.version = '0.1.5'
+ s.version = '0.1.6'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Stream events from files from a S3 bucket."
  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
@@ -22,10 +22,10 @@ Gem::Specification.new do |s|
  # Gem dependencies
  s.add_runtime_dependency "logstash-core", '>= 1.4.0', '< 2.0.0'
  s.add_runtime_dependency 'logstash-mixin-aws'
- s.add_runtime_dependency 'logstash-codec-line'
- s.add_runtime_dependency 'aws-sdk'
  s.add_runtime_dependency 'stud', '~> 0.0.18'
 
  s.add_development_dependency 'logstash-devutils'
+ s.add_development_dependency 'simplecov'
+ s.add_development_dependency 'coveralls'
  end
 
data/spec/fixtures/cloudfront.log ADDED
@@ -0,0 +1,4 @@
+ #Version: 1.0
+ #Fields: date time x-edge-location c-ip x-event sc-bytes x-cf-status x-cf-client-id cs-uri-stem cs-uri-query c-referrer x-page-url​ c-user-agent x-sname x-sname-query x-file-ext x-sid
+ 2010-03-12 23:51:20 SEA4 192.0.2.147 connect 2014 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st​ key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 - - - -
+ 2010-03-12 23:51:21 SEA4 192.0.2.222 play 3914 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st​ key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 myvideo p=2&q=4 flv 1
data/spec/fixtures/compressed.log.gz ADDED
Binary file
data/spec/fixtures/invalid_utf8.log ADDED
@@ -0,0 +1,2 @@
+ 2015-01-01T02:52:45.866722Z no "GET http://www.logstash.com:80/utfmadness/≈4od HTTP/1.1"
+
data/spec/fixtures/uncompressed.log ADDED
@@ -0,0 +1,2 @@
+ 2010-03-12 23:51:20 SEA4 192.0.2.147 connect 2014 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st​ key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 - - - -
+ 2010-03-12 23:51:21 SEA4 192.0.2.222 play 3914 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st​ key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 myvideo p=2&q=4 flv 1
data/spec/inputs/s3_spec.rb CHANGED
@@ -2,9 +2,9 @@
  require "logstash/devutils/rspec/spec_helper"
  require "logstash/inputs/s3"
  require "logstash/errors"
-
  require "aws-sdk"
  require "stud/temporary"
+ require "spec/support/helpers"
 
  describe LogStash::Inputs::S3 do
  before do
@@ -23,7 +23,7 @@ describe LogStash::Inputs::S3 do
  describe "#list_new_files" do
  before { allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:with_prefix).with(nil) { objects_list } }
 
- let(:present_object) { double(:key => 'this-should-be-present', :last_modified => Time.now) }
+ let!(:present_object) { double(:key => 'this-should-be-present', :last_modified => Time.now) }
  let(:objects_list) {
  [
  double(:key => 'exclude-this-file-1', :last_modified => Time.now - 2 * day),
@@ -69,6 +69,21 @@ describe LogStash::Inputs::S3 do
  expect(config.list_new_files).to eq([present_object.key])
  end
 
+ it 'should ignore file if the file match the prefix' do
+ prefix = 'mysource/'
+
+ objects_list = [
+ double(:key => prefix, :last_modified => Time.now),
+ present_object
+ ]
+
+ allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:with_prefix).with(prefix) { objects_list }
+
+ config = LogStash::Inputs::S3.new(settings.merge({ 'prefix' => prefix }))
+ config.register
+ expect(config.list_new_files).to eq([present_object.key])
+ end
+
  it 'should sort return object sorted by last_modification date with older first' do
  objects = [
  double(:key => 'YESTERDAY', :last_modified => Time.now - day),
@@ -144,4 +159,58 @@ describe LogStash::Inputs::S3 do
  end
  end
  end
+
+ context 'when working with logs' do
+ let(:objects) { [log] }
+ let(:log) { double(:key => 'uncompressed.log', :last_modified => Time.now - 2 * day) }
+
+ before do
+ allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:with_prefix).with(nil) { objects }
+ allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:[]).with(log.key) { log }
+ expect(log).to receive(:read) { |&block| block.call(File.read(log_file)) }
+ end
+
+ context 'compressed' do
+ let(:log) { double(:key => 'log.gz', :last_modified => Time.now - 2 * day) }
+ let(:log_file) { File.join('spec', 'fixtures', 'compressed.log.gz') }
+
+ it 'should process events' do
+ events = fetch_events(settings)
+ expect(events.size).to eq(2)
+ end
+ end
+
+ context 'plain text' do
+ let(:log_file) { File.join('spec', 'fixtures', 'uncompressed.log') }
+
+ it 'should process events' do
+ events = fetch_events(settings)
+ expect(events.size).to eq(2)
+ end
+ end
+
+ context 'encoded' do
+ let(:log_file) { File.join('spec', 'fixtures', 'invalid_utf8.log') }
+
+ it 'should work with invalid utf-8 log event' do
+ events = fetch_events(settings)
+ expect(events.size).to eq(2)
+ end
+ end
+
+ context 'cloudfront' do
+ let(:log_file) { File.join('spec', 'fixtures', 'cloudfront.log') }
+
+ it 'should extract metadata from cloudfront log' do
+ events = fetch_events(settings)
+
+ expect(events.size).to eq(2)
+
+ events.each do |event|
+ expect(event['cloudfront_fields']).to eq('date time x-edge-location c-ip x-event sc-bytes x-cf-status x-cf-client-id cs-uri-stem cs-uri-query c-referrer x-page-url​ c-user-agent x-sname x-sname-query x-file-ext x-sid')
+ expect(event['cloudfront_version']).to eq('1.0')
+ end
+ end
+ end
+ end
  end
data/spec/integration/s3_spec.rb ADDED
@@ -0,0 +1,61 @@
+ require "logstash/devutils/rspec/spec_helper"
+ require "spec/support/helpers"
+ require "logstash/inputs/s3"
+ require "aws-sdk"
+ require "fileutils"
+
+ describe LogStash::Inputs::S3, :integration => true, :s3 => true do
+ before do
+ Thread.abort_on_exception = true
+
+ upload_file('../fixtures/uncompressed.log' , "#{prefix}uncompressed_1.log")
+ upload_file('../fixtures/compressed.log.gz', "#{prefix}compressed_1.log.gz")
+ end
+
+ after do
+ delete_remote_files(prefix)
+ FileUtils.rm_rf(temporary_directory)
+ delete_remote_files(backup_prefix)
+ end
+
+ let(:temporary_directory) { Stud::Temporary.directory }
+ let(:prefix) { 'logstash-s3-input-prefix/' }
+
+ let(:minimal_settings) { { "access_key_id" => ENV['AWS_ACCESS_KEY_ID'],
+ "secret_access_key" => ENV['AWS_SECRET_ACCESS_KEY'],
+ "bucket" => ENV['AWS_LOGSTASH_TEST_BUCKET'],
+ "region" => ENV["AWS_REGION"] || "us-east-1",
+ "prefix" => prefix,
+ "temporary_directory" => temporary_directory } }
+ let(:backup_prefix) { "backup/" }
+
+ it "support prefix to scope the remote files" do
+ events = fetch_events(minimal_settings)
+ expect(events.size).to eq(4)
+ end
+
+
+ it "add a prefix to the file" do
+ fetch_events(minimal_settings.merge({ "backup_to_bucket" => ENV["AWS_LOGSTASH_TEST_BUCKET"],
+ "backup_add_prefix" => backup_prefix }))
+ expect(list_remote_files(backup_prefix).size).to eq(2)
+ end
+
+ it "allow you to backup to a local directory" do
+ Stud::Temporary.directory do |backup_dir|
+ fetch_events(minimal_settings.merge({ "backup_to_dir" => backup_dir }))
+ expect(Dir.glob(File.join(backup_dir, "*")).size).to eq(2)
+ end
+ end
+
+ context "remote backup" do
+ it "another bucket" do
+ fetch_events(minimal_settings.merge({ "backup_to_bucket" => "logstash-s3-input-backup"}))
+ expect(list_remote_files("", "logstash-s3-input-backup").size).to eq(2)
+ end
+
+ after do
+ delete_bucket("logstash-s3-input-backup")
+ end
+ end
+ end
data/spec/support/helpers.rb ADDED
@@ -0,0 +1,34 @@
+ def fetch_events(settings)
+ queue = []
+ s3 = LogStash::Inputs::S3.new(settings)
+ s3.register
+ s3.process_files(queue)
+ s3.teardown
+ queue
+ end
+
+ # delete_files(prefix)
+ def upload_file(local_file, remote_name)
+ bucket = s3object.buckets[ENV['AWS_LOGSTASH_TEST_BUCKET']]
+ file = File.expand_path(File.join(File.dirname(__FILE__), local_file))
+ bucket.objects[remote_name].write(:file => file)
+ end
+
+ def delete_remote_files(prefix)
+ bucket = s3object.buckets[ENV['AWS_LOGSTASH_TEST_BUCKET']]
+ bucket.objects.with_prefix(prefix).each { |object| object.delete }
+ end
+
+ def list_remote_files(prefix, target_bucket = ENV['AWS_LOGSTASH_TEST_BUCKET'])
+ bucket = s3object.buckets[target_bucket]
+ bucket.objects.with_prefix(prefix).collect(&:key)
+ end
+
+ def delete_bucket(name)
+ s3object.buckets[name].objects.map(&:delete)
+ s3object.buckets[name].delete
+ end
+
+ def s3object
+ AWS::S3.new
+ end
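For reference, the integration spec above exercises these helpers in roughly this order; a sketch assuming the AWS credentials and test bucket are supplied through the environment variables named in minimal_settings:

    require "stud/temporary"

    # Sketch of a helper-driven round trip (mirrors spec/integration/s3_spec.rb).
    prefix   = "logstash-s3-input-prefix/"
    settings = {
      "access_key_id"       => ENV['AWS_ACCESS_KEY_ID'],
      "secret_access_key"   => ENV['AWS_SECRET_ACCESS_KEY'],
      "bucket"              => ENV['AWS_LOGSTASH_TEST_BUCKET'],
      "prefix"              => prefix,
      "temporary_directory" => Stud::Temporary.directory
    }

    upload_file('../fixtures/uncompressed.log', "#{prefix}uncompressed_1.log")
    events = fetch_events(settings)   # register -> process_files -> teardown
    delete_remote_files(prefix)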
metadata CHANGED
@@ -1,17 +1,18 @@
  --- !ruby/object:Gem::Specification
  name: logstash-input-s3
  version: !ruby/object:Gem::Version
- version: 0.1.5
+ version: 0.1.6
  platform: ruby
  authors:
  - Elasticsearch
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2015-02-26 00:00:00.000000000 Z
+ date: 2015-03-04 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- requirement: !ruby/object:Gem::Requirement
+ name: logstash-core
+ version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
@@ -19,10 +20,7 @@ dependencies:
  - - <
  - !ruby/object:Gem::Version
  version: 2.0.0
- name: logstash-core
- prerelease: false
- type: :runtime
- version_requirements: !ruby/object:Gem::Requirement
+ requirement: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
@@ -30,76 +28,78 @@ dependencies:
  - - <
  - !ruby/object:Gem::Version
  version: 2.0.0
- - !ruby/object:Gem::Dependency
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - '>='
- - !ruby/object:Gem::Version
- version: '0'
- name: logstash-mixin-aws
  prerelease: false
  type: :runtime
+ - !ruby/object:Gem::Dependency
+ name: logstash-mixin-aws
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
  version: '0'
- - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
  version: '0'
- name: logstash-codec-line
  prerelease: false
  type: :runtime
+ - !ruby/object:Gem::Dependency
+ name: stud
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - '>='
+ - - ~>
  - !ruby/object:Gem::Version
- version: '0'
- - !ruby/object:Gem::Dependency
+ version: 0.0.18
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - '>='
+ - - ~>
  - !ruby/object:Gem::Version
- version: '0'
- name: aws-sdk
+ version: 0.0.18
  prerelease: false
  type: :runtime
+ - !ruby/object:Gem::Dependency
+ name: logstash-devutils
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
  version: '0'
- - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ~>
+ - - '>='
  - !ruby/object:Gem::Version
- version: 0.0.18
- name: stud
+ version: '0'
  prerelease: false
- type: :runtime
+ type: :development
+ - !ruby/object:Gem::Dependency
+ name: simplecov
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ~>
+ - - '>='
  - !ruby/object:Gem::Version
- version: 0.0.18
- - !ruby/object:Gem::Dependency
+ version: '0'
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
  version: '0'
- name: logstash-devutils
  prerelease: false
  type: :development
+ - !ruby/object:Gem::Dependency
+ name: coveralls
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
  version: '0'
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - '>='
+ - !ruby/object:Gem::Version
+ version: '0'
+ prerelease: false
+ type: :development
  description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
  email: info@elasticsearch.com
  executables: []
@@ -114,7 +114,13 @@ files:
  - Rakefile
  - lib/logstash/inputs/s3.rb
  - logstash-input-s3.gemspec
+ - spec/fixtures/cloudfront.log
+ - spec/fixtures/compressed.log.gz
+ - spec/fixtures/invalid_utf8.log
+ - spec/fixtures/uncompressed.log
  - spec/inputs/s3_spec.rb
+ - spec/integration/s3_spec.rb
+ - spec/support/helpers.rb
  homepage: http://www.elasticsearch.org/guide/en/logstash/current/index.html
  licenses:
  - Apache License (2.0)
@@ -137,9 +143,15 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.4.5
+ rubygems_version: 2.1.9
  signing_key:
  specification_version: 4
  summary: Stream events from files from a S3 bucket.
  test_files:
+ - spec/fixtures/cloudfront.log
+ - spec/fixtures/compressed.log.gz
+ - spec/fixtures/invalid_utf8.log
+ - spec/fixtures/uncompressed.log
  - spec/inputs/s3_spec.rb
+ - spec/integration/s3_spec.rb
+ - spec/support/helpers.rb