logstash-input-s3 0.1.1 → 0.1.2

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: 18491f4d5a8850a30b8bd5b25c33cc2a0dd9c247
-   data.tar.gz: c3017df48dd6ea484f8afb81e77d5b1e085d9d11
+   metadata.gz: 1be6737259a6ffb9f0f0734931ab2f495912bac4
+   data.tar.gz: f794cf9ce4f876bb3ecd81ab0f167cefc1fd692d
  SHA512:
-   metadata.gz: 8d112c0f09cbfe563eebce342812e8082b125b660560a250971b81a16d12584cad7303cc2b8b37fd2c6dfb2d64a0b44b44b843e133a22cde89971b1568bc38f8
-   data.tar.gz: ecec7bab79976f09f84f72b516ebe95dfbc6e07998f23ff581e17e275ab994259707e311aed5441bb38be5557bbed8ca377abfeb41f701fdcd8e79716954a51e
+   metadata.gz: 97dbbbda141e669cdad3bfce516764e649ccf929d72509d858e86211f772fb8f2e91311b945ff7576616a941f161afe41faa57769913b83e72b9697a6e073140
+   data.tar.gz: d6235bf2a17418da6108349144dfef750526eaa2405a09607e49db1748e76f88f5e14851a6384fbe06a4ae26cde4d7de2187a069aba3b6664c9c2e9fe3c25ef7
lib/logstash/inputs/s3.rb CHANGED
@@ -1,44 +1,41 @@
  # encoding: utf-8
  require "logstash/inputs/base"
  require "logstash/namespace"
+ require "logstash/plugin_mixins/aws_config"
 
  require "time"
  require "tmpdir"
+ require "stud/interval"
+ require "stud/temporary"
 
  # Stream events from files from a S3 bucket.
  #
  # Each line from each file generates an event.
  # Files ending in `.gz` are handled as gzip'ed files.
  class LogStash::Inputs::S3 < LogStash::Inputs::Base
+   include LogStash::PluginMixins::AwsConfig
+
    config_name "s3"
    milestone 1
 
-   # TODO(sissel): refactor to use `line` codec (requires removing both gzip
-   # support and readline usage). Support gzip through a gzip codec! ;)
-   default :codec, "plain"
+   default :codec, "line"
 
-   # The credentials of the AWS account used to access the bucket.
+   # DEPRECATED: The credentials of the AWS account used to access the bucket.
    # Credentials can be specified:
-   # - As an `["id","secret"]` array
-   # - As a path to a file containing `AWS_ACCESS_KEY_ID=...` and `AWS_SECRET_ACCESS_KEY=...`
-   # - In the environment, if not set (using variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`)
-   config :credentials, :validate => :array, :default => []
+   # - As an ["id","secret"] array
+   # - As a path to a file containing AWS_ACCESS_KEY_ID=... and AWS_SECRET_ACCESS_KEY=...
+   # - In the environment, if not set (using variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY)
+   config :credentials, :validate => :array, :default => [], :deprecated => "This only exists to be backwards compatible. This plugin now uses the AwsConfig from PluginMixins"
 
    # The name of the S3 bucket.
    config :bucket, :validate => :string, :required => true
 
-   # The AWS region for your bucket.
-   config :region, :validate => ["us-east-1", "us-west-1", "us-west-2",
-                   "eu-west-1", "ap-southeast-1", "ap-southeast-2",
-                   "ap-northeast-1", "sa-east-1", "us-gov-west-1"],
-                   :deprecated => "'region' has been deprecated in favor of 'region_endpoint'"
-
    # The AWS region for your bucket.
    config :region_endpoint, :validate => ["us-east-1", "us-west-1", "us-west-2",
                    "eu-west-1", "ap-southeast-1", "ap-southeast-2",
-                   "ap-northeast-1", "sa-east-1", "us-gov-west-1"], :default => "us-east-1"
+                   "ap-northeast-1", "sa-east-1", "us-gov-west-1"], :deprecated => "This only exists to be backwards compatible. This plugin now uses the AwsConfig from PluginMixins"
 
-   # If specified, the prefix the filenames in the bucket must match (not a regexp)
+   # If specified, the prefix of filenames in the bucket must match (not a regexp)
    config :prefix, :validate => :string, :default => nil
 
    # Where to write the since database (keeps track of the date
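The hunk above wires the plugin into the shared LogStash::PluginMixins::AwsConfig mixin, demotes `credentials` and `region_endpoint` to deprecated aliases, and switches the default codec from `plain` to `line`. A minimal sketch of the two instantiation styles, built the same way the updated spec later in this diff builds the plugin; the bucket name and key values are placeholders:

    # Deprecated style: credentials passed as an ["id","secret"] array.
    legacy = LogStash::Inputs::S3.new(
      "bucket"      => "logstash-test",
      "credentials" => ["AKIDEXAMPLE", "SECRETEXAMPLE"])

    # Mixin style: discrete options contributed by LogStash::PluginMixins::AwsConfig.
    current = LogStash::Inputs::S3.new(
      "bucket"            => "logstash-test",
      "access_key_id"     => "AKIDEXAMPLE",
      "secret_access_key" => "SECRETEXAMPLE")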
@@ -50,6 +47,11 @@ class LogStash::Inputs::S3 < LogStash::Inputs::Base
    # Name of a S3 bucket to backup processed files to.
    config :backup_to_bucket, :validate => :string, :default => nil
 
+   # Append a prefix to the key (full path including file name in s3) after processing.
+   # If backing up to another (or the same) bucket, this effectively lets you
+   # choose a new 'folder' to place the files in
+   config :backup_add_prefix, :validate => :string, :default => nil
+
    # Path of a local directory to backup processed files to.
    config :backup_to_dir, :validate => :string, :default => nil
 
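`backup_add_prefix` combines with the existing backup options by plain string concatenation (see `backup_to_bucket` later in this diff). A sketch of the key computation with assumed values:

    backup_add_prefix = "processed/"
    key               = "2015/01/16/access.log"
    backup_key        = "#{backup_add_prefix}#{key}"  # => "processed/2015/01/16/access.log"

When the backup bucket is the source bucket itself, keys that already carry this prefix are skipped on later runs (see `ignore_filename?` below), so backed-up files are not processed twice.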
@@ -60,61 +62,19 @@ class LogStash::Inputs::S3 < LogStash::Inputs::Base
    # Value is in seconds.
    config :interval, :validate => :number, :default => 60
 
+   # Ruby style regexp of keys to exclude from the bucket
+   config :exclude_pattern, :validate => :string, :default => nil
+
    public
    def register
      require "digest/md5"
      require "aws-sdk"
 
-     @region_endpoint = @region if @region && !@region.empty?
-
-     @logger.info("Registering s3 input", :bucket => @bucket, :region_endpoint => @region_endpoint)
-
-     if @credentials.length == 0
-       @access_key_id = ENV['AWS_ACCESS_KEY_ID']
-       @secret_access_key = ENV['AWS_SECRET_ACCESS_KEY']
-     elsif @credentials.length == 1
-       File.open(@credentials[0]) { |f| f.each do |line|
-         unless (/^\#/.match(line))
-           if(/\s*=\s*/.match(line))
-             param, value = line.split('=', 2)
-             param = param.chomp().strip()
-             value = value.chomp().strip()
-             if param.eql?('AWS_ACCESS_KEY_ID')
-               @access_key_id = value
-             elsif param.eql?('AWS_SECRET_ACCESS_KEY')
-               @secret_access_key = value
-             end
-           end
-         end
-       end
-       }
-     elsif @credentials.length == 2
-       @access_key_id = @credentials[0]
-       @secret_access_key = @credentials[1]
-     else
-       raise ArgumentError.new('Credentials must be of the form "/path/to/file" or ["id", "secret"]')
-     end
-
-     if @access_key_id.nil? or @secret_access_key.nil?
-       raise ArgumentError.new('Missing AWS credentials')
-     end
-
-     if @bucket.nil?
-       raise ArgumentError.new('Missing AWS bucket')
-     end
+     @region = get_region
 
-     if @sincedb_path.nil?
-       if ENV['HOME'].nil?
-         raise ArgumentError.new('No HOME or sincedb_path set')
-       end
-       @sincedb_path = File.join(ENV["HOME"], ".sincedb_" + Digest::MD5.hexdigest("#{@bucket}+#{@prefix}"))
-     end
+     @logger.info("Registering s3 input", :bucket => @bucket, :region => @region)
 
-     s3 = AWS::S3.new(
-       :access_key_id => @access_key_id,
-       :secret_access_key => @secret_access_key,
-       :region => @region_endpoint
-     )
+     s3 = get_s3object
 
      @s3bucket = s3.buckets[@bucket]
 
@@ -128,152 +88,212 @@ class LogStash::Inputs::S3 < LogStash::Inputs::Base
      unless @backup_to_dir.nil?
        Dir.mkdir(@backup_to_dir, 0700) unless File.exists?(@backup_to_dir)
      end
-
    end # def register
 
+
    public
    def run(queue)
-     loop do
-       process_new(queue)
-       sleep(@interval)
+     Stud.interval(@interval) do
+       process_files(queue)
      end
-     finished
    end # def run
 
-   private
-   def process_new(queue, since=nil)
+   public
+   def list_new_files
+     objects = {}
 
-     if since.nil?
-       since = sincedb_read()
-     end
+     @s3bucket.objects.with_prefix(@prefix).each do |log|
+       @logger.debug("S3 input: Found key", :key => log.key)
 
-     objects = list_new(since)
-     objects.each do |k|
-       @logger.debug("S3 input processing", :bucket => @bucket, :key => k)
-       lastmod = @s3bucket.objects[k].last_modified
-       process_log(queue, k)
-       sincedb_write(lastmod)
+       unless ignore_filename?(log.key)
+         if sincedb.newer?(log.last_modified)
+           objects[log.key] = log.last_modified
+           @logger.debug("S3 input: Adding to objects[]", :key => log.key)
+         end
+       end
      end
+     return objects.keys.sort {|a,b| objects[a] <=> objects[b]}
+   end # def fetch_new_files
 
-   end # def process_new
 
-   private
-   def list_new(since=nil)
-
-     if since.nil?
-       since = Time.new(0)
+   public
+   def backup_to_bucket(object, key)
+     unless @backup_to_bucket.nil?
+       backup_key = "#{@backup_add_prefix}#{key}"
+       if @delete
+         object.move_to(backup_key, :bucket => @backup_bucket)
+       else
+         object.copy_to(backup_key, :bucket => @backup_bucket)
+       end
      end
+   end
 
-     objects = {}
-     @s3bucket.objects.with_prefix(@prefix).each do |log|
-       if log.last_modified > since
-         objects[log.key] = log.last_modified
-       end
+   public
+   def backup_to_dir(filename)
+     unless @backup_to_dir.nil?
+       FileUtils.cp(filename, @backup_to_dir)
      end
+   end
 
-     return sorted_objects = objects.keys.sort {|a,b| objects[a] <=> objects[b]}
+   private
+   def process_local_log(queue, filename)
+     @codec.decode(File.open(filename, 'rb')) do |event|
+       decorate(event)
+       queue << event
+     end
+   end # def process_local_log
+
+   private
+   def sincedb
+     @sincedb ||= if @sincedb_path.nil?
+                    @logger.info("Using default generated file for the sincedb", :filename => sincedb_file)
+                    SinceDB::File.new(sincedb_file)
+                  else
+                    @logger.error("S3 input: Configuration error, no HOME or sincedb_path set")
+                    SinceDB::File.new(@sincedb_path)
+                  end
+   end
 
-   end # def list_new
+   private
+   def sincedb_file
+     File.join(ENV["HOME"], ".sincedb_" + Digest::MD5.hexdigest("#{@bucket}+#{@prefix}"))
+   end
 
    private
-   def process_log(queue, key)
+   def process_files(queue, since=nil)
+     objects = list_new_files
+     objects.each do |key|
+       @logger.debug("S3 input processing", :bucket => @bucket, :key => key)
 
-     object = @s3bucket.objects[key]
-     tmp = Dir.mktmpdir("logstash-")
-     begin
-       filename = File.join(tmp, File.basename(key))
-       File.open(filename, 'wb') do |s3file|
-         object.read do |chunk|
-           s3file.write(chunk)
-         end
-       end
-       process_local_log(queue, filename)
-       unless @backup_to_bucket.nil?
-         backup_object = @backup_bucket.objects[key]
-         backup_object.write(Pathname.new(filename))
-       end
-       unless @backup_to_dir.nil?
-         FileUtils.cp(filename, @backup_to_dir)
-       end
-       if @delete
-         object.delete()
-       end
+       lastmod = @s3bucket.objects[key].last_modified
+
+       process_log(queue, key)
+
+       sincedb.write(lastmod)
      end
-     FileUtils.remove_entry_secure(tmp, force=true)
+   end # def process_files
 
-   end # def process_log
+   private
+   def ignore_filename?(filename)
+     if (@backup_add_prefix && @backup_to_bucket == @bucket && filename =~ /^#{backup_add_prefix}/)
+       return true
+     elsif @exclude_pattern.nil?
+       return false
+     elsif filename =~ Regexp.new(@exclude_pattern)
+       return true
+     else
+       return false
+     end
+   end
 
    private
-   def process_local_log(queue, filename)
+   def process_log(queue, key)
+     object = @s3bucket.objects[key]
 
-     metadata = {
-       :version => nil,
-       :format => nil,
-     }
-     File.open(filename) do |file|
-       if filename.end_with?('.gz')
-         gz = Zlib::GzipReader.new(file)
-         gz.each_line do |line|
-           metadata = process_line(queue, metadata, line)
-         end
-       else
-         file.each do |line|
-           metadata = process_line(queue, metadata, line)
-         end
+     tmp = Stud::Temporary.directory("logstash-")
+
+     filename = File.join(tmp, File.basename(key))
+
+     download_remote_file(object, filename)
+
+     process_local_log(queue, filename)
+
+     backup_to_bucket(object, key)
+     backup_to_dir(filename)
+
+     delete_file_from_bucket(object)
+   end
+
+   private
+   def download_remote_file(remote_object, local_filename)
+     @logger.debug("S3 input: Download remove file", :remote_key => remote_object.key, :local_filename => local_filename)
+     File.open(local_filename, 'wb') do |s3file|
+       remote_object.read do |chunk|
+         s3file.write(chunk)
        end
      end
-
-   end # def process_local_log
+   end
 
    private
-   def process_line(queue, metadata, line)
+   def delete_file_from_bucket(object)
+     if @delete and @backup_to_bucket.nil?
+       object.delete()
+     end
+   end
 
-     if /#Version: .+/.match(line)
-       junk, version = line.strip().split(/#Version: (.+)/)
-       unless version.nil?
-         metadata[:version] = version
-       end
-     elsif /#Fields: .+/.match(line)
-       junk, format = line.strip().split(/#Fields: (.+)/)
-       unless format.nil?
-         metadata[:format] = format
-       end
+   private
+   def get_region
+     # TODO: (ph) Deprecated, it will be removed
+     if @region_endpoint
+       @region_endpoint
      else
-       @codec.decode(line) do |event|
-         decorate(event)
-         unless metadata[:version].nil?
-           event["cloudfront_version"] = metadata[:version]
-         end
-         unless metadata[:format].nil?
-           event["cloudfront_fields"] = metadata[:format]
-         end
-         queue << event
-       end
+       @region
      end
-     return metadata
-
-   end # def process_line
+   end
 
    private
-   def sincedb_read()
+   def get_s3object
+     # TODO: (ph) Deprecated, it will be removed
+     if @credentials.length == 1
+       File.open(@credentials[0]) { |f| f.each do |line|
+         unless (/^\#/.match(line))
+           if(/\s*=\s*/.match(line))
+             param, value = line.split('=', 2)
+             param = param.chomp().strip()
+             value = value.chomp().strip()
+             if param.eql?('AWS_ACCESS_KEY_ID')
+               @access_key_id = value
+             elsif param.eql?('AWS_SECRET_ACCESS_KEY')
+               @secret_access_key = value
+             end
+           end
+         end
+       end
+       }
+     elsif @credentials.length == 2
+       @access_key_id = @credentials[0]
+       @secret_access_key = @credentials[1]
+     end
 
-     if File.exists?(@sincedb_path)
-       since = Time.parse(File.read(@sincedb_path).chomp.strip)
+     if @credentials
+       s3 = AWS::S3.new(
+         :access_key_id => @access_key_id,
+         :secret_access_key => @secret_access_key,
+         :region => @region
+       )
      else
-       since = Time.new(0)
+       s3 = AWS::S3.new(aws_options_hash)
      end
-     return since
-
-   end # def sincedb_read
+   end
 
    private
-   def sincedb_write(since=nil)
+   def aws_service_endpoint(region)
+     return { :s3_endpoint => region }
+   end
+
+   module SinceDB
+     class File
+       def initialize(file)
+         @sincedb_path = file
+       end
 
-     if since.nil?
-       since = Time.now()
-     end
-     File.open(@sincedb_path, 'w') { |file| file.write(since.to_s) }
+       def newer?(date)
+         date > read
+       end
 
-   end # def sincedb_write
+       def read
+         if ::File.exists?(@sincedb_path)
+           since = Time.parse(::File.read(@sincedb_path).chomp.strip)
+         else
+           since = Time.new(0)
+         end
+         return since
+       end
 
+       def write(since = nil)
+         since = Time.now() if since.nil?
+         ::File.open(@sincedb_path, 'w') { |file| file.write(since.to_s) }
+       end
+     end
+   end
  end # class LogStash::Inputs::S3
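The sincedb bookkeeping that 0.1.1 spread across `sincedb_read` and `sincedb_write` now lives in the small `SinceDB::File` class above: it stores a single timestamp and answers whether an object's `last_modified` is new. A short usage sketch, assuming a writable placeholder path (left unconfigured, the plugin derives `$HOME/.sincedb_<MD5 of bucket+prefix>`):

    sincedb = LogStash::Inputs::S3::SinceDB::File.new("/tmp/.sincedb_example")
    sincedb.write(Time.now - 3600)    # remember the last processed timestamp
    sincedb.newer?(Time.now)          # => true, this object should be processed
    sincedb.newer?(Time.now - 7200)   # => false, already seen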
logstash-input-s3.gemspec CHANGED
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|
 
    s.name = 'logstash-input-s3'
-   s.version = '0.1.1'
+   s.version = '0.1.2'
    s.licenses = ['Apache License (2.0)']
    s.summary = "Stream events from files from a S3 bucket."
    s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
@@ -22,8 +22,9 @@ Gem::Specification.new do |s|
    # Gem dependencies
    s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
    s.add_runtime_dependency 'logstash-mixin-aws'
-   s.add_runtime_dependency 'logstash-codec-plain'
+   s.add_runtime_dependency 'logstash-codec-line'
    s.add_runtime_dependency 'aws-sdk'
+   s.add_runtime_dependency 'stud', '~> 0.0.18'
 
    s.add_development_dependency 'logstash-devutils'
  end
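The codec dependency swap mirrors the new `default :codec, "line"`, and the stud gem backs the `Stud.interval` and `Stud::Temporary` calls introduced in the input. A standalone sketch of `Stud.interval`, which replaces the old `loop`/`sleep` pair; it subtracts the block's execution time from the sleep, so iterations start roughly every `interval` seconds instead of drifting by the processing time:

    require "stud/interval"

    # Placeholder body; the plugin calls process_files(queue) here.
    Stud.interval(60) do
      puts "polling the bucket for new keys"
    end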
spec/inputs/s3_spec.rb CHANGED
@@ -1,5 +1,146 @@
+ # encoding: utf-8
  require "logstash/devutils/rspec/spec_helper"
+ require "logstash/inputs/s3"
+ require "logstash/errors"
  require 'logstash/inputs/s3'
 
+ require "aws-sdk"
+ require "stud/temporary"
+
  describe LogStash::Inputs::S3 do
+   before do
+     AWS.stub!
+     Thread.abort_on_exception = true
+   end
+   let(:day) { 3600 * 24 }
+   let(:settings) {
+     {
+       "access_key_id" => "1234",
+       "secret_access_key" => "secret",
+       "bucket" => "logstash-test"
+     }
+   }
+
+   describe "#list_new_files" do
+     before { allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:with_prefix).with(nil) { objects_list } }
+
+     let(:present_object) { double(:key => 'this-should-be-present', :last_modified => Time.now) }
+     let(:objects_list) {
+       [
+         double(:key => 'exclude-this-file-1', :last_modified => Time.now - 2 * day),
+         double(:key => 'exclude/logstash', :last_modified => Time.now - 2 * day),
+         present_object
+       ]
+     }
+
+     it 'should allow user to exclude files from the s3 bucket' do
+       config = LogStash::Inputs::S3.new(settings.merge({ "exclude_pattern" => "^exclude" }))
+       config.register
+       expect(config.list_new_files).to eq([present_object.key])
+     end
+
+     it 'should support not providing a exclude pattern' do
+       config = LogStash::Inputs::S3.new(settings)
+       config.register
+       expect(config.list_new_files).to eq(objects_list.map(&:key))
+     end
+
+     context "If the bucket is the same as the backup bucket" do
+       it 'should ignore files from the bucket if they match the backup prefix' do
+         objects_list = [
+           double(:key => 'mybackup-log-1', :last_modified => Time.now),
+           present_object
+         ]
+
+         allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:with_prefix).with(nil) { objects_list }
+
+         config = LogStash::Inputs::S3.new(settings.merge({ 'backup_add_prefix' => 'mybackup',
+                                                            'backup_to_bucket' => settings['bucket']}))
+         config.register
+         expect(config.list_new_files).to eq([present_object.key])
+       end
+     end
+
+     it 'should ignore files older than X' do
+       config = LogStash::Inputs::S3.new(settings.merge({ 'backup_add_prefix' => 'exclude-this-file'}))
+
+       expect_any_instance_of(LogStash::Inputs::S3::SinceDB::File).to receive(:read).exactly(objects_list.size) { Time.now - day }
+       config.register
+
+       expect(config.list_new_files).to eq([present_object.key])
+     end
+
+     it 'should sort return object sorted by last_modification date with older first' do
+       objects = [
+         double(:key => 'YESTERDAY', :last_modified => Time.now - day),
+         double(:key => 'TODAY', :last_modified => Time.now),
+         double(:key => 'TWO_DAYS_AGO', :last_modified => Time.now - 2 * day)
+       ]
+
+       allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:with_prefix).with(nil) { objects }
+
+
+       config = LogStash::Inputs::S3.new(settings)
+       config.register
+       expect(config.list_new_files).to eq(['TWO_DAYS_AGO', 'YESTERDAY', 'TODAY'])
+     end
+
+     describe "when doing backup on the s3" do
+       it 'should copy to another s3 bucket when keeping the original file' do
+         config = LogStash::Inputs::S3.new(settings.merge({ "backup_to_bucket" => "mybackup"}))
+         config.register
+
+         s3object = double()
+         expect(s3object).to receive(:copy_to).with('test-file', :bucket => an_instance_of(AWS::S3::Bucket))
+
+         config.backup_to_bucket(s3object, 'test-file')
+       end
+
+       it 'should move to another s3 bucket when deleting the original file' do
+         config = LogStash::Inputs::S3.new(settings.merge({ "backup_to_bucket" => "mybackup", "delete" => true }))
+         config.register
+
+         s3object = double()
+         expect(s3object).to receive(:move_to).with('test-file', :bucket => an_instance_of(AWS::S3::Bucket))
+
+         config.backup_to_bucket(s3object, 'test-file')
+       end
+
+       it 'should add the specified prefix to the backup file' do
+         config = LogStash::Inputs::S3.new(settings.merge({ "backup_to_bucket" => "mybackup",
+                                                            "backup_add_prefix" => 'backup-' }))
+         config.register
+
+         s3object = double()
+         expect(s3object).to receive(:copy_to).with('backup-test-file', :bucket => an_instance_of(AWS::S3::Bucket))
+
+         config.backup_to_bucket(s3object, 'test-file')
+       end
+     end
+
+     it 'should support doing local backup of files' do
+       Stud::Temporary.directory do |backup_dir|
+         Stud::Temporary.file do |source_file|
+           backup_file = File.join(backup_dir.to_s, Pathname.new(source_file.path).basename.to_s)
+
+           config = LogStash::Inputs::S3.new(settings.merge({ "backup_to_dir" => backup_dir }))
+
+           config.backup_to_dir(source_file)
+
+           expect(File.exists?(backup_file)).to be_true
+         end
+       end
+     end
+
+     it 'should accepts a list of credentials for the aws-sdk, this is deprecated' do
+       old_credentials_settings = {
+         "credentials" => ['1234', 'secret'],
+         "backup_to_dir" => "/tmp/mybackup",
+         "bucket" => "logstash-test"
+       }
+
+       config = LogStash::Inputs::S3.new(old_credentials_settings)
+       expect{ config.register }.not_to raise_error
+     end
+   end
  end
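The new specs pin down the `exclude_pattern` behaviour: the pattern is compiled with `Regexp.new`, so plain Ruby regexp syntax applies, and matching keys are skipped by the private `ignore_filename?` helper. A sketch using the same pattern and keys as the spec:

    pattern = Regexp.new("^exclude")
    pattern =~ "exclude-this-file-1"      # => 0   (match; the key is skipped)
    pattern =~ "this-should-be-present"   # => nil (no match; the key is processed)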
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-input-s3
  version: !ruby/object:Gem::Version
-   version: 0.1.1
+   version: 0.1.2
  platform: ruby
  authors:
  - Elasticsearch
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-11-19 00:00:00.000000000 Z
+ date: 2015-01-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: logstash
@@ -45,7 +45,7 @@ dependencies:
    prerelease: false
    type: :runtime
  - !ruby/object:Gem::Dependency
-   name: logstash-codec-plain
+   name: logstash-codec-line
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - '>='
@@ -72,6 +72,20 @@ dependencies:
          version: '0'
    prerelease: false
    type: :runtime
+ - !ruby/object:Gem::Dependency
+   name: stud
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.0.18
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.0.18
+   prerelease: false
+   type: :runtime
  - !ruby/object:Gem::Dependency
    name: logstash-devutils
    version_requirements: !ruby/object:Gem::Requirement
@@ -121,7 +135,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
          version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.2.2
+ rubygems_version: 2.1.9
  signing_key:
  specification_version: 4
  summary: Stream events from files from a S3 bucket.