logstash-output-s3 3.1.2 → 3.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 35c6cb316aa0ec036121f892c821dbdb11de3c8d
- data.tar.gz: faa1102b573616d9b02496a32b16eca96d151ba6
+ metadata.gz: e681a1dd7ceea2cedb08ddd6dfc6eae4310a48cb
+ data.tar.gz: c86a97c06c234211329e24184370b1a0fdf19fc7
  SHA512:
- metadata.gz: e56c266affad3d06f0e04d324a9cd8ccd1827a92c0eb6a70965b70ff40d60f5761314f0d4689275bcfc4510ce3ee26fa8de5f8f54d18230bcb04872320055d99
- data.tar.gz: 8483fed9096096e0d0842a5c720642b7382ec4a4b311d57f160ebc0f5b53a1ddc8af369adc0a67fa69511717302858fdf329551cb99a2993835a59985b74c395
+ metadata.gz: 139c3e3749e83a6490ecd982294ab65e6c8628aeb246d9b04f30cdd7f05640c6afd0865133e6c0f017e307d81f2438a675b4024d16a0ece34266ce74d7b46c68
+ data.tar.gz: 1d6b05b4aa255c1db7887d93e8eed9f6781fd923312a38095e7bea6eaff56db49641c2b78263c757b46f522df6131cebe6f28e3af331d0ebb7bdb0c3ac0cbd82
CHANGELOG.md CHANGED
@@ -1,6 +1,13 @@
+ ## 3.2.0
+ - Move to the new concurrency model `:single`
+ - use correct license identifier #99
+ - add support for `bucket_owner_full_control` in the canned ACL #87
+ - delete the test file but ignore any errors, because we actually only need to be able to write to S3. #97
+
  ## 3.1.2
  - Fix improper shutdown of output worker threads
  - improve exception handling
+
  ## 3.0.1
  - Republish all the gems under jruby.
 
lib/logstash/outputs/s3.rb CHANGED
@@ -13,8 +13,8 @@ require "fileutils"
  # INFORMATION:
  #
  # This plugin batches and uploads logstash events into Amazon Simple Storage Service (Amazon S3).
- #
- # Requirements:
+ #
+ # Requirements:
  # * Amazon S3 Bucket and S3 Access Permissions (Typically access_key_id and secret_access_key)
  # * S3 PutObject permission
  # * Run logstash as superuser to establish connection
@@ -42,7 +42,7 @@ require "fileutils"
  # Both time_file and size_file settings can trigger a log "file rotation"
  # A log rotation pushes the current log "part" to s3 and deleted from local temporary storage.
  #
- ## If you specify BOTH size_file and time_file then it will create file for each tag (if specified).
+ ## If you specify BOTH size_file and time_file then it will create file for each tag (if specified).
  ## When EITHER time_file minutes have elapsed OR log file size > size_file, a log rotation is triggered.
  ##
  ## If you ONLY specify time_file but NOT file_size, one file for each tag (if specified) will be created..
@@ -67,7 +67,7 @@ require "fileutils"
  # size_file => 2048 (optional) - Bytes
  # time_file => 5 (optional) - Minutes
  # format => "plain" (optional)
- # canned_acl => "private" (optional. Options are "private", "public_read", "public_read_write", "authenticated_read". Defaults to "private" )
+ # canned_acl => "private" (optional. Options are "private", "public_read", "public_read_write", "authenticated_read", "bucket_owner_full_control". Defaults to "private" )
  # }
  #
  class LogStash::Outputs::S3 < LogStash::Outputs::Base
@@ -79,6 +79,8 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
  config_name "s3"
  default :codec, 'line'
 
+ concurrency :single
+
  # S3 bucket
  config :bucket, :validate => :string
 
@@ -100,7 +102,7 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
  config :restore, :validate => :boolean, :default => false
 
  # The S3 canned ACL to use when putting the file. Defaults to "private".
- config :canned_acl, :validate => ["private", "public_read", "public_read_write", "authenticated_read"],
+ config :canned_acl, :validate => ["private", "public_read", "public_read_write", "authenticated_read", "bucket_owner_full_control"],
  :default => "private"
 
  # Specifies wether or not to use S3's AES256 server side encryption. Defaults to false.
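The new value is passed through to the SDK as the object's canned ACL when a part file is uploaded. A minimal standalone sketch of the equivalent call with the v1 AWS SDK this plugin builds on (the bucket name, key, and local path are placeholders, not taken from this gem):

```ruby
require "aws-sdk-v1"  # v1 SDK, exposing the AWS namespace used by this plugin

s3 = AWS::S3.new(:region => "us-east-1")

# Upload a local part file and grant the bucket owner full control,
# e.g. when the destination bucket belongs to another AWS account.
s3.buckets["my-logstash-bucket"].objects["ls.s3.example.part0.txt"].write(
  :file => "/tmp/ls.s3.example.part0.txt",
  :acl  => "bucket_owner_full_control"
)
```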
@@ -214,8 +216,6 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
  # http://ruby.awsblog.com/post/Tx16QY1CI5GVBFT/Threading-with-the-AWS-SDK-for-Ruby
  AWS.eager_autoload!(AWS::S3)
 
- workers_not_supported
-
  @s3 = aws_s3_config
  @upload_queue = Queue.new
  @file_rotation_lock = Mutex.new
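The `workers_not_supported` call is removed because the plugin now declares `concurrency :single` (added above): Logstash itself guarantees that only one pipeline worker invokes the output at a time, so the plugin no longer has to opt out of multiple workers. A minimal sketch of an output using this model (the class name and body are illustrative, not part of this gem):

```ruby
# encoding: utf-8
require "logstash/outputs/base"

# Illustrative output declaring the :single concurrency model.
class LogStash::Outputs::SingleExample < LogStash::Outputs::Base
  config_name "single_example"
  concurrency :single

  def register
  end

  # With :single concurrency, Logstash serializes calls into the plugin,
  # so multi_receive never runs in two worker threads at once.
  def multi_receive(events)
    events.each { |event| @logger.info("received event", :message => event.get("message")) }
  end
end
```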
@@ -258,7 +258,15 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
 
  begin
  write_on_bucket(test_filename)
- delete_on_bucket(test_filename)
+
+ begin
+ remote_filename = "#{@prefix}#{File.basename(test_filename)}"
+ bucket = @s3.buckets[@bucket]
+ bucket.objects[remote_filename].delete
+ rescue StandardError => e
+ # we actually only need `put_object`, but if we dont delete them
+ # we can have a lot of tests files
+ end
  ensure
  File.delete(test_filename)
  end
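With this change the startup connectivity check only requires `s3:PutObject`: the test object is still deleted when possible, but a failed delete is ignored instead of aborting startup (#97). The same write-then-best-effort-delete pattern as a standalone sketch against the v1 SDK (bucket name and key are placeholders):

```ruby
require "aws-sdk-v1"

s3     = AWS::S3.new(:region => "us-east-1")
bucket = s3.buckets["my-logstash-bucket"]
key    = "logstash-programmatic-access-test-object"

# Writing must succeed: PutObject is the one permission the output truly needs.
bucket.objects[key].write("test")

begin
  # Best-effort cleanup; a missing DeleteObject permission should not be fatal.
  bucket.objects[key].delete
rescue StandardError
  # Ignore: a leftover test object is harmless.
end
```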
@@ -466,24 +474,6 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
  @page_counter = 0
  end
 
- private
- def delete_on_bucket(filename)
- bucket = @s3.buckets[@bucket]
-
- remote_filename = "#{@prefix}#{File.basename(filename)}"
-
- @logger.debug("S3: delete file from bucket", :remote_filename => remote_filename, :bucket => @bucket)
-
- begin
- # prepare for write the file
- object = bucket.objects[remote_filename]
- object.delete
- rescue AWS::Errors::Base => e
- @logger.error("S3: AWS error", :error => e)
- raise LogStash::ConfigurationError, "AWS Configuration Error"
- end
- end
-
  private
  def move_file_to_bucket_async(file)
  @logger.debug("S3: Sending the file to the upload queue.", :filename => File.basename(file))
logstash-output-s3.gemspec CHANGED
@@ -1,8 +1,8 @@
  Gem::Specification.new do |s|
 
  s.name = 'logstash-output-s3'
- s.version = '3.1.2'
- s.licenses = ['Apache License (2.0)']
+ s.version = '3.2.0'
+ s.licenses = ['Apache-2.0']
  s.summary = "This plugin was created for store the logstash's events into Amazon Simple Storage Service (Amazon S3)"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
  s.authors = ["Elastic"]
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-s3
  version: !ruby/object:Gem::Version
- version: 3.1.2
+ version: 3.2.0
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-08-08 00:00:00.000000000 Z
+ date: 2016-09-12 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -120,7 +120,7 @@ files:
  - spec/supports/helpers.rb
  homepage: http://www.elastic.co/guide/en/logstash/current/index.html
  licenses:
- - Apache License (2.0)
+ - Apache-2.0
  metadata:
  logstash_plugin: 'true'
  logstash_group: output