logstash-output-s3 4.0.13 → 4.1.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 31c0e0ca6dc28796f973af9830c1c375560892c4d425bcb8735b8cf720ddf3f7
4
- data.tar.gz: b06aacbcc82edae998b34205d9c8cee1d70bd0d734f70c79e846b02a9e99efa7
3
+ metadata.gz: a81da91b293a56df2fdc37318a53c955ad95cb27bcd18ed8590852d67eb5ef04
4
+ data.tar.gz: 8042723187ab6e9f876010742e4e3b2ae8d6a4ad8cc78410d72db661a5790b8a
5
5
  SHA512:
6
- metadata.gz: 5ecf5cd5791fa502b8301eee9beee91f58557bf3653c9340a930ab8cc870a6a0687740a3769f8a6e1df588d11680f153537888053f2f85e5f0303c30a0effbce
7
- data.tar.gz: cd4bf16b7c61a02cdbf7460f1e03671e76e208c4a3c6e104a2fee5152ea0d962f5572a36d17c7bf3df247b95e99cb2563409984e658a915475ed9298f2a39200
6
+ metadata.gz: 8da621cdb021120b3a21b84856197a509f6be69a30e055233c7deda34a393dd9516cc955d581bc622f15e2f6f77328a4edf4cdce0f4e1d04c3723b7f873634e9
7
+ data.tar.gz: 349da598ec69a047c1be15be0ed724f630fbc5da815b37f2f056da6d37f591e1019239fdff9bf33c50008c6b5ab944d7e50b63c389447982ce3ca3e28e233a60
@@ -1,3 +1,10 @@
1
+ ## 4.1.0
2
+ - Add documentation for endpoint, role_arn and role_session_name #174
3
+ - Add option for additional settings #173
4
+ - Add more S3 bucket ACLs #158
5
+ - Handle file not found exception on S3 upload #144
6
+ - Document prefix interpolation #154
7
+
1
8
  ## 4.0.13
2
9
  - Update gemspec summary
3
10
 
@@ -4,7 +4,7 @@
4
4
 
5
5
  ```
6
6
  bundle install
7
- bundle rspec
7
+ bundle exec rspec
8
8
  ```
9
9
 
10
10
  If you want to run the integration test against a real bucket you need to pass
data/LICENSE CHANGED
@@ -1,4 +1,4 @@
1
- Copyright (c) 2012–2016 Elasticsearch <http://www.elastic.co>
1
+ Copyright (c) 2012-2018 Elasticsearch <http://www.elastic.co>
2
2
 
3
3
  Licensed under the Apache License, Version 2.0 (the "License");
4
4
  you may not use this file except in compliance with the License.
@@ -40,7 +40,7 @@ ls.s3.312bc026-2f5d-49bc-ae9f-5940cf4ad9a6.2013-04-18T10.00.tag_hello.part0.txt
40
40
  | 312bc026-2f5d-49bc-ae9f-5940cf4ad9a6 | a new, random uuid per file. |
41
41
  | 2013-04-18T10.00 | represents the time whenever you specify time_file. |
42
42
  | tag_hello | this indicates the event's tag. |
43
- | part0 | this means if you indicate size_file then it will generate more parts if you file.size > size_file. When a file is full it will be pushed to the bucket and then deleted from the temporary directory. If a file is empty, it is simply deleted. Empty files will not be pushed |
43
+ | part0 | this means if you indicate size_file then it will generate more parts if your file.size > size_file. When a file is full it will be pushed to the bucket and then deleted from the temporary directory. If a file is empty, it is simply deleted. Empty files will not be pushed |
44
44
  |=======
45
45
 
46
46
  Crash Recovery:
@@ -64,7 +64,7 @@ output {
64
64
  size_file => 2048 (optional) - Bytes
65
65
  time_file => 5 (optional) - Minutes
66
66
  codec => "plain" (optional)
67
- canned_acl => "private" (optional. Options are "private", "public-read", "public-read-write", "authenticated-read". Defaults to "private" )
67
+ canned_acl => "private" (optional. Options are "private", "public-read", "public-read-write", "authenticated-read", "aws-exec-read", "bucket-owner-read", "bucket-owner-full-control", "log-delivery-write". Defaults to "private" )
68
68
  }
69
69
 
70
70
 
@@ -77,14 +77,18 @@ This plugin supports the following configuration options plus the <<plugins-{typ
77
77
  |=======================================================================
78
78
  |Setting |Input type|Required
79
79
  | <<plugins-{type}s-{plugin}-access_key_id>> |<<string,string>>|No
80
+ | <<plugins-{type}s-{plugin}-additional_settings>> |<<hash,hash>>|No
80
81
  | <<plugins-{type}s-{plugin}-aws_credentials_file>> |<<string,string>>|No
81
82
  | <<plugins-{type}s-{plugin}-bucket>> |<<string,string>>|Yes
82
- | <<plugins-{type}s-{plugin}-canned_acl>> |<<string,string>>, one of `["private", "public-read", "public-read-write", "authenticated-read"]`|No
83
+ | <<plugins-{type}s-{plugin}-canned_acl>> |<<string,string>>, one of `["private", "public-read", "public-read-write", "authenticated-read", "aws-exec-read", "bucket-owner-read", "bucket-owner-full-control", "log-delivery-write"]`|No
83
84
  | <<plugins-{type}s-{plugin}-encoding>> |<<string,string>>, one of `["none", "gzip"]`|No
85
+ | <<plugins-{type}s-{plugin}-endpoint>> |<<string,string>>|No
84
86
  | <<plugins-{type}s-{plugin}-prefix>> |<<string,string>>|No
85
87
  | <<plugins-{type}s-{plugin}-proxy_uri>> |<<string,string>>|No
86
- | <<plugins-{type}s-{plugin}-region>> |<<string,string>>, one of `["us-east-1", "us-east-2", "us-west-1", "us-west-2", "eu-central-1", "eu-west-1", "eu-west-2", "ap-southeast-1", "ap-southeast-2", "ap-northeast-1", "ap-northeast-2", "sa-east-1", "us-gov-west-1", "cn-north-1", "ap-south-1", "ca-central-1"]`|No
88
+ | <<plugins-{type}s-{plugin}-region>> |<<string,string>>|No
87
89
  | <<plugins-{type}s-{plugin}-restore>> |<<boolean,boolean>>|No
90
+ | <<plugins-{type}s-{plugin}-role_arn>> |<<string,string>>|No
91
+ | <<plugins-{type}s-{plugin}-role_session_name>> |<<string,string>>|No
88
92
  | <<plugins-{type}s-{plugin}-rotation_strategy>> |<<string,string>>, one of `["size_and_time", "size", "time"]`|No
89
93
  | <<plugins-{type}s-{plugin}-secret_access_key>> |<<string,string>>|No
90
94
  | <<plugins-{type}s-{plugin}-server_side_encryption>> |<<boolean,boolean>>|No
@@ -120,6 +124,29 @@ This plugin uses the AWS SDK and supports several ways to get credentials, which
120
124
  4. Environment variables `AMAZON_ACCESS_KEY_ID` and `AMAZON_SECRET_ACCESS_KEY`
121
125
  5. IAM Instance Profile (available when running inside EC2)
122
126
 
127
+ [id="plugins-{type}s-{plugin}-additional_settings"]
128
+ ===== `additional_settings`
129
+
130
+ * Value type is <<hash,hash>>
131
+ * Default value is `{}`
132
+
133
+ Key-value pairs of settings and corresponding values used to parametrize
134
+ the connection to S3. See full list in https://docs.aws.amazon.com/sdkforruby/api/Aws/S3/Client.html[the AWS SDK documentation]. Example:
135
+
136
+ [source,ruby]
137
+ output {
138
+ s3 {
139
+ access_key_id => "1234",
140
+ secret_access_key => "secret",
141
+ region => "eu-west-1",
142
+ bucket => "logstash-test",
143
+ additional_settings => {
144
+ "force_path_style" => true,
145
+ "follow_redirects" => false
146
+ }
147
+ }
148
+ }
149
+
123
150
  [id="plugins-{type}s-{plugin}-aws_credentials_file"]
124
151
  ===== `aws_credentials_file`
125
152
 
@@ -150,7 +177,7 @@ S3 bucket
150
177
  [id="plugins-{type}s-{plugin}-canned_acl"]
151
178
  ===== `canned_acl`
152
179
 
153
- * Value can be any of: `private`, `public-read`, `public-read-write`, `authenticated-read`
180
+ * Value can be any of: `private`, `public-read`, `public-read-write`, `authenticated-read`, `aws-exec-read`, `bucket-owner-read`, `bucket-owner-full-control`, `log-delivery-write`
154
181
  * Default value is `"private"`
155
182
 
156
183
  The S3 canned ACL to use when putting the file. Defaults to "private".
@@ -163,6 +190,16 @@ The S3 canned ACL to use when putting the file. Defaults to "private".
163
190
 
164
191
  Specify the content encoding. Supports ("gzip"). Defaults to "none"
165
192
 
193
+ [id="plugins-{type}s-{plugin}-endpoint"]
194
+ ===== `endpoint`
195
+
196
+ * Value type is <<string,string>>
197
+ * There is no default value for this setting.
198
+
199
+ The endpoint to connect to. By default it is constructed using the value of `region`.
200
+ This is useful when connecting to S3 compatible services, but beware that these aren't
201
+ guaranteed to work correctly with the AWS SDK.
202
+
166
203
  [id="plugins-{type}s-{plugin}-prefix"]
167
204
  ===== `prefix`
168
205
 
@@ -170,7 +207,9 @@ Specify the content encoding. Supports ("gzip"). Defaults to "none"
170
207
  * Default value is `""`
171
208
 
172
209
  Specify a prefix to the uploaded filename, this can simulate directories on S3. Prefix does not require leading slash.
173
- This option support string interpolation, be warned this can created a lot of temporary local files.
210
+ This option supports logstash interpolation: https://www.elastic.co/guide/en/logstash/current/event-dependent-configuration.html#sprintf;
211
+ for example, files can be prefixed with the event date using `prefix => "%{+YYYY}/%{+MM}/%{+dd}"`.
212
+ Be warned this can create a lot of temporary local files.
174
213
 
175
214
  [id="plugins-{type}s-{plugin}-proxy_uri"]
176
215
  ===== `proxy_uri`
@@ -183,7 +222,7 @@ URI to proxy server if required
183
222
  [id="plugins-{type}s-{plugin}-region"]
184
223
  ===== `region`
185
224
 
186
- * Value can be any of: `us-east-1`, `us-east-2`, `us-west-1`, `us-west-2`, `eu-central-1`, `eu-west-1`, `eu-west-2`, `ap-southeast-1`, `ap-southeast-2`, `ap-northeast-1`, `ap-northeast-2`, `sa-east-1`, `us-gov-west-1`, `cn-north-1`, `ap-south-1`, `ca-central-1`
225
+ * Value type is <<string,string>>
187
226
  * Default value is `"us-east-1"`
188
227
 
189
228
  The AWS Region
@@ -194,7 +233,23 @@ The AWS Region
194
233
  * Value type is <<boolean,boolean>>
195
234
  * Default value is `true`
196
235
 
236
+ [id="plugins-{type}s-{plugin}-role_arn"]
237
+ ===== `role_arn`
238
+
239
+ * Value type is <<string,string>>
240
+ * There is no default value for this setting.
241
+
242
+ The AWS IAM Role to assume, if any.
243
+ This is used to generate temporary credentials, typically for cross-account access.
244
+ See the https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html[AssumeRole API documentation] for more information.
245
+
246
+ [id="plugins-{type}s-{plugin}-role_session_name"]
247
+ ===== `role_session_name`
248
+
249
+ * Value type is <<string,string>>
250
+ * Default value is `"logstash"`
197
251
 
252
+ Session name to use when assuming an IAM role.
198
253
 
199
254
  [id="plugins-{type}s-{plugin}-rotation_strategy"]
200
255
  ===== `rotation_strategy`
@@ -324,4 +379,4 @@ In some circonstances you need finer grained permission on subfolder, this allow
324
379
 
325
380
 
326
381
  [id="plugins-{type}s-{plugin}-common-options"]
327
- include::{include_path}/{type}.asciidoc[]
382
+ include::{include_path}/{type}.asciidoc[]
@@ -72,7 +72,7 @@ Aws.eager_autoload!
72
72
  # size_file => 2048 (optional) - Bytes
73
73
  # time_file => 5 (optional) - Minutes
74
74
  # codec => "plain" (optional)
75
- # canned_acl => "private" (optional. Options are "private", "public-read", "public-read-write", "authenticated-read". Defaults to "private" )
75
+ # canned_acl => "private" (optional. Options are "private", "public-read", "public-read-write", "authenticated-read", "aws-exec-read", "bucket-owner-read", "bucket-owner-full-control", "log-delivery-write". Defaults to "private" )
76
76
  # }
77
77
  #
78
78
  class LogStash::Outputs::S3 < LogStash::Outputs::Base
@@ -106,6 +106,8 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
106
106
  # S3 bucket
107
107
  config :bucket, :validate => :string, :required => true
108
108
 
109
+ config :additional_settings, :validate => :hash, :default => {}
110
+
109
111
  # Set the size of file in bytes, this means that files on bucket when have dimension > file_size, they are stored in two or more file.
110
112
  # If you have tags then it will generate a specific size file for every tags
111
113
  ##NOTE: define size of file is the better thing, because generate a local temporary file on disk and then put it in bucket.
@@ -124,7 +126,7 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
124
126
  config :restore, :validate => :boolean, :default => true
125
127
 
126
128
  # The S3 canned ACL to use when putting the file. Defaults to "private".
127
- config :canned_acl, :validate => ["private", "public-read", "public-read-write", "authenticated-read"],
129
+ config :canned_acl, :validate => ["private", "public-read", "public-read-write", "authenticated-read", "aws-exec-read", "bucket-owner-read", "bucket-owner-full-control", "log-delivery-write"],
128
130
  :default => "private"
129
131
 
130
132
  # Specifies whether or not to use S3's server side encryption. Defaults to no encryption.
@@ -267,9 +269,9 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
267
269
  end
268
270
 
269
271
  def full_options
270
- options = Hash.new
272
+ options = aws_options_hash || {}
271
273
  options[:signature_version] = @signature_version if @signature_version
272
- options.merge(aws_options_hash)
274
+ @additional_settings.merge(options)
273
275
  end
274
276
 
275
277
  def normalize_key(prefix_key)
@@ -36,13 +36,16 @@ module LogStash
36
36
  begin
37
37
  obj = bucket.object(file.key)
38
38
  obj.upload_file(file.path, upload_options)
39
+ rescue Errno::ENOENT => e
40
+ logger.error("File doesn't exist! Unrecoverable error.", :exception => e.class, :message => e.message, :path => file.path, :backtrace => e.backtrace)
39
41
  rescue => e
40
42
  # When we get here it usually means that S3 tried to do some retries by itself (default is 3)
41
43
  # When the retry limit is reached or another error happen we will wait and retry.
42
44
  #
43
45
  # Thread might be stuck here, but I think its better than losing anything
44
46
  # its either a transient errors or something bad really happened.
45
- logger.error("Uploading failed, retrying", :exception => e.class, :message => e.message, :path => file.path, :backtrace => e.backtrace)
47
+ logger.error("Uploading failed, retrying.", :exception => e.class, :message => e.message, :path => file.path, :backtrace => e.backtrace)
48
+ sleep TIME_BEFORE_RETRYING_SECONDS
46
49
  retry
47
50
  end
48
51
 
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-output-s3'
3
- s.version = '4.0.13'
3
+ s.version = '4.1.0'
4
4
  s.licenses = ['Apache-2.0']
5
5
  s.summary = "Sends Logstash events to the Amazon Simple Storage Service"
6
6
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -20,7 +20,7 @@ Gem::Specification.new do |s|
20
20
 
21
21
  # Gem dependencies
22
22
  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
23
- s.add_runtime_dependency 'logstash-mixin-aws'
23
+ s.add_runtime_dependency 'logstash-mixin-aws', '>= 4.3.0'
24
24
  s.add_runtime_dependency "concurrent-ruby"
25
25
  s.add_runtime_dependency 'stud', '~> 0.0.22'
26
26
  s.add_development_dependency 'logstash-devutils'
@@ -24,8 +24,7 @@ describe LogStash::Outputs::S3 do
24
24
  subject { described_class.new(options) }
25
25
 
26
26
  before do
27
- allow(subject).to receive(:bucket_resource).and_return(mock_bucket)
28
- allow_any_instance_of(LogStash::Outputs::S3::WriteBucketPermissionValidator).to receive(:valid?).with(mock_bucket, subject.upload_options).and_return(true)
27
+ allow_any_instance_of(LogStash::Outputs::S3::WriteBucketPermissionValidator).to receive(:valid?).and_return(true)
29
28
  end
30
29
 
31
30
  context "#register configuration validation" do
@@ -45,7 +44,7 @@ describe LogStash::Outputs::S3 do
45
44
 
46
45
  describe "Access control list" do
47
46
  context "when configured" do
48
- ["private", "public-read", "public-read-write", "authenticated-read"].each do |permission|
47
+ ["private", "public-read", "public-read-write", "authenticated-read", "aws-exec-read", "bucket-owner-read", "bucket-owner-full-control", "log-delivery-write"].each do |permission|
49
48
  it "should return the configured ACL permissions: #{permission}" do
50
49
  s3 = described_class.new(options.merge({ "canned_acl" => permission }))
51
50
  expect(s3.upload_options).to include(:acl => permission)
@@ -143,6 +142,29 @@ describe LogStash::Outputs::S3 do
143
142
  expect { s3.register }.to raise_error(LogStash::ConfigurationError)
144
143
  end
145
144
 
145
+ describe "additional_settings" do
146
+ context "when enabling force_path_style" do
147
+ let(:additional_settings) do
148
+ { "additional_settings" => { "force_path_style" => true } }
149
+ end
150
+
151
+ it "validates the prefix" do
152
+ expect(Aws::S3::Bucket).to receive(:new).twice.with(anything, hash_including("force_path_style" => true)).and_call_original
153
+ described_class.new(options.merge(additional_settings)).register
154
+ end
155
+ end
156
+ context "when using a non existing setting" do
157
+ let(:additional_settings) do
158
+ { "additional_settings" => { "doesnt_exist" => true } }
159
+ end
160
+
161
+ it "raises an error" do
162
+ plugin = described_class.new(options.merge(additional_settings))
163
+ expect { plugin.register }.to raise_error(ArgumentError)
164
+ end
165
+ end
166
+ end
167
+
146
168
  it "allow to not validate credentials" do
147
169
  s3 = described_class.new(options.merge({"validate_credentials_on_root_bucket" => false}))
148
170
  expect_any_instance_of(LogStash::Outputs::S3::WriteBucketPermissionValidator).not_to receive(:valid?).with(any_args)
@@ -152,6 +174,7 @@ describe LogStash::Outputs::S3 do
152
174
 
153
175
  context "receiving events" do
154
176
  before do
177
+ allow(subject).to receive(:bucket_resource).and_return(mock_bucket)
155
178
  subject.register
156
179
  end
157
180
 
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-output-s3
3
3
  version: !ruby/object:Gem::Version
4
- version: 4.0.13
4
+ version: 4.1.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Elastic
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2017-11-13 00:00:00.000000000 Z
11
+ date: 2018-04-03 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement
@@ -35,7 +35,7 @@ dependencies:
35
35
  requirements:
36
36
  - - ">="
37
37
  - !ruby/object:Gem::Version
38
- version: '0'
38
+ version: 4.3.0
39
39
  name: logstash-mixin-aws
40
40
  prerelease: false
41
41
  type: :runtime
@@ -43,7 +43,7 @@ dependencies:
43
43
  requirements:
44
44
  - - ">="
45
45
  - !ruby/object:Gem::Version
46
- version: '0'
46
+ version: 4.3.0
47
47
  - !ruby/object:Gem::Dependency
48
48
  requirement: !ruby/object:Gem::Requirement
49
49
  requirements:
@@ -186,7 +186,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
186
186
  version: '0'
187
187
  requirements: []
188
188
  rubyforge_project:
189
- rubygems_version: 2.6.11
189
+ rubygems_version: 2.6.13
190
190
  signing_key:
191
191
  specification_version: 4
192
192
  summary: Sends Logstash events to the Amazon Simple Storage Service