logstash-output-s3 3.0.1 → 3.1.1
This diff represents the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +21 -7
- data/lib/logstash/outputs/s3.rb +48 -28
- data/logstash-output-s3.gemspec +2 -2
- data/spec/integration/s3_spec.rb +1 -0
- data/spec/outputs/s3_spec.rb +1 -11
- metadata +13 -7
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9008635509ce94ad1d6e2242f256c67e0e2ed269
+  data.tar.gz: 5f05f5f226976a70c3df9342dd6b4db0e3760274
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f6a4a44f0b6c9da9f8ec46c230b9aa7d01c2508d01fbb68632debceb23223bc252c9d80e7eb2a241764718e67eee3ba289f7100677cad2ffa73db0e4b4a5892e
+  data.tar.gz: ad0adb0d2a1125a427e013fdcc1d1c2884c6d215523df590081e3b670440656c1d92cebb5765dfe7c898220596d4f6553e74ba336929247ee1f606440dbf654f
data/CHANGELOG.md
CHANGED
@@ -1,25 +1,39 @@
+## 3.1.1
+- Relax constraint on logstash-core-plugin-api to >= 1.60 <= 2.99
+
+## 3.1.0
+- breaking,config: Remove deprecated config `endpoint_region`. Please use `region` instead.
+
 ## 3.0.1
-
+- Republish all the gems under jruby.
+
 ## 3.0.0
-
-
-
-
-
+- Update the plugin to the version 2.0 of the plugin api, this change is required for Logstash 5.0 compatibility. See https://github.com/elastic/logstash/issues/5141
+
+## 2.0.7
+- Depend on logstash-core-plugin-api instead of logstash-core, removing the need to mass update plugins on major releases of logstash
+
+## 2.0.6
+- New dependency requirements for logstash-core for the 5.0 release
+
 ## 2.0.5
 - Support signature_version option for v4 S3 keys
+
 ## 2.0.4
 - Remove the `Time.now` stub in the spec, it was conflicting with other test when running inside the default plugins test #63
 - Make the spec run faster by adjusting the values of time rotation test.
+
 ## 2.0.3
 - Update deps for logstash 2.0
+
 ## 2.0.2
 - Fixes an issue when tags were defined #39
+
 ## 2.0.0
 - Plugins were updated to follow the new shutdown semantic, this mainly allows Logstash to instruct input plugins to terminate gracefully,
   instead of using Thread.raise on the plugins' threads. Ref: https://github.com/elastic/logstash/pull/3895
 - Dependency on logstash-core update to 2.0
 
-
+## 1.0.1
 - Fix a synchronization issue when doing file rotation and checking the size of the current file
 - Fix an issue with synchronization when shutting down the plugin and closing the current temp file
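For users upgrading across these releases, the visible configuration changes are that the removed `endpoint_region` option must be replaced by `region`, and the plugin code below adds a new `encoding` option for gzip-compressed uploads. A minimal sketch of an updated output configuration, following the config form shown in the plugin's own doc comments (credentials and bucket name are placeholders):

    output {
      s3 {
        access_key_id => "example_key"         # placeholder
        secret_access_key => "example_secret"  # placeholder
        region => "eu-west-1"                  # replaces the removed endpoint_region
        bucket => "example-bucket"             # placeholder
        size_file => 2048                      # bytes, optional
        time_file => 5                         # minutes, optional
        encoding => "gzip"                     # new option, defaults to "none"
      }
    }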
data/lib/logstash/outputs/s3.rb
CHANGED
@@ -62,7 +62,7 @@ require "fileutils"
 #   s3{
 #     access_key_id => "crazy_key" (required)
 #     secret_access_key => "monkey_access_key" (required)
-#
+#     region => "eu-west-1" (optional, default = "us-east-1")
 #     bucket => "boss_please_open_your_bucket" (required)
 #     size_file => 2048 (optional) - Bytes
 #     time_file => 5 (optional) - Minutes
@@ -82,11 +82,6 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
   # S3 bucket
   config :bucket, :validate => :string
 
-  # AWS endpoint_region
-  config :endpoint_region, :validate => ["us-east-1", "us-west-1", "us-west-2",
-                                         "eu-west-1", "ap-southeast-1", "ap-southeast-2",
-                                         "ap-northeast-1", "sa-east-1", "us-gov-west-1"], :deprecated => 'Deprecated, use region instead.'
-
   # Set the size of file in bytes, this means that files on bucket when have dimension > file_size, they are stored in two or more file.
   # If you have tags then it will generate a specific size file for every tags
   ##NOTE: define size of file is the better thing, because generate a local temporary file on disk and then put it in bucket.
@@ -135,6 +130,9 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
   #
   config :tags, :validate => :array, :default => []
 
+  # Specify the content encoding. Supports ("gzip"). Defaults to "none"
+  config :encoding, :validate => ["none", "gzip"], :default => "none"
+
   # Exposed attributes for testing purpose.
   attr_accessor :tempfile
   attr_reader :page_counter
@@ -158,17 +156,8 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
   end
 
   def aws_service_endpoint(region)
-    # Make the deprecated endpoint_region work
-    # TODO: (ph) Remove this after deprecation.
-
-    if @endpoint_region
-      region_to_use = @endpoint_region
-    else
-      region_to_use = @region
-    end
-
     return {
-      :s3_endpoint =>
+      :s3_endpoint => region == 'us-east-1' ? 's3.amazonaws.com' : "s3-#{region}.amazonaws.com"
     }
   end
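With the deprecated `endpoint_region` fallback gone, the endpoint is derived from `region` alone. A standalone sketch of the resulting mapping (the helper name below is hypothetical, pulled out only for illustration):

    # Hypothetical extraction of the mapping used by aws_service_endpoint above.
    def s3_endpoint_for(region)
      region == 'us-east-1' ? 's3.amazonaws.com' : "s3-#{region}.amazonaws.com"
    end

    s3_endpoint_for('us-east-1')  # => "s3.amazonaws.com"
    s3_endpoint_for('eu-west-1')  # => "s3-eu-west-1.amazonaws.com"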
@@ -185,7 +174,10 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
     begin
       # prepare for write the file
       object = bucket.objects[remote_filename]
-      object.write(fileIO,
+      object.write(fileIO,
+                   :acl => @canned_acl,
+                   :server_side_encryption => @server_side_encryption ? :aes256 : nil,
+                   :content_encoding => @encoding == "gzip" ? "gzip" : nil)
     rescue AWS::Errors::Base => error
       @logger.error("S3: AWS error", :error => error)
       raise LogStash::Error, "AWS Configuration Error, #{error}"
@@ -207,7 +199,11 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
         @tempfile.close
       end
 
-      @
+      if @encoding == "gzip"
+        @tempfile = Zlib::GzipWriter.open(filename)
+      else
+        @tempfile = File.open(filename, "a")
+      end
     end
   end
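The temporary file handle is now either a plain `File` or a `Zlib::GzipWriter`; when opened from a filename both respond to `write`, `path`, and `close`, which is all the rest of the plugin relies on. A minimal standalone sketch of the same pattern (the path and sample event are made up):

    require "zlib"

    encoding = "gzip"                              # mirrors the plugin's @encoding setting
    filename = "/tmp/ls.s3.example.part0.txt.gz"   # hypothetical temporary file path

    tempfile = if encoding == "gzip"
      Zlib::GzipWriter.open(filename)              # writes are gzip-compressed on the fly
    else
      File.open(filename, "a")
    end

    tempfile.write("2016-07-14T00:00:00Z example event\n")
    tempfile.close                                 # closing the GzipWriter flushes the gzip trailer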
@@ -272,7 +268,7 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
   def restore_from_crashes
     @logger.debug("S3: Checking for temp files from a previoius crash...")
 
-    Dir[File.join(@temporary_directory, "*.#{
+    Dir[File.join(@temporary_directory, "*.#{get_tempfile_extension}")].each do |file|
       name_file = File.basename(file)
       @logger.warn("S3: Found temporary file from crash. Uploading file to S3.", :filename => name_file)
       move_file_to_bucket_async(file)
@@ -301,15 +297,20 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
     @time_file * 60
   end
 
+  private
+  def get_tempfile_extension
+    @encoding == "gzip" ? "#{TEMPFILE_EXTENSION}.gz" : "#{TEMPFILE_EXTENSION}"
+  end
+
   public
   def get_temporary_filename(page_counter = 0)
     current_time = Time.now
     filename = "ls.s3.#{Socket.gethostname}.#{current_time.strftime("%Y-%m-%dT%H.%M")}"
 
     if @tags.size > 0
-      return "#{filename}.tag_#{@tags.join('.')}.part#{page_counter}.#{
+      return "#{filename}.tag_#{@tags.join('.')}.part#{page_counter}.#{get_tempfile_extension}"
     else
-      return "#{filename}.part#{page_counter}.#{
+      return "#{filename}.part#{page_counter}.#{get_tempfile_extension}"
     end
   end
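The new extension helper only changes the suffix of the generated temporary (and therefore uploaded) file names. A sketch of the resulting names, assuming TEMPFILE_EXTENSION is "txt" as elsewhere in the plugin and using a made-up host and timestamp:

    # encoding => "none":        ls.s3.myhost.2016-07-14T10.05.part0.txt
    # encoding => "gzip":        ls.s3.myhost.2016-07-14T10.05.part0.txt.gz
    # tags ["prod"] with gzip:   ls.s3.myhost.2016-07-14T10.05.tag_prod.part0.txt.gz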
@@ -322,7 +323,18 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
   public
   def rotate_events_log?
     @file_rotation_lock.synchronize do
-
+      tempfile_size > @size_file
+    end
+  end
+
+  private
+  def tempfile_size
+    if @tempfile.instance_of? File
+      @tempfile.size
+    elsif @tempfile.instance_of? Zlib::GzipWriter
+      @tempfile.tell
+    else
+      raise LogStash::Error, "Unable to get size of temp file of type #{@tempfile.class}"
     end
   end
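`Zlib::GzipWriter` has no `size` method, so the plugin uses `tell`, which reports the uncompressed bytes written so far; with gzip enabled, rotation therefore triggers on uncompressed event volume rather than the smaller on-disk size. A standalone sketch of the same check (path and threshold are made up):

    require "zlib"

    # Hypothetical standalone version of the size check used for rotation.
    def tempfile_size(tempfile)
      if tempfile.instance_of?(File)
        tempfile.size    # bytes on disk
      elsif tempfile.instance_of?(Zlib::GzipWriter)
        tempfile.tell    # uncompressed bytes written so far
      else
        raise "Unable to get size of temp file of type #{tempfile.class}"
      end
    end

    gz = Zlib::GzipWriter.open("/tmp/example.part0.txt.gz")
    gz.write("a" * 1024)
    puts tempfile_size(gz) > 2048   # => false, not large enough to rotate yet
    gz.close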
@@ -334,10 +346,10 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
   public
   def write_to_tempfile(event)
     begin
-      @logger.debug("S3: put event into tempfile ", :tempfile => File.basename(@tempfile))
+      @logger.debug("S3: put event into tempfile ", :tempfile => File.basename(@tempfile.path))
 
       @file_rotation_lock.synchronize do
-        @tempfile.
+        @tempfile.write(event)
       end
     rescue Errno::ENOSPC
       @logger.error("S3: No space left in temporary directory", :temporary_directory => @temporary_directory)
@@ -365,13 +377,17 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
   def handle_event(encoded_event)
     if write_events_to_multiple_files?
       if rotate_events_log?
-        @logger.debug("S3: tempfile is too large, let's bucket it and create new file", :tempfile => File.basename(@tempfile))
+        @logger.debug("S3: tempfile is too large, let's bucket it and create new file", :tempfile => File.basename(@tempfile.path))
 
-
+        tempfile_path = @tempfile.path
+        # close and start next file before sending the previous one
         next_page
         create_temporary_file
+
+        # send to s3
+        move_file_to_bucket_async(tempfile_path)
       else
-        @logger.debug("S3: tempfile file size report.", :tempfile_size =>
+        @logger.debug("S3: tempfile file size report.", :tempfile_size => tempfile_size, :size_file => @size_file)
       end
     end
@@ -386,9 +402,13 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
     Stud.interval(periodic_interval, :sleep_then_run => true) do
       @logger.debug("S3: time_file triggered, bucketing the file", :filename => @tempfile.path)
 
-
+      tempfile_path = @tempfile.path
+      # close and start next file before sending the previous one
       next_page
       create_temporary_file
+
+      # send to s3
+      move_file_to_bucket_async(tempfile_path)
     end
   end
 end
data/logstash-output-s3.gemspec
CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-output-s3'
-  s.version = '3.
+  s.version = '3.1.1'
   s.licenses = ['Apache License (2.0)']
   s.summary = "This plugin was created for store the logstash's events into Amazon Simple Storage Service (Amazon S3)"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -20,7 +20,7 @@ Gem::Specification.new do |s|
   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }
 
   # Gem dependencies
-  s.add_runtime_dependency "logstash-core-plugin-api", "
+  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
   s.add_runtime_dependency 'logstash-mixin-aws'
   s.add_runtime_dependency 'stud', '~> 0.0.22'
   s.add_development_dependency 'logstash-devutils'
data/spec/integration/s3_spec.rb
CHANGED
data/spec/outputs/s3_spec.rb
CHANGED
@@ -20,17 +20,7 @@ describe LogStash::Outputs::S3 do
                      "bucket" => "my-bucket" } }
 
   describe "configuration" do
-    let!(:config) { { "
-
-    it "should support the deprecated endpoint_region as a configuration option" do
-      s3 = LogStash::Outputs::S3.new(config)
-      expect(s3.aws_options_hash[:s3_endpoint]).to eq("s3-sa-east-1.amazonaws.com")
-    end
-
-    it "should fallback to region if endpoint_region isnt defined" do
-      s3 = LogStash::Outputs::S3.new(config.merge({ "region" => 'sa-east-1' }))
-      expect(s3.aws_options_hash).to include(:s3_endpoint => "s3-sa-east-1.amazonaws.com")
-    end
+    let!(:config) { { "region" => "sa-east-1" } }
 
     describe "signature version" do
       it "should set the signature version if specified" do
metadata
CHANGED
@@ -1,29 +1,35 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-s3
 version: !ruby/object:Gem::Version
-  version: 3.
+  version: 3.1.1
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-
+date: 2016-07-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.60'
+    - - "<="
       - !ruby/object:Gem::Version
-        version: '2.
+        version: '2.99'
   name: logstash-core-plugin-api
   prerelease: false
   type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.60'
+    - - "<="
      - !ruby/object:Gem::Version
-        version: '2.
+        version: '2.99'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
@@ -134,7 +140,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.
+rubygems_version: 2.6.3
 signing_key:
 specification_version: 4
 summary: This plugin was created for store the logstash's events into Amazon Simple Storage Service (Amazon S3)