logstash-integration-aws 7.0.0 → 7.0.1
- checksums.yaml +4 -4
- data/CHANGELOG.md +5 -0
- data/docs/codec-cloudfront.asciidoc +2 -2
- data/docs/codec-cloudtrail.asciidoc +2 -2
- data/docs/index.asciidoc +1 -3
- data/docs/input-cloudwatch.asciidoc +2 -2
- data/docs/input-s3.asciidoc +2 -2
- data/docs/input-sqs.asciidoc +2 -2
- data/docs/output-cloudwatch.asciidoc +2 -2
- data/docs/output-s3.asciidoc +2 -2
- data/docs/output-sns.asciidoc +2 -2
- data/docs/output-sqs.asciidoc +2 -2
- data/lib/logstash/outputs/s3/file_repository.rb +83 -11
- data/lib/logstash/outputs/s3.rb +17 -18
- data/logstash-integration-aws.gemspec +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 17a23ba90d8888e550c2116d81a5dde97b5e57f762c02ceea44a2afecb3e5579
+  data.tar.gz: 41d6e0d5e4e5d9bed6170baa2b44fdfc4bce7d89488e0f15bd80ff9e7a44a3c0
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 14cef0ec2b9d04466fcf9ee6f612cd2c8e1055bc5853b147c9086c4914d52c695aab3ab270ef81fe9ae3c9add45391a380228d77befdfe54910decfc27ad60fd
+  data.tar.gz: 5da492ac21e100028d58a6549efae8d110e764ff28a79d21f835946c41afba88e11d240fedde9ac2842357b87e9610f31e098c8ffbd7946ffc36ca5555cd170a
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,8 @@
+## 7.0.1
+  - resolves two closely-related race conditions in the S3 Output plugin's handling of stale temporary files that could cause plugin crashes or data-loss [#19](https://github.com/logstash-plugins/logstash-integration-aws/pull/19)
+    - prevents a `No such file or directory` crash that could occur when a temporary file is accessed after it has been detected as stale (empty+old) and deleted.
+    - prevents a possible deletion of a non-empty temporary file that could occur if bytes were written to it _after_ it was detected as stale (empty+old) and _before_ the deletion completed.
+
 ## 7.0.0
   - bump integration to upper bound of all underlying plugins versions (biggest is sqs output 6.x)
     - this is necessary to facilitate versioning continuity between older standalone plugins and plugins within the integration
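The two 7.0.1 entries above describe check-then-act races between the stale-file sweeper and writer threads. The following is a minimal, self-contained Ruby sketch of the general fix, not the plugin's code: class and method names are invented, and "stale" is simplified to "empty". The point is that the staleness check, the deletion, and a deleted marker live in one critical section, and a writer that observes the marker asks for a new slot instead of touching (or refilling) a file that is being reaped.

    require 'monitor'
    require 'tempfile'

    # Hypothetical illustration of the race-free pattern: one slot shared by a writer
    # thread and a sweeper thread, protected by a reentrant Monitor.
    class TempSlot
      include MonitorMixin

      def initialize
        super()                      # initializes the monitor
        @file = Tempfile.new('slot')
        @deleted = false
      end

      # Returns false once the slot has been reaped, so the caller can switch to a new
      # slot instead of writing into a file that is scheduled for deletion.
      def write(bytes)
        synchronize do
          return false if @deleted
          @file.write(bytes)
          @file.flush
          true
        end
      end

      # Staleness check, marking, and deletion happen inside the same critical section,
      # so no bytes can arrive between the check and the unlink.
      def reap_if_stale
        synchronize do
          if !@deleted && @file.size.zero?
            @deleted = true
            @file.close!
          end
        end
      end

      def deleted?
        synchronize { @deleted }
      end
    end

    slot    = TempSlot.new
    writer  = Thread.new { 10.times { slot.write("event\n") || break } }
    sweeper = Thread.new { 10.times { slot.reap_if_stale; sleep 0.001 } }
    [writer, sweeper].each(&:join)
    puts "slot deleted? #{slot.deleted?}"

The actual fix in data/lib/logstash/outputs/s3/file_repository.rb (further down in this diff) applies the same idea per prefix, with a Monitor guarding each factory and the deleted flag re-checked by readers.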
data/docs/codec-cloudfront.asciidoc
CHANGED
@@ -1,4 +1,4 @@
-
+:integration: aws
 :plugin: cloudfront
 :type: codec
 
@@ -17,7 +17,7 @@ END - GENERATED VARIABLES, DO NOT EDIT!
 
 === Cloudfront codec plugin
 
-
+include::{include_path}/plugin_header-integration.asciidoc[]
 
 ==== Description
 
data/docs/codec-cloudtrail.asciidoc
CHANGED
@@ -1,4 +1,4 @@
-
+:integration: aws
 :plugin: cloudtrail
 :type: codec
 
@@ -17,7 +17,7 @@ END - GENERATED VARIABLES, DO NOT EDIT!
 
 === Cloudtrail codec plugin
 
-
+include::{include_path}/plugin_header-integration.asciidoc[]
 
 ==== Description
 
data/docs/index.asciidoc
CHANGED
@@ -17,13 +17,12 @@ END - GENERATED VARIABLES, DO NOT EDIT!
 
 === AWS Integration Plugin
 
-
+include::{include_path}/plugin_header.asciidoc[]
 
 ==== Description
 
 The AWS Integration Plugin provides integrated plugins for working with Amazon Web Services:
 
-////
 - {logstash-ref}/plugins-codecs-cloudfront.html[Cloudfront Codec Plugin]
 - {logstash-ref}/plugins-codecs-cloudtrail.html[Cloudtrail Codec Plugin]
 - {logstash-ref}/plugins-inputs-cloudwatch.html[Cloudwatch Input Plugin]
@@ -33,6 +32,5 @@ The AWS Integration Plugin provides integrated plugins for working with Amazon W
 - {logstash-ref}/plugins-outputs-s3.html[S3 Output Plugin]
 - {logstash-ref}/plugins-outputs-sns.html[Sns Output Plugin]
 - {logstash-ref}/plugins-outputs-sqs.html[Sqs Output Plugin]
-////
 
 :no_codec!:
data/docs/input-cloudwatch.asciidoc
CHANGED
@@ -1,4 +1,4 @@
-
+:integration: aws
 :plugin: cloudwatch
 :type: input
 :default_codec: plain
@@ -18,7 +18,7 @@ END - GENERATED VARIABLES, DO NOT EDIT!
 
 === Cloudwatch input plugin
 
-
+include::{include_path}/plugin_header-integration.asciidoc[]
 
 ==== Description
 
data/docs/input-s3.asciidoc
CHANGED
@@ -1,4 +1,4 @@
-
+:integration: aws
 :plugin: s3
 :type: input
 :default_codec: plain
@@ -18,7 +18,7 @@ END - GENERATED VARIABLES, DO NOT EDIT!
 
 === S3 input plugin
 
-
+include::{include_path}/plugin_header-integration.asciidoc[]
 
 ==== Description
 
data/docs/input-sqs.asciidoc
CHANGED
@@ -1,4 +1,4 @@
-
+:integration: aws
 :plugin: sqs
 :type: input
 :default_codec: json
@@ -18,7 +18,7 @@ END - GENERATED VARIABLES, DO NOT EDIT!
 
 === Sqs input plugin
 
-
+include::{include_path}/plugin_header-integration.asciidoc[]
 
 ==== Description
 
data/docs/output-cloudwatch.asciidoc
CHANGED
@@ -1,4 +1,4 @@
-
+:integration: aws
 :plugin: cloudwatch
 :type: output
 :default_codec: plain
@@ -18,7 +18,7 @@ END - GENERATED VARIABLES, DO NOT EDIT!
 
 === Cloudwatch output plugin
 
-
+include::{include_path}/plugin_header-integration.asciidoc[]
 
 ==== Description
 
data/docs/output-s3.asciidoc
CHANGED
@@ -1,4 +1,4 @@
-
+:integration: aws
 :plugin: s3
 :type: output
 :default_codec: line
@@ -18,7 +18,7 @@ END - GENERATED VARIABLES, DO NOT EDIT!
 
 === S3 output plugin
 
-
+include::{include_path}/plugin_header-integration.asciidoc[]
 
 ==== Description
 
data/docs/output-sns.asciidoc
CHANGED
@@ -1,4 +1,4 @@
-
+:integration: aws
 :plugin: sns
 :type: output
 :default_codec: plain
@@ -18,7 +18,7 @@ END - GENERATED VARIABLES, DO NOT EDIT!
 
 === Sns output plugin
 
-
+include::{include_path}/plugin_header-integration.asciidoc[]
 
 ==== Description
 
data/docs/output-sqs.asciidoc
CHANGED
@@ -1,4 +1,4 @@
-
+:integration: aws
 :plugin: sqs
 :type: output
 :default_codec: json
@@ -18,7 +18,7 @@ END - GENERATED VARIABLES, DO NOT EDIT!
 
 === Sqs output plugin
 
-
+include::{include_path}/plugin_header-integration.asciidoc[]
 
 ==== Description
 
data/lib/logstash/outputs/s3/file_repository.rb
CHANGED
@@ -17,8 +17,9 @@ module LogStash
     class PrefixedValue
      def initialize(file_factory, stale_time)
        @file_factory = file_factory
-       @lock =
+       @lock = Monitor.new # reentrant Mutex
        @stale_time = stale_time
+       @is_deleted = false
      end
 
      def with_lock
@@ -36,7 +37,14 @@ module LogStash
      end
 
      def delete!
-       with_lock
+       with_lock do |factory|
+         factory.current.delete!
+         @is_deleted = true
+       end
+     end
+
+     def deleted?
+       with_lock { |_| @is_deleted }
      end
    end
 
@@ -72,19 +80,70 @@ module LogStash
      @prefixed_factories.keySet
    end
 
+    ##
+    # Yields the current file of each non-deleted file factory while the current thread has exclusive access to it.
+    # @yieldparam file [TemporaryFile]
+    # @return [void]
    def each_files
-
-
+      each_factory(keys) do |factory|
+        yield factory.current
      end
+      nil # void return avoid leaking unsynchronized access
    end
 
-
+    ##
+    # Yields the file factory while the current thread has exclusive access to it, creating a new
+    # one if one does not exist or if the current one is being reaped by the stale watcher.
+    # @param prefix_key [String]: the prefix key
+    # @yieldparam factory [TemporaryFileFactory]: a temporary file factory that this thread has exclusive access to
+    # @return [void]
    def get_factory(prefix_key)
-
+      # fast-path: if factory exists and is not deleted, yield it with exclusive access and return
+      prefix_val = @prefixed_factories.get(prefix_key)
+      prefix_val&.with_lock do |factory|
+        # intentional local-jump to ensure deletion detection
+        # is done inside the exclusive access.
+        unless prefix_val.deleted?
+          yield(factory)
+          return nil # void return avoid leaking unsynchronized access
+        end
+      end
+
+      # slow-path:
+      # the ConcurrentHashMap#get operation is lock-free, but may have returned an entry that was being deleted by
+      # another thread (such as via stale detection). If we failed to retrieve a value, or retrieved one that had
+      # been marked deleted, use the atomic ConcurrentHashMap#compute to retrieve a non-deleted entry.
+      prefix_val = @prefixed_factories.compute(prefix_key) do |_, existing|
+        existing && !existing.deleted? ? existing : @factory_initializer.apply(prefix_key)
+      end
+      prefix_val.with_lock { |factory| yield factory }
+      nil # void return avoid leaking unsynchronized access
+    end
+
+    ##
+    # Yields each non-deleted file factory while the current thread has exclusive access to it.
+    # @param prefixes [Array<String>]: the prefix keys
+    # @yieldparam factory [TemporaryFileFactory]
+    # @return [void]
+    def each_factory(prefixes)
+      prefixes.each do |prefix_key|
+        prefix_val = @prefixed_factories.get(prefix_key)
+        prefix_val&.with_lock do |factory|
+          yield factory unless prefix_val.deleted?
+        end
+      end
+      nil # void return avoid leaking unsynchronized access
    end
 
+    ##
+    # Ensures that a non-deleted factory exists for the provided prefix and yields its current file
+    # while the current thread has exclusive access to it.
+    # @param prefix_key [String]
+    # @yieldparam file [TemporaryFile]
+    # @return [void]
    def get_file(prefix_key)
      get_factory(prefix_key) { |factory| yield factory.current }
+      nil # void return avoid leaking unsynchronized access
    end
 
    def shutdown
@@ -95,10 +154,21 @@ module LogStash
      @prefixed_factories.size
    end
 
-    def
-
-
-
+    def remove_if_stale(prefix_key)
+      # we use the ATOMIC `ConcurrentHashMap#computeIfPresent` to atomically
+      # detect the staleness, mark a stale prefixed factory as deleted, and delete from the map.
+      @prefixed_factories.computeIfPresent(prefix_key) do |_, prefixed_factory|
+        # once we have retrieved an instance, we acquire exclusive access to it
+        # for stale detection, marking it as deleted before releasing the lock
+        # and causing it to become deleted from the map.
+        prefixed_factory.with_lock do |_|
+          if prefixed_factory.stale?
+            prefixed_factory.delete! # mark deleted to prevent reuse
+            nil # cause deletion
+          else
+            prefixed_factory # keep existing
+          end
+        end
      end
    end
 
@@ -106,7 +176,9 @@ module LogStash
      @stale_sweeper = Concurrent::TimerTask.new(:execution_interval => @sweeper_interval) do
        LogStash::Util.set_thread_name("S3, Stale factory sweeper")
 
-        @prefixed_factories.
+        @prefixed_factories.keys.each do |prefix|
+          remove_if_stale(prefix)
+        end
      end
 
      @stale_sweeper.execute
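The core of the hunks above is `remove_if_stale`, which relies on `java.util.concurrent.ConcurrentHashMap#computeIfPresent` so that staleness detection, the deleted marking, and removal from the map happen as a single atomic step per key. Below is a JRuby-only sketch of that pattern in isolation; the map contents and the `Entry` struct are invented, and it assumes JRuby's block-to-`BiFunction` conversion, the same mechanism the diff itself uses.

    require 'java'

    java_import java.util.concurrent.ConcurrentHashMap

    # Invented stand-in for PrefixedValue: "stale" here just means an empty payload.
    Entry = Struct.new(:payload, :deleted) do
      def stale?
        payload.nil? || payload.empty?
      end
    end

    map = ConcurrentHashMap.new
    map.put("prefix/a", Entry.new("", false))     # empty     -> will be reaped
    map.put("prefix/b", Entry.new("data", false)) # non-empty -> kept

    def remove_if_stale(map, key)
      # The Ruby block is converted to the java.util.function.BiFunction that
      # computeIfPresent expects; it runs atomically for this key.
      map.computeIfPresent(key) do |_k, entry|
        if entry.stale?
          entry.deleted = true # late readers holding a reference can see the entry was reaped
          nil                  # returning nil removes the mapping
        else
          entry                # keep the existing mapping
        end
      end
    end

    map.keySet.each { |k| remove_if_stale(map, k) }
    puts map.keySet.to_a.inspect # => ["prefix/b"]

Returning nil from the block is what deletes the key, which is why `get_factory` above still checks `deleted?` on any value obtained from the lock-free `get` before reusing it.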
data/lib/logstash/outputs/s3.rb
CHANGED
@@ -258,6 +258,8 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
 
     @logger.debug("Uploading current workspace")
 
+    @file_repository.shutdown # stop stale sweeps
+
     # The plugin has stopped receiving new events, but we still have
     # data on disk, lets make sure it get to S3.
     # If Logstash get interrupted, the `restore_from_crash` (when set to true) method will pickup
@@ -267,8 +269,6 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
       upload_file(file)
     end
 
-    @file_repository.shutdown
-
     @uploader.stop # wait until all the current upload are complete
     @crash_uploader.stop if @restore # we might have still work to do for recovery so wait until we are done
   end
@@ -336,22 +336,21 @@ class LogStash::Outputs::S3 < LogStash::Outputs::Base
   end
 
   def rotate_if_needed(prefixes)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      end
+
+    # Each file access is thread safe,
+    # until the rotation is done then only
+    # one thread has access to the resource.
+    @file_repository.each_factory(prefixes) do |factory|
+      temp_file = factory.current
+
+      if @rotation.rotate?(temp_file)
+        @logger.debug("Rotate file",
+                      :strategy => @rotation.class.name,
+                      :key => temp_file.key,
+                      :path => temp_file.path)
+
+        upload_file(temp_file)
+        factory.rotate!
       end
     end
   end
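The first two hunks above reorder the close path so `@file_repository.shutdown` stops the stale sweeper before the final drain-and-upload loop rather than after it. Below is a small standalone sketch of that ordering, built on concurrent-ruby's `Concurrent::TimerTask` (the same primitive the sweeper above is built on); the file names and the "upload" step are invented stand-ins.

    require 'concurrent'

    files = Concurrent::Array.new
    files.concat(%w[part-0 part-1 part-2])

    # Stand-in for the stale sweeper: periodically removes entries it considers stale.
    sweeper = Concurrent::TimerTask.new(execution_interval: 0.05) do
      files.delete_if { |f| f.end_with?('-0') }
    end
    sweeper.execute

    # Stopping the sweeper before draining mirrors the reordered close path above:
    # the drain loop below is no longer scheduled against further sweeps.
    sweeper.shutdown

    files.each { |f| puts "uploading #{f}" }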
data/logstash-integration-aws.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = "logstash-integration-aws"
-  s.version = "7.0.0"
+  s.version = "7.0.1"
   s.licenses = ["Apache-2.0"]
   s.summary = "Collection of Logstash plugins that integrate with AWS"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-aws
 version: !ruby/object:Gem::Version
-  version: 7.0.0
+  version: 7.0.1
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-
+date: 2022-12-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement