logstash-input-s3-sns-sqs 2.0.0 → 2.0.1
- checksums.yaml +4 -4
- data/CHANGELOG.md +5 -0
- data/lib/logstash/inputs/codec_factory.rb +1 -1
- data/lib/logstash/inputs/s3/downloader.rb +3 -0
- data/lib/logstash/inputs/s3snssqs.rb +14 -9
- data/lib/logstash/inputs/s3snssqs/log_processor.rb +22 -4
- data/lib/logstash/inputs/sqs/poller.rb +2 -1
- data/logstash-input-s3-sns-sqs.gemspec +1 -1
- metadata +2 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9989a06ffe035de2692ed0331fc45b9da60ddab3270826487eeb75fd20e882cb
+  data.tar.gz: e36504f3dca4413e4b34edee8e1a82203444837f785a36b8e2450f83f98102e3
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e538a1426547653719bf0f2dba0aaae03bb80c50b5259ddd69871c209961cc21c389ba587028aa1bc57b017fa4445929350a7cab5833d713d5a0e8d4289f530b
+  data.tar.gz: 589100a9362b9bdb79df9c5ad8e68568175bc8384c6c5302634a2923b82edd8ce4ce6455bb269089e4aed314844bf2ea2cabe39d55f6d3d38c45fa8448b2fa77
data/CHANGELOG.md CHANGED

data/lib/logstash/inputs/s3/downloader.rb CHANGED
@@ -15,12 +15,14 @@ class S3Downloader
     # (from docs) WARNING:
     # yielding data to a block disables retries of networking errors!
     begin
+      #@logger.info("Download File", :file => record)
       @factory.get_s3_client(record[:bucket]) do |s3|
         response = s3.get_object(
           bucket: record[:bucket],
           key: record[:key],
           response_target: record[:local_file]
         )
+        #@logger.info("READY: File", :file => record, :response => response)
       end
     rescue Aws::S3::Errors::ServiceError => e
       @logger.error("Unable to download file. Requeuing the message", :error => e, :record => record)
@@ -32,6 +34,7 @@ class S3Downloader
   end

   def cleanup_local_object(record)
+    #@logger.info("Cleaning up file", :file => record[:local_file])
     FileUtils.remove_entry_secure(record[:local_file], true) if ::File.exists?(record[:local_file])
   rescue Exception => e
     @logger.warn("Could not delete file", :file => record[:local_file], :error => e)
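For context on the download path this hunk instruments: `copy_s3object_to_disk` relies on `Aws::S3::Client#get_object` with `response_target`, which streams the object straight to a local file and avoids the block form that, per the warning above, disables networking retries. A minimal standalone sketch, assuming the aws-sdk gem the plugin already requires; the region, bucket, key, and target path below are illustrative only:

```ruby
require 'aws-sdk'  # provides Aws::S3::Client

# Illustrative values only -- not taken from the plugin's configuration.
s3 = Aws::S3::Client.new(region: 'eu-west-1')

# Streams the object body directly to the given path instead of buffering it
# in memory or yielding chunks to a block (which would disable retries).
s3.get_object(
  bucket: 'example-bucket',
  key: 'logs/example.log.gz',
  response_target: '/tmp/example.log.gz'
)
```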
data/lib/logstash/inputs/s3snssqs.rb CHANGED
@@ -7,11 +7,13 @@ require "logstash/shutdown_watcher"
 require "logstash/errors"
 require 'logstash/inputs/s3sqs/patch'
 require "aws-sdk"
+
 # "object-oriented interfaces on top of API clients"...
 # => Overhead. FIXME: needed?
 #require "aws-sdk-resources"
 require "fileutils"
 require "concurrent"
+require 'tmpdir'
 # unused in code:
 #require "stud/interval"
 #require "digest/md5"
@@ -265,8 +267,10 @@ class LogStash::Inputs::S3SNSSQS < LogStash::Inputs::Threadable
   def run(logstash_event_queue)
     #LogStash::ShutdownWatcher.abort_threshold(30)
     # start them
-    @
-
+    @queue_mutex = Mutex.new
+    #@consumer_threads= 1
+    @worker_threads = @consumer_threads.times.map do |thread_id|
+      run_worker_thread(logstash_event_queue, thread_id)
     end
     # and wait (possibly infinitely) for them to shut down
     @worker_threads.each { |t| t.join }
@@ -286,18 +290,24 @@ class LogStash::Inputs::S3SNSSQS < LogStash::Inputs::Threadable
     end
   end

+  def stop?
+    @received_stop.value
+  end
+
   # --- END plugin interface ------------------------------------------#

   private

-  def run_worker_thread(queue)
+  def run_worker_thread(queue, thread_id)
     Thread.new do
       @logger.info("Starting new worker thread")
+      temporary_directory = Dir.mktmpdir("#{@temporary_directory}/")
       @sqs_poller.run do |record|
         throw :skip_delete if stop?
         @logger.debug("Outside Poller: got a record", :record => record)
         # record is a valid object with the keys ":bucket", ":key", ":size"
-        record[:local_file] = File.join(
+        record[:local_file] = File.join(temporary_directory, File.basename(record[:key]))
+        LogStash::Util.set_thread_name("[Processor #{thread_id} - Working on: #{record[:key]}")
         if @s3_downloader.copy_s3object_to_disk(record)
           completed = catch(:skip_delete) do
             process(record, queue)
@@ -334,9 +344,4 @@ class LogStash::Inputs::S3SNSSQS < LogStash::Inputs::Threadable
     # return input hash (convenience)
     return myhash
   end
-
-  def stop?
-    @received_stop.value
-  end
-
 end # class
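The `run` change above fans work out to `@consumer_threads` worker threads, each of which now receives a `thread_id`, creates its own temporary directory, and labels itself with the S3 key it is working on. A rough plain-Ruby sketch of that pattern, assuming nothing about the plugin beyond what the diff shows; `consumer_threads`, the polling loop, and the per-record work are stand-ins:

```ruby
require 'tmpdir'
require 'fileutils'

consumer_threads = 4  # stand-in for the plugin's @consumer_threads setting

workers = consumer_threads.times.map do |thread_id|
  Thread.new do
    # One private scratch directory per worker, so concurrent downloads never collide.
    temporary_directory = Dir.mktmpdir("worker-#{thread_id}-")
    Thread.current.name = "Processor #{thread_id}"  # the plugin uses LogStash::Util.set_thread_name
    begin
      # ... poll SQS, download each S3 object under temporary_directory, process it ...
    ensure
      FileUtils.remove_entry_secure(temporary_directory)
    end
  end
end

# Wait (possibly forever) for all workers, mirroring @worker_threads.each { |t| t.join }
workers.each(&:join)
```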
data/lib/logstash/inputs/s3snssqs/log_processor.rb CHANGED
@@ -16,17 +16,27 @@ module LogProcessor
     folder = record[:folder]
     type = @type_by_folder[folder] #if @type_by_folder.key?(folder)
     metadata = {}
+    #@logger.info("processing file",:thread => Thread.current[:name], :local_file => record[:local_file])
+    line_count = 0
+    event_count = 0
     read_file(file) do |line|
+      line_count += 1
+      #@logger.info("got a yielded line", :line_count => line_count) if line_count < 10
       if stop?
         @logger.warn("Abort reading in the middle of the file, we will read it again when logstash is started")
         throw :skip_delete
       end
       line = line.encode('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: "\u2370")
+      #@logger.info("ENcoded line", :line_count => line_count) if line_count < 10
       codec.decode(line) do |event|
         decorate_event(event, metadata, type, record[:key], record[:bucket], folder)
+        event_count += 1
         logstash_event_queue << event
+        #@logger.info("queued event ", :lines => line_count, :events => event_count, :thread => Thread.current[:name]) if event_count < 10
       end
+      #@logger.info("DEcoded line", :line_count => line_count) if line_count < 10
     end
+    #@logger.info("queued all events ", :lines => line_count, :events => event_count, :thread => Thread.current[:name])
     # ensure any stateful codecs (such as multi-line ) are flushed to the queue
     codec.flush do |event|
       decorate_event(event, metadata, type, record[:key], record[:bucket], folder)
@@ -65,7 +75,8 @@ module LogProcessor
     @logger.warn("Problem while gzip detection", :error => e)
   end

-  def read_file(filename)
+  def read_file(filename, &block)
+    #@logger.info("begin read_file",:thread => Thread.current[:name])
     completed = false
     zipped = gzip?(filename)
     file_stream = FileInputStream.new(filename)
@@ -76,19 +87,26 @@ module LogProcessor
       decoder = InputStreamReader.new(file_stream, 'UTF-8')
     end
     buffered = BufferedReader.new(decoder)
-
-
-
+    line = buffered.readLine()
+    #@logger.info("read first line", :line => line)
+    while (!line.nil?)
+      block.call(line)
+      line = buffered.readLine()
+      #@logger.info("next line read",:line => line)
     end
+    #@logger.info("finished read_file",:thread => Thread.current[:name])
     completed = true
+
   rescue ZipException => e
     @logger.error("Gzip codec: We cannot uncompress the gzip file", :filename => filename, :error => e)
+    return nil
   ensure
     buffered.close unless buffered.nil?
     decoder.close unless decoder.nil?
     gzip_stream.close unless gzip_stream.nil?
     file_stream.close unless file_stream.nil?
     throw :skip_delete unless completed
+    return nil
   end

   def event_is_metadata?(event)
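`read_file` now takes an explicit `&block` and yields the file line by line via JRuby's `BufferedReader#readLine`. The same idea in plain (MRI) Ruby, as a simplified sketch with gzip handling via `Zlib::GzipReader` instead of the Java `GZIPInputStream`; the file path in the usage line is hypothetical:

```ruby
require 'zlib'

# Yield each line of a (possibly gzip-compressed) file to the caller's block.
def read_file(filename, &block)
  gzipped = File.open(filename, 'rb') { |f| f.read(2) == "\x1f\x8b".b }  # gzip magic bytes
  io = gzipped ? Zlib::GzipReader.open(filename) : File.open(filename, 'r')
  io.each_line { |line| block.call(line.chomp) }
ensure
  io.close if io && !io.closed?
end

# Hypothetical usage:
# read_file('/tmp/example.log.gz') { |line| puts line }
```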
data/lib/logstash/inputs/sqs/poller.rb CHANGED
@@ -90,7 +90,7 @@ class SqsPoller
     failed = false
     begin
       preprocess(message) do |record|
-
+        #@logger.info("we got a record", :record => record)
         yield(record) #unless record.nil? - unnecessary; implicit
       end
     rescue Exception => e
@@ -100,6 +100,7 @@ class SqsPoller
     end
     # at this time the extender has either fired or is obsolete
     extender.kill
+    #@logger.info("Inside Poller: killed background thread", :message => message)
     extender = nil
     throw :skip_delete if failed
   end
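Both the poller and the worker loop rely on Ruby's `catch`/`throw` (not exceptions) to skip deleting an SQS message when processing did not complete. A tiny self-contained illustration of that control flow; `do_work` is a hypothetical stand-in for the plugin's processing code:

```ruby
def do_work(fail_it)
  throw :skip_delete if fail_it  # unwinds straight to the enclosing catch
  true
end

[false, true].each do |fail_it|
  completed = catch(:skip_delete) { do_work(fail_it) }
  # catch returns the block's value normally, or nil when :skip_delete was thrown
  puts completed ? "message can be deleted" : "message stays in the queue"
end
```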
data/logstash-input-s3-sns-sqs.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-input-s3-sns-sqs'
-  s.version = '2.0.0'
+  s.version = '2.0.1'
   s.licenses = ['Apache-2.0']
   s.summary = "Get logs from AWS s3 buckets as issued by an object-created event via sns -> sqs."
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-s3-sns-sqs
 version: !ruby/object:Gem::Version
-  version: 2.0.0
+  version: 2.0.1
 platform: ruby
 authors:
 - Christian Herweg
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-06-
+date: 2019-06-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement