logstash-input-s3-sns-sqs 1.1.6 → 1.1.7

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 75d610de1c48fd3e5cc00b1c867cc21eaea6e3c1
-  data.tar.gz: 0fdf5bb05918562318431308ea15ace790b0108c
+  metadata.gz: cbba025cb4e274be8fd5af60c33fdc445486831c
+  data.tar.gz: 1aea14626b1290ca3837fea2cd8b35dee986b841
 SHA512:
-  metadata.gz: 2d6ffeca392ea7f5ac597942df3faabb85b9fd41ec0c5c5341e2ae115c13ae7345315fd31ca2bd7491707042e3be931fbeca2365827d2c9e017e04f512d0ffae
-  data.tar.gz: d5fc3995e1ac8bcf43e251e63ae68619e8b8ade5b35713449a2d296ad3be73677a6828991e848cb0603bb12b99544e8245a14a4c11835b60e174f874eb7fa490
+  metadata.gz: a7ad292c17c49d1b9a0fa3c8ab9f454a026b38defdd73007dde13f6349f358a927da92bba46f8c082d269a1464431ba8d114d844c44a4383c8b43b1940026c0a
+  data.tar.gz: 5c969909a581f9aeb3d1704310a0556a27ed2936a73e3c0a5b2a9590801a63bbdf6fd16d5797ff79af3f87855b541614fc155acf3ae46da58ef19991073e6e5a
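A packaged gem's checksums.yaml simply records SHA1 and SHA512 digests of the metadata.gz and data.tar.gz archives inside the .gem file, which is why every entry changes on a release. A minimal sketch for recomputing and comparing those digests with Ruby's standard Digest library; the file paths are hypothetical and assume the .gem has already been unpacked and checksums.yaml.gz gunzipped:

require 'digest'
require 'yaml'

# Hypothetical paths: assumes `tar xf logstash-input-s3-sns-sqs-1.1.7.gem`
# has been run and checksums.yaml.gz has been gunzipped in place.
checksums = YAML.safe_load(File.read('checksums.yaml'))

%w[metadata.gz data.tar.gz].each do |name|
  sha1_ok   = Digest::SHA1.file(name).hexdigest   == checksums['SHA1'][name]
  sha512_ok = Digest::SHA512.file(name).hexdigest == checksums['SHA512'][name]
  puts "#{name}: #{sha1_ok && sha512_ok ? 'OK' : 'MISMATCH'}"
end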
data/CHANGELOG.md CHANGED
@@ -1,3 +1,5 @@
+## 1.1.6
+- Fix a nil exception in message parsing
 ## 1.1.5
 - Fix loglevel for some debug messages
 ## 1.1.4
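The change behind this release (the hunk below) guards the SNS envelope parsing so that SQS messages without a Message field no longer raise a nil exception. The shipped code checks hash['Message'].present?, an ActiveSupport-style predicate; a minimal plain-Ruby sketch of the same guard, using a hypothetical extract_records helper rather than the plugin's handle_message:

require 'json'

# Hypothetical helper, not part of the plugin: `hash` stands for the already
# JSON-decoded SQS body, whose optional 'Message' field carries the SNS payload.
def extract_records(hash)
  raw = hash['Message']
  return [] if raw.nil? || raw.empty?   # plain-Ruby equivalent of present?

  JSON.parse(raw)['Records'] || []      # test events carry no Records array
end

extract_records({})                                   # => []  (no longer raises)
extract_records('Message' => '{"Records": []}')       # => []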
@@ -131,74 +131,76 @@ class LogStash::Inputs::S3SNSSQS < LogStash::Inputs::Threadable
     @logger.debug("handle_message", :hash => hash, :message => message)
     # there may be test events sent from the s3 bucket which won't contain a Records array,
     # we will skip those events and remove them from queue
-    message = JSON.parse(hash['Message'])
-    if message['Records'] then
-      # typically there will be only 1 record per event, but since it is an array we will
-      # treat it as if there could be more records
-      message['Records'].each do |record|
-        @logger.debug("We found a record", :record => record)
-        # in case there are any events with Records that aren't s3 object-created events and can't therefore be
-        # processed by this plugin, we will skip them and remove them from queue
-        if record['eventSource'] == EVENT_SOURCE and record['eventName'].start_with?(EVENT_TYPE) then
-          @logger.debug("It is a valid record")
-          bucket = CGI.unescape(record['s3']['bucket']['name'])
-          key = CGI.unescape(record['s3']['object']['key'])
-
-          # try download and :skip_delete if it fails
-          begin
-            response = @s3.get_object(
-              bucket: bucket,
-              key: key,
-            )
-          rescue => e
-            @logger.warn("issuing :skip_delete on failed download", :bucket => bucket, :object => key, :error => e)
-            throw :skip_delete
-          end
+    if hash['Message'].present?
+      message = JSON.parse(hash['Message'])
+      if message['Records'] then
+        # typically there will be only 1 record per event, but since it is an array we will
+        # treat it as if there could be more records
+        message['Records'].each do |record|
+          @logger.debug("We found a record", :record => record)
+          # in case there are any events with Records that aren't s3 object-created events and can't therefore be
+          # processed by this plugin, we will skip them and remove them from queue
+          if record['eventSource'] == EVENT_SOURCE and record['eventName'].start_with?(EVENT_TYPE) then
+            @logger.debug("It is a valid record")
+            bucket = CGI.unescape(record['s3']['bucket']['name'])
+            key = CGI.unescape(record['s3']['object']['key'])
+
+            # try download and :skip_delete if it fails
+            begin
+              response = @s3.get_object(
+                bucket: bucket,
+                key: key,
+              )
+            rescue => e
+              @logger.warn("issuing :skip_delete on failed download", :bucket => bucket, :object => key, :error => e)
+              throw :skip_delete
+            end
 
-          # verify downloaded content size
-          if response.content_length == record['s3']['object']['size'] then
-            body = response.body
-            # if necessary unzip
-            if response.content_encoding == "gzip" or record['s3']['object']['key'].end_with?(".gz") then
-              @logger.debug("Ohhh i´ll try to unzip")
-              begin
-                temp = Zlib::GzipReader.new(body)
-              rescue => e
-                @logger.warn("content is marked to be gzipped but can't unzip it, assuming plain text", :bucket => bucket, :object => key, :error => e)
-                temp = body
+            # verify downloaded content size
+            if response.content_length == record['s3']['object']['size'] then
+              body = response.body
+              # if necessary unzip
+              if response.content_encoding == "gzip" or record['s3']['object']['key'].end_with?(".gz") then
+                @logger.debug("Ohhh i´ll try to unzip")
+                begin
+                  temp = Zlib::GzipReader.new(body)
+                rescue => e
+                  @logger.warn("content is marked to be gzipped but can't unzip it, assuming plain text", :bucket => bucket, :object => key, :error => e)
+                  temp = body
+                end
+                body = temp
               end
-              body = temp
-            end
-            # process the plain text content
-            begin
-              lines = body.read.encode('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: "\u2370").split(/\n/)
-              lines.each do |line|
-                @logger.debug("Decorating the event")
-                @codec.decode(line) do |event|
+              # process the plain text content
+              begin
+                lines = body.read.encode('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: "\u2370").split(/\n/)
+                lines.each do |line|
+                  @logger.debug("Decorating the event")
+                  @codec.decode(line) do |event|
                   decorate(event)
 
                   event.set('[@metadata][s3_bucket_name]', record['s3']['bucket']['name'])
                   event.set('[@metadata][s3_object_key]', record['s3']['object']['key'])
 
                   queue << event
+                  end
                 end
+              rescue => e
+                @logger.warn("issuing :skip_delete on failed plain text processing", :bucket => bucket, :object => key, :error => e)
+                throw :skip_delete
               end
-            rescue => e
-              @logger.warn("issuing :skip_delete on failed plain text processing", :bucket => bucket, :object => key, :error => e)
-              throw :skip_delete
-            end
 
-            # Delete the files from S3
-            begin
-              @s3.delete_object(bucket: bucket, key: key) if @delete_on_success
-            rescue => e
-              @logger.warn("Failed to delete S3 object", :bucket => bucket, :object => key, :error => e)
+              # Delete the files from S3
+              begin
+                @s3.delete_object(bucket: bucket, key: key) if @delete_on_success
+              rescue => e
+                @logger.warn("Failed to delete S3 object", :bucket => bucket, :object => key, :error => e)
+              end
+            # otherwise try again later
+            else
+              @logger.warn("issuing :skip_delete on wrong download content size", :bucket => bucket, :object => key,
+                           :download_size => response.content_length, :expected => record['s3']['object']['size'])
+              throw :skip_delete
             end
-          # otherwise try again later
-          else
-            @logger.warn("issuing :skip_delete on wrong download content size", :bucket => bucket, :object => key,
-                         :download_size => response.content_length, :expected => record['s3']['object']['size'])
-            throw :skip_delete
-          end
           end
         end
       end
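One detail in the hunk above worth noting is the gzip fallback: the plugin attempts Zlib::GzipReader on the downloaded body and, if inflation fails, logs a warning and treats the content as plain text. A minimal standalone sketch of that pattern, assuming a StringIO body and only the .gz-suffix check (the plugin additionally looks at the Content-Encoding of the aws-sdk response):

require 'zlib'
require 'stringio'

# Sketch of the "unzip if possible, otherwise assume plain text" pattern.
# `body` is a StringIO here; in the plugin it is the aws-sdk response body.
def readable_body(body, key)
  return body unless key.end_with?('.gz')
  Zlib::GzipReader.new(body)
rescue Zlib::GzipFile::Error
  body.rewind   # the failed header read may have advanced the IO
  body          # fall back to plain text despite the .gz suffix
end

plain = StringIO.new("hello\nworld\n")
puts readable_body(plain, 'logs/app.log').read       # plain text passes through

gz = StringIO.new(Zlib.gzip("zipped line\n"))
puts readable_body(gz, 'logs/app.log.gz').read       # gzip content is inflated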
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-input-s3-sns-sqs'
-  s.version = '1.1.6'
+  s.version = '1.1.7'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Get logs from AWS s3 buckets as issued by an object-created event via sns -> sqs."
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-s3-sns-sqs
 version: !ruby/object:Gem::Version
-  version: 1.1.6
+  version: 1.1.7
 platform: ruby
 authors:
 - Christian Herweg
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-01-19 00:00:00.000000000 Z
+date: 2018-02-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement