logstash-input-azure_blob_storage 0.11.2 → 0.11.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: b721a6aa74f4e9df285f62f47efa42112e540d9836391b31e74daf6544e1087d
- data.tar.gz: 5d22a077d53698807a51dde75ac6c7deb273f0fe68d7ea05a46651b5e0c9e577
+ metadata.gz: 38eceb446d8dd92f8a0c86d6f7e48fb707babb0c611f437e98322272fcfea124
+ data.tar.gz: ef970349f391a6809cdd91feabe88a5e2beb2c81402ad1ec0ac29ad7bdb0ed1e
  SHA512:
- metadata.gz: fb35924d7f18579977fa8257a722aa136ca3d9d6a48cb1aecc3aa9f768a4d4b682d5a86c455b634b19d40c6dad9359a54d5b4906ef6952fff8ebc7166c90a808
- data.tar.gz: abddf838e31d981dc2da2b84bf825cb7981c610da1331a7aba1cf13f2de6e9ce7c644649f586ad6ef9c2630888900f3f8620fec7b47cddbc6b91c927e44c9b72
+ metadata.gz: c0285b5459e65dd7b95766626f6336f6fc92a8a767b78912f9830e8b605beebe5a681f077c7b59b2cc1eb31861854e49ebf89a1372f8ac282fc5a537ad478d54
+ data.tar.gz: 72a905d9d621a80333eeb06a69baa51b435b1efb66acdde4ab51ecf37c6f0aa388ec39af57e0991be1cc1121035212f5a4a9a966a798bfa2794048e8949f33f3
CHANGELOG.md CHANGED
@@ -1,34 +1,38 @@
+ ## 0.11.2
+ - implemented path_filters to to use path filtering like this **/*.log
+ - implemented debug_until to debug only at the start of a pipeline until it processed enough messages
+
  ## 0.11.1
  - copied changes from irnc fork (danke!)
- - Fixed trying to load the registry, three time is the charm
+ - fixed trying to load the registry, three time is the charm
  - logs are less chatty, changed info to debug

  ## 0.11.0
- - Implemented start_fresh to skip all previous logs and start monitoring new entries
- - Fixed the timer, now properly sleep the interval and check again
+ - implemented start_fresh to skip all previous logs and start monitoring new entries
+ - fixed the timer, now properly sleep the interval and check again
  - Work around for a Faraday Middleware v.s. Azure Storage Account bug in follow_redirect

  ## 0.10.6
- - Fixed the rootcause of the checking the codec. Now compare the classname.
+ - fixed the rootcause of the checking the codec. Now compare the classname.

  ## 0.10.5
- - Previous fix broke codec = "line"
+ - previous fix broke codec = "line"

  ## 0.10.4
- - Fixed JSON parsing error for partial files because somehow (logstash 7?) @codec.is_a? doesn't work anymore
+ - fixed JSON parsing error for partial files because somehow (logstash 7?) @codec.is_a? doesn't work anymore

  ## 0.10.3
- - Fixed issue-1 where iplookup confguration was removed, but still used
+ - fixed issue-1 where iplookup confguration was removed, but still used
  - iplookup is now done by a separate plugin named logstash-filter-weblookup

  ## 0.10.2
  - moved iplookup to own plugin logstash-filter-lookup

  ## 0.10.1
- - Implemented iplookup
- - Fixed sas tokens (maybe)
- - Introduced dns_suffix
+ - implemented iplookup
+ - fixed sas tokens (maybe)
+ - introduced dns_suffix

  ## 0.10.0
- - Plugin created with the logstash plugin generator
- - Reimplemented logstash-input-azureblob with incompatible config and data/registry
+ - plugin created with the logstash plugin generator
+ - reimplemented logstash-input-azureblob with incompatible config and data/registry
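
For reference, the path_filters option added in 0.11.2 accepts glob patterns such as **/*.log. A minimal Ruby sketch of how such patterns can be matched against blob names; the filter list and blob names here are invented for illustration, and the FNM_PATHNAME behaviour is the one described in the list_blobs comment further below:

    # Illustrative only: glob-style path filters like "**/*.log" matched
    # against blob names with Ruby's File.fnmatch?. The filters and names
    # below are made up for this sketch.
    path_filters = ['**/*.log']

    blob_names = [
      'app.log',                       # file at the container root
      'server/2020/03/13/access.log',  # nested file
      'server/2020/03/13/access.gz'    # different extension, filtered out
    ]

    matched = blob_names.select do |name|
      path_filters.any? do |pattern|
        # FNM_PATHNAME lets "**/" match zero or more directories, so
        # "**/*.log" also matches "app.log" at the root folder.
        File.fnmatch?(pattern, name, File::FNM_PATHNAME)
      end
    end

    p matched  # => ["app.log", "server/2020/03/13/access.log"]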
lib/logstash/inputs/azure_blob_storage.rb CHANGED
@@ -224,10 +224,15 @@ def run(queue)
  @processed += wadiislog(queue, name)
  else
  counter = 0
- @codec.decode(chunk) do |event|
+ begin
+ @codec.decode(chunk) do |event|
  counter += 1
  decorate(event)
  queue << event
+ end
+ rescue Exception => e
+ @logger.error(@pipe_id+" codec exception: #{e.message} .. will continue and pretend this never happened")
+ @logger.debug(@pipe_id+" #{chunk}")
  end
  @processed += counter
  end
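
The hunk above wraps @codec.decode in begin/rescue so that a chunk the codec cannot parse is logged and skipped instead of crashing the pipeline. A standalone, illustrative sketch of the same pattern; the inline "codec" and the sample chunks are stand-ins invented for this example (in the plugin the codec comes from the pipeline config):

    require 'json'

    # Stand-in codec: yields one event per JSON line of a chunk.
    codec = Class.new do
      def decode(chunk)
        chunk.each_line { |line| yield JSON.parse(line) }
      end
    end.new

    queue = []
    chunks = ["{\"a\":1}\n{\"a\":2}\n", "not json at all\n"]

    processed = 0
    chunks.each do |chunk|
      counter = 0
      begin
        codec.decode(chunk) do |event|
          counter += 1
          queue << event
        end
      rescue StandardError => e
        # same idea as the plugin (which rescues Exception): log the bad
        # chunk and keep going rather than stopping the whole pipeline
        warn "codec exception: #{e.message} .. will continue"
      end
      processed += counter
    end

    p processed   # => 2
    p queue.size  # => 2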
@@ -337,6 +342,7 @@ def list_blobs(fill)
  nextMarker = nil
  for counter in 1..3
  begin
+ loop do
  blobs = @blob_client.list_blobs(container, { marker: nextMarker, prefix: @prefix})
  blobs.each do |blob|
  # FNM_PATHNAME is required so that "**/test" can match "test" at the root folder
@@ -355,6 +361,7 @@ def list_blobs(fill)
  end
  nextMarker = blobs.continuation_token
  break unless nextMarker && !nextMarker.empty?
+ end
  rescue Exception => e
  @logger.error(@pipe_id+" caught: #{e.message} for attempt #{counter} of 3")
  counter += 1
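
The added loop do ... end turns the single listing call into marker-based pagination: pages are requested until the service stops returning a continuation token. A sketch of that pattern in isolation, with blob_client, container and prefix standing in for the plugin's @blob_client, container and @prefix:

    # Sketch of the marker-based pagination introduced in list_blobs above.
    # blob_client is assumed to be an azure-storage-blob client, used the
    # same way as in the hunk (list_blobs + continuation_token).
    def list_all_blob_names(blob_client, container, prefix)
      names = []
      next_marker = nil
      loop do
        page = blob_client.list_blobs(container, { marker: next_marker, prefix: prefix })
        page.each { |blob| names << blob.name }
        # an empty continuation token means this was the last page
        next_marker = page.continuation_token
        break unless next_marker && !next_marker.empty?
      end
      names
    end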
@@ -386,6 +393,7 @@ def learn_encapsulation
  blob = @blob_client.list_blobs(container, { maxresults: 1, prefix: @prefix }).first
  return if blob.nil?
  blocks = @blob_client.list_blob_blocks(container, blob.name)[:committed]
+ # TODO add check for empty blocks and log error that the header and footer can't be learned and must be set in the config
  @logger.debug(@pipe_id+" using #{blob.name} to learn the json header and tail")
  @head = @blob_client.get_blob(container, blob.name, start_range: 0, end_range: blocks.first.size-1)[1]
  @logger.debug(@pipe_id+" learned header: #{@head}")
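
The TODO in the hunk above concerns blobs with no committed blocks, from which the json head and tail cannot be learned. A hypothetical, illustrative shape for such a guard (not part of this release):

    require 'logger'

    # Hypothetical illustration of the TODO above: an empty committed-block
    # list means the json head and tail cannot be learned from the sampled
    # blob and must be set in the plugin config instead.
    def blocks_usable?(blocks, logger, blob_name)
      return true unless blocks.nil? || blocks.empty?
      logger.error("#{blob_name} has no committed blocks; set the json head and tail in the config")
      false
    end

    logger = Logger.new($stderr)
    p blocks_usable?([], logger, 'sample.json')  # => false (and logs an error)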
logstash-input-azure_blob_storage.gemspec CHANGED
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-input-azure_blob_storage'
- s.version = '0.11.2'
+ s.version = '0.11.3'
  s.licenses = ['Apache-2.0']
  s.summary = 'This logstash plugin reads and parses data from Azure Storage Blobs.'
  s.description = <<-EOF
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-input-azure_blob_storage
  version: !ruby/object:Gem::Version
- version: 0.11.2
+ version: 0.11.3
  platform: ruby
  authors:
  - Jan Geertsma
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-12-20 00:00:00.000000000 Z
+ date: 2020-03-13 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -113,7 +113,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.7.9
+ rubygems_version: 2.7.10
  signing_key:
  specification_version: 4
  summary: This logstash plugin reads and parses data from Azure Storage Blobs.