logstash-input-azure_blob_storage 0.10.4 → 0.10.5

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: b2b6693e28facdfb5bd20cb174e0808f532fbd3d703489098ceed1b9f144572c
4
- data.tar.gz: fb79d81ecadb1eff1201fb338d88066709528e8b4dbfbfecdb32a58a44d93e53
3
+ metadata.gz: 781172a19e14c687455ae28a4a306b04154336b4a1f250a524e711dde3c7d751
4
+ data.tar.gz: c38c2cc4eb832285e3db7e3146d69fc4bf26b5c04e2981229a2b5bb330ec5345
5
5
  SHA512:
6
- metadata.gz: bf10deda3c20e2c49f182530ad67a7897530c7ff98a811b0c12c6aafda9c06983ced7086d965249d5f380d46f6da314570d55502a783337bfe5be2d7430972dd
7
- data.tar.gz: cea053b1c2d1188fbcf1a758ed0c625c2dc7436306d4334516e05e0096b5c92ba94f04ebbb5b3a7182aa0c36987978dfb7a7d2cd29b60dffa72ec8db7f7d237b
6
+ metadata.gz: b0a396dfc88d41be843a562b2473c2e8b34a04ae7532d67be215c4d64130a23a9afa63e875c79d48080f3ba887acc2996d29955c3fff7e7e94ac7cee65830873
7
+ data.tar.gz: 26d6884ac46ab287e9067bbf49fa597281cc8b8901302c30bf850daba582de7ba07333a79570e2bffc1dce84e2b02bdb47a99706cc4361969f5380a1ede38a27
data/CHANGELOG.md CHANGED
@@ -1,10 +1,20 @@
1
+ ## 0.10.5
2
+ - Previous fix broke codec = "line"
3
+
4
+ ## 0.10.4
5
+ - Fixed JSON parsing error for partial files because somehow (logstash 7?) @codec.is_a? doesn't work anymore
6
+
7
+ ## 0.10.3
8
+ - Fixed issue-1 where iplookup configuration was removed, but still used
9
+ - iplookup is now done by a separate plugin named logstash-filter-weblookup
10
+
1
11
  ## 0.10.2
2
12
  - moved iplookup to own plugin logstash-filter-lookup
3
13
 
4
14
  ## 0.10.1
5
- - implemented iplookup
6
- - fixed sas tokens (maybe)
7
- - introduced dns_suffix
15
+ - Implemented iplookup
16
+ - Fixed sas tokens (maybe)
17
+ - Introduced dns_suffix
8
18
 
9
19
  ## 0.10.0
10
20
  - Plugin created with the logstash plugin generator
@@ -9,7 +9,7 @@ require 'json'
9
9
  class LogStash::Inputs::AzureBlobStorage < LogStash::Inputs::Base
10
10
  config_name "azure_blob_storage"
11
11
 
12
- # If undefined, Logstash will complain, even if codec is unused. The codec for nsgflowlog has to be JSON and the for WADIIS and APPSERVICE it has to be plain.
12
+ # If undefined, Logstash will complain, even if codec is unused. The codec for nsgflowlog is "json" and for WADIIS and APPSERVICE it is "line".
13
13
  default :codec, "json"
14
14
 
15
15
  # logtype can be nsgflowlog, wadiis, appservice or raw. The default is raw, where files are read and added as one event. If the file grows, the next interval the file is read from the offset, so that the delta is sent as another event. In raw mode, further processing has to be done in the filter block. If the logtype is specified, this plugin will split and mutate and add individual events to the queue.
@@ -132,9 +132,14 @@ def register
132
132
  end
133
133
  end
134
134
 
135
- @is_json = false
136
- if @codec.class == LogStash::Codecs::JSON
137
- @is_json = true
135
+ @is_json = false
136
+ begin
137
+ if @codec.is_a?(LogStash::Codecs::JSON)
138
+ @is_json = true
139
+ end
140
+ rescue
141
+ @logger.debug(@pipe_id+" Rescue from uninitialized constant ...")
142
+ # how can you elegantly check the codec type in logstash? anyway, not worth crashing over since is_json is already set to false by default
138
143
  end
139
144
  @logger.debug(@pipe_id+" is_json is set to: #{@is_json} because it is a #{@codec}")
140
145
  @head = ''
@@ -178,7 +183,9 @@ def run(queue)
178
183
  file[:length]=chunk.size
179
184
  else
180
185
  chunk = partial_read_json(name, file[:offset], file[:length])
181
- @logger.debug(@pipe_id+" partial file #{res[:nsg]} [#{res[:date]}]")
186
+ # This only applies to NSG!
187
+ @logger.info(@pipe_id+" partial file #{res[:nsg]} [#{res[:date]}]")
188
+ @logger.info(@pipe_id+" partial file #{name}")
182
189
  end
183
190
  if logtype == "nsgflowlog" && @is_json
184
191
  begin
@@ -197,6 +204,7 @@ def run(queue)
197
204
  end
198
205
  @processed += 1
199
206
  end
207
+ # This only applies to NSG!
200
208
  @logger.debug(@pipe_id+" Processed #{res[:nsg]} [#{res[:date]}] #{@processed} events")
201
209
  @registry.store(name, { :offset => file[:length], :length => file[:length] })
202
210
  # if stop? good moment to stop what we're doing
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-input-azure_blob_storage'
3
- s.version = '0.10.4'
3
+ s.version = '0.10.5'
4
4
  s.licenses = ['Apache-2.0']
5
5
  s.summary = 'This logstash plugin reads and parses data from Azure Storage Blobs.'
6
6
  s.description = <<-EOF
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-input-azure_blob_storage
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.10.4
4
+ version: 0.10.5
5
5
  platform: ruby
6
6
  authors:
7
7
  - Jan Geertsma
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2019-07-04 00:00:00.000000000 Z
11
+ date: 2019-07-08 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement