logstash-input-azure_blob_storage 0.10.5 → 0.10.6
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/lib/logstash/inputs/azure_blob_storage.rb +19 -27
- data/logstash-input-azure_blob_storage.gemspec +1 -2
- data/spec/inputs/azure_blob_storage_spec.rb +5 -5
- metadata +2 -16
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 409d47c61b8f38963281ca655d7664bba48c6d311adbcbbbb6a4e2e5d0356363
+  data.tar.gz: 1648a2174502fb84d015bcb1b76f192c7c7583b3e71ce5ec738ee20da12d0f8b
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: '0795b03a70aee648d1cd7c951c1f3bc6fc6d8bc3989098ed4982e1011f66cca7a83c77a366351db00254cf3b2ee3b3b1389d8db0f2e9fc2d6424e7218c470a06'
+  data.tar.gz: 85e0d362dfacc7369fdb04e4dc47ea3754ba64c175029bc8c46e0ce14fd30ad2a0660f9b051f0b93eeb5ddffd6b6f06a111819116cbb7e07f20d9e310f418e2d
data/CHANGELOG.md
CHANGED
data/lib/logstash/inputs/azure_blob_storage.rb
CHANGED
@@ -91,6 +91,8 @@ def register
     @processed = 0
     @regsaved = @processed
 
+    @buffer = FileWatch::BufferedTokenizer.new('\n')
+
     # Try in this order to access the storageaccount
     # 1. storageaccount / sas_token
     # 2. connection_string
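Reviewer note: FileWatch::BufferedTokenizer (shipped with logstash-core) buffers partial reads and only emits complete, delimiter-terminated tokens. This hunk only constructs it; the extract/flush calls below are an illustrative sketch, not code from this plugin. Also worth noting: in Ruby the single-quoted '\n' is a literal backslash followed by n, not a newline, so as written the delimiter is that two-character sequence; "\n" in double quotes would split on newlines.

    # Sketch of BufferedTokenizer behaviour (assumed usage, not plugin code)
    tokenizer = FileWatch::BufferedTokenizer.new("\n")
    tokenizer.extract("first line\nsecond li")  # => ["first line"]
    tokenizer.extract("ne\nthird")              # => ["second line"]
    tokenizer.flush                             # => "third" (unterminated remainder)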
@@ -134,14 +136,10 @@ def register
 
     @is_json = false
     begin
-        if @codec.
+        if @codec.class.name.eql?("LogStash::Codecs::JSON")
             @is_json = true
         end
-    rescue
-        @logger.debug(@pipe_id+" Rescue from uninitialized constant ...")
-        # how can you elegantly check the codec type in logstash? anyway, not worth crashing over since is_json is already set to false by default
     end
-    @logger.debug(@pipe_id+" is_json is set to: #{@is_json} because it is a #{@codec}")
     @head = ''
     @tail = ''
     # if codec=json sniff one files blocks A and Z to learn file_head and file_tail
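The new class-name comparison replaces the old begin/rescue probe: comparing @codec.class.name as a string can never raise, while referencing the LogStash::Codecs::JSON constant directly raises NameError when the json codec is not loaded — exactly what the removed rescue was papering over. A minimal sketch of the two approaches (assumes @codec holds a LogStash codec instance):

    # Safe even when LogStash::Codecs::JSON is not defined:
    @is_json = @codec.class.name.eql?("LogStash::Codecs::JSON")

    # An is_a? check would also match subclasses, but needs the constant loaded:
    # @is_json = @codec.is_a?(LogStash::Codecs::JSON)  # NameError if absent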
@@ -153,15 +151,17 @@ def register
         if file_tail
             @tail = file_tail
         end
+        @logger.info(@pipe_id+" head will be: #{@head} and tail is set to #{@tail}")
     end
-    @logger.info(@pipe_id+" head will be: #{@head} and tail is set to #{@tail}")
 end # def register
 
+
+
 def run(queue)
     filelist = Hash.new
 
-
-
+    # we can abort the loop if stop? becomes true
+    while !stop?
     chrono = Time.now.to_i
     # load te registry, compare it's offsets to file list, set offset to 0 for new files, process the whole list and if finished within the interval wait for next loop,
     # TODO: sort by timestamp
@@ -183,14 +183,13 @@ def run(queue)
                 file[:length]=chunk.size
             else
                 chunk = partial_read_json(name, file[:offset], file[:length])
-                #
-                @logger.info(@pipe_id+" partial file #{res[:nsg]} [#{res[:date]}]")
-                @logger.info(@pipe_id+" partial file #{name}")
+                @logger.debug(@pipe_id+" partial file #{name} from #{file[:offset]} to #{file[:length]}")
             end
             if logtype == "nsgflowlog" && @is_json
                 begin
                     fingjson = JSON.parse(chunk)
                     @processed += nsgflowlog(queue, fingjson)
+                    @logger.debug(@pipe_id+" Processed #{res[:nsg]} [#{res[:date]}] #{@processed} events")
                 rescue JSON::ParserError
                     @logger.error(@pipe_id+" parse error on #{res[:nsg]} [#{res[:date]}] offset: #{file[:offset]} length: #{file[:length]}")
                 end
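Note how narrow the rescue in the context lines is: only JSON::ParserError is caught, so malformed JSON is logged and skipped while unrelated failures still surface. The idiom in isolation (hypothetical chunk and logger):

    require 'json'

    begin
      parsed = JSON.parse(chunk)
    rescue JSON::ParserError => e
      # Only malformed JSON lands here; a bare rescue would also hide
      # unrelated errors such as NoMethodError.
      @logger.error("parse error: #{e.message}")
    end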
@@ -198,24 +197,24 @@ def run(queue)
             elsif logtype == "wadiis" && !@is_json
                 @processed += wadiislog(queue, file[:name])
             else
+                counter = 0
                 @codec.decode(chunk) do |event|
+                    counter += 1
                     decorate(event)
                     queue << event
                 end
-                @processed +=
+                @processed += counter
             end
-            # This only applies to NSG!
-            @logger.debug(@pipe_id+" Processed #{res[:nsg]} [#{res[:date]}] #{@processed} events")
             @registry.store(name, { :offset => file[:length], :length => file[:length] })
             # if stop? good moment to stop what we're doing
             if stop?
                 return
             end
-            # save the registry
+            # save the registry past the regular intervals
             now = Time.now.to_i
             if ((now - chrono) > interval)
                 save_registry(@registry)
-
+                chrono =+ interval
             end
         end
         # Save the registry and sleep until the remaining polling interval is over
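One added line deserves a second look: chrono =+ interval. Ruby has no =+ operator; the line parses as chrono = (+interval), so chrono is overwritten with the interval value rather than advanced by it. If the intent was to push the next registry save one interval ahead, += would do that:

    chrono =+ interval   # parses as chrono = (+interval): chrono is replaced
    chrono += interval   # advances chrono by interval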
@@ -223,9 +222,7 @@ def run(queue)
         sleeptime = interval - (Time.now.to_i - chrono)
         Stud.stoppable_sleep(sleeptime) { stop? }
     end
-
-    # event = LogStash::Event.new("message" => @message, "host" => @host)
-end # def run
+end
 
 def stop
     save_registry(@registry)
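For context on the sleep above: Stud.stoppable_sleep, from the stud gem this plugin depends on, sleeps up to the given duration but re-evaluates the block (about once a second by default) and returns early once it yields true. That is what lets the plugin shut down mid-interval instead of blocking for the full polling period. A minimal sketch:

    require 'stud/interval'

    # Sleeps at most 10 seconds, waking as soon as stop? becomes true.
    Stud.stoppable_sleep(10) { stop? }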
@@ -274,11 +271,6 @@ def nsgflowlog(queue, json)
             if (record["properties"]["Version"]==2)
                 ev.merge!( {:flowstate => tups[8], :src_pack => tups[9], :src_bytes => tups[10], :dst_pack => tups[11], :dst_bytes => tups[12]} )
             end
-            # Replaced by new plugin: logstash-filter-lookup
-            # This caused JSON parse errors since iplookup is now obsolete
-            #unless iplookup.nil?
-            #  ev.merge!(addip(tups[1], tups[2]))
-            #end
             @logger.trace(ev.to_s)
             event = LogStash::Event.new('message' => ev.to_json)
             decorate(event)
@@ -354,13 +346,13 @@ def learn_encapsulation
     blob = @blob_client.list_blobs(container, { maxresults: 1, prefix: @prefix }).first
     return if blob.nil?
     blocks = @blob_client.list_blob_blocks(container, blob.name)[:committed]
-    @logger.
+    @logger.debug(@pipe_id+" using #{blob.name} to learn the json header and tail")
     @head = @blob_client.get_blob(container, blob.name, start_range: 0, end_range: blocks.first.size-1)[1]
-    @logger.
+    @logger.debug(@pipe_id+" learned header: #{@head}")
     length = blob.properties[:content_length].to_i
     offset = length - blocks.last.size
     @tail = @blob_client.get_blob(container, blob.name, start_range: offset, end_range: length-1)[1]
-    @logger.
+    @logger.debug(@pipe_id+" learned tail: #{@tail}")
 end
 
 def resource(str)
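On learn_encapsulation: azure-storage-blob's get_blob returns a [blob, content] pair, which is why every call above ends in [1], and start_range/end_range fetch only a byte slice. The method therefore downloads just the first committed block (the JSON head) and the last one (the tail) instead of the whole blob. A sketch under those assumptions (hypothetical container and name variables):

    # get_blob => [blob_properties, content]; read only the first block's bytes.
    first_size = @blob_client.list_blob_blocks(container, name)[:committed].first.size
    _props, head = @blob_client.get_blob(container, name,
                                         start_range: 0, end_range: first_size - 1)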
data/logstash-input-azure_blob_storage.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-input-azure_blob_storage'
-  s.version = '0.10.5'
+  s.version = '0.10.6'
   s.licenses = ['Apache-2.0']
   s.summary = 'This logstash plugin reads and parses data from Azure Storage Blobs.'
   s.description = <<-EOF
@@ -21,7 +21,6 @@ EOF
 
   # Gem dependencies
   s.add_runtime_dependency "logstash-core-plugin-api", "~> 2.0"
-  s.add_runtime_dependency 'logstash-codec-plain', '~> 3.0'
   s.add_runtime_dependency 'stud', '~> 0.0.22'
   s.add_runtime_dependency 'azure-storage-blob', '~> 1.0'
   s.add_development_dependency 'logstash-devutils', '~> 1.0', '>= 1.0.0'
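Dropping logstash-codec-plain as a runtime dependency fits the codec-detection change above: the plugin no longer references a codec class directly, and the standard codecs ship with Logstash itself. Input plugins normally declare their default codec through the config DSL; a generic sketch of that pattern (not this plugin's actual declaration):

    class LogStash::Inputs::Example < LogStash::Inputs::Base
      config_name "example"
      # Resolved at runtime against whichever codec plugins are installed.
      default :codec, "plain"
    end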
data/spec/inputs/azure_blob_storage_spec.rb
CHANGED
@@ -9,10 +9,10 @@ describe LogStash::Inputs::AzureBlobStorage do
   end
 
   def test_helper_methodes
-    assert_equal('b', AzureBlobStorage.val('a=b')
-    assert_equal('whatever', AzureBlobStorage.strip_comma(',whatever')
-    assert_equal('whatever', AzureBlobStorage.strip_comma('whatever,')
-    assert_equal('whatever', AzureBlobStorage.strip_comma(',whatever,')
-    assert_equal('whatever', AzureBlobStorage.strip_comma('whatever')
+    assert_equal('b', AzureBlobStorage.val('a=b'))
+    assert_equal('whatever', AzureBlobStorage.strip_comma(',whatever'))
+    assert_equal('whatever', AzureBlobStorage.strip_comma('whatever,'))
+    assert_equal('whatever', AzureBlobStorage.strip_comma(',whatever,'))
+    assert_equal('whatever', AzureBlobStorage.strip_comma('whatever'))
   end
 end
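Besides restoring the five missing closing parentheses, note that a def inside an RSpec describe block only defines a helper method; its assertions run only when some example calls it. A hypothetical example that would exercise it:

    it "parses key=value pairs and strips commas" do
      test_helper_methodes
    end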
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-azure_blob_storage
 version: !ruby/object:Gem::Version
-  version: 0.10.5
+  version: 0.10.6
 platform: ruby
 authors:
 - Jan Geertsma
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-07
+date: 2019-08-07 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -24,20 +24,6 @@ dependencies:
     - - "~>"
     - !ruby/object:Gem::Version
       version: '2.0'
-- !ruby/object:Gem::Dependency
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-    - !ruby/object:Gem::Version
-      version: '3.0'
-  name: logstash-codec-plain
-  prerelease: false
-  type: :runtime
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-    - !ruby/object:Gem::Version
-      version: '3.0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
   requirements: