logstash-input-azureblob 0.9.9 → 0.9.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 56832bbecde64cfccd20199348a414c0738ed9c1
4
- data.tar.gz: fcf9cf2cae426c7f4b4f94dd7151ebac0b2b2964
3
+ metadata.gz: b3d707b40498cf41aff368f405137d3997abf0e3
4
+ data.tar.gz: 933fb461fa056eef0f8122a91eabc899781d9ac4
5
5
  SHA512:
6
- metadata.gz: cae7c2d9420417e758cea143bd9852e112fc98aed2c0e7040923e15e7379c1acb149f94f7eab73c73b50e67dde8869233d10341dfa6f2594078e4104d46c294e
7
- data.tar.gz: 0962291e17c6f9e9c3eea68e898cb3ccdd0ba8ded581b7d6d087958155f71100ac4013ea2306264b5b9d36d88567f40804a51cc77b7902c9b4daf257a4064049
6
+ metadata.gz: e21058e38226798a4b2ff8089e6e84a80fba966b1e2909c02a9f6bb109120426e7bbb7d377a876f393421c05b4f81fb92a62b8f6d82f890e8d90fe6e5695dff4
7
+ data.tar.gz: e63cf4d75d6310ebf8964d52ba546eb4a0bd325510e40af990862e327d16bfb2618c1a238c70fbc3f4227c222d0d19da79146abbdb9f84218226560716318a2b
data/README.md CHANGED
@@ -134,7 +134,7 @@ filter {
134
134
  #
135
135
  if [bytesSent] {
136
136
  ruby {
137
- code => "event['kilobytesSent'] = event['bytesSent'].to_i / 1024.0"
137
+ code => "event.set('kilobytesSent', event.get('bytesSent').to_i / 1024.0)"
138
138
  }
139
139
  }
140
140
 
@@ -142,7 +142,7 @@ filter {
142
142
  #
143
143
  if [bytesReceived] {
144
144
  ruby {
145
- code => "event['kilobytesReceived'] = event['bytesReceived'].to_i / 1024.0"
145
+ code => "event.set('kilobytesReceived', event.get('bytesReceived').to_i / 1024.0 )"
146
146
  }
147
147
  }
148
148
 
@@ -66,6 +66,13 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
66
66
  # The default, `data/registry`, is used to coordinate readings for various instances of the clients.
67
67
  config :registry_path, :validate => :string, :default => 'data/registry'
68
68
 
69
+ # Sets the value for registry file lock duration in seconds. It must be set to -1, or between 15 and 60 inclusive.
70
+ #
71
+ # The default, `15`, means the registry file will be locked for at most 15 seconds. This should usually be sufficient to
72
+ # read the content of the registry. This configuration allows the lease to expire in case a client crashed and
73
+ # never got a chance to release the lease for the registry.
74
+ config :registry_lease_duration, :validate => :number, :default => 15
75
+
69
76
  # Set how many seconds to keep idle before checking for new logs.
70
77
  #
71
78
  # The default, `30`, means trigger a reading for the log every 30 seconds after entering idle.
@@ -155,26 +162,26 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
155
162
  blob, content = @azure_blob.get_blob(@container, blob_name, {:start_range => start_index} )
156
163
 
157
164
  # content will be used to calculate the new offset. Create a new variable for processed content.
158
- processed_content = content
165
+ content_length = content.length unless content.nil?
159
166
 
160
167
  is_json_codec = (defined?(LogStash::Codecs::JSON) == 'constant') && (@codec.is_a? LogStash::Codecs::JSON)
161
168
  if (is_json_codec)
162
- skip = processed_content.index '{'
163
- processed_content = processed_content[skip..-1] unless skip.nil?
169
+ skip = content.index '{'
170
+ content.slice!(skip-1) unless (skip.nil? || skip == 0)
164
171
  end #if
165
172
 
166
173
  if is_json_codec && (@break_json_down_policy != 'do_not_break')
167
174
  @logger.debug("codec is json and policy is not do_not_break")
168
175
 
169
- @break_json_batch_count = 1 if break_json_batch_count <= 0
170
- tail = processed_content[-@file_tail_bytes..-1]
171
- while (!processed_content.nil? && processed_content.length > @file_tail_bytes)
172
- json_event, processed_content = get_jsons(processed_content, @break_json_batch_count)
176
+ @break_json_batch_count = 1 if @break_json_batch_count <= 0
177
+ tail = content[-@file_tail_bytes..-1]
178
+ while (!content.nil? && content.length > @file_tail_bytes)
179
+ json_event = get_jsons!(content, @break_json_batch_count)
180
+ break if json_event.nil?
173
181
  @logger.debug("Got json: ========================")
174
182
  @logger.debug("#{json_event[0..50]}...#{json_event[-50..-1]}")
175
183
  @logger.debug("End got json: ========================")
176
- @logger.debug("Processed content: #{processed_content[0..50]}...")
177
- break if json_event.nil?
184
+ @logger.debug("Processed content: #{content[0..50]}...")
178
185
  if @break_json_down_policy == 'with_head_tail'
179
186
  @logger.debug("Adding json head/tails.")
180
187
  json_event = "#{header}#{json_event}#{tail}"
@@ -187,8 +194,8 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
187
194
  else
188
195
  @logger.debug("Non-json codec or the policy is do not break")
189
196
  # Putting header and content and tail together before pushing into event queue
190
- processed_content = "#{header}#{processed_content}" unless header.nil? || header.length == 0
191
- @codec.decode(processed_content) do |event|
197
+ content = "#{header}#{content}" unless header.nil? || header.length == 0
198
+ @codec.decode(content) do |event|
192
199
  decorate(event)
193
200
  queue << event
194
201
  end # decode
@@ -197,7 +204,7 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
197
204
  # Making sure the reader is removed from the registry even when there's exception.
198
205
  new_offset = start_index
199
206
  new_offset = 0 if start_index == @file_head_bytes && content.nil? # Reset the offset when nothing has been read.
200
- new_offset = new_offset + content.length unless content.nil?
207
+ new_offset = new_offset + content_length unless content_length.nil?
201
208
  new_registry_item = LogStash::Inputs::RegistryItem.new(blob_name, new_etag, nil, new_offset, gen)
202
209
  update_registry(new_registry_item)
203
210
  end # begin
@@ -207,46 +214,51 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
207
214
  end # begin
208
215
  end # process
209
216
 
210
- # Get first json object out of a string, return the rest of the string
211
- def get_jsons(content, batch_size)
212
- return nil, content, 0 if content.nil? || content.length == 0
213
- return nil, content, 0 if (content.index '{').nil?
214
-
215
- hit = 0
216
- count = 0
217
- index = 0
218
- first = content.index('{')
219
- move_opening = true
220
- move_closing = true
221
- while(hit < batch_size)
222
- inIndex = content.index('{', index) if move_opening
223
- outIndex = content.index('}', index) if move_closing
224
-
225
- # TODO: Fix the ending condition
226
- break if count == 0 && (inIndex.nil? || outIndex.nil?)
227
-
228
- if(inIndex.nil?)
229
- index = outIndex
230
- elsif(outIndex.nil?)
231
- index = inIndex
232
- else
233
- index = [inIndex, outIndex].min
234
- end #if
235
- if content[index] == '{'
236
- count += 1
237
- move_opening = true
238
- move_closing = false
239
- elsif content[index] == '}'
240
- count -= 1
241
- move_closing = true
242
- move_opening = false
243
- end #if
244
- index += 1
245
- hit += 1 if count == 0
246
- end
217
+ # Get json objects out of a string and return them. Note: content will be updated in place as well.
218
+ def get_jsons!(content, batch_size)
219
+ return nil if content.nil? || content.length == 0
220
+ return nil if (content.index '{').nil?
221
+
222
+ hit = 0
223
+ count = 0
224
+ index = 0
225
+ first = content.index('{')
226
+ move_opening = true
227
+ move_closing = true
228
+ while(hit < batch_size)
229
+ inIndex = content.index('{', index) if move_opening
230
+ outIndex = content.index('}', index) if move_closing
231
+
232
+ break if count == 0 && (inIndex.nil? || outIndex.nil?)
247
233
 
248
- return content[first..index-1], content[index..-1], hit
249
- end #def get_first_json
234
+ if(inIndex.nil?)
235
+ index = outIndex
236
+ elsif(outIndex.nil?)
237
+ index = inIndex
238
+ else
239
+ index = [inIndex, outIndex].min
240
+ end #if
241
+
242
+ if content[index] == '{'
243
+ count += 1
244
+ move_opening = true
245
+ move_closing = false
246
+ elsif content[index] == '}'
247
+ count -= 1
248
+ move_closing = true
249
+ move_opening = false
250
+ end #if
251
+ index += 1
252
+
253
+ if (count < 0)
254
+ throw "Malformed json encountered."
255
+ end #if
256
+ hit += 1 if count == 0
257
+ end
258
+ # slice left and then right to make sure the leading characters are trimmed.
259
+ content.slice!(0, first) if first > 0
260
+ return content.slice!(0, index-first)
261
+ end #def get_jsons!
250
262
 
251
263
  # Deserialize registry hash from json string.
252
264
  def deserialize_registry_hash (json_string)
@@ -306,7 +318,7 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
306
318
  retried = 0;
307
319
  while lease.nil? do
308
320
  begin
309
- lease = @azure_blob.acquire_blob_lease(@container, blob_name, {:timeout => 10})
321
+ lease = @azure_blob.acquire_blob_lease(@container, blob_name, { :timeout => 10, :duration => @registry_lease_duration })
310
322
  rescue StandardError => e
311
323
  if(e.type == 'LeaseAlreadyPresent')
312
324
  if (retried > retry_times)
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-input-azureblob'
3
- s.version = '0.9.9'
3
+ s.version = '0.9.10'
4
4
  s.licenses = ['Apache License (2.0)']
5
5
  s.summary = 'This plugin collects Microsoft Azure Diagnostics data from Azure Storage Blobs.'
6
6
  s.description = 'This gem is a Logstash plugin. It reads and parses data from Azure Storage Blobs.'
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-input-azureblob
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.9.9
4
+ version: 0.9.10
5
5
  platform: ruby
6
6
  authors:
7
7
  - Microsoft Corporation
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2017-08-31 00:00:00.000000000 Z
11
+ date: 2017-09-18 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement