logstash-input-azureblob 0.9.12-java → 0.9.13-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
- SHA256:
3
- metadata.gz: 5947577417b1f859db0712b7c414a536f4456a359d222d5069cc400d8a4ceb50
4
- data.tar.gz: 2c57b9f7ec19871095b19f0eb7fa07f05e3d7b67386b7815ee583331d50a10f6
2
+ SHA1:
3
+ metadata.gz: 7f323b4769713e9d8bd2d7c9938bc041e66204aa
4
+ data.tar.gz: ee3d8f15d8a82de85c291f9d7b1b7805eb06dcb9
5
5
  SHA512:
6
- metadata.gz: dd9c54213183b732055ccf15470b41e0428933f942ac23911abefa9a535453e7b01721a922ad5a3677d90a581ddd4603628fdfc5655682a66fe6fa9045cdf737
7
- data.tar.gz: 11ea4a6e8d69e1640bcbc078c7d01bc93b2f9a5cf45d30c6a2d12357f8cdb30dbbe482ad8a44a08eb3bbd6ac8808766b5c9b5c1ae8fcbf5321c23092103d68e0
6
+ metadata.gz: 834c5214f404e5d11f65d5646cca6756e68381d7856b35d66820e2994c16150700e1af42c48d0cd10fc7f8157a6d42b575bf062db6bda5428c26d19a567172a7
7
+ data.tar.gz: 24e6f64372cdb4c087b39969e7ec252e926f769734850aec0eac0f3589dcf9cd0ea24683e5c7d32589f5c413419cded6be43f74b7b6a4734085b2d249282d392
data/README.md CHANGED
@@ -25,6 +25,14 @@ __*container*__
25
25
  The blob container name.
26
26
 
27
27
  ### Optional Parameters
28
+ __*path_filters*__
29
+
30
+ The path(s) to the file(s) to use as an input. By default it will watch every file in the storage container. You can use filename patterns here, such as `logs/*.log`. If you use a pattern like `logs/**/*.log`, a recursive search of `logs` will be done for all `*.log` files.
31
+
32
+ Do not include a leading `/`, as Azure paths look like this: `path/to/blob/file.txt`
33
+
34
+ You may also configure multiple paths. See an example on the [Logstash configuration page](http://www.elastic.co/guide/en/logstash/current/configuration-file-structure.html#array).
35
+
28
36
  __*endpoint*__
29
37
 
30
38
  Specifies the endpoint of Azure Service Management. The default value is `core.windows.net`.
@@ -203,9 +211,6 @@ input {
203
211
  # Typical numbers could be 21/9 or 12/2 depends on the nsg log file types
204
212
  file_head_bytes => 21
205
213
  file_tail_bytes => 9
206
- # Enable / tweak these settings when event is too big for codec to handle.
207
- # break_json_down_policy => "with_head_tail"
208
- # break_json_batch_count => 2
209
214
  }
210
215
  }
211
216
 
@@ -250,4 +255,4 @@ input {
250
255
  ```
251
256
 
252
257
  ## More information
253
- The source code of this plugin is hosted in GitHub repo [Microsoft Azure Diagnostics with ELK](https://github.com/Azure/azure-diagnostics-tools). We welcome you to provide feedback and/or contribute to the project.
258
+ The source code of this plugin is hosted in GitHub repo [Microsoft Azure Diagnostics with ELK](https://github.com/Azure/azure-diagnostics-tools). We welcome you to provide feedback and/or contribute to the project.
@@ -37,25 +37,36 @@ class LogStash::Inputs::RegistryItem
37
37
  end # initialize
38
38
  end # class RegistryItem
39
39
 
40
-
41
40
  # Logstash input plugin for Azure Blobs
42
41
  #
43
42
  # This logstash plugin gathers data from Microsoft Azure Blobs
44
43
  class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
45
- config_name "azureblob"
44
+ config_name 'azureblob'
46
45
 
47
46
  # If undefined, Logstash will complain, even if codec is unused.
48
- default :codec, "json_lines"
47
+ default :codec, 'json_lines'
49
48
 
50
49
  # Set the account name for the azure storage account.
51
50
  config :storage_account_name, :validate => :string
52
-
51
+
53
52
  # Set the key to access the storage account.
54
53
  config :storage_access_key, :validate => :string
55
-
54
+
56
55
  # Set the container of the blobs.
57
56
  config :container, :validate => :string
58
57
 
58
+ # The path(s) to the file(s) to use as an input. By default it will
59
+ # watch every file in the storage container.
60
+ # You can use filename patterns here, such as `logs/*.log`.
61
+ # If you use a pattern like `logs/**/*.log`, a recursive search
62
+ # of `logs` will be done for all `*.log` files.
63
+ # Do not include a leading `/`, as Azure paths look like this:
64
+ # `path/to/blob/file.txt`
65
+ #
66
+ # You may also configure multiple paths. See an example
67
+ # on the <<array,Logstash configuration page>>.
68
+ config :path_filters, :validate => :array, :default => [], :required => false
69
+
59
70
  # Set the endpoint for the blobs.
60
71
  #
61
72
  # The default, `core.windows.net` targets the public azure.
@@ -63,12 +74,12 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
63
74
 
64
75
  # Set the value of using backup mode.
65
76
  config :backupmode, :validate => :boolean, :default => false, :deprecated => true, :obsolete => 'This option is obsoleted and the settings will be ignored.'
66
-
77
+
67
78
  # Set the value for the registry file.
68
79
  #
69
80
  # The default, `data/registry`, is used to coordinate readings for various instances of the clients.
70
81
  config :registry_path, :validate => :string, :default => 'data/registry'
71
-
82
+
72
83
  # Sets the value for registry file lock duration in seconds. It must be set to -1, or between 15 to 60 inclusively.
73
84
  #
74
85
  # The default, `15` means the registry file will be locked for at most 15 seconds. This should usually be sufficient to
@@ -113,20 +124,19 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
113
124
  config :file_chunk_size_bytes, :validate => :number, :default => 4 * 1024 * 1024
114
125
 
115
126
  # Constant of max integer
116
- MAX = 2 ** ([42].pack('i').size * 16 - 2 ) -1
127
+ MAX = 2**([42].pack('i').size * 16 - 2) - 1
117
128
 
118
129
  # Update the registry offset each time after this number of entries have been processed
119
130
  UPDATE_REGISTRY_COUNT = 100
120
131
 
121
132
  public
122
133
  def register
123
- user_agent = "logstash-input-azureblob"
124
- user_agent << "/" << Gem.latest_spec_for("logstash-input-azureblob").version.to_s
125
-
134
+ user_agent = 'logstash-input-azureblob'
135
+ user_agent << '/' << Gem.latest_spec_for('logstash-input-azureblob').version.to_s
136
+
126
137
  # this is the reader # for this specific instance.
127
138
  @reader = SecureRandom.uuid
128
- @registry_locker = "#{@registry_path}.lock"
129
-
139
+
130
140
  # Setup a specific instance of an Azure::Storage::Client
131
141
  client = Azure::Storage::Client.create(:storage_account_name => @storage_account_name, :storage_access_key => @storage_access_key, :storage_blob_host => "https://#{@storage_account_name}.blob.#{@endpoint}", :user_agent_prefix => user_agent)
132
142
  # Get an azure storage blob service object from a specific instance of an Azure::Storage::Client
@@ -139,7 +149,7 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
139
149
  # we can abort the loop if stop? becomes true
140
150
  while !stop?
141
151
  process(queue)
142
- @logger.debug("Hitting interval of #{@interval}ms . . .")
152
+ @logger.debug("Hitting interval of #{@interval}s . . .")
143
153
  Stud.stoppable_sleep(@interval) { stop? }
144
154
  end # loop
145
155
  end # def run
@@ -147,14 +157,14 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
147
157
  def stop
148
158
  cleanup_registry
149
159
  end # def stop
150
-
160
+
151
161
  # Start processing the next item.
152
162
  def process(queue)
153
163
  begin
154
164
  @processed_entries = 0
155
165
  blob, start_index, gen = register_for_read
156
166
 
157
- if(!blob.nil?)
167
+ unless blob.nil?
158
168
  begin
159
169
  blob_name = blob.name
160
170
  @logger.debug("Processing blob #{blob.name}")
@@ -182,7 +192,7 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
182
192
  parser = JsonParser.new(@logger, blob_reader)
183
193
 
184
194
  parser.parse(->(json_content) {
185
- content_length = content_length + json_content.length
195
+ content_length += json_content.length
186
196
 
187
197
  enqueue_content(queue, json_content, header, tail)
188
198
 
@@ -197,7 +207,7 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
197
207
  begin
198
208
  content, are_more_bytes_available = blob_reader.read
199
209
 
200
- content_length = content_length + content.length
210
+ content_length += content.length
201
211
  enqueue_content(queue, content, header, tail)
202
212
 
203
213
  on_entry_processed(start_index, content_length, blob_name, new_etag, gen)
@@ -208,7 +218,7 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
208
218
  # Making sure the reader is removed from the registry even when there's exception.
209
219
  request_registry_update(start_index, content_length, blob_name, new_etag, gen)
210
220
  end # begin
211
- end # if
221
+ end # unless
212
222
  rescue => e
213
223
  @logger.error("Oh My, An error occurred. Error:#{e}: Trace: #{e.backtrace}", :exception => e)
214
224
  end # begin
@@ -219,28 +229,26 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
219
229
  #skip some unnecessary copying
220
230
  full_content = content
221
231
  else
222
- full_content = ""
232
+ full_content = ''
223
233
  full_content << header unless header.nil? || header.length == 0
224
234
  full_content << content
225
235
  full_content << tail unless tail.nil? || tail.length == 0
226
236
  end
227
-
237
+
228
238
  @codec.decode(full_content) do |event|
229
239
  decorate(event)
230
240
  queue << event
231
- end
241
+ end
232
242
  end
233
243
 
234
244
  def on_entry_processed(start_index, content_length, blob_name, new_etag, gen)
235
- @processed_entries = @processed_entries + 1
236
- if @processed_entries % UPDATE_REGISTRY_COUNT == 0
237
- request_registry_update(start_index, content_length, blob_name, new_etag, gen)
238
- end
245
+ @processed_entries += 1
246
+ request_registry_update(start_index, content_length, blob_name, new_etag, gen) if @processed_entries % UPDATE_REGISTRY_COUNT == 0
239
247
  end
240
-
248
+
241
249
  def request_registry_update(start_index, content_length, blob_name, new_etag, gen)
242
250
  new_offset = start_index
243
- new_offset = new_offset + content_length unless content_length.nil?
251
+ new_offset += content_length unless content_length.nil?
244
252
  @logger.debug("New registry offset: #{new_offset}")
245
253
  new_registry_item = LogStash::Inputs::RegistryItem.new(blob_name, new_etag, nil, new_offset, gen)
246
254
  update_registry(new_registry_item)
@@ -264,9 +272,20 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
264
272
  loop do
265
273
  # Need to limit the returned number of the returned entries to avoid out of memory exception.
266
274
  entries = @azure_blob.list_blobs(@container, { :timeout => 60, :marker => continuation_token, :max_results => @blob_list_page_size })
267
- entries.each do |entry|
268
- blobs << entry
269
- end # each
275
+ if @path_filters.empty?
276
+ entries.each do |entry|
277
+ blobs << entry
278
+ end # each
279
+ else
280
+ # Add the registry_path to the list of matched blobs
281
+ @path_filters << @registry_path
282
+ entries.each do |entry|
283
+ # FNM_PATHNAME is required so that "**/test" can match "test" at the root folder
284
+ # FNM_EXTGLOB allows you to use "test{a,b,c}" to match either "testa", "testb" or "testc" (closer to shell behavior)
285
+ matched = @path_filters.any? {|path| File.fnmatch?(path, entry.name, File::FNM_PATHNAME | File::FNM_EXTGLOB)}
286
+ blobs << entry if matched
287
+ end # each
288
+ end
270
289
  continuation_token = entries.continuation_token
271
290
  break if continuation_token.empty?
272
291
  end # loop
@@ -306,7 +325,7 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
306
325
  begin
307
326
  lease = @azure_blob.acquire_blob_lease(@container, blob_name, { :timeout => 60, :duration => @registry_lease_duration })
308
327
  rescue StandardError => e
309
- if(e.type && e.type == 'LeaseAlreadyPresent')
328
+ if (e.class.name.include? 'LeaseAlreadyPresent')
310
329
  if (retried > retry_times)
311
330
  raise
312
331
  end
@@ -315,7 +334,7 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
315
334
  else
316
335
  # Anything else happened other than 'LeaseAlreadyPresent', break the lease. This is a work-around for the behavior that when
317
336
  # timeout exception is hit, somehow, an infinite lease will be put on the lock file.
318
- @azure_blob.break_blob_lease(@container, blob, { :break_period => 30 })
337
+ @azure_blob.break_blob_lease(@container, blob_name, { :break_period => 30 })
319
338
  end
320
339
  end
321
340
  end #while
@@ -326,27 +345,22 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
326
345
  def register_for_read
327
346
  begin
328
347
  all_blobs = list_all_blobs
329
- registry = all_blobs.find { |item| item.name.downcase == @registry_path }
330
- registry_locker = all_blobs.find { |item| item.name.downcase == @registry_locker }
331
-
332
- candidate_blobs = all_blobs.select { |item| (item.name.downcase != @registry_path) && ( item.name.downcase != @registry_locker ) }
348
+ registry = all_blobs.find { |item| item.name.downcase == @registry_path }
333
349
 
350
+ candidate_blobs = all_blobs.select { |item| (item.name.downcase != @registry_path) }
351
+
334
352
  start_index = 0
335
353
  gen = 0
336
354
  lease = nil
337
355
 
338
- # Put lease on locker file than the registy file to allow update of the registry as a workaround for Azure Storage Ruby SDK issue # 16.
339
- # Workaround: https://github.com/Azure/azure-storage-ruby/issues/16
340
- registry_locker = @azure_blob.create_block_blob(@container, @registry_locker, @reader) if registry_locker.nil?
341
- lease = acquire_lease(@registry_locker)
342
- # ~ Workaround
343
-
344
- if(registry.nil?)
356
+ if registry.nil?
345
357
  registry_hash = create_registry(candidate_blobs)
358
+ lease = acquire_lease(@registry_path)
346
359
  else
360
+ lease = acquire_lease(@registry_path)
347
361
  registry_hash = load_registry
348
362
  end #if
349
-
363
+
350
364
  picked_blobs = Set.new []
351
365
  # Pick up the next candidate
352
366
  picked_blob = nil
@@ -367,45 +381,45 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
367
381
  }
368
382
 
369
383
  picked_blob = picked_blobs.min_by { |b| registry_hash[b.name].gen }
370
- if !picked_blob.nil?
384
+ unless picked_blob.nil?
371
385
  registry_item = registry_hash[picked_blob.name]
372
386
  registry_item.reader = @reader
373
387
  registry_hash[picked_blob.name] = registry_item
374
388
  start_index = registry_item.offset
375
389
  raise_gen(registry_hash, picked_blob.name)
376
390
  gen = registry_item.gen
377
- end #if
391
+ end # unless
378
392
 
379
- # Save the chnage for the registry
380
- save_registry(registry_hash)
381
-
382
- @azure_blob.release_blob_lease(@container, @registry_locker, lease)
383
- lease = nil;
393
+ # Save the change for the registry
394
+ save_registry(registry_hash, lease)
395
+
396
+ @azure_blob.release_blob_lease(@container, @registry_path, lease)
397
+ lease = nil
384
398
 
385
399
  return picked_blob, start_index, gen
386
400
  rescue StandardError => e
387
401
  @logger.error("Oh My, An error occurred. #{e}: #{e.backtrace}", :exception => e)
388
402
  return nil, nil, nil
389
403
  ensure
390
- @azure_blob.release_blob_lease(@container, @registry_locker, lease) unless lease.nil?
404
+ @azure_blob.release_blob_lease(@container, @registry_path, lease) unless lease.nil?
391
405
  lease = nil
392
406
  end # rescue
393
407
  end #register_for_read
394
408
 
395
409
  # Update the registry
396
- def update_registry (registry_item)
410
+ def update_registry(registry_item)
397
411
  begin
398
412
  lease = nil
399
- lease = acquire_lease(@registry_locker)
413
+ lease = acquire_lease(@registry_path)
400
414
  registry_hash = load_registry
401
415
  registry_hash[registry_item.file_path] = registry_item
402
- save_registry(registry_hash)
403
- @azure_blob.release_blob_lease(@container, @registry_locker, lease)
416
+ save_registry(registry_hash, lease)
417
+ @azure_blob.release_blob_lease(@container, @registry_path, lease)
404
418
  lease = nil
405
419
  rescue StandardError => e
406
420
  @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
407
421
  ensure
408
- @azure_blob.release_blob_lease(@container, @registry_locker, lease) unless lease.nil?
422
+ @azure_blob.release_blob_lease(@container, @registry_path, lease) unless lease.nil?
409
423
  lease = nil
410
424
  end #rescue
411
425
  end # def update_registry
@@ -413,52 +427,56 @@ class LogStash::Inputs::LogstashInputAzureblob < LogStash::Inputs::Base
413
427
  # Clean up the registry.
414
428
  def cleanup_registry
415
429
  begin
430
+ @logger.debug("azureblob : start cleanup_registry")
416
431
  lease = nil
417
- lease = acquire_lease(@registry_locker)
432
+ lease = acquire_lease(@registry_path)
418
433
  registry_hash = load_registry
419
434
  registry_hash.each { | key, registry_item|
420
435
  registry_item.reader = nil if registry_item.reader == @reader
421
436
  }
422
- save_registry(registry_hash)
423
- @azure_blob.release_blob_lease(@container, @registry_locker, lease)
437
+ save_registry(registry_hash, lease)
438
+ @azure_blob.release_blob_lease(@container, @registry_path, lease)
424
439
  lease = nil
425
440
  rescue StandardError => e
426
441
  @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
427
442
  ensure
428
- @azure_blob.release_blob_lease(@container, @registry_locker, lease) unless lease.nil?
443
+ @azure_blob.release_blob_lease(@container, @registry_path, lease) unless lease.nil?
429
444
  lease = nil
430
445
  end #rescue
446
+ @logger.debug("azureblob : End of cleanup_registry")
431
447
  end # def cleanup_registry
432
448
 
433
449
  # Create a registry file to coordinate between multiple azure blob inputs.
434
- def create_registry (blob_items)
450
+ def create_registry(blob_items)
451
+ @azure_blob.create_block_blob(@container, @registry_path, '')
452
+ lease = acquire_lease(@registry_path)
435
453
  registry_hash = Hash.new
436
-
437
454
  blob_items.each do |blob_item|
438
- initial_offset = 0
439
- initial_offset = blob_item.properties[:content_length] if @registry_create_policy == 'resume'
440
- registry_item = LogStash::Inputs::RegistryItem.new(blob_item.name, blob_item.properties[:etag], nil, initial_offset, 0)
455
+ initial_offset = 0
456
+ initial_offset = blob_item.properties[:content_length] if @registry_create_policy == 'resume'
457
+ registry_item = LogStash::Inputs::RegistryItem.new(blob_item.name, blob_item.properties[:etag], nil, initial_offset, 0)
441
458
  registry_hash[blob_item.name] = registry_item
442
459
  end # each
443
- save_registry(registry_hash)
444
- return registry_hash
460
+ save_registry(registry_hash, lease)
461
+ @azure_blob.release_blob_lease(@container, @registry_path, lease)
462
+ registry_hash
445
463
  end # create_registry
446
464
 
447
465
  # Load the content of the registry into the registry hash and return it.
448
466
  def load_registry
449
467
  # Get content
450
- registry_blob, registry_blob_body = @azure_blob.get_blob(@container, @registry_path)
468
+ _registry_blob, registry_blob_body = @azure_blob.get_blob(@container, @registry_path)
451
469
  registry_hash = deserialize_registry_hash(registry_blob_body)
452
- return registry_hash
470
+ registry_hash
453
471
  end # def load_registry
454
472
 
455
473
  # Serialize the registry hash and save it.
456
- def save_registry(registry_hash)
474
+ def save_registry(registry_hash, lease_id)
457
475
  # Serialize hash to json
458
476
  registry_hash_json = JSON.generate(registry_hash)
459
477
 
460
478
  # Upload registry to blob
461
- @azure_blob.create_block_blob(@container, @registry_path, registry_hash_json)
479
+ @azure_blob.create_block_blob(@container, @registry_path, registry_hash_json, lease_id: lease_id)
462
480
  end # def save_registry
463
481
  end # class LogStash::Inputs::LogstashInputAzureblob
464
482
 
@@ -493,8 +511,9 @@ class BlobReader < LinearReader
493
511
  end
494
512
 
495
513
  private
514
+
496
515
  def read_from_blob(start_index, end_index)
497
- blob, content = @azure_blob.get_blob(@container, @blob_name, {:start_range => start_index, :end_range => end_index } )
516
+ _blob, content = @azure_blob.get_blob(@container, @blob_name, {:start_range => start_index, :end_range => end_index } )
498
517
  return content
499
518
  end
500
- end #class BlobReader
519
+ end #class BlobReader
@@ -1,8 +1,8 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-input-azureblob'
3
- s.version = '0.9.12'
4
- s.platform = "java"
5
- s.licenses = ['Apache License (2.0)']
3
+ s.version = '0.9.13'
4
+ s.platform = 'java'
5
+ s.licenses = ['Apache-2.0']
6
6
  s.summary = 'This plugin collects Microsoft Azure Diagnostics data from Azure Storage Blobs.'
7
7
  s.description = 'This gem is a Logstash plugin. It reads and parses data from Azure Storage Blobs.'
8
8
  s.homepage = 'https://github.com/Azure/azure-diagnostics-tools'
@@ -11,22 +11,22 @@ Gem::Specification.new do |s|
11
11
  s.require_paths = ['lib']
12
12
 
13
13
  # Files
14
- s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','Gemfile','LICENSE']
14
+ s.files = Dir['lib/**/*', 'spec/**/*', 'vendor/**/*', '*.gemspec', '*.md', 'Gemfile', 'LICENSE']
15
15
  # Tests
16
16
  s.test_files = s.files.grep(%r{^(test|spec|features)/})
17
17
 
18
18
  # Special flag to let us know this is actually a logstash plugin
19
- s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }
19
+ s.metadata = { 'logstash_plugin' => 'true', 'logstash_group' => 'input' }
20
20
 
21
21
  # Gem dependencies
22
- s.add_runtime_dependency "logstash-core-plugin-api", '>= 1.60', '<= 2.99'
23
- s.add_runtime_dependency 'logstash-codec-json_lines'
24
- s.add_runtime_dependency 'stud', '>= 0.0.22'
25
- s.add_runtime_dependency 'azure-storage', '~> 0.12.3.preview'
26
- s.add_development_dependency 'logstash-devutils'
27
- s.add_development_dependency 'logging'
22
+ s.add_runtime_dependency 'logstash-core-plugin-api', '>= 1.60', '<= 2.99'
23
+ s.add_runtime_dependency 'logstash-codec-json_lines', '~> 3'
24
+ s.add_runtime_dependency 'stud', '~> 0.0', '>= 0.0.22'
25
+ s.add_runtime_dependency 'azure-storage', '~> 0.15.0.preview'
26
+ s.add_development_dependency 'logstash-devutils', '~> 1'
27
+ s.add_development_dependency 'logging', '~> 2'
28
28
 
29
29
  # Jar dependencies
30
30
  s.requirements << "jar 'org.glassfish:javax.json', '1.1'"
31
- s.add_runtime_dependency 'jar-dependencies'
31
+ s.add_runtime_dependency 'jar-dependencies', '~> 0'
32
32
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-input-azureblob
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.9.12
4
+ version: 0.9.13
5
5
  platform: java
6
6
  authors:
7
7
  - Microsoft Corporation
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2017-10-03 00:00:00.000000000 Z
11
+ date: 2018-09-27 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement
@@ -33,20 +33,23 @@ dependencies:
33
33
  - !ruby/object:Gem::Dependency
34
34
  requirement: !ruby/object:Gem::Requirement
35
35
  requirements:
36
- - - ">="
36
+ - - "~>"
37
37
  - !ruby/object:Gem::Version
38
- version: '0'
38
+ version: '3'
39
39
  name: logstash-codec-json_lines
40
40
  prerelease: false
41
41
  type: :runtime
42
42
  version_requirements: !ruby/object:Gem::Requirement
43
43
  requirements:
44
- - - ">="
44
+ - - "~>"
45
45
  - !ruby/object:Gem::Version
46
- version: '0'
46
+ version: '3'
47
47
  - !ruby/object:Gem::Dependency
48
48
  requirement: !ruby/object:Gem::Requirement
49
49
  requirements:
50
+ - - "~>"
51
+ - !ruby/object:Gem::Version
52
+ version: '0.0'
50
53
  - - ">="
51
54
  - !ruby/object:Gem::Version
52
55
  version: 0.0.22
@@ -55,6 +58,9 @@ dependencies:
55
58
  type: :runtime
56
59
  version_requirements: !ruby/object:Gem::Requirement
57
60
  requirements:
61
+ - - "~>"
62
+ - !ruby/object:Gem::Version
63
+ version: '0.0'
58
64
  - - ">="
59
65
  - !ruby/object:Gem::Version
60
66
  version: 0.0.22
@@ -63,7 +69,7 @@ dependencies:
63
69
  requirements:
64
70
  - - "~>"
65
71
  - !ruby/object:Gem::Version
66
- version: 0.12.3.preview
72
+ version: 0.15.0.preview
67
73
  name: azure-storage
68
74
  prerelease: false
69
75
  type: :runtime
@@ -71,39 +77,39 @@ dependencies:
71
77
  requirements:
72
78
  - - "~>"
73
79
  - !ruby/object:Gem::Version
74
- version: 0.12.3.preview
80
+ version: 0.15.0.preview
75
81
  - !ruby/object:Gem::Dependency
76
82
  requirement: !ruby/object:Gem::Requirement
77
83
  requirements:
78
- - - ">="
84
+ - - "~>"
79
85
  - !ruby/object:Gem::Version
80
- version: '0'
86
+ version: '1'
81
87
  name: logstash-devutils
82
88
  prerelease: false
83
89
  type: :development
84
90
  version_requirements: !ruby/object:Gem::Requirement
85
91
  requirements:
86
- - - ">="
92
+ - - "~>"
87
93
  - !ruby/object:Gem::Version
88
- version: '0'
94
+ version: '1'
89
95
  - !ruby/object:Gem::Dependency
90
96
  requirement: !ruby/object:Gem::Requirement
91
97
  requirements:
92
- - - ">="
98
+ - - "~>"
93
99
  - !ruby/object:Gem::Version
94
- version: '0'
100
+ version: '2'
95
101
  name: logging
96
102
  prerelease: false
97
103
  type: :development
98
104
  version_requirements: !ruby/object:Gem::Requirement
99
105
  requirements:
100
- - - ">="
106
+ - - "~>"
101
107
  - !ruby/object:Gem::Version
102
- version: '0'
108
+ version: '2'
103
109
  - !ruby/object:Gem::Dependency
104
110
  requirement: !ruby/object:Gem::Requirement
105
111
  requirements:
106
- - - ">="
112
+ - - "~>"
107
113
  - !ruby/object:Gem::Version
108
114
  version: '0'
109
115
  name: jar-dependencies
@@ -111,11 +117,10 @@ dependencies:
111
117
  type: :runtime
112
118
  version_requirements: !ruby/object:Gem::Requirement
113
119
  requirements:
114
- - - ">="
120
+ - - "~>"
115
121
  - !ruby/object:Gem::Version
116
122
  version: '0'
117
- description: This gem is a Logstash plugin. It reads and parses data from Azure Storage
118
- Blobs.
123
+ description: This gem is a Logstash plugin. It reads and parses data from Azure Storage Blobs.
119
124
  email: azdiag@microsoft.com
120
125
  executables: []
121
126
  extensions: []
@@ -126,15 +131,13 @@ files:
126
131
  - LICENSE
127
132
  - README.md
128
133
  - lib/com/microsoft/json-parser.rb
129
- - lib/logstash-input-azureblob_jars.rb
130
134
  - lib/logstash/inputs/azureblob.rb
131
- - lib/org/glassfish/javax.json/1.1/javax.json-1.1.jar
132
135
  - logstash-input-azureblob.gemspec
133
136
  - spec/com/microsoft/json-parser_spec.rb
134
137
  - spec/inputs/azureblob_spec.rb
135
138
  homepage: https://github.com/Azure/azure-diagnostics-tools
136
139
  licenses:
137
- - Apache License (2.0)
140
+ - Apache-2.0
138
141
  metadata:
139
142
  logstash_plugin: 'true'
140
143
  logstash_group: input
@@ -155,11 +158,10 @@ required_rubygems_version: !ruby/object:Gem::Requirement
155
158
  requirements:
156
159
  - jar 'org.glassfish:javax.json', '1.1'
157
160
  rubyforge_project:
158
- rubygems_version: 2.6.13
161
+ rubygems_version: 2.4.8
159
162
  signing_key:
160
163
  specification_version: 4
161
- summary: This plugin collects Microsoft Azure Diagnostics data from Azure Storage
162
- Blobs.
164
+ summary: This plugin collects Microsoft Azure Diagnostics data from Azure Storage Blobs.
163
165
  test_files:
164
166
  - spec/com/microsoft/json-parser_spec.rb
165
167
  - spec/inputs/azureblob_spec.rb
@@ -1,10 +0,0 @@
1
- # this is a generated file, to avoid over-writing it just delete this comment
2
- begin
3
- require 'jar_dependencies'
4
- rescue LoadError
5
- require 'org/glassfish/javax.json/1.1/javax.json-1.1.jar'
6
- end
7
-
8
- if defined? Jars
9
- require_jar( 'org.glassfish', 'javax.json', '1.1' )
10
- end