generik-logstash-input-azureblob 1.0.0
- checksums.yaml +7 -0
- data/lib/logstash/codecs/json_list.rb +36 -0
- data/lib/logstash/inputs/azureblob.rb +96 -0
- data/lib/logstash/inputs/azuretopic.rb +62 -0
- data/lib/logstash/inputs/azuretopicthreadable.rb +78 -0
- data/lib/logstash/inputs/azurewadtable.rb +132 -0
- metadata +128 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA1:
  metadata.gz: f5020155403abd1393bc355a70f783a0674f8886
  data.tar.gz: f4df1a5e97fcdd9217653639f162ae2329c9df4f
SHA512:
  metadata.gz: b6ae69be71131a5798eb142a1e262d39df13faa9183a78263716fa8653f0b1b094e0dfac7cfc44390a241ba5c3883031110c4a823e24704cf69a69399a972be8
  data.tar.gz: 88e0cd67a2d08a24b47f98641e88e25885ce753286f6fe519fccdee74547a4d077b45ea136b97de0d6c399a225356ddfc328525351c0a8caa078f529a55e2d59
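These are the standard RubyGems package digests. As a minimal sketch (assuming you have the downloaded gem unpacked so that metadata.gz sits at the hypothetical local path below), one of them could be checked with Ruby's Digest library:

require "digest"

# Hypothetical path to the unpacked gem's metadata.gz; adjust as needed.
path = "metadata.gz"
expected_sha1 = "f5020155403abd1393bc355a70f783a0674f8886"

actual = Digest::SHA1.file(path).hexdigest
puts(actual == expected_sha1 ? "checksum OK" : "checksum MISMATCH: #{actual}")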
data/lib/logstash/codecs/json_list.rb
ADDED
@@ -0,0 +1,36 @@
# encoding: utf-8
require "logstash/codecs/base"
require "logstash/codecs/line"
require "json"

class LogStash::Codecs::JSONList < LogStash::Codecs::Base
  config_name "json_list"

  milestone 0

  config :charset, :validate => ::Encoding.name_list, :default => "UTF-8"

  public
  def register
  end # def register

  public
  def decode(data)
    begin
      JSON.parse(data).each do |obj|
        yield LogStash::Event.new(obj)
      end
    rescue JSON::ParserError => e
      @logger.info("JSON parse failure. Falling back to plain-text", :error => e, :data => data)
      # Fall back to wrapping the raw payload in a plain "message" event.
      yield LogStash::Event.new("message" => data)
    end
  end # def decode

  public
  def encode(data)
    arr = Array.new
    arr << data
    @on_event.call(arr.to_json)
  end # def encode

end # class LogStash::Codecs::JSONList
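As a quick illustration (a minimal sketch, not part of the gem; it assumes a Logstash runtime in which LogStash::Codecs::JSONList is loadable), decoding a JSON array yields one event per element, and encoding wraps a single event in a one-element JSON array:

codec = LogStash::Codecs::JSONList.new
codec.decode('[{"msg":"first"},{"msg":"second"}]') do |event|
  puts event["msg"]   # prints "first", then "second"
end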
data/lib/logstash/inputs/azureblob.rb
ADDED
@@ -0,0 +1,96 @@
# encoding: utf-8
require "logstash/inputs/base"
require "logstash/namespace"

require "azure"
require "securerandom"
require "set"

# Reads events from Azure Blobs
class LogStash::Inputs::Azureblob < LogStash::Inputs::Base

  config_name "azureblob"
  milestone 0

  default :codec, "json_lines"

  config :storage_account_name, :validate => :string
  config :storage_access_key, :validate => :string

  config :container, :validate => :string
  config :sleep_time, :validate => :number, :default => 10

  def initialize(*args)
    super(*args)
  end # def initialize

  public
  def register
    Azure.configure do |config|
      config.storage_account_name = @storage_account_name
      config.storage_access_key = @storage_access_key
    end
    @azure_blob = Azure::Blob::BlobService.new
  end # def register

  def list_blob_names
    blob_names = Set.new
    continuation_token = nil
    loop do
      entries = @azure_blob.list_blobs(@container, { :timeout => 10, :marker => continuation_token })
      entries.each do |entry|
        blob_names << entry.name
      end
      continuation_token = entries.continuation_token
      break if continuation_token.nil? || continuation_token.empty?
    end
    return blob_names
  end # def list_blob_names

  def acquire_lock(blob_name)
    @azure_blob.create_page_blob(@container, blob_name, 512)
    @azure_blob.acquire_lease(@container, blob_name, {:duration => 60, :timeout => 10, :proposed_lease_id => SecureRandom.uuid})
    return true
  rescue LogStash::ShutdownSignal => e
    raise e
  rescue => e
    @logger.error("Caught exception while locking", :exception => e)
    return false
  end # def acquire_lock

  def lock_blob(blob_names)
    real_blob_names = blob_names.select { |name| !name.end_with?(".lock") }
    real_blob_names.each do |blob_name|
      if !blob_names.include?(blob_name + ".lock")
        if acquire_lock(blob_name + ".lock")
          return blob_name
        end
      end
    end
    return nil
  end # def lock_blob

  def process(output_queue)
    blob_names = list_blob_names
    blob_name = lock_blob(blob_names)
    return if !blob_name
    blob, content = @azure_blob.get_blob(@container, blob_name)
    @codec.decode(content) do |event|
      output_queue << event
    end
  rescue LogStash::ShutdownSignal => e
    raise e
  rescue => e
    @logger.error("Oh My, An error occurred.", :exception => e)
  end # def process

  public
  def run(output_queue)
    while true
      process(output_queue)
    end # loop
  end # def run

  public
  def teardown
  end # def teardown
end # class LogStash::Inputs::Azureblob
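The claim mechanism is worth spelling out: an instance only processes a blob after it has created and leased a companion "<name>.lock" blob, so concurrent readers skip blobs that are already claimed. A minimal self-contained sketch of that selection rule (plain Ruby, no Azure calls; the container listing is faked):

require "set"

# Fake container listing: "a.log" is already claimed because "a.log.lock" exists.
blob_names = Set.new(["a.log", "a.log.lock", "b.log", "c.log"])

# Same rule as lock_blob: ignore the lock markers themselves, then pick the
# first data blob that has no "<name>.lock" sibling yet.
candidates = blob_names.reject { |name| name.end_with?(".lock") }
next_blob  = candidates.find { |name| !blob_names.include?(name + ".lock") }

puts next_blob   # => "b.log"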
data/lib/logstash/inputs/azuretopic.rb
ADDED
@@ -0,0 +1,62 @@
# encoding: utf-8
require "logstash/inputs/base"
require "logstash/namespace"

require "azure"

# Reads events from Azure topics
class LogStash::Inputs::Azuretopic < LogStash::Inputs::Base
  class Interrupted < StandardError; end

  config_name "azuretopic"
  milestone 0

  default :codec, "json_list"

  config :namespace, :validate => :string
  config :access_key, :validate => :string
  config :subscription, :validate => :string
  config :topic, :validate => :string
  config :deliverycount, :validate => :number, :default => 10

  def initialize(*args)
    super(*args)
  end # def initialize

  public
  def register
    Azure.configure do |config|
      config.sb_namespace = @namespace
      config.sb_access_key = @access_key
    end
    @azure_service_bus = Azure::ServiceBus::ServiceBusService.new
  end # def register

  def process(output_queue)
    # Peek-lock the next message so it is only deleted after a successful decode.
    message = @azure_service_bus.receive_subscription_message(@topic, @subscription, { :peek_lock => true, :timeout => 1 })
    if message
      codec.decode(message.body) do |event|
        output_queue << event
      end # codec.decode
      @azure_service_bus.delete_subscription_message(message)
    end
  rescue LogStash::ShutdownSignal => e
    raise e
  rescue => e
    @logger.error("Oh My, An error occurred.", :exception => e)
    # Drop messages that keep failing once they exceed the allowed delivery count.
    if message and message.delivery_count > @deliverycount
      @azure_service_bus.delete_subscription_message(message)
    end
  end # def process

  public
  def run(output_queue)
    while true
      process(output_queue)
    end # loop
  end # def run

  public
  def teardown
  end # def teardown
end # class LogStash::Inputs::Azuretopic
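The error path deserves a note: because the receive is a peek-lock, a message that repeatedly fails to decode would otherwise be redelivered forever, so the rescue branch deletes it once its delivery count exceeds deliverycount. A minimal sketch of just that decision, using a hypothetical stub in place of the real Service Bus message object:

# Hypothetical stand-in for an Azure Service Bus brokered message.
StubMessage = Struct.new(:body, :delivery_count)

def poison?(message, limit)
  # Same rule as the rescue branch above: give up on the message once it has
  # been delivered (and failed) more times than the configured limit.
  !message.nil? && message.delivery_count > limit
end

deliverycount_limit = 10
puts poison?(StubMessage.new("{bad json", 3),  deliverycount_limit)  # => false, will be retried
puts poison?(StubMessage.new("{bad json", 11), deliverycount_limit)  # => true, would be deleted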
data/lib/logstash/inputs/azuretopicthreadable.rb
ADDED
@@ -0,0 +1,78 @@
# encoding: utf-8
require "logstash/inputs/base"
require "logstash/namespace"

require "thread"

require "azure"

# Reads events from Azure topics
class LogStash::Inputs::Azuretopicthreadable < LogStash::Inputs::Base
  class Interrupted < StandardError; end

  config_name "azuretopicthreadable"
  milestone 0

  default :codec, "json" # default json codec

  config :namespace, :validate => :string
  config :access_key, :validate => :string
  config :subscription, :validate => :string
  config :topic, :validate => :string
  config :deliverycount, :validate => :number, :default => 10
  config :threads, :validate => :number, :default => 1
  config :thread_sleep_time, :validate => :number, :default => 1.0/50.0

  def initialize(*args)
    super(*args)
  end # def initialize

  public
  def register
    # Configure credentials
    Azure.configure do |config|
      config.sb_namespace = @namespace
      config.sb_access_key = @access_key
    end
  end # def register

  def process(output_queue, pid)
    # Get a new instance of the service for this worker thread
    azure_service_bus = Azure::ServiceBus::ServiceBusService.new
    while true
      begin
        # check if we have a message in the subscription
        message = azure_service_bus.receive_subscription_message(@topic, @subscription, { :peek_lock => true, :timeout => 1 })
        if message
          # decoding yields one event per decoded record
          codec.decode(message.body) do |event|
            output_queue << event
          end # codec.decode
          # delete the message after reading it
          azure_service_bus.delete_subscription_message(message)
        end
      rescue LogStash::ShutdownSignal => e
        raise e
      rescue => e
        @logger.error("Oh My, An error occurred. Thread id:" + pid.to_s, :exception => e)
        if message and message.delivery_count > @deliverycount
          azure_service_bus.delete_subscription_message(message)
        end
      end
      sleep(@thread_sleep_time)
    end
  end # def process

  public
  def run(output_queue)
    threads = []
    (0..(@threads-1)).each do |pid|
      threads << Thread.new { process(output_queue, pid) }
    end
    threads.each { |thr| thr.join }
  end # def run

  public
  def teardown
  end # def teardown
end # class LogStash::Inputs::Azuretopicthreadable
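The run method simply fans the polling loop out across a fixed pool of threads that all push into one shared queue. A minimal self-contained sketch of that pattern (plain Ruby, with a fake "poll" standing in for the Service Bus call):

require "thread"

output_queue = Queue.new
thread_count = 3

threads = (0..(thread_count - 1)).map do |pid|
  Thread.new do
    # Fake "poll": each worker contributes a couple of events, then exits.
    2.times { |i| output_queue << "event #{i} from worker #{pid}" }
  end
end
threads.each(&:join)

puts output_queue.pop until output_queue.empty?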
data/lib/logstash/inputs/azurewadtable.rb
ADDED
@@ -0,0 +1,132 @@
# encoding: utf-8
require "logstash/inputs/base"
require "logstash/namespace"

require "time"
require "azure"

class LogStash::Inputs::AzureWADTable < LogStash::Inputs::Base
  class Interrupted < StandardError; end

  config_name "azurewadtable"
  milestone 0

  config :account_name, :validate => :string
  config :access_key, :validate => :string
  config :table_name, :validate => :string
  config :entity_count_to_process, :validate => :string, :default => 100
  config :collection_start_time_utc, :validate => :string, :default => Time.now.utc.iso8601
  config :etw_pretty_print, :validate => :boolean, :default => false
  config :idle_delay_seconds, :validate => :number, :default => 15

  TICKS_SINCE_EPOCH = Time.utc(0001, 01, 01).to_i * 10000000

  def initialize(*args)
    super(*args)
  end # initialize

  public
  def register
    Azure.configure do |config|
      config.storage_account_name = @account_name
      config.storage_access_key = @access_key
    end
    @azure_table_service = Azure::Table::TableService.new
    @last_timestamp = @collection_start_time_utc
    @idle_delay = @idle_delay_seconds
    @continuation_token = nil
  end # register

  public
  def run(output_queue)
    loop do
      @logger.debug("Starting process method @" + Time.now.to_s)
      process(output_queue)
      @logger.debug("Starting delay of: " + @idle_delay_seconds.to_s + " seconds @" + Time.now.to_s)
      sleep @idle_delay
    end # loop
  end # run

  public
  def teardown
  end

  def process(output_queue)
    @logger.debug(@last_timestamp)
    # query data using start_from_time
    query_filter = "(PartitionKey gt '#{partitionkey_from_datetime(@last_timestamp)}')"
    for i in 0..99
      query_filter << " or (PartitionKey gt '#{i.to_s.rjust(19, '0')}___#{partitionkey_from_datetime(@last_timestamp)}' and PartitionKey lt '#{i.to_s.rjust(19, '0')}___9999999999999999999')"
    end # for block
    query_filter = query_filter.gsub('"','')
    @logger.debug("Query filter: " + query_filter)
    query = { :top => @entity_count_to_process, :filter => query_filter, :continuation_token => @continuation_token }
    result = @azure_table_service.query_entities(@table_name, query)
    @continuation_token = result.continuation_token

    if result and result.length > 0
      result.each do |entity|
        event = LogStash::Event.new(entity.properties)
        event["type"] = @table_name

        # Help pretty print etw files
        if (@etw_pretty_print && !event["EventMessage"].nil? && !event["Message"].nil?)
          logger.debug("event: " + event.to_s)
          eventMessage = event["EventMessage"].to_s
          message = event["Message"].to_s
          logger.debug("EventMessage: " + eventMessage)
          logger.debug("Message: " + message)
          if (eventMessage.include? "%")
            logger.debug("starting pretty print")
            toReplace = eventMessage.scan(/%\d+/)
            payload = message.scan(/(?<!\\S)([a-zA-Z]+)=(\"[^\"]*\")(?!\\S)/)
            # Split up the format string to separate all of the numbers
            toReplace.each do |key|
              logger.debug("Replacing key: " + key.to_s)
              index = key.scan(/\d+/).join.to_i
              newValue = payload[index - 1][1]
              logger.debug("New Value: " + newValue)
              eventMessage[key] = newValue
            end
            event["EventMessage"] = eventMessage
            logger.debug("pretty print end. result: " + event["EventMessage"].to_s)
          end
        end

        output_queue << event
      end # each block
      @idle_delay = 0
      @last_timestamp = result.last.properties["TIMESTAMP"].iso8601 unless @continuation_token
    else
      @logger.debug("No new results found.")
      @idle_delay = @idle_delay_seconds
    end # if block

  rescue => e
    @logger.error("Oh My, An error occurred.", :exception => e)
    raise
  end # process

  # Windows Azure Diagnostics' algorithm for determining the partition key based on time is as follows:
  # 1. Take the time in UTC without seconds.
  # 2. Convert it into .NET ticks.
  # 3. Add a '0' prefix.
  def partitionkey_from_datetime(time_string)
    collection_time = Time.parse(time_string)
    if collection_time
      @logger.debug("collection time parsed successfully #{collection_time}")
    else
      raise(ArgumentError, "Could not parse the time_string")
    end # if else block

    collection_time -= collection_time.sec
    ticks = to_ticks(collection_time)
    "0#{ticks}"
  end # partitionkey_from_datetime

  # Convert time to ticks
  def to_ticks(time_to_convert)
    @logger.debug("Converting time to ticks")
    time_to_convert.to_i * 10000000 - TICKS_SINCE_EPOCH
  end # to_ticks

end # LogStash::Inputs::AzureWADTable
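The partition-key arithmetic is easier to see in isolation. Here is a minimal standalone sketch (plain Ruby, reproducing the same ticks formula outside the plugin) of how a UTC timestamp maps to the WAD partition key:

require "time"

# Ticks offset between 0001-01-01 and the Unix epoch, as in the plugin.
TICKS_SINCE_EPOCH = Time.utc(1, 1, 1).to_i * 10_000_000

def partition_key_for(time_string)
  t = Time.parse(time_string).utc
  t -= t.sec                                           # drop seconds, per the WAD convention
  ticks = t.to_i * 10_000_000 - TICKS_SINCE_EPOCH      # .NET ticks: 100 ns units since year 1
  "0#{ticks}"                                          # WAD prefixes the key with '0'
end

puts partition_key_for("2016-02-19T12:34:56Z")   # => "0635914820400000000"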
metadata
ADDED
@@ -0,0 +1,128 @@
--- !ruby/object:Gem::Specification
name: generik-logstash-input-azureblob
version: !ruby/object:Gem::Version
  version: 1.0.0
platform: ruby
authors:
- gongdo
- Microsoft
autorequire:
bindir: bin
cert_chain: []
date: 2016-02-19 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: logstash-core
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 1.4.0
    - - "<"
      - !ruby/object:Gem::Version
        version: 2.0.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 1.4.0
    - - "<"
      - !ruby/object:Gem::Version
        version: 2.0.0
- !ruby/object:Gem::Dependency
  name: logstash-codec-plain
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: stud
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: azure
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: logstash-devutils
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
description: This gem is a logstash plugin in order to use Microsoft Azure as an input
  source.
email: gongdo@live.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- lib/logstash/codecs/json_list.rb
- lib/logstash/inputs/azureblob.rb
- lib/logstash/inputs/azuretopic.rb
- lib/logstash/inputs/azuretopicthreadable.rb
- lib/logstash/inputs/azurewadtable.rb
homepage: ''
licenses:
- Apache License (2.0)
metadata:
  logstash_plugin: 'true'
  logstash_group: input
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.2.2
signing_key:
specification_version: 4
summary: Logstash input source from Azure.
test_files: []