logstash-input-azurewadtable 0.9.9 → 0.9.11
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG.md +6 -6
- data/Gemfile +2 -2
- data/README.md +67 -67
- data/lib/logstash/inputs/azurewadtable.rb +174 -134
- data/logstash-input-azurewadtable.gemspec +25 -25
- metadata +29 -23
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: c27925f10618e725258c57b9afb50b9f8c25689c
|
4
|
+
data.tar.gz: 3f4a6dc33cbf979edeaf97b6696470b22bd7f5ff
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 11ac6192c5c76e265bab0412520b795e898e28e5861aeacdebdfd05634da3c718bc4b4aa079f4706959fe63c4ddca2191ae21505785067516934a1028153947c
|
7
|
+
data.tar.gz: 1581ff53ae508173137aa0ccb8ad96fc5c224f4385bd6b7f1535362196d4c44c050eef4aa4b676d3df162141941b85ff2ec94aca18a62626fa31149f3087be2b
|
data/CHANGELOG.md
CHANGED
@@ -1,7 +1,7 @@
|
|
1
|
-
## 2016.06.27
|
2
|
-
* Added support for setting Azure service endpoint in the configuration.
|
3
|
-
|
4
|
-
## 2016.05.02
|
5
|
-
* Made the plugin to respect Logstash shutdown signal.
|
6
|
-
* Updated the *logstash-core* runtime dependency requirement to '~> 2.0'.
|
1
|
+
## 2016.06.27
|
2
|
+
* Added support for setting Azure service endpoint in the configuration.
|
3
|
+
|
4
|
+
## 2016.05.02
|
5
|
+
* Made the plugin to respect Logstash shutdown signal.
|
6
|
+
* Updated the *logstash-core* runtime dependency requirement to '~> 2.0'.
|
7
7
|
* Updated the *logstash-devutils* development dependency requirement to '>= 0.0.16'
|
data/Gemfile
CHANGED
@@ -1,2 +1,2 @@
|
|
1
|
-
source 'https://rubygems.org'
|
2
|
-
gemspec
|
1
|
+
source 'https://rubygems.org'
|
2
|
+
gemspec
|
data/README.md
CHANGED
@@ -1,67 +1,67 @@
|
|
1
|
-
# Logstash input plugin for Azure diagnostics data from Storage Tables
|
2
|
-
|
3
|
-
## Summary
|
4
|
-
This plugin reads Azure diagnostics data from specified Azure Storage Table and parses the data for output.
|
5
|
-
|
6
|
-
## Installation
|
7
|
-
You can install this plugin using the Logstash "plugin" or "logstash-plugin" (for newer versions of Logstash) command:
|
8
|
-
```sh
|
9
|
-
logstash-plugin install logstash-input-azurewadtable
|
10
|
-
```
|
11
|
-
For more information, see Logstash reference [Working with plugins](https://www.elastic.co/guide/en/logstash/current/working-with-plugins.html).
|
12
|
-
|
13
|
-
## Configuration
|
14
|
-
### Required Parameters
|
15
|
-
__*account_name*__
|
16
|
-
|
17
|
-
The Azure Storage account name.
|
18
|
-
|
19
|
-
__*access_key*__
|
20
|
-
|
21
|
-
The access key to the storage account.
|
22
|
-
|
23
|
-
__*table_name*__
|
24
|
-
|
25
|
-
The storage table to pull data from.
|
26
|
-
|
27
|
-
### Optional Parameters
|
28
|
-
__*entity_count_to_process*__
|
29
|
-
|
30
|
-
The plugin queries and processes table entities in a loop, this parameter is to specify the maximum number of entities it should query and process per loop. The default value is 100.
|
31
|
-
|
32
|
-
__*collection_start_time_utc*__
|
33
|
-
|
34
|
-
Specifies the point of time after which the entities created should be included in the query results. The default value is when the plugin gets initialized:
|
35
|
-
|
36
|
-
```ruby
|
37
|
-
Time.now.utc.iso8601
|
38
|
-
```
|
39
|
-
__*etw_pretty_print*__
|
40
|
-
|
41
|
-
True to pretty print ETW files, otherwise False. The default value is False.
|
42
|
-
|
43
|
-
__*idle_delay_seconds*__
|
44
|
-
|
45
|
-
Specifies the seconds to wait between each processing loop. The default value is 15.
|
46
|
-
|
47
|
-
__*endpoint*__
|
48
|
-
|
49
|
-
Specifies the endpoint of Azure environment. The default value is "core.windows.net".
|
50
|
-
|
51
|
-
### Examples
|
52
|
-
```
|
53
|
-
input
|
54
|
-
{
|
55
|
-
|
56
|
-
{
|
57
|
-
account_name => "mystorageaccount"
|
58
|
-
access_key => "VGhpcyBpcyBhIGZha2Uga2V5Lg=="
|
59
|
-
table_name => "WADWindowsEventLogsTable"
|
60
|
-
}
|
61
|
-
}
|
62
|
-
```
|
63
|
-
|
64
|
-
## More information
|
65
|
-
The source code of this plugin is hosted in GitHub repo [Microsoft Azure Diagnostics with ELK](https://github.com/Azure/azure-diagnostics-tools). We welcome you to provide feedback and/or contribute to the project.
|
66
|
-
|
67
|
-
Please also see [Analyze Diagnostics Data with ELK template](https://github.com/Azure/azure-quickstart-templates/tree/master/diagnostics-with-elk) for quick deployment of ELK to Azure.
|
1
|
+
# Logstash input plugin for Azure diagnostics data from Storage Tables
|
2
|
+
|
3
|
+
## Summary
|
4
|
+
This plugin reads Azure diagnostics data from specified Azure Storage Table and parses the data for output.
|
5
|
+
|
6
|
+
## Installation
|
7
|
+
You can install this plugin using the Logstash "plugin" or "logstash-plugin" (for newer versions of Logstash) command:
|
8
|
+
```sh
|
9
|
+
logstash-plugin install logstash-input-azurewadtable
|
10
|
+
```
|
11
|
+
For more information, see Logstash reference [Working with plugins](https://www.elastic.co/guide/en/logstash/current/working-with-plugins.html).
|
12
|
+
|
13
|
+
## Configuration
|
14
|
+
### Required Parameters
|
15
|
+
__*account_name*__
|
16
|
+
|
17
|
+
The Azure Storage account name.
|
18
|
+
|
19
|
+
__*access_key*__
|
20
|
+
|
21
|
+
The access key to the storage account.
|
22
|
+
|
23
|
+
__*table_name*__
|
24
|
+
|
25
|
+
The storage table to pull data from.
|
26
|
+
|
27
|
+
### Optional Parameters
|
28
|
+
__*entity_count_to_process*__
|
29
|
+
|
30
|
+
The plugin queries and processes table entities in a loop, this parameter is to specify the maximum number of entities it should query and process per loop. The default value is 100.
|
31
|
+
|
32
|
+
__*collection_start_time_utc*__
|
33
|
+
|
34
|
+
Specifies the point of time after which the entities created should be included in the query results. The default value is when the plugin gets initialized:
|
35
|
+
|
36
|
+
```ruby
|
37
|
+
Time.now.utc.iso8601
|
38
|
+
```
|
39
|
+
__*etw_pretty_print*__
|
40
|
+
|
41
|
+
True to pretty print ETW files, otherwise False. The default value is False.
|
42
|
+
|
43
|
+
__*idle_delay_seconds*__
|
44
|
+
|
45
|
+
Specifies the seconds to wait between each processing loop. The default value is 15.
|
46
|
+
|
47
|
+
__*endpoint*__
|
48
|
+
|
49
|
+
Specifies the endpoint of Azure environment. The default value is "core.windows.net".
|
50
|
+
|
51
|
+
### Examples
|
52
|
+
```
|
53
|
+
input
|
54
|
+
{
|
55
|
+
azurewadtable
|
56
|
+
{
|
57
|
+
account_name => "mystorageaccount"
|
58
|
+
access_key => "VGhpcyBpcyBhIGZha2Uga2V5Lg=="
|
59
|
+
table_name => "WADWindowsEventLogsTable"
|
60
|
+
}
|
61
|
+
}
|
62
|
+
```
|
63
|
+
|
64
|
+
## More information
|
65
|
+
The source code of this plugin is hosted in GitHub repo [Microsoft Azure Diagnostics with ELK](https://github.com/Azure/azure-diagnostics-tools). We welcome you to provide feedback and/or contribute to the project.
|
66
|
+
|
67
|
+
Please also see [Analyze Diagnostics Data with ELK template](https://github.com/Azure/azure-quickstart-templates/tree/master/diagnostics-with-elk) for quick deployment of ELK to Azure.
|
@@ -1,134 +1,174 @@
|
|
1
|
-
# encoding: utf-8
|
2
|
-
require "logstash/inputs/base"
|
3
|
-
require "logstash/namespace"
|
4
|
-
|
5
|
-
require "azure"
|
6
|
-
|
7
|
-
class LogStash::Inputs::AzureWADTable < LogStash::Inputs::Base
|
8
|
-
class Interrupted < StandardError; end
|
9
|
-
|
10
|
-
config_name "azurewadtable"
|
11
|
-
milestone 1
|
12
|
-
|
13
|
-
config :account_name, :validate => :string
|
14
|
-
config :access_key, :validate => :string
|
15
|
-
config :table_name, :validate => :string
|
16
|
-
config :entity_count_to_process, :validate => :string, :default => 100
|
17
|
-
config :collection_start_time_utc, :validate => :string, :default => Time.now.utc.iso8601
|
18
|
-
config :etw_pretty_print, :validate => :boolean, :default => false
|
19
|
-
config :idle_delay_seconds, :validate => :number, :default => 15
|
20
|
-
config :endpoint, :validate => :string, :default => "core.windows.net"
|
21
|
-
|
22
|
-
|
23
|
-
|
24
|
-
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
34
|
-
|
35
|
-
|
36
|
-
|
37
|
-
|
38
|
-
|
39
|
-
|
40
|
-
|
41
|
-
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
49
|
-
|
50
|
-
|
51
|
-
|
52
|
-
|
53
|
-
end
|
54
|
-
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
|
62
|
-
|
63
|
-
|
64
|
-
|
65
|
-
|
66
|
-
|
67
|
-
|
68
|
-
|
69
|
-
|
70
|
-
|
71
|
-
|
72
|
-
|
73
|
-
|
74
|
-
|
75
|
-
|
76
|
-
|
77
|
-
|
78
|
-
|
79
|
-
|
80
|
-
|
81
|
-
|
82
|
-
|
83
|
-
|
84
|
-
|
85
|
-
|
86
|
-
|
87
|
-
|
88
|
-
|
89
|
-
|
90
|
-
|
91
|
-
|
92
|
-
|
93
|
-
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
100
|
-
|
101
|
-
|
102
|
-
|
103
|
-
|
104
|
-
|
105
|
-
|
106
|
-
|
107
|
-
|
108
|
-
|
109
|
-
|
110
|
-
|
111
|
-
|
112
|
-
|
113
|
-
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
|
125
|
-
|
126
|
-
|
127
|
-
|
128
|
-
|
129
|
-
|
130
|
-
|
131
|
-
|
132
|
-
|
133
|
-
|
134
|
-
end
|
1
|
+
# encoding: utf-8
|
2
|
+
require "logstash/inputs/base"
|
3
|
+
require "logstash/namespace"
|
4
|
+
require "time"
|
5
|
+
require "azure"
|
6
|
+
|
7
|
+
class LogStash::Inputs::AzureWADTable < LogStash::Inputs::Base
|
8
|
+
class Interrupted < StandardError; end
|
9
|
+
|
10
|
+
config_name "azurewadtable"
|
11
|
+
milestone 1
|
12
|
+
|
13
|
+
config :account_name, :validate => :string
|
14
|
+
config :access_key, :validate => :string
|
15
|
+
config :table_name, :validate => :string
|
16
|
+
config :entity_count_to_process, :validate => :string, :default => 100
|
17
|
+
config :collection_start_time_utc, :validate => :string, :default => Time.now.utc.iso8601
|
18
|
+
config :etw_pretty_print, :validate => :boolean, :default => false
|
19
|
+
config :idle_delay_seconds, :validate => :number, :default => 15
|
20
|
+
config :endpoint, :validate => :string, :default => "core.windows.net"
|
21
|
+
|
22
|
+
# Default 1 minute delay to ensure all data is published to the table before querying.
|
23
|
+
# See issue #23 for more: https://github.com/Azure/azure-diagnostics-tools/issues/23
|
24
|
+
config :data_latency_minutes, :validate => :number, :default => 1
|
25
|
+
|
26
|
+
TICKS_SINCE_EPOCH = Time.utc(0001, 01, 01).to_i * 10000000
|
27
|
+
|
28
|
+
def initialize(*args)
|
29
|
+
super(*args)
|
30
|
+
end # initialize
|
31
|
+
|
32
|
+
public
|
33
|
+
def register
|
34
|
+
Azure.configure do |config|
|
35
|
+
config.storage_account_name = @account_name
|
36
|
+
config.storage_access_key = @access_key
|
37
|
+
config.storage_table_host = "https://#{@account_name}.table.#{@endpoint}"
|
38
|
+
end
|
39
|
+
@azure_table_service = Azure::Table::TableService.new
|
40
|
+
@last_timestamp = @collection_start_time_utc
|
41
|
+
@idle_delay = @idle_delay_seconds
|
42
|
+
@continuation_token = nil
|
43
|
+
end # register
|
44
|
+
|
45
|
+
public
|
46
|
+
def run(output_queue)
|
47
|
+
while !stop?
|
48
|
+
@logger.debug("Starting process method @" + Time.now.to_s);
|
49
|
+
process(output_queue)
|
50
|
+
@logger.debug("Starting delay of: " + @idle_delay_seconds.to_s + " seconds @" + Time.now.to_s);
|
51
|
+
sleep @idle_delay
|
52
|
+
end # while
|
53
|
+
end # run
|
54
|
+
|
55
|
+
public
|
56
|
+
def teardown
|
57
|
+
end
|
58
|
+
|
59
|
+
def build_latent_query
|
60
|
+
@logger.debug("from #{@last_timestamp} to #{@until_timestamp}")
|
61
|
+
query_filter = "(PartitionKey gt '#{partitionkey_from_datetime(@last_timestamp)}' and PartitionKey lt '#{partitionkey_from_datetime(@until_timestamp)}')"
|
62
|
+
for i in 0..99
|
63
|
+
query_filter << " or (PartitionKey gt '#{i.to_s.rjust(19, '0')}___#{partitionkey_from_datetime(@last_timestamp)}' and PartitionKey lt '#{i.to_s.rjust(19, '0')}___#{partitionkey_from_datetime(@until_timestamp)}')"
|
64
|
+
end # for block
|
65
|
+
query_filter = query_filter.gsub('"','')
|
66
|
+
query_filter
|
67
|
+
end
|
68
|
+
|
69
|
+
def build_zero_latency_query
|
70
|
+
@logger.debug("from #{@last_timestamp} to most recent data")
|
71
|
+
# query data using start_from_time
|
72
|
+
query_filter = "(PartitionKey gt '#{partitionkey_from_datetime(@last_timestamp)}')"
|
73
|
+
for i in 0..99
|
74
|
+
query_filter << " or (PartitionKey gt '#{i.to_s.rjust(19, '0')}___#{partitionkey_from_datetime(@last_timestamp)}' and PartitionKey lt '#{i.to_s.rjust(19, '0')}___9999999999999999999')"
|
75
|
+
end # for block
|
76
|
+
query_filter = query_filter.gsub('"','')
|
77
|
+
query_filter
|
78
|
+
end
|
79
|
+
|
80
|
+
def process(output_queue)
|
81
|
+
if @data_latency_minutes > 0
|
82
|
+
@until_timestamp = (Time.now - (60 * @data_latency_minutes)).iso8601 unless @continuation_token
|
83
|
+
query_filter = build_latent_query
|
84
|
+
else
|
85
|
+
query_filter = build_zero_latency_query
|
86
|
+
end
|
87
|
+
@logger.debug("Query filter: " + query_filter)
|
88
|
+
query = { :top => @entity_count_to_process, :filter => query_filter, :continuation_token => @continuation_token }
|
89
|
+
result = @azure_table_service.query_entities(@table_name, query)
|
90
|
+
@continuation_token = result.continuation_token
|
91
|
+
|
92
|
+
if result and result.length > 0
|
93
|
+
@logger.debug("#{result.length} results found.")
|
94
|
+
last_good_timestamp = nil
|
95
|
+
result.each do |entity|
|
96
|
+
event = LogStash::Event.new(entity.properties)
|
97
|
+
event.set("type", @table_name)
|
98
|
+
|
99
|
+
# Help pretty print etw files
|
100
|
+
if (@etw_pretty_print && !event.get("EventMessage").nil? && !event.get("Message").nil?)
|
101
|
+
@logger.debug("event: " + event.to_s)
|
102
|
+
eventMessage = event.get("EventMessage").to_s
|
103
|
+
message = event.get("Message").to_s
|
104
|
+
@logger.debug("EventMessage: " + eventMessage)
|
105
|
+
@logger.debug("Message: " + message)
|
106
|
+
if (eventMessage.include? "%")
|
107
|
+
@logger.debug("starting pretty print")
|
108
|
+
toReplace = eventMessage.scan(/%\d+/)
|
109
|
+
payload = message.scan(/(?<!\\S)([a-zA-Z]+)=(\"[^\"]*\")(?!\\S)/)
|
110
|
+
# Split up the format string to seperate all of the numbers
|
111
|
+
toReplace.each do |key|
|
112
|
+
@logger.debug("Replacing key: " + key.to_s)
|
113
|
+
index = key.scan(/\d+/).join.to_i
|
114
|
+
newValue = payload[index - 1][1]
|
115
|
+
@logger.debug("New Value: " + newValue)
|
116
|
+
eventMessage[key] = newValue
|
117
|
+
end # do block
|
118
|
+
event.set("EventMessage", eventMessage)
|
119
|
+
@logger.debug("pretty print end. result: " + event.get("EventMessage").to_s)
|
120
|
+
end
|
121
|
+
end
|
122
|
+
decorate(event)
|
123
|
+
if event.get('PreciseTimeStamp').is_a?(Time)
|
124
|
+
event.set('PreciseTimeStamp', LogStash::Timestamp.new(event.get('PreciseTimeStamp')))
|
125
|
+
end
|
126
|
+
theTIMESTAMP = event.get('TIMESTAMP')
|
127
|
+
if theTIMESTAMP.is_a?(LogStash::Timestamp)
|
128
|
+
last_good_timestamp = theTIMESTAMP.to_iso8601
|
129
|
+
elsif theTIMESTAMP.is_a?(Time)
|
130
|
+
last_good_timestamp = theTIMESTAMP.iso8601
|
131
|
+
event.set('TIMESTAMP', LogStash::Timestamp.new(theTIMESTAMP))
|
132
|
+
else
|
133
|
+
@logger.warn("Found result with invalid TIMESTAMP. " + event.to_hash.to_s)
|
134
|
+
end
|
135
|
+
output_queue << event
|
136
|
+
end # each block
|
137
|
+
@idle_delay = 0
|
138
|
+
if (!last_good_timestamp.nil?)
|
139
|
+
@last_timestamp = last_good_timestamp unless @continuation_token
|
140
|
+
end
|
141
|
+
else
|
142
|
+
@logger.debug("No new results found.")
|
143
|
+
@idle_delay = @idle_delay_seconds
|
144
|
+
end # if block
|
145
|
+
|
146
|
+
rescue => e
|
147
|
+
@logger.error("Oh My, An error occurred.", :exception => e)
|
148
|
+
raise
|
149
|
+
end # process
|
150
|
+
|
151
|
+
# Windows Azure Diagnostic's algorithm for determining the partition key based on time is as follows:
|
152
|
+
# 1. Take time in UTC without seconds.
|
153
|
+
# 2. Convert it into .net ticks
|
154
|
+
# 3. add a '0' prefix.
|
155
|
+
def partitionkey_from_datetime(time_string)
|
156
|
+
collection_time = Time.parse(time_string)
|
157
|
+
if collection_time
|
158
|
+
@logger.debug("collection time parsed successfully #{collection_time}")
|
159
|
+
else
|
160
|
+
raise(ArgumentError, "Could not parse the time_string")
|
161
|
+
end # if else block
|
162
|
+
|
163
|
+
collection_time -= collection_time.sec
|
164
|
+
ticks = to_ticks(collection_time)
|
165
|
+
"0#{ticks}"
|
166
|
+
end # partitionkey_from_datetime
|
167
|
+
|
168
|
+
# Convert time to ticks
|
169
|
+
def to_ticks(time_to_convert)
|
170
|
+
@logger.debug("Converting time to ticks")
|
171
|
+
time_to_convert.to_i * 10000000 - TICKS_SINCE_EPOCH
|
172
|
+
end # to_ticks
|
173
|
+
|
174
|
+
end # LogStash::Inputs::AzureWADTable
|
@@ -1,25 +1,25 @@
|
|
1
|
-
Gem::Specification.new do |s|
|
2
|
-
s.name = 'logstash-input-azurewadtable'
|
3
|
-
s.version = '0.9.9'
|
4
|
-
s.licenses = ['Apache License (2.0)']
|
5
|
-
s.summary = "This plugin collects Microsoft Azure Diagnostics data from Azure Storage Tables."
|
6
|
-
s.description = "This gem is a Logstash plugin. It reads and parses diagnostics data from Azure Storage Tables."
|
7
|
-
s.authors = ["Microsoft Corporation"]
|
8
|
-
s.email = 'azdiag@microsoft.com'
|
9
|
-
s.homepage = "https://github.com/Azure/azure-diagnostics-tools"
|
10
|
-
s.require_paths = ["lib"]
|
11
|
-
|
12
|
-
# Files
|
13
|
-
s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','Gemfile','LICENSE']
|
14
|
-
# Tests
|
15
|
-
s.test_files = s.files.grep(%r{^(test|spec|features)/})
|
16
|
-
|
17
|
-
# Special flag to let us know this is actually a logstash plugin
|
18
|
-
s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }
|
19
|
-
|
20
|
-
# Gem dependencies
|
21
|
-
s.add_runtime_dependency 'logstash-core', '~> 2.0'
|
22
|
-
s.add_runtime_dependency 'azure', '~> 0.7.3'
|
23
|
-
s.add_development_dependency 'logstash-devutils', '>= 0.0.16'
|
24
|
-
end
|
25
|
-
|
1
|
+
Gem::Specification.new do |s|
|
2
|
+
s.name = 'logstash-input-azurewadtable'
|
3
|
+
s.version = '0.9.11'
|
4
|
+
s.licenses = ['Apache License (2.0)']
|
5
|
+
s.summary = "This plugin collects Microsoft Azure Diagnostics data from Azure Storage Tables."
|
6
|
+
s.description = "This gem is a Logstash plugin. It reads and parses diagnostics data from Azure Storage Tables."
|
7
|
+
s.authors = ["Microsoft Corporation"]
|
8
|
+
s.email = 'azdiag@microsoft.com'
|
9
|
+
s.homepage = "https://github.com/Azure/azure-diagnostics-tools"
|
10
|
+
s.require_paths = ["lib"]
|
11
|
+
|
12
|
+
# Files
|
13
|
+
s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','Gemfile','LICENSE']
|
14
|
+
# Tests
|
15
|
+
s.test_files = s.files.grep(%r{^(test|spec|features)/})
|
16
|
+
|
17
|
+
# Special flag to let us know this is actually a logstash plugin
|
18
|
+
s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }
|
19
|
+
|
20
|
+
# Gem dependencies
|
21
|
+
s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
|
22
|
+
s.add_runtime_dependency 'azure', '~> 0.7.3'
|
23
|
+
s.add_development_dependency 'logstash-devutils', '>= 1.1.0'
|
24
|
+
end
|
25
|
+
|
metadata
CHANGED
@@ -1,57 +1,63 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-input-azurewadtable
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.9.9
|
4
|
+
version: 0.9.11
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Microsoft Corporation
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2016-
|
11
|
+
date: 2016-12-08 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
|
+
name: logstash-core-plugin-api
|
15
|
+
version_requirements: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - ">="
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: '1.60'
|
20
|
+
- - "<="
|
21
|
+
- !ruby/object:Gem::Version
|
22
|
+
version: '2.99'
|
14
23
|
requirement: !ruby/object:Gem::Requirement
|
15
24
|
requirements:
|
16
|
-
- - ~>
|
25
|
+
- - ">="
|
26
|
+
- !ruby/object:Gem::Version
|
27
|
+
version: '1.60'
|
28
|
+
- - "<="
|
17
29
|
- !ruby/object:Gem::Version
|
18
|
-
version: '2.0'
|
19
|
-
name: logstash-core
|
30
|
+
version: '2.99'
|
20
31
|
prerelease: false
|
21
32
|
type: :runtime
|
33
|
+
- !ruby/object:Gem::Dependency
|
34
|
+
name: azure
|
22
35
|
version_requirements: !ruby/object:Gem::Requirement
|
23
36
|
requirements:
|
24
|
-
- - ~>
|
37
|
+
- - "~>"
|
25
38
|
- !ruby/object:Gem::Version
|
26
|
-
version: 0.7.3
|
27
|
-
- !ruby/object:Gem::Dependency
|
39
|
+
version: 0.7.3
|
28
40
|
requirement: !ruby/object:Gem::Requirement
|
29
41
|
requirements:
|
30
|
-
- - ~>
|
42
|
+
- - "~>"
|
31
43
|
- !ruby/object:Gem::Version
|
32
44
|
version: 0.7.3
|
33
|
-
name: azure
|
34
45
|
prerelease: false
|
35
46
|
type: :runtime
|
47
|
+
- !ruby/object:Gem::Dependency
|
48
|
+
name: logstash-devutils
|
36
49
|
version_requirements: !ruby/object:Gem::Requirement
|
37
50
|
requirements:
|
38
|
-
- - '>='
|
51
|
+
- - ">="
|
39
52
|
- !ruby/object:Gem::Version
|
40
|
-
version: 0.0.16
|
41
|
-
- !ruby/object:Gem::Dependency
|
53
|
+
version: 1.1.0
|
42
54
|
requirement: !ruby/object:Gem::Requirement
|
43
55
|
requirements:
|
44
|
-
- - '>='
|
56
|
+
- - ">="
|
45
57
|
- !ruby/object:Gem::Version
|
46
|
-
version: 0.0.16
|
47
|
-
name: logstash-devutils
|
58
|
+
version: 1.1.0
|
48
59
|
prerelease: false
|
49
60
|
type: :development
|
50
|
-
version_requirements: !ruby/object:Gem::Requirement
|
51
|
-
requirements:
|
52
|
-
- - '>='
|
53
|
-
- !ruby/object:Gem::Version
|
54
|
-
version: 0.0.16
|
55
61
|
description: This gem is a Logstash plugin. It reads and parses diagnostics data from Azure Storage Tables.
|
56
62
|
email: azdiag@microsoft.com
|
57
63
|
executables: []
|
@@ -77,12 +83,12 @@ require_paths:
|
|
77
83
|
- lib
|
78
84
|
required_ruby_version: !ruby/object:Gem::Requirement
|
79
85
|
requirements:
|
80
|
-
- - '>='
|
86
|
+
- - ">="
|
81
87
|
- !ruby/object:Gem::Version
|
82
88
|
version: '0'
|
83
89
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
84
90
|
requirements:
|
85
|
-
- - '>='
|
91
|
+
- - ">="
|
86
92
|
- !ruby/object:Gem::Version
|
87
93
|
version: '0'
|
88
94
|
requirements: []
|