logstash-output-azure_loganalytics 0.2.3 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +24 -0
- data/README.md +12 -3
- data/VERSION +1 -1
- data/lib/logstash/outputs/azure_loganalytics.rb +54 -12
- data/logstash-output-azure_loganalytics.gemspec +1 -1
- metadata +4 -4
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b09df7b108a440679410974226e7e7599463d9c2
+  data.tar.gz: 30d5a64ab00f80ff9446eaccb8094e5766ff3606
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1039c5799973e17dbd72ab8f4918d501967455bc398469da1e0ba7aee7478b4259c3894bd4ae857d5004042861360c506d77ab9263e5b8a245c015d7e1123d7c
+  data.tar.gz: 130337fedcefcf9f07ee34961cfbf0786b53d1129715811b0a8b52a38293913d9998733e1666dfbfc21de51a0d3ca6213644c2b450dc0f3e5e21bee0ed91ec15
data/CHANGELOG.md
CHANGED

@@ -1,7 +1,31 @@
+## 0.5.0
+
+* Change base [azure-loganalytics-datacollector-api](https://github.com/yokawasa/azure-log-analytics-data-collector) to ">= 0.5.0"
+* Support sprintf syntax like `%{my_log_type}` for `log_type` config param - [Issue #13](https://github.com/yokawasa/logstash-output-azure_loganalytics/issues/13)
+
+## 0.4.0
+
+* Change base [azure-loganalytics-datacollector-api](https://github.com/yokawasa/azure-log-analytics-data-collector) to ">= 0.4.0"
+
+## 0.3.2
+
+* Improvement: removed unnecessary key check
+
+## 0.3.1
+
+* Performance optimization for large key_names list scenario - [Issue#10](https://github.com/yokawasa/logstash-output-azure_loganalytics/issues/10)
+
+## 0.3.0
+
+* Support `key_types` param - [Issue#8](https://github.com/yokawasa/logstash-output-azure_loganalytics/issues/8)
+* Support custom log analytics API endpoint (for supporting Azure sovereign cloud) - [Issue#9](https://github.com/yokawasa/logstash-output-azure_loganalytics/issues/9)
+
 ## 0.2.3
+
 * Added additional debug logging for successful requests - [PR#7](https://github.com/yokawasa/logstash-output-azure_loganalytics/pull/7) by [@daniel-chambers](https://github.com/daniel-chambers)
 
 ## 0.2.2
+
 * Fix logging failure - [PR#6](https://github.com/yokawasa/logstash-output-azure_loganalytics/pull/6) by [@daniel-chambers](https://github.com/daniel-chambers)
 
 ## 0.2.1
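The 0.5.0 entry's sprintf support means `log_type` can reference an event field such as `%{my_log_type}`. As a rough illustration of that substitution outside Logstash, here is a pure-Ruby mimic; the `sprintf_like` helper and sample field hash are hypothetical, and the real resolution is done by `LogStash::Event#sprintf`:

```ruby
# Hypothetical mimic of the %{field} substitution performed by event.sprintf.
def sprintf_like(template, event_fields)
  template.gsub(/%\{([^}]+)\}/) { event_fields[Regexp.last_match(1)].to_s }
end

p sprintf_like('%{my_log_type}', 'my_log_type' => 'ApacheAccessLog')
# => "ApacheAccessLog"
```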
data/README.md
CHANGED

@@ -19,7 +19,8 @@ output {
     customer_id => "<OMS WORKSPACE ID>"
     shared_key => "<CLIENT AUTH KEY>"
     log_type => "<LOG TYPE NAME>"
-    key_names => ['key1','key2','key3'..] ## list of Key names
+    key_names => ['key1','key2','key3'..] ## list of Key names
+    key_types => {'key1'=>'string' 'key2'=>'double' 'key3'=>'boolean' .. }
     flush_items => <FLUSH_ITEMS_NUM>
     flush_interval_time => <FLUSH INTERVAL TIME(sec)>
 }
@@ -28,12 +29,20 @@ output {
 
 * **customer\_id (required)** - Your Operations Management Suite workspace ID
 * **shared\_key (required)** - The primary or the secondary Connected Sources client authentication key.
-* **log\_type (required)** - The name of the event type that is being submitted to Log Analytics.
+* **log\_type (required)** - The name of the event type that is being submitted to Log Analytics. It must contain only alphanumeric characters and `_`, and must not exceed 100 characters. sprintf syntax like `%{my_log_type}` is supported.
 * **time\_generated\_field (optional)** - Default: '' (empty string). The name of the time generated field. Be careful that the value of the field must strictly follow the ISO 8601 format (YYYY-MM-DDThh:mm:ssZ). See also [this](https://docs.microsoft.com/en-us/azure/log-analytics/log-analytics-data-collector-api#create-a-request) for more details
-* **key\_names (optional)** - Default:[] (empty array). list of
+* **key\_names (optional)** - Default: [] (empty array). The list of key names in the incoming record that you want to submit to Log Analytics.
+* **key\_types (optional)** - Default: {} (empty hash). The data type for each column to be stored in Log Analytics (`string`, `boolean`, or `double`)
+  * The key names in the `key_types` param must be included in the `key_names` param. Column data whose key isn't included in `key_types` is treated as the `string` data type.
+  * Multiple key-value entries are separated by `spaces` rather than commas (see also [this](https://www.elastic.co/guide/en/logstash/current/configuration-file-structure.html#hash))
+  * If you want to store a column in datetime or guid format, set `string` for the column (the value of the column should be in `YYYY-MM-DDThh:mm:ssZ` format if it's `datetime`, and in `GUID` format if it's `guid`).
+  * If the `key_types` param is not specified, all columns that you choose to submit (via the `key_names` param) are stored as the `string` data type in Log Analytics.
 * **flush_items (optional)** - Default 50. Max number of items to buffer before flushing (1 - 1000).
 * **flush_interval_time (optional)** - Default 5. Max number of seconds to wait between flushes.
 
+> [NOTE] There is a special param for changing the Log Analytics API endpoint (mainly for supporting Azure sovereign clouds)
+> * **endpoint (optional)** - Default: ods.opinsights.azure.com
+
 ## Tests
 
 Here is an example configuration where Logstash's event source and destination are configured as Apache2 access log and Azure Log Analytics respectively.
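To make the `key_names`/`key_types` rules above concrete, here is a minimal plain-Ruby sketch of the documented semantics. The record, key lists, and `coerce` helper are illustrative only, not the plugin's actual code:

```ruby
# Keys to submit, and the subset that gets a non-string type.
key_names = ['status', 'bytes', 'cached']
key_types = { 'bytes' => 'double', 'cached' => 'boolean' }

# A hypothetical incoming record; 'agent' is not in key_names.
record = { 'status' => '200', 'bytes' => '1532', 'cached' => 'true', 'agent' => 'curl' }

# Keys without an entry in key_types stay strings.
coerce = lambda do |type, val|
  case type
  when 'double'  then Integer(val) rescue Float(val) rescue val
  when 'boolean' then val.to_s.downcase == 'true'
  else val
  end
end

document = (key_names & record.keys).each_with_object({}) do |key, doc|
  doc[key] = key_types.key?(key) ? coerce.call(key_types[key], record[key]) : record[key]
end

p document  # => {"status"=>"200", "bytes"=>1532, "cached"=>true}  ('agent' is dropped)
```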
data/VERSION
CHANGED

@@ -1 +1 @@
-0.2.3
+0.5.0
data/lib/logstash/outputs/azure_loganalytics.rb
CHANGED

@@ -1,4 +1,5 @@
 # encoding: utf-8
+
 require "logstash/outputs/base"
 require "logstash/namespace"
 require "stud/buffer"
@@ -14,15 +15,32 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
   # The primary or the secondary Connected Sources client authentication key
   config :shared_key, :validate => :string, :required => true
 
-  # The name of the event type that is being submitted to Log Analytics.
+  # The name of the event type that is being submitted to Log Analytics.
+  # This must contain only alphanumeric characters and _, and must not exceed 100 chars.
+  # sprintf syntax like %{my_log_type} is supported.
   config :log_type, :validate => :string, :required => true
 
-  # The
+  # The service endpoint (Default: ods.opinsights.azure.com)
+  config :endpoint, :validate => :string, :default => 'ods.opinsights.azure.com'
+
+  # The name of the time generated field.
+  # Be careful that the value of the field must strictly follow the ISO 8601 format (YYYY-MM-DDThh:mm:ssZ)
   config :time_generated_field, :validate => :string, :default => ''
 
-  # list of
+  # The list of key names in the incoming record that you want to submit to Log Analytics
   config :key_names, :validate => :array, :default => []
-
+
+  # The data type for each column to be stored in Log Analytics (`string`, `boolean`, or `double`)
+  # - The key names in `key_types` param must be included in `key_names` param. Column data whose key isn't included in `key_types` is treated as the `string` data type.
+  # - Multiple key-value entries are separated by `spaces` rather than commas
+  #   See also https://www.elastic.co/guide/en/logstash/current/configuration-file-structure.html#hash
+  # - If you want to store a column in datetime or guid format, set `string` for the column (the value should be in YYYY-MM-DDThh:mm:ssZ format if it's datetime, and in GUID format if it's guid).
+  # - If the `key_types` param is not specified, all columns that you choose to submit (via the `key_names` param) are stored as the `string` data type in Log Analytics.
+  # Example:
+  #   key_names => ['key1','key2','key3','key4',...]
+  #   key_types => {'key1'=>'string' 'key2'=>'string' 'key3'=>'boolean' 'key4'=>'double' ...}
+  config :key_types, :validate => :hash, :default => {}
+
   # Max number of items to buffer before flushing. Default 50.
   config :flush_items, :validate => :number, :default => 50
 
@@ -33,13 +51,19 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
   def register
     require 'azure/loganalytics/datacollectorapi/client'
 
-    if not @log_type.match(/^[[:alpha:]]+$/)
-      raise ArgumentError, 'log_type must be only alpha characters'
-    end
-
+    #if not @log_type.match(/^[[:alpha:]]+$/)
+    #  raise ArgumentError, 'log_type must be only alpha characters'
+    #end
+
+    @key_types.each { |k, v|
+      t = v.downcase
+      if ( !t.eql?('string') && !t.eql?('double') && !t.eql?('boolean') )
+        raise ArgumentError, "Key type(#{v}) for key(#{k}) must be either string, boolean, or double"
+      end
+    }
 
     ## Start
-    @client=Azure::Loganalytics::Datacollectorapi::Client::new(@customer_id,@shared_key)
+    @client=Azure::Loganalytics::Datacollectorapi::Client::new(@customer_id,@shared_key,@endpoint)
 
     buffer_initialize(
       :max_items => @flush_items,
@@ -51,6 +75,7 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
 
   public
   def receive(event)
+    @log_type = event.sprintf(@log_type)
     # Simply save an event for later delivery
     buffer_receive(event)
   end # def receive
@@ -64,8 +89,12 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
       document = {}
       event_hash = event.to_hash()
       if @key_names.length > 0
-        @key_names.each do |key|
-          if event_hash.include?(key)
+        # Get the intersection of key_names and keys of event_hash
+        keys_intersection = @key_names & event_hash.keys
+        keys_intersection.each do |key|
+          if @key_types.include?(key)
+            document[key] = convert_value(@key_types[key], event_hash[key])
+          else
             document[key] = event_hash[key]
           end
         end
@@ -95,7 +124,20 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
     rescue Exception => ex
       @logger.error("Exception occured in posting to DataCollector API: '#{ex}', data=>" + (documents.to_json).to_s)
     end
-
   end # def flush
 
+  private
+  def convert_value(type, val)
+    t = type.downcase
+    case t
+    when "boolean"
+      v = val.downcase
+      return (v.to_s == 'true') ? true : false
+    when "double"
+      return Integer(val) rescue Float(val) rescue val
+    else
+      return val
+    end
+  end
+
 end # class LogStash::Outputs::AzureLogAnalytics
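The `convert_value` method added above leans on Ruby's inline rescue: `Integer(val)` is tried first, then `Float(val)`, and the raw value is returned if neither parses. A quick standalone check of that behavior; the `to_double` helper and sample inputs are illustrative, not part of the plugin:

```ruby
# Demonstrates the inline-rescue coercion chain used for "double" columns.
def to_double(val)
  Integer(val) rescue Float(val) rescue val
end

p to_double('42')    # => 42      (parsed as Integer first)
p to_double('42.5')  # => 42.5    (Integer() fails, Float() succeeds)
p to_double('n/a')   # => "n/a"   (falls through unchanged)
```

One side effect worth noting in the `receive` change: it overwrites the `@log_type` instance variable with the resolved value, so once a concrete value has been substituted, later events appear to reuse it rather than re-resolving the sprintf reference.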
data/logstash-output-azure_loganalytics.gemspec
CHANGED

@@ -19,7 +19,7 @@ Gem::Specification.new do |s|
 
   # Gem dependencies
   s.add_runtime_dependency "rest-client", ">= 1.8.0"
-  s.add_runtime_dependency "azure-loganalytics-datacollector-api", ">= 0.
+  s.add_runtime_dependency "azure-loganalytics-datacollector-api", ">= 0.4.0"
   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
   s.add_runtime_dependency "logstash-codec-plain"
   s.add_development_dependency "logstash-devutils"
metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-azure_loganalytics
 version: !ruby/object:Gem::Version
-  version: 0.2.3
+  version: 0.5.0
 platform: ruby
 authors:
 - Yoichi Kawasaki
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2020-07-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -29,7 +29,7 @@ dependencies:
     requirements:
     - - ">="
      - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.4.0
   name: azure-loganalytics-datacollector-api
   prerelease: false
   type: :runtime
@@ -37,7 +37,7 @@ dependencies:
     requirements:
     - - ">="
      - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.4.0
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
   requirements: