logstash-output-azure_loganalytics 0.2.3 → 0.3.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 979885fba76d51d97736cce9150ddc0df3e08061
-  data.tar.gz: 64e69b5fd9e4290e7ff40dfced1484033c8d764b
+  metadata.gz: a20896835f3ed40fecbd125f5146e634d1bacd40
+  data.tar.gz: 0f6440bedff853ddda5d0fcb1f852c4d7450eaff
 SHA512:
-  metadata.gz: f43dcc28048eb42f27b9053f2bd530791eb852d43f6eff796ace77e4526a873da074cb0c839780f927e397cb120b2bebca11a1b05f03289b6c8978f7706f3e96
-  data.tar.gz: cb4f87632dd4af3913f21c01bd0611648c0be7d48beaf4de7b8f295e25f767447da8c9fa80f59a8b543db441d7fac0b720cb1dc4b069bb6da48dfe40ad7c1f28
+  metadata.gz: f38c2136f52d10b7c95897d2c6982beb05e3c1c80d83abc41adaf2f1f39c40e53bf5a0f54b7836d84c2d02bca2dca00a4b87ac01022f687ca65ff8fe0446a43e
+  data.tar.gz: d5868224a493d3cd30c16f1606fa19ff41734c6e9485d2342932509d1e6fb0abfb97acd837d0c0140148e7988c26acb84da8fe1c2561d881e761fbe1803e50e2
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+## 0.3.0
+* Support `key_types` param - [Issue#8](https://github.com/yokawasa/logstash-output-azure_loganalytics/issues/8)
+* Support a custom Log Analytics API endpoint (for Azure sovereign clouds) - [Issue#9](https://github.com/yokawasa/logstash-output-azure_loganalytics/issues/9)
+
 ## 0.2.3
 * Added additional debug logging for successful requests - [PR#7](https://github.com/yokawasa/logstash-output-azure_loganalytics/pull/7) by [@daniel-chambers](https://github.com/daniel-chambers)
 
data/README.md CHANGED
@@ -19,7 +19,8 @@ output {
     customer_id => "<OMS WORKSPACE ID>"
     shared_key => "<CLIENT AUTH KEY>"
     log_type => "<LOG TYPE NAME>"
-    key_names => ['key1','key2','key3'..] ## list of Key names (array)
+    key_names => ['key1','key2','key3'..] ## list of Key names
+    key_types => {'key1'=>'string' 'key2'=>'double' 'key3'=>'boolean' .. }
     flush_items => <FLUSH_ITEMS_NUM>
     flush_interval_time => <FLUSH INTERVAL TIME(sec)>
 }
@@ -30,10 +31,20 @@ output {
 * **shared\_key (required)** - The primary or the secondary Connected Sources client authentication key.
 * **log\_type (required)** - The name of the event type that is being submitted to Log Analytics. This must contain only alpha characters.
 * **time\_generated\_field (optional)** - Default: '' (empty string). The name of the time generated field. Be careful that the value of the field must strictly follow the ISO 8601 format (YYYY-MM-DDThh:mm:ssZ). See also [this](https://docs.microsoft.com/en-us/azure/log-analytics/log-analytics-data-collector-api#create-a-request) for more details.
-* **key\_names (optional)** - Default:[] (empty array). list of Key names in in-coming record to deliver.
+* **key\_names (optional)** - Default: [] (empty array). The list of key names in the incoming record that you want to submit to Log Analytics.
+* **key\_types (optional)** - Default: {} (empty hash). The data type for each column to be stored in Log Analytics (`string`, `boolean`, or `double`); see the illustrative config below.
+    * The key names in the `key_types` param must also be included in the `key_names` param. Columns whose keys are not listed in `key_types` are treated as `string` data.
+    * Multiple key-value entries are separated by `spaces` rather than commas (see also [this](https://www.elastic.co/guide/en/logstash/current/configuration-file-structure.html#hash)).
+    * To store a column in datetime or GUID format, set `string` for that column (the value should then be in `YYYY-MM-DDThh:mm:ssZ` format for `datetime`, and in GUID format for `guid`).
+    * If the `key_types` param is not specified, all columns you submit (chosen with the `key_names` param) are stored as the `string` data type in Log Analytics.
 * **flush_items (optional)** - Default 50. Max number of items to buffer before flushing (1 - 1000).
 * **flush_interval_time (optional)** - Default 5. Max number of seconds to wait between flushes.
 
+> [NOTE] There is a special param for changing the Log Analytics API endpoint (mainly for supporting Azure sovereign clouds):
+> * **endpoint (optional)** - Default: ods.opinsights.azure.com
+
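For illustration, here is a minimal sketch of a pipeline config that combines the new `key_types` and `endpoint` params. The key names and the sovereign-cloud endpoint value are hypothetical placeholders, not values taken from this release:

```
output {
    azure_loganalytics {
        customer_id => "<OMS WORKSPACE ID>"
        shared_key => "<CLIENT AUTH KEY>"
        log_type => "ApacheAccessLog"
        key_names => ['clientip','response','bytes','cached','timestamp']
        ## 'timestamp' stays 'string' because datetime values must be submitted as ISO 8601 strings
        key_types => {'response'=>'double' 'bytes'=>'double' 'cached'=>'boolean' 'timestamp'=>'string'}
        ## hypothetical sovereign-cloud endpoint; omit to use the default ods.opinsights.azure.com
        endpoint => "ods.opinsights.azure.us"
    }
}
```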
 ## Tests
 
 Here is an example configuration where Logstash's event source and destination are configured as Apache2 access log and Azure Log Analytics respectively.
data/VERSION CHANGED
@@ -1 +1 @@
-0.2.3
+0.3.0
data/lib/logstash/outputs/azure_loganalytics.rb CHANGED
@@ -1,4 +1,5 @@
 # encoding: utf-8
+
 require "logstash/outputs/base"
 require "logstash/namespace"
 require "stud/buffer"
@@ -14,15 +15,31 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
   # The primary or the secondary Connected Sources client authentication key
   config :shared_key, :validate => :string, :required => true
 
-  # The name of the event type that is being submitted to Log Analytics. This must be only alpha characters.
+  # The name of the event type that is being submitted to Log Analytics.
+  # This must be only alpha characters.
   config :log_type, :validate => :string, :required => true
 
-  # The name of the time generated field. Be carefule that the value of field should strictly follow the ISO 8601 format (YYYY-MM-DDThh:mm:ssZ)
+  # The service endpoint (Default: ods.opinsights.azure.com)
+  config :endpoint, :validate => :string, :default => 'ods.opinsights.azure.com'
+
+  # The name of the time generated field.
+  # Be careful that the value of the field should strictly follow the ISO 8601 format (YYYY-MM-DDThh:mm:ssZ)
   config :time_generated_field, :validate => :string, :default => ''
 
-  # list of Key names in in-coming record to deliver.
+  # The list of key names in the incoming record that you want to submit to Log Analytics
   config :key_names, :validate => :array, :default => []
-
+
+  # The data type for each column to be stored in Log Analytics (`string`, `boolean`, or `double`)
+  # - The key names in the `key_types` param must also be included in the `key_names` param.
+  #   Columns whose keys are not listed in `key_types` are treated as `string` data.
+  # - Multiple key-value entries are separated by `spaces` rather than commas
+  #   (see also https://www.elastic.co/guide/en/logstash/current/configuration-file-structure.html#hash)
+  # - To store a column in datetime or GUID format, set `string` for that column (the value should then be
+  #   in `YYYY-MM-DDThh:mm:ssZ` format for `datetime`, and in GUID format for `guid`).
+  # - If the `key_types` param is not specified, all columns you submit (chosen with the `key_names`
+  #   param) are stored as the `string` data type in Log Analytics.
+  # Example:
+  #   key_names => ['key1','key2','key3','key4',...]
+  #   key_types => {'key1'=>'string' 'key2'=>'string' 'key3'=>'boolean' 'key4'=>'double' ...}
+  config :key_types, :validate => :hash, :default => {}
+
   # Max number of items to buffer before flushing. Default 50.
   config :flush_items, :validate => :number, :default => 50
 
@@ -38,8 +55,15 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
       raise ArgumentError, 'log_type must be only alpha characters'
     end
 
+    @key_types.each { |k, v|
+      t = v.downcase
+      if ( !t.eql?('string') && !t.eql?('double') && !t.eql?('boolean') )
+        raise ArgumentError, "Key type(#{v}) for key(#{k}) must be either string, boolean, or double"
+      end
+    }
+
     ## Start
-    @client=Azure::Loganalytics::Datacollectorapi::Client::new(@customer_id,@shared_key)
+    @client=Azure::Loganalytics::Datacollectorapi::Client::new(@customer_id,@shared_key,@endpoint)
 
     buffer_initialize(
       :max_items => @flush_items,
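To make the endpoint wiring above concrete, here is a minimal standalone sketch of creating the Data Collector API client with the new third argument, assuming azure-loganalytics-datacollector-api >= 0.1.5 and its conventional require path; the credentials and the sovereign-cloud host are hypothetical placeholders:

```ruby
require "azure/loganalytics/datacollectorapi/client"

customer_id = "<OMS WORKSPACE ID>"   # hypothetical placeholder
shared_key  = "<CLIENT AUTH KEY>"    # hypothetical placeholder

# Passing an endpoint overrides the default ods.opinsights.azure.com;
# "ods.opinsights.azure.us" is an example sovereign-cloud value.
client = Azure::Loganalytics::Datacollectorapi::Client.new(
  customer_id, shared_key, "ods.opinsights.azure.us")
```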
@@ -66,7 +90,11 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
     if @key_names.length > 0
       @key_names.each do |key|
         if event_hash.include?(key)
-          document[key] = event_hash[key]
+          if @key_types.include?(key)
+            document[key] = convert_value(@key_types[key], event_hash[key])
+          else
+            document[key] = event_hash[key]
+          end
         end
       end
     else
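A short sketch of what this branch produces for a hypothetical event (the key names and values are made up; `convert_value` is the private helper added further down in this diff):

```ruby
key_names  = ['clientip', 'response', 'bytes']
key_types  = { 'response' => 'double', 'bytes' => 'double' }
event_hash = { 'clientip' => '10.0.0.1', 'response' => '200', 'bytes' => '1024' }

document = {}
key_names.each do |key|
  next unless event_hash.include?(key)
  # Keys listed in key_types are converted; everything else passes through as-is.
  document[key] = if key_types.include?(key)
    convert_value(key_types[key], event_hash[key])
  else
    event_hash[key]
  end
end
# document => {"clientip"=>"10.0.0.1", "response"=>200, "bytes"=>1024}
```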
@@ -95,7 +123,20 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
     rescue Exception => ex
       @logger.error("Exception occured in posting to DataCollector API: '#{ex}', data=>" + (documents.to_json).to_s)
     end
-
   end # def flush
 
-end # class LogStash::Outputs::AzureLogAnalytics
+  private
+  def convert_value(type, val)
+    t = type.downcase
+    case t
+    when "boolean"
+      v = val.downcase
+      return (v.to_s == 'true' ) ? true : false
+    when "double"
+      return Integer(val) rescue Float(val) rescue val
+    else
+      return val
+    end
+  end
+
+end # class LogStash::Outputs::AzureLogAnalytics
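The conversion rules are lossy in places, so a few illustrative calls are worth spelling out; the expected results follow directly from the method as written:

```ruby
convert_value("double",  "200")   # => 200     Integer("200") succeeds first
convert_value("double",  "0.25")  # => 0.25    Integer() fails, Float() succeeds
convert_value("double",  "n/a")   # => "n/a"   both parses fail, value passes through unchanged
convert_value("boolean", "TRUE")  # => true    match on "true" is case-insensitive
convert_value("boolean", "no")    # => false   any value other than "true" becomes false
convert_value("string",  "abc")   # => "abc"   default branch returns the value as-is
```

Note that the boolean branch calls `val.downcase` before comparing, so it assumes the event field is a String; a field that already holds `true` or `false` would raise NoMethodError.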
data/logstash-output-azure_loganalytics.gemspec CHANGED
@@ -19,7 +19,7 @@ Gem::Specification.new do |s|
 
   # Gem dependencies
   s.add_runtime_dependency "rest-client", ">= 1.8.0"
-  s.add_runtime_dependency "azure-loganalytics-datacollector-api", ">= 0.1.2"
+  s.add_runtime_dependency "azure-loganalytics-datacollector-api", ">= 0.1.5"
   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
   s.add_runtime_dependency "logstash-codec-plain"
   s.add_development_dependency "logstash-devutils"
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-azure_loganalytics
 version: !ruby/object:Gem::Version
-  version: 0.2.3
+  version: 0.3.0
 platform: ruby
 authors:
 - Yoichi Kawasaki
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-03-30 00:00:00.000000000 Z
+date: 2019-06-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -29,7 +29,7 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 0.1.2
+        version: 0.1.5
   name: azure-loganalytics-datacollector-api
   prerelease: false
   type: :runtime
@@ -37,7 +37,7 @@ dependencies:
     requirements:
    - - ">="
       - !ruby/object:Gem::Version
-        version: 0.1.2
+        version: 0.1.5
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
   requirements: