logstash-output-azure_loganalytics 0.3.1 → 0.5.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: a25ad8b80f9c07355c78a85326fcca885e67d739
-   data.tar.gz: 332c355b577a8876e3b8dd77bd8322d1479ca0c1
+   metadata.gz: f5703dcf7467f52e043faed81a31e4ac16326a28
+   data.tar.gz: a1626400b84f10787997929640fce9dcc51fad4e
  SHA512:
-   metadata.gz: d490865b12e382522d1985d275a8f7b10f38cd8bab49961bd24c0024ffdb8843fe4501cf32c5ae8d08307ee8d01cd8065f212f620df16895ce8ffb6efbf04ec3
-   data.tar.gz: 549bfc413b647fb53e2e5c0e1bc0ee66bd0553f1ac090d3cba5f6703e4ec7e4fb6f32cbe3fe34e22ac126a1a55bddf814d18ec25647992f0a0278db231b5d04b
+   metadata.gz: 63e25706eec6fad297468afd439ac9dd93c567d838f584124bc276d3568f663ae6923bf6b4fcff2d15caea3dcde26b067ba788db5a3bf4203f0789d3e321bf92
+   data.tar.gz: b2492d86bbcd2ac2aaf00c6274bc6d039a19f256e557026f3a6b011df45a53e798583b48d1c65f1c7f6a2d9bf69187516f250a2feaef9127f03beb851deffd32
CHANGELOG.md CHANGED
@@ -1,14 +1,38 @@
+ ## 0.5.2
+
+ * Fixed using sprintf in log_type - [PR #16](https://github.com/yokawasa/logstash-output-azure_loganalytics/pull/16) by [@daniel-chambers](https://github.com/daniel-chambers)
+
+ ## 0.5.1
+
+ * Change base [azure-loganalytics-datacollector-api](https://github.com/yokawasa/azure-log-analytics-data-collector) to ">= 0.5.0"
+
+ ## 0.5.0
+
+ * Support sprintf syntax like `%{my_log_type}` for `log_type` config param - [Issue #13](https://github.com/yokawasa/logstash-output-azure_loganalytics/issues/13)
+
+ ## 0.4.0
+
+ * Change base [azure-loganalytics-datacollector-api](https://github.com/yokawasa/azure-log-analytics-data-collector) to ">= 0.4.0"
+
+ ## 0.3.2
+
+ * Improvement: removed unnecessary key check
+
  ## 0.3.1
+
  * Performance optimization for large key_names list scenario - [Issue#10](https://github.com/yokawasa/logstash-output-azure_loganalytics/issues/10)
 
  ## 0.3.0
+
  * Support `key_types` param - [Issue#8](https://github.com/yokawasa/logstash-output-azure_loganalytics/issues/8)
  * Support custom log analytics API endpoint (for supporting Azure sovereign cloud) - [Issue#9](https://github.com/yokawasa/logstash-output-azure_loganalytics/issues/9)
 
  ## 0.2.3
+
  * Added additional debug logging for successful requests - [PR#7](https://github.com/yokawasa/logstash-output-azure_loganalytics/pull/7) by [@daniel-chambers](https://github.com/daniel-chambers)
 
  ## 0.2.2
+
  * Fix logging failure - [PR#6](https://github.com/yokawasa/logstash-output-azure_loganalytics/pull/6) by [@daniel-chambers](https://github.com/daniel-chambers)
 
  ## 0.2.1
data/README.md CHANGED
@@ -29,7 +29,7 @@ output {
 
  * **customer\_id (required)** - Your Operations Management Suite workspace ID
  * **shared\_key (required)** - The primary or the secondary Connected Sources client authentication key.
- * **log\_type (required)** - The name of the event type that is being submitted to Log Analytics. This must be only alpha characters.
+ * **log\_type (required)** - The name of the event type that is being submitted to Log Analytics. It must contain only alphanumeric characters and underscores, must not exceed 100 characters, and supports sprintf syntax like `%{my_log_type}` (see the example after this list).
  * **time\_generated\_field (optional)** - Default: '' (empty string). The name of the time-generated field. Be careful that the value of the field strictly follows the ISO 8601 format (YYYY-MM-DDThh:mm:ssZ). See also [this](https://docs.microsoft.com/en-us/azure/log-analytics/log-analytics-data-collector-api#create-a-request) for more details.
  * **key\_names (optional)** - Default: [] (empty array). The list of key names in the incoming record that you want to submit to Log Analytics.
  * **key\_types (optional)** - Default: {} (empty hash). The data type for each column as you want it stored in Log Analytics (`string`, `boolean`, or `double`).
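
Taken together, the updated options allow a per-event log type. A minimal pipeline sketch (the `ingest_type` field and the key lists are illustrative placeholders, not values from this diff):

```
output {
    azure_loganalytics {
        customer_id => "<OMS workspace ID>"
        shared_key => "<primary or secondary client authentication key>"
        log_type => "App_%{ingest_type}"   # sprintf reference, rendered per event
        key_names => ['message', 'severity']
        key_types => {'severity' => 'string'}
    }
}
```

Events whose `ingest_type` values differ are batched and posted separately, one Data Collector API call per rendered log type.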
data/VERSION CHANGED
@@ -1 +1 @@
- 0.3.1
+ 0.5.2
data/lib/logstash/outputs/azure_loganalytics.rb CHANGED
@@ -16,7 +16,8 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
  config :shared_key, :validate => :string, :required => true
 
  # The name of the event type that is being submitted to Log Analytics.
- # This must be only alpha characters.
+ # This must only contain alphanumeric characters and underscores, and must not exceed 100 characters.
+ # sprintf syntax like %{my_log_type} is supported.
  config :log_type, :validate => :string, :required => true
 
  # The service endpoint (Default: ods.opinsights.azure.com)
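
For context on the new comment: Logstash's `event.sprintf` expands `%{field}` references against the current event, so a hypothetical event carrying `ingest_type => "audit"` would render as follows (illustrative field names; runs inside a Logstash/plugin-test environment):

```ruby
# Illustrative only: how a sprintf-style log_type resolves per event.
require "logstash/event"

event = LogStash::Event.new('ingest_type' => 'audit')
event.sprintf('App_%{ingest_type}')  # => "App_audit"
```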
@@ -50,11 +51,6 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
  def register
    require 'azure/loganalytics/datacollectorapi/client'
 
-   ## Configure
-   if not @log_type.match(/^[[:alpha:]]+$/)
-     raise ArgumentError, 'log_type must be only alpha characters'
-   end
-
    @key_types.each { |k, v|
      t = v.downcase
      if ( !t.eql?('string') && !t.eql?('double') && !t.eql?('boolean') )
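
The hunk above drops the old alpha-only `log_type` check (it would reject sprintf references such as `%{my_log_type}`) while keeping the `key_types` validation that only `string`, `double`, and `boolean` are accepted. Those declared types are later applied per field by the plugin's private `convert_value` helper, which this diff does not touch; a minimal sketch of such a coercion, under the assumption that it simply casts the raw value:

```ruby
# Hypothetical stand-in for the plugin's private convert_value helper
# (its real body is not shown in this diff); coerces a raw field value
# to the type declared in key_types.
def convert_value(type, val)
  case type.downcase
  when 'boolean'
    val.to_s.downcase == 'true'   # anything but "true" becomes false
  when 'double'
    Float(val) rescue val         # leave the value as-is if not numeric
  else
    val.to_s                      # 'string'
  end
end

convert_value('double', '42.5')   # => 42.5
convert_value('boolean', 'True')  # => true
```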
@@ -83,20 +79,21 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
  public
  def flush (events, close=false)
 
-   documents = [] #this is the array of hashes to add Azure Log Analytics
+   documentsByLogType = {} # This is a map of log_type to list of documents (themselves maps) to send to Log Analytics
    events.each do |event|
      document = {}
+
+     log_type_for_event = event.sprintf(@log_type)
+
      event_hash = event.to_hash()
      if @key_names.length > 0
        # Get the intersection of key_names and keys of event_hash
        keys_intersection = @key_names & event_hash.keys
        keys_intersection.each do |key|
-         if event_hash.include?(key)
-           if @key_types.include?(key)
-             document[key] = convert_value(@key_types[key], event_hash[key])
-           else
-             document[key] = event_hash[key]
-           end
+         if @key_types.include?(key)
+           document[key] = convert_value(@key_types[key], event_hash[key])
+         else
+           document[key] = event_hash[key]
          end
        end
      else
@@ -105,26 +102,32 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
      # Skip if document doesn't contain any items
      next if (document.keys).length < 1
 
-     documents.push(document)
+     if documentsByLogType[log_type_for_event] == nil then
+       documentsByLogType[log_type_for_event] = []
+     end
+     documentsByLogType[log_type_for_event].push(document)
    end
 
    # Skip in case there are no candidate documents to deliver
-   if documents.length < 1
-     @logger.debug("No documents in batch for log type #{@log_type}. Skipping")
+   if documentsByLogType.length < 1
+     @logger.debug("No documents in batch. Skipping")
      return
    end
 
-   begin
-     @logger.debug("Posting log batch (log count: #{documents.length}) as log type #{@log_type} to DataCollector API. First log: " + (documents[0].to_json).to_s)
-     res = @client.post_data(@log_type, documents, @time_generated_field)
-     if Azure::Loganalytics::Datacollectorapi::Client.is_success(res)
-       @logger.debug("Successfully posted logs as log type #{@log_type} with result code #{res.code} to DataCollector API")
-     else
-       @logger.error("DataCollector API request failure: error code: #{res.code}, data=>" + (documents.to_json).to_s)
+   documentsByLogType.each do |log_type_for_events, events|
+     begin
+       @logger.debug("Posting log batch (log count: #{events.length}) as log type #{log_type_for_events} to DataCollector API. First log: " + (events[0].to_json).to_s)
+       res = @client.post_data(log_type_for_events, events, @time_generated_field)
+       if Azure::Loganalytics::Datacollectorapi::Client.is_success(res)
+         @logger.debug("Successfully posted logs as log type #{log_type_for_events} with result code #{res.code} to DataCollector API")
+       else
+         @logger.error("DataCollector API request failure: error code: #{res.code}, data=>" + (events.to_json).to_s)
+       end
+     rescue Exception => ex
+       @logger.error("Exception occured in posting to DataCollector API: '#{ex}', data=>" + (events.to_json).to_s)
      end
-   rescue Exception => ex
-     @logger.error("Exception occured in posting to DataCollector API: '#{ex}', data=>" + (documents.to_json).to_s)
    end
+
  end # def flush
 
  private
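
Net effect of the `flush` changes: instead of one post per batch under a fixed `@log_type`, documents are first bucketed by their per-event rendered log type, and each bucket becomes its own `post_data` call. A self-contained sketch of that grouping pattern (plain hashes stand in for Logstash events, and a string lookup stands in for `event.sprintf(@log_type)`):

```ruby
# Stand-ins for events whose log_type has already been rendered via sprintf.
events = [
  { 'log_type' => 'AppLog',   'message' => 'started' },
  { 'log_type' => 'AuditLog', 'message' => 'login'   },
  { 'log_type' => 'AppLog',   'message' => 'stopped' },
]

documents_by_log_type = {}
events.each do |doc|
  log_type = doc['log_type']  # stands in for event.sprintf(@log_type)
  (documents_by_log_type[log_type] ||= []) << doc
end

# One API post per distinct rendered log type:
documents_by_log_type.each do |log_type, docs|
  puts "#{log_type}: #{docs.length} document(s)"
end
# AppLog: 2 document(s)
# AuditLog: 1 document(s)
```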
@@ -141,4 +144,4 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
    end
  end
 
- end # class LogStash::Outputs::AzureLogAnalytics
+ end # class LogStash::Outputs::AzureLogAnalytics
logstash-output-azure_loganalytics.gemspec CHANGED
@@ -19,7 +19,7 @@ Gem::Specification.new do |s|
 
    # Gem dependencies
    s.add_runtime_dependency "rest-client", ">= 1.8.0"
-   s.add_runtime_dependency "azure-loganalytics-datacollector-api", ">= 0.1.5"
+   s.add_runtime_dependency "azure-loganalytics-datacollector-api", ">= 0.5.0"
    s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
    s.add_runtime_dependency "logstash-codec-plain"
    s.add_development_dependency "logstash-devutils"
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-azure_loganalytics
  version: !ruby/object:Gem::Version
-   version: 0.3.1
+   version: 0.5.2
  platform: ruby
  authors:
  - Yoichi Kawasaki
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-06-17 00:00:00.000000000 Z
+ date: 2020-09-21 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
@@ -29,7 +29,7 @@ dependencies:
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-       version: 0.1.5
+       version: 0.5.0
    name: azure-loganalytics-datacollector-api
    prerelease: false
    type: :runtime
@@ -37,7 +37,7 @@ dependencies:
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-       version: 0.1.5
+       version: 0.5.0
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
    requirements: