logstash-output-azure_loganalytics 0.5.1 → 0.5.2
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/VERSION +1 -1
- data/lib/logstash/outputs/azure_loganalytics.rb +22 -18
- metadata +2 -2
checksums.yaml
CHANGED

```diff
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f5703dcf7467f52e043faed81a31e4ac16326a28
+  data.tar.gz: a1626400b84f10787997929640fce9dcc51fad4e
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 63e25706eec6fad297468afd439ac9dd93c567d838f584124bc276d3568f663ae6923bf6b4fcff2d15caea3dcde26b067ba788db5a3bf4203f0789d3e321bf92
+  data.tar.gz: b2492d86bbcd2ac2aaf00c6274bc6d039a19f256e557026f3a6b011df45a53e798583b48d1c65f1c7f6a2d9bf69187516f250a2feaef9127f03beb851deffd32
```
data/CHANGELOG.md
CHANGED

```diff
@@ -1,3 +1,7 @@
+## 0.5.2
+
+* Fixed using sprintf in log_type - [PR #16](https://github.com/yokawasa/logstash-output-azure_loganalytics/pull/16) by [@daniel-chambers](https://github.com/daniel-chambers)
+
 ## 0.5.1
 
 * Change base [azure-loganalytics-datacollector-api](https://github.com/yokawasa/azure-log-analytics-data-collector) to ">= 0.5.0"
```
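With this fix, `log_type` is rendered per event using Logstash's sprintf syntax, so a single output can route events to different custom log tables. A minimal pipeline sketch of what this enables; the `customer_group` field is a hypothetical example, and the `customer_id`/`shared_key` values are placeholders for the workspace credentials the plugin requires:

```
output {
  azure_loganalytics {
    customer_id => "<Log Analytics workspace ID>"
    shared_key  => "<workspace shared key>"
    log_type    => "MyAppLog_%{customer_group}"  # %{...} is resolved per event
  }
}
```

Note that the resolved value still has to satisfy the Data Collector API's record-type rules (letters, numbers, and underscores).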
data/VERSION
CHANGED

```diff
@@ -1 +1 @@
-0.5.1
+0.5.2
```
data/lib/logstash/outputs/azure_loganalytics.rb
CHANGED

```diff
@@ -51,10 +51,6 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
   def register
     require 'azure/loganalytics/datacollectorapi/client'
 
-    #if not @log_type.match(/^[[:alpha:]]+$/)
-    #  raise ArgumentError, 'log_type must be only alpha characters'
-    #end
-
     @key_types.each { |k, v|
       t = v.downcase
       if ( !t.eql?('string') && !t.eql?('double') && !t.eql?('boolean') )
```
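The four deleted lines in `register` were already commented out, and removing them fits the sprintf change: an alpha-only guard would reject any template before it is resolved, even when every resolved value would be valid. A quick illustration (not plugin code):

```ruby
# An alpha-only check cannot see past sprintf placeholders:
"MyAppLog".match(/^[[:alpha:]]+$/)                    # matches
"MyAppLog_%{customer_group}".match(/^[[:alpha:]]+$/)  # nil: '_', '%', '{' are not alpha
```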
```diff
@@ -75,7 +71,6 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
 
   public
   def receive(event)
-    @log_type = event.sprintf(@log_type)
     # Simply save an event for later delivery
     buffer_receive(event)
   end # def receive
```
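For context on why the `event.sprintf` call was removed from `receive`: assigning the result back to `@log_type` replaced the shared template with one event's resolved value, so the first event through the output destroyed the placeholders for every later event. A sketch of the failure mode, assuming a hypothetical `customer_group` field:

```ruby
# Illustration of the 0.5.1 bug, not plugin code.
@log_type = "MyAppLog_%{customer_group}"  # configured template
@log_type = event_a.sprintf(@log_type)    # => "MyAppLog_contoso"
@log_type = event_b.sprintf(@log_type)    # no %{...} left to resolve
# => still "MyAppLog_contoso"; event_b is posted under event_a's log type.
```

The hunks below fix this by resolving the template per event inside `flush`, without ever mutating it.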
```diff
@@ -84,9 +79,12 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
   public
   def flush (events, close=false)
 
-    documents = []  # this is the array of hashes to add Azure Log Analytics
+    documentsByLogType = {} # This is a map of log_type to list of documents (themselves maps) to send to Log Analytics
     events.each do |event|
       document = {}
+
+      log_type_for_event = event.sprintf(@log_type)
+
       event_hash = event.to_hash()
       if @key_names.length > 0
         # Get the intersection of key_names and keys of event_hash
```
```diff
@@ -104,26 +102,32 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
       # Skip if document doesn't contain any items
       next if (document.keys).length < 1
 
-      documents.push(document)
+      if documentsByLogType[log_type_for_event] == nil then
+        documentsByLogType[log_type_for_event] = []
+      end
+      documentsByLogType[log_type_for_event].push(document)
     end
 
     # Skip in case there are no candidate documents to deliver
-    if documents.length < 1
-      @logger.debug("No documents in batch for log type #{@log_type}. Skipping")
+    if documentsByLogType.length < 1
+      @logger.debug("No documents in batch. Skipping")
       return
     end
 
-    begin
-      @logger.debug("Posting log batch (log count: #{documents.length}) as log type #{@log_type} to DataCollector API. First log: " + (documents[0].to_json).to_s)
-      res = @client.post_data(@log_type, documents, @time_generated_field)
-      if Azure::Loganalytics::Datacollectorapi::Client.is_success(res)
-        @logger.debug("Successfully posted logs as log type #{@log_type} with result code #{res.code} to DataCollector API")
-      else
-        @logger.error("DataCollector API request failure: error code: #{res.code}, data=>" + (documents.to_json).to_s)
+    documentsByLogType.each do |log_type_for_events, events|
+      begin
+        @logger.debug("Posting log batch (log count: #{events.length}) as log type #{log_type_for_events} to DataCollector API. First log: " + (events[0].to_json).to_s)
+        res = @client.post_data(log_type_for_events, events, @time_generated_field)
+        if Azure::Loganalytics::Datacollectorapi::Client.is_success(res)
+          @logger.debug("Successfully posted logs as log type #{log_type_for_events} with result code #{res.code} to DataCollector API")
+        else
+          @logger.error("DataCollector API request failure: error code: #{res.code}, data=>" + (events.to_json).to_s)
+        end
+      rescue Exception => ex
+        @logger.error("Exception occured in posting to DataCollector API: '#{ex}', data=>" + (events.to_json).to_s)
       end
-    rescue Exception => ex
-      @logger.error("Exception occured in posting to DataCollector API: '#{ex}', data=>" + (documents.to_json).to_s)
     end
+
   end # def flush
 
   private
```
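The rewritten `flush` accumulates `documentsByLogType` by hand. For readers who prefer `Enumerable`, an equivalent grouping could look like the sketch below; `build_document(event)` is a hypothetical helper standing in for the per-event document construction in the loop above:

```ruby
# Sketch only: group non-empty documents by their per-event log type.
pairs = events.map { |e| [e.sprintf(@log_type), build_document(e)] }
              .reject { |_log_type, doc| doc.empty? }
documents_by_log_type = pairs
  .group_by { |log_type, _doc| log_type }
  .transform_values { |grouped| grouped.map { |_log_type, doc| doc } }
# => e.g. {"MyAppLog_contoso"=>[{...}], "MyAppLog_fabrikam"=>[{...}, {...}]}
```

The explicit hash in the diff does the same job in a single pass and keeps the change minimal.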
metadata
CHANGED

```diff
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-azure_loganalytics
 version: !ruby/object:Gem::Version
-  version: 0.5.1
+  version: 0.5.2
 platform: ruby
 authors:
 - Yoichi Kawasaki
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-
+date: 2020-09-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
```