logstash-output-azure_loganalytics 0.5.2 → 0.6.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +7 -0
- data/README.md +9 -4
- data/VERSION +1 -1
- data/lib/logstash/outputs/azure_loganalytics.rb +23 -33
- data/spec/outputs/azure_loganalytics_spec.rb +2 -4
- metadata +2 -2
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c8478ec73e02eae91388307f89026500b140364d
+  data.tar.gz: 8ca5e7e57bf454b4f33ff614b5f9e25a76b1ea1c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: cf96615306ab45d89fa999a4ded0ed0da8f31e5b6caf01b1d473b57a36f16303d04211eda349944a27badd93fcea4da0fa632795eedb5b385ceb835f7b2448fa
+  data.tar.gz: d0762a739e1f30b70a0cf1084cdbfabdd75cea7ce6851eef6847e09d71543bb8bec9237a21e20fc8eea39a2213ea6367d6ca5d678244b5a31734312a111c9e9f
data/CHANGELOG.md
CHANGED

@@ -1,3 +1,10 @@
+## 0.6.0
+
+* Multithreading support - [PR #17](https://github.com/yokawasa/logstash-output-azure_loganalytics/pull/17) by [@daniel-chambers](https://github.com/daniel-chambers)
+* Big performance improvement
+* New param `max_batch_items` is added
+* The `flush_items` and `flush_interval_time` params are no longer supported in the plugin configuration
+
 ## 0.5.2
 
 * Fixed using sprintf in log_type - [PR #16](https://github.com/yokawasa/logstash-output-azure_loganalytics/pull/16) by [@daniel-chambers](https://github.com/daniel-chambers)
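To make the new batching behavior concrete, here is a minimal standalone Ruby sketch (illustrative values only, not plugin code) of how `Enumerable#each_slice` partitions queued events once `max_batch_items` replaces the old buffer params; the plugin applies the same pattern per log type inside `multi_receive` (see the code diff below).

```ruby
# Minimal sketch: a max_batch_items of 50 splits 120 pending events
# into three requests of 50, 50, and 20. The plugin does this per log type.
events = (1..120).to_a
max_batch_items = 50

events.each_slice(max_batch_items) do |batch|
  puts "posting batch of #{batch.length} events"
end
# => posting batch of 50 events
# => posting batch of 50 events
# => posting batch of 20 events
```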
data/README.md
CHANGED

@@ -1,6 +1,13 @@
 # Azure Log Analytics output plugin for Logstash
 logstash-output-azure_loganalytics is a logstash plugin to output to Azure Log Analytics. [Logstash](https://www.elastic.co/products/logstash) is an open source, server-side data processing pipeline that ingests data from a multitude of sources simultaneously, transforms it, and then sends it to your favorite [destinations](https://www.elastic.co/products/logstash). [Log Analytics](https://azure.microsoft.com/en-us/services/log-analytics/) is a service in Operations Management Suite (OMS) that helps you collect and analyze data generated by resources in your cloud and on-premises environments. It gives you real-time insights using integrated search and custom dashboards to readily analyze millions of records across all of your workloads and servers regardless of their physical location. The plugin stores incoming events to Azure Log Analytics by leveraging the [Log Analytics HTTP Data Collector API](https://docs.microsoft.com/en-us/azure/log-analytics/log-analytics-data-collector-api).
 
+> [NOTICE]
+> logstash-output-azure_loganalytics >= 0.6.0
+> - Multithreading support
+> - New param `max_batch_items` is added
+> - The `flush_items` and `flush_interval_time` params are no longer supported in the plugin configuration
+
 ## Installation
 
 You can install this plugin using the Logstash "plugin" or "logstash-plugin" (for newer versions of Logstash) command:

@@ -21,8 +28,7 @@ output {
     log_type => "<LOG TYPE NAME>"
     key_names => ['key1','key2','key3'..] ## list of Key names
     key_types => {'key1'=> 'string' 'key2'=>'double' 'key3'=>'boolean' .. }
-    flush_items => <FLUSH ITEMS (num)>
-    flush_interval_time => <FLUSH INTERVAL TIME(sec)>
+    max_batch_items => <MAX BATCH ITEMS (num)>
   }
 }
 ```

@@ -37,8 +43,7 @@ output {
 * Multiple key value entries are separated by `spaces` rather than commas (See also [this](https://www.elastic.co/guide/en/logstash/current/configuration-file-structure.html#hash))
 * If you want to store a column as datetime or guid data format, set `string` for the column (the value of the column should be in `YYYY-MM-DDThh:mm:ssZ` format if it's `datetime`, and in `GUID` format if it's `guid`).
 * If the `key_types` param is not specified, all columns that you choose to submit (via the `key_names` param) are stored as the `string` data type in Log Analytics.
-* **flush_items (optional)** - Default 50. Max number of items to buffer before flushing.
-* **flush_interval_time (optional)** - Default 5. Max number of seconds to wait between flushes.
+* **max_batch_items (optional)** - Default 50. Maximum number of log events to put in one request to Log Analytics.
 
 > [NOTE] There is a special param for changing the Log Analytics API endpoint (mainly for supporting Azure sovereign cloud)
 > * **endpoint (optional)** - Default: ods.opinsights.azure.com
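To illustrate the README's datetime rule, here is a short Ruby snippet (illustrative only; the field name `mytimestamp` is hypothetical) producing a value in the required `YYYY-MM-DDThh:mm:ssZ` format for a column declared as `string` in `key_types`:

```ruby
# Illustrative only: build an ISO 8601 UTC timestamp that Log Analytics
# can ingest as datetime when the column is declared 'string' in key_types.
require "time"

record = { "mytimestamp" => Time.now.utc.iso8601 }
puts record["mytimestamp"] # e.g. "2020-10-01T12:34:56Z"
```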
data/VERSION
CHANGED

@@ -1 +1 @@
-0.5.2
+0.6.0
data/lib/logstash/outputs/azure_loganalytics.rb
CHANGED

@@ -2,11 +2,9 @@
 
 require "logstash/outputs/base"
 require "logstash/namespace"
-require "stud/buffer"
+require "securerandom"
 
 class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
-  include Stud::Buffer
-
   config_name "azure_loganalytics"
 
   # Your Operations Management Suite workspace ID

@@ -41,11 +39,10 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
   # key_types => {'key1'=>'string' 'key2'=>'string' 'key3'=>'boolean' 'key4'=>'double' ...}
   config :key_types, :validate => :hash, :default => {}
 
-  # Max number of items to buffer before flushing (default 50)
-  config :flush_items, :validate => :number, :default => 50
-
-  # Max number of seconds to wait between flushes (default 5)
-  config :flush_interval_time, :validate => :number, :default => 5
+  # Maximum number of log events to put in one request to Log Analytics
+  config :max_batch_items, :validate => :number, :default => 50
+
+  concurrency :shared
 
   public
   def register

@@ -61,24 +58,14 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
     ## Start
     @client=Azure::Loganalytics::Datacollectorapi::Client::new(@customer_id,@shared_key,@endpoint)
 
-    buffer_initialize(
-      :max_items => @flush_items,
-      :max_interval => @flush_interval_time,
-      :logger => @logger
-    )
-
   end # def register
 
   public
-  def receive(event)
-    buffer_receive(event)
-  end # def receive
+  def multi_receive(events)
+
+    flush_guid = SecureRandom.uuid
+    @logger.debug("Start receive: #{flush_guid}. Received #{events.length} events")
 
-  # called from Stud::Buffer#buffer_flush when there are events to flush
-  public
-  def flush (events, close=false)
-
     documentsByLogType = {} # This is a map of log_type to list of documents (themselves maps) to send to Log Analytics
     events.each do |event|
       document = {}

@@ -115,20 +102,23 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
     end
 
     documentsByLogType.each do |log_type_for_events, events|
-      begin
-        @logger.debug("Posting log batch (log count: #{events.length}) as log type #{log_type_for_events} to DataCollector API. First log: " + (events[0].to_json).to_s)
-        res = @client.post_data(log_type_for_events, events, @time_generated_field)
-        if Azure::Loganalytics::Datacollectorapi::Client.is_success(res)
-          @logger.debug("Successfully posted logs as log type #{log_type_for_events} with result code #{res.code} to DataCollector API")
-        else
-          @logger.error("DataCollector API request failure: error code: #{res.code}, data=>" + (events.to_json).to_s)
+      events.each_slice(@max_batch_items) do |event_batch|
+        begin
+          @logger.debug("Posting log batch (log count: #{event_batch.length}) as log type #{log_type_for_events} to DataCollector API. First log: " + (event_batch[0].to_json).to_s)
+          res = @client.post_data(log_type_for_events, event_batch, @time_generated_field)
+          if Azure::Loganalytics::Datacollectorapi::Client.is_success(res)
+            @logger.debug("Successfully posted logs as log type #{log_type_for_events} with result code #{res.code} to DataCollector API")
+          else
+            @logger.error("DataCollector API request failure (log type #{log_type_for_events}): error code: #{res.code}, data=>" + (event_batch.to_json).to_s)
+          end
+        rescue Exception => ex
+          @logger.error("Exception occurred in posting to DataCollector API as log type #{log_type_for_events}: '#{ex}', data=>" + (event_batch.to_json).to_s)
         end
-      rescue Exception => ex
-        @logger.error("Exception occurred in posting to DataCollector API: '#{ex}', data=>" + (events.to_json).to_s)
       end
     end
-
-  end # def flush
+    @logger.debug("End receive: #{flush_guid}")
+
+  end # def multi_receive
 
   private
   def convert_value(type, val)
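The `concurrency :shared` declaration above means Logstash may call `multi_receive` from several pipeline workers at once. The following minimal sketch (a standalone harness with a hypothetical `multi_receive_sketch` method, not Logstash internals) shows why the new code tolerates this: every mutable structure (`flush_guid`, the per-call log-type map) is created inside the method, so concurrent calls never share state.

```ruby
# Sketch: concurrent multi_receive-style calls are safe when all mutable
# state is method-local, as in the plugin's new multi_receive above.
require "securerandom"

def multi_receive_sketch(events)
  flush_guid = SecureRandom.uuid                      # per-call, never shared
  by_log_type = events.group_by { |e| e[:type] }      # per-call map
  by_log_type.each do |log_type, batch|
    puts "#{flush_guid[0, 8]}: #{batch.length} event(s) for #{log_type}"
  end
end

threads = 4.times.map do |i|
  Thread.new { multi_receive_sketch([{ type: "Log#{i}" }]) }
end
threads.each(&:join)
```

This is also why the diff drops Stud::Buffer: a shared buffer would need locking across workers, whereas slicing the already-delivered `events` array requires none.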
data/spec/outputs/azure_loganalytics_spec.rb
CHANGED

@@ -28,7 +28,7 @@ describe LogStash::Outputs::AzureLogAnalytics do
     azure_loganalytics_output.register
   end
 
-  describe "#flush" do
+  describe "#multi_receive" do
     it "Should successfully send the event to Azure Log Analytics" do
       events = []
       log1 = {

@@ -61,11 +61,9 @@ describe LogStash::Outputs::AzureLogAnalytics do
 
       event1 = LogStash::Event.new(log1)
       event2 = LogStash::Event.new(log2)
-      azure_loganalytics_output.receive(event1)
-      azure_loganalytics_output.receive(event2)
       events.push(event1)
       events.push(event2)
-      expect {azure_loganalytics_output.flush(events)}.to_not raise_error
+      expect {azure_loganalytics_output.multi_receive(events)}.to_not raise_error
     end
   end
 
metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-azure_loganalytics
 version: !ruby/object:Gem::Version
-  version: 0.5.2
+  version: 0.6.0
 platform: ruby
 authors:
 - Yoichi Kawasaki
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-
+date: 2020-10-01 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement