sentinelblue-logstash-output-azure-loganalytics 1.1.1 → 1.1.3.rc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -0
- data/README.md +11 -6
- data/VERSION +1 -1
- data/lib/logstash/{logAnalyticsClient/logAnalyticsClient.rb → azureLAClasses/azureLAClient.rb} +10 -10
- data/lib/logstash/{logAnalyticsClient/logStashAutoResizeBuffer.rb → azureLAClasses/logAnalyticsAutoResizeBuffer.rb} +30 -30
- data/lib/logstash/{logAnalyticsClient/logstashLoganalyticsConfiguration.rb → azureLAClasses/logAnalyticsConfiguration.rb} +15 -4
- data/lib/logstash/outputs/sentinelblue-logstash-output-azure-loganalytics.rb +15 -17
- data/sentinelblue-logstash-output-azure-loganalytics.gemspec +1 -1
- metadata +8 -8
checksums.yaml
CHANGED
```diff
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8a6135d1df0d5d38a762d443acea323b7cca725c3a381a4782137e5da06c1830
+  data.tar.gz: 74141b9a0d28b93e623e1da575243ea94ffdf2f2baf4527102ff5e21297fc643
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a0142efda4fff94c1bc7a46865674c27cf68e23b46e32789257c4b4bb686f574bc8397adb7da3a54861b6dc70c1b87eea3e5b9a4abf3ec20a7db567fefb58a45
+  data.tar.gz: 0b8f6ca5ba27907346cd8fe31a9965e4e301ae0772324fb2407fb1434ac0ad26b9afee5838320bcfd7ee167b667ef43a398693ca9e125438406476b77bf4cbd9
```
data/CHANGELOG.md
CHANGED
data/README.md
CHANGED
````diff
@@ -1,30 +1,35 @@
 # Sentinel Blue Azure Log Analytics output plugin for Logstash
 
-
-
-
+Sentinel Blue provides an updated output plugin for Logstash. Using this output plugin, you will be able to send any log you want using Logstash to the Azure Sentinel/Log Analytics workspace using dynamic custom table names.
+
+This allows you to set your destination table in your filtering process and reference it in the output plugin. The original plugin functionality has been preserved as well.
 
 Azure Sentinel output plugin uses the rest API integration to Log Analytics, in order to ingest the logs into custom logs tables [What are custom logs tables](<https://docs.microsoft.com/azure/azure-monitor/platform/data-sources-custom-logs>)
 
 This plugin is based on the original provided by the Azure Sentinel team. View the original plugin here: <https://github.com/Azure/Azure-Sentinel/tree/master/DataConnectors/microsoft-logstash-output-azure-loganalytics>
 
 ```text
-Plugin version: v1.1.
-Released on: 2022-10-
+Plugin version: v1.1.2.rc1
+Released on: 2022-10-28
 ```
 
 This plugin is currently in development and is free to use. We welcome contributions from the open source community on this project, and we request and appreciate feedback from users.
 
+<https://rubygems.org/gems/sentinelblue-logstash-output-azure-loganalytics>
+
 ## Support
 
 For issues regarding the output plugin please open a support issue here. Create a new issue describing the problem so that we can assist you.
 
 ## Installation
 
-Azure Sentinel provides Logstash an output plugin to Log analytics workspace.
 Install the sentinelblue-logstash-output-azure-loganalytics, use [Logstash Working with plugins](<https://www.elastic.co/guide/en/logstash/current/working-with-plugins.html>) document.
 For offline setup follow [Logstash Offline Plugin Management instruction](<https://www.elastic.co/guide/en/logstash/current/offline-plugins.html>).
 
+```bash
+logstash-plugin install sentinelblue-logstash-output-azure-loganalytics
+```
+
 Required Logstash version: between 7.0+
 
 ## Configuration
````
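The dynamic table name feature described in the new README text is driven entirely by the `custom_log_table_name` option. A minimal pipeline sketch follows; the option names `workspace_id`, `workspace_key`, and `custom_log_table_name` appear in the plugin source later in this diff, but the output block name is assumed to match the gem name, and the workspace values and `destination_table` field are placeholders:

```text
output {
  sentinelblue-logstash-output-azure-loganalytics {
    workspace_id => "<YOUR_WORKSPACE_ID>"
    workspace_key => "<YOUR_WORKSPACE_KEY>"
    # Static form: custom_log_table_name => "MyStaticTable"
    # Dynamic form: resolved per event from a field set during filtering
    custom_log_table_name => "%{[destination_table]}"
  }
}
```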
data/VERSION
CHANGED
```diff
@@ -1 +1 @@
-1.1.1
+1.1.3.rc1
```
data/lib/logstash/{logAnalyticsClient/logAnalyticsClient.rb → azureLAClasses/azureLAClient.rb}
RENAMED
```diff
@@ -1,18 +1,18 @@
 # encoding: utf-8
-require "logstash/logAnalyticsClient/logstashLoganalyticsConfiguration"
+require "logstash/azureLAClasses/logAnalyticsConfiguration"
 require 'rest-client'
 require 'json'
 require 'openssl'
 require 'base64'
 require 'time'
 
-class LogAnalyticsClient
+class AzureLAClient
   API_VERSION = '2016-04-01'.freeze
 
-  def initialize (logstashLoganalyticsConfiguration)
-    @logstashLoganalyticsConfiguration = logstashLoganalyticsConfiguration
-    set_proxy(@logstashLoganalyticsConfiguration.proxy)
-    @uri = sprintf("https://%s.%s/api/logs?api-version=%s", @logstashLoganalyticsConfiguration.workspace_id, @logstashLoganalyticsConfiguration.endpoint, API_VERSION)
+  def initialize (logAnalyticsConfiguration)
+    @logAnalyticsConfiguration = logAnalyticsConfiguration
+    set_proxy(@logAnalyticsConfiguration.proxy)
+    @uri = sprintf("https://%s.%s/api/logs?api-version=%s", @logAnalyticsConfiguration.workspace_id, @logAnalyticsConfiguration.endpoint, API_VERSION)
   end # def initialize
 
 
```
```diff
@@ -39,8 +39,8 @@ class LogAnalyticsClient
       'Authorization' => signature(date, body_bytesize_length),
       'Log-Type' => custom_table_name,
       'x-ms-date' => date,
-      'time-generated-field' => @logstashLoganalyticsConfiguration.time_generated_field,
-      'x-ms-AzureResourceId' => @logstashLoganalyticsConfiguration.azure_resource_id
+      'time-generated-field' => @logAnalyticsConfiguration.time_generated_field,
+      'x-ms-AzureResourceId' => @logAnalyticsConfiguration.azure_resource_id
     }
   end # def get_header
 
```
```diff
@@ -61,10 +61,10 @@ class LogAnalyticsClient
   def signature(date, body_bytesize_length)
     sigs = sprintf("POST\n%d\napplication/json\nx-ms-date:%s\n/api/logs", body_bytesize_length, date)
     utf8_sigs = sigs.encode('utf-8')
-    decoded_shared_key = Base64.decode64(@logstashLoganalyticsConfiguration.workspace_key)
+    decoded_shared_key = Base64.decode64(@logAnalyticsConfiguration.workspace_key)
     hmac_sha256_sigs = OpenSSL::HMAC.digest(OpenSSL::Digest.new('sha256'), decoded_shared_key, utf8_sigs)
     encoded_hash = Base64.encode64(hmac_sha256_sigs)
-    authorization = sprintf("SharedKey %s:%s", @logstashLoganalyticsConfiguration.workspace_id, encoded_hash)
+    authorization = sprintf("SharedKey %s:%s", @logAnalyticsConfiguration.workspace_id, encoded_hash)
 
     return authorization
   end # def signature
```
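The `signature` method above implements the Log Analytics Data Collector API shared-key scheme: an HMAC-SHA256 over a canonical string, keyed with the base64-decoded workspace key. A self-contained Ruby sketch of the same computation, with made-up example inputs that are not part of the gem:

```ruby
require 'base64'
require 'openssl'
require 'time'

# Mirror of the shared-key Authorization header built in AzureLAClient#signature.
def shared_key_authorization(workspace_id, workspace_key, date, body_bytesize)
  # Canonical string: VERB, content length, content type, x-ms-date, resource.
  string_to_sign = sprintf("POST\n%d\napplication/json\nx-ms-date:%s\n/api/logs", body_bytesize, date)
  decoded_key = Base64.decode64(workspace_key)  # workspace keys are base64-encoded
  hmac = OpenSSL::HMAC.digest(OpenSSL::Digest.new('sha256'), decoded_key, string_to_sign.encode('utf-8'))
  # encode64 appends a newline; strip it so the header value stays on one line
  sprintf("SharedKey %s:%s", workspace_id, Base64.encode64(hmac).strip)
end

body = '[{"message":"hello"}]'
date = Time.now.httpdate  # RFC 1123 date, matching the x-ms-date header
puts shared_key_authorization("00000000-0000-0000-0000-000000000000",
                              Base64.strict_encode64("not-a-real-key"), date, body.bytesize)
```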
data/lib/logstash/{logAnalyticsClient/logStashAutoResizeBuffer.rb → azureLAClasses/logAnalyticsAutoResizeBuffer.rb}
RENAMED
```diff
@@ -1,23 +1,23 @@
 # encoding: utf-8
 require "stud/buffer"
-require "logstash/logAnalyticsClient/logAnalyticsClient"
+require "logstash/azureLAClasses/azureLAClient"
 require "stud/buffer"
-require "logstash/logAnalyticsClient/logstashLoganalyticsConfiguration"
+require "logstash/azureLAClasses/logAnalyticsConfiguration"
 
-# LogStashAutoResizeBuffer class setting a resizable buffer which is flushed periodically
+# LogAnalyticsAutoResizeBuffer class setting a resizable buffer which is flushed periodically
 # The buffer resize itself according to Azure Loganalytics and configuration limitations
-class LogStashAutoResizeBuffer
+class LogAnalyticsAutoResizeBuffer
   include Stud::Buffer
 
-  def initialize(logstashLoganalyticsConfiguration,custom_table_name)
-    @logstashLoganalyticsConfiguration = logstashLoganalyticsConfiguration
-    @logger = @logstashLoganalyticsConfiguration.logger
+  def initialize(logAnalyticsConfiguration,custom_table_name)
+    @logAnalyticsConfiguration = logAnalyticsConfiguration
+    @logger = @logAnalyticsConfiguration.logger
     @custom_log_table_name = custom_table_name
-    @client=LogAnalyticsClient::new(logstashLoganalyticsConfiguration)
+    @client=AzureLAClient::new(logAnalyticsConfiguration)
     buffer_initialize(
-      :max_items => logstashLoganalyticsConfiguration.max_items,
-      :max_interval => logstashLoganalyticsConfiguration.plugin_flush_interval,
-      :logger => @logstashLoganalyticsConfiguration.logger
+      :max_items => logAnalyticsConfiguration.max_items,
+      :max_interval => logAnalyticsConfiguration.plugin_flush_interval,
+      :logger => @logAnalyticsConfiguration.logger
     )
   end # initialize
 
```
```diff
@@ -41,7 +41,7 @@ class LogStashAutoResizeBuffer
     # We send Json in the REST request
     documents_json = documents.to_json
     # Setting resizing to true will cause changing the max size
-    if @logstashLoganalyticsConfiguration.amount_resizing == true
+    if @logAnalyticsConfiguration.amount_resizing == true
       # Resizing the amount of messages according to size of message received and amount of messages
       change_message_limit_size(documents.length, documents_json.bytesize)
     end
```
```diff
@@ -60,12 +60,12 @@ class LogStashAutoResizeBuffer
         @logger.info("Successfully posted #{amount_of_documents} logs into custom log analytics table[#{@custom_log_table_name}].")
       else
         @logger.error("DataCollector API request failure: error code: #{response.code}, data=>" + (documents.to_json).to_s)
-        resend_message(documents_json, amount_of_documents, @logstashLoganalyticsConfiguration.retransmission_time)
+        resend_message(documents_json, amount_of_documents, @logAnalyticsConfiguration.retransmission_time)
       end
     rescue Exception => ex
-      @logger.error("Exception in posting data to Azure Loganalytics
+      @logger.error("Exception in posting data to Azure Loganalytics. [Exception: '#{ex}]'")
       @logger.trace("Exception in posting data to Azure Loganalytics.[amount_of_documents=#{amount_of_documents} documents=#{documents_json}]")
-      resend_message(documents_json, amount_of_documents, @logstashLoganalyticsConfiguration.retransmission_time)
+      resend_message(documents_json, amount_of_documents, @logAnalyticsConfiguration.retransmission_time)
     end
   end # end send_message_to_loganalytics
 
```
```diff
@@ -73,19 +73,19 @@ class LogStashAutoResizeBuffer
   # We would like to do it until we reached to the duration
   def resend_message(documents_json, amount_of_documents, remaining_duration)
     if remaining_duration > 0
-      @logger.info("Resending #{amount_of_documents} documents as log type #{@custom_log_table_name} to DataCollector API in #{@logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY} seconds.")
-      sleep @logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY
+      @logger.info("Resending #{amount_of_documents} documents as log type #{@custom_log_table_name} to DataCollector API in #{@logAnalyticsConfiguration.RETRANSMISSION_DELAY} seconds.")
+      sleep @logAnalyticsConfiguration.RETRANSMISSION_DELAY
       begin
         response = @client.post_data(documents_json,@custom_log_table_name)
         if is_successfully_posted(response)
           @logger.info("Successfully sent #{amount_of_documents} logs into custom log analytics table[#{@custom_log_table_name}] after resending.")
         else
-          @logger.debug("Resending #{amount_of_documents} documents failed, will try to resend for #{(remaining_duration - @logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY)}")
-          resend_message(documents_json, amount_of_documents, (remaining_duration - @logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY))
+          @logger.debug("Resending #{amount_of_documents} documents failed (error code #{response.code}), will try to resend for #{(remaining_duration - @logAnalyticsConfiguration.RETRANSMISSION_DELAY)}")
+          resend_message(documents_json, amount_of_documents, (remaining_duration - @logAnalyticsConfiguration.RETRANSMISSION_DELAY))
         end
       rescue Exception => ex
-        @logger.debug("Resending #{amount_of_documents} documents failed, will try to resend for #{(remaining_duration - @logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY)}")
-        resend_message(documents_json, amount_of_documents, (remaining_duration - @logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY))
+        @logger.debug("Resending #{amount_of_documents} documents failed (Exception: '#{ex}'), will try to resend for #{(remaining_duration - @logAnalyticsConfiguration.RETRANSMISSION_DELAY)}")
+        resend_message(documents_json, amount_of_documents, (remaining_duration - @logAnalyticsConfiguration.RETRANSMISSION_DELAY))
       end
     else
       @logger.error("Could not resend #{amount_of_documents} documents, message is dropped.")
```
```diff
@@ -99,23 +99,23 @@ class LogStashAutoResizeBuffer
   # Meaning that if we reached the max amount we would like to increase it.
   # Else we would like to decrease it(to reduce latency for messages)
   def change_message_limit_size(amount_of_documents, documents_byte_size)
-    new_buffer_size = @logstashLoganalyticsConfiguration.max_items
+    new_buffer_size = @logAnalyticsConfiguration.max_items
     average_document_size = documents_byte_size / amount_of_documents
     # If window is full we need to increase it
     # "amount_of_documents" can be greater since buffer is not synchronized meaning
     # that flush can occur after limit was reached.
-    if amount_of_documents >= @logstashLoganalyticsConfiguration.max_items
+    if amount_of_documents >= @logAnalyticsConfiguration.max_items
       # if doubling the size wouldn't exceed the API limit
-      if ((2 * @logstashLoganalyticsConfiguration.max_items) * average_document_size) < @logstashLoganalyticsConfiguration.MAX_SIZE_BYTES
-        new_buffer_size = 2 * @logstashLoganalyticsConfiguration.max_items
+      if ((2 * @logAnalyticsConfiguration.max_items) * average_document_size) < @logAnalyticsConfiguration.MAX_SIZE_BYTES
+        new_buffer_size = 2 * @logAnalyticsConfiguration.max_items
       else
-        new_buffer_size = (@logstashLoganalyticsConfiguration.MAX_SIZE_BYTES / average_document_size) -1000
+        new_buffer_size = (@logAnalyticsConfiguration.MAX_SIZE_BYTES / average_document_size) -1000
       end
 
     # We would like to decrease the window but not more then the MIN_MESSAGE_AMOUNT
     # We are trying to decrease it slowly to be able to send as much messages as we can in one window
-    elsif amount_of_documents < @logstashLoganalyticsConfiguration.max_items and @logstashLoganalyticsConfiguration.max_items != [(@logstashLoganalyticsConfiguration.max_items - @logstashLoganalyticsConfiguration.decrease_factor) ,@logstashLoganalyticsConfiguration.MIN_MESSAGE_AMOUNT].max
-      new_buffer_size = [(@logstashLoganalyticsConfiguration.max_items - @logstashLoganalyticsConfiguration.decrease_factor) ,@logstashLoganalyticsConfiguration.MIN_MESSAGE_AMOUNT].max
+    elsif amount_of_documents < @logAnalyticsConfiguration.max_items and @logAnalyticsConfiguration.max_items != [(@logAnalyticsConfiguration.max_items - @logAnalyticsConfiguration.decrease_factor) ,@logAnalyticsConfiguration.MIN_MESSAGE_AMOUNT].max
+      new_buffer_size = [(@logAnalyticsConfiguration.max_items - @logAnalyticsConfiguration.decrease_factor) ,@logAnalyticsConfiguration.MIN_MESSAGE_AMOUNT].max
     end
 
     change_buffer_size(new_buffer_size)
```
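To make the resizing arithmetic concrete: if `max_items` is 2000 and a flush of 2000 documents totals 1,000,000 bytes, the average document is 500 bytes, so doubling to 4000 items predicts roughly 2,000,000 bytes per request; the doubling happens only if that stays under `MAX_SIZE_BYTES`. A standalone Ruby sketch of the decision, with illustrative constants (not the gem's actual values) and the shrink branch simplified from the version above:

```ruby
MAX_SIZE_BYTES = 30_000_000  # illustrative request-size ceiling, not the gem's constant
MIN_MESSAGE_AMOUNT = 100     # illustrative floor
DECREASE_FACTOR = 100        # illustrative shrink step

def next_buffer_size(max_items, amount_of_documents, documents_byte_size)
  average_document_size = documents_byte_size / amount_of_documents
  if amount_of_documents >= max_items
    # Window was full: double it if the predicted payload still fits.
    if (2 * max_items) * average_document_size < MAX_SIZE_BYTES
      2 * max_items
    else
      (MAX_SIZE_BYTES / average_document_size) - 1000
    end
  else
    # Window under-filled: shrink slowly, never below the floor.
    [max_items - DECREASE_FACTOR, MIN_MESSAGE_AMOUNT].max
  end
end

puts next_buffer_size(2000, 2000, 1_000_000)  # => 4000 (doubling fits under the ceiling)
puts next_buffer_size(2000, 500, 250_000)     # => 1900 (slow shrink toward the floor)
```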
```diff
@@ -128,7 +128,7 @@ class LogStashAutoResizeBuffer
     if @buffer_config[:max_items] != new_size
       old_buffer_size = @buffer_config[:max_items]
       @buffer_config[:max_items] = new_size
-      @logstashLoganalyticsConfiguration.max_items = new_size
+      @logAnalyticsConfiguration.max_items = new_size
       @logger.info("Changing buffer size.[configuration='#{old_buffer_size}' , new_size='#{new_size}']")
     else
       @logger.info("Buffer size wasn't changed.[configuration='#{old_buffer_size}' , new_size='#{new_size}']")
```
```diff
@@ -140,4 +140,4 @@ class LogStashAutoResizeBuffer
     return (response.code == 200) ? true : false
   end # def is_successfully_posted
 
-end # LogStashAutoResizeBuffer
+end # LogAnalyticsAutoResizeBuffer
```
data/lib/logstash/{logAnalyticsClient/logstashLoganalyticsConfiguration.rb → azureLAClasses/logAnalyticsConfiguration.rb}
RENAMED
```diff
@@ -1,8 +1,9 @@
 # encoding: utf-8
-class LogstashLoganalyticsOutputConfiguration
-  def initialize(workspace_id, workspace_key, logger)
+class LogAnalyticsConfiguration
+  def initialize(workspace_id, workspace_key, custom_log_table_name, logger)
     @workspace_id = workspace_id
     @workspace_key = workspace_key
+    @custom_log_table_name = custom_log_table_name
     @logger = logger
 
     # Delay between each resending of a message
```
```diff
@@ -24,8 +25,14 @@ class LogstashLoganalyticsOutputConfiguration
     elsif @max_items < @MIN_MESSAGE_AMOUNT
       raise ArgumentError, "Setting max_items to value must be greater then #{@MIN_MESSAGE_AMOUNT}."
 
-    elsif @workspace_id.empty? or @workspace_key.empty?
-      raise ArgumentError, "Malformed configuration , the following arguments can not be null or empty.[workspace_id=#{@workspace_id} , workspace_key=#{@workspace_key}]"
+    elsif @workspace_id.empty? or @workspace_key.empty? or @custom_log_table_name.empty?
+      raise ArgumentError, "Malformed configuration , the following arguments can not be null or empty.[workspace_id=#{@workspace_id} , workspace_key=#{@workspace_key} , custom_log_table_name=#{@custom_log_table_name}]"
+
+    elsif !@custom_log_table_name.match(/^[a-zA-Z][[:alpha:][:digit:]_]*$/) and !@custom_log_table_name.match(/^%{((\[[\w_\-@]*\])*)([\w_\-@]*)}$/)
+      raise ArgumentError, "custom_log_table_name must be either a static name starting with a letter and consisting only of numbers, letters, and underscores OR a dynamic table name of the format used by logstash (e.g. %{field_name}, %{[nested][field]}."
+
+    elsif @custom_log_table_name.match(/^[a-zA-Z][[:alpha:][:digit:]_]*$/) and @custom_log_table_name.length > 100
+      raise ArgumentError, "custom_log_table_name must not exceed 100 characters"
 
     elsif @key_names.length > 500
       raise ArgumentError, 'Azure Loganalytics limits the amount of columns to 500 in each table.'
```
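The two regular expressions added above are the whole static-vs-dynamic distinction: a static name must start with a letter and contain only letters, digits, and underscores, while a dynamic name must be a Logstash field reference such as %{field_name} or %{[nested][field]}. A quick standalone check with sample names:

```ruby
# The same two patterns used in the validation above.
STATIC_NAME  = /^[a-zA-Z][[:alpha:][:digit:]_]*$/
DYNAMIC_NAME = /^%{((\[[\w_\-@]*\])*)([\w_\-@]*)}$/

["CustomTable_CL", "%{table_name}", "%{[event][table]}", "9starts_with_digit", "has-hyphen"].each do |name|
  kind = if name.match(STATIC_NAME)
           "static"
         elsif name.match(DYNAMIC_NAME)
           "dynamic"
         else
           "rejected"
         end
  puts "#{name.inspect} => #{kind}"
end
# Prints: static, dynamic, dynamic, rejected, rejected
```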
```diff
@@ -77,6 +84,10 @@ class LogstashLoganalyticsOutputConfiguration
     @workspace_key
   end
 
+  def custom_log_table_name
+    @custom_log_table_name
+  end
+
   def endpoint
     @endpoint
   end
```
data/lib/logstash/outputs/sentinelblue-logstash-output-azure-loganalytics.rb
CHANGED
```diff
@@ -2,8 +2,8 @@
 require "logstash/outputs/base"
 require "logstash/namespace"
 require "stud/buffer"
-require "logstash/logAnalyticsClient/logStashAutoResizeBuffer"
-require "logstash/logAnalyticsClient/logstashLoganalyticsConfiguration"
+require "logstash/azureLAClasses/logAnalyticsAutoResizeBuffer"
+require "logstash/azureLAClasses/logAnalyticsConfiguration"
 
 class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
 
```
```diff
@@ -70,7 +70,7 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
     # Initialize the logstash resizable buffer
     # This buffer will increase and decrease size according to the amount of messages inserted.
     # If the buffer reached the max amount of messages the amount will be increased until the limit
-    # @logstash_resizable_event_buffer=LogStashAutoResizeBuffer::new(@logstash_configuration)
+    # @logstash_resizable_event_buffer=LogAnalyticsAutoResizeBuffer::new(@logstash_configuration)
 
   end # def register
 
```
```diff
@@ -89,7 +89,7 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
     custom_table_name = ""
 
     # Check if the table name is static or dynamic
-    if @custom_log_table_name.match(/^[[:alpha:][:digit:]_]+$/)
+    if @custom_log_table_name.match(/^[a-zA-Z][[:alpha:][:digit:]_]*$/)
       # Table name is static.
       custom_table_name = @custom_log_table_name
 
```
|
|
100
100
|
|
101
101
|
else
|
102
102
|
# Incorrect format
|
103
|
-
@logger.
|
103
|
+
@logger.error("custom_log_table_name must be either a static name consisting only of numbers, letters, and underscores OR a dynamic table name of the format used by logstash (e.g. %{field_name}, %{[nested][field]}.")
|
104
104
|
break
|
105
105
|
|
106
106
|
end
|
107
107
|
|
108
108
|
# Check that the table name is a string, exists, and is les than 100 characters
|
109
|
-
if !custom_table_name.
|
110
|
-
@logger.
|
111
|
-
|
109
|
+
if !custom_table_name.match(/^[a-zA-Z][[:alpha:][:digit:]_]*$/)
|
110
|
+
@logger.error("The custom table name must start with a letter and only consist of letters, numbers, and/or underscores (_). Also check the field name used. If it doesn't exist, you will also receive this error.")
|
111
|
+
next
|
112
112
|
|
113
113
|
elsif custom_table_name.empty? or custom_table_name.nil?
|
114
|
-
@logger.
|
115
|
-
|
114
|
+
@logger.error("The custom table name is empty. Make sure the field you used always returns a table name.")
|
115
|
+
next
|
116
116
|
|
117
117
|
elsif custom_table_name.length > 100
|
118
|
-
@logger.
|
119
|
-
|
118
|
+
@logger.error("The custom table name must not exceed 100 characters")
|
119
|
+
next
|
120
120
|
|
121
121
|
end
|
122
122
|
|
123
|
-
@logger.info("Custom table name #{custom_table_name} is valid")
|
124
|
-
|
125
123
|
# Determine if there is a buffer for the given table
|
126
124
|
if buffers.keys.include?(custom_table_name)
|
127
125
|
@logger.trace("Adding event document - " + event.to_s)
|
```diff
@@ -129,7 +127,7 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
 
     else
       # If the buffer doesn't exist for the table, create one and add the document
-      buffers[custom_table_name] = LogStashAutoResizeBuffer::new(@logstash_configuration,custom_table_name)
+      buffers[custom_table_name] = LogAnalyticsAutoResizeBuffer::new(@logstash_configuration,custom_table_name)
       @logger.trace("Adding event document - " + event.to_s)
       buffers[custom_table_name].add_event_document(document)
 
```
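The receive path above lazily creates one auto-resizing buffer per destination table, so each table gets its own batching and flush cadence. A standalone sketch of that pattern, with a stand-in buffer class (`FakeBuffer` is hypothetical, only to make the example runnable outside the plugin):

```ruby
# Stand-in for LogAnalyticsAutoResizeBuffer, just to show the per-table pattern.
class FakeBuffer
  def initialize(table_name)
    @table_name = table_name
  end

  def add_event_document(document)
    puts "buffered #{document.inspect} for #{@table_name}"
  end
end

# Hash with a default block: a table's buffer is created on first use,
# equivalent in effect to the keys.include? check in the plugin above.
buffers = Hash.new { |hash, table| hash[table] = FakeBuffer.new(table) }

buffers["TableA_CL"].add_event_document({ "msg" => "a" })  # creates TableA_CL's buffer
buffers["TableA_CL"].add_event_document({ "msg" => "b" })  # reuses it
buffers["TableB_CL"].add_event_document({ "msg" => "c" })  # independent buffer
```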
```diff
@@ -165,9 +163,9 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
   end # def create_event_document
 
   # Building the logstash object configuration from the output configuration provided by the user
-  # Return LogstashLoganalyticsOutputConfiguration populated with the configuration values
+  # Return LogAnalyticsConfiguration populated with the configuration values
   def build_logstash_configuration()
-    logstash_configuration= LogstashLoganalyticsOutputConfiguration::new(@workspace_id, @workspace_key, @logger)
+    logstash_configuration= LogAnalyticsConfiguration::new(@workspace_id, @workspace_key, @custom_log_table_name, @logger)
     logstash_configuration.endpoint = @endpoint
     logstash_configuration.time_generated_field = @time_generated_field
     logstash_configuration.key_names = @key_names
```
data/sentinelblue-logstash-output-azure-loganalytics.gemspec
CHANGED
```diff
@@ -6,7 +6,7 @@ Gem::Specification.new do |s|
   s.summary = %q{Sentinel Blue provides a plugin outputing to Azure Sentinel for Logstash. Using this output plugin, you will be able to send any log you want using Logstash to the Azure Sentinel/Log Analytics workspace. You can utilize a dynamic table name during output to simplify complex table schemes.}
   s.description = s.summary
   s.homepage = "https://github.com/sentinelblue/sentinelblue-logstash-output-azure-loganalytics"
-  s.licenses = ['Apache License Version 2.0']
+  s.licenses = ['Apache-2.0']
   s.require_paths = ["lib"]
 
   # Files
```
metadata
CHANGED
```diff
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: sentinelblue-logstash-output-azure-loganalytics
 version: !ruby/object:Gem::Version
-  version: 1.1.1
+  version: 1.1.3.rc1
 platform: ruby
 authors:
 - Sentinel Blue
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-
+date: 2022-11-04 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rest-client
```
```diff
@@ -86,15 +86,15 @@ files:
 - LICENSE
 - README.md
 - VERSION
-- lib/logstash/logAnalyticsClient/logAnalyticsClient.rb
-- lib/logstash/logAnalyticsClient/logStashAutoResizeBuffer.rb
-- lib/logstash/logAnalyticsClient/logstashLoganalyticsConfiguration.rb
+- lib/logstash/azureLAClasses/azureLAClient.rb
+- lib/logstash/azureLAClasses/logAnalyticsAutoResizeBuffer.rb
+- lib/logstash/azureLAClasses/logAnalyticsConfiguration.rb
 - lib/logstash/outputs/sentinelblue-logstash-output-azure-loganalytics.rb
 - sentinelblue-logstash-output-azure-loganalytics.gemspec
 - spec/outputs/azure_loganalytics_spec.rb
 homepage: https://github.com/sentinelblue/sentinelblue-logstash-output-azure-loganalytics
 licenses:
-- Apache License Version 2.0
+- Apache-2.0
 metadata:
   logstash_plugin: 'true'
   logstash_group: output
```
```diff
@@ -109,9 +109,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - ">"
   - !ruby/object:Gem::Version
-    version: '0'
+    version: 1.3.1
 requirements: []
 rubygems_version: 3.3.7
 signing_key:
```