microsoft-sentinel-logstash-output-plugin 1.0.1
- checksums.yaml +7 -0
- data/CHANGELOG.md +2 -0
- data/Gemfile +2 -0
- data/LICENSE +21 -0
- data/README.md +230 -0
- data/lib/logstash/outputs/microsoft-sentinel-logstash-output-plugin.rb +103 -0
- data/lib/logstash/sentinel/customSizeBasedBuffer.rb +293 -0
- data/lib/logstash/sentinel/eventsHandler.rb +58 -0
- data/lib/logstash/sentinel/logAnalyticsAadTokenProvider.rb +93 -0
- data/lib/logstash/sentinel/logAnalyticsClient.rb +90 -0
- data/lib/logstash/sentinel/logStashAutoResizeBuffer.rb +157 -0
- data/lib/logstash/sentinel/logStashCompressedStream.rb +144 -0
- data/lib/logstash/sentinel/logStashEventsBatcher.rb +116 -0
- data/lib/logstash/sentinel/logsSender.rb +47 -0
- data/lib/logstash/sentinel/logstashLoganalyticsConfiguration.rb +214 -0
- data/lib/logstash/sentinel/sampleFileCreator.rb +61 -0
- data/lib/logstash/sentinel/version.rb +10 -0
- data/microsoft-sentinel-logstash-output-plugin.gemspec +27 -0
- metadata +125 -0

data/lib/logstash/sentinel/logsSender.rb ADDED
@@ -0,0 +1,47 @@
# encoding: utf-8
require "logstash/sentinel/logstashLoganalyticsConfiguration"
require "logstash/sentinel/eventsHandler"
require "logstash/sentinel/logStashAutoResizeBuffer"
require "logstash/sentinel/logStashCompressedStream"

module LogStash; module Outputs; class MicrosoftSentinelOutputInternal
class LogsSender < EventsHandler

  @thread_batch_map

  def initialize(logstashLogAnalyticsConfiguration)
    @thread_batch_map = Concurrent::Hash.new
    @logstashLogAnalyticsConfiguration = logstashLogAnalyticsConfiguration
    @logger = logstashLogAnalyticsConfiguration.logger
    super
  end

  def handle_events(events)
    t = Thread.current

    unless @thread_batch_map.include?(t)
      @thread_batch_map[t] = @logstashLogAnalyticsConfiguration.compress_data ?
        LogStashCompressedStream::new(@logstashLogAnalyticsConfiguration) :
        LogStashAutoResizeBuffer::new(@logstashLogAnalyticsConfiguration)
    end

    events.each do |event|
      # creating document from event
      document = create_event_document(event)

      # Skip if document doesn't contain any items
      next if (document.keys).length < 1

      @logger.trace("Adding event document - " + event.to_s)
      @thread_batch_map[t].batch_event_document(document)
    end
  end

  def close
    @thread_batch_map.each { |thread_id, batcher|
      batcher.close
    }
  end

end
end; end; end;
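
handle_events above keys a Concurrent::Hash on Thread.current, so each Logstash pipeline worker accumulates events in its own buffer without taking a lock. A minimal standalone sketch of that pattern follows; it assumes the concurrent-ruby gem (which Logstash bundles), and BatchBuffer is a hypothetical stand-in for LogStashAutoResizeBuffer / LogStashCompressedStream:

# Illustrative sketch only -- not part of the gem.
require "concurrent"

# Hypothetical stand-in for the gem's batcher classes.
class BatchBuffer
  def initialize; @docs = []; end
  def batch_event_document(doc); @docs << doc; end
  def close; puts "flushing #{@docs.length} documents"; end
end

thread_batch_map = Concurrent::Hash.new

workers = 4.times.map do
  Thread.new do
    # Each worker creates and reuses its own buffer, keyed by its thread object,
    # so no two workers ever touch the same BatchBuffer.
    buffer = (thread_batch_map[Thread.current] ||= BatchBuffer.new)
    10.times { |i| buffer.batch_event_document({ "message" => "event #{i}" }) }
  end
end
workers.each(&:join)

# Mirrors LogsSender#close: flush every per-thread buffer at shutdown.
thread_batch_map.each { |_thread, buffer| buffer.close }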

data/lib/logstash/sentinel/logstashLoganalyticsConfiguration.rb ADDED
@@ -0,0 +1,214 @@
# encoding: utf-8
module LogStash; module Outputs; class MicrosoftSentinelOutputInternal
class LogstashLoganalyticsOutputConfiguration
  def initialize(client_app_Id, client_app_secret, tenant_id, data_collection_endpoint, dcr_immutable_id, dcr_stream_name, compress_data, create_sample_file, sample_file_path, logger)
    @client_app_Id = client_app_Id
    @client_app_secret = client_app_secret
    @tenant_id = tenant_id
    @data_collection_endpoint = data_collection_endpoint
    @dcr_immutable_id = dcr_immutable_id
    @dcr_stream_name = dcr_stream_name
    @logger = logger
    @compress_data = compress_data
    @create_sample_file = create_sample_file
    @sample_file_path = sample_file_path

    # Delay between each resending of a message
    @RETRANSMISSION_DELAY = 2
    @MIN_MESSAGE_AMOUNT = 100
    # Maximum of 1 MB per post to Log Analytics Data Collector API V2.
    # This is a size limit for a single post.
    # If the data from a single post exceeds 1 MB, it should be split.
    @loganalytics_api_data_limit = 1 * 1024 * 1024

    # Leave a safety buffer below the limit
    @MAX_SIZE_BYTES = @loganalytics_api_data_limit - 10000
  end

  def validate_configuration()
    if @create_sample_file
      begin
        if @sample_file_path.nil?
          print_missing_parameter_message_and_raise("sample_file_path")
        end
        if @sample_file_path.strip == ""
          raise ArgumentError, "The setting sample_file_path cannot be empty"
        end
        begin
          file = java.io.File.new(@sample_file_path)
          if !file.exists
            raise "Path does not exist"
          end
        rescue Exception
          raise ArgumentError, "The path #{@sample_file_path} does not exist."
        end
      end
    else
      required_configs = { "client_app_Id" => @client_app_Id,
                           "client_app_secret" => @client_app_secret,
                           "tenant_id" => @tenant_id,
                           "data_collection_endpoint" => @data_collection_endpoint,
                           "dcr_immutable_id" => @dcr_immutable_id,
                           "dcr_stream_name" => @dcr_stream_name }
      required_configs.each { |name, conf|
        if conf.nil?
          print_missing_parameter_message_and_raise(name)
        end
        if conf.empty?
          raise ArgumentError, "Malformed configuration, the following arguments cannot be null or empty: [client_app_Id, client_app_secret, tenant_id, data_collection_endpoint, dcr_immutable_id, dcr_stream_name]"
        end
      }

      if @retransmission_time < 0
        raise ArgumentError, "retransmission_time must be a positive integer."
      end
      if @max_items < @MIN_MESSAGE_AMOUNT
        raise ArgumentError, "The setting max_items must be greater than #{@MIN_MESSAGE_AMOUNT}."
      end
      if @key_names.length > 500
        raise ArgumentError, 'There are over 500 key names listed to be included in the events sent to Azure Log Analytics, which exceeds the limit of columns that can be defined in each Log Analytics table.'
      end
    end
    @logger.info("Azure Log Analytics configuration was found valid.")
    # If all validations pass, the configuration is valid
    return true
  end # def validate_configuration


  def print_missing_parameter_message_and_raise(param_name)
    @logger.error("Missing a required setting for the microsoft-sentinel-logstash-output-plugin output plugin:
      output {
        microsoft-sentinel-logstash-output-plugin {
          #{param_name} => # SETTING MISSING
          ...
        }
      }
    ")
    raise ArgumentError, "The setting #{param_name} is required."
  end

  def RETRANSMISSION_DELAY
    @RETRANSMISSION_DELAY
  end

  def MAX_SIZE_BYTES
    @MAX_SIZE_BYTES
  end

  def amount_resizing
    @amount_resizing
  end

  def retransmission_time
    @retransmission_time
  end

  def proxy
    @proxy
  end

  def logger
    @logger
  end

  def decrease_factor
    @decrease_factor
  end

  def client_app_Id
    @client_app_Id
  end

  def client_app_secret
    @client_app_secret
  end

  def tenant_id
    @tenant_id
  end

  def data_collection_endpoint
    @data_collection_endpoint
  end

  def dcr_immutable_id
    @dcr_immutable_id
  end

  def dcr_stream_name
    @dcr_stream_name
  end

  def key_names
    @key_names
  end

  def max_items
    @max_items
  end

  def plugin_flush_interval
    @plugin_flush_interval
  end

  def MIN_MESSAGE_AMOUNT
    @MIN_MESSAGE_AMOUNT
  end

  def max_items=(new_max_items)
    @max_items = new_max_items
  end

  def key_names=(new_key_names)
    @key_names = new_key_names
  end

  def plugin_flush_interval=(new_plugin_flush_interval)
    @plugin_flush_interval = new_plugin_flush_interval
  end

  def decrease_factor=(new_decrease_factor)
    @decrease_factor = new_decrease_factor
  end

  def amount_resizing=(new_amount_resizing)
    @amount_resizing = new_amount_resizing
  end

  def max_items=(new_max_items)
    @max_items = new_max_items
  end

  def proxy=(new_proxy)
    @proxy = new_proxy
  end

  def retransmission_time=(new_retransmission_time)
    @retransmission_time = new_retransmission_time
  end

  def compress_data
    @compress_data
  end

  def compress_data=(new_compress_data)
    @compress_data = new_compress_data
  end

  def create_sample_file
    @create_sample_file
  end

  def create_sample_file=(new_create_sample_file)
    @create_sample_file = new_create_sample_file
  end

  def sample_file_path
    @sample_file_path
  end

  def sample_file_path=(new_sample_file_path)
    @sample_file_path = new_sample_file_path
  end
end
end ;end ;end
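
validate_configuration checks several settings (retransmission_time, max_items, key_names) that are never assigned in the constructor, so the plugin presumably injects them through the writer methods before validating. A minimal sketch of that wiring is below; it assumes the gem is on the load path, every value is a placeholder, and the exact call order in the real plugin file (microsoft-sentinel-logstash-output-plugin.rb, not shown in this excerpt) may differ:

# Assumed wiring, based only on the configuration API shown above; all values are placeholders.
require "logger"
require "logstash/sentinel/logstashLoganalyticsConfiguration"

logger = Logger.new($stdout)

config = LogStash::Outputs::MicrosoftSentinelOutputInternal::LogstashLoganalyticsOutputConfiguration.new(
  "my-app-id",                                # client_app_Id
  "my-app-secret",                            # client_app_secret
  "my-tenant-id",                             # tenant_id
  "https://my-dce.ingest.monitor.azure.com",  # data_collection_endpoint (placeholder)
  "dcr-00000000000000000000000000000000",     # dcr_immutable_id (placeholder)
  "Custom-MyTable_CL",                        # dcr_stream_name
  false,                                      # compress_data
  false,                                      # create_sample_file
  "/tmp",                                     # sample_file_path
  logger
)

# Settings without constructor arguments are assigned through the writers.
config.max_items             = 2000
config.plugin_flush_interval = 5
config.retransmission_time   = 10
config.key_names             = ["Computer", "EventID", "Message"]
config.amount_resizing       = true

config.validate_configuration # raises ArgumentError on bad input, returns true otherwise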

data/lib/logstash/sentinel/sampleFileCreator.rb ADDED
@@ -0,0 +1,61 @@
# encoding: utf-8
require "logstash/sentinel/logstashLoganalyticsConfiguration"
require "logstash/sentinel/eventsHandler"

module LogStash
  module Outputs
    class MicrosoftSentinelOutputInternal
      class SampleFileCreator < EventsHandler

        def initialize(logstashLogAnalyticsConfiguration)
          @events_buffer = Concurrent::Array.new
          @maximum_events_to_sample = 10
          @was_file_written = false
          @writing_mutex = Mutex.new
          super
        end

        def handle_events(events)
          events.each do |event|
            if !@was_file_written
              filtered_event = create_event_document(event)
              @events_buffer.push(filtered_event)
            end
          end
          try_writing_events_to_file
        end

        def close
          try_writing_events_to_file(true)
        end

        def try_writing_events_to_file(force = false)
          if @was_file_written
            return
          end

          @writing_mutex.synchronize do
            # check if file was written during the wait
            if @was_file_written ||
               @events_buffer.length == 0 ||
               (@events_buffer.length <= @maximum_events_to_sample && !force)
              return
            end

            output_path = @logstashLogAnalyticsConfiguration.sample_file_path
            output_file_name = "sampleFile#{Time.now.to_i}.json"
            file = java.io.File.new(output_path, output_file_name)
            fw = java.io.FileWriter.new(file)
            fw.write(@events_buffer.take(@maximum_events_to_sample).to_json)
            fw.flush
            fw.close

            @was_file_written = true
            @logger.info("Sample file was written in path: #{file.getAbsolutePath}")
          end
        end

      end
    end
  end
end
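
The write path above goes through java.io.File and java.io.FileWriter, which works because Logstash runs plugins under JRuby. For readers following the logic outside JRuby, a rough pure-Ruby equivalent of the guarded write step is sketched below (illustrative only; the file name pattern is taken from the code above, the sample data is hypothetical):

require "json"

# Pure-Ruby sketch of the write step in try_writing_events_to_file.
def write_sample_file(events_buffer, output_path, max_events = 10)
  output_file = File.join(output_path, "sampleFile#{Time.now.to_i}.json")
  File.write(output_file, events_buffer.take(max_events).to_json)
  output_file
end

# Hypothetical sample data written under /tmp.
docs = 10.times.map { |i| { "message" => "sample event #{i}" } }
puts "Sample file was written in path: #{write_sample_file(docs, '/tmp')}"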

data/microsoft-sentinel-logstash-output-plugin.gemspec ADDED
@@ -0,0 +1,27 @@
require File.expand_path('../lib/logstash/sentinel/version', __FILE__)

Gem::Specification.new do |s|
  s.name = 'microsoft-sentinel-logstash-output-plugin'
  s.version = LogStash::Outputs::MicrosoftSentinelOutputInternal::VERSION
  s.authors = ["Microsoft Sentinel"]
  s.email = 'AzureSentinel@microsoft.com'
  s.summary = %q{This plugin was renamed to microsoft-sentinel-log-analytics-logstash-output-plugin. microsoft-sentinel-logstash-output-plugin will no longer be maintained.}
  s.description = s.summary
  s.homepage = "https://github.com/Azure/Azure-Sentinel"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]

  # Files
  s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }

  # Gem dependencies
  s.add_runtime_dependency "rest-client", ">= 1.8.0"
  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
  s.add_runtime_dependency "logstash-codec-plain"
  s.add_development_dependency "logstash-devutils"
end
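
The gemspec reads its version from lib/logstash/sentinel/version.rb, which is listed in the manifest above but not included in this excerpt. Given the constant path referenced here and the 1.0.1 version recorded in the gem metadata below, that file presumably looks roughly like this (a reconstruction for illustration, not the published file contents):

# lib/logstash/sentinel/version.rb -- presumed shape, reconstructed for illustration.
module LogStash
  module Outputs
    class MicrosoftSentinelOutputInternal
      VERSION = "1.0.1"
    end
  end
end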

metadata ADDED
@@ -0,0 +1,125 @@
--- !ruby/object:Gem::Specification
name: microsoft-sentinel-logstash-output-plugin
version: !ruby/object:Gem::Version
  version: 1.0.1
platform: ruby
authors:
- Microsoft Sentinel
autorequire:
bindir: bin
cert_chain: []
date: 2023-07-27 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: rest-client
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 1.8.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 1.8.0
- !ruby/object:Gem::Dependency
  name: logstash-core-plugin-api
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '1.60'
    - - "<="
      - !ruby/object:Gem::Version
        version: '2.99'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '1.60'
    - - "<="
      - !ruby/object:Gem::Version
        version: '2.99'
- !ruby/object:Gem::Dependency
  name: logstash-codec-plain
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: logstash-devutils
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
description: This plugin was renamed to microsoft-sentinel-log-analytics-logstash-output-plugin.
  microsoft-sentinel-logstash-output-plugin will no longer be maintained.
email: AzureSentinel@microsoft.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- CHANGELOG.md
- Gemfile
- LICENSE
- README.md
- lib/logstash/outputs/microsoft-sentinel-logstash-output-plugin.rb
- lib/logstash/sentinel/customSizeBasedBuffer.rb
- lib/logstash/sentinel/eventsHandler.rb
- lib/logstash/sentinel/logAnalyticsAadTokenProvider.rb
- lib/logstash/sentinel/logAnalyticsClient.rb
- lib/logstash/sentinel/logStashAutoResizeBuffer.rb
- lib/logstash/sentinel/logStashCompressedStream.rb
- lib/logstash/sentinel/logStashEventsBatcher.rb
- lib/logstash/sentinel/logsSender.rb
- lib/logstash/sentinel/logstashLoganalyticsConfiguration.rb
- lib/logstash/sentinel/sampleFileCreator.rb
- lib/logstash/sentinel/version.rb
- microsoft-sentinel-logstash-output-plugin.gemspec
homepage: https://github.com/Azure/Azure-Sentinel
licenses:
- MIT
metadata:
  logstash_plugin: 'true'
  logstash_group: output
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubygems_version: 3.1.6
signing_key:
specification_version: 4
summary: This plugin was renamed to microsoft-sentinel-log-analytics-logstash-output-plugin.
  microsoft-sentinel-logstash-output-plugin will no longer be maintained.
test_files: []