microsoft-sentinel-log-analytics-logstash-output-plugin 1.1.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,47 @@
1
+ # encoding: utf-8
2
+ require "logstash/sentinel_la/logstashLoganalyticsConfiguration"
3
+ require "logstash/sentinel_la/eventsHandler"
4
+ require "logstash/sentinel_la/logStashAutoResizeBuffer"
5
+ require "logstash/sentinel_la/logStashCompressedStream"
6
+
7
module LogStash; module Outputs; class MicrosoftSentinelOutputInternal
# Sends batched Logstash events toward Microsoft Sentinel / Log Analytics.
# Each worker thread gets its own batcher — a compressed stream or an
# auto-resizing buffer depending on configuration — kept in a
# thread-keyed concurrent hash so threads never share a batcher.
class LogsSender < EventsHandler

  # @param logstashLogAnalyticsConfiguration [LogstashLoganalyticsOutputConfiguration]
  #   validated plugin configuration; also supplies the logger.
  def initialize(logstashLogAnalyticsConfiguration)
    @thread_batch_map = Concurrent::Hash.new
    @logstashLogAnalyticsConfiguration = logstashLogAnalyticsConfiguration
    @logger = logstashLogAnalyticsConfiguration.logger
    super
  end

  # Converts each event to a document and queues it on the current
  # thread's batcher, creating the batcher lazily on first use.
  # Empty documents are skipped.
  def handle_events(events)
    t = Thread.current

    # Lazily create one batcher per worker thread. Hash#key? states the
    # membership-test intent more clearly than include?.
    unless @thread_batch_map.key?(t)
      @thread_batch_map[t] = @logstashLogAnalyticsConfiguration.compress_data ?
        LogStashCompressedStream::new(@logstashLogAnalyticsConfiguration) :
        LogStashAutoResizeBuffer::new(@logstashLogAnalyticsConfiguration)
    end

    events.each do |event|
      # creating document from event
      document = create_event_document(event)

      # Skip if document doesn't contain any items
      next if document.empty?

      @logger.trace("Adding event document - " + event.to_s)
      @thread_batch_map[t].batch_event_document(document)
    end
  end

  # Flushes and closes every per-thread batcher on shutdown.
  def close
    @thread_batch_map.each_value(&:close)
  end

end
end; end; end;
@@ -0,0 +1,222 @@
1
+ # encoding: utf-8
2
module LogStash; module Outputs; class MicrosoftSentinelOutputInternal
# Holds every setting of the Microsoft Sentinel / Log Analytics output
# plugin and validates them before the plugin starts sending data.
class LogstashLoganalyticsOutputConfiguration

  # Credentials / endpoint settings, fixed at construction time.
  attr_reader :client_app_Id, :client_app_secret, :tenant_id,
              :data_collection_endpoint, :dcr_immutable_id, :dcr_stream_name,
              :logger

  # Tunable settings assigned by the plugin after construction.
  attr_accessor :amount_resizing, :retransmission_time, :proxy_aad,
                :proxy_endpoint, :decrease_factor, :key_names, :max_items,
                :plugin_flush_interval, :compress_data, :create_sample_file,
                :sample_file_path

  # Capitalized reader names are kept for backward compatibility with
  # existing callers (e.g. configuration.MAX_SIZE_BYTES).
  attr_reader :RETRANSMISSION_DELAY, :MAX_SIZE_BYTES, :MIN_MESSAGE_AMOUNT

  def initialize(client_app_Id, client_app_secret, tenant_id, data_collection_endpoint, dcr_immutable_id, dcr_stream_name, compress_data, create_sample_file, sample_file_path, logger)
    @client_app_Id = client_app_Id
    @client_app_secret = client_app_secret
    @tenant_id = tenant_id
    @data_collection_endpoint = data_collection_endpoint
    @dcr_immutable_id = dcr_immutable_id
    @dcr_stream_name = dcr_stream_name
    @logger = logger
    @compress_data = compress_data
    @create_sample_file = create_sample_file
    @sample_file_path = sample_file_path

    # Delay between each resending of a message
    @RETRANSMISSION_DELAY = 2
    @MIN_MESSAGE_AMOUNT = 100
    # Maximum of 1 MB per post to Log Analytics Data Collector API V2.
    # This is a size limit for a single post.
    # If the data from a single post that exceeds 1 MB, you should split it.
    @loganalytics_api_data_limit = 1 * 1024 * 1024

    # Keep a ~10 KB safety margin below the API limit.
    @MAX_SIZE_BYTES = @loganalytics_api_data_limit - 10000
  end

  # Validates the configuration.
  # In sample-file mode only the output path is checked; otherwise all
  # credential/endpoint settings plus the numeric tuning values are checked.
  # @return [true] when the configuration is valid
  # @raise [ArgumentError] when any setting is missing or malformed
  def validate_configuration()
    if @create_sample_file
      if @sample_file_path.nil?
        print_missing_parameter_message_and_raise("sample_file_path")
      end
      if @sample_file_path.strip == ""
        raise ArgumentError, "The setting sample_file_path cannot be empty"
      end
      begin
        file = java.io.File.new(@sample_file_path)
        if !file.exists
          raise "Path not exists"
        end
      rescue Exception
        # Intentionally broad: under JRuby java.io.File can surface Java
        # exceptions; any failure here means the path is unusable.
        raise ArgumentError, "The path #{@sample_file_path} does not exist."
      end
    else
      required_configs = { "client_app_Id" => @client_app_Id,
                           "client_app_secret" => @client_app_secret,
                           "tenant_id" => @tenant_id,
                           "data_collection_endpoint" => @data_collection_endpoint,
                           "dcr_immutable_id" => @dcr_immutable_id,
                           "dcr_stream_name" => @dcr_stream_name }
      required_configs.each { |name, conf|
        if conf.nil?
          print_missing_parameter_message_and_raise(name)
        end
        if conf.empty?
          raise ArgumentError, "Malformed configuration , the following arguments can not be null or empty.[client_app_Id, client_app_secret, tenant_id, data_collection_endpoint, dcr_immutable_id, dcr_stream_name]"
        end
      }

      if @retransmission_time < 0
        raise ArgumentError, "retransmission_time must be a non-negative integer."
      end
      if @max_items < @MIN_MESSAGE_AMOUNT
        raise ArgumentError, "The setting max_items must be at least #{@MIN_MESSAGE_AMOUNT}."
      end
      if @key_names.length > 500
        raise ArgumentError, 'There are over 500 key names listed to be included in the events sent to Azure Loganalytics, which exceeds the limit of columns that can be define in each table in log analytics.'
      end
    end
    @logger.info("Azure Loganalytics configuration was found valid.")
    # If all validation pass then configuration is valid
    return true
  end # def validate_configuration

  # Logs a sample plugin configuration highlighting the missing setting,
  # then raises ArgumentError naming it.
  def print_missing_parameter_message_and_raise(param_name)
    @logger.error("Missing a required setting for the microsoft-sentinel-log-analytics-logstash-output-plugin output plugin:
  output {
    microsoft-sentinel-log-analytics-logstash-output-plugin {
      #{param_name} => # SETTING MISSING
      ...
    }
  }
")
    raise ArgumentError, "The setting #{param_name} is required."
  end
end
end ;end ;end
@@ -0,0 +1,61 @@
1
+ # encoding: utf-8
2
+ require "logstash/sentinel_la/logstashLoganalyticsConfiguration"
3
+ require "logstash/sentinel_la/eventsHandler"
4
+
5
module LogStash
  module Outputs
    class MicrosoftSentinelOutputInternal
      # Collects a small sample of event documents and writes them once to a
      # JSON file (used to help users author a DCR transformation).
      class SampleFileCreator < EventsHandler

        def initialize(logstashLogAnalyticsConfiguration)
          @events_buffer = Concurrent::Array.new
          @maximum_events_to_sample = 10
          @was_file_written = false
          @writing_mutex = Mutex.new
          super
        end

        # Buffers filtered documents until enough samples are gathered,
        # then attempts to write the sample file. After the file has been
        # written once, further events are ignored.
        def handle_events(events)
          events.each do |event|
            if !@was_file_written
              filtered_event = create_event_document(event)
              @events_buffer.push(filtered_event)
            end
          end
          try_writing_events_to_file
        end

        # On shutdown, flush whatever was collected even if fewer than
        # @maximum_events_to_sample events arrived.
        def close
          try_writing_events_to_file(true)
        end

        # Writes at most @maximum_events_to_sample buffered documents to a
        # timestamped JSON file. With force=false, waits until the buffer
        # holds more than the sample size; with force=true any non-empty
        # buffer is flushed. Idempotent: writes at most once.
        def try_writing_events_to_file(force = false)
          if @was_file_written
            return
          end

          @writing_mutex.synchronize do
            # Re-check under the lock: another thread may have written the
            # file while we were waiting, and an empty buffer never flushes.
            if @was_file_written ||
                @events_buffer.length == 0 ||
                (@events_buffer.length <= @maximum_events_to_sample && !force)
              return
            end

            output_path = @logstashLogAnalyticsConfiguration.sample_file_path
            output_file_name = "sampleFile#{Time.now.to_i}.json"
            file = java.io.File.new(output_path,output_file_name)
            fw = java.io.FileWriter.new(file)
            begin
              fw.write(@events_buffer.take(@maximum_events_to_sample).to_json)
              fw.flush
            ensure
              # Always release the file handle, even if serialization or
              # the write itself raises.
              fw.close
            end

            @was_file_written = true
            @logger.info("Sample file was written in path: #{file.getAbsolutePath}")
          end
        end

      end
    end
  end
end
@@ -0,0 +1,10 @@
1
module LogStash
  module Outputs
    class MicrosoftSentinelOutputInternal
      # Version of the plugin, as frozen components and a dotted string.
      VERSION_INFO = [1, 1, 0].freeze
      VERSION = VERSION_INFO.join('.').freeze

      # @return [String] the plugin version, e.g. "1.1.0"
      def self.version
        VERSION
      end
    end
  end
end
@@ -0,0 +1,27 @@
1
require File.expand_path('../lib/logstash/sentinel_la/version', __FILE__)

# Gem manifest for the Microsoft Sentinel Logstash output plugin.
Gem::Specification.new do |spec|
  spec.name          = 'microsoft-sentinel-log-analytics-logstash-output-plugin'
  spec.version       = LogStash::Outputs::MicrosoftSentinelOutputInternal::VERSION
  spec.authors       = ['Microsoft Sentinel']
  spec.email         = 'AzureSentinel@microsoft.com'
  spec.summary       = 'Microsoft Sentinel provides a new output plugin for Logstash. Use this output plugin to send any log via Logstash to the Microsoft Sentinel/Log Analytics workspace. This is done with the Log Analytics DCR-based API.'
  spec.description   = spec.summary
  spec.homepage      = "https://github.com/Azure/Azure-Sentinel"
  spec.licenses      = ["MIT"]
  spec.require_paths = ["lib"]

  # Files
  spec.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
  # Tests
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  spec.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }

  # Gem dependencies
  spec.add_runtime_dependency "rest-client", ">= 2.1.0"
  spec.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
  spec.add_runtime_dependency "logstash-codec-plain"
  spec.add_development_dependency "logstash-devutils"
end
metadata ADDED
@@ -0,0 +1,127 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: microsoft-sentinel-log-analytics-logstash-output-plugin
3
+ version: !ruby/object:Gem::Version
4
+ version: 1.1.0
5
+ platform: ruby
6
+ authors:
7
+ - Microsoft Sentinel
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2023-07-23 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: rest-client
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - ">="
18
+ - !ruby/object:Gem::Version
19
+ version: 2.1.0
20
+ type: :runtime
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - ">="
25
+ - !ruby/object:Gem::Version
26
+ version: 2.1.0
27
+ - !ruby/object:Gem::Dependency
28
+ name: logstash-core-plugin-api
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - ">="
32
+ - !ruby/object:Gem::Version
33
+ version: '1.60'
34
+ - - "<="
35
+ - !ruby/object:Gem::Version
36
+ version: '2.99'
37
+ type: :runtime
38
+ prerelease: false
39
+ version_requirements: !ruby/object:Gem::Requirement
40
+ requirements:
41
+ - - ">="
42
+ - !ruby/object:Gem::Version
43
+ version: '1.60'
44
+ - - "<="
45
+ - !ruby/object:Gem::Version
46
+ version: '2.99'
47
+ - !ruby/object:Gem::Dependency
48
+ name: logstash-codec-plain
49
+ requirement: !ruby/object:Gem::Requirement
50
+ requirements:
51
+ - - ">="
52
+ - !ruby/object:Gem::Version
53
+ version: '0'
54
+ type: :runtime
55
+ prerelease: false
56
+ version_requirements: !ruby/object:Gem::Requirement
57
+ requirements:
58
+ - - ">="
59
+ - !ruby/object:Gem::Version
60
+ version: '0'
61
+ - !ruby/object:Gem::Dependency
62
+ name: logstash-devutils
63
+ requirement: !ruby/object:Gem::Requirement
64
+ requirements:
65
+ - - ">="
66
+ - !ruby/object:Gem::Version
67
+ version: '0'
68
+ type: :development
69
+ prerelease: false
70
+ version_requirements: !ruby/object:Gem::Requirement
71
+ requirements:
72
+ - - ">="
73
+ - !ruby/object:Gem::Version
74
+ version: '0'
75
+ description: Microsoft Sentinel provides a new output plugin for Logstash. Use this
76
+ output plugin to send any log via Logstash to the Microsoft Sentinel/Log Analytics
77
+ workspace. This is done with the Log Analytics DCR-based API.
78
+ email: AzureSentinel@microsoft.com
79
+ executables: []
80
+ extensions: []
81
+ extra_rdoc_files: []
82
+ files:
83
+ - CHANGELOG.md
84
+ - Gemfile
85
+ - LICENSE
86
+ - README.md
87
+ - lib/logstash/outputs/microsoft-sentinel-log-analytics-logstash-output-plugin.rb
88
+ - lib/logstash/sentinel_la/customSizeBasedBuffer.rb
89
+ - lib/logstash/sentinel_la/eventsHandler.rb
90
+ - lib/logstash/sentinel_la/logAnalyticsAadTokenProvider.rb
91
+ - lib/logstash/sentinel_la/logAnalyticsClient.rb
92
+ - lib/logstash/sentinel_la/logStashAutoResizeBuffer.rb
93
+ - lib/logstash/sentinel_la/logStashCompressedStream.rb
94
+ - lib/logstash/sentinel_la/logStashEventsBatcher.rb
95
+ - lib/logstash/sentinel_la/logsSender.rb
96
+ - lib/logstash/sentinel_la/logstashLoganalyticsConfiguration.rb
97
+ - lib/logstash/sentinel_la/sampleFileCreator.rb
98
+ - lib/logstash/sentinel_la/version.rb
99
+ - microsoft-sentinel-log-analytics-logstash-output-plugin.gemspec
100
+ homepage: https://github.com/Azure/Azure-Sentinel
101
+ licenses:
102
+ - MIT
103
+ metadata:
104
+ logstash_plugin: 'true'
105
+ logstash_group: output
106
+ post_install_message:
107
+ rdoc_options: []
108
+ require_paths:
109
+ - lib
110
+ required_ruby_version: !ruby/object:Gem::Requirement
111
+ requirements:
112
+ - - ">="
113
+ - !ruby/object:Gem::Version
114
+ version: '0'
115
+ required_rubygems_version: !ruby/object:Gem::Requirement
116
+ requirements:
117
+ - - ">="
118
+ - !ruby/object:Gem::Version
119
+ version: '0'
120
+ requirements: []
121
+ rubygems_version: 3.1.6
122
+ signing_key:
123
+ specification_version: 4
124
+ summary: Microsoft Sentinel provides a new output plugin for Logstash. Use this output
125
+ plugin to send any log via Logstash to the Microsoft Sentinel/Log Analytics workspace.
126
+ This is done with the Log Analytics DCR-based API.
127
+ test_files: []