microsoft-sentinel-logstash-output 1.2.0

lib/logstash/sentinel_la/logstashLoganalyticsConfiguration.rb ADDED
@@ -0,0 +1,255 @@
+ # encoding: utf-8
+ module LogStash; module Outputs; class MicrosoftSentinelOutputInternal
+ class LogstashLoganalyticsOutputConfiguration
+   def initialize(client_app_Id, client_app_secret, tenant_id, data_collection_endpoint, dcr_immutable_id, dcr_stream_name, compress_data, create_sample_file, sample_file_path, logger, managed_identity)
+     @client_app_Id = client_app_Id
+     @client_app_secret = client_app_secret
+     @tenant_id = tenant_id
+     @data_collection_endpoint = data_collection_endpoint
+     @dcr_immutable_id = dcr_immutable_id
+     @dcr_stream_name = dcr_stream_name
+     @logger = logger
+     @compress_data = compress_data
+     @create_sample_file = create_sample_file
+     @sample_file_path = sample_file_path
+     @managed_identity = managed_identity
+
+     # Delay between each resending of a message
+     @RETRANSMISSION_DELAY = 2
+     @MIN_MESSAGE_AMOUNT = 100
+     # Maximum of 1 MB per post to Log Analytics Data Collector API V2.
+     # This is a size limit for a single post.
+     # If the data from a single post exceeds 1 MB, it should be split.
+     @loganalytics_api_data_limit = 1 * 1024 * 1024
+
+     # Keep a safety buffer of roughly 10 KB below the API limit.
+     @MAX_SIZE_BYTES = @loganalytics_api_data_limit - 10000
+
+     @azure_clouds = {
+       "AzureCloud" => {"aad" => "https://login.microsoftonline.com", "monitor" => "https://monitor.azure.com"},
+       "AzureChinaCloud" => {"aad" => "https://login.chinacloudapi.cn", "monitor" => "https://monitor.azure.cn"},
+       "AzureUSGovernment" => {"aad" => "https://login.microsoftonline.us", "monitor" => "https://monitor.azure.us"}
+     }.freeze
+   end
+
+   def validate_configuration()
+     if @create_sample_file
+       begin
+         if @sample_file_path.nil?
+           print_missing_parameter_message_and_raise("sample_file_path")
+         end
+         if @sample_file_path.strip == ""
+           raise ArgumentError, "The setting sample_file_path cannot be empty"
+         end
+         begin
+           file = java.io.File.new(@sample_file_path)
+           if !file.exists
+             raise "Path does not exist"
+           end
+         rescue Exception
+           raise ArgumentError, "The path #{@sample_file_path} does not exist."
+         end
+       end
+     else
+       if @managed_identity
+         required_configs = { "data_collection_endpoint" => @data_collection_endpoint,
+                              "dcr_immutable_id" => @dcr_immutable_id,
+                              "dcr_stream_name" => @dcr_stream_name }
+       else
+         required_configs = { "client_app_Id" => @client_app_Id,
+                              "client_app_secret" => @client_app_secret,
+                              "tenant_id" => @tenant_id,
+                              "data_collection_endpoint" => @data_collection_endpoint,
+                              "dcr_immutable_id" => @dcr_immutable_id,
+                              "dcr_stream_name" => @dcr_stream_name }
+       end
+       required_configs.each { |name, conf|
+         if conf.nil?
+           print_missing_parameter_message_and_raise(name)
+         end
+         if conf.empty?
+           raise ArgumentError, "Malformed configuration: the following arguments cannot be null or empty: [client_app_Id, client_app_secret, tenant_id, data_collection_endpoint, dcr_immutable_id, dcr_stream_name]"
+         end
+       }
+
+       if @retransmission_time < 0
+         raise ArgumentError, "retransmission_time must be a non-negative integer."
+       end
+       if @max_items < @MIN_MESSAGE_AMOUNT
+         raise ArgumentError, "The setting max_items must be at least #{@MIN_MESSAGE_AMOUNT}."
+       end
+       if @key_names.length > 500
+         raise ArgumentError, 'There are over 500 key names listed to be included in the events sent to Azure Log Analytics, which exceeds the limit of columns that can be defined in each Log Analytics table.'
+       end
+       if !@azure_clouds.key?(@azure_cloud)
+         raise ArgumentError, "The specified Azure cloud #{@azure_cloud} is not supported. Supported clouds are: #{@azure_clouds.keys.join(", ")}."
+       end
+     end
+     @logger.info("Azure Log Analytics configuration was found valid.")
+     # If all validations pass, the configuration is valid.
+     return true
+   end # def validate_configuration
+
+
+   def print_missing_parameter_message_and_raise(param_name)
+     @logger.error("Missing a required setting for the microsoft-sentinel-log-analytics-logstash-output-plugin output plugin:
+       output {
+         microsoft-sentinel-log-analytics-logstash-output-plugin {
+           #{param_name} => # SETTING MISSING
+           ...
+         }
+       }
+     ")
+     raise ArgumentError, "The setting #{param_name} is required."
+   end
+
+   def RETRANSMISSION_DELAY
+     @RETRANSMISSION_DELAY
+   end
+
+   def MAX_SIZE_BYTES
+     @MAX_SIZE_BYTES
+   end
+
+   def amount_resizing
+     @amount_resizing
+   end
+
+   def retransmission_time
+     @retransmission_time
+   end
+
+   def proxy_aad
+     @proxy_aad
+   end
+
+   def proxy_endpoint
+     @proxy_endpoint
+   end
+
+   def logger
+     @logger
+   end
+
+   def decrease_factor
+     @decrease_factor
+   end
+
+   def managed_identity
+     @managed_identity
+   end
+
+   def client_app_Id
+     @client_app_Id
+   end
+
+   def client_app_secret
+     @client_app_secret
+   end
+
+   def tenant_id
+     @tenant_id
+   end
+
+   def data_collection_endpoint
+     @data_collection_endpoint
+   end
+
+   def dcr_immutable_id
+     @dcr_immutable_id
+   end
+
+   def dcr_stream_name
+     @dcr_stream_name
+   end
+
+   def key_names
+     @key_names
+   end
+
+   def max_items
+     @max_items
+   end
+
+   def plugin_flush_interval
+     @plugin_flush_interval
+   end
+
+   def MIN_MESSAGE_AMOUNT
+     @MIN_MESSAGE_AMOUNT
+   end
+
+   def key_names=(new_key_names)
+     @key_names = new_key_names
+   end
+
+   def plugin_flush_interval=(new_plugin_flush_interval)
+     @plugin_flush_interval = new_plugin_flush_interval
+   end
+
+   def decrease_factor=(new_decrease_factor)
+     @decrease_factor = new_decrease_factor
+   end
+
+   def amount_resizing=(new_amount_resizing)
+     @amount_resizing = new_amount_resizing
+   end
+
+   def max_items=(new_max_items)
+     @max_items = new_max_items
+   end
+
+   def proxy_aad=(new_proxy_aad)
+     @proxy_aad = new_proxy_aad
+   end
+
+   def proxy_endpoint=(new_proxy_endpoint)
+     @proxy_endpoint = new_proxy_endpoint
+   end
+
+   def retransmission_time=(new_retransmission_time)
+     @retransmission_time = new_retransmission_time
+   end
+
+   def compress_data
+     @compress_data
+   end
+
+   def compress_data=(new_compress_data)
+     @compress_data = new_compress_data
+   end
+
+   def create_sample_file
+     @create_sample_file
+   end
+
+   def create_sample_file=(new_create_sample_file)
+     @create_sample_file = new_create_sample_file
+   end
+
+   def sample_file_path
+     @sample_file_path
+   end
+
+   def sample_file_path=(new_sample_file_path)
+     @sample_file_path = new_sample_file_path
+   end
+
+   def azure_cloud
+     @azure_cloud
+   end
+
+   def azure_cloud=(new_azure_cloud)
+     @azure_cloud = new_azure_cloud
+   end
+
+   def get_aad_endpoint
+     @azure_clouds[@azure_cloud]["aad"]
+   end
+
+   def get_monitor_endpoint
+     @azure_clouds[@azure_cloud]["monitor"]
+   end
+
+ end
+ end; end; end
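The configuration class above is normally driven by the output plugin itself, but a minimal sketch can exercise it directly. Everything below is illustrative only: the credential and DCR values are placeholders, and Ruby's stdlib Logger stands in for the Logstash logger.

# Minimal sketch, not part of the gem: construct and validate a configuration
# outside of Logstash. All values are placeholders.
require "logger"
require "logstash/sentinel_la/logstashLoganalyticsConfiguration"

config = LogStash::Outputs::MicrosoftSentinelOutputInternal::LogstashLoganalyticsOutputConfiguration.new(
  "<client_app_Id>",                                       # client_app_Id
  "<client_app_secret>",                                   # client_app_secret
  "<tenant_id>",                                           # tenant_id
  "https://my-dce.westeurope-1.ingest.monitor.azure.com",  # data_collection_endpoint (placeholder)
  "dcr-00000000000000000000000000000000",                  # dcr_immutable_id (placeholder)
  "Custom-MyTable_CL",                                     # dcr_stream_name (placeholder)
  false,                                                   # compress_data
  false,                                                   # create_sample_file
  nil,                                                     # sample_file_path
  Logger.new($stdout),                                     # stand-in for the Logstash logger
  false                                                    # managed_identity
)

# validate_configuration also reads settings that are assigned through the
# writer methods rather than the constructor, so set them first.
config.max_items           = 2000
config.retransmission_time = 10
config.key_names           = []
config.azure_cloud         = "AzureCloud"

config.validate_configuration       # => true, or raises ArgumentError
puts config.get_monitor_endpoint    # => "https://monitor.azure.com"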
lib/logstash/sentinel_la/sampleFileCreator.rb ADDED
@@ -0,0 +1,61 @@
+ # encoding: utf-8
+ require "logstash/sentinel_la/logstashLoganalyticsConfiguration"
+ require "logstash/sentinel_la/eventsHandler"
+
+ module LogStash
+   module Outputs
+     class MicrosoftSentinelOutputInternal
+       class SampleFileCreator < EventsHandler
+
+         def initialize(logstashLogAnalyticsConfiguration)
+           @events_buffer = Concurrent::Array.new
+           @maximum_events_to_sample = 10
+           @was_file_written = false
+           @writing_mutex = Mutex.new
+           super
+         end
+
+         def handle_events(events)
+           events.each do |event|
+             if !@was_file_written
+               filtered_event = create_event_document(event)
+               @events_buffer.push(filtered_event)
+             end
+           end
+           try_writing_events_to_file
+         end
+
+         def close
+           try_writing_events_to_file(true)
+         end
+
+         def try_writing_events_to_file(force = false)
+           if @was_file_written
+             return
+           end
+
+           @writing_mutex.synchronize do
+             # Check whether the file was already written while waiting for the mutex.
+             if @was_file_written ||
+                @events_buffer.length == 0 ||
+                (@events_buffer.length <= @maximum_events_to_sample && !force)
+               return
+             end
+
+             output_path = @logstashLogAnalyticsConfiguration.sample_file_path
+             output_file_name = "sampleFile#{Time.now.to_i}.json"
+             file = java.io.File.new(output_path, output_file_name)
+             fw = java.io.FileWriter.new(file)
+             fw.write(@events_buffer.take(@maximum_events_to_sample).to_json)
+             fw.flush
+             fw.close
+
+             @was_file_written = true
+             @logger.info("Sample file was written to path: #{file.getAbsolutePath}")
+           end
+         end
+
+       end
+     end
+   end
+ end
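SampleFileCreator writes a single file per plugin instance, containing at most ten filtered events as a JSON array, named sampleFile<unix timestamp>.json inside the configured sample_file_path. A small sketch for inspecting such a file afterwards (the path and file name below are hypothetical):

# Sketch only: read back a sample file produced by SampleFileCreator.
require "json"

sample_path = "/tmp/sentinel-samples/sampleFile1712899200.json"  # hypothetical path
events = JSON.parse(File.read(sample_path))

puts "#{events.length} sampled events"    # at most 10 per file
puts JSON.pretty_generate(events.first)   # the first filtered event document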
lib/logstash/sentinel_la/version.rb ADDED
@@ -0,0 +1,10 @@
+ module LogStash; module Outputs;
+   class MicrosoftSentinelOutputInternal
+     VERSION_INFO = [1, 2, 0].freeze
+     VERSION = VERSION_INFO.map(&:to_s).join('.').freeze
+
+     def self.version
+       VERSION
+     end
+   end
+ end; end
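For illustration, and assuming the gem's lib directory is on the load path (for example after installing it into Logstash with bin/logstash-plugin install microsoft-sentinel-logstash-output), the version helpers resolve as follows:

# Sketch: VERSION is simply VERSION_INFO joined with dots.
require "logstash/sentinel_la/version"

puts LogStash::Outputs::MicrosoftSentinelOutputInternal::VERSION   # => "1.2.0"
puts LogStash::Outputs::MicrosoftSentinelOutputInternal.version    # => "1.2.0"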
microsoft-sentinel-log-analytics-logstash-output-plugin.gemspec ADDED
@@ -0,0 +1,27 @@
+ require File.expand_path('../lib/logstash/sentinel_la/version', __FILE__)
+
+ Gem::Specification.new do |s|
+   s.name = 'microsoft-sentinel-logstash-output'
+   s.version = LogStash::Outputs::MicrosoftSentinelOutputInternal::VERSION
+   s.authors = ["Pouyan & Koos"]
+   s.email = 'pkhabazi@outlook.com'
+   s.summary = %q{Microsoft Sentinel provides a new output plugin for Logstash. Use this output plugin to send any log via Logstash to the Microsoft Sentinel/Log Analytics workspace. This is done with the Log Analytics DCR-based API.}
+   s.description = s.summary
+   s.homepage = "https://github.com/pkhabazi/microsoft-sentinel-logstash-output"
+   s.licenses = ["MIT"]
+   s.require_paths = ["lib"]
+
+   # Files
+   s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }
+
+   # Gem dependencies
+   s.add_runtime_dependency "rest-client", ">= 2.1.0"
+   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+   s.add_runtime_dependency "logstash-codec-plain"
+   s.add_development_dependency "logstash-devutils"
+ end
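As a rough check, the specification can be loaded directly from a checkout of the repository; the logstash_plugin metadata flag is what lets Logstash recognize the gem as an output plugin. This is a sketch run from the gem's root directory, using the gemspec file name taken from the files list in the metadata below:

# Sketch: load the gemspec and inspect it.
require "rubygems"

spec = Gem::Specification.load("microsoft-sentinel-log-analytics-logstash-output-plugin.gemspec")
puts spec.name                          # => "microsoft-sentinel-logstash-output"
puts spec.version                       # => "1.2.0"
puts spec.metadata["logstash_plugin"]   # => "true"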
metadata ADDED
@@ -0,0 +1,130 @@
+ --- !ruby/object:Gem::Specification
+ name: microsoft-sentinel-logstash-output
+ version: !ruby/object:Gem::Version
+   version: 1.2.0
+ platform: ruby
+ authors:
+ - Pouyan & Koos
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2024-04-12 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: rest-client
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 2.1.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 2.1.0
+ - !ruby/object:Gem::Dependency
+   name: logstash-core-plugin-api
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+ - !ruby/object:Gem::Dependency
+   name: logstash-codec-plain
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: logstash-devutils
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: Microsoft Sentinel provides a new output plugin for Logstash. Use this
+   output plugin to send any log via Logstash to the Microsoft Sentinel/Log Analytics
+   workspace. This is done with the Log Analytics DCR-based API.
+ email: pkhabazi@outlook.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - CHANGELOG.md
+ - Gemfile
+ - LICENSE
+ - README.md
+ - code_of_conduct.md
+ - lib/logstash/outputs/microsoft-sentinel-log-analytics-logstash-output-plugin.rb
+ - lib/logstash/sentinel_la/customSizeBasedBuffer.rb
+ - lib/logstash/sentinel_la/eventsHandler.rb
+ - lib/logstash/sentinel_la/logAnalyticsAadTokenProvider.rb
+ - lib/logstash/sentinel_la/logAnalyticsArcTokenProvider.rb
+ - lib/logstash/sentinel_la/logAnalyticsClient.rb
+ - lib/logstash/sentinel_la/logAnalyticsMiTokenProvider.rb
+ - lib/logstash/sentinel_la/logStashAutoResizeBuffer.rb
+ - lib/logstash/sentinel_la/logStashCompressedStream.rb
+ - lib/logstash/sentinel_la/logStashEventsBatcher.rb
+ - lib/logstash/sentinel_la/logsSender.rb
+ - lib/logstash/sentinel_la/logstashLoganalyticsConfiguration.rb
+ - lib/logstash/sentinel_la/sampleFileCreator.rb
+ - lib/logstash/sentinel_la/version.rb
+ - microsoft-sentinel-log-analytics-logstash-output-plugin.gemspec
+ homepage: https://github.com/pkhabazi/microsoft-sentinel-logstash-output
+ licenses:
+ - MIT
+ metadata:
+   logstash_plugin: 'true'
+   logstash_group: output
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 3.4.10
+ signing_key:
+ specification_version: 4
+ summary: Microsoft Sentinel provides a new output plugin for Logstash. Use this output
+   plugin to send any log via Logstash to the Microsoft Sentinel/Log Analytics workspace.
+   This is done with the Log Analytics DCR-based API.
+ test_files: []