microsoft-sentinel-logstash-output 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,293 @@
1
+ # This code is from a PR for the official repo of ruby-stud
2
+ # with a small change to calculating the event size in the var_size function
3
+ # https://github.com/jordansissel/ruby-stud/pull/19
4
+ #
5
+ # @author {Alex Dean}[http://github.com/alexdean]
6
+ #
7
+ # Implements a generic framework for accepting events which are later flushed
8
+ # in batches. Flushing occurs whenever +:max_items+ or +:max_interval+ (seconds)
9
+ # has been reached or if the event size outgrows +:flush_each+ (bytes)
10
+ #
11
+ # Including class must implement +flush+, which will be called with all
12
+ # accumulated items either when the output buffer fills (+:max_items+ or
13
+ # +:flush_each+) or when a fixed amount of time (+:max_interval+) passes.
14
+ #
15
+ # == batch_receive and flush
16
+ # General receive/flush can be implemented in one of two ways.
17
+ #
18
+ # === batch_receive(event) / flush(events)
19
+ # +flush+ will receive an array of events which were passed to +buffer_receive+.
20
+ #
21
+ # batch_receive('one')
22
+ # batch_receive('two')
23
+ #
24
+ # will cause a flush invocation like
25
+ #
26
+ # flush(['one', 'two'])
27
+ #
28
+ # === batch_receive(event, group) / flush(events, group)
29
+ # flush() will receive an array of events, plus a grouping key.
30
+ #
31
+ # batch_receive('one', :server => 'a')
32
+ # batch_receive('two', :server => 'b')
33
+ # batch_receive('three', :server => 'a')
34
+ # batch_receive('four', :server => 'b')
35
+ #
36
+ # will result in the following flush calls
37
+ #
38
+ # flush(['one', 'three'], {:server => 'a'})
39
+ # flush(['two', 'four'], {:server => 'b'})
40
+ #
41
+ # Grouping keys can be anything which are valid Hash keys. (They don't have to
42
+ # be hashes themselves.) Strings or Fixnums work fine. Use anything which you'd
43
+ # like to receive in your +flush+ method to help enable different handling for
44
+ # various groups of events.
45
+ #
46
+ # == on_flush_error
47
+ # Including class may implement +on_flush_error+, which will be called with an
48
+ # Exception instance whenever buffer_flush encounters an error.
49
+ #
50
+ # * +buffer_flush+ will automatically re-try failed flushes, so +on_flush_error+
51
+ # should not try to implement retry behavior.
52
+ # * Exceptions occurring within +on_flush_error+ are not handled by
53
+ # +buffer_flush+.
54
+ #
55
+ # == on_full_buffer_receive
56
+ # Including class may implement +on_full_buffer_receive+, which will be called
57
+ # whenever +buffer_receive+ is called while the buffer is full.
58
+ #
59
+ # +on_full_buffer_receive+ will receive a Hash like <code>{:pending => 30,
60
+ # :outgoing => 20}</code> which describes the internal state of the module at
61
+ # the moment.
62
+ #
63
+ # == final flush
64
+ # Including class should call <code>buffer_flush(:final => true)</code>
65
+ # during a teardown/shutdown routine (after the last call to buffer_receive)
66
+ # to ensure that all accumulated messages are flushed.
67
module LogStash; module Outputs; class MicrosoftSentinelOutputInternal
module CustomSizeBasedBuffer

  public
  # Initialize the buffer.
  #
  # Call directly from your constructor if you wish to set some non-default
  # options. Otherwise buffer_initialize will be called automatically during the
  # first buffer_receive call.
  #
  # Options:
  #   * :max_items, Max number of items to buffer before flushing. Default 50.
  #   * :flush_each, Flush each bytes of buffer. Default 0 (no flushing fired by
  #     a buffer size).
  #   * :max_interval, Max number of seconds to wait between flushes. Default 5.
  #   * :logger, A logger to write log messages to. No default. Optional.
  #
  # @param [Hash] options
  def buffer_initialize(options={})
    if ! self.class.method_defined?(:flush)
      raise ArgumentError, "Any class including Stud::Buffer must define a flush() method."
    end

    @buffer_config = {
      :max_items => options[:max_items] || 50,
      # Default before coercing. The original `options[:flush_each].to_i || 0`
      # contained dead code: `to_i` never returns nil, so the `|| 0` could
      # never fire (nil.to_i is already 0). Result is identical, intent clearer.
      :flush_each => (options[:flush_each] || 0).to_i,
      :max_interval => options[:max_interval] || 5,
      :logger => options[:logger] || nil,
      :has_on_flush_error => self.class.method_defined?(:on_flush_error),
      :has_on_full_buffer_receive => self.class.method_defined?(:on_full_buffer_receive)
    }
    @buffer_state = {
      # items accepted from including class
      :pending_items => {},
      :pending_count => 0,
      :pending_size => 0,

      # guard access to pending_items & pending_count & pending_size
      :pending_mutex => Mutex.new,

      # items which are currently being flushed
      :outgoing_items => {},
      :outgoing_count => 0,
      :outgoing_size => 0,

      # ensure only 1 flush is operating at once
      :flush_mutex => Mutex.new,

      # data for timed flushes
      :last_flush => Time.now.to_i,
      # Background timer forcing a flush every :max_interval seconds.
      # Relies on @buffer_config having been assigned above.
      :timer => Thread.new do
        loop do
          sleep(@buffer_config[:max_interval])
          buffer_flush(:force => true)
        end
      end
    }

    # events we've accumulated
    buffer_clear_pending
  end

  # Determine if +:max_items+ or +:flush_each+ has been reached.
  #
  # buffer_receive calls will block while <code>buffer_full? == true</code>.
  #
  # @return [bool] Is the buffer full?
  def buffer_full?
    (@buffer_state[:pending_count] + @buffer_state[:outgoing_count] >= @buffer_config[:max_items]) || \
    (@buffer_config[:flush_each] != 0 && @buffer_state[:pending_size] + @buffer_state[:outgoing_size] >= @buffer_config[:flush_each])
  end

  # Save an event for later delivery
  #
  # Events are grouped by the (optional) group parameter you provide.
  # Groups of events, plus the group name, are later passed to +flush+.
  #
  # This call will block if +:max_items+ or +:flush_each+ has been reached.
  #
  # @see Stud::Buffer The overview has more information on grouping and flushing.
  #
  # @param event An item to buffer for flushing later.
  # @param group Optional grouping key. All events with the same key will be
  #              passed to +flush+ together, along with the grouping key itself.
  def buffer_receive(event, group=nil)
    buffer_initialize if ! @buffer_state

    # Block (polling every 100ms) while we've accumulated too many events.
    while buffer_full? do
      on_full_buffer_receive(
        :pending => @buffer_state[:pending_count],
        :outgoing => @buffer_state[:outgoing_count]
      ) if @buffer_config[:has_on_full_buffer_receive]
      sleep 0.1
    end
    @buffer_state[:pending_mutex].synchronize do
      @buffer_state[:pending_items][group] << event
      @buffer_state[:pending_count] += 1
      # Size accounting is only maintained when size-based flushing is enabled.
      @buffer_state[:pending_size] += var_size(event) if @buffer_config[:flush_each] != 0
    end

    buffer_flush
  end

  # Try to flush events.
  #
  # Returns immediately if flushing is not necessary/possible at the moment:
  # * :max_items or :flush_each have not been accumulated
  # * :max_interval seconds have not elapsed since the last flush
  # * another flush is in progress
  #
  # <code>buffer_flush(:force => true)</code> will cause a flush to occur even
  # if +:max_items+ or +:flush_each+ or +:max_interval+ have not been reached. A forced flush
  # will still return immediately (without flushing) if another flush is
  # currently in progress.
  #
  # <code>buffer_flush(:final => true)</code> is identical to <code>buffer_flush(:force => true)</code>,
  # except that if another flush is already in progress, <code>buffer_flush(:final => true)</code>
  # will block/wait for the other flush to finish before proceeding.
  #
  # @param [Hash] options Optional. May be <code>{:force => true}</code> or <code>{:final => true}</code>.
  # @return [Fixnum] The number of items successfully passed to +flush+.
  def buffer_flush(options={})
    force = options[:force] || options[:final]
    final = options[:final]

    # final flush will wait for lock, so we are sure to flush out all buffered events
    if options[:final]
      @buffer_state[:flush_mutex].lock
    elsif ! @buffer_state[:flush_mutex].try_lock # failed to get lock, another flush already in progress
      return 0
    end

    items_flushed = 0

    begin
      return 0 if @buffer_state[:pending_count] == 0

      # compute time_since_last_flush only when some item is pending
      time_since_last_flush = get_time_since_last_flush

      return 0 if (!force) &&
         (@buffer_state[:pending_count] < @buffer_config[:max_items]) &&
         (@buffer_config[:flush_each] == 0 || @buffer_state[:pending_size] < @buffer_config[:flush_each]) &&
         (time_since_last_flush < @buffer_config[:max_interval])

      # Swap pending into outgoing under the mutex so receivers can keep
      # accumulating while we flush.
      @buffer_state[:pending_mutex].synchronize do
        @buffer_state[:outgoing_items] = @buffer_state[:pending_items]
        @buffer_state[:outgoing_count] = @buffer_state[:pending_count]
        @buffer_state[:outgoing_size] = @buffer_state[:pending_size]
        buffer_clear_pending
      end
      @buffer_config[:logger].debug("Flushing output",
        :outgoing_count => @buffer_state[:outgoing_count],
        :time_since_last_flush => time_since_last_flush,
        :outgoing_events => @buffer_state[:outgoing_items],
        :batch_timeout => @buffer_config[:max_interval],
        :force => force,
        :final => final
      ) if @buffer_config[:logger]

      @buffer_state[:outgoing_items].each do |group, events|
        begin
          # Ungrouped events use the 2-arg flush(events, final);
          # grouped events use flush(events, group, final).
          if group.nil?
            flush(events, final)
          else
            flush(events, group, final)
          end

          @buffer_state[:outgoing_items].delete(group)
          events_size = events.size
          @buffer_state[:outgoing_count] -= events_size
          if @buffer_config[:flush_each] != 0
            events_volume = 0
            events.each do |event|
              events_volume += var_size(event)
            end
            @buffer_state[:outgoing_size] -= events_volume
          end
          items_flushed += events_size
        rescue => e
          @buffer_config[:logger].warn("Failed to flush outgoing items",
            :outgoing_count => @buffer_state[:outgoing_count],
            :exception => e,
            :backtrace => e.backtrace
          ) if @buffer_config[:logger]

          if @buffer_config[:has_on_flush_error]
            on_flush_error e
          end

          # Retries the same group indefinitely, 1 second apart, until flush
          # succeeds; on_flush_error implementations must not retry themselves.
          sleep 1
          retry
        end
        @buffer_state[:last_flush] = Time.now.to_i
      end

    ensure
      @buffer_state[:flush_mutex].unlock
    end

    return items_flushed
  end

  private
  # Reset the pending side of the buffer. The Hash default block gives each
  # new group its own array (a shared-default Hash.new([]) would be a bug).
  def buffer_clear_pending
    @buffer_state[:pending_items] = Hash.new { |h, k| h[k] = [] }
    @buffer_state[:pending_count] = 0
    @buffer_state[:pending_size] = 0
  end

  private
  # Approximate serialized size of one event in bytes.
  # Calculate event size as a json.
  # assuming event is a hash; +2 accounts for the separator overhead added
  # when the event is joined into the posted payload.
  def var_size(var)
    return var.to_json.bytesize + 2
  end

  protected
  # Seconds (integer) since the last successful flush.
  def get_time_since_last_flush
    Time.now.to_i - @buffer_state[:last_flush]
  end

end
end; end; end
@@ -0,0 +1,58 @@
1
+ # encoding: utf-8
2
+ require "logstash/sentinel_la/logstashLoganalyticsConfiguration"
3
+
4
module LogStash
  module Outputs
    class MicrosoftSentinelOutputInternal
      # Base class for event handlers: subclasses implement the transport
      # (handle_events/close); this class prepares the per-event document.
      class EventsHandler

        def initialize(logstashLogAnalyticsConfiguration)
          @logstashLogAnalyticsConfiguration = logstashLogAnalyticsConfiguration
          @logger = logstashLogAnalyticsConfiguration.logger
          @key_names = logstashLogAnalyticsConfiguration.key_names
          # Logstash metadata fields are renamed so they survive ingestion.
          @columns_to_modify = {"@timestamp" => "ls_timestamp", "@version" => "ls_version"}
        end

        # Subclass responsibility.
        def handle_events(events)
          raise "Method handle_events not implemented"
        end

        # Subclass responsibility.
        def close
          raise "Method close not implemented"
        end

        # Builds the document to send for one event.
        # When the user configured key_names, only that subset of the event's
        # keys is kept; otherwise the full event hash is sent. Reserved
        # Logstash columns are renamed first in either case.
        def create_event_document(event)
          event_hash = event.to_hash

          @columns_to_modify.each do |original_key, new_key|
            next unless event_hash.has_key?(original_key)
            # Hash#delete returns the removed value, so rename in one step.
            event_hash[new_key] = event_hash.delete(original_key)
          end

          # No key filter configured: send the whole event.
          return event_hash if @key_names.length < 1

          document = {}
          # Keep only the configured keys that actually exist on this event.
          (@key_names & event_hash.keys).each do |key|
            document[key] = event_hash[key]
          end

          if document.keys.length < 1
            @logger.warn("No keys found, message is dropped. Plugin keys: #{@key_names}, Event keys: #{event_hash}. The event message do not match event expected structre. Please edit key_names section in output plugin and try again.")
          end

          document
        end
        # def create_event_document

      end
    end
  end
end
@@ -0,0 +1,92 @@
1
+ # encoding: utf-8
2
+ require "logstash/sentinel_la/logstashLoganalyticsConfiguration"
3
+ require 'rest-client'
4
+ require 'json'
5
+ require 'openssl'
6
+ require 'base64'
7
+ require 'time'
8
+
9
require 'cgi' # CGI.escape is used below but was never required by this file

module LogStash; module Outputs; class MicrosoftSentinelOutputInternal
  # Retrieves and caches Microsoft Entra ID (AAD) OAuth2 tokens via the
  # client-credentials grant, refreshing them on expiry.
  class LogAnalyticsAadTokenProvider
    def initialize(logstashLoganalyticsConfiguration)
      scope = CGI.escape("#{logstashLoganalyticsConfiguration.get_monitor_endpoint}//.default")
      @aad_uri = logstashLoganalyticsConfiguration.get_aad_endpoint
      @token_request_body = sprintf("client_id=%s&scope=%s&client_secret=%s&grant_type=client_credentials", logstashLoganalyticsConfiguration.client_app_Id, scope, logstashLoganalyticsConfiguration.client_app_secret)
      @token_request_uri = sprintf("%s/%s/oauth2/v2.0/token", @aad_uri, logstashLoganalyticsConfiguration.tenant_id)
      @token_state = {
        :access_token => nil,
        :expiry_time => nil,
        :token_details_mutex => Mutex.new,
      }
      @logger = logstashLoganalyticsConfiguration.logger
      @logstashLoganalyticsConfiguration = logstashLoganalyticsConfiguration
    end # def initialize

    # Public methods
    public

    # Returns a valid bearer token, refreshing it first when missing or
    # expired. Thread-safe: all token state is read/written under a mutex.
    def get_aad_token_bearer()
      @token_state[:token_details_mutex].synchronize do
        if is_saved_token_need_refresh()
          refresh_saved_token()
        end
        return @token_state[:access_token]
      end
    end # def get_aad_token_bearer

    # Private methods
    private

    # True when no token is cached or the cached token has expired.
    def is_saved_token_need_refresh()
      return @token_state[:access_token].nil? || @token_state[:expiry_time].nil? || @token_state[:expiry_time] <= Time.now
    end # def is_saved_token_need_refresh

    # Fetches a fresh token and updates the cached token + expiry.
    # Caller must already hold the token mutex.
    def refresh_saved_token()
      @logger.info("Entra ID token expired - refreshing token.")

      token_response = post_token_request()
      @token_state[:access_token] = token_response["access_token"]
      @token_state[:expiry_time] = get_token_expiry_time(token_response["expires_in"])
    end # def refresh_saved_token

    # Computes the wall-clock expiry for a token; falls back to 24 hours when
    # the service did not return a sensible expires_in value.
    def get_token_expiry_time(expires_in_seconds)
      if (expires_in_seconds.nil? || expires_in_seconds <= 0)
        return Time.now + (60 * 60 * 24) # Refresh anyway in 24 hours
      else
        return Time.now + expires_in_seconds - 1 # Decrease by 1 second to be on the safe side
      end
    end # def get_token_expiry_time

    # Posts the OAuth2 token request, retrying every 10 seconds until a
    # 200/201 response is received. Never returns on persistent failure.
    def post_token_request()
      # Create REST request header
      headers = get_header()
      loop do
        begin
          # Post REST request
          response = RestClient::Request.execute(method: :post, url: @token_request_uri, payload: @token_request_body, headers: headers,
                    proxy: @logstashLoganalyticsConfiguration.proxy_aad)

          if (response.code == 200 || response.code == 201)
            return JSON.parse(response.body)
          end
        rescue RestClient::ExceptionWithResponse => ewr
          @logger.error("Exception while authenticating with Microsoft Entra ID API ['#{ewr.response}']")
        rescue StandardError => ex
          # Was `rescue Exception`, which also swallowed SystemExit and
          # SignalException and made this endless retry loop uninterruptible.
          @logger.trace("Exception while authenticating with Microsoft Entra ID API ['#{ex}']")
        end
        @logger.error("Error while authenticating with Microsoft Entra ID ('#{@aad_uri}'), retrying in 10 seconds.")
        sleep 10
      end
    end # def post_token_request

    # Header for the x-www-form-urlencoded token request.
    def get_header()
      return {
        'Content-Type' => 'application/x-www-form-urlencoded',
      }
    end # def get_header

  end # end of class
end; end; end
@@ -0,0 +1,137 @@
1
+ # encoding: utf-8
2
+ require "logstash/sentinel_la/logstashLoganalyticsConfiguration"
3
+ require 'rest-client'
4
+ require 'json'
5
+ require 'openssl'
6
+ require 'base64'
7
+ require 'time'
8
+
9
require 'cgi' # CGI.escape is used below but was never required by this file

module LogStash; module Outputs; class MicrosoftSentinelOutputInternal
  # Provides bearer tokens on Azure Arc-enabled servers via the local Azure
  # Connected Machine agent (HIMDS) managed-identity endpoint.
  class LogAnalyticsArcTokenProvider
    def initialize(logstashLoganalyticsConfiguration)
      scope = CGI.escape("#{logstashLoganalyticsConfiguration.get_monitor_endpoint}")
      @token_request_uri = sprintf("http://127.0.0.1:40342/metadata/identity/oauth2/token?api-version=2019-11-01&resource=%s", scope)
      # https://learn.microsoft.com/en-us/azure/azure-arc/servers/managed-identity-authentication
      @token_state = {
        :access_token => nil,
        :expiry_time => nil,
        :token_details_mutex => Mutex.new,
      }
      @logger = logstashLoganalyticsConfiguration.logger
      @logstashLoganalyticsConfiguration = logstashLoganalyticsConfiguration
    end # def initialize

    # Public methods
    public

    # Find the path to the authentication token (.KEY file).
    # The local identity endpoint answers with a Www-Authenticate challenge
    # whose realm points at the key file on disk.
    # NOTE: name keeps the original "challange" spelling; it is part of the
    # class's public interface and renaming it would break callers.
    def get_challange_token_path()
      # Create REST request header
      headers = get_header1()
      begin
        response = RestClient::Request.execute(
          method: :get,
          url: @token_request_uri,
          headers: headers
        )
      rescue RestClient::ExceptionWithResponse => e
        # The expected 401 challenge surfaces as an exception; use its response.
        response = e.response
      end

      # Path to .KEY file is stripped from response.
      # NOTE(review): assumes the Www-Authenticate header is always present and
      # shaped like "Basic realm=<path>" — confirm against the agent's behavior.
      www_authenticate = response.headers[:www_authenticate]
      path = www_authenticate.split(' ')[1].gsub('realm=', '')
      return path
    end # def get_challange_token_path

    # With path to .KEY file we can retrieve the Basic-auth token.
    # Returns the key file contents, or nil (after logging an error) when the
    # file is not readable by the current user.
    def get_challange_token()
      path = get_challange_token_path()
      # Check if the file is readable
      if ::File.readable?(path)
        # Read the content of the key file
        key_content = ::File.read(path)
        return key_content
      else
        # User must be a member of the himds group to be able to retrieve contents of .KEY file
        @logger.error("The file at #{path} is not readable by the current user. Please run the script as root.")
      end
    end # def get_challange_token

    # Returns a valid bearer token, refreshing it first when missing or
    # expired. Thread-safe: all token state is read/written under a mutex.
    def get_aad_token_bearer()
      @token_state[:token_details_mutex].synchronize do
        if is_saved_token_need_refresh()
          refresh_saved_token()
        end
        return @token_state[:access_token]
      end
    end # def get_aad_token_bearer

    # Private methods
    private

    # True when no token is cached or the cached token has expired.
    def is_saved_token_need_refresh()
      return @token_state[:access_token].nil? || @token_state[:expiry_time].nil? || @token_state[:expiry_time] <= Time.now
    end # def is_saved_token_need_refresh

    # Fetches a fresh token and updates the cached token + expiry.
    # Caller must already hold the token mutex.
    def refresh_saved_token()
      @logger.info("Azure Arc Managed Identity token expired - refreshing token.")

      token_response = post_token_request()
      @token_state[:access_token] = token_response["access_token"]
      @token_state[:expiry_time] = get_token_expiry_time(token_response["expires_in"].to_i)
    end # def refresh_saved_token

    # Computes the wall-clock expiry for a token; falls back to 24 hours when
    # the service did not return a sensible expires_in value.
    def get_token_expiry_time(expires_in_seconds)
      if (expires_in_seconds.nil? || expires_in_seconds <= 0)
        return Time.now + (60 * 60 * 24) # Refresh anyway in 24 hours
      else
        return Time.now + expires_in_seconds - 1 # Decrease by 1 second to be on the safe side
      end
    end # def get_token_expiry_time

    # Requests a token from the local identity endpoint, retrying every
    # 10 seconds until a 200/201 response is received.
    def post_token_request()
      # Create REST request header
      headers = get_header()
      loop do
        begin
          # GET REST request
          response = RestClient::Request.execute(
            method: :get,
            url: @token_request_uri,
            headers: headers,
            proxy: @logstashLoganalyticsConfiguration.proxy_aad
          )

          if (response.code == 200 || response.code == 201)
            return JSON.parse(response.body)
          end
        rescue RestClient::ExceptionWithResponse => ewr
          @logger.error("Exception while authenticating with Azure Arc Connected Machine API ['#{ewr.response}']")
        rescue StandardError => ex
          # Was `rescue Exception`, which also swallowed SystemExit and
          # SignalException and made this endless retry loop uninterruptible.
          @logger.trace("Exception while authenticating with Azure Arc Connected Machine API ['#{ex}']")
        end
        @logger.error("Error while authenticating with Azure Arc Connected Machine ('#{@token_request_uri}'), retrying in 10 seconds.")
        sleep 10
      end
    end # def post_token_request

    # Header for the authenticated token request.
    def get_header()
      return {
        'Metadata' => 'true',
        'Authorization' => "Basic #{get_challange_token()}"
      }
    end # def get_header

    # Header for the initial (unauthenticated) challenge request.
    def get_header1()
      return {
        'Metadata' => 'true',
      }
    end # def get_header1

  end # end of class
end; end; end
@@ -0,0 +1,101 @@
1
+ # encoding: utf-8
2
+ require "logstash/sentinel_la/version"
3
+ require 'rest-client'
4
+ require 'json'
5
+ require 'openssl'
6
+ require 'base64'
7
+ require 'time'
8
+ require 'rbconfig'
9
+
10
module LogStash; module Outputs; class MicrosoftSentinelOutputInternal
  # HTTP client for posting batched records to the Log Analytics ingestion
  # (Data Collection Rules) API, choosing the appropriate token provider.
  class LogAnalyticsClient

    require "logstash/sentinel_la/logstashLoganalyticsConfiguration"
    require "logstash/sentinel_la/logAnalyticsAadTokenProvider"
    require "logstash/sentinel_la/logAnalyticsMiTokenProvider"
    require "logstash/sentinel_la/logAnalyticsArcTokenProvider"

    # AZure Connected Machine AGENT is running outside of Azure and onboarded into Azure Arc.
    # Output is silenced via the spawn redirection options; the previous
    # `'azcmagent > /dev/null'` form forced a shell and was not portable
    # (there is no /dev/null on Windows). Returns falsy when the agent is absent.
    def azcmagent_running?
      system('azcmagent', [:out, :err] => IO::NULL)
    end # def azcmagent_running?

    def initialize(logstashLoganalyticsConfiguration)
      @logstashLoganalyticsConfiguration = logstashLoganalyticsConfiguration
      @logger = @logstashLoganalyticsConfiguration.logger

      la_api_version = "2023-01-01"
      @uri = sprintf("%s/dataCollectionRules/%s/streams/%s?api-version=%s", @logstashLoganalyticsConfiguration.data_collection_endpoint, @logstashLoganalyticsConfiguration.dcr_immutable_id, logstashLoganalyticsConfiguration.dcr_stream_name, la_api_version)

      # Pick the token provider: Arc agent > plain managed identity > AAD app.
      if @logstashLoganalyticsConfiguration.managed_identity
        if azcmagent_running?
          @logger.info("Machine is Azure Arc-enabled server. Retrieving bearer token via azcmagent...")
          @aadTokenProvider = LogAnalyticsArcTokenProvider::new(logstashLoganalyticsConfiguration)
        else
          @logger.info("Using Managed Identity configuration. Retrieving bearer token for Managed Identity...")
          @aadTokenProvider = LogAnalyticsMiTokenProvider::new(logstashLoganalyticsConfiguration)
        end
      else
        @aadTokenProvider = LogAnalyticsAadTokenProvider::new(logstashLoganalyticsConfiguration)
      end

      @userAgent = getUserAgent()
    end # def initialize

    # Post the given json to Azure Loganalytics.
    # Returns the RestClient response; raises on empty payload.
    def post_data(body)
      # NOTE(review): ConfigError is not defined in this file — presumably it
      # resolves to a constant provided elsewhere in the plugin/LogStash; verify.
      raise ConfigError, 'no json_records' if body.empty?

      # Create REST request header
      headers = get_header()

      # Post REST request
      return RestClient::Request.execute(method: :post, url: @uri, payload: body, headers: headers,
                  proxy: @logstashLoganalyticsConfiguration.proxy_endpoint, timeout: 240)
    end # def post_data

    # Static function to return whether the response is a 2xx success.
    def self.is_successfully_posted(response)
      # The original `... ? true : false` ternary was redundant.
      return response.code >= 200 && response.code < 300
    end # def self.is_successfully_posted

    private

    # Create the request headers, fetching a (possibly refreshed) bearer token.
    def get_header()
      # Getting an authorization token bearer (if the token is expired, the method will post a request to get a new authorization token)
      token_bearer = @aadTokenProvider.get_aad_token_bearer()

      headers = {
        'Content-Type' => 'application/json',
        'Authorization' => sprintf("Bearer %s", token_bearer),
        'User-Agent' => @userAgent
      }

      if @logstashLoganalyticsConfiguration.compress_data
        headers = headers.merge({
          'Content-Encoding' => 'gzip'
        })
      end

      return headers
    end # def get_header

    # Ruby engine/version string for the User-Agent header.
    def ruby_agent_version()
      case RUBY_ENGINE
      when 'jruby'
        "jruby/#{JRUBY_VERSION} (#{RUBY_VERSION}p#{RUBY_PATCHLEVEL})"
      else
        "#{RUBY_ENGINE}/#{RUBY_VERSION}p#{RUBY_PATCHLEVEL}"
      end
    end

    # Host OS/CPU string for the User-Agent header.
    def architecture()
      "#{RbConfig::CONFIG['host_os']} #{RbConfig::CONFIG['host_cpu']}"
    end

    # Full User-Agent value identifying this plugin build.
    def getUserAgent()
      "SentinelLogstashPlugin|#{LogStash::Outputs::MicrosoftSentinelOutputInternal::VERSION}|#{architecture}|#{ruby_agent_version}"
    end # getUserAgent

  end # end of class
end; end; end