sentinelblue-logstash-output-azure-loganalytics 1.1.2.rc1 → 1.1.3.rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 7ba5af5f1a409d3a92195e04621db3b1a94e3d316b195cd2cd22f5e8d932cc6c
-  data.tar.gz: 01fc4bc23a6e33f8ab04aeb574dd529bd22064e8b5577f27ce9dc8e548d5f103
+  metadata.gz: 8a6135d1df0d5d38a762d443acea323b7cca725c3a381a4782137e5da06c1830
+  data.tar.gz: 74141b9a0d28b93e623e1da575243ea94ffdf2f2baf4527102ff5e21297fc643
 SHA512:
-  metadata.gz: 97f2e8591b4af40cb2bb76f20be0a847d1e10d3ed005c683293a26e1b681b224e1a50d8c604266a0816afec46af81b1283251312ccc37ef33fdb6814af64864f
-  data.tar.gz: 57bbb6ca200bfd4af2d1f1049585540c4eec5c10957f3ff48a489db46c3a352849aa5369ee402ec35d4958550ef0608a842e141e7139d3775b92981ba11f652a
+  metadata.gz: a0142efda4fff94c1bc7a46865674c27cf68e23b46e32789257c4b4bb686f574bc8397adb7da3a54861b6dc70c1b87eea3e5b9a4abf3ec20a7db567fefb58a45
+  data.tar.gz: 0b8f6ca5ba27907346cd8fe31a9965e4e301ae0772324fb2407fb1434ac0ad26b9afee5838320bcfd7ee167b667ef43a398693ca9e125438406476b77bf4cbd9
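
To spot-check a downloaded copy against the SHA256 values above, note that a .gem file is a plain tar archive whose entries include metadata.gz and data.tar.gz. A minimal Ruby sketch follows; the local file name is an assumption based on the standard gem naming scheme (fetch it first, e.g. with "gem fetch sentinelblue-logstash-output-azure-loganalytics -v 1.1.3.rc1"):

require "digest"
require "rubygems/package"

# Assumed local file name for the fetched gem
gem_file = "sentinelblue-logstash-output-azure-loganalytics-1.1.3.rc1.gem"

File.open(gem_file, "rb") do |io|
  # Walk the tar entries and print digests to compare with checksums.yaml
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
  end
end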
data/CHANGELOG.md CHANGED
@@ -5,3 +5,11 @@
 ## 1.1.1
 
 * Updated strings to reference Sentinel Blue
+
+## 1.1.2
+
+* Enhanced error checking
+
+## 1.1.3
+
+* Renamed classes to avoid conflict with microsoft-logstash-output-azure-loganalytics plugin
data/VERSION CHANGED
@@ -1 +1 @@
-1.1.2.rc1
+1.1.3.rc1
data/lib/logstash/logAnalyticsClient/logAnalyticsClient.rb → data/lib/logstash/azureLAClasses/azureLAClient.rb RENAMED
@@ -1,18 +1,18 @@
 # encoding: utf-8
-require "logstash/logAnalyticsClient/logstashLoganalyticsConfiguration"
+require "logstash/azureLAClasses/logAnalyticsConfiguration"
 require 'rest-client'
 require 'json'
 require 'openssl'
 require 'base64'
 require 'time'
 
-class LogAnalyticsClient
+class AzureLAClient
   API_VERSION = '2016-04-01'.freeze
 
-  def initialize (logstashLoganalyticsConfiguration)
-    @logstashLoganalyticsConfiguration = logstashLoganalyticsConfiguration
-    set_proxy(@logstashLoganalyticsConfiguration.proxy)
-    @uri = sprintf("https://%s.%s/api/logs?api-version=%s", @logstashLoganalyticsConfiguration.workspace_id, @logstashLoganalyticsConfiguration.endpoint, API_VERSION)
+  def initialize (logAnalyticsConfiguration)
+    @logAnalyticsConfiguration = logAnalyticsConfiguration
+    set_proxy(@logAnalyticsConfiguration.proxy)
+    @uri = sprintf("https://%s.%s/api/logs?api-version=%s", @logAnalyticsConfiguration.workspace_id, @logAnalyticsConfiguration.endpoint, API_VERSION)
   end # def initialize
 
 
@@ -39,8 +39,8 @@ class LogAnalyticsClient
       'Authorization' => signature(date, body_bytesize_length),
       'Log-Type' => custom_table_name,
       'x-ms-date' => date,
-      'time-generated-field' => @logstashLoganalyticsConfiguration.time_generated_field,
-      'x-ms-AzureResourceId' => @logstashLoganalyticsConfiguration.azure_resource_id
+      'time-generated-field' => @logAnalyticsConfiguration.time_generated_field,
+      'x-ms-AzureResourceId' => @logAnalyticsConfiguration.azure_resource_id
     }
   end # def get_header
 
@@ -61,10 +61,10 @@ class LogAnalyticsClient
   def signature(date, body_bytesize_length)
     sigs = sprintf("POST\n%d\napplication/json\nx-ms-date:%s\n/api/logs", body_bytesize_length, date)
     utf8_sigs = sigs.encode('utf-8')
-    decoded_shared_key = Base64.decode64(@logstashLoganalyticsConfiguration.workspace_key)
+    decoded_shared_key = Base64.decode64(@logAnalyticsConfiguration.workspace_key)
     hmac_sha256_sigs = OpenSSL::HMAC.digest(OpenSSL::Digest.new('sha256'), decoded_shared_key, utf8_sigs)
     encoded_hash = Base64.encode64(hmac_sha256_sigs)
-    authorization = sprintf("SharedKey %s:%s", @logstashLoganalyticsConfiguration.workspace_id, encoded_hash)
+    authorization = sprintf("SharedKey %s:%s", @logAnalyticsConfiguration.workspace_id, encoded_hash)
 
     return authorization
   end # def signature
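
The rename does not touch the Data Collector API authentication: each POST is still signed with the SharedKey scheme shown in the hunk above (HMAC-SHA256 over a canonical string, keyed with the base64-decoded workspace key). A standalone sketch of that logic, with placeholder workspace values rather than code from the gem:

require "base64"
require "openssl"
require "time"

# Rebuilds the Authorization header the way AzureLAClient#signature does.
# workspace_id and workspace_key are placeholders for the configured values.
def shared_key_authorization(workspace_id, workspace_key, body_bytesize_length, date)
  string_to_sign = sprintf("POST\n%d\napplication/json\nx-ms-date:%s\n/api/logs",
                           body_bytesize_length, date)
  decoded_key = Base64.decode64(workspace_key)
  hmac = OpenSSL::HMAC.digest(OpenSSL::Digest.new('sha256'), decoded_key, string_to_sign.encode('utf-8'))
  # The plugin uses Base64.encode64; strict_encode64 is equivalent minus the trailing newline.
  sprintf("SharedKey %s:%s", workspace_id, Base64.strict_encode64(hmac))
end

# x-ms-date expects an RFC 1123 date, e.g. Time.now.httpdate
puts shared_key_authorization("<workspace-id>", Base64.strict_encode64("<shared-key>"), 100, Time.now.httpdate)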
data/lib/logstash/logAnalyticsClient/logStashAutoResizeBuffer.rb → data/lib/logstash/azureLAClasses/logAnalyticsAutoResizeBuffer.rb RENAMED
@@ -1,23 +1,23 @@
 # encoding: utf-8
 require "stud/buffer"
-require "logstash/logAnalyticsClient/logAnalyticsClient"
+require "logstash/azureLAClasses/azureLAClient"
 require "stud/buffer"
-require "logstash/logAnalyticsClient/logstashLoganalyticsConfiguration"
+require "logstash/azureLAClasses/logAnalyticsConfiguration"
 
-# LogStashAutoResizeBuffer class setting a resizable buffer which is flushed periodically
+# LogAnalyticsAutoResizeBuffer class setting a resizable buffer which is flushed periodically
 # The buffer resize itself according to Azure Loganalytics and configuration limitations
-class LogStashAutoResizeBuffer
+class LogAnalyticsAutoResizeBuffer
   include Stud::Buffer
 
-  def initialize(logstashLoganalyticsConfiguration,custom_table_name)
-    @logstashLoganalyticsConfiguration = logstashLoganalyticsConfiguration
-    @logger = @logstashLoganalyticsConfiguration.logger
+  def initialize(logAnalyticsConfiguration,custom_table_name)
+    @logAnalyticsConfiguration = logAnalyticsConfiguration
+    @logger = @logAnalyticsConfiguration.logger
     @custom_log_table_name = custom_table_name
-    @client=LogAnalyticsClient::new(logstashLoganalyticsConfiguration)
+    @client=AzureLAClient::new(logAnalyticsConfiguration)
     buffer_initialize(
-      :max_items => logstashLoganalyticsConfiguration.max_items,
-      :max_interval => logstashLoganalyticsConfiguration.plugin_flush_interval,
-      :logger => @logstashLoganalyticsConfiguration.logger
+      :max_items => logAnalyticsConfiguration.max_items,
+      :max_interval => logAnalyticsConfiguration.plugin_flush_interval,
+      :logger => @logAnalyticsConfiguration.logger
     )
   end # initialize
 
@@ -41,7 +41,7 @@ class LogStashAutoResizeBuffer
     # We send Json in the REST request
     documents_json = documents.to_json
     # Setting resizing to true will cause changing the max size
-    if @logstashLoganalyticsConfiguration.amount_resizing == true
+    if @logAnalyticsConfiguration.amount_resizing == true
       # Resizing the amount of messages according to size of message received and amount of messages
       change_message_limit_size(documents.length, documents_json.bytesize)
     end
@@ -60,12 +60,12 @@ class LogStashAutoResizeBuffer
        @logger.info("Successfully posted #{amount_of_documents} logs into custom log analytics table[#{@custom_log_table_name}].")
      else
        @logger.error("DataCollector API request failure: error code: #{response.code}, data=>" + (documents.to_json).to_s)
-        resend_message(documents_json, amount_of_documents, @logstashLoganalyticsConfiguration.retransmission_time)
+        resend_message(documents_json, amount_of_documents, @logAnalyticsConfiguration.retransmission_time)
      end
    rescue Exception => ex
      @logger.error("Exception in posting data to Azure Loganalytics. [Exception: '#{ex}]'")
      @logger.trace("Exception in posting data to Azure Loganalytics.[amount_of_documents=#{amount_of_documents} documents=#{documents_json}]")
-      resend_message(documents_json, amount_of_documents, @logstashLoganalyticsConfiguration.retransmission_time)
+      resend_message(documents_json, amount_of_documents, @logAnalyticsConfiguration.retransmission_time)
    end
   end # end send_message_to_loganalytics
 
@@ -73,19 +73,19 @@ class LogStashAutoResizeBuffer
   # We would like to do it until we reached to the duration
   def resend_message(documents_json, amount_of_documents, remaining_duration)
     if remaining_duration > 0
-      @logger.info("Resending #{amount_of_documents} documents as log type #{@custom_log_table_name} to DataCollector API in #{@logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY} seconds.")
-      sleep @logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY
+      @logger.info("Resending #{amount_of_documents} documents as log type #{@custom_log_table_name} to DataCollector API in #{@logAnalyticsConfiguration.RETRANSMISSION_DELAY} seconds.")
+      sleep @logAnalyticsConfiguration.RETRANSMISSION_DELAY
       begin
         response = @client.post_data(documents_json,@custom_log_table_name)
         if is_successfully_posted(response)
           @logger.info("Successfully sent #{amount_of_documents} logs into custom log analytics table[#{@custom_log_table_name}] after resending.")
         else
-          @logger.debug("Resending #{amount_of_documents} documents failed (error code #{response.code}), will try to resend for #{(remaining_duration - @logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY)}")
-          resend_message(documents_json, amount_of_documents, (remaining_duration - @logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY))
+          @logger.debug("Resending #{amount_of_documents} documents failed (error code #{response.code}), will try to resend for #{(remaining_duration - @logAnalyticsConfiguration.RETRANSMISSION_DELAY)}")
+          resend_message(documents_json, amount_of_documents, (remaining_duration - @logAnalyticsConfiguration.RETRANSMISSION_DELAY))
         end
       rescue Exception => ex
-        @logger.debug("Resending #{amount_of_documents} documents failed (Exception: '#{ex}'), will try to resend for #{(remaining_duration - @logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY)}")
-        resend_message(documents_json, amount_of_documents, (remaining_duration - @logstashLoganalyticsConfiguration.RETRANSMISSION_DELAY))
+        @logger.debug("Resending #{amount_of_documents} documents failed (Exception: '#{ex}'), will try to resend for #{(remaining_duration - @logAnalyticsConfiguration.RETRANSMISSION_DELAY)}")
+        resend_message(documents_json, amount_of_documents, (remaining_duration - @logAnalyticsConfiguration.RETRANSMISSION_DELAY))
       end
     else
       @logger.error("Could not resend #{amount_of_documents} documents, message is dropped.")
@@ -99,23 +99,23 @@ class LogStashAutoResizeBuffer
   # Meaning that if we reached the max amount we would like to increase it.
   # Else we would like to decrease it(to reduce latency for messages)
   def change_message_limit_size(amount_of_documents, documents_byte_size)
-    new_buffer_size = @logstashLoganalyticsConfiguration.max_items
+    new_buffer_size = @logAnalyticsConfiguration.max_items
     average_document_size = documents_byte_size / amount_of_documents
     # If window is full we need to increase it
     # "amount_of_documents" can be greater since buffer is not synchronized meaning
     # that flush can occur after limit was reached.
-    if amount_of_documents >= @logstashLoganalyticsConfiguration.max_items
+    if amount_of_documents >= @logAnalyticsConfiguration.max_items
       # if doubling the size wouldn't exceed the API limit
-      if ((2 * @logstashLoganalyticsConfiguration.max_items) * average_document_size) < @logstashLoganalyticsConfiguration.MAX_SIZE_BYTES
-        new_buffer_size = 2 * @logstashLoganalyticsConfiguration.max_items
+      if ((2 * @logAnalyticsConfiguration.max_items) * average_document_size) < @logAnalyticsConfiguration.MAX_SIZE_BYTES
+        new_buffer_size = 2 * @logAnalyticsConfiguration.max_items
       else
-        new_buffer_size = (@logstashLoganalyticsConfiguration.MAX_SIZE_BYTES / average_document_size) -1000
+        new_buffer_size = (@logAnalyticsConfiguration.MAX_SIZE_BYTES / average_document_size) -1000
       end
 
     # We would like to decrease the window but not more then the MIN_MESSAGE_AMOUNT
     # We are trying to decrease it slowly to be able to send as much messages as we can in one window
-    elsif amount_of_documents < @logstashLoganalyticsConfiguration.max_items and @logstashLoganalyticsConfiguration.max_items != [(@logstashLoganalyticsConfiguration.max_items - @logstashLoganalyticsConfiguration.decrease_factor) ,@logstashLoganalyticsConfiguration.MIN_MESSAGE_AMOUNT].max
-      new_buffer_size = [(@logstashLoganalyticsConfiguration.max_items - @logstashLoganalyticsConfiguration.decrease_factor) ,@logstashLoganalyticsConfiguration.MIN_MESSAGE_AMOUNT].max
+    elsif amount_of_documents < @logAnalyticsConfiguration.max_items and @logAnalyticsConfiguration.max_items != [(@logAnalyticsConfiguration.max_items - @logAnalyticsConfiguration.decrease_factor) ,@logAnalyticsConfiguration.MIN_MESSAGE_AMOUNT].max
+      new_buffer_size = [(@logAnalyticsConfiguration.max_items - @logAnalyticsConfiguration.decrease_factor) ,@logAnalyticsConfiguration.MIN_MESSAGE_AMOUNT].max
     end
 
     change_buffer_size(new_buffer_size)
@@ -128,7 +128,7 @@ class LogStashAutoResizeBuffer
     if @buffer_config[:max_items] != new_size
       old_buffer_size = @buffer_config[:max_items]
       @buffer_config[:max_items] = new_size
-      @logstashLoganalyticsConfiguration.max_items = new_size
+      @logAnalyticsConfiguration.max_items = new_size
       @logger.info("Changing buffer size.[configuration='#{old_buffer_size}' , new_size='#{new_size}']")
     else
       @logger.info("Buffer size wasn't changed.[configuration='#{old_buffer_size}' , new_size='#{new_size}']")
@@ -140,4 +140,4 @@ class LogStashAutoResizeBuffer
     return (response.code == 200) ? true : false
   end # def is_successfully_posted
 
-end # LogStashAutoResizeBuffer
+end # LogAnalyticsAutoResizeBuffer
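
The resizing rule itself is unchanged by the rename: a full window doubles as long as the doubled window would stay under the API payload limit, otherwise it backs off to just under the limit, and a non-full window shrinks slowly toward a floor. Reduced to a pure function for illustration (parameter names stand in for the configuration fields above; this is not gem code):

# Illustrative reduction of change_message_limit_size to a pure function.
def next_buffer_size(max_items, amount_of_documents, documents_byte_size,
                     max_size_bytes, min_message_amount, decrease_factor)
  average_document_size = documents_byte_size / amount_of_documents
  if amount_of_documents >= max_items
    # Window was full: double it if that stays under the API size limit,
    # otherwise back off to just under the limit.
    if (2 * max_items) * average_document_size < max_size_bytes
      2 * max_items
    else
      (max_size_bytes / average_document_size) - 1000
    end
  else
    # Window was not full: shrink slowly, never below the minimum amount.
    [max_items - decrease_factor, min_message_amount].max
  end
end

# Example: a full 2000-item window of ~1 KB documents doubles to 4000.
puts next_buffer_size(2000, 2000, 2_000_000, 30_000_000, 100, 100)  # => 4000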
data/lib/logstash/logAnalyticsClient/logstashLoganalyticsConfiguration.rb → data/lib/logstash/azureLAClasses/logAnalyticsConfiguration.rb RENAMED
@@ -1,5 +1,5 @@
 # encoding: utf-8
-class LogstashLoganalyticsOutputConfiguration
+class LogAnalyticsConfiguration
   def initialize(workspace_id, workspace_key, custom_log_table_name, logger)
     @workspace_id = workspace_id
     @workspace_key = workspace_key
data/lib/logstash/outputs/sentinelblue-logstash-output-azure-loganalytics.rb CHANGED
@@ -2,8 +2,8 @@
 require "logstash/outputs/base"
 require "logstash/namespace"
 require "stud/buffer"
-require "logstash/logAnalyticsClient/logStashAutoResizeBuffer"
-require "logstash/logAnalyticsClient/logstashLoganalyticsConfiguration"
+require "logstash/azureLAClasses/logAnalyticsAutoResizeBuffer"
+require "logstash/azureLAClasses/logAnalyticsConfiguration"
 
 class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
 
@@ -70,7 +70,7 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
     # Initialize the logstash resizable buffer
     # This buffer will increase and decrease size according to the amount of messages inserted.
     # If the buffer reached the max amount of messages the amount will be increased until the limit
-    # @logstash_resizable_event_buffer=LogStashAutoResizeBuffer::new(@logstash_configuration)
+    # @logstash_resizable_event_buffer=LogAnalyticsAutoResizeBuffer::new(@logstash_configuration)
 
   end # def register
 
@@ -127,7 +127,7 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
 
     else
       # If the buffer doesn't exist for the table, create one and add the document
-      buffers[custom_table_name] = LogStashAutoResizeBuffer::new(@logstash_configuration,custom_table_name)
+      buffers[custom_table_name] = LogAnalyticsAutoResizeBuffer::new(@logstash_configuration,custom_table_name)
       @logger.trace("Adding event document - " + event.to_s)
       buffers[custom_table_name].add_event_document(document)
 
@@ -163,9 +163,9 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
   end # def create_event_document
 
   # Building the logstash object configuration from the output configuration provided by the user
-  # Return LogstashLoganalyticsOutputConfiguration populated with the configuration values
+  # Return LogAnalyticsConfiguration populated with the configuration values
   def build_logstash_configuration()
-    logstash_configuration= LogstashLoganalyticsOutputConfiguration::new(@workspace_id, @workspace_key, @custom_log_table_name, @logger)
+    logstash_configuration= LogAnalyticsConfiguration::new(@workspace_id, @workspace_key, @custom_log_table_name, @logger)
     logstash_configuration.endpoint = @endpoint
     logstash_configuration.time_generated_field = @time_generated_field
     logstash_configuration.key_names = @key_names
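
Anything that constructed the old LogstashLoganalyticsOutputConfiguration directly must move to the renamed class and require path. A minimal sketch of the new construction, mirroring build_logstash_configuration above (the credentials, table name, and option values are placeholders and assumptions, not defaults taken from the gem):

require "logger"
require "logstash/azureLAClasses/logAnalyticsConfiguration"

config = LogAnalyticsConfiguration.new("<workspace-id>", "<base64-workspace-key>",
                                       "MyCustomTable", Logger.new($stdout))
config.endpoint = "ods.opinsights.azure.com"  # assumed Azure Log Analytics endpoint
config.time_generated_field = ""              # optional: event field mapped to TimeGenerated
config.key_names = []                         # optional: restrict which keys are submitted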
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: sentinelblue-logstash-output-azure-loganalytics
 version: !ruby/object:Gem::Version
-  version: 1.1.2.rc1
+  version: 1.1.3.rc1
 platform: ruby
 authors:
 - Sentinel Blue
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-10-28 00:00:00.000000000 Z
+date: 2022-11-04 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rest-client
@@ -86,9 +86,9 @@ files:
 - LICENSE
 - README.md
 - VERSION
-- lib/logstash/logAnalyticsClient/logAnalyticsClient.rb
-- lib/logstash/logAnalyticsClient/logStashAutoResizeBuffer.rb
-- lib/logstash/logAnalyticsClient/logstashLoganalyticsConfiguration.rb
+- lib/logstash/azureLAClasses/azureLAClient.rb
+- lib/logstash/azureLAClasses/logAnalyticsAutoResizeBuffer.rb
+- lib/logstash/azureLAClasses/logAnalyticsConfiguration.rb
 - lib/logstash/outputs/sentinelblue-logstash-output-azure-loganalytics.rb
 - sentinelblue-logstash-output-azure-loganalytics.gemspec
 - spec/outputs/azure_loganalytics_spec.rb