logstash-output-azure_loganalytics 0.5.1 → 0.5.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: a5a4524f2d0aaed4ee9c1436606f9e2ad3c8d904
-  data.tar.gz: fa42aaced172f03f08182e36dbf518d61bce44f2
+  metadata.gz: f5703dcf7467f52e043faed81a31e4ac16326a28
+  data.tar.gz: a1626400b84f10787997929640fce9dcc51fad4e
 SHA512:
-  metadata.gz: 9662329d14d3d0c968cffb17de307545bd2a95e2e4247383759f094437c148a53cc39705dffa0ea11f631e73c0da1fdceb10928a82509939fcf46f2e418fe203
-  data.tar.gz: 50259e8e408a4398e931304d60f4c65b2d1f129f7f01c5ebc401449c06d3ffb52517ad61a91170775b9a6355b47b861890cf365eaee05bf7cc2af655668a5548
+  metadata.gz: 63e25706eec6fad297468afd439ac9dd93c567d838f584124bc276d3568f663ae6923bf6b4fcff2d15caea3dcde26b067ba788db5a3bf4203f0789d3e321bf92
+  data.tar.gz: b2492d86bbcd2ac2aaf00c6274bc6d039a19f256e557026f3a6b011df45a53e798583b48d1c65f1c7f6a2d9bf69187516f250a2feaef9127f03beb851deffd32
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+## 0.5.2
+
+* Fixed using sprintf in log_type - [PR #16](https://github.com/yokawasa/logstash-output-azure_loganalytics/pull/16) by [@daniel-chambers](https://github.com/daniel-chambers)
+
 ## 0.5.1
 
 * Change base [azure-loganalytics-datacollector-api](https://github.com/yokawasa/azure-log-analytics-data-collector) to ">= 0.5.0"
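For context, this fix means `log_type` can now carry Logstash sprintf-style field references (e.g. `%{app}`) that are resolved per event at flush time instead of once for the whole plugin. A minimal sketch of the resolution step, assuming the logstash-core gem is on the load path (the `app` field name is illustrative):

```ruby
require "logstash/event"

# Illustrative template and event; %{app} is a Logstash sprintf field reference.
log_type = "ApplicationLog_%{app}"
event = LogStash::Event.new("app" => "Billing", "message" => "charge ok")

# event.sprintf substitutes %{field} references with the event's own values,
# so each event can resolve to its own Log Analytics log type.
puts event.sprintf(log_type)  # => "ApplicationLog_Billing"
```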
data/VERSION CHANGED
@@ -1 +1 @@
-0.5.1
+0.5.2
data/lib/logstash/outputs/azure_loganalytics.rb CHANGED
@@ -51,10 +51,6 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
   def register
     require 'azure/loganalytics/datacollectorapi/client'
 
-    #if not @log_type.match(/^[[:alpha:]]+$/)
-    #  raise ArgumentError, 'log_type must be only alpha characters'
-    #end
-
     @key_types.each { |k, v|
       t = v.downcase
       if ( !t.eql?('string') && !t.eql?('double') && !t.eql?('boolean') )
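Two things happen in this hunk: the long-dead, commented-out alpha-only check on `log_type` is deleted (it would have rejected sprintf references such as `%{app}` anyway), and the `key_types` validation that survives restricts value types to string, double, or boolean. A standalone sketch of that surviving check, with a hypothetical config value (the error message wording is illustrative, not the plugin's actual text):

```ruby
# As key_types might appear in a pipeline config:
#   key_types => { "responseTime" => "double", "isSuccess" => "boolean" }
key_types = { "responseTime" => "double", "isSuccess" => "boolean", "status" => "integer" }

key_types.each do |k, v|
  t = v.downcase
  if ( !t.eql?('string') && !t.eql?('double') && !t.eql?('boolean') )
    # "integer" above would trip this check at plugin registration time.
    raise ArgumentError, "Key type '#{v}' for key '#{k}' must be string, double, or boolean"
  end
end
```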
@@ -75,7 +71,6 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
 
   public
   def receive(event)
-    @log_type = event.sprintf(@log_type)
     # Simply save an event for later delivery
     buffer_receive(event)
   end # def receive
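Removing this line is the heart of the fix: `receive` ran once per event but wrote the resolved string back into the shared `@log_type`, so after the first event the `%{}` references were gone and every later event (and every later batch) reused the first event's log type. A minimal sketch of that failure mode, with plain strings standing in for events:

```ruby
template = "Log_%{app}"

# Stand-in for event.sprintf: substitute the %{app} reference with a value.
resolve = ->(tmpl, app) { tmpl.gsub("%{app}", app) }

# Buggy pattern, as in the old receive: mutate the shared template.
template = resolve.call(template, "Billing")   # => "Log_Billing"
template = resolve.call(template, "Shipping")  # => "Log_Billing" again:
# the %{app} reference was destroyed by the first assignment, so every
# subsequent event inherits the first event's resolved log type.
```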
@@ -84,9 +79,12 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
   public
   def flush (events, close=false)
 
-    documents = [] #this is the array of hashes to add Azure Log Analytics
+    documentsByLogType = {} # This is a map of log_type to list of documents (themselves maps) to send to Log Analytics
     events.each do |event|
       document = {}
+
+      log_type_for_event = event.sprintf(@log_type)
+
       event_hash = event.to_hash()
       if @key_names.length > 0
         # Get the intersection of key_names and keys of event_hash
@@ -104,26 +102,32 @@ class LogStash::Outputs::AzureLogAnalytics < LogStash::Outputs::Base
       # Skip if document doesn't contain any items
       next if (document.keys).length < 1
 
-      documents.push(document)
+      if documentsByLogType[log_type_for_event] == nil then
+        documentsByLogType[log_type_for_event] = []
+      end
+      documentsByLogType[log_type_for_event].push(document)
     end
 
     # Skip in case there are no candidate documents to deliver
-    if documents.length < 1
-      @logger.debug("No documents in batch for log type #{@log_type}. Skipping")
+    if documentsByLogType.length < 1
+      @logger.debug("No documents in batch. Skipping")
       return
     end
 
-    begin
-      @logger.debug("Posting log batch (log count: #{documents.length}) as log type #{@log_type} to DataCollector API. First log: " + (documents[0].to_json).to_s)
-      res = @client.post_data(@log_type, documents, @time_generated_field)
-      if Azure::Loganalytics::Datacollectorapi::Client.is_success(res)
-        @logger.debug("Successfully posted logs as log type #{@log_type} with result code #{res.code} to DataCollector API")
-      else
-        @logger.error("DataCollector API request failure: error code: #{res.code}, data=>" + (documents.to_json).to_s)
+    documentsByLogType.each do |log_type_for_events, events|
+      begin
+        @logger.debug("Posting log batch (log count: #{events.length}) as log type #{log_type_for_events} to DataCollector API. First log: " + (events[0].to_json).to_s)
+        res = @client.post_data(log_type_for_events, events, @time_generated_field)
+        if Azure::Loganalytics::Datacollectorapi::Client.is_success(res)
+          @logger.debug("Successfully posted logs as log type #{log_type_for_events} with result code #{res.code} to DataCollector API")
+        else
+          @logger.error("DataCollector API request failure: error code: #{res.code}, data=>" + (events.to_json).to_s)
+        end
+      rescue Exception => ex
+        @logger.error("Exception occured in posting to DataCollector API: '#{ex}', data=>" + (events.to_json).to_s)
       end
-    rescue Exception => ex
-      @logger.error("Exception occured in posting to DataCollector API: '#{ex}', data=>" + (documents.to_json).to_s)
     end
+
   end # def flush
 
   private
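The reworked flush buckets each batch's documents by their per-event resolved log type and makes one `post_data` call per bucket. The shipped code builds the hash with an explicit nil check; Ruby's `Enumerable#group_by` expresses the same bucketing more compactly, as this sketch with hypothetical documents shows:

```ruby
# Hypothetical (document, resolved log type) pairs after event.sprintf.
docs = [
  [{ "msg" => "a" }, "Log_Billing"],
  [{ "msg" => "b" }, "Log_Shipping"],
  [{ "msg" => "c" }, "Log_Billing"],
]

# Equivalent of documentsByLogType: one bucket per resolved log type.
documents_by_log_type = docs
  .group_by { |_doc, log_type| log_type }
  .transform_values { |pairs| pairs.map { |doc, _| doc } }

documents_by_log_type.each do |log_type, documents|
  # One DataCollector API request per log type, as in the new flush loop.
  puts "#{log_type}: #{documents.length} document(s)"
end
```

One wart worth noting in the shipped version: the block parameter `events` in `documentsByLogType.each do |log_type_for_events, events|` shadows the `events` argument of `flush` itself, which works but makes the method's original event list unreachable inside the loop.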
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-azure_loganalytics
 version: !ruby/object:Gem::Version
-  version: 0.5.1
+  version: 0.5.2
 platform: ruby
 authors:
 - Yoichi Kawasaki
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-07-21 00:00:00.000000000 Z
+date: 2020-09-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement