logstash-output-application_insights 0.2.2 → 0.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +8 -0
- data/lib/logstash/outputs/application_insights.rb +3 -0
- data/lib/logstash/outputs/application_insights/channel.rb +89 -9
- data/lib/logstash/outputs/application_insights/constants.rb +4 -1
- data/lib/logstash/outputs/application_insights/version.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 58f3bc06a6e48003f3349c1389268c9101a96775
+  data.tar.gz: c068a8bdb28e647e7b456b899128fbf379183de9
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b57f47e54e2a3d32fbf3a3548f85b8cfb90e8f3bda876d25b186c515ab3407fd3f47eba03c44487bf260506195fa8eb217912b6a80e445042112f223460da472
+  data.tar.gz: 7c93888e412e7515c2085259fbe1687a5e334eaad97404bd390f17431cef9db0a89c09abce6f3a79f6a4a53347b54c63152246598b62805c6c164c8e5f3d0a4e
data/README.md
CHANGED
@@ -414,6 +414,14 @@ example:
 disable_blob_upload => true
 ```
 
+### disable_truncation
+When set to true, event fields won't be truncated to the 1 MB maximum (beware: the maximum allowed size per field is 1 MB, so enabling this just wastes bandwidth and storage). Default false
+Used for troubleshooting
+example:
+```ruby
+disable_truncation => true
+```
+
 ### stop_on_unknown_io_errors
 When set to true, process will stop if an unknown IO error is detected. Default false
 Used for troubleshooting
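The README addition above shows the new flag in isolation. For orientation, here is a hedged sketch of how it might sit in a pipeline output block; the surrounding option names and placeholder values are illustrative only, and a real configuration also needs the storage account settings the README documents elsewhere:

```ruby
output {
  application_insights {
    # illustrative placeholders, not taken from this diff
    instrumentation_key => "<your-instrumentation-key>"
    table_id => "<your-table-id>"
    # troubleshooting only: send oversized fields as-is instead of truncating to 1 MB
    disable_truncation => true
  }
}
```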
data/lib/logstash/outputs/application_insights.rb
CHANGED
@@ -305,6 +305,9 @@ class LogStash::Outputs::Application_insights < LogStash::Outputs::Base
 
 # When set to true, blobs won't be compressed.
 config :disable_compression, :validate => :boolean
+
+# When set to true, event fields won't be truncated to the 1 MB maximum.
+config :disable_truncation, :validate => :boolean
 public
 
 def register
data/lib/logstash/outputs/application_insights/channel.rb
CHANGED
@@ -34,6 +34,7 @@ class LogStash::Outputs::Application_insights
 @closing = false
 configuration = Config.current
 
+@disable_truncation = configuration[:disable_truncation]
 @file_pipe = !configuration[:disable_compression]
 @gzip_file = !configuration[:disable_compression]
 @blob_max_bytesize = configuration[:blob_max_bytesize]
@@ -100,7 +101,7 @@ class LogStash::Outputs::Application_insights
 sub_channel = @workers_channel[Thread.current] || @semaphore.synchronize { @workers_channel[Thread.current] = Sub_channel.new( @event_separator ) }
 sub_channel << serialized_event
 else
-  @logger.warn { "event not uploaded, no relevant data in event. table_id: #{table_id}, event: #{data}" }
+  @logger.warn { "event not uploaded, no relevant data in event. table_id: #{@table_id}, event: #{data}" }
 end
 end
 
@@ -265,7 +266,7 @@ class LogStash::Outputs::Application_insights
 serialized_data = data
 elsif EXT_EVENT_FORMAT_CSV == @event_format
 if data.is_a?( Array )
-  serialized_data =
+  serialized_data = serialize_array_to_csv( data )
 elsif data.is_a?( Hash )
 serialized_data = serialize_to_csv( data )
 end
@@ -281,16 +282,26 @@ class LogStash::Outputs::Application_insights
 
 
 def serialize_to_json ( data )
-
-
-
+  if (@table_columns.nil?)
+    json_hash = data
+  else
+    data = Utils.downcase_hash_keys( data ) if @case_insensitive_columns
 
-
-
-
-
+    json_hash = { }
+    @table_columns.each do |column|
+      value = data[column[:field_name]] || column[:default]
+      json_hash[column[:name]] = truncate_if_too_big( value ) if value
+    end
   end
+
   return nil if json_hash.empty?
+
+  json_string = json_hash.to_json
+  return json_string if json_string.bytesize < MAX_FIELD_BYTES || @disable_truncation
+
+  json_hash.each_pair do |name, value|
+    json_hash[name] = truncate_data_if_too_big( name, value )
+  end
   json_hash.to_json
 end
 
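To make the column-mapping branch above concrete, here is a small standalone sketch with hypothetical table_columns and event data (names invented for illustration; the truncation call is omitted):

```ruby
require "json"

# hypothetical column definitions and event, for illustration only
table_columns = [
  { :name => "Message",  :field_name => "message" },
  { :name => "Severity", :field_name => "severity", :default => "INFO" }
]
data = { "message" => "disk almost full" }   # event has no "severity" field

json_hash = {}
table_columns.each do |column|
  value = data[column[:field_name]] || column[:default]
  json_hash[column[:name]] = value if value
end

puts json_hash.to_json   # => {"Message":"disk almost full","Severity":"INFO"}
```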
@@ -306,11 +317,80 @@ class LogStash::Outputs::Application_insights
     type = (column[:type] || value.class.name).downcase.to_sym
     csv_array << ( [:hash, :array, :json, :dynamic, :object].include?( type ) ? value.to_json : value )
   end
+  serialize_array_to_csv( csv_array )
+end
+
+def serialize_array_to_csv ( csv_array )
   return nil if csv_array.empty?
+  csv_string = csv_array.to_csv( :col_sep => @csv_separator )
+  return csv_string if csv_string.bytesize < MAX_FIELD_BYTES || @disable_truncation
+
+  index = 0
+  csv_array.map! do |value|
+    index += 1
+    truncate_data_if_too_big( index.to_s, value )
+  end
   csv_array.to_csv( :col_sep => @csv_separator )
 end
 
 
+def truncate_data_if_too_big ( name, data )
+  return data if @disable_truncation
+
+  truncated = nil
+  if data.is_a?( String )
+    if data.bytesize > MAX_FIELD_BYTES
+      truncated = data.bytesize - MAX_FIELD_BYTES
+      data = data.byteslice( 0, MAX_FIELD_BYTES )
+    end
+  elsif data.is_a?( Hash )
+    str = data.to_json
+    while str.bytesize > MAX_FIELD_BYTES
+      truncated = str.bytesize - MAX_FIELD_BYTES unless truncated
+      delta = str.bytesize - MAX_FIELD_BYTES
+      max_size = 0
+      max_name = nil
+      data.each_pair do |name, value|
+        if value.is_a?( String ) && value.bytesize > max_size
+          max_name = name
+          max_size = value.bytesize
+        end
+      end
+      unless max_name
+        data = {}
+        break
+      end
+      data[max_name] = data[max_name].byteslice( 0, max_size - ( max_size > delta ? delta : max_size ) )
+      str = data.to_json
+    end
+
+  elsif data.is_a?( Array )
+    str = data.to_json
+    while str.bytesize > MAX_FIELD_BYTES
+      truncated = str.bytesize - MAX_FIELD_BYTES unless truncated
+      delta = str.bytesize - MAX_FIELD_BYTES
+      max_size = 0
+      max_index = nil
+      data.each_index do |index|
+        value = data[index]
+        if value.is_a?( String ) && value.bytesize > max_size
+          max_index = index
+          max_size = value.bytesize
+        end
+      end
+      unless max_index
+        data = []
+        break
+      end
+      data[max_index] = data[max_index].byteslice( 0, max_size - ( max_size > delta ? delta : max_size ) )
+      str = data.to_json
+    end
+  end
+
+  @logger.warn { "field #{name} was truncated by #{truncated} bytes, due to size above #{MAX_FIELD_BYTES} bytes. table_id: #{@table_id}" } if truncated
+  data
+end
+
 def find_upload_pipe
   min_upload_pipe = @active_upload_pipes[0]
   @active_upload_pipes.each do |upload_pipe|
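A standalone sketch of the byte-level behaviour the String branch of truncate_data_if_too_big applies, assuming the 1 MB MAX_FIELD_BYTES added in constants.rb; the sample value is hypothetical:

```ruby
MAX_FIELD_BYTES = 1024 * 1024           # 1 MB, mirroring the constant added in constants.rb

value = "x" * (MAX_FIELD_BYTES + 100)   # a field 100 bytes over the limit
truncated = value.bytesize - MAX_FIELD_BYTES    # => 100
value = value.byteslice( 0, MAX_FIELD_BYTES )   # keep only the first 1 MB of bytes

puts "truncated by #{truncated} bytes, new size: #{value.bytesize}"
```

For Hash and Array fields the method instead repeatedly shortens the longest string member until the JSON rendering fits under the limit, as the loop above shows.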
data/lib/logstash/outputs/application_insights/constants.rb
CHANGED
@@ -74,6 +74,7 @@ class LogStash::Outputs::Application_insights
 :csv_separator => @csv_separator || DEFAULT_CSV_SEPARATOR,
 :csv_default_value => @csv_default_value || DEFAULT_CSV_DEFAULT_VALUE,
 :disable_compression => @disable_compression || DEFAULT_DISABLE_COMPRESSION,
+:disable_truncation => @disable_truncation || DEFAULT_DISABLE_TRUNCATION,
 
 }
 end
@@ -83,7 +84,7 @@ class LogStash::Outputs::Application_insights
 :disable_cleanup, :delete_not_notified_blobs,
 :validate_notification, :validate_storage,
 :save_notified_blobs_records, :case_insensitive_columns,
-:disable_compression,
+:disable_compression, :disable_truncation,
 :table_columns, :serialized_event_field ]
 
 GUID_NULL = "00000000-0000-0000-0000-000000000000"
@@ -144,6 +145,7 @@ class LogStash::Outputs::Application_insights
 
 MAX_CHANNEL_UPLOAD_PIPES = 40
 CHANNEL_THRESHOLD_TO_ADD_UPLOAD_PIPE = 3 # not relevant for file upload mode
+MAX_FIELD_BYTES = 1024 * 1024
 
 METADATA_FIELD_INSTRUMENTATION_KEY = "[@metadata]instrumentation_key"
 METADATA_FIELD_TABLE_ID = "[@metadata]table_id"
@@ -202,6 +204,7 @@ class LogStash::Outputs::Application_insights
 DEFAULT_DELETE_NOT_NOTIFIED_BLOBS = false
 DEFAULT_SAVE_NOTIFIED_BLOBS_RECORDS = false
 DEFAULT_DISABLE_COMPRESSION = false
+DEFAULT_DISABLE_TRUNCATION = false
 
 DEFAULT_CASE_INSENSITIVE = false
 
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-application_insights
 version: !ruby/object:Gem::Version
-  version: 0.2.2
+  version: 0.2.3
 platform: ruby
 authors:
 - Microsoft Corporation
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-
+date: 2016-12-01 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement