logstash-output-scalyr 0.1.16.beta → 0.1.20.beta

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 16e88c3869fdb18e4132726af302e37c8bac2c10038ca3a078f85cebe67c4940
4
- data.tar.gz: a42194413ec2969e31bbe44e72df7cf71d09911b61adae9db16b6041a97bc1b4
3
+ metadata.gz: 23ede2e07ed42483146d4a267c8517fed2f8639fb27998e3f37e8732bd50a45d
4
+ data.tar.gz: 4d3594dd94a908d5852d58fc6e5214982ae731ef8b3de3eb3707455bfa77bd3d
5
5
  SHA512:
6
- metadata.gz: 66d4261da66deaa8f5227b0ecfcdfce51f170c561902397c6020d3baa8694bd7c92291b0faa84178e4dda180c71892ab4a30ba425dc1133f5f7b349c1b7e44fa
7
- data.tar.gz: 6689e3c9fd5fa2a571ca8076a4ae24b5eb1bf5da9216030b8c759a0baff17f4577a478f3c3efced4ddb22ecf273ec675f8831733aded928fd2c92c7ce91d1983
6
+ metadata.gz: b11f8aaeea8530b316de904d2fd4b654b8b2a33c2c13ca10614ca3cb346a154e494d9ec7c831a1cf0a83449101ed2bb59c97cc5506870bd252c31cbf9dbc9052
7
+ data.tar.gz: dc6a23b5eae108b7ac8818cfd018c9a9670f69c31fb582b7c8568ebb33720fef19564b54420b2cfc4e7fc04e4a742836bdd2eefc013b15e240379726858fe15f
data/CHANGELOG.md CHANGED
@@ -1,5 +1,22 @@
1
1
  # Beta
2
2
 
3
+ ## 0.1.20.beta
4
+ - Rewrite flattening function to no longer be recursive, to help avoid maxing out the stack.
5
+ - Added a configurable value `flattening_max_key_count` to create a limit on how large of a record we can flatten.
6
  It limits the maximum number of keys we can have in the final flattened record. Defaults to unlimited.
7
+
8
+ ## 0.1.19.beta
9
+ - Undo a change to nested value flattening functionality to keep existing formatting. This change can be re-enabled
10
+ by setting the `fix_deep_flattening_delimiters` configuration option to true.
11
+
12
+ ## 0.1.18.beta
13
+ - Add metrics for successfully sent and failed logstash events, and retries.
14
+ - Make array flattening optional during nested value flattening with the `flatten_nested_arrays` configuration option.
15
+
16
+ ## 0.1.17.beta
17
+ - Catch errors relating to Bignum conversions present in the ``json`` library and manually convert to string as
18
+ a workaround.
19
+
3
20
  ## 0.1.16.beta
4
21
  - Fix race condition in ``register()`` method.
5
22
 
data/README.md CHANGED
@@ -10,7 +10,7 @@ You can view documentation for this plugin [on the Scalyr website](https://app.s
10
10
  # Quick start
11
11
 
12
12
  1. Build the gem, run `gem build logstash-output-scalyr.gemspec`
13
- 2. Install the gem into a Logstash installation, run `/usr/share/logstash/bin/logstash-plugin install logstash-output-scalyr-0.1.14.beta.gem` or follow the latest official instructions on working with plugins from Logstash.
13
+ 2. Install the gem into a Logstash installation, run `/usr/share/logstash/bin/logstash-plugin install logstash-output-scalyr-0.1.20.beta.gem` or follow the latest official instructions on working with plugins from Logstash.
14
14
  3. Configure the output plugin (e.g. add it to a pipeline .conf)
15
15
  4. Restart Logstash
16
16
 
@@ -68,7 +68,10 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
68
68
  # If true, nested values will be flattened (which changes keys to delimiter-separated concatenation of all
69
69
  # nested keys).
70
70
  config :flatten_nested_values, :validate => :boolean, :default => false
71
- config :flatten_nested_values_delimiter, :validate => :string, :default => "_"
71
+ config :flatten_nested_values_delimiter, :validate => :string, :default => "_"
72
+ config :flatten_nested_arrays, :validate => :boolean, :default => true
73
+ config :fix_deep_flattening_delimiters, :validate => :boolean, :default => false
74
+ config :flattening_max_key_count, :validate => :number, :default => -1
72
75
 
73
76
  # If true, the 'tags' field will be flattened into key-values where each key is a tag and each value is set to
74
77
  # :flat_tag_value
@@ -240,7 +243,12 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
240
243
  # Plugin level (either per batch or event level metrics). Other request
241
244
  # level metrics are handled by the HTTP Client class.
242
245
  @multi_receive_statistics = {
243
- :total_multi_receive_secs => 0
246
+ :total_multi_receive_secs => 0,
247
+ :total_events_processed => 0,
248
+ :successful_events_processed => 0,
249
+ :failed_events_processed => 0,
250
+ :total_retry_count => 0,
251
+ :total_java_class_cast_errors => 0
244
252
  }
245
253
  @plugin_metrics = get_new_metrics
246
254
 
@@ -344,6 +352,9 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
344
352
  sleep_interval = sleep_for(sleep_interval)
345
353
  exc_sleep += sleep_interval
346
354
  exc_retries += 1
355
+ @stats_lock.synchronize do
356
+ @multi_receive_statistics[:total_retry_count] += 1
357
+ end
347
358
  message = "Error uploading to Scalyr (will backoff-retry)"
348
359
  exc_data = {
349
360
  :error_class => e.e_class,
@@ -393,11 +404,19 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
393
404
  }
394
405
  exc_sleep += sleep_interval
395
406
  exc_retries += 1
407
+ @stats_lock.synchronize do
408
+ @multi_receive_statistics[:total_retry_count] += 1
409
+ end
396
410
  retry if @running and exc_retries < @max_retries
397
411
  log_retry_failure(multi_event_request, exc_data, exc_retries, exc_sleep)
398
412
  next
399
413
  end
400
414
 
415
+ @stats_lock.synchronize do
416
+ @multi_receive_statistics[:total_events_processed] += multi_event_request[:logstash_events].length
417
+ @multi_receive_statistics[:successful_events_processed] += multi_event_request[:logstash_events].length
418
+ end
419
+
401
420
  if !exc_data.nil?
402
421
  message = "Retry successful after error."
403
422
  if exc_commonly_retried
@@ -436,6 +455,10 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
436
455
 
437
456
 
438
457
  def log_retry_failure(multi_event_request, exc_data, exc_retries, exc_sleep)
458
+ @stats_lock.synchronize do
459
+ @multi_receive_statistics[:total_events_processed] += multi_event_request[:logstash_events].length
460
+ @multi_receive_statistics[:failed_events_processed] += multi_event_request[:logstash_events].length
461
+ end
439
462
  message = "Failed to send #{multi_event_request[:logstash_events].length} events after #{exc_retries} tries."
440
463
  sample_events = Array.new
441
464
  multi_event_request[:logstash_events][0,5].each {|l_event|
@@ -611,7 +634,11 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
611
634
  # flatten record
612
635
  if @flatten_nested_values
613
636
  start_time = Time.now.to_f
614
- record = Scalyr::Common::Util.flatten(record, delimiter=@flatten_nested_values_delimiter)
637
+ begin
638
+ record = Scalyr::Common::Util.flatten(record, delimiter=@flatten_nested_values_delimiter, flatten_arrays=@flatten_nested_arrays, fix_deep_flattening_delimiters=@fix_deep_flattening_delimiters, max_key_count=@flattening_max_key_count)
639
+ rescue Scalyr::Common::Util::MaxKeyCountError => e
640
+ @logger.warn("Error while flattening record", :error_message => e.message, :sample_keys => e.sample_keys)
641
+ end
615
642
  end_time = Time.now.to_f
616
643
  flatten_nested_values_duration = end_time - start_time
617
644
  end
@@ -661,6 +688,21 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
661
688
  ).force_encoding('UTF-8')
662
689
  end
663
690
  event_json = scalyr_event.to_json
691
+ rescue Java::JavaLang::ClassCastException => e
692
+ # Most likely we ran into the issue described here: https://github.com/flori/json/issues/336
693
+ # Because of the JRuby version that Logstash runs on, we don't have the option to simply upgrade past this bug,
694
+ # so if we run into it we convert bignums into strings so we can get the data in at least.
695
+ # This is fixed in JRuby 9.2.7, which includes json 2.2.0
696
+ @logger.warn("Error serializing events to JSON, likely due to the presence of Bignum values. Converting Bignum values to strings.")
697
+ @stats_lock.synchronize do
698
+ @multi_receive_statistics[:total_java_class_cast_errors] += 1
699
+ end
700
+ Scalyr::Common::Util.convert_bignums(scalyr_event)
701
+ event_json = scalyr_event.to_json
702
+ log_json = nil
703
+ if add_log
704
+ log_json = logs[log_identifier].to_json
705
+ end
664
706
  end
665
707
 
666
708
  # generate new request if json size of events in the array exceed maximum request buffer size
@@ -749,7 +791,16 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
749
791
 
750
792
  # We time serialization to get some insight on how long it takes to serialize the request body
751
793
  start_time = Time.now.to_f
752
- serialized_body = body.to_json
794
+ begin
795
+ serialized_body = body.to_json
796
+ rescue Java::JavaLang::ClassCastException => e
797
+ @logger.warn("Error serializing events to JSON, likely due to the presence of Bignum values. Converting Bignum values to strings.")
798
+ @stats_lock.synchronize do
799
+ @multi_receive_statistics[:total_java_class_cast_errors] += 1
800
+ end
801
+ Scalyr::Common::Util.convert_bignums(body)
802
+ serialized_body = body.to_json
803
+ end
753
804
  end_time = Time.now.to_f
754
805
  serialization_duration = end_time - start_time
755
806
  {
@@ -1,44 +1,95 @@
1
1
  module Scalyr; module Common; module Util;
2
2
 
3
+ class MaxKeyCountError < StandardError
4
+ attr_reader :message, :sample_keys
5
+
6
+ def initialize(message, sample_keys)
7
+ @message = message
8
+ @sample_keys = sample_keys
9
+ end
10
+ end
3
11
 
4
12
  # Flattens a hash or array, returning a hash where keys are a delimiter-separated string concatenation of all
5
13
  # nested keys. Returned keys are always strings. If a non-hash or array is provided, raises TypeError.
6
14
  # Please see rspec util_spec.rb for expected behavior.
7
- def self.flatten(obj, delimiter='_')
15
+ # Includes a known bug where the defined delimiter will not be used for nesting levels past the first; this is kept
16
+ # because some queries and dashboards already rely on the broken functionality.
17
+ def self.flatten(hash_obj, delimiter='_', flatten_arrays=true, fix_deep_flattening_delimiters=false, max_key_count=-1)
8
18
 
9
19
  # base case is input object is not enumerable, in which case simply return it
10
- if !obj.respond_to?(:each)
20
+ if !hash_obj.respond_to?(:each)
11
21
  raise TypeError.new('Input must be a hash or array')
12
22
  end
23
+ # case where we pass in a valid array, but don't want to flatten arrays
24
+ if !hash_obj.respond_to?(:has_key?) and !flatten_arrays
25
+ return hash_obj
26
+ end
13
27
 
28
+ stack = []
29
+ stack << hash_obj
30
+ key_stack = []
31
+ key_stack << ""
32
+ key_list = []
33
+ key_list_width = []
14
34
  result = Hash.new
15
- # require 'pry'
16
- # binding.pry
35
+ test_key = 0
36
+ #Debugging
37
+ #require 'pry'
38
+ #binding.pry
17
39
 
18
- if obj.respond_to?(:has_key?)
40
+ until stack.empty?
41
+ obj = stack.pop
42
+ key_list << key_stack.pop
19
43
 
20
- # input object is a hash
21
- obj.each do |key, value|
22
- if value.respond_to?(:each)
23
- flatten(value).each do |subkey, subvalue|
24
- result["#{key}#{delimiter}#{subkey}"] = subvalue
25
- end
26
- else
27
- result["#{key}"] = value
44
+ # Case when object is a hash
45
+ if obj.respond_to?(:has_key?)
46
+ key_list_width << obj.keys.count
47
+ obj.each do |key, value|
48
+ key_stack << key
49
+ stack << value
28
50
  end
29
- end
30
51
 
31
- else
52
+ # Case when object is an array we intend to flatten
53
+ elsif flatten_arrays and obj.respond_to?(:each)
54
+ key_list_width << obj.count
55
+ obj.each_with_index do |value, index|
56
+ key_stack << index
57
+ stack << value
58
+ end
32
59
 
33
- # input object is an array or set
34
- obj.each_with_index do |value, index|
35
- if value.respond_to?(:each)
36
- flatten(value).each do |subkey, subvalue|
37
- result["#{index}#{delimiter}#{subkey}"] = subvalue
60
+ else
61
+ result_key = ""
62
+ delim = delimiter
63
+ key_list.each_with_index do |key, index|
64
+ # We have a blank key at the start of the key list to avoid issues with calling pop, so we ignore the delimiter
65
+ # for the first two keys
66
+ if index > 1
67
+ result_key += "#{delim}#{key}"
68
+ if not fix_deep_flattening_delimiters
69
+ delim = "_"
70
+ end
71
+ else
72
+ result_key += "#{key}"
38
73
  end
39
- else
40
- result["#{index}"] = value
41
74
  end
75
+ result[result_key] = obj
76
+
77
+ if max_key_count > -1 and result.keys.count > max_key_count
78
+ raise MaxKeyCountError.new(
79
+ "Resulting flattened object will contain more keys than the configured flattening_max_key_count of #{max_key_count}",
80
+ result.keys[0..6]
81
+ )
82
+ end
83
+
84
+ throw_away = key_list.pop
85
+ until key_list_width.empty? or key_list_width[-1] > 1
86
+ throw_away = key_list_width.pop
87
+ throw_away = key_list.pop
88
+ end
89
+ if not key_list_width.empty?
90
+ key_list_width[-1] -= 1
91
+ end
92
+
42
93
  end
43
94
  end
44
95
 
@@ -52,5 +103,26 @@ def self.truncate(content, max)
52
103
  return content
53
104
  end
54
105
 
106
+ def self.convert_bignums(obj)
107
+ if obj.respond_to?(:has_key?) and obj.respond_to?(:each)
108
+ # input object is a hash
109
+ obj.each do |key, value|
110
+ obj[key] = convert_bignums(value)
111
+ end
112
+
113
+ elsif obj.respond_to?(:each)
114
+ # input object is an array or set
115
+ obj.each_with_index do |value, index|
116
+ obj[index] = convert_bignums(value)
117
+ end
118
+
119
+ elsif obj.is_a? Bignum
120
+ return obj.to_s
121
+
122
+ else
123
+ return obj
124
+ end
125
+ end
126
+
55
127
  end; end; end;
56
128
 
@@ -1,2 +1,2 @@
1
1
  # encoding: utf-8
2
- PLUGIN_VERSION = "v0.1.16.beta"
2
+ PLUGIN_VERSION = "v0.1.20.beta"
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-output-scalyr'
3
- s.version = '0.1.16.beta'
3
+ s.version = '0.1.20.beta'
4
4
  s.licenses = ['Apache-2.0']
5
5
  s.summary = "Scalyr output plugin for Logstash"
6
6
  s.description = "Sends log data collected by Logstash to Scalyr (https://www.scalyr.com)"
@@ -0,0 +1,90 @@
1
+ require 'benchmark'
2
+ require 'quantile'
3
+
4
+ require_relative '../../lib/scalyr/common/util'
5
+
6
+ # Micro benchmark which measures how long it takes to find all the Bignums in a record and convert them to strings
7
+
8
+ ITERATIONS = 500
9
+
10
+ def rand_str(len)
11
+ return (0...len).map { (65 + rand(26)).chr }.join
12
+ end
13
+
14
+ def rand_bignum()
15
+ return 200004000020304050300 + rand(999999)
16
+ end
17
+
18
+ def generate_hash(widths)
19
+ result = {}
20
+ if widths.empty?
21
+ return rand_bignum()
22
+ else
23
+ widths[0].times do
24
+ result[rand_str(9)] = generate_hash(widths[1..widths.length])
25
+ end
26
+ return result
27
+ end
28
+ end
29
+
30
+ def generate_data_array_for_spec(spec)
31
+ data = []
32
+ ITERATIONS.times do
33
+ data << generate_hash(spec)
34
+ end
35
+
36
+ data
37
+ end
38
+
39
+ def run_benchmark_and_print_results(data, run_benchmark_func)
40
+ puts ""
41
+ puts "Using %s total keys in a hash" % [Scalyr::Common::Util.flatten(data[0]).count]
42
+ puts ""
43
+
44
+ result = []
45
+ ITERATIONS.times do |i|
46
+ result << Benchmark.measure { run_benchmark_func.(data[0]) }
47
+ end
48
+
49
+ sum = result.inject(nil) { |sum, t| sum.nil? ? sum = t : sum += t }
50
+ avg = sum / result.size
51
+
52
+ Benchmark.bm(7, "sum:", "avg:") do |b|
53
+ [sum, avg]
54
+ end
55
+ puts ""
56
+ end
57
+
58
+
59
+ puts "Using %s iterations" % [ITERATIONS]
60
+ puts ""
61
+
62
+ @value = Quantile::Estimator.new
63
+ @prng = Random.new
64
+
65
+ def convert_bignums(record)
66
+ Scalyr::Common::Util.convert_bignums(record)
67
+ end
68
+
69
+ puts "Util.convert_bignums()"
70
+ puts "==============================="
71
+
72
+ # Around ~200 keys in a hash
73
+ data = generate_data_array_for_spec([4, 4, 3, 4])
74
+ run_benchmark_and_print_results(data, method(:convert_bignums))
75
+
76
+ # Around ~200 keys in a hash (single level)
77
+ data = generate_data_array_for_spec([200])
78
+ run_benchmark_and_print_results(data, method(:convert_bignums))
79
+
80
+ # Around ~512 keys in a hash
81
+ data = generate_data_array_for_spec([8, 4, 4, 4])
82
+ run_benchmark_and_print_results(data, method(:convert_bignums))
83
+
84
+ # Around ~960 keys in a hash
85
+ data = generate_data_array_for_spec([12, 5, 4, 4])
86
+ run_benchmark_and_print_results(data, method(:convert_bignums))
87
+
88
+ # Around ~2700 keys in a hash
89
+ data = generate_data_array_for_spec([14, 8, 6, 4])
90
+ run_benchmark_and_print_results(data, method(:convert_bignums))
@@ -288,6 +288,72 @@ describe LogStash::Outputs::Scalyr do
288
288
  end
289
289
  end
290
290
 
291
+ context "when configured to flatten values with custom delimiter and deep delimiter fix" do
292
+ config = {
293
+ 'api_write_token' => '1234',
294
+ 'flatten_tags' => true,
295
+ 'flat_tag_value' => 'true',
296
+ 'flat_tag_prefix' => 'tag_prefix_',
297
+ 'flatten_nested_values' => true, # this converts into string 'true'
298
+ 'flatten_nested_values_delimiter' => ".",
299
+ 'fix_deep_flattening_delimiters' => true,
300
+ }
301
+ plugin = LogStash::Outputs::Scalyr.new(config)
302
+ it "flattens nested values with a period" do
303
+ allow(plugin).to receive(:send_status).and_return(nil)
304
+ plugin.register
305
+ result = plugin.build_multi_event_request_array(sample_events)
306
+ body = JSON.parse(result[0][:body])
307
+ expect(body['events'].size).to eq(3)
308
+ expect(body['events'][2]['attrs']).to eq({
309
+ "nested.a" => 1,
310
+ "nested.b.0" => 3,
311
+ "nested.b.1" => 4,
312
+ "nested.b.2" => 5,
313
+ 'seq' => 3,
314
+ 'source_file' => 'my file 3',
315
+ 'source_host' => 'my host 3',
316
+ 'serverHost' => 'Logstash',
317
+ "tag_prefix_t1" => "true",
318
+ "tag_prefix_t2" => "true",
319
+ "tag_prefix_t3" => "true",
320
+ "parser" => "logstashParser",
321
+ })
322
+ end
323
+ end
324
+
325
+ context "when configured to flatten values with custom delimiter, no array flattening" do
326
+ config = {
327
+ 'api_write_token' => '1234',
328
+ 'flatten_tags' => true,
329
+ 'flat_tag_value' => 'true',
330
+ 'flat_tag_prefix' => 'tag_prefix_',
331
+ 'flatten_nested_values' => true, # this converts into string 'true'
332
+ 'flatten_nested_arrays' => false,
333
+ 'flatten_nested_values_delimiter' => ".",
334
+ }
335
+ plugin = LogStash::Outputs::Scalyr.new(config)
336
+ it "flattens nested values with a period" do
337
+ allow(plugin).to receive(:send_status).and_return(nil)
338
+ plugin.register
339
+ result = plugin.build_multi_event_request_array(sample_events)
340
+ body = JSON.parse(result[0][:body])
341
+ expect(body['events'].size).to eq(3)
342
+ expect(body['events'][2]['attrs']).to eq({
343
+ "nested.a" => 1,
344
+ "nested.b" => [3, 4, 5],
345
+ 'seq' => 3,
346
+ 'source_file' => 'my file 3',
347
+ 'source_host' => 'my host 3',
348
+ 'serverHost' => 'Logstash',
349
+ "tag_prefix_t1" => "true",
350
+ "tag_prefix_t2" => "true",
351
+ "tag_prefix_t3" => "true",
352
+ "parser" => "logstashParser",
353
+ })
354
+ end
355
+ end
356
+
291
357
  context "when configured to flatten values and tags" do
292
358
  config = {
293
359
  'api_write_token' => '1234',
@@ -342,5 +408,55 @@ describe LogStash::Outputs::Scalyr do
342
408
  })
343
409
  end
344
410
  end
411
+
412
+ context "when configured to flatten with max keys configured to 3" do
413
+ config = {
414
+ 'api_write_token' => '1234',
415
+ 'flatten_nested_values' => true, # this converts into string 'true'
416
+ 'flattening_max_key_count' => 3,
417
+ }
418
+ plugin = LogStash::Outputs::Scalyr.new(config)
419
+ it "does not flatten" do
420
+ allow(plugin).to receive(:send_status).and_return(nil)
421
+ plugin.register
422
+ allow(plugin.instance_variable_get(:@logger)).to receive(:warn)
423
+ result = plugin.build_multi_event_request_array(sample_events)
424
+ body = JSON.parse(result[0][:body])
425
+ expect(body['events'].size).to eq(3)
426
+ expect(body['events'][2]['attrs']).to eq({
427
+ "nested" => {'a'=>1, 'b'=>[3,4,5]},
428
+ 'seq' => 3,
429
+ 'source_file' => 'my file 3',
430
+ 'source_host' => 'my host 3',
431
+ 'serverHost' => 'Logstash',
432
+ "tags" => ["t1", "t2", "t3"],
433
+ "parser" => "logstashParser",
434
+ })
435
+ expect(plugin.instance_variable_get(:@logger)).to have_received(:warn).with("Error while flattening record",
436
+ {
437
+ :error_message=>"Resulting flattened object will contain more keys than the configured flattening_max_key_count of 3",
438
+ :sample_keys=>["serverHost", "parser", "tags_2", "tags_1"]
439
+ }
440
+ ).exactly(3).times
441
+ end
442
+ end
443
+
444
+ context "when receiving an event with Bignums" do
445
+ config = {
446
+ 'api_write_token' => '1234',
447
+ }
448
+ plugin = LogStash::Outputs::Scalyr.new(config)
449
+ it "doesn't throw an error" do
450
+ allow(plugin).to receive(:send_status).and_return(nil)
451
+ plugin.register
452
+ e = LogStash::Event.new
453
+ e.set('bignumber', 2000023030042002050202030320240)
454
+ allow(plugin.instance_variable_get(:@logger)).to receive(:error)
455
+ result = plugin.build_multi_event_request_array([e])
456
+ body = JSON.parse(result[0][:body])
457
+ expect(body['events'].size).to eq(1)
458
+ expect(plugin.instance_variable_get(:@logger)).to_not receive(:error)
459
+ end
460
+ end
345
461
  end
346
462
  end
@@ -132,6 +132,70 @@ describe Scalyr::Common::Util do
132
132
  expect(Scalyr::Common::Util.flatten(din)).to eq(dout)
133
133
  end
134
134
 
135
+ it "flattens a single-level array, no array flattening" do
136
+ din = [1, 2, 3]
137
+ dout = [1, 2, 3]
138
+ expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
139
+ end
140
+
141
+ it "flattens a multi-level array, no array flattening" do
142
+ din = ['a', 'b', ['c', ['d', 'e', 'f'], 'g'], 'h', 'i']
143
+ dout = ['a', 'b', ['c', ['d', 'e', 'f'], 'g'], 'h', 'i']
144
+ expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
145
+ end
146
+
147
+ it "flattens a hash that contains an array, no array flattening" do
148
+ din = {
149
+ 'a' => 1,
150
+ 'c' => [100, 200, 300]
151
+ }
152
+ dout = {
153
+ 'a' => 1,
154
+ 'c' => [100, 200, 300]
155
+ }
156
+ expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
157
+ end
158
+
159
+ it "flattens a hash that contains an array that contains a hash, no array flattening" do
160
+ din = {
161
+ 'a' => 1,
162
+ 'c' => [
163
+ 100,
164
+ {'d' => 1000, 'e' => 2000},
165
+ 300
166
+ ]
167
+ }
168
+ dout = {
169
+ 'a' => 1,
170
+ 'c' => [
171
+ 100,
172
+ {'d' => 1000, 'e' => 2000},
173
+ 300
174
+ ]
175
+ }
176
+ expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
177
+ end
178
+
179
+ it "flattens a hash that contains an array that contains a hash that contains an array, no array flattening" do
180
+ din = {
181
+ 'a' => 1,
182
+ 'c' => [
183
+ 100,
184
+ {'d' => 1000, 'e' => 2000, 'f' => [4, 5, 6]},
185
+ 300
186
+ ]
187
+ }
188
+ dout = {
189
+ 'a' => 1,
190
+ 'c' => [
191
+ 100,
192
+ {'d' => 1000, 'e' => 2000, 'f' => [4, 5, 6]},
193
+ 300
194
+ ]
195
+ }
196
+ expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
197
+ end
198
+
135
199
  it "accepts custom delimiters" do
136
200
  din = {
137
201
  'a' => 1,
@@ -148,6 +212,42 @@ describe Scalyr::Common::Util do
148
212
  expect(Scalyr::Common::Util.flatten(din, ':')).to eq(dout)
149
213
  end
150
214
 
215
+ it "accepts custom delimiters with greater depth" do
216
+ din = {
217
+ 'a' => 1,
218
+ 'b' => {
219
+ 'c' => {
220
+ 'e' => 100
221
+ },
222
+ 'd' => 200,
223
+ }
224
+ }
225
+ dout = {
226
+ 'a' => 1,
227
+ 'b:c_e' => 100,
228
+ 'b:d' => 200,
229
+ }
230
+ expect(Scalyr::Common::Util.flatten(din, ':')).to eq(dout)
231
+ end
232
+
233
+ it "accepts custom delimiters with greater depth and deep delimiters fix" do
234
+ din = {
235
+ 'a' => 1,
236
+ 'b' => {
237
+ 'c' => {
238
+ 'e' => 100
239
+ },
240
+ 'd' => 200,
241
+ }
242
+ }
243
+ dout = {
244
+ 'a' => 1,
245
+ 'b:c:e' => 100,
246
+ 'b:d' => 200,
247
+ }
248
+ expect(Scalyr::Common::Util.flatten(din, ':', true, true)).to eq(dout)
249
+ end
250
+
151
251
  it "stringifies non-string keys" do
152
252
  din = {
153
253
  'a' => 1,
@@ -183,4 +283,23 @@ describe Scalyr::Common::Util do
183
283
  it "raises exception if a non-dict is provided" do
184
284
  expect {Scalyr::Common::Util.flatten(1)}.to raise_error(TypeError)
185
285
  end
286
+
287
+ it "flattens a hash 5000 layers deep" do
288
+ din = {
289
+ 'a' => {},
290
+ }
291
+ hash = din
292
+ for i in 0...4999
293
+ hash = hash["a"]
294
+ hash["a"] = {}
295
+ if i == 4998
296
+ hash["a"] = "b"
297
+ end
298
+ end
299
+
300
+ dout = {
301
+ 'a' + "_a" * 4999 => "b",
302
+ }
303
+ expect(Scalyr::Common::Util.flatten(din, '_')).to eq(dout)
304
+ end
186
305
  end
@@ -1,6 +1,6 @@
1
1
  #!/bin/sh
2
2
  'exec' "jruby" '-x' "$0" "$@"
3
- #!/Users/tomaz/.rvm/rubies/jruby-9.2.9.0/bin/jruby
3
+ #!/Users/yans/.rvm/rubies/jruby-9.2.9.0/bin/jruby
4
4
  #
5
5
  # This file was generated by RubyGems.
6
6
  #
@@ -1,6 +1,6 @@
1
1
  #!/bin/sh
2
2
  'exec' "jruby" '-x' "$0" "$@"
3
- #!/Users/tomaz/.rvm/rubies/jruby-9.2.9.0/bin/jruby
3
+ #!/Users/yans/.rvm/rubies/jruby-9.2.9.0/bin/jruby
4
4
  #
5
5
  # This file was generated by RubyGems.
6
6
  #
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-output-scalyr
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.16.beta
4
+ version: 0.1.20.beta
5
5
  platform: ruby
6
6
  authors:
7
7
  - Edward Chee
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2021-06-23 00:00:00.000000000 Z
11
+ date: 2021-08-09 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement
@@ -132,6 +132,7 @@ files:
132
132
  - lib/scalyr/common/util.rb
133
133
  - lib/scalyr/constants.rb
134
134
  - logstash-output-scalyr.gemspec
135
+ - spec/benchmarks/bignum_fixing.rb
135
136
  - spec/benchmarks/flattening_and_serialization.rb
136
137
  - spec/benchmarks/metrics_overhead.rb
137
138
  - spec/logstash/outputs/scalyr_integration_spec.rb
@@ -4062,6 +4063,7 @@ signing_key:
4062
4063
  specification_version: 4
4063
4064
  summary: Scalyr output plugin for Logstash
4064
4065
  test_files:
4066
+ - spec/benchmarks/bignum_fixing.rb
4065
4067
  - spec/benchmarks/flattening_and_serialization.rb
4066
4068
  - spec/benchmarks/metrics_overhead.rb
4067
4069
  - spec/logstash/outputs/scalyr_integration_spec.rb