logstash-output-scalyr 0.1.14.beta → 0.1.19.beta

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 1af6e37035c37c270036487e9b939f40e8454d39ddea9983a737f409957e0747
-  data.tar.gz: 95438909311aa595d2d4e771424765f3705cdc649225ca81c053e3b9c129b183
+  metadata.gz: '084d41a09e7ef86214868f26a97639169df8982d9488eb827d337ef53f02fc07'
+  data.tar.gz: 5bd3b17804c60044901d2f40c40419c119e680f7ac4a4ffc543701a885cb0c2e
 SHA512:
-  metadata.gz: d2a9f542cfaaf8bb9e84ebf976694c548e153e1e170417309644b9206bf4df2ba8844447cbb2b0afd4ba8d03922688f6348a6fb77a6c8489a3f2a49bb873fed9
-  data.tar.gz: 514fab3aa32f39e1ebe6e4bf164a9b9f648b82e7aa861d9cbc2edd6d44d9c6b4d9d3ea9b7c281ae26bf719e4373bde6059f220033dc660821a369b6186180798
+  metadata.gz: 837bd2d4d142f31bcd30494175731818de9f60f9f39ebc13cf0ae92b36e58ad4b6d288ba2f92109323f096d30a1774334651907b07e1d1310f6b42dd24698b2e
+  data.tar.gz: 221e47feab585a99dc69b475f446f9a9b4f9d2bd91ffd60753886438c348e0139618afa46f3889180aba68582ed9532376ee5449fb0cf91bb05cbb796c11a08c
data/CHANGELOG.md CHANGED
@@ -1,5 +1,26 @@
 # Beta
 
+## 0.1.19.beta
+- Undo a change to the nested value flattening functionality in order to keep the existing formatting. The undone
+  behavior can be re-enabled by setting the `fix_deep_flattening_delimiters` configuration option to true.
+
+## 0.1.18.beta
+- Add metrics for successfully sent and failed Logstash events, as well as for retries.
+- Make array flattening during nested value flattening optional via the `flatten_nested_arrays` configuration option.
+
+## 0.1.17.beta
+- Catch errors related to Bignum conversions in the ``json`` library and manually convert the affected values to
+  strings as a workaround.
+
+## 0.1.16.beta
+- Fix a race condition in the ``register()`` method.
+
+## 0.1.15.beta
+- Only call the ``send_status`` method at the end of ``multi_receive()`` if there is at least one
+  record in the batch when the ``report_status_for_empty_batches`` config option is set to ``false``.
+- Update the ``register()`` method to use a separate short-lived client session for sending the initial
+  client status.
+
 ## 0.1.14.beta
 - Add configurable max retries for requests when running into errors.
 - Add the ability to send messages to the dead letter queue if we exhaust all retries and it is configured.
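The entries above all surface as ordinary plugin settings. As a rough illustration (modelled on the rspec suite further down in this diff; the API token is a placeholder), the new options can be passed together when constructing the output programmatically:

```ruby
# Sketch only: mirrors how the specs below construct the plugin; the token is a placeholder.
require "logstash/outputs/scalyr"

plugin = LogStash::Outputs::Scalyr.new({
  'api_write_token'                 => '1234',   # placeholder
  'flatten_nested_values'           => true,
  'flatten_nested_values_delimiter' => '.',
  'flatten_nested_arrays'           => false,    # new in 0.1.18.beta
  'fix_deep_flattening_delimiters'  => true,     # new in 0.1.19.beta
  'report_status_for_empty_batches' => false,    # new in 0.1.15.beta
})
```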
data/README.md CHANGED
@@ -10,7 +10,7 @@ You can view documentation for this plugin [on the Scalyr website](https://app.s
 # Quick start
 
 1. Build the gem, run `gem build logstash-output-scalyr.gemspec`
-2. Install the gem into a Logstash installation, run `/usr/share/logstash/bin/logstash-plugin install logstash-output-scalyr-0.1.14.beta.gem` or follow the latest official instructions on working with plugins from Logstash.
+2. Install the gem into a Logstash installation, run `/usr/share/logstash/bin/logstash-plugin install logstash-output-scalyr-0.1.19.beta.gem` or follow the latest official instructions on working with plugins from Logstash.
 3. Configure the output plugin (e.g. add it to a pipeline .conf)
 4. Restart Logstash
 
@@ -68,7 +68,9 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
   # If true, nested values will be flattened (which changes keys to a delimiter-separated concatenation of all
   # nested keys).
   config :flatten_nested_values, :validate => :boolean, :default => false
-  config :flatten_nested_values_delimiter, :validate => :string, :default => "_"
+  config :flatten_nested_values_delimiter, :validate => :string, :default => "_"
+  config :flatten_nested_arrays, :validate => :boolean, :default => true
+  config :fix_deep_flattening_delimiters, :validate => :boolean, :default => false
 
   # If true, the 'tags' field will be flattened into key-values where each key is a tag and each value is set to
   # :flat_tag_value
@@ -109,6 +111,11 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
   # minutes.
   config :status_report_interval, :validate => :number, :default => 300
 
+  # If true, also call send_status when multi_receive() is called with no events.
+  # In some situations (e.g. when Logstash is configured with multiple conditional Scalyr
+  # outputs, most of which are idle) you may want to set this to false.
+  config :report_status_for_empty_batches, :validate => :boolean, :default => true
+
   # Set to true to also log status messages with various metrics to stdout in addition to sending
   # this data to Scalyr
   config :log_status_messages_to_stdout, :validate => :boolean, :default => false
data/lib/logstash/outputs/scalyr.rb CHANGED
@@ -235,7 +242,12 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
     # Plugin level (either per batch or event level metrics). Other request
     # level metrics are handled by the HTTP Client class.
     @multi_receive_statistics = {
-      :total_multi_receive_secs => 0
+      :total_multi_receive_secs => 0,
+      :total_events_processed => 0,
+      :successful_events_processed => 0,
+      :failed_events_processed => 0,
+      :total_retry_count => 0,
+      :total_java_class_cast_errors => 0
     }
     @plugin_metrics = get_new_metrics
 
@@ -251,7 +263,25 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
     @logger.info(sprintf("Started Scalyr output plugin (%s)." % [PLUGIN_VERSION]), :class => self.class.name)
 
     # Finally, send a status line to Scalyr
-    send_status
+    # We use a separate short-lived client session for sending the initial client status.
+    # This is done to avoid the overhead in case a single Logstash instance has many Scalyr output
+    # plugins configured with conditionals and the majority of them are inactive (i.e. receive no data).
+    # This way we don't need to keep an idle long-running connection open.
+    initial_send_status_client_session = Scalyr::Common::Client::ClientSession.new(
+      @logger, @add_events_uri,
+      @compression_type, @compression_level, @ssl_verify_peer, @ssl_ca_bundle_path, @append_builtin_cert,
+      @record_stats_for_status, @flush_quantile_estimates_on_status_send,
+      @http_connect_timeout, @http_socket_timeout, @http_request_timeout, @http_pool_max, @http_pool_max_per_route
+    )
+    send_status(initial_send_status_client_session)
+    initial_send_status_client_session.close
+
+    # We also "prime" the main HTTP client here, the one used for sending subsequent requests.
+    # Here priming just means setting up the client parameters without opening any connections.
+    # Since the client writes certs to a temporary file, there could be a race if we didn't do this
+    # here, because multi_receive() is multi-threaded. An alternative would be to put a lock around
+    # the client init method (aka client_config()).
+    @client_session.client
 
   end # def register
 
@@ -321,6 +351,9 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
           sleep_interval = sleep_for(sleep_interval)
           exc_sleep += sleep_interval
           exc_retries += 1
+          @stats_lock.synchronize do
+            @multi_receive_statistics[:total_retry_count] += 1
+          end
           message = "Error uploading to Scalyr (will backoff-retry)"
           exc_data = {
             :error_class => e.e_class,
@@ -370,11 +403,19 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
           }
           exc_sleep += sleep_interval
           exc_retries += 1
+          @stats_lock.synchronize do
+            @multi_receive_statistics[:total_retry_count] += 1
+          end
           retry if @running and exc_retries < @max_retries
           log_retry_failure(multi_event_request, exc_data, exc_retries, exc_sleep)
           next
         end
 
+        @stats_lock.synchronize do
+          @multi_receive_statistics[:total_events_processed] += multi_event_request[:logstash_events].length
+          @multi_receive_statistics[:successful_events_processed] += multi_event_request[:logstash_events].length
+        end
+
         if !exc_data.nil?
           message = "Retry successful after error."
           if exc_commonly_retried
@@ -394,7 +435,10 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
         end
       end
 
-      send_status
+      if @report_status_for_empty_batches or records_count > 0
+        send_status
+      end
+
       return result
 
     rescue => e
@@ -410,6 +454,10 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
 
 
   def log_retry_failure(multi_event_request, exc_data, exc_retries, exc_sleep)
+    @stats_lock.synchronize do
+      @multi_receive_statistics[:total_events_processed] += multi_event_request[:logstash_events].length
+      @multi_receive_statistics[:failed_events_processed] += multi_event_request[:logstash_events].length
+    end
     message = "Failed to send #{multi_event_request[:logstash_events].length} events after #{exc_retries} tries."
     sample_events = Array.new
     multi_event_request[:logstash_events][0,5].each {|l_event|
@@ -585,7 +633,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
       # flatten record
       if @flatten_nested_values
         start_time = Time.now.to_f
-        record = Scalyr::Common::Util.flatten(record, delimiter=@flatten_nested_values_delimiter)
+        record = Scalyr::Common::Util.flatten(record, delimiter=@flatten_nested_values_delimiter, flatten_arrays=@flatten_nested_arrays, fix_deep_flattening_delimiters=@fix_deep_flattening_delimiters)
         end_time = Time.now.to_f
         flatten_nested_values_duration = end_time - start_time
       end
@@ -635,6 +683,21 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
           ).force_encoding('UTF-8')
         end
         event_json = scalyr_event.to_json
+      rescue Java::JavaLang::ClassCastException => e
+        # Most likely we ran into the issue described here: https://github.com/flori/json/issues/336
+        # Because of the JRuby version Logstash runs on we don't have the option to simply upgrade this away,
+        # so if we run into it we convert Bignums into strings so we can at least get the data in.
+        # This is fixed in JRuby 9.2.7, which includes json 2.2.0.
+        @logger.warn("Error serializing events to JSON, likely due to the presence of Bignum values. Converting Bignum values to strings.")
+        @stats_lock.synchronize do
+          @multi_receive_statistics[:total_java_class_cast_errors] += 1
+        end
+        Scalyr::Common::Util.convert_bignums(scalyr_event)
+        event_json = scalyr_event.to_json
+        log_json = nil
+        if add_log
+          log_json = logs[log_identifier].to_json
+        end
       end
 
       # generate a new request if the JSON size of the events in the array exceeds the maximum request buffer size
@@ -723,7 +786,16 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
 
     # We time serialization to get some insight on how long it takes to serialize the request body
     start_time = Time.now.to_f
-    serialized_body = body.to_json
+    begin
+      serialized_body = body.to_json
+    rescue Java::JavaLang::ClassCastException => e
+      @logger.warn("Error serializing events to JSON, likely due to the presence of Bignum values. Converting Bignum values to strings.")
+      @stats_lock.synchronize do
+        @multi_receive_statistics[:total_java_class_cast_errors] += 1
+      end
+      Scalyr::Common::Util.convert_bignums(body)
+      serialized_body = body.to_json
+    end
     end_time = Time.now.to_f
     serialization_duration = end_time - start_time
     {
@@ -778,7 +850,8 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
   # Finally, note that there could be multiple instances of this plugin (one per worker), in which case each worker
   # thread sends its own status updates. This is intentional so that we know how much data each worker thread is
   # uploading to Scalyr over time.
-  def send_status
+  def send_status(client_session = nil)
+    client_session = @client_session if client_session.nil?
 
     status_event = {
       :ts => (Time.now.to_f * (10**9)).round,
@@ -797,7 +870,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
     # echee TODO: get instance stats from session and create a status log line
     msg = 'plugin_status: '
     cnt = 0
-    @client_session.get_stats.each do |k, v|
+    client_session.get_stats.each do |k, v|
       val = v.instance_of?(Float) ? sprintf("%.4f", v) : v
       val = val.nil? ? 0 : val
       msg << ' ' if cnt > 0
@@ -817,7 +890,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
     end
     multi_event_request = create_multi_event_request([status_event], nil, nil, nil)
     begin
-      @client_session.post_add_events(multi_event_request[:body], true, 0)
+      client_session.post_add_events(multi_event_request[:body], true, 0)
     rescue => e
       if e.body
         @logger.warn(
data/lib/scalyr/common/client.rb CHANGED
@@ -267,6 +267,7 @@ class ClientSession
 
 
   def close
+    @client.close if @client
   end # def close
 
 
data/lib/scalyr/common/util.rb CHANGED
@@ -4,7 +4,9 @@ module Scalyr; module Common; module Util;
 # Flattens a hash or array, returning a hash where keys are a delimiter-separated string concatenation of all
 # nested keys. Returned keys are always strings. If a non-hash or array is provided, raises TypeError.
 # Please see rspec util_spec.rb for expected behavior.
-def self.flatten(obj, delimiter='_')
+# Includes a known bug where the configured delimiter is not used for nesting levels past the first; this is kept
+# because some queries and dashboards already rely on the broken behavior.
+def self.flatten(obj, delimiter='_', flatten_arrays=true, fix_deep_flattening_delimiters=false)
 
   # base case is input object is not enumerable, in which case simply return it
   if !obj.respond_to?(:each)
@@ -19,8 +21,8 @@ def self.flatten(obj, delimiter='_')
 
     # input object is a hash
     obj.each do |key, value|
-      if value.respond_to?(:each)
-        flatten(value).each do |subkey, subvalue|
+      if (flatten_arrays and value.respond_to?(:each)) or value.respond_to?(:has_key?)
+        flatten(value, fix_deep_flattening_delimiters ? delimiter : '_', flatten_arrays).each do |subkey, subvalue|
           result["#{key}#{delimiter}#{subkey}"] = subvalue
         end
       else
@@ -28,18 +30,23 @@ def self.flatten(obj, delimiter='_')
       end
     end
 
-  else
+  elsif flatten_arrays
 
     # input object is an array or set
     obj.each_with_index do |value, index|
       if value.respond_to?(:each)
-        flatten(value).each do |subkey, subvalue|
+        flatten(value, fix_deep_flattening_delimiters ? delimiter : '_', flatten_arrays).each do |subkey, subvalue|
           result["#{index}#{delimiter}#{subkey}"] = subvalue
         end
       else
        result["#{index}"] = value
      end
    end
+
+  else
+
+    result = obj
+
   end
 
   return result
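To make the effect of the two new parameters in the hunk above concrete, here is a small illustrative sketch. The expected outputs mirror the rspec expectations later in this diff; the require path assumes the snippet is run from the gem root.

```ruby
# Illustrative sketch only; outputs mirror the spec expectations further down.
require_relative 'lib/scalyr/common/util'  # assumed to be run from the gem root

record = { 'nested' => { 'a' => 1, 'b' => [3, 4, 5] } }

# Default behavior: arrays are flattened, and nesting levels past the first
# fall back to the '_' delimiter (the known bug kept for compatibility).
Scalyr::Common::Util.flatten(record, '.')
# => {"nested.a"=>1, "nested.b_0"=>3, "nested.b_1"=>4, "nested.b_2"=>5}

# fix_deep_flattening_delimiters=true applies the configured delimiter at every level.
Scalyr::Common::Util.flatten(record, '.', true, true)
# => {"nested.a"=>1, "nested.b.0"=>3, "nested.b.1"=>4, "nested.b.2"=>5}

# flatten_arrays=false leaves arrays untouched.
Scalyr::Common::Util.flatten(record, '.', false)
# => {"nested.a"=>1, "nested.b"=>[3, 4, 5]}
```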
@@ -52,5 +59,26 @@ def self.truncate(content, max)
   return content
 end
 
+def self.convert_bignums(obj)
+  if obj.respond_to?(:has_key?) and obj.respond_to?(:each)
+    # input object is a hash
+    obj.each do |key, value|
+      obj[key] = convert_bignums(value)
+    end
+
+  elsif obj.respond_to?(:each)
+    # input object is an array or set
+    obj.each_with_index do |value, index|
+      obj[index] = convert_bignums(value)
+    end
+
+  elsif obj.is_a? Bignum
+    return obj.to_s
+
+  else
+    return obj
+  end
+end
+
 end; end; end;
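A similarly hedged sketch of the new convert_bignums() helper on its own: it walks a hash or array in place and stringifies Bignum leaves. This assumes a JRuby where Bignum is still a distinct class (as on the JRuby versions Logstash bundles) and that the snippet runs from the gem root.

```ruby
require_relative 'lib/scalyr/common/util'  # assumed to be run from the gem root

record = {
  'seq' => 3,
  'bignumber' => 2000023030042002050202030320240,  # value borrowed from the specs below
  'nested' => [1, 10 ** 30]
}

Scalyr::Common::Util.convert_bignums(record)
# record can now be passed to to_json without hitting the ClassCastException:
# {"seq"=>3, "bignumber"=>"2000023030042002050202030320240", "nested"=>[1, "1000000000000000000000000000000"]}
```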
data/lib/scalyr/constants.rb CHANGED
@@ -1,2 +1,2 @@
 # encoding: utf-8
-PLUGIN_VERSION = "v0.1.14.beta"
+PLUGIN_VERSION = "v0.1.19.beta"
data/logstash-output-scalyr.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-scalyr'
-  s.version = '0.1.14.beta'
+  s.version = '0.1.19.beta'
   s.licenses = ['Apache-2.0']
   s.summary = "Scalyr output plugin for Logstash"
   s.description = "Sends log data collected by Logstash to Scalyr (https://www.scalyr.com)"
data/spec/benchmarks/bignum_fixing.rb ADDED
@@ -0,0 +1,90 @@
+require 'benchmark'
+require 'quantile'
+
+require_relative '../../lib/scalyr/common/util'
+
+# Micro benchmark which measures how long it takes to find all the Bignums in a record and convert them to strings
+
+ITERATIONS = 500
+
+def rand_str(len)
+  return (0...len).map { (65 + rand(26)).chr }.join
+end
+
+def rand_bignum()
+  return 200004000020304050300 + rand(999999)
+end
+
+def generate_hash(widths)
+  result = {}
+  if widths.empty?
+    return rand_bignum()
+  else
+    widths[0].times do
+      result[rand_str(9)] = generate_hash(widths[1..widths.length])
+    end
+    return result
+  end
+end
+
+def generate_data_array_for_spec(spec)
+  data = []
+  ITERATIONS.times do
+    data << generate_hash(spec)
+  end
+
+  data
+end
+
+def run_benchmark_and_print_results(data, run_benchmark_func)
+  puts ""
+  puts "Using %s total keys in a hash" % [Scalyr::Common::Util.flatten(data[0]).count]
+  puts ""
+
+  result = []
+  ITERATIONS.times do |i|
+    result << Benchmark.measure { run_benchmark_func.(data[0]) }
+  end
+
+  sum = result.inject(nil) { |sum, t| sum.nil? ? sum = t : sum += t }
+  avg = sum / result.size
+
+  Benchmark.bm(7, "sum:", "avg:") do |b|
+    [sum, avg]
+  end
+  puts ""
+end
+
+
+puts "Using %s iterations" % [ITERATIONS]
+puts ""
+
+@value = Quantile::Estimator.new
+@prng = Random.new
+
+def convert_bignums(record)
+  Scalyr::Common::Util.convert_bignums(record)
+end
+
+puts "Util.convert_bignums()"
+puts "==============================="
+
+# Around ~200 keys in a hash
+data = generate_data_array_for_spec([4, 4, 3, 4])
+run_benchmark_and_print_results(data, method(:convert_bignums))
+
+# Around ~200 keys in a hash (single level)
+data = generate_data_array_for_spec([200])
+run_benchmark_and_print_results(data, method(:convert_bignums))
+
+# Around ~512 keys in a hash
+data = generate_data_array_for_spec([8, 4, 4, 4])
+run_benchmark_and_print_results(data, method(:convert_bignums))
+
+# Around ~960 keys in a hash
+data = generate_data_array_for_spec([12, 5, 4, 4])
+run_benchmark_and_print_results(data, method(:convert_bignums))
+
+# Around ~2700 keys in a hash
+data = generate_data_array_for_spec([14, 8, 6, 4])
+run_benchmark_and_print_results(data, method(:convert_bignums))
data/spec/logstash/outputs/scalyr_spec.rb CHANGED
@@ -69,7 +69,7 @@ describe LogStash::Outputs::Scalyr do
   end
 
   it "it doesnt include flatten metrics if flattening is disabled" do
-    plugin1 = LogStash::Outputs::Scalyr.new({
+    plugin1 = LogStash::Outputs::Scalyr.new({
       'api_write_token' => '1234',
       'serverhost_field' => 'source_host',
       'log_constants' => ['tags'],
@@ -122,7 +122,7 @@ describe LogStash::Outputs::Scalyr do
     expect(status_event[:attrs]["message"]).to eq("plugin_status: total_requests_sent=20 total_requests_failed=10 total_request_bytes_sent=100 total_compressed_request_bytes_sent=50 total_response_bytes_received=100 total_request_latency_secs=100 total_serialization_duration_secs=100.5000 total_compression_duration_secs=10.2000 compression_type=deflate compression_level=9 total_multi_receive_secs=0 multi_receive_duration_p50=10 multi_receive_duration_p90=18 multi_receive_duration_p99=19 multi_receive_event_count_p50=0 multi_receive_event_count_p90=0 multi_receive_event_count_p99=0 event_attributes_count_p50=0 event_attributes_count_p90=0 event_attributes_count_p99=0 batches_per_multi_receive_p50=0 batches_per_multi_receive_p90=0 batches_per_multi_receive_p99=0 flatten_values_duration_secs_p50=0 flatten_values_duration_secs_p90=0 flatten_values_duration_secs_p99=0")
   end
 
-  it "send_stats is called when events list is empty, but otherwise noop" do
+  it "send_stats is called when events list is empty, but otherwise is noop" do
     quantile_estimator = Quantile::Estimator.new
     plugin.instance_variable_set(:@plugin_metrics, {
       :multi_receive_duration_secs => Quantile::Estimator.new,
@@ -137,6 +137,30 @@ describe LogStash::Outputs::Scalyr do
     plugin.multi_receive([])
   end
 
+  it "send_stats is not called when events list is empty and report_status_for_empty_batches is false" do
+    plugin2 = LogStash::Outputs::Scalyr.new({
+      'api_write_token' => '1234',
+      'serverhost_field' => 'source_host',
+      'log_constants' => ['tags'],
+      'flatten_nested_values' => false,
+      'report_status_for_empty_batches' => false,
+    })
+
+    mock_client_session = MockClientSession.new
+    quantile_estimator = Quantile::Estimator.new
+    plugin2.instance_variable_set(:@plugin_metrics, {
+      :multi_receive_duration_secs => Quantile::Estimator.new,
+      :multi_receive_event_count => Quantile::Estimator.new,
+      :event_attributes_count => Quantile::Estimator.new,
+      :flatten_values_duration_secs => Quantile::Estimator.new
+    })
+    plugin2.instance_variable_set(:@client_session, mock_client_session)
+    expect(plugin2).not_to receive(:send_status)
+    expect(quantile_estimator).not_to receive(:observe)
+    expect(mock_client_session).not_to receive(:post_add_events)
+    plugin2.multi_receive([])
+  end
+
   # Kind of a weak test but I don't see a decent way to write a stronger one without a live client session
   it "send_status only sends posts with is_status = true" do
     # 1. Initial send
@@ -264,6 +288,72 @@ describe LogStash::Outputs::Scalyr do
     end
   end
 
+  context "when configured to flatten values with custom delimiter and deep delimiter fix" do
+    config = {
+      'api_write_token' => '1234',
+      'flatten_tags' => true,
+      'flat_tag_value' => 'true',
+      'flat_tag_prefix' => 'tag_prefix_',
+      'flatten_nested_values' => true,  # this converts into string 'true'
+      'flatten_nested_values_delimiter' => ".",
+      'fix_deep_flattening_delimiters' => true,
+    }
+    plugin = LogStash::Outputs::Scalyr.new(config)
+    it "flattens nested values with a period" do
+      allow(plugin).to receive(:send_status).and_return(nil)
+      plugin.register
+      result = plugin.build_multi_event_request_array(sample_events)
+      body = JSON.parse(result[0][:body])
+      expect(body['events'].size).to eq(3)
+      expect(body['events'][2]['attrs']).to eq({
+        "nested.a" => 1,
+        "nested.b.0" => 3,
+        "nested.b.1" => 4,
+        "nested.b.2" => 5,
+        'seq' => 3,
+        'source_file' => 'my file 3',
+        'source_host' => 'my host 3',
+        'serverHost' => 'Logstash',
+        "tag_prefix_t1" => "true",
+        "tag_prefix_t2" => "true",
+        "tag_prefix_t3" => "true",
+        "parser" => "logstashParser",
+      })
+    end
+  end
+
+  context "when configured to flatten values with custom delimiter, no array flattening" do
+    config = {
+      'api_write_token' => '1234',
+      'flatten_tags' => true,
+      'flat_tag_value' => 'true',
+      'flat_tag_prefix' => 'tag_prefix_',
+      'flatten_nested_values' => true,  # this converts into string 'true'
+      'flatten_nested_arrays' => false,
+      'flatten_nested_values_delimiter' => ".",
+    }
+    plugin = LogStash::Outputs::Scalyr.new(config)
+    it "flattens nested values with a period" do
+      allow(plugin).to receive(:send_status).and_return(nil)
+      plugin.register
+      result = plugin.build_multi_event_request_array(sample_events)
+      body = JSON.parse(result[0][:body])
+      expect(body['events'].size).to eq(3)
+      expect(body['events'][2]['attrs']).to eq({
+        "nested.a" => 1,
+        "nested.b" => [3, 4, 5],
+        'seq' => 3,
+        'source_file' => 'my file 3',
+        'source_host' => 'my host 3',
+        'serverHost' => 'Logstash',
+        "tag_prefix_t1" => "true",
+        "tag_prefix_t2" => "true",
+        "tag_prefix_t3" => "true",
+        "parser" => "logstashParser",
+      })
+    end
+  end
+
   context "when configured to flatten values and tags" do
     config = {
       'api_write_token' => '1234',
@@ -318,5 +408,23 @@ describe LogStash::Outputs::Scalyr do
       })
     end
   end
+
+  context "when receiving an event with Bignums" do
+    config = {
+      'api_write_token' => '1234',
+    }
+    plugin = LogStash::Outputs::Scalyr.new(config)
+    it "doesn't throw an error" do
+      allow(plugin).to receive(:send_status).and_return(nil)
+      plugin.register
+      e = LogStash::Event.new
+      e.set('bignumber', 2000023030042002050202030320240)
+      allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+      result = plugin.build_multi_event_request_array([e])
+      body = JSON.parse(result[0][:body])
+      expect(body['events'].size).to eq(1)
+      expect(plugin.instance_variable_get(:@logger)).to_not receive(:error)
+    end
+  end
   end
 end
data/spec/scalyr/common/util_spec.rb CHANGED
@@ -132,6 +132,70 @@ describe Scalyr::Common::Util do
     expect(Scalyr::Common::Util.flatten(din)).to eq(dout)
   end
 
+  it "flattens a single-level array, no array flattening" do
+    din = [1, 2, 3]
+    dout = [1, 2, 3]
+    expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
+  end
+
+  it "flattens a multi-level array, no array flattening" do
+    din = ['a', 'b', ['c', ['d', 'e', 'f'], 'g'], 'h', 'i']
+    dout = ['a', 'b', ['c', ['d', 'e', 'f'], 'g'], 'h', 'i']
+    expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
+  end
+
+  it "flattens a hash that contains an array, no array flattening" do
+    din = {
+      'a' => 1,
+      'c' => [100, 200, 300]
+    }
+    dout = {
+      'a' => 1,
+      'c' => [100, 200, 300]
+    }
+    expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
+  end
+
+  it "flattens a hash that contains an array that contains a hash, no array flattening" do
+    din = {
+      'a' => 1,
+      'c' => [
+        100,
+        {'d' => 1000, 'e' => 2000},
+        300
+      ]
+    }
+    dout = {
+      'a' => 1,
+      'c' => [
+        100,
+        {'d' => 1000, 'e' => 2000},
+        300
+      ]
+    }
+    expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
+  end
+
+  it "flattens a hash that contains an array that contains a hash that contains an array, no array flattening" do
+    din = {
+      'a' => 1,
+      'c' => [
+        100,
+        {'d' => 1000, 'e' => 2000, 'f' => [4, 5, 6]},
+        300
+      ]
+    }
+    dout = {
+      'a' => 1,
+      'c' => [
+        100,
+        {'d' => 1000, 'e' => 2000, 'f' => [4, 5, 6]},
+        300
+      ]
+    }
+    expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
+  end
+
   it "accepts custom delimiters" do
     din = {
       'a' => 1,
@@ -148,6 +212,42 @@ describe Scalyr::Common::Util do
     expect(Scalyr::Common::Util.flatten(din, ':')).to eq(dout)
   end
 
+  it "accepts custom delimiters with greater depth" do
+    din = {
+      'a' => 1,
+      'b' => {
+        'c' => {
+          'e' => 100
+        },
+        'd' => 200,
+      }
+    }
+    dout = {
+      'a' => 1,
+      'b:c_e' => 100,
+      'b:d' => 200,
+    }
+    expect(Scalyr::Common::Util.flatten(din, ':')).to eq(dout)
+  end
+
+  it "accepts custom delimiters with greater depth and deep delimiters fix" do
+    din = {
+      'a' => 1,
+      'b' => {
+        'c' => {
+          'e' => 100
+        },
+        'd' => 200,
+      }
+    }
+    dout = {
+      'a' => 1,
+      'b:c:e' => 100,
+      'b:d' => 200,
+    }
+    expect(Scalyr::Common::Util.flatten(din, ':', true, true)).to eq(dout)
+  end
+
   it "stringifies non-string keys" do
     din = {
       'a' => 1,
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-scalyr
 version: !ruby/object:Gem::Version
-  version: 0.1.14.beta
+  version: 0.1.19.beta
 platform: ruby
 authors:
 - Edward Chee
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-06-22 00:00:00.000000000 Z
+date: 2021-07-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -132,6 +132,7 @@ files:
 - lib/scalyr/common/util.rb
 - lib/scalyr/constants.rb
 - logstash-output-scalyr.gemspec
+- spec/benchmarks/bignum_fixing.rb
 - spec/benchmarks/flattening_and_serialization.rb
 - spec/benchmarks/metrics_overhead.rb
 - spec/logstash/outputs/scalyr_integration_spec.rb
@@ -4062,6 +4063,7 @@ signing_key:
 specification_version: 4
 summary: Scalyr output plugin for Logstash
 test_files:
+- spec/benchmarks/bignum_fixing.rb
 - spec/benchmarks/flattening_and_serialization.rb
 - spec/benchmarks/metrics_overhead.rb
 - spec/logstash/outputs/scalyr_integration_spec.rb