logstash-output-scalyr 0.1.13 → 0.1.18.beta

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 89b457b34b9579d3e6c7b82c7aed5a5ae54026904deee677d31f0a0ef5ed3e22
- data.tar.gz: 945e0414df96259ba39747e1f79ee8ed6d3c265d0c91aae0c15c91b9578988b9
+ metadata.gz: 9099fcf20201bf8e3905273393a38bb1addefb219e1874a45a439bbfd2dcddbd
+ data.tar.gz: a031faa8657951b66a09c172f6be69b774a2192e661ef99370813abe62250cbb
  SHA512:
- metadata.gz: 411e2319e7fb06de8af26ea876776500327e8fb57002cef3f4a3b153294039b486fe574089f3acdd8761309335dd83b9e3f75dbbd23c5ad701d59702e7d7fe11
- data.tar.gz: d17765c84763f27858548ede5afd45cc0d2ff11ef1462b586afa195fa0186f2cf0318a86463354285acae7d598aa45319305dea7ccc94581aa614a82e56dbe85
+ metadata.gz: c25cad81473d0224fb5c75558e302f6383d69361d3263aeaf0fe619b8c2a3a6866bf0eaf811588b9235edb4e15e11a2d0b95ecb99e2a3045ee6c6e66998e1627
+ data.tar.gz: fed7e02e32c70ce199dbd7ec5841711ff77c65d3a56ebe53eeada8c165d5b8fa603fc15b3c00e0e397682a50899dd141bfdcacdcbb97244e2b26205c92802238
data/CHANGELOG.md CHANGED
@@ -1,5 +1,27 @@
  # Beta

+ ## 0.1.18.beta
+ - Add metrics for successfully sent and failed logstash events, and retries.
+ - Make array flattening optional during nested value flattening with the `flatten_nested_arrays` configuration option.
+
+ ## 0.1.17.beta
+ - Catch errors relating to Bignum conversions present in the ``json`` library and manually convert to string as
+ a workaround.
+
+ ## 0.1.16.beta
+ - Fix race condition in ``register()`` method.
+
+ ## 0.1.15.beta
+ - Only call ``send_status`` method at the end of ``multi_receive()`` if there is at least one
+ record in the batch when ``report_status_for_empty_batches`` config option is set to ``false``.
+ - Update ``register()`` method to use a separate short-lived client session for sending initial
+ client status.
+
+ ## 0.1.14.beta
+ - Add configurable max retries for requests when running into errors.
+ - Add ability to send messages to the dead letter queue if we exhaust all retries and if it is configured.
+ - Log truncated error body for all errors to help with debugging.
+
  ## 0.1.13
  - Fix synchronization of status message sending code to avoid duplicate logs.

data/README.md CHANGED
@@ -10,7 +10,7 @@ You can view documentation for this plugin [on the Scalyr website](https://app.s
  # Quick start

  1. Build the gem, run `gem build logstash-output-scalyr.gemspec`
- 2. Install the gem into a Logstash installation, run `/usr/share/logstash/bin/logstash-plugin install logstash-output-scalyr-0.1.13.gem` or follow the latest official instructions on working with plugins from Logstash.
+ 2. Install the gem into a Logstash installation, run `/usr/share/logstash/bin/logstash-plugin install logstash-output-scalyr-0.1.18.beta.gem` or follow the latest official instructions on working with plugins from Logstash.
  3. Configure the output plugin (e.g. add it to a pipeline .conf)
  4. Restart Logstash
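
For orientation, step 3 usually amounts to adding a scalyr block to a pipeline .conf. The snippet below is an illustrative sketch only, not text from this package: the token is a placeholder, and the non-default options shown are simply the ones introduced in the 0.1.14.beta through 0.1.18.beta releases described in the changelog above.

    output {
      scalyr {
        api_write_token => "YOUR_SCALYR_API_WRITE_TOKEN"
        flatten_nested_values => true
        flatten_nested_values_delimiter => "."
        flatten_nested_arrays => false            # added in 0.1.18.beta
        max_retries => 5                          # added in 0.1.14.beta
        send_to_dlq => true                       # added in 0.1.14.beta
        report_status_for_empty_batches => false  # added in 0.1.15.beta
      }
    }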

@@ -68,7 +68,8 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  # If true, nested values will be flattened (which changes keys to delimiter-separated concatenation of all
  # nested keys).
  config :flatten_nested_values, :validate => :boolean, :default => false
- config :flatten_nested_values_delimiter, :validate => :string, :default => "_"
+ config :flatten_nested_values_delimiter, :validate => :string, :default => "_"
+ config :flatten_nested_arrays, :validate => :boolean, :default => true

  # If true, the 'tags' field will be flattened into key-values where each key is a tag and each value is set to
  # :flat_tag_value
@@ -78,6 +79,10 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base

  # Initial interval in seconds between bulk retries. Doubled on each retry up to `retry_max_interval`
  config :retry_initial_interval, :validate => :number, :default => 1
+ # How many times to retry sending an event before giving up on it
+ config :max_retries, :validate => :number, :default => 5
+ # Whether or not to send messages that failed to send a max_retries amount of times to the DLQ or just drop them
+ config :send_to_dlq, :validate => :boolean, :default => true

  # Set max interval in seconds between bulk retries.
  config :retry_max_interval, :validate => :number, :default => 64
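
As a reading aid (not code from the plugin): with the defaults above, the wait between retry attempts starts at retry_initial_interval and doubles on each attempt, capped at retry_max_interval, for at most max_retries attempts. A standalone Ruby sketch of that schedule, ignoring any jitter the plugin may apply:

    # Illustrative backoff schedule implied by the defaults above.
    retry_initial_interval = 1
    retry_max_interval = 64
    max_retries = 5

    sleep_interval = retry_initial_interval
    schedule = (1..max_retries).map do
      current = sleep_interval
      sleep_interval = [sleep_interval * 2, retry_max_interval].min
      current
    end
    puts schedule.inspect  # => [1, 2, 4, 8, 16]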
@@ -105,6 +110,11 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  # minutes.
  config :status_report_interval, :validate => :number, :default => 300

+ # True to also call send_status when multi_receive() is called with no events.
+ # In some situations (e.g. when logstash is configured with multiple scalyr
+ # plugins conditionally where most are idle) you may want to set this to false
+ config :report_status_for_empty_batches, :validate => :boolean, :default => true
+
  # Set to true to also log status messages with various metrics to stdout in addition to sending
  # this data to Scalyr
  config :log_status_messages_to_stdout, :validate => :boolean, :default => false
@@ -231,7 +241,12 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  # Plugin level (either per batch or event level metrics). Other request
  # level metrics are handled by the HTTP Client class.
  @multi_receive_statistics = {
- :total_multi_receive_secs => 0
+ :total_multi_receive_secs => 0,
+ :total_events_processed => 0,
+ :successful_events_processed => 0,
+ :failed_events_processed => 0,
+ :total_retry_count => 0,
+ :total_java_class_cast_errors => 0
  }
  @plugin_metrics = get_new_metrics

@@ -247,7 +262,25 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  @logger.info(sprintf("Started Scalyr output plugin (%s)." % [PLUGIN_VERSION]), :class => self.class.name)

  # Finally, send a status line to Scalyr
- send_status
+ # We use a special separate short lived client session for sending the initial client status.
+ # This is done to avoid the overhead in case single logstash instance has many scalyr output
+ # plugins configured with conditionals and majority of them are inactive (aka receive no data).
+ # This way we don't need to keep idle long running connection open.
+ initial_send_status_client_session = Scalyr::Common::Client::ClientSession.new(
+ @logger, @add_events_uri,
+ @compression_type, @compression_level, @ssl_verify_peer, @ssl_ca_bundle_path, @append_builtin_cert,
+ @record_stats_for_status, @flush_quantile_estimates_on_status_send,
+ @http_connect_timeout, @http_socket_timeout, @http_request_timeout, @http_pool_max, @http_pool_max_per_route
+ )
+ send_status(initial_send_status_client_session)
+ initial_send_status_client_session.close
+
+ # We also "prime" the main HTTP client here, one which is used for sending subsequent requests.
+ # Here priming just means setting up the client parameters without opening any connections.
+ # Since client writes certs to a temporary file there could be a race in case we don't do that
+ # here since multi_receive() is multi threaded. An alternative would be to put a lock around
+ # client init method (aka client_config())
+ @client_session.client

  end # def register

@@ -317,8 +350,12 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  sleep_interval = sleep_for(sleep_interval)
  exc_sleep += sleep_interval
  exc_retries += 1
+ @stats_lock.synchronize do
+ @multi_receive_statistics[:total_retry_count] += 1
+ end
  message = "Error uploading to Scalyr (will backoff-retry)"
  exc_data = {
+ :error_class => e.e_class,
  :url => e.url.to_s,
  :message => e.message,
  :batch_num => batch_num,
@@ -330,7 +367,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  exc_data[:code] = e.code if e.code
  if @logger.debug? and e.body
  exc_data[:body] = e.body
- elsif e.message == "Invalid JSON response from server" and e.body
+ elsif e.body
  exc_data[:body] = Scalyr::Common::Util.truncate(e.body, 512)
  end
  exc_data[:payload] = "\tSample payload: #{request[:body][0,1024]}..." if @logger.debug?
@@ -343,7 +380,9 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  @logger.error(message, exc_data)
  exc_commonly_retried = false
  end
- retry if @running
+ retry if @running and exc_retries < @max_retries
+ log_retry_failure(multi_event_request, exc_data, exc_retries, exc_sleep)
+ next

  rescue => e
  # Any unexpected errors should be fully logged
@@ -363,7 +402,17 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  }
  exc_sleep += sleep_interval
  exc_retries += 1
- retry if @running
+ @stats_lock.synchronize do
+ @multi_receive_statistics[:total_retry_count] += 1
+ end
+ retry if @running and exc_retries < @max_retries
+ log_retry_failure(multi_event_request, exc_data, exc_retries, exc_sleep)
+ next
+ end
+
+ @stats_lock.synchronize do
+ @multi_receive_statistics[:total_events_processed] += multi_event_request[:logstash_events].length
+ @multi_receive_statistics[:successful_events_processed] += multi_event_request[:logstash_events].length
  end

  if !exc_data.nil?
@@ -385,7 +434,10 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  end
  end

- send_status
+ if @report_status_for_empty_batches or records_count > 0
+ send_status
+ end
+
  return result

  rescue => e
@@ -400,6 +452,27 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  end # def multi_receive


+ def log_retry_failure(multi_event_request, exc_data, exc_retries, exc_sleep)
+ @stats_lock.synchronize do
+ @multi_receive_statistics[:total_events_processed] += multi_event_request[:logstash_events].length
+ @multi_receive_statistics[:failed_events_processed] += multi_event_request[:logstash_events].length
+ end
+ message = "Failed to send #{multi_event_request[:logstash_events].length} events after #{exc_retries} tries."
+ sample_events = Array.new
+ multi_event_request[:logstash_events][0,5].each {|l_event|
+ sample_events << Scalyr::Common::Util.truncate(l_event.to_hash.to_json, 256)
+ }
+ @logger.error(message, :error_data => exc_data, :sample_events => sample_events, :retries => exc_retries, :sleep_time => exc_sleep)
+ if @dlq_writer
+ multi_event_request[:logstash_events].each {|l_event|
+ @dlq_writer.write(l_event, "#{exc_data[:message]}")
+ }
+ else
+ @logger.warn("Dead letter queue not configured, dropping #{multi_event_request[:logstash_events].length} events after #{exc_retries} tries.", :sample_events => sample_events)
+ end
+ end
+
+
  # Builds an array of multi-event requests from LogStash events
  # Each array element is a request that groups multiple events (to be posted to Scalyr's addEvents endpoint)
  #
@@ -428,6 +501,8 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  current_threads = Hash.new
  # Create a Scalyr event object for each record in the chunk
  scalyr_events = Array.new
+ # Track the logstash events in each chunk to send them to the dlq in case of an error
+ l_events = Array.new

  thread_ids = Hash.new
  next_id = 1 #incrementing thread id for the session
@@ -557,7 +632,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  # flatten record
  if @flatten_nested_values
  start_time = Time.now.to_f
- record = Scalyr::Common::Util.flatten(record, delimiter=@flatten_nested_values_delimiter)
+ record = Scalyr::Common::Util.flatten(record, delimiter=@flatten_nested_values_delimiter, flatten_arrays=@flatten_nested_arrays)
  end_time = Time.now.to_f
  flatten_nested_values_duration = end_time - start_time
  end
@@ -607,6 +682,21 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  ).force_encoding('UTF-8')
  end
  event_json = scalyr_event.to_json
+ rescue Java::JavaLang::ClassCastException => e
+ # Most likely we ran into the issue described here: https://github.com/flori/json/issues/336
+ # Because of the version of jruby logstash works with we don't have the option to just update this away,
+ # so if we run into it we convert bignums into strings so we can get the data in at least.
+ # This is fixed in JRuby 9.2.7, which includes json 2.2.0
+ @logger.warn("Error serializing events to JSON, likely due to the presence of Bignum values. Converting Bignum values to strings.")
+ @stats_lock.synchronize do
+ @multi_receive_statistics[:total_java_class_cast_errors] += 1
+ end
+ Scalyr::Common::Util.convert_bignums(scalyr_event)
+ event_json = scalyr_event.to_json
+ log_json = nil
+ if add_log
+ log_json = logs[log_identifier].to_json
+ end
  end

  # generate new request if json size of events in the array exceed maximum request buffer size
@@ -619,9 +709,10 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  # make sure we always have at least one event
  if scalyr_events.size == 0
  scalyr_events << scalyr_event
+ l_events << l_event
  append_event = false
  end
- multi_event_request = self.create_multi_event_request(scalyr_events, current_threads, logs)
+ multi_event_request = self.create_multi_event_request(scalyr_events, l_events, current_threads, logs)
  multi_event_request_array << multi_event_request

  total_bytes = 0
@@ -629,19 +720,21 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  logs = Hash.new
  logs_ids = Hash.new
  scalyr_events = Array.new
+ l_events = Array.new
  end

  # if we haven't consumed the current event already
  # add it to the end of our array and keep track of the json bytesize
  if append_event
  scalyr_events << scalyr_event
+ l_events << l_event
  total_bytes += add_bytes
  end

  }

  # create a final request with any left over events
- multi_event_request = self.create_multi_event_request(scalyr_events, current_threads, logs)
+ multi_event_request = self.create_multi_event_request(scalyr_events, l_events, current_threads, logs)
  multi_event_request_array << multi_event_request
  multi_event_request_array
  end
@@ -659,7 +752,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  # A request comprises multiple Scalyr Events. This function creates a request hash for
  # final upload to Scalyr (from an array of events, and an optional hash of current threads)
  # Note: The request body field will be json-encoded.
- def create_multi_event_request(scalyr_events, current_threads, current_logs)
+ def create_multi_event_request(scalyr_events, logstash_events, current_threads, current_logs)

  body = {
  :session => @session_id + Thread.current.object_id.to_s,
@@ -692,10 +785,22 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base

  # We time serialization to get some insight on how long it takes to serialize the request body
  start_time = Time.now.to_f
- serialized_body = body.to_json
+ begin
+ serialized_body = body.to_json
+ rescue Java::JavaLang::ClassCastException => e
+ @logger.warn("Error serializing events to JSON, likely due to the presence of Bignum values. Converting Bignum values to strings.")
+ @stats_lock.synchronize do
+ @multi_receive_statistics[:total_java_class_cast_errors] += 1
+ end
+ Scalyr::Common::Util.convert_bignums(body)
+ serialized_body = body.to_json
+ end
  end_time = Time.now.to_f
  serialization_duration = end_time - start_time
- { :body => serialized_body, :record_count => scalyr_events.size, :serialization_duration => serialization_duration }
+ {
+ :body => serialized_body, :record_count => scalyr_events.size, :serialization_duration => serialization_duration,
+ :logstash_events => logstash_events
+ }

  end # def create_multi_event_request

@@ -744,7 +849,8 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  # Finally, note that there could be multiple instances of this plugin (one per worker), in which case each worker
  # thread sends their own status updates. This is intentional so that we know how much data each worker thread is
  # uploading to Scalyr over time.
- def send_status
+ def send_status(client_session = nil)
+ client_session = @client_session if client_session.nil?

  status_event = {
  :ts => (Time.now.to_f * (10**9)).round,
@@ -763,7 +869,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  # echee TODO: get instance stats from session and create a status log line
  msg = 'plugin_status: '
  cnt = 0
- @client_session.get_stats.each do |k, v|
+ client_session.get_stats.each do |k, v|
  val = v.instance_of?(Float) ? sprintf("%.4f", v) : v
  val = val.nil? ? 0 : val
  msg << ' ' if cnt > 0
@@ -781,15 +887,24 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  status_event[:attrs]['serverHost'] = @node_hostname
  status_event[:attrs]['parser'] = @status_parser
  end
- multi_event_request = create_multi_event_request([status_event], nil, nil)
+ multi_event_request = create_multi_event_request([status_event], nil, nil, nil)
  begin
- @client_session.post_add_events(multi_event_request[:body], true, 0)
+ client_session.post_add_events(multi_event_request[:body], true, 0)
  rescue => e
- @logger.warn(
- "Unexpected error occurred while uploading status to Scalyr",
- :error_message => e.message,
- :error_class => e.class.name
- )
+ if e.body
+ @logger.warn(
+ "Unexpected error occurred while uploading status to Scalyr",
+ :error_message => e.message,
+ :error_class => e.class.name,
+ :body => Scalyr::Common::Util.truncate(e.body, 512)
+ )
+ else
+ @logger.warn(
+ "Unexpected error occurred while uploading status to Scalyr",
+ :error_message => e.message,
+ :error_class => e.class.name
+ )
+ end
  return
  end
  @last_status_transmit_time = Time.now()
@@ -7,13 +7,14 @@ module Scalyr; module Common; module Client
  #---------------------------------------------------------------------------------------------------------------------
  class ServerError < StandardError

- attr_reader :code, :url, :body
+ attr_reader :code, :url, :body, :e_class

- def initialize(msg=nil, code=nil, url=nil, body=nil)
+ def initialize(msg=nil, code=nil, url=nil, body=nil, e_class="Scalyr::Common::Client::ServerError")
  super(msg)
  @code = code.to_i
  @url = url
  @body = body
+ @e_class = e_class
  end

  def is_commonly_retried?
@@ -33,13 +34,14 @@ end
  #---------------------------------------------------------------------------------------------------------------------
  class ClientError < StandardError

- attr_reader :code, :url, :body
+ attr_reader :code, :url, :body, :e_class

- def initialize(msg=nil, url=nil)
+ def initialize(msg=nil, url=nil, e_class="Scalyr::Common::Client::ClientError")
  super(msg)
  @code = nil # currently no way to get this from Net::HTTP::Persistent::Error
  @url = url
  @body = nil
+ @e_class = e_class
  end

  def is_commonly_retried?
@@ -236,15 +238,10 @@ class ClientSession
  bytes_received = response.body.bytesize # echee: double check
  # echee TODO add more statistics

- # TODO: Manticore doesn't raise SSL errors as this but as "UnknownExceptions", need to dig in and see if there is a
- # way to detect that it is from SSL.
- rescue OpenSSL::SSL::SSLError => e
- raise e
-
  rescue Manticore::ManticoreException => e
  # The underlying persistent-connection library automatically retries when there are network-related errors.
  # Eventually, it will give up and raise this generic error, at which time, we convert it to a ClientError
- raise ClientError.new(e.message, @add_events_uri)
+ raise ClientError.new(e.message, @add_events_uri, e.class.name)

  ensure
  if @record_stats_for_status or !is_status
@@ -270,6 +267,7 @@ class ClientSession


  def close
+ @client.close if @client
  end # def close

@@ -4,7 +4,7 @@ module Scalyr; module Common; module Util;
  # Flattens a hash or array, returning a hash where keys are a delimiter-separated string concatenation of all
  # nested keys. Returned keys are always strings. If a non-hash or array is provided, raises TypeError.
  # Please see rspec util_spec.rb for expected behavior.
- def self.flatten(obj, delimiter='_')
+ def self.flatten(obj, delimiter='_', flatten_arrays=true)

  # base case is input object is not enumerable, in which case simply return it
  if !obj.respond_to?(:each)
@@ -19,8 +19,8 @@ def self.flatten(obj, delimiter='_')

  # input object is a hash
  obj.each do |key, value|
- if value.respond_to?(:each)
- flatten(value).each do |subkey, subvalue|
+ if (flatten_arrays and value.respond_to?(:each)) or value.respond_to?(:has_key?)
+ flatten(value, delimiter, flatten_arrays).each do |subkey, subvalue|
  result["#{key}#{delimiter}#{subkey}"] = subvalue
  end
  else
@@ -28,18 +28,23 @@ def self.flatten(obj, delimiter='_')
  end
  end

- else
+ elsif flatten_arrays

  # input object is an array or set
  obj.each_with_index do |value, index|
  if value.respond_to?(:each)
- flatten(value).each do |subkey, subvalue|
+ flatten(value, delimiter, flatten_arrays).each do |subkey, subvalue|
  result["#{index}#{delimiter}#{subkey}"] = subvalue
  end
  else
  result["#{index}"] = value
  end
  end
+
+ else
+
+ result = obj
+
  end

  return result
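
To make the new flatten_arrays flag concrete, here is a small Ruby illustration of how the same record flattens with and without array flattening; the expected outputs mirror the specs further down in this diff:

    record = { 'nested' => { 'a' => 1, 'b' => [3, 4, 5] } }

    Scalyr::Common::Util.flatten(record, '.')
    # => { "nested.a" => 1, "nested.b.0" => 3, "nested.b.1" => 4, "nested.b.2" => 5 }

    Scalyr::Common::Util.flatten(record, '.', flatten_arrays=false)
    # => { "nested.a" => 1, "nested.b" => [3, 4, 5] }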
@@ -52,5 +57,26 @@ def self.truncate(content, max)
  return content
  end

+ def self.convert_bignums(obj)
+ if obj.respond_to?(:has_key?) and obj.respond_to?(:each)
+ # input object is a hash
+ obj.each do |key, value|
+ obj[key] = convert_bignums(value)
+ end
+
+ elsif obj.respond_to?(:each)
+ # input object is an array or set
+ obj.each_with_index do |value, index|
+ obj[index] = convert_bignums(value)
+ end
+
+ elsif obj.is_a? Bignum
+ return obj.to_s
+
+ else
+ return obj
+ end
+ end
+
  end; end; end;
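
For completeness, a brief usage sketch of the helper added above (the values are reused from the specs and benchmark elsewhere in this diff; the result is shown as a comment):

    record = { 'bignumber' => 2000023030042002050202030320240, 'nested' => [200004000020304050300] }
    Scalyr::Common::Util.convert_bignums(record)
    # record is mutated in place, with Bignum values converted to strings:
    # { 'bignumber' => "2000023030042002050202030320240", 'nested' => ["200004000020304050300"] }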

@@ -1,2 +1,2 @@
  # encoding: utf-8
- PLUGIN_VERSION = "v0.1.13"
+ PLUGIN_VERSION = "v0.1.18.beta"
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-output-scalyr'
- s.version = '0.1.13'
+ s.version = '0.1.18.beta'
  s.licenses = ['Apache-2.0']
  s.summary = "Scalyr output plugin for Logstash"
  s.description = "Sends log data collected by Logstash to Scalyr (https://www.scalyr.com)"
@@ -0,0 +1,90 @@
+ require 'benchmark'
+ require 'quantile'
+
+ require_relative '../../lib/scalyr/common/util'
+
+ # Micro benchmark which measures how long it takes to find all the Bignums in a record and convert them to strings
+
+ ITERATIONS = 500
+
+ def rand_str(len)
+ return (0...len).map { (65 + rand(26)).chr }.join
+ end
+
+ def rand_bignum()
+ return 200004000020304050300 + rand(999999)
+ end
+
+ def generate_hash(widths)
+ result = {}
+ if widths.empty?
+ return rand_bignum()
+ else
+ widths[0].times do
+ result[rand_str(9)] = generate_hash(widths[1..widths.length])
+ end
+ return result
+ end
+ end
+
+ def generate_data_array_for_spec(spec)
+ data = []
+ ITERATIONS.times do
+ data << generate_hash(spec)
+ end
+
+ data
+ end
+
+ def run_benchmark_and_print_results(data, run_benchmark_func)
+ puts ""
+ puts "Using %s total keys in a hash" % [Scalyr::Common::Util.flatten(data[0]).count]
+ puts ""
+
+ result = []
+ ITERATIONS.times do |i|
+ result << Benchmark.measure { run_benchmark_func.(data[0]) }
+ end
+
+ sum = result.inject(nil) { |sum, t| sum.nil? ? sum = t : sum += t }
+ avg = sum / result.size
+
+ Benchmark.bm(7, "sum:", "avg:") do |b|
+ [sum, avg]
+ end
+ puts ""
+ end
+
+
+ puts "Using %s iterations" % [ITERATIONS]
+ puts ""
+
+ @value = Quantile::Estimator.new
+ @prng = Random.new
+
+ def convert_bignums(record)
+ Scalyr::Common::Util.convert_bignums(record)
+ end
+
+ puts "Util.convert_bignums()"
+ puts "==============================="
+
+ # Around ~200 keys in a hash
+ data = generate_data_array_for_spec([4, 4, 3, 4])
+ run_benchmark_and_print_results(data, method(:convert_bignums))
+
+ # Around ~200 keys in a hash (single level)
+ data = generate_data_array_for_spec([200])
+ run_benchmark_and_print_results(data, method(:convert_bignums))
+
+ # Around ~512 keys in a hash
+ data = generate_data_array_for_spec([8, 4, 4, 4])
+ run_benchmark_and_print_results(data, method(:convert_bignums))
+
+ # Around ~960 keys in a hash
+ data = generate_data_array_for_spec([12, 5, 4, 4])
+ run_benchmark_and_print_results(data, method(:convert_bignums))
+
+ # Around ~2700 keys in a hash
+ data = generate_data_array_for_spec([14, 8, 6, 4])
+ run_benchmark_and_print_results(data, method(:convert_bignums))
@@ -28,8 +28,11 @@ describe LogStash::Outputs::Scalyr do
  plugin = LogStash::Outputs::Scalyr.new({'api_write_token' => '1234'})
  plugin.register
  plugin.instance_variable_set(:@running, false)
- expect(plugin.instance_variable_get(:@logger)).to receive(:error).with("Error uploading to Scalyr (will backoff-retry)",
+ allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+ plugin.multi_receive(sample_events)
+ expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
  {
+ :error_class=>"Scalyr::Common::Client::ServerError",
  :batch_num=>1,
  :code=>401,
  :message=>"error/client/badParam",
@@ -37,10 +40,10 @@ describe LogStash::Outputs::Scalyr do
  :record_count=>3,
  :total_batches=>1,
  :url=>"https://agent.scalyr.com/addEvents",
- :will_retry_in_seconds=>2
+ :will_retry_in_seconds=>2,
+ :body=>"{\n \"message\": \"Couldn't decode API token ...234.\",\n \"status\": \"error/client/badParam\"\n}"
  }
  )
- plugin.multi_receive(sample_events)
  end
  end

@@ -49,8 +52,11 @@ describe LogStash::Outputs::Scalyr do
  plugin = LogStash::Outputs::Scalyr.new({'api_write_token' => '1234', 'ssl_ca_bundle_path' => '/fakepath/nocerts', 'append_builtin_cert' => false})
  plugin.register
  plugin.instance_variable_set(:@running, false)
- expect(plugin.instance_variable_get(:@logger)).to receive(:error).with("Error uploading to Scalyr (will backoff-retry)",
+ allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+ plugin.multi_receive(sample_events)
+ expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
  {
+ :error_class=>"Manticore::UnknownException",
  :batch_num=>1,
  :message=>"Unexpected error: java.security.InvalidAlgorithmParameterException: the trustAnchors parameter must be non-empty",
  :payload_size=>781,
@@ -60,7 +66,6 @@ describe LogStash::Outputs::Scalyr do
  :will_retry_in_seconds=>2
  }
  )
- plugin.multi_receive(sample_events)
  end
  end

@@ -73,8 +78,11 @@ describe LogStash::Outputs::Scalyr do
  plugin = LogStash::Outputs::Scalyr.new({'api_write_token' => '1234', 'append_builtin_cert' => false})
  plugin.register
  plugin.instance_variable_set(:@running, false)
- expect(plugin.instance_variable_get(:@logger)).to receive(:error).with("Error uploading to Scalyr (will backoff-retry)",
+ allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+ plugin.multi_receive(sample_events)
+ expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
  {
+ :error_class=>"Manticore::UnknownException",
  :batch_num=>1,
  :message=>"Unexpected error: java.security.InvalidAlgorithmParameterException: the trustAnchors parameter must be non-empty",
  :payload_size=>781,
@@ -84,7 +92,6 @@ describe LogStash::Outputs::Scalyr do
  :will_retry_in_seconds=>2
  }
  )
- plugin.multi_receive(sample_events)
  end
  ensure
  `sudo mv /tmp/system_certs #{OpenSSL::X509::DEFAULT_CERT_DIR}`
@@ -110,8 +117,11 @@ describe LogStash::Outputs::Scalyr do
  plugin = LogStash::Outputs::Scalyr.new({'api_write_token' => '1234', 'scalyr_server' => 'https://invalid.mitm.should.fail.test.agent.scalyr.com:443'})
  plugin.register
  plugin.instance_variable_set(:@running, false)
- expect(plugin.instance_variable_get(:@logger)).to receive(:error).with("Error uploading to Scalyr (will backoff-retry)",
+ allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+ plugin.multi_receive(sample_events)
+ expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
  {
+ :error_class=>"Manticore::UnknownException",
  :batch_num=>1,
  :message=>"Host name 'invalid.mitm.should.fail.test.agent.scalyr.com' does not match the certificate subject provided by the peer (CN=*.scalyr.com)",
  :payload_size=>781,
@@ -121,7 +131,6 @@ describe LogStash::Outputs::Scalyr do
  :will_retry_in_seconds=>2
  }
  )
- plugin.multi_receive(sample_events)
  ensure
  # Clean up the hosts file
  `sudo truncate -s 0 /etc/hosts`
@@ -129,6 +138,17 @@ describe LogStash::Outputs::Scalyr do
  end
  end
  end
+
+ context "when an error occurs with retries at 5" do
+ it "exits after 5 retries and emits a log" do
+ plugin = LogStash::Outputs::Scalyr.new({'retry_initial_interval' => 0.1, 'api_write_token' => '1234', 'ssl_ca_bundle_path' => '/fakepath/nocerts', 'append_builtin_cert' => false})
+ plugin.register
+ allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+ plugin.multi_receive(sample_events)
+ expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Failed to send 3 events after 5 tries.", anything
+ )
+ end
+ end
  end

  describe "response_handling_tests" do
@@ -145,6 +165,7 @@ describe LogStash::Outputs::Scalyr do
  plugin.multi_receive(sample_events)
  expect(plugin.instance_variable_get(:@logger)).to have_received(:debug).with("Error uploading to Scalyr (will backoff-retry)",
  {
+ :error_class=>"Scalyr::Common::Client::ServerError",
  :batch_num=>1,
  :code=>503,
  :message=>"Invalid JSON response from server",
@@ -172,6 +193,7 @@ describe LogStash::Outputs::Scalyr do
  plugin.multi_receive(sample_events)
  expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
  {
+ :error_class=>"Scalyr::Common::Client::ServerError",
  :batch_num=>1,
  :code=>500,
  :message=>"Invalid JSON response from server",
@@ -199,6 +221,7 @@ describe LogStash::Outputs::Scalyr do
  plugin.multi_receive(sample_events)
  expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
  {
+ :error_class=>"Scalyr::Common::Client::ServerError",
  :batch_num=>1,
  :code=>500,
  :message=>"Invalid JSON response from server",
@@ -212,6 +235,22 @@ describe LogStash::Outputs::Scalyr do
  )
  end
  end
+
+ context 'when DLQ is enabled' do
+ let(:dlq_writer) { double('DLQ writer') }
+ it 'should send the event to the DLQ' do
+ stub_request(:post, "https://agent.scalyr.com/addEvents").
+ to_return(status: 500, body: "stubbed response", headers: {})
+
+ plugin = LogStash::Outputs::Scalyr.new({'api_write_token' => '1234', 'ssl_ca_bundle_path' => '/fakepath/nocerts', 'append_builtin_cert' => false})
+ plugin.register
+ plugin.instance_variable_set(:@running, false)
+ plugin.instance_variable_set('@dlq_writer', dlq_writer)
+
+ expect(dlq_writer).to receive(:write).exactly(3).times.with(anything, anything)
+ plugin.multi_receive(sample_events)
+ end
+ end
  end

  end
@@ -69,7 +69,7 @@ describe LogStash::Outputs::Scalyr do
  end

  it "it doesnt include flatten metrics if flattening is disabled" do
- plugin1 = LogStash::Outputs::Scalyr.new({
+ plugin1 = LogStash::Outputs::Scalyr.new({
  'api_write_token' => '1234',
  'serverhost_field' => 'source_host',
  'log_constants' => ['tags'],
@@ -122,7 +122,7 @@ describe LogStash::Outputs::Scalyr do
  expect(status_event[:attrs]["message"]).to eq("plugin_status: total_requests_sent=20 total_requests_failed=10 total_request_bytes_sent=100 total_compressed_request_bytes_sent=50 total_response_bytes_received=100 total_request_latency_secs=100 total_serialization_duration_secs=100.5000 total_compression_duration_secs=10.2000 compression_type=deflate compression_level=9 total_multi_receive_secs=0 multi_receive_duration_p50=10 multi_receive_duration_p90=18 multi_receive_duration_p99=19 multi_receive_event_count_p50=0 multi_receive_event_count_p90=0 multi_receive_event_count_p99=0 event_attributes_count_p50=0 event_attributes_count_p90=0 event_attributes_count_p99=0 batches_per_multi_receive_p50=0 batches_per_multi_receive_p90=0 batches_per_multi_receive_p99=0 flatten_values_duration_secs_p50=0 flatten_values_duration_secs_p90=0 flatten_values_duration_secs_p99=0")
  end

- it "send_stats is called when events list is empty, but otherwise noop" do
+ it "send_stats is called when events list is empty, but otherwise is noop" do
  quantile_estimator = Quantile::Estimator.new
  plugin.instance_variable_set(:@plugin_metrics, {
  :multi_receive_duration_secs => Quantile::Estimator.new,
@@ -137,6 +137,30 @@ describe LogStash::Outputs::Scalyr do
  plugin.multi_receive([])
  end

+ it "send_stats is not called when events list is empty and report_status_for_empty_batches is false" do
+ plugin2 = LogStash::Outputs::Scalyr.new({
+ 'api_write_token' => '1234',
+ 'serverhost_field' => 'source_host',
+ 'log_constants' => ['tags'],
+ 'flatten_nested_values' => false,
+ 'report_status_for_empty_batches' => false,
+ })
+
+ mock_client_session = MockClientSession.new
+ quantile_estimator = Quantile::Estimator.new
+ plugin2.instance_variable_set(:@plugin_metrics, {
+ :multi_receive_duration_secs => Quantile::Estimator.new,
+ :multi_receive_event_count => Quantile::Estimator.new,
+ :event_attributes_count => Quantile::Estimator.new,
+ :flatten_values_duration_secs => Quantile::Estimator.new
+ })
+ plugin2.instance_variable_set(:@client_session, mock_client_session)
+ expect(plugin2).not_to receive(:send_status)
+ expect(quantile_estimator).not_to receive(:observe)
+ expect(mock_client_session).not_to receive(:post_add_events)
+ plugin2.multi_receive([])
+ end
+
  # Kind of a weak test but I don't see a decent way to write a stronger one without a live client session
  it "send_status only sends posts with is_status = true" do
  # 1. Initial send
@@ -249,9 +273,41 @@ describe LogStash::Outputs::Scalyr do
  expect(body['events'].size).to eq(3)
  expect(body['events'][2]['attrs']).to eq({
  "nested.a" => 1,
- "nested.b_0" => 3,
- "nested.b_1" => 4,
- "nested.b_2" => 5,
+ "nested.b.0" => 3,
+ "nested.b.1" => 4,
+ "nested.b.2" => 5,
+ 'seq' => 3,
+ 'source_file' => 'my file 3',
+ 'source_host' => 'my host 3',
+ 'serverHost' => 'Logstash',
+ "tag_prefix_t1" => "true",
+ "tag_prefix_t2" => "true",
+ "tag_prefix_t3" => "true",
+ "parser" => "logstashParser",
+ })
+ end
+ end
+
+ context "when configured to flatten values with custom delimiter, no array flattening" do
+ config = {
+ 'api_write_token' => '1234',
+ 'flatten_tags' => true,
+ 'flat_tag_value' => 'true',
+ 'flat_tag_prefix' => 'tag_prefix_',
+ 'flatten_nested_values' => true, # this converts into string 'true'
+ 'flatten_nested_arrays' => false,
+ 'flatten_nested_values_delimiter' => ".",
+ }
+ plugin = LogStash::Outputs::Scalyr.new(config)
+ it "flattens nested values with a period" do
+ allow(plugin).to receive(:send_status).and_return(nil)
+ plugin.register
+ result = plugin.build_multi_event_request_array(sample_events)
+ body = JSON.parse(result[0][:body])
+ expect(body['events'].size).to eq(3)
+ expect(body['events'][2]['attrs']).to eq({
+ "nested.a" => 1,
+ "nested.b" => [3, 4, 5],
  'seq' => 3,
  'source_file' => 'my file 3',
  'source_host' => 'my host 3',
@@ -318,5 +374,23 @@ describe LogStash::Outputs::Scalyr do
  })
  end
  end
+
+ context "when receiving an event with Bignums" do
+ config = {
+ 'api_write_token' => '1234',
+ }
+ plugin = LogStash::Outputs::Scalyr.new(config)
+ it "doesn't throw an error" do
+ allow(plugin).to receive(:send_status).and_return(nil)
+ plugin.register
+ e = LogStash::Event.new
+ e.set('bignumber', 2000023030042002050202030320240)
+ allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+ result = plugin.build_multi_event_request_array([e])
+ body = JSON.parse(result[0][:body])
+ expect(body['events'].size).to eq(1)
+ expect(plugin.instance_variable_get(:@logger)).to_not receive(:error)
+ end
+ end
  end
  end
@@ -132,6 +132,70 @@ describe Scalyr::Common::Util do
  expect(Scalyr::Common::Util.flatten(din)).to eq(dout)
  end

+ it "flattens a single-level array, no array flattening" do
+ din = [1, 2, 3]
+ dout = [1, 2, 3]
+ expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
+ end
+
+ it "flattens a multi-level array, no array flattening" do
+ din = ['a', 'b', ['c', ['d', 'e', 'f'], 'g'], 'h', 'i']
+ dout = ['a', 'b', ['c', ['d', 'e', 'f'], 'g'], 'h', 'i']
+ expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
+ end
+
+ it "flattens a hash that contains an array, no array flattening" do
+ din = {
+ 'a' => 1,
+ 'c' => [100, 200, 300]
+ }
+ dout = {
+ 'a' => 1,
+ 'c' => [100, 200, 300]
+ }
+ expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
+ end
+
+ it "flattens a hash that contains an array that contains a hash, no array flattening" do
+ din = {
+ 'a' => 1,
+ 'c' => [
+ 100,
+ {'d' => 1000, 'e' => 2000},
+ 300
+ ]
+ }
+ dout = {
+ 'a' => 1,
+ 'c' => [
+ 100,
+ {'d' => 1000, 'e' => 2000},
+ 300
+ ]
+ }
+ expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
+ end
+
+ it "flattens a hash that contains an array that contains a hash that contains an array, no array flattening" do
+ din = {
+ 'a' => 1,
+ 'c' => [
+ 100,
+ {'d' => 1000, 'e' => 2000, 'f' => [4, 5, 6]},
+ 300
+ ]
+ }
+ dout = {
+ 'a' => 1,
+ 'c' => [
+ 100,
+ {'d' => 1000, 'e' => 2000, 'f' => [4, 5, 6]},
+ 300
+ ]
+ }
+ expect(Scalyr::Common::Util.flatten(din, "_", flatten_arrays=false)).to eq(dout)
+ end
+
  it "accepts custom delimiters" do
  din = {
  'a' => 1,
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-scalyr
  version: !ruby/object:Gem::Version
- version: 0.1.13
+ version: 0.1.18.beta
  platform: ruby
  authors:
  - Edward Chee
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-06-19 00:00:00.000000000 Z
+ date: 2021-07-06 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -132,6 +132,7 @@ files:
  - lib/scalyr/common/util.rb
  - lib/scalyr/constants.rb
  - logstash-output-scalyr.gemspec
+ - spec/benchmarks/bignum_fixing.rb
  - spec/benchmarks/flattening_and_serialization.rb
  - spec/benchmarks/metrics_overhead.rb
  - spec/logstash/outputs/scalyr_integration_spec.rb
@@ -4052,9 +4053,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - ">"
  - !ruby/object:Gem::Version
- version: '0'
+ version: 1.3.1
  requirements: []
  rubyforge_project:
  rubygems_version: 2.7.10
@@ -4062,6 +4063,7 @@ signing_key:
  specification_version: 4
  summary: Scalyr output plugin for Logstash
  test_files:
+ - spec/benchmarks/bignum_fixing.rb
  - spec/benchmarks/flattening_and_serialization.rb
  - spec/benchmarks/metrics_overhead.rb
  - spec/logstash/outputs/scalyr_integration_spec.rb