logstash-output-scalyr 0.1.20.beta → 0.1.24.beta
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +10 -0
- data/Gemfile +2 -2
- data/README.md +7 -1
- data/lib/logstash/outputs/scalyr.rb +98 -63
- data/lib/scalyr/common/util.rb +7 -7
- data/lib/scalyr/constants.rb +1 -1
- data/logstash-output-scalyr.gemspec +1 -1
- data/spec/logstash/outputs/scalyr_integration_spec.rb +8 -2
- data/spec/logstash/outputs/scalyr_spec.rb +126 -2
- data/spec/scalyr/common/util_spec.rb +234 -0
- data/vendor/bundle/jruby/2.5.0/bin/htmldiff +1 -1
- data/vendor/bundle/jruby/2.5.0/bin/ldiff +1 -1
- data/vendor/bundle/jruby/2.5.0/cache/manticore-0.7.1-java.gem +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/APACHE-LICENSE-2.0.txt +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/CHANGELOG.md +12 -3
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/Gemfile +2 -1
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/LICENSE.txt +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/README.md +17 -4
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/Rakefile +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/ext/manticore/org/manticore/HttpDeleteWithEntity.java +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/ext/manticore/org/manticore/HttpGetWithEntity.java +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/ext/manticore/org/manticore/Manticore.java +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/gem-public_cert.pem +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/commons-codec/commons-codec/1.10/commons-codec-1.10.jar +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/commons-logging/commons-logging/1.2/commons-logging-1.2.jar +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/faraday/adapter/manticore.rb +1 -6
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/manticore/client/proxies.rb +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/manticore/client.rb +24 -16
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/manticore/cookie.rb +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/manticore/facade.rb +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/manticore/java_extensions.rb +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/manticore/response.rb +12 -12
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/manticore/stubbed_response.rb +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/manticore/version.rb +1 -1
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/manticore.rb +26 -2
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/manticore_jars.rb +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/org/apache/httpcomponents/httpmime/4.5.2/httpmime-4.5.2.jar +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/lib/org/manticore/manticore-ext.jar +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/manticore.gemspec +4 -2
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/spec/manticore/client_proxy_spec.rb +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/spec/manticore/client_spec.rb +15 -3
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/spec/manticore/cookie_spec.rb +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/spec/manticore/facade_spec.rb +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/spec/manticore/response_spec.rb +1 -1
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/spec/manticore/stubbed_response_spec.rb +0 -0
- data/vendor/bundle/jruby/2.5.0/gems/{manticore-0.6.4-java → manticore-0.7.1-java}/spec/spec_helper.rb +0 -0
- data/vendor/bundle/jruby/2.5.0/specifications/{manticore-0.6.4-java.gemspec → manticore-0.7.1-java.gemspec} +10 -9
- metadata +39 -39
- data/vendor/bundle/jruby/2.5.0/cache/manticore-0.6.4-java.gem +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 312d501ddc98f27b8d2be32bb20cb29439e7ce096d5f12870ef1b1bf095c32ee
+  data.tar.gz: c0f6c4a51f42acef8266ee30480fc00a19e59098d7b957deddcc017bade71695
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2628a0bcfbe9a13dfc5bb9fd93360ef7629fca158cc325184817b4f15492ff5fcb28c03ca992c08dbbf9dc7953d2715b071b5d94f67912c36f66625f7bcf3234
+  data.tar.gz: 1d009fe06794e8f2d15737da874609600e57569fe3d6be0297fb23a1ab99d375f3ac2e9bdbf52b653baa85222bc3d5da001ac8d68a3848c8099ed7eb1510175f
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,15 @@
 # Beta
 
+## 0.1.23.beta
+- Add testing support for disabling estimation of serialized event size for each event in the batch.
+
+## 0.1.22.beta
+- Add new plugin metric for tracking the duration of ``build_multi_event_request_array`` method.
+- Update internal dependencies (``manticore``) to latest stable version.
+
+## 0.1.21.beta
+- Fix issue with iterative flattening function when dealing with empty collections.
+
 ## 0.1.20.beta
 - Rewrite flattening function to no longer be recursive, to help avoid maxing out the stack.
 - Added a configurable value `flattening_max_key_count` to create a limit on how large of a record we can flatten.
data/Gemfile
CHANGED
data/README.md
CHANGED
@@ -10,7 +10,7 @@ You can view documentation for this plugin [on the Scalyr website](https://app.s
 # Quick start
 
 1. Build the gem, run `gem build logstash-output-scalyr.gemspec`
-2. Install the gem into a Logstash installation, run `/usr/share/logstash/bin/logstash-plugin install logstash-output-scalyr-0.1.
+2. Install the gem into a Logstash installation, run `/usr/share/logstash/bin/logstash-plugin install logstash-output-scalyr-0.1.22.beta.gem` or follow the latest official instructions on working with plugins from Logstash.
 3. Configure the output plugin (e.g. add it to a pipeline .conf)
 4. Restart Logstash
 
@@ -321,6 +321,12 @@ If you want to run just the unit tests, you can run the command displayed below.
 bundle exec rspec spec/logstash/outputs/scalyr_spec.rb spec/scalyr/common/util_spec.rb
 ```
 
+Or to run a single test function defined on line XXX
+
+```bash
+bundle exec rspec spec/scalyr/common/util_spec.rb:XXX
+```
+
 ## Instrumentation and metrics
 
 By default, plugin logs a special line with metrics to Scalyr every 5 minutes. This line contains
data/lib/logstash/outputs/scalyr.rb
CHANGED
@@ -140,11 +140,17 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
   # Whether or not to create fresh quantile estimators after a status send. Depending on what you want to gather from
   # these stats this might be wanted or not.
   config :flush_quantile_estimates_on_status_send, :validate => :boolean, :default => false
-
+
   # Causes this plugin to act as if it successfully uploaded the logs, while actually returning as quickly as possible
   # after no work being done.
   config :noop_mode, :validate => :boolean, :default => false
 
+  # Set to false to disable estimating the size of each serialized event to make sure we don't go over the max request
+  # size (5.5) and split batch into multiple Scalyr requests, if needed. Since this estimation is not "free", especially
+  # for large batches, it may make sense to disable this option when logstash batch size is configured in a way that
+  # Scalyr single request limit won't be reached.
+  config :estimate_each_event_size, :validate => :boolean, :default => true
+
   # Manticore related options
   config :http_connect_timeout, :validate => :number, :default => 10
   config :http_socket_timeout, :validate => :number, :default => 10
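The new `estimate_each_event_size` option defaults to true. Below is a hypothetical sketch of turning it off, modelled on how the specs later in this diff construct the plugin; it only runs inside a Logstash/JRuby environment with this gem installed, and the token and buffer values are placeholders rather than recommendations.

```ruby
# Hypothetical configuration sketch -- illustrative values only.
require "logstash/outputs/scalyr"

config = {
  'api_write_token' => '1234',        # placeholder token
  'max_request_buffer' => 1_000_000,  # illustrative per-request ceiling
  # Skip per-event JSON size estimation; only reasonable when the Logstash
  # pipeline batch size already keeps every request under max_request_buffer.
  'estimate_each_event_size' => false
}

plugin = LogStash::Outputs::Scalyr.new(config)
```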
@@ -289,6 +295,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
   # Convenience method to create a fresh quantile estimator
   def get_new_metrics
     return {
+      :build_multi_duration_secs => Quantile::Estimator.new,
       :multi_receive_duration_secs => Quantile::Estimator.new,
       :multi_receive_event_count => Quantile::Estimator.new,
       :event_attributes_count => Quantile::Estimator.new,
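The new `:build_multi_duration_secs` entry is another `Quantile::Estimator`, the same streaming-percentile helper used by the existing metrics. A minimal stand-alone sketch of that API, assuming the `quantile` gem is on the load path (the sample durations are invented):

```ruby
# Minimal sketch of the Quantile::Estimator observe/query API used by the plugin metrics.
require 'quantile'

estimator = Quantile::Estimator.new

# Record a handful of observed durations (in seconds).
[0.12, 0.35, 0.07, 0.9, 0.22].each do |duration_secs|
  estimator.observe(duration_secs)
end

# Query approximate percentiles, as send_status does for each plugin metric.
puts "p50=#{estimator.query(0.5)} p90=#{estimator.query(0.9)} p99=#{estimator.query(0.99)}"
```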
@@ -313,17 +320,21 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
     return events if @noop_mode
 
     begin
+      records_count = events.to_a.length
+
+      # We also time the duration of the build_multi_event_request_array method. To avoid locking twice,
+      # we store the duration value here and record metric at the end.
       start_time = Time.now.to_f
 
       multi_event_request_array = build_multi_event_request_array(events)
-
+      build_multi_duration_secs = Time.now.to_f - start_time
 
+      # Loop over all array of multi-event requests, sending each multi-event to Scalyr
      sleep_interval = @retry_initial_interval
      batch_num = 1
      total_batches = multi_event_request_array.length unless multi_event_request_array.nil?
 
      result = []
-      records_count = events.to_a.length
 
      while !multi_event_request_array.to_a.empty?
        multi_event_request = multi_event_request_array.pop
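The comment in the hunk above describes a small pattern: measure the build step before taking the stats mutex, then record the stored value later inside the same synchronized block as the other counters. A generic sketch of that pattern with illustrative names, not the plugin's internals:

```ruby
stats_lock = Mutex.new
metrics = Hash.new(0)

# Stand-in for the expensive build step; just sleeps briefly and returns a result.
expensive_build_step = -> { sleep(0.01); [:request] }

# Time the expensive step without holding the lock...
start_time = Time.now.to_f
requests = expensive_build_step.call
build_duration_secs = Time.now.to_f - start_time

# ...then take the lock once and record everything together.
stats_lock.synchronize do
  metrics[:build_duration_secs] += build_duration_secs
  metrics[:batches_processed] += requests.length
end

puts metrics.inspect
```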
@@ -430,6 +441,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
       if records_count > 0
         @stats_lock.synchronize do
           @multi_receive_statistics[:total_multi_receive_secs] += (Time.now.to_f - start_time)
+          @plugin_metrics[:build_multi_duration_secs].observe(build_multi_duration_secs)
           @plugin_metrics[:multi_receive_duration_secs].observe(Time.now.to_f - start_time)
           @plugin_metrics[:multi_receive_event_count].observe(records_count)
           @plugin_metrics[:batches_per_multi_receive].observe(total_batches)
@@ -666,67 +678,75 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
         scalyr_event[:log] = logs_ids[log_identifier]
       end
 
-      [30 removed lines (old 669-698) are not rendered in the source diff]
+      if @estimate_each_event_size
+        # get json string of event to keep track of how many bytes we are sending
+        begin
+          event_json = scalyr_event.to_json
+          log_json = nil
+          if add_log
+            log_json = logs[log_identifier].to_json
+          end
+        rescue JSON::GeneratorError, Encoding::UndefinedConversionError => e
+          @logger.warn "#{e.class}: #{e.message}"
+
+          # Send the faulty event to a label @ERROR block and allow to handle it there (output to exceptions file for ex)
+          # TODO
+          # atime = Fluent::EventTime.new( sec, nsec )
+          # router.emit_error_event(serverHost, time, record, e)
+
+          scalyr_event[:attrs].each do |key, value|
+            @logger.debug "\t#{key} (#{value.encoding.name}): '#{value}'"
+            scalyr_event[:attrs][key] = value.encode(
+              "UTF-8", :invalid => :replace, :undef => :replace, :replace => "<?>"
+            ).force_encoding('UTF-8')
+          end
+          event_json = scalyr_event.to_json
+        rescue Java::JavaLang::ClassCastException => e
+          # Most likely we ran into the issue described here: https://github.com/flori/json/issues/336
+          # Because of the version of jruby logstash works with we don't have the option to just update this away,
+          # so if we run into it we convert bignums into strings so we can get the data in at least.
+          # This is fixed in JRuby 9.2.7, which includes json 2.2.0
+          @logger.warn("Error serializing events to JSON, likely due to the presence of Bignum values. Converting Bignum values to strings.")
+          @stats_lock.synchronize do
+            @multi_receive_statistics[:total_java_class_cast_errors] += 1
+          end
+          Scalyr::Common::Util.convert_bignums(scalyr_event)
+          event_json = scalyr_event.to_json
+          log_json = nil
+          if add_log
+            log_json = logs[log_identifier].to_json
+          end
        end
-      [5 removed lines (old 700-704) are not rendered in the source diff]
+
+        # generate new request if json size of events in the array exceed maximum request buffer size
+        append_event = true
+        add_bytes = event_json.bytesize
+        if log_json
+          add_bytes = add_bytes + log_json.bytesize
        end
-      end
 
-      [12 removed lines (old 708-719) are not rendered in the source diff]
+        if total_bytes + add_bytes > @max_request_buffer
+          # make sure we always have at least one event
+          if scalyr_events.size == 0
+            scalyr_events << scalyr_event
+            l_events << l_event
+            append_event = false
+          end
+
+          multi_event_request = self.create_multi_event_request(scalyr_events, l_events, current_threads, logs)
+          multi_event_request_array << multi_event_request
+
+          total_bytes = 0
+          current_threads = Hash.new
+          logs = Hash.new
+          logs_ids = Hash.new
+          scalyr_events = Array.new
+          l_events = Array.new
        end
-      [4 removed lines (old 721-724) are not rendered in the source diff]
-        current_threads = Hash.new
-        logs = Hash.new
-        logs_ids = Hash.new
-        scalyr_events = Array.new
-        l_events = Array.new
+      else
+        # If size estimation is disabled we simply append the event and handle splitting later on (if needed)
+        append_event = true
+        add_bytes = 0
       end
 
       # if we haven't consumed the current event already
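Stripped of the plugin-specific bookkeeping, the size-estimation branch above is an accumulate-and-flush loop: track the serialized size of the pending batch and start a new request once the next event would push it past the limit, while guaranteeing every request carries at least one event. A self-contained sketch of that idea (names and limits are illustrative, not the plugin's code):

```ruby
require 'json'

MAX_REQUEST_BUFFER = 60  # tiny limit, just for the example

events = [
  { seq: 1, msg: 'a' * 10 },
  { seq: 2, msg: 'b' * 30 },
  { seq: 3, msg: 'c' * 30 },
]

requests = []
batch = []
total_bytes = 0

events.each do |event|
  add_bytes = event.to_json.bytesize

  # Flush the pending batch if adding this event would exceed the buffer limit.
  # A non-empty batch is required, so a single oversized event still gets sent.
  if !batch.empty? && total_bytes + add_bytes > MAX_REQUEST_BUFFER
    requests << batch
    batch = []
    total_bytes = 0
  end

  batch << event
  total_bytes += add_bytes
end

requests << batch unless batch.empty?
puts "#{requests.length} request(s): #{requests.map(&:length).inspect} events each"
```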
@@ -739,9 +759,12 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
 
     }
 
-    # create a final request with any left over events
-    [2 removed lines (old 743-744) are not rendered in the source diff]
+    # create a final request with any left over events (and make sure there is at least one event)
+    if scalyr_events.size >= 1
+      multi_event_request = self.create_multi_event_request(scalyr_events, l_events, current_threads, logs)
+      multi_event_request_array << multi_event_request
+    end
+
     multi_event_request_array
   end
 
@@ -803,6 +826,14 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
     end
     end_time = Time.now.to_f
     serialization_duration = end_time - start_time
+
+    serialized_request_size = serialized_body.bytesize
+
+    if serialized_request_size >= @max_request_buffer
+      # TODO: If we end up here and the estimate config option is false, split the request here into multiple ones
+      @logger.warn("Serialized request size (#{serialized_request_size}) is larger than max_request_buffer (#{max_request_buffer})!")
+    end
+
     {
       :body => serialized_body, :record_count => scalyr_events.size, :serialization_duration => serialization_duration,
       :logstash_events => logstash_events
@@ -816,6 +847,10 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
     @stats_lock.synchronize do
       current_stats = @multi_receive_statistics.clone
 
+      current_stats[:build_multi_duration_secs_p50] = @plugin_metrics[:build_multi_duration_secs].query(0.5)
+      current_stats[:build_multi_duration_secs_p90] = @plugin_metrics[:build_multi_duration_secs].query(0.9)
+      current_stats[:build_multi_duration_secs_p99] = @plugin_metrics[:build_multi_duration_secs].query(0.99)
+
       current_stats[:multi_receive_duration_p50] = @plugin_metrics[:multi_receive_duration_secs].query(0.5)
       current_stats[:multi_receive_duration_p90] = @plugin_metrics[:multi_receive_duration_secs].query(0.9)
       current_stats[:multi_receive_duration_p99] = @plugin_metrics[:multi_receive_duration_secs].query(0.99)
data/lib/scalyr/common/util.rb
CHANGED
@@ -42,7 +42,7 @@ def self.flatten(hash_obj, delimiter='_', flatten_arrays=true, fix_deep_flatteni
       key_list << key_stack.pop
 
       # Case when object is a hash
-      if obj.respond_to?(:has_key?)
+      if obj.respond_to?(:has_key?) and obj.keys.count > 0
         key_list_width << obj.keys.count
         obj.each do |key, value|
           key_stack << key
@@ -50,12 +50,12 @@ def self.flatten(hash_obj, delimiter='_', flatten_arrays=true, fix_deep_flatteni
       end
 
       # Case when object is an array we intend to flatten
-      elsif flatten_arrays and obj.respond_to?(:each)
-      [5 removed lines (old 54-58) are not rendered in the source diff]
+      elsif flatten_arrays and obj.respond_to?(:each) and obj.count > 0
+        key_list_width << obj.count
+        obj.each_with_index do |value, index|
+          key_stack << index
+          stack << value
+        end
 
       else
         result_key = ""
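The guards added above (`obj.keys.count > 0`, `obj.count > 0`) make the iterative flattener treat empty hashes and arrays as leaves instead of parents that never yield children, which is the 0.1.21.beta fix noted in the changelog. A simplified, self-contained sketch of the same idea, using explicit key prefixes instead of the plugin's key_stack/key_list_width bookkeeping:

```ruby
# Simplified iterative flatten -- an illustration, not the plugin's
# Scalyr::Common::Util.flatten. Empty hashes and arrays fall through to the
# leaf branch, mirroring the `> 0` guards added in the hunk above.
def flatten_iterative(hash_obj, delimiter = '_')
  result = {}
  # Each stack entry is [flattened_key_prefix, object_to_process].
  stack = [[nil, hash_obj]]

  until stack.empty?
    prefix, obj = stack.pop

    if obj.is_a?(Hash) && !obj.empty?
      obj.each do |key, value|
        stack << [prefix.nil? ? key.to_s : "#{prefix}#{delimiter}#{key}", value]
      end
    elsif obj.is_a?(Array) && !obj.empty?
      obj.each_with_index do |value, index|
        stack << [prefix.nil? ? index.to_s : "#{prefix}#{delimiter}#{index}", value]
      end
    else
      # Leaf value (including empty collections) -- record it under its flattened key.
      result[prefix] = obj unless prefix.nil?
    end
  end

  result
end

puts flatten_iterative({ 'a' => { 'b' => 1, 'c' => [] }, 'd' => [2, 3] }).inspect
# => {"d_1"=>3, "d_0"=>2, "a_c"=>[], "a_b"=>1} (order depends on traversal)
```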
data/lib/scalyr/constants.rb
CHANGED
@@ -1,2 +1,2 @@
 # encoding: utf-8
-PLUGIN_VERSION = "v0.1.
+PLUGIN_VERSION = "v0.1.24.beta"
data/logstash-output-scalyr.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-scalyr'
-  s.version = '0.1.
+  s.version = '0.1.24.beta'
   s.licenses = ['Apache-2.0']
   s.summary = "Scalyr output plugin for Logstash"
   s.description = "Sends log data collected by Logstash to Scalyr (https://www.scalyr.com)"
data/spec/logstash/outputs/scalyr_integration_spec.rb
CHANGED
@@ -7,6 +7,12 @@ require "json"
 require 'webmock/rspec'
 WebMock.allow_net_connect!
 
+RSpec.configure do |rspec|
+  rspec.expect_with :rspec do |c|
+    c.max_formatted_output_length = nil
+  end
+end
+
 describe LogStash::Outputs::Scalyr do
   let(:sample_events) {
     events = []
@@ -58,7 +64,7 @@ describe LogStash::Outputs::Scalyr do
         {
           :error_class=>"Manticore::UnknownException",
           :batch_num=>1,
-          :message=>"Unexpected error: java.security.InvalidAlgorithmParameterException: the trustAnchors parameter must be non-empty",
+          :message=>"java.lang.RuntimeException: Unexpected error: java.security.InvalidAlgorithmParameterException: the trustAnchors parameter must be non-empty",
           :payload_size=>781,
           :record_count=>3,
           :total_batches=>1,
@@ -84,7 +90,7 @@ describe LogStash::Outputs::Scalyr do
         {
           :error_class=>"Manticore::UnknownException",
           :batch_num=>1,
-          :message=>"Unexpected error: java.security.InvalidAlgorithmParameterException: the trustAnchors parameter must be non-empty",
+          :message=>"java.lang.RuntimeException: Unexpected error: java.security.InvalidAlgorithmParameterException: the trustAnchors parameter must be non-empty",
           :payload_size=>781,
           :record_count=>3,
           :total_batches=>1,
data/spec/logstash/outputs/scalyr_spec.rb
CHANGED
@@ -80,6 +80,7 @@ describe LogStash::Outputs::Scalyr do
       plugin1.instance_variable_set(:@client_session, mock_client_session)
       plugin1.instance_variable_set(:@session_id, "some_session_id")
       plugin1.instance_variable_set(:@plugin_metrics, {
+        :build_multi_duration_secs => Quantile::Estimator.new,
         :multi_receive_duration_secs => Quantile::Estimator.new,
         :multi_receive_event_count => Quantile::Estimator.new,
         :event_attributes_count => Quantile::Estimator.new,
@@ -87,10 +88,11 @@ describe LogStash::Outputs::Scalyr do
         :batches_per_multi_receive => Quantile::Estimator.new
       })
       plugin1.instance_variable_get(:@plugin_metrics)[:multi_receive_duration_secs].observe(1)
+      plugin1.instance_variable_get(:@plugin_metrics)[:build_multi_duration_secs].observe(1)
       plugin1.instance_variable_set(:@multi_receive_statistics, {:total_multi_receive_secs => 0})
 
       status_event = plugin1.send_status
-      expect(status_event[:attrs]["message"]).to eq("plugin_status: total_requests_sent=20 total_requests_failed=10 total_request_bytes_sent=100 total_compressed_request_bytes_sent=50 total_response_bytes_received=100 total_request_latency_secs=100 total_serialization_duration_secs=100.5000 total_compression_duration_secs=10.2000 compression_type=deflate compression_level=9 total_multi_receive_secs=0 multi_receive_duration_p50=1 multi_receive_duration_p90=1 multi_receive_duration_p99=1 multi_receive_event_count_p50=0 multi_receive_event_count_p90=0 multi_receive_event_count_p99=0 event_attributes_count_p50=0 event_attributes_count_p90=0 event_attributes_count_p99=0 batches_per_multi_receive_p50=0 batches_per_multi_receive_p90=0 batches_per_multi_receive_p99=0")
+      expect(status_event[:attrs]["message"]).to eq("plugin_status: total_requests_sent=20 total_requests_failed=10 total_request_bytes_sent=100 total_compressed_request_bytes_sent=50 total_response_bytes_received=100 total_request_latency_secs=100 total_serialization_duration_secs=100.5000 total_compression_duration_secs=10.2000 compression_type=deflate compression_level=9 total_multi_receive_secs=0 build_multi_duration_secs_p50=1 build_multi_duration_secs_p90=1 build_multi_duration_secs_p99=1 multi_receive_duration_p50=1 multi_receive_duration_p90=1 multi_receive_duration_p99=1 multi_receive_event_count_p50=0 multi_receive_event_count_p90=0 multi_receive_event_count_p99=0 event_attributes_count_p50=0 event_attributes_count_p90=0 event_attributes_count_p99=0 batches_per_multi_receive_p50=0 batches_per_multi_receive_p90=0 batches_per_multi_receive_p99=0")
     end
 
     it "returns and sends correct status event on send_stats on initial and subsequent send" do
@@ -106,6 +108,7 @@ describe LogStash::Outputs::Scalyr do
       plugin.instance_variable_set(:@client_session, mock_client_session)
       # Setup one quantile calculation to make sure at least one of them calculates as expected
       plugin.instance_variable_set(:@plugin_metrics, {
+        :build_multi_duration_secs => Quantile::Estimator.new,
         :multi_receive_duration_secs => Quantile::Estimator.new,
         :multi_receive_event_count => Quantile::Estimator.new,
         :event_attributes_count => Quantile::Estimator.new,
@@ -119,12 +122,13 @@ describe LogStash::Outputs::Scalyr do
 
       plugin.instance_variable_set(:@multi_receive_statistics, {:total_multi_receive_secs => 0})
       status_event = plugin.send_status
-      expect(status_event[:attrs]["message"]).to eq("plugin_status: total_requests_sent=20 total_requests_failed=10 total_request_bytes_sent=100 total_compressed_request_bytes_sent=50 total_response_bytes_received=100 total_request_latency_secs=100 total_serialization_duration_secs=100.5000 total_compression_duration_secs=10.2000 compression_type=deflate compression_level=9 total_multi_receive_secs=0 multi_receive_duration_p50=10 multi_receive_duration_p90=18 multi_receive_duration_p99=19 multi_receive_event_count_p50=0 multi_receive_event_count_p90=0 multi_receive_event_count_p99=0 event_attributes_count_p50=0 event_attributes_count_p90=0 event_attributes_count_p99=0 batches_per_multi_receive_p50=0 batches_per_multi_receive_p90=0 batches_per_multi_receive_p99=0 flatten_values_duration_secs_p50=0 flatten_values_duration_secs_p90=0 flatten_values_duration_secs_p99=0")
+      expect(status_event[:attrs]["message"]).to eq("plugin_status: total_requests_sent=20 total_requests_failed=10 total_request_bytes_sent=100 total_compressed_request_bytes_sent=50 total_response_bytes_received=100 total_request_latency_secs=100 total_serialization_duration_secs=100.5000 total_compression_duration_secs=10.2000 compression_type=deflate compression_level=9 total_multi_receive_secs=0 build_multi_duration_secs_p50=0 build_multi_duration_secs_p90=0 build_multi_duration_secs_p99=0 multi_receive_duration_p50=10 multi_receive_duration_p90=18 multi_receive_duration_p99=19 multi_receive_event_count_p50=0 multi_receive_event_count_p90=0 multi_receive_event_count_p99=0 event_attributes_count_p50=0 event_attributes_count_p90=0 event_attributes_count_p99=0 batches_per_multi_receive_p50=0 batches_per_multi_receive_p90=0 batches_per_multi_receive_p99=0 flatten_values_duration_secs_p50=0 flatten_values_duration_secs_p90=0 flatten_values_duration_secs_p99=0")
     end
 
     it "send_stats is called when events list is empty, but otherwise is noop" do
       quantile_estimator = Quantile::Estimator.new
       plugin.instance_variable_set(:@plugin_metrics, {
+        :build_multi_duration_secs => Quantile::Estimator.new,
         :multi_receive_duration_secs => Quantile::Estimator.new,
         :multi_receive_event_count => Quantile::Estimator.new,
         :event_attributes_count => Quantile::Estimator.new,
@@ -149,6 +153,7 @@ describe LogStash::Outputs::Scalyr do
       mock_client_session = MockClientSession.new
       quantile_estimator = Quantile::Estimator.new
       plugin2.instance_variable_set(:@plugin_metrics, {
+        :build_multi_duration_secs => Quantile::Estimator.new,
         :multi_receive_duration_secs => Quantile::Estimator.new,
         :multi_receive_event_count => Quantile::Estimator.new,
         :event_attributes_count => Quantile::Estimator.new,
@@ -174,6 +179,7 @@ describe LogStash::Outputs::Scalyr do
       plugin.instance_variable_set(:@last_status_transmit_time, 100)
       plugin.instance_variable_set(:@client_session, mock_client_session)
       plugin.instance_variable_set(:@plugin_metrics, {
+        :build_multi_duration_secs => Quantile::Estimator.new,
         :multi_receive_duration_secs => Quantile::Estimator.new,
         :multi_receive_event_count => Quantile::Estimator.new,
         :event_attributes_count => Quantile::Estimator.new,
@@ -386,6 +392,124 @@ describe LogStash::Outputs::Scalyr do
     end
   end
 
+  context "split large batches into multiple scalyr requests" do
+    it "estimate_each_event_size is true explicit (default) batch split into 3 scalyr requests" do
+      config = {
+        'api_write_token' => '1234',
+        'flatten_tags' => true,
+        'flat_tag_value' => 'true',
+        'flat_tag_prefix' => 'tag_prefix_',
+        'flatten_nested_values' => true,  # this converts into string 'true'
+        'max_request_buffer' => 10,
+        'estimate_each_event_size' => true
+      }
+      plugin = LogStash::Outputs::Scalyr.new(config)
+
+      allow(plugin).to receive(:send_status).and_return(nil)
+      plugin.register
+      result = plugin.build_multi_event_request_array(sample_events)
+      expect(result.size).to eq(3)
+
+      body = JSON.parse(result[0][:body])
+      expect(body['events'].size).to eq(1)
+
+      body = JSON.parse(result[1][:body])
+      expect(body['events'].size).to eq(1)
+
+      body = JSON.parse(result[2][:body])
+      expect(body['events'].size).to eq(1)
+      expect(body['events'][0]['attrs']).to eq({
+        "nested_a" => 1,
+        "nested_b_0" => 3,
+        "nested_b_1" => 4,
+        "nested_b_2" => 5,
+        'seq' => 3,
+        'source_file' => 'my file 3',
+        'source_host' => 'my host 3',
+        'serverHost' => 'Logstash',
+        "tag_prefix_t1" => "true",
+        "tag_prefix_t2" => "true",
+        "tag_prefix_t3" => "true",
+        "parser" => "logstashParser",
+      })
+    end
+
+    it "estimate_each_event_size is true implicit (default) batch split into 3 scalyr requests" do
+      config = {
+        'api_write_token' => '1234',
+        'flatten_tags' => true,
+        'flat_tag_value' => 'true',
+        'flat_tag_prefix' => 'tag_prefix_',
+        'flatten_nested_values' => true,  # this converts into string 'true'
+        'max_request_buffer' => 10,
+      }
+      plugin = LogStash::Outputs::Scalyr.new(config)
+
+      allow(plugin).to receive(:send_status).and_return(nil)
+      plugin.register
+      result = plugin.build_multi_event_request_array(sample_events)
+      expect(result.size).to eq(3)
+
+      body = JSON.parse(result[0][:body])
+      expect(body['events'].size).to eq(1)
+
+      body = JSON.parse(result[1][:body])
+      expect(body['events'].size).to eq(1)
+
+      body = JSON.parse(result[2][:body])
+      expect(body['events'].size).to eq(1)
+      expect(body['events'][0]['attrs']).to eq({
+        "nested_a" => 1,
+        "nested_b_0" => 3,
+        "nested_b_1" => 4,
+        "nested_b_2" => 5,
+        'seq' => 3,
+        'source_file' => 'my file 3',
+        'source_host' => 'my host 3',
+        'serverHost' => 'Logstash',
+        "tag_prefix_t1" => "true",
+        "tag_prefix_t2" => "true",
+        "tag_prefix_t3" => "true",
+        "parser" => "logstashParser",
+      })
+    end
+
+    it "estimate_each_event_size is false batch not split into multiple scalyr requests" do
+      config = {
+        'api_write_token' => '1234',
+        'flatten_tags' => true,
+        'flat_tag_value' => 'true',
+        'flat_tag_prefix' => 'tag_prefix_',
+        'flatten_nested_values' => true,  # this converts into string 'true'
+        'max_request_buffer' => 10,
+        'estimate_each_event_size' => false
+      }
+      plugin = LogStash::Outputs::Scalyr.new(config)
+
+      allow(plugin).to receive(:send_status).and_return(nil)
+      plugin.register
+      result = plugin.build_multi_event_request_array(sample_events)
+      expect(result.size).to eq(1)
+
+      body = JSON.parse(result[0][:body])
+      expect(body['events'].size).to eq(3)
+      expect(body['events'][2]['attrs']).to eq({
+        "nested_a" => 1,
+        "nested_b_0" => 3,
+        "nested_b_1" => 4,
+        "nested_b_2" => 5,
+        'seq' => 3,
+        'source_file' => 'my file 3',
+        'source_host' => 'my host 3',
+        'serverHost' => 'Logstash',
+        "tag_prefix_t1" => "true",
+        "tag_prefix_t2" => "true",
+        "tag_prefix_t3" => "true",
+        "parser" => "logstashParser",
+      })
+    end
+  end
+
   context "when not configured to flatten values and tags" do
     config = {
       'api_write_token' => '1234',