logstash-output-scalyr 0.2.2 → 0.2.5.beta
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +29 -0
- data/Gemfile +2 -2
- data/README.md +38 -4
- data/lib/logstash/outputs/scalyr.rb +64 -7
- data/lib/scalyr/common/client.rb +13 -0
- data/lib/scalyr/constants.rb +1 -1
- data/logstash-output-scalyr.gemspec +1 -1
- data/spec/logstash/outputs/scalyr_integration_spec.rb +54 -28
- data/spec/logstash/outputs/scalyr_spec.rb +170 -2
- metadata +4 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a1f4ab67bba48557efab9493bdd6e85ef80ba6aab10f1b21e029acc78e69fdc7
+  data.tar.gz: c081bfd418f30502ca4fde78f8d8e15316ea18bd8f2834adff004d40913f2c39
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c1e6019904b05043552ea46d303d861f3a539c94fa644686d5a12ecd5bf40a69ec1a9afde09589a676ccecc0b03ee737bc24456ca5b19026b580421c18ae6605
+  data.tar.gz: ce5b38bbff35964ce84062fd647645a222958e72ea3a7f973900b182edb8e527f045dedac15668817898f6119e32057927f102b07bdffdfceb3099ea4be47beb
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,34 @@
 # Beta
 
+## 0.2.5.beta
+
+* Allow user to specify value for the DataSet event severity (``sev``) field. "sev" field is a
+  special top level event field which denotes the event severity (log level).
+
+  To enable this functionality, user needs to configure the ``severity_field`` plugin config option and
+  set it to the Logstash event field which carries the severity field value. This field value
+  needs to be an integer and contain a value from 0 to 6 (inclusive).
+* Upgrade dependencies (manticore -> 0.9.1, jrjackson -> 0.4.15).
+* Fix experimental ``zstandard`` support.
+
+  NOTE: For zstandard compression to be used, the zstd / libzstd system package needs to be installed
+  (https://github.com/msievers/zstandard-ruby/#examples-for-installing-libzstd) and the ``zstandard``
+  gem needs to be installed inside the Logstash jRuby environment
+  (e.g. ``/usr/share/logstash/bin/ruby -S /usr/share/logstash/vendor/jruby/bin/gem install
+  zstandard ; echo 'gem "zstandard"' >> /opt/logstash/Gemfile``).
+
+## 0.2.4.beta
+
+* Experimental zstandard support - in development, not to be used in production.
+
+## 0.2.3
+
+- Increase default number of maximum retry attempts on failure from `5` to `15`.
+- Change "Unexpected error occurred while uploading to Scalyr (will backoff-retry)" message to
+  be logged under WARNING and not ERROR log level. This error is not fatal and simply indicates
+  the client will retry a failed request. We use WARNING and not INFO so we still have visibility into
+  those messages (since most deployments have log level set to WARNING or above).
+
 ## 0.2.2
 
 - No longer vendor dependencies in the gem. This gem used to vendor a vulnerable log4j version
data/Gemfile
CHANGED
data/README.md
CHANGED
@@ -1,4 +1,4 @@
-[](https://circleci.com/gh/scalyr/logstash-output-scalyr)
+[](https://circleci.com/gh/scalyr/logstash-output-scalyr) [](https://badge.fury.io/rb/logstash-output-scalyr)
 
 # [Scalyr output plugin for Logstash]
 
@@ -9,9 +9,9 @@ You can view documentation for this plugin [on the Scalyr website](https://app.s
 # Quick start
 
 1. Build the gem, run `gem build logstash-output-scalyr.gemspec`
-2. Install the gem into a Logstash installation, run `/usr/share/logstash/bin/logstash-plugin install logstash-output-scalyr-0.2.
+2. Install the gem into a Logstash installation, run `/usr/share/logstash/bin/logstash-plugin install logstash-output-scalyr-0.2.3.gem`
 or follow the latest official instructions on working with plugins from Logstash. As an alternative, you can directly install latest
-stable version from RubyGems - ``/usr/share/logstash/bin/logstash-plugin --version 0.2.
+stable version from RubyGems - ``/usr/share/logstash/bin/logstash-plugin --version 0.2.3 logstash-output-scalyr``
 3. Configure the output plugin (e.g. add it to a pipeline .conf)
 4. Restart Logstash
 
@@ -86,6 +86,40 @@ output {
 }
 ```
 
+## Notes on severity (sev) attribute handling
+
+``sev`` is a special top level DataSet event field which denotes the event severity / log level.
+
+To enable this functionality, user needs to define the ``severity_field`` plugin config option. This
+config option tells the plugin which Logstash event field carries the value for the severity field.
+
+The actual value needs to be an integer between 0 and 6 inclusive. Those values are mapped to
+different severity / log levels on the DataSet server side as shown below:
+
+- 0 -> finest
+- 1 -> trace
+- 2 -> debug
+- 3 -> info
+- 4 -> warning
+- 5 -> error
+- 6 -> fatal / emergency / critical
+
+```
+output {
+  scalyr {
+    api_write_token => 'SCALYR_API_KEY'
+    ...
+    severity_field => 'severity'
+  }
+}
+```
+
+In the example above, the value for the DataSet severity field should be included in the ``severity``
+Logstash event field.
+
+In case the field value doesn't contain a valid severity number (0 - 6), the ``sev`` field won't be
+set on the event object to prevent the API from rejecting an invalid request.
+
 ## Options
 
 - The Scalyr API write token, these are available at https://www.scalyr.com/keys. This is the only compulsory configuration field required for proper upload
@@ -477,7 +511,7 @@ git clone https://github.com/Kami/logstash-config-tester ~/
 gem build logstash-output-scalyr.gemspec
 
 # 2. Copy it to the config test repo
-cp logstash-output-scalyr-0.2.
+cp logstash-output-scalyr-0.2.3.gem ~/logstash-config-test/logstash-output-scalyr.gem
 
 # 3. Build docker image with the latest dev version of the plugin (may take a while)
 docker-compose build
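To make the distinction in the README notes above concrete, the sketch below shows the approximate shape of a single addEvents event once a valid value is found in the configured severity field: ``sev`` ends up as a top level event field next to ``ts`` and ``attrs``, not inside ``attrs``, and the source attribute is consumed. This is an illustration only (field values and attribute names other than ``sev``, ``ts`` and ``attrs`` are made up for the example), not an excerpt from the plugin.

```
# Rough shape of one event in the addEvents payload when severity_field is set
# and the Logstash event carried severity => 5 (error). Illustrative values only.
scalyr_event = {
  :ts => "1655812900123456789",   # event timestamp (illustrative)
  :sev => 5,                      # top level severity field, NOT an attribute
  :attrs => {
    "message" => "something failed",
    "logfile" => "/var/log/app.log"
    # note: no "severity" key here - the source field is removed when its value is valid
  }
}
```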
data/lib/logstash/outputs/scalyr.rb
CHANGED
@@ -60,6 +60,22 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
   # (Warning: events with an existing 'logfile' field, it will be overwritten)
   config :logfile_field, :validate => :string, :default => 'logfile'
 
+  # Record field which includes the value for the "severity" field. severity is a special field which tells
+  # Scalyr the severity / log level for a particular event. This field is a top level event field and not an
+  # event attribute field. Actual field value must be an integer and is mapped to different severity /
+  # log levels on the DataSet server side as shown below:
+  #
+  # - 0 -> finest
+  # - 1 -> trace
+  # - 2 -> debug
+  # - 3 -> info
+  # - 4 -> warning
+  # - 5 -> error
+  # - 6 -> fatal / emergency / critical
+  #
+  # By default, if the Event contains no severity field, a default value of 3 (info) will be used.
+  config :severity_field, :validate => :string, :default => nil
+
   # The Scalyr Output Plugin expects the main log message to be contained in the Event['message']. If your main log
   # content is contained in a different field, specify it here. It will be renamed to 'message' before upload.
   # (Warning: events with an existing 'message' field, it will be overwritten)
@@ -105,7 +121,9 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
   # Initial interval in seconds between bulk retries. Doubled on each retry up to `retry_max_interval`
   config :retry_initial_interval, :validate => :number, :default => 1
   # How many times to retry sending an event before giving up on it
-  config :max_retries, :validate => :number, :default => 5
+  # This will result in a total of around 12 minutes of retrying / sleeping with a default value
+  # for retry_max_interval
+  config :max_retries, :validate => :number, :default => 15
   # Whether or not to send messages that failed to send a max_retries amount of times to the DLQ or just drop them
   config :send_to_dlq, :validate => :boolean, :default => true
 
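The "around 12 minutes" figure in the comment above can be sanity-checked with a quick sketch. It assumes the documented doubling of ``retry_initial_interval`` capped at ``retry_max_interval``, and assumes a ``retry_max_interval`` default of 64 seconds (an assumption; that default is not visible in this hunk). Jitter and per-request time push the real total somewhat higher.

```
# Back-of-the-envelope estimate of total sleep time with the new defaults:
# retry_initial_interval = 1, doubled per retry, capped at retry_max_interval.
retry_initial_interval = 1
retry_max_interval = 64   # assumed default, not shown in this diff hunk
max_retries = 15

total = (0...max_retries).sum do |attempt|
  [retry_initial_interval * 2**attempt, retry_max_interval].min
end

puts total  # => 639 seconds, i.e. roughly 10-12 minutes once jitter and
            #    request/response time are added on top
```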
@@ -128,7 +146,8 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
   # Valid options are bz2, deflate, or none.
   config :compression_type, :validate => :string, :default => 'deflate'
 
-  # An int containing the compression level of compression to use, from 1-9. Defaults to 6
+  # An int containing the compression level of compression to use, from 1-9. Defaults to 6. Only
+  # applicable when compression type is "deflate" or "bz2".
   config :compression_level, :validate => :number, :default => 6
 
   # How often to log and report status metrics to Scalyr. Defaults to every 5
@@ -493,9 +512,9 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
           :will_retry_in_seconds => sleep_interval,
         }
         exc_data[:code] = e.code if e.code
-        if @logger.debug? and e.body
+        if @logger.debug? and defined?(e.body) and e.body
           exc_data[:body] = e.body
-        elsif e.body
+        elsif defined?(e.body) and e.body
           exc_data[:body] = Scalyr::Common::Util.truncate(e.body, 512)
         end
         exc_data[:payload] = "\tSample payload: #{request[:body][0,1024]}..." if @logger.debug?
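The ``defined?(e.body)`` guards added above matter because this rescue block can see exceptions of different classes, and only some of them expose a ``body`` reader; on a plain exception, calling ``e.body`` would itself raise NoMethodError. A minimal illustration of the guard follows (the exception class here is made up for the example):

```
# defined?(obj.method) returns nil instead of raising when the receiver does
# not respond to the method, so it works as a cheap capability check.
class ErrorWithBody < StandardError
  def body
    '{"status": "error/client/badParam"}'
  end
end

[ErrorWithBody.new("bad request"), RuntimeError.new("boom")].each do |e|
  if defined?(e.body) && e.body
    puts "body: #{e.body}"
  else
    puts "no body available for #{e.class}"
  end
end
# => body: {"status": "error/client/badParam"}
# => no body available for RuntimeError
```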
@@ -504,8 +523,8 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
           @logger.debug(message, exc_data)
           exc_commonly_retried = true
         else
-          # all other failed uploads should be errors
-          @logger.error(message, exc_data)
+          # all other failed uploads should be warnings
+          @logger.warn(message, exc_data)
           exc_commonly_retried = false
         end
         retry if @running and exc_retries < @max_retries
@@ -720,6 +739,11 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
       # Rename user-specified logfile field -> 'logfile'
       rename.call(@logfile_field, 'logfile')
 
+      # Rename user-specified severity field -> 'severity' (if configured)
+      if not @severity_field.nil?
+        rename.call(@severity_field, 'severity')
+      end
+
       # Remove "host" attribute
       if @remove_host_attribute_from_events and record.key? "host"
         record.delete("host")
@@ -764,6 +788,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
         logs[log_identifier]['attrs']['logfile'] = record['logfile']
         record.delete('logfile')
       end
+
       if @log_constants
         @log_constants.each {|log_constant|
           if record.key? log_constant
@@ -825,6 +850,21 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
         end
       end
 
+      severity = record['severity']
+      severity_int = nil
+
+      # Server won't accept the payload in case the severity value is not valid. To avoid events
+      # being dropped, we only set the Event.sev field in case this field contains a valid value.
+      if not @severity_field.nil? and severity and severity.is_integer?
+        severity_int = severity.to_i
+
+        if severity_int >= 0 and severity_int <= 6
+          record.delete('severity')
+        else
+          severity_int = nil
+        end
+      end
+
       # Use LogStash event.timestamp as the 'ts' Scalyr timestamp. Note that this may be overwritten by input
       # filters so may not necessarily reflect the actual originating timestamp.
       scalyr_event = {
@@ -838,6 +878,11 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
         scalyr_event[:log] = logs_ids[log_identifier]
       end
 
+      # optionally set severity (if available and valid)
+      if @severity_field and not severity_int.nil?
+        scalyr_event[:sev] = severity_int
+      end
+
       if @estimate_each_event_size
         # get json string of event to keep track of how many bytes we are sending
         begin
@@ -1102,7 +1147,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
     begin
       client_session.post_add_events(multi_event_request[:body], true, 0)
     rescue => e
-      if e.body
+      if defined?(e.body) and e.body
        @logger.warn(
          "Unexpected error occurred while uploading status to Scalyr",
          :error_message => e.message,
@@ -1168,3 +1213,15 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
     !execution_context.dlq_writer.inner_writer.is_a?(::LogStash::Util::DummyDeadLetterQueueWriter)
   end
 end
+
+class String
+  def is_integer?
+    self.to_i.to_s == self
+  end
+end
+
+class Integer
+  def is_integer?
+    true
+  end
+end
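A short note on the ``is_integer?`` monkey patches above: the String version only accepts strings whose canonical integer form round-trips, which is what lets the plugin accept both integer and string severity values (as exercised by the specs further down). A few worked cases, derived directly from the definition:

```
# String#is_integer? as defined above: self.to_i.to_s == self
"5".is_integer?        # => true
"-1".is_integer?       # => true
"invalid".is_integer?  # => false ("invalid".to_i.to_s == "0")
"05".is_integer?       # => false (leading zero does not round-trip)
7.is_integer?          # => true  (Integer#is_integer? always returns true)
```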
data/lib/scalyr/common/client.rb
CHANGED
@@ -299,6 +299,19 @@ class ClientSession
         bz2.write body
         bz2.close
         compressed_body = io.string
+      elsif @compression_type == "zstandard"
+        # NOTE: zstandard requires libzstd to be installed on the system and the
+        # zstandard gem. Since libzstd may not be installed out of the box, we
+        # don't directly depend on this gem and it's up to the user to install
+        # both dependencies manually in case they want to use zstandard.
+        begin
+          require 'zstandard'
+        rescue LoadError
+          raise SystemExit, "zstandard gem is missing. If you want to use zstandard compression you need to make sure the zstandard gem and the libzstd dependency are installed. See TODO for more information."
+        end
+
+        encoding = 'zstandard'
+        compressed_body = Zstandard.deflate(body)
       end
       end_time = Time.now.to_f
       compression_duration = end_time - start_time
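For completeness, a small round-trip sketch with the ``zstandard`` gem referenced above, assuming libzstd and the gem are installed as described in the CHANGELOG. ``Zstandard.inflate`` is the gem's decompression counterpart to the ``Zstandard.deflate`` call used in the diff; treat the exact API as an assumption and check the gem's documentation.

```
# Round trip using the zstandard gem (requires the libzstd system library).
require 'zstandard'

body = '{"events": []}' * 100
compressed = Zstandard.deflate(body)
restored = Zstandard.inflate(compressed)

puts "#{body.bytesize} bytes -> #{compressed.bytesize} bytes compressed"
puts restored == body   # => true
```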
data/lib/scalyr/constants.rb
CHANGED
data/logstash-output-scalyr.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-scalyr'
-  s.version = '0.2.2'
+  s.version = '0.2.5.beta'
   s.licenses = ['Apache-2.0']
   s.summary = "Scalyr output plugin for Logstash"
   s.description = "Sends log data collected by Logstash to Scalyr (https://www.scalyr.com)"
data/spec/logstash/outputs/scalyr_integration_spec.rb
CHANGED
@@ -39,12 +39,15 @@ describe LogStash::Outputs::Scalyr do
      plugin = LogStash::Outputs::Scalyr.new({
        'api_write_token' => '1234',
        'perform_connectivity_check' => false,
+        'max_retries' => 2,
+        'retry_max_interval' => 2,
+        'retry_initial_interval' => 0.2,
      })
      plugin.register
      plugin.instance_variable_set(:@running, false)
-      allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+      allow(plugin.instance_variable_get(:@logger)).to receive(:warn)
      plugin.multi_receive(sample_events)
-      expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
+      expect(plugin.instance_variable_get(:@logger)).to have_received(:warn).with("Error uploading to Scalyr (will backoff-retry)",
        {
          :error_class=>"Scalyr::Common::Client::ServerError",
          :batch_num=>1,
@@ -54,7 +57,7 @@ describe LogStash::Outputs::Scalyr do
          :record_count=>3,
          :total_batches=>1,
          :url=>"https://agent.scalyr.com/addEvents",
-          :will_retry_in_seconds=>
+          :will_retry_in_seconds=>0.4,
          :body=>"{\n \"message\": \"Couldn't decode API token ...234.\",\n \"status\": \"error/client/badParam\"\n}"
        }
      )
@@ -68,12 +71,15 @@ describe LogStash::Outputs::Scalyr do
        'perform_connectivity_check' => false,
        'ssl_ca_bundle_path' => '/fakepath/nocerts',
        'append_builtin_cert' => false,
+        'max_retries' => 2,
+        'retry_max_interval' => 2,
+        'retry_initial_interval' => 0.2,
      })
      plugin.register
      plugin.instance_variable_set(:@running, false)
-      allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+      allow(plugin.instance_variable_get(:@logger)).to receive(:warn)
      plugin.multi_receive(sample_events)
-      expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
+      expect(plugin.instance_variable_get(:@logger)).to have_received(:warn).with("Error uploading to Scalyr (will backoff-retry)",
        {
          :error_class=>"Manticore::UnknownException",
          :batch_num=>1,
@@ -82,7 +88,7 @@ describe LogStash::Outputs::Scalyr do
          :record_count=>3,
          :total_batches=>1,
          :url=>"https://agent.scalyr.com/addEvents",
-          :will_retry_in_seconds=>
+          :will_retry_in_seconds=>0.4
        }
      )
    end
@@ -98,12 +104,15 @@ describe LogStash::Outputs::Scalyr do
        'api_write_token' => '1234',
        'perform_connectivity_check' => false,
        'append_builtin_cert' => false,
+        'max_retries' => 2,
+        'retry_max_interval' => 2,
+        'retry_initial_interval' => 0.2,
      })
      plugin.register
      plugin.instance_variable_set(:@running, false)
-      allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+      allow(plugin.instance_variable_get(:@logger)).to receive(:warn)
      plugin.multi_receive(sample_events)
-      expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
+      expect(plugin.instance_variable_get(:@logger)).to have_received(:warn).with("Error uploading to Scalyr (will backoff-retry)",
        {
          :error_class=>"Manticore::UnknownException",
          :batch_num=>1,
@@ -112,7 +121,7 @@ describe LogStash::Outputs::Scalyr do
          :record_count=>3,
          :total_batches=>1,
          :url=>"https://agent.scalyr.com/addEvents",
-          :will_retry_in_seconds=>
+          :will_retry_in_seconds=>0.4
        }
      )
    end
@@ -141,21 +150,24 @@ describe LogStash::Outputs::Scalyr do
        'api_write_token' => '1234',
        'perform_connectivity_check' => false,
        'scalyr_server' => 'https://invalid.mitm.should.fail.test.agent.scalyr.com:443',
+        'max_retries' => 2,
+        'retry_max_interval' => 2,
+        'retry_initial_interval' => 0.2,
      })
      plugin.register
      plugin.instance_variable_set(:@running, false)
-      allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+      allow(plugin.instance_variable_get(:@logger)).to receive(:warn)
      plugin.multi_receive(sample_events)
-      expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
+      expect(plugin.instance_variable_get(:@logger)).to have_received(:warn).with("Error uploading to Scalyr (will backoff-retry)",
        {
          :error_class=>"Manticore::UnknownException",
          :batch_num=>1,
-          :message=>"
+          :message=>"Certificate for <invalid.mitm.should.fail.test.agent.scalyr.com> doesn't match any of the subject alternative names: [*.scalyr.com, scalyr.com]",
          :payload_size=>737,
          :record_count=>3,
          :total_batches=>1,
          :url=>"https://invalid.mitm.should.fail.test.agent.scalyr.com/addEvents",
-          :will_retry_in_seconds=>
+          :will_retry_in_seconds=>0.4
        }
      )
    ensure
@@ -166,19 +178,21 @@ describe LogStash::Outputs::Scalyr do
      end
    end
 
-    context "when an error occurs with retries at
+    context "when an error occurs with retries at 15" do
      it "exits after 5 retries and emits a log" do
        plugin = LogStash::Outputs::Scalyr.new({
          'api_write_token' => '1234',
          'perform_connectivity_check' => false,
-          'retry_initial_interval' => 0.1,
          'ssl_ca_bundle_path' => '/fakepath/nocerts',
-          'append_builtin_cert' => false
+          'append_builtin_cert' => false,
+          'max_retries' => 15,
+          'retry_max_interval' => 0.5,
+          'retry_initial_interval' => 0.2,
        })
        plugin.register
        allow(plugin.instance_variable_get(:@logger)).to receive(:error)
        plugin.multi_receive(sample_events)
-        expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Failed to send 3 events after
+        expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Failed to send 3 events after 15 tries.", anything
        )
      end
    end
@@ -194,7 +208,10 @@ describe LogStash::Outputs::Scalyr do
        'api_write_token' => '1234',
        'perform_connectivity_check' => false,
        'ssl_ca_bundle_path' => '/fakepath/nocerts',
-        'append_builtin_cert' => false
+        'append_builtin_cert' => false,
+        'max_retries' => 2,
+        'retry_max_interval' => 2,
+        'retry_initial_interval' => 0.2,
      })
      plugin.register
      plugin.instance_variable_set(:@running, false)
@@ -211,7 +228,7 @@ describe LogStash::Outputs::Scalyr do
          :record_count=>3,
          :total_batches=>1,
          :url=>"https://agent.scalyr.com/addEvents",
-          :will_retry_in_seconds=>
+          :will_retry_in_seconds=>0.4,
          :body=>"stubbed response"
        }
      )
@@ -227,14 +244,17 @@ describe LogStash::Outputs::Scalyr do
        'api_write_token' => '1234',
        'perform_connectivity_check' => false,
        'ssl_ca_bundle_path' => '/fakepath/nocerts',
-        'append_builtin_cert' => false
+        'append_builtin_cert' => false,
+        'max_retries' => 2,
+        'retry_max_interval' => 2,
+        'retry_initial_interval' => 0.2,
      })
      plugin.register
      plugin.instance_variable_set(:@running, false)
 
-      allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+      allow(plugin.instance_variable_get(:@logger)).to receive(:warn)
      plugin.multi_receive(sample_events)
-      expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
+      expect(plugin.instance_variable_get(:@logger)).to have_received(:warn).with("Error uploading to Scalyr (will backoff-retry)",
        {
          :error_class=>"Scalyr::Common::Client::ServerError",
          :batch_num=>1,
@@ -244,7 +264,7 @@ describe LogStash::Outputs::Scalyr do
          :record_count=>3,
          :total_batches=>1,
          :url=>"https://agent.scalyr.com/addEvents",
-          :will_retry_in_seconds=>
+          :will_retry_in_seconds=>0.4,
          :body=>"stubbed response"
        }
      )
@@ -260,14 +280,17 @@ describe LogStash::Outputs::Scalyr do
        'api_write_token' => '1234',
        'perform_connectivity_check' => false,
        'ssl_ca_bundle_path' => '/fakepath/nocerts',
-        'append_builtin_cert' => false
+        'append_builtin_cert' => false,
+        'max_retries' => 2,
+        'retry_max_interval' => 2,
+        'retry_initial_interval' => 0.2,
      })
      plugin.register
      plugin.instance_variable_set(:@running, false)
 
-      allow(plugin.instance_variable_get(:@logger)).to receive(:error)
+      allow(plugin.instance_variable_get(:@logger)).to receive(:warn)
      plugin.multi_receive(sample_events)
-      expect(plugin.instance_variable_get(:@logger)).to have_received(:error).with("Error uploading to Scalyr (will backoff-retry)",
+      expect(plugin.instance_variable_get(:@logger)).to have_received(:warn).with("Error uploading to Scalyr (will backoff-retry)",
        {
          :error_class=>"Scalyr::Common::Client::ServerError",
          :batch_num=>1,
@@ -277,7 +300,7 @@ describe LogStash::Outputs::Scalyr do
          :record_count=>3,
          :total_batches=>1,
          :url=>"https://agent.scalyr.com/addEvents",
-          :will_retry_in_seconds=>
+          :will_retry_in_seconds=>0.4,
          :body=>("0123456789" * 50) + "012345678..."
        }
      )
@@ -294,7 +317,10 @@ describe LogStash::Outputs::Scalyr do
        'api_write_token' => '1234',
        'perform_connectivity_check' => false,
        'ssl_ca_bundle_path' => '/fakepath/nocerts',
-        'append_builtin_cert' => false
+        'append_builtin_cert' => false,
+        'max_retries' => 2,
+        'retry_max_interval' => 2,
+        'retry_initial_interval' => 0.2,
      })
      plugin.register
      plugin.instance_variable_set(:@running, false)
data/spec/logstash/outputs/scalyr_spec.rb
CHANGED
@@ -56,6 +56,65 @@ describe LogStash::Outputs::Scalyr do
    events
  }
 
+  let(:sample_events_with_severity) {
+    events = []
+    for i in 0..6 do
+      # valid severity - integer
+      e = LogStash::Event.new
+      e.set('source_host', "my host #{i}")
+      e.set('source_file', "my file #{i}")
+      e.set('severity', i)
+      e.set('seq', i)
+      e.set('nested', {'a'=>1, 'b'=>[3,4,5]})
+      e.set('tags', ['t1', 't2', 't3'])
+      events.push(e)
+    end
+    for i in 0..6 do
+      # valid severity - string
+      e = LogStash::Event.new
+      e.set('source_host', "my host #{i}")
+      e.set('source_file', "my file #{i}")
+      e.set('severity', i.to_s)
+      e.set('seq', i)
+      e.set('nested', {'a'=>1, 'b'=>[3,4,5]})
+      e.set('tags', ['t1', 't2', 't3'])
+      events.push(e)
+    end
+
+    # invalid severity values
+    e = LogStash::Event.new
+    e.set('source_host', "my host a")
+    e.set('severity', -1)
+    events.push(e)
+
+    e = LogStash::Event.new
+    e.set('source_host', "my host a")
+    e.set('severity', 7)
+    events.push(e)
+
+    e = LogStash::Event.new
+    e.set('source_host', "my host a")
+    e.set('severity', "invalid")
+    events.push(e)
+
+    events
+  }
+
+  let(:sample_events_with_level) {
+    events = []
+    for i in 0..6 do
+      e = LogStash::Event.new
+      e.set('source_host', "my host #{i}")
+      e.set('source_file', "my file #{i}")
+      e.set('level', i)
+      e.set('seq', i)
+      e.set('nested', {'a'=>1, 'b'=>[3,4,5]})
+      e.set('tags', ['t1', 't2', 't3'])
+      events.push(e)
+    end
+    events
+  }
+
  describe "#build_multi_event_request_array" do
 
    context "test get_stats and send_status" do
@@ -225,6 +284,117 @@ describe LogStash::Outputs::Scalyr do
      end
    end
 
+    context "when severity field is configured" do
+      it "works correctly when severity event attribute is specified" do
+        plugin = LogStash::Outputs::Scalyr.new({
+          'api_write_token' => '1234',
+          'perform_connectivity_check' => false,
+          'severity_field' => 'severity',
+        })
+        allow(plugin).to receive(:send_status).and_return(nil)
+        plugin.register
+        result = plugin.build_multi_event_request_array(sample_events_with_severity)
+        body = JSON.parse(result[0][:body])
+        expect(body['events'].size).to eq(7 + 7 + 3)
+
+        (0..6).each do |index|
+          expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq(nil)
+          expect(body['events'][index]['attrs'].fetch('sev', nil)).to eq(nil)
+          expect(body['events'][index]['sev']).to eq(index)
+        end
+
+        (7..13).each do |index|
+          expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq(nil)
+          expect(body['events'][index]['attrs'].fetch('sev', nil)).to eq(nil)
+          expect(body['events'][index]['sev']).to eq(index - 7)
+        end
+
+        expect(body['events'][14]['attrs'].fetch('severity', nil)).to eq(-1)
+        expect(body['events'][14].key?("sev")).to eq(false)
+        expect(body['events'][14]['sev']).to eq(nil)
+        expect(body['events'][15]['attrs'].fetch('severity', nil)).to eq(7)
+        expect(body['events'][15].key?("sev")).to eq(false)
+        expect(body['events'][15]['sev']).to eq(nil)
+        expect(body['events'][16]['attrs'].fetch('severity', nil)).to eq("invalid")
+        expect(body['events'][16].key?("sev")).to eq(false)
+        expect(body['events'][16]['sev']).to eq(nil)
+      end
+
+      it "works correctly when level event attribute is specified" do
+        plugin = LogStash::Outputs::Scalyr.new({
+          'api_write_token' => '1234',
+          'perform_connectivity_check' => false,
+          'severity_field' => 'level',
+        })
+        allow(plugin).to receive(:send_status).and_return(nil)
+        plugin.register
+        result = plugin.build_multi_event_request_array(sample_events_with_level)
+        body = JSON.parse(result[0][:body])
+        expect(body['events'].size).to eq(7)
+
+        (0..6).each do |index|
+          expect(body['events'][index]['attrs'].fetch('level', nil)).to eq(nil)
+          expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq(nil)
+          expect(body['events'][index]['attrs'].fetch('sev', nil)).to eq(nil)
+          expect(body['events'][index]['sev']).to eq(index)
+        end
+      end
+
+      it "works correctly when severity event attribute is not specified" do
+        plugin = LogStash::Outputs::Scalyr.new({
+          'api_write_token' => '1234',
+          'perform_connectivity_check' => false,
+          'severity_field' => 'severity',
+        })
+        allow(plugin).to receive(:send_status).and_return(nil)
+        plugin.register
+        result = plugin.build_multi_event_request_array(sample_events)
+        body = JSON.parse(result[0][:body])
+        expect(body['events'].size).to eq(3)
+
+        (0..2).each do |index|
+          expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq(nil)
+          expect(body['events'][index]['attrs'].fetch('sev', nil)).to eq(nil)
+          expect(body['events'][index]['sev']).to eq(nil)
+        end
+      end
+
+      it "works correctly when severity event attribute is not specified but severity field is not set" do
+        # Since severity_field config option is not set, severity field should be treated as a
+        # regular event attribute and not as a special top level Event.sev field
+        plugin = LogStash::Outputs::Scalyr.new({
+          'api_write_token' => '1234',
+          'perform_connectivity_check' => false,
+          'severity_field' => nil,
+        })
+        allow(plugin).to receive(:send_status).and_return(nil)
+        plugin.register
+        result = plugin.build_multi_event_request_array(sample_events_with_severity)
+        body = JSON.parse(result[0][:body])
+        expect(body['events'].size).to eq(7 + 7 + 3)
+
+        (0..6).each do |index|
+          expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq(index)
+          expect(body['events'][index]['sev']).to eq(nil)
+        end
+
+        (7..13).each do |index|
+          expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq((index - 7).to_s)
+          expect(body['events'][index]['sev']).to eq(nil)
+        end
+
+        expect(body['events'][14]['attrs'].fetch('severity', nil)).to eq(-1)
+        expect(body['events'][14].key?("sev")).to eq(false)
+        expect(body['events'][14]['sev']).to eq(nil)
+        expect(body['events'][15]['attrs'].fetch('severity', nil)).to eq(7)
+        expect(body['events'][15].key?("sev")).to eq(false)
+        expect(body['events'][15]['sev']).to eq(nil)
+        expect(body['events'][16]['attrs'].fetch('severity', nil)).to eq("invalid")
+        expect(body['events'][16].key?("sev")).to eq(false)
+        expect(body['events'][16]['sev']).to eq(nil)
+      end
+    end
+
    context "when serverhost_field is missing" do
      it "does not contain log file" do
        plugin = LogStash::Outputs::Scalyr.new({'api_write_token' => '1234', 'perform_connectivity_check' => false})
@@ -1071,7 +1241,6 @@ describe LogStash::Outputs::Scalyr do
    it "throws on invalid hostname" do
      config = {
        'api_write_token' => '1234',
-        'perform_connectivity_check' => false,
        'scalyr_server' => 'https://agent.invalid.foo.scalyr.com',
        'perform_connectivity_check' => true
      }
@@ -1082,7 +1251,6 @@ describe LogStash::Outputs::Scalyr do
    it "throws on invalid api key" do
      config = {
        'api_write_token' => '1234',
-        'perform_connectivity_check' => false,
        'scalyr_server' => 'https://agent.scalyr.com',
        'perform_connectivity_check' => true
      }
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-scalyr
 version: !ruby/object:Gem::Version
-  version: 0.2.2
+  version: 0.2.5.beta
 platform: ruby
 authors:
 - Edward Chee
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2022-06-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -186,9 +186,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
     version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - "
+  - - ">"
     - !ruby/object:Gem::Version
-      version:
+      version: 1.3.1
 requirements: []
 rubyforge_project:
 rubygems_version: 2.7.10