logstash-output-scalyr 0.2.4.beta → 0.2.5.beta

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 8e192f378cc4b5209522208cf6191907f55b75dd8296e14dd752576de5caed7c
- data.tar.gz: 786ba490ef419bc0d65aa369362d5b5584969c7f6349ef5a510ac6b79d20bf61
+ metadata.gz: a1f4ab67bba48557efab9493bdd6e85ef80ba6aab10f1b21e029acc78e69fdc7
+ data.tar.gz: c081bfd418f30502ca4fde78f8d8e15316ea18bd8f2834adff004d40913f2c39
  SHA512:
- metadata.gz: 14ec39abfc55058e5a0ce58b2be1000268c74984be5c625d5b65521286af3e99c5f4a995f5c996d6ec2c8318084bfac8fec60c519a49700f045cce50fc17675a
- data.tar.gz: f06c9b47b63f8b95cdba01f60d376982cb85f5630971612487c9044f2e216e2b7aa01d750d8a340e45fea422dee27c93bbb0f3deb0c10656ac2af04ac6394f69
+ metadata.gz: c1e6019904b05043552ea46d303d861f3a539c94fa644686d5a12ecd5bf40a69ec1a9afde09589a676ccecc0b03ee737bc24456ca5b19026b580421c18ae6605
+ data.tar.gz: ce5b38bbff35964ce84062fd647645a222958e72ea3a7f973900b182edb8e527f045dedac15668817898f6119e32057927f102b07bdffdfceb3099ea4be47beb
data/CHANGELOG.md CHANGED
@@ -1,5 +1,22 @@
  # Beta
 
+ ## 0.2.5.beta
+
+ * Allow the user to specify a value for the DataSet event severity (``sev``) field. "sev" is a
+   special top level event field which denotes the event severity (log level).
+
+   To enable this functionality, the user needs to configure the ``severity_field`` plugin config
+   option and set it to the Logstash event field which carries the severity value. This field value
+   needs to be an integer with a value from 0 to 6 (inclusive).
+ * Upgrade dependencies (manticore -> 0.9.1, jrjackson -> 0.4.15).
+ * Fix experimental ``zstandard`` support.
+
+   NOTE: For zstandard compression to be used, the zstd / libzstd system package needs to be installed
+   (https://github.com/msievers/zstandard-ruby/#examples-for-installing-libzstd) and the ``zstandard``
+   gem needs to be installed inside the Logstash jRuby environment
+   (e.g. ``/usr/share/logstash/bin/ruby -S /usr/share/logstash/vendor/jruby/bin/gem install
+   zstandard ; echo 'gem "zstandard"' >> /opt/logstash/Gemfile``).
+
  ## 0.2.4.beta
 
  * Experimental zstandard support - in development, not to be used in production.
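For context, a minimal output configuration enabling the new option described in the 0.2.5.beta entry above might look like the following sketch (it mirrors the README example further down; `SCALYR_API_KEY` is a placeholder, not a real token):

```
output {
  scalyr {
    api_write_token => 'SCALYR_API_KEY'
    severity_field => 'severity'
  }
}
```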
data/Gemfile CHANGED
@@ -20,5 +20,5 @@ end
  gem 'pry'
  gem 'pry-nav'
  gem 'quantile', '~> 0.2.1'
- gem 'manticore', '~> 0.7.1', platform: :jruby
- gem 'jrjackson', '~> 0.4.14', platform: :jruby
+ gem 'manticore', '~> 0.9.1', platform: :jruby
+ gem 'jrjackson', '~> 0.4.15', platform: :jruby
data/README.md CHANGED
@@ -86,6 +86,40 @@ output {
  }
  ```
 
+ ## Notes on severity (sev) attribute handling
+
+ ``sev`` is a special top level DataSet event field which denotes the event severity / log level.
+
+ To enable this functionality, the user needs to define the ``severity_field`` plugin config option. This
+ config option tells the plugin which Logstash event field carries the value for the severity field.
+
+ The actual value needs to be an integer between 0 and 6 inclusive. Those values are mapped to
+ different severity / log levels on the DataSet server side as shown below:
+
+ - 0 -> finest
+ - 1 -> trace
+ - 2 -> debug
+ - 3 -> info
+ - 4 -> warning
+ - 5 -> error
+ - 6 -> fatal / emergency / critical
+
+ ```
+ output {
+   scalyr {
+     api_write_token => 'SCALYR_API_KEY'
+     ...
+     severity_field => 'severity'
+   }
+ }
+ ```
+
+ In the example above, the value for the DataSet severity field should be included in the ``severity``
+ Logstash event field.
+
+ In case the field value doesn't contain a valid severity number (0 - 6), the ``sev`` field won't be
+ set on the event object, to prevent the API from rejecting the request as invalid.
+
  ## Options
 
  - The Scalyr API write token, these are available at https://www.scalyr.com/keys. This is the only compulsory configuration field required for proper upload
@@ -60,6 +60,22 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  # (Warning: events with an existing 'logfile' field, it will be overwritten)
  config :logfile_field, :validate => :string, :default => 'logfile'
 
+ # Record field which includes the value for the "severity" field. severity is a special field which tells
+ # Scalyr the severity / log level for a particular event. This field is a top level event field and not an
+ # event attribute field. The actual field value must be an integer and is mapped to a severity /
+ # log level on the DataSet server side as shown below:
+ #
+ # - 0 -> finest
+ # - 1 -> trace
+ # - 2 -> debug
+ # - 3 -> info
+ # - 4 -> warning
+ # - 5 -> error
+ # - 6 -> fatal / emergency / critical
+ #
+ # By default, if the Event contains no severity field, a default value of 3 (info) will be used.
+ config :severity_field, :validate => :string, :default => nil
+
  # The Scalyr Output Plugin expects the main log message to be contained in the Event['message']. If your main log
  # content is contained in a different field, specify it here. It will be renamed to 'message' before upload.
  # (Warning: events with an existing 'message' field, it will be overwritten)
@@ -130,7 +146,8 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  # Valid options are bz2, deflate, or none.
  config :compression_type, :validate => :string, :default => 'deflate'
 
- # An int containing the compression level of compression to use, from 1-9. Defaults to 6
+ # An int containing the compression level of compression to use, from 1-9. Defaults to 6. Only
+ # applicable when compression type is "deflate" or "bz2".
  config :compression_level, :validate => :number, :default => 6
 
  # How often to log and report status metrics to Scalyr. Defaults to every 5
@@ -495,9 +512,9 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  :will_retry_in_seconds => sleep_interval,
  }
  exc_data[:code] = e.code if e.code
- if @logger.debug? and e.body
+ if @logger.debug? and defined?(e.body) and e.body
  exc_data[:body] = e.body
- elsif e.body
+ elsif defined?(e.body) and e.body
  exc_data[:body] = Scalyr::Common::Util.truncate(e.body, 512)
  end
  exc_data[:payload] = "\tSample payload: #{request[:body][0,1024]}..." if @logger.debug?
@@ -722,6 +739,11 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  # Rename user-specified logfile field -> 'logfile'
  rename.call(@logfile_field, 'logfile')
 
+ # Rename user-specified severity field -> 'severity' (if configured)
+ if not @severity_field.nil?
+   rename.call(@severity_field, 'severity')
+ end
+
  # Remove "host" attribute
  if @remove_host_attribute_from_events and record.key? "host"
  record.delete("host")
@@ -766,6 +788,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  logs[log_identifier]['attrs']['logfile'] = record['logfile']
  record.delete('logfile')
  end
+
  if @log_constants
  @log_constants.each {|log_constant|
  if record.key? log_constant
@@ -827,6 +850,21 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  end
  end
 
+ severity = record['severity']
+ severity_int = nil
+
+ # Server won't accept the payload in case the severity value is not valid. To avoid events
+ # being dropped, we only set the Event.sev field in case this field contains a valid value.
+ if not @severity_field.nil? and severity and severity.is_integer?
+   severity_int = severity.to_i
+
+   if severity_int >= 0 and severity_int <= 6
+     record.delete('severity')
+   else
+     severity_int = nil
+   end
+ end
+
  # Use LogStash event.timestamp as the 'ts' Scalyr timestamp. Note that this may be overwritten by input
  # filters so may not necessarily reflect the actual originating timestamp.
  scalyr_event = {
@@ -840,6 +878,11 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  scalyr_event[:log] = logs_ids[log_identifier]
  end
 
+ # optionally set severity (if available and valid)
+ if @severity_field and not severity_int.nil?
+   scalyr_event[:sev] = severity_int
+ end
+
  if @estimate_each_event_size
  # get json string of event to keep track of how many bytes we are sending
  begin
@@ -1104,7 +1147,7 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  begin
  client_session.post_add_events(multi_event_request[:body], true, 0)
  rescue => e
- if e.body
+ if defined?(e.body) and e.body
  @logger.warn(
  "Unexpected error occurred while uploading status to Scalyr",
  :error_message => e.message,
@@ -1170,3 +1213,15 @@ class LogStash::Outputs::Scalyr < LogStash::Outputs::Base
  !execution_context.dlq_writer.inner_writer.is_a?(::LogStash::Util::DummyDeadLetterQueueWriter)
  end
  end
+
+ class String
+   def is_integer?
+     self.to_i.to_s == self
+   end
+ end
+
+ class Integer
+   def is_integer?
+     true
+   end
+ end
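As an illustration of the validation logic added above, here is a minimal standalone Ruby sketch (the `normalize_severity` helper and the sample values are hypothetical and not part of the plugin; the range check mirrors the 0-6 mapping documented in the README):

```
# Hypothetical helper sketching the severity validation above: accept integers or
# integer-like strings in the 0..6 range and return nil for everything else.
def normalize_severity(value)
  return nil if value.nil?
  str = value.to_s
  return nil unless str.to_i.to_s == str  # same idea as String#is_integer? above
  sev = str.to_i
  (0..6).include?(sev) ? sev : nil
end

[3, "5", -1, 7, "invalid", nil].each do |value|
  puts "#{value.inspect} -> #{normalize_severity(value).inspect}"
end
# => 3 -> 3, "5" -> 5, -1 -> nil, 7 -> nil, "invalid" -> nil, nil -> nil
```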
@@ -305,13 +305,13 @@ class ClientSession
  # don't directly depend on this gem and it's up to the user to install
  # both dependencies manually in case they want to use zstandard.
  begin
- gem 'zstandard'
+ require 'zstandard'
  rescue LoadError
  raise SystemExit, "zstandard gem is missing. If you want to use zstandard compression you need to make sure the zstandard and libzstd dependencies are installed. See TODO for more information."
  end
 
  encoding = 'zstandard'
- compressed_body = Zstandard.deflate(string)
+ compressed_body = Zstandard.deflate(body)
  end
  end_time = Time.now.to_f
  compression_duration = end_time - start_time
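For reference, a minimal compression round-trip sketch with the `zstandard` gem (assuming libzstd and the gem are installed as described in the changelog entry above; the payload string is arbitrary sample data):

```
require 'zstandard'

payload = '{"events": []}' * 100
compressed = Zstandard.deflate(payload)  # same call the plugin uses above
restored = Zstandard.inflate(compressed)

puts "original: #{payload.bytesize} bytes, compressed: #{compressed.bytesize} bytes"
puts restored == payload  # => true
```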
@@ -1,5 +1,5 @@
  # encoding: utf-8
- PLUGIN_VERSION = "v0.2.4.beta"
+ PLUGIN_VERSION = "v0.2.5.beta"
 
  # Special event level attribute name which can be used for setting event level serverHost attribute
  EVENT_LEVEL_SERVER_HOST_ATTRIBUTE_NAME = '__origServerHost'
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-output-scalyr'
- s.version = '0.2.4.beta'
+ s.version = '0.2.5.beta'
  s.licenses = ['Apache-2.0']
  s.summary = "Scalyr output plugin for Logstash"
  s.description = "Sends log data collected by Logstash to Scalyr (https://www.scalyr.com)"
@@ -39,6 +39,9 @@ describe LogStash::Outputs::Scalyr do
  plugin = LogStash::Outputs::Scalyr.new({
  'api_write_token' => '1234',
  'perform_connectivity_check' => false,
+ 'max_retries' => 2,
+ 'retry_max_interval' => 2,
+ 'retry_initial_interval' => 0.2,
  })
  plugin.register
  plugin.instance_variable_set(:@running, false)
@@ -54,7 +57,7 @@ describe LogStash::Outputs::Scalyr do
  :record_count=>3,
  :total_batches=>1,
  :url=>"https://agent.scalyr.com/addEvents",
- :will_retry_in_seconds=>2,
+ :will_retry_in_seconds=>0.4,
  :body=>"{\n \"message\": \"Couldn't decode API token ...234.\",\n \"status\": \"error/client/badParam\"\n}"
  }
  )
@@ -68,6 +71,9 @@ describe LogStash::Outputs::Scalyr do
  'perform_connectivity_check' => false,
  'ssl_ca_bundle_path' => '/fakepath/nocerts',
  'append_builtin_cert' => false,
+ 'max_retries' => 2,
+ 'retry_max_interval' => 2,
+ 'retry_initial_interval' => 0.2,
  })
  plugin.register
  plugin.instance_variable_set(:@running, false)
@@ -82,7 +88,7 @@ describe LogStash::Outputs::Scalyr do
  :record_count=>3,
  :total_batches=>1,
  :url=>"https://agent.scalyr.com/addEvents",
- :will_retry_in_seconds=>2
+ :will_retry_in_seconds=>0.4
  }
  )
  end
@@ -98,6 +104,9 @@ describe LogStash::Outputs::Scalyr do
  'api_write_token' => '1234',
  'perform_connectivity_check' => false,
  'append_builtin_cert' => false,
+ 'max_retries' => 2,
+ 'retry_max_interval' => 2,
+ 'retry_initial_interval' => 0.2,
  })
  plugin.register
  plugin.instance_variable_set(:@running, false)
@@ -112,7 +121,7 @@ describe LogStash::Outputs::Scalyr do
  :record_count=>3,
  :total_batches=>1,
  :url=>"https://agent.scalyr.com/addEvents",
- :will_retry_in_seconds=>2
+ :will_retry_in_seconds=>0.4
  }
  )
  end
@@ -141,6 +150,9 @@ describe LogStash::Outputs::Scalyr do
  'api_write_token' => '1234',
  'perform_connectivity_check' => false,
  'scalyr_server' => 'https://invalid.mitm.should.fail.test.agent.scalyr.com:443',
+ 'max_retries' => 2,
+ 'retry_max_interval' => 2,
+ 'retry_initial_interval' => 0.2,
  })
  plugin.register
  plugin.instance_variable_set(:@running, false)
@@ -150,12 +162,12 @@ describe LogStash::Outputs::Scalyr do
  {
  :error_class=>"Manticore::UnknownException",
  :batch_num=>1,
- :message=>"Host name 'invalid.mitm.should.fail.test.agent.scalyr.com' does not match the certificate subject provided by the peer (CN=*.scalyr.com)",
+ :message=>"Certificate for <invalid.mitm.should.fail.test.agent.scalyr.com> doesn't match any of the subject alternative names: [*.scalyr.com, scalyr.com]",
  :payload_size=>737,
  :record_count=>3,
  :total_batches=>1,
  :url=>"https://invalid.mitm.should.fail.test.agent.scalyr.com/addEvents",
- :will_retry_in_seconds=>2
+ :will_retry_in_seconds=>0.4
  }
  )
  ensure
@@ -171,9 +183,11 @@ describe LogStash::Outputs::Scalyr do
  plugin = LogStash::Outputs::Scalyr.new({
  'api_write_token' => '1234',
  'perform_connectivity_check' => false,
- 'retry_initial_interval' => 0.1,
  'ssl_ca_bundle_path' => '/fakepath/nocerts',
- 'append_builtin_cert' => false
+ 'append_builtin_cert' => false,
+ 'max_retries' => 15,
+ 'retry_max_interval' => 0.5,
+ 'retry_initial_interval' => 0.2,
  })
  plugin.register
  allow(plugin.instance_variable_get(:@logger)).to receive(:error)
@@ -194,7 +208,10 @@ describe LogStash::Outputs::Scalyr do
  'api_write_token' => '1234',
  'perform_connectivity_check' => false,
  'ssl_ca_bundle_path' => '/fakepath/nocerts',
- 'append_builtin_cert' => false
+ 'append_builtin_cert' => false,
+ 'max_retries' => 2,
+ 'retry_max_interval' => 2,
+ 'retry_initial_interval' => 0.2,
  })
  plugin.register
  plugin.instance_variable_set(:@running, false)
@@ -211,7 +228,7 @@ describe LogStash::Outputs::Scalyr do
  :record_count=>3,
  :total_batches=>1,
  :url=>"https://agent.scalyr.com/addEvents",
- :will_retry_in_seconds=>2,
+ :will_retry_in_seconds=>0.4,
  :body=>"stubbed response"
  }
  )
@@ -227,7 +244,10 @@ describe LogStash::Outputs::Scalyr do
  'api_write_token' => '1234',
  'perform_connectivity_check' => false,
  'ssl_ca_bundle_path' => '/fakepath/nocerts',
- 'append_builtin_cert' => false
+ 'append_builtin_cert' => false,
+ 'max_retries' => 2,
+ 'retry_max_interval' => 2,
+ 'retry_initial_interval' => 0.2,
  })
  plugin.register
  plugin.instance_variable_set(:@running, false)
@@ -244,7 +264,7 @@ describe LogStash::Outputs::Scalyr do
  :record_count=>3,
  :total_batches=>1,
  :url=>"https://agent.scalyr.com/addEvents",
- :will_retry_in_seconds=>2,
+ :will_retry_in_seconds=>0.4,
  :body=>"stubbed response"
  }
  )
@@ -260,7 +280,10 @@ describe LogStash::Outputs::Scalyr do
  'api_write_token' => '1234',
  'perform_connectivity_check' => false,
  'ssl_ca_bundle_path' => '/fakepath/nocerts',
- 'append_builtin_cert' => false
+ 'append_builtin_cert' => false,
+ 'max_retries' => 2,
+ 'retry_max_interval' => 2,
+ 'retry_initial_interval' => 0.2,
  })
  plugin.register
  plugin.instance_variable_set(:@running, false)
@@ -277,7 +300,7 @@ describe LogStash::Outputs::Scalyr do
  :record_count=>3,
  :total_batches=>1,
  :url=>"https://agent.scalyr.com/addEvents",
- :will_retry_in_seconds=>2,
+ :will_retry_in_seconds=>0.4,
  :body=>("0123456789" * 50) + "012345678..."
  }
  )
@@ -294,7 +317,10 @@ describe LogStash::Outputs::Scalyr do
  'api_write_token' => '1234',
  'perform_connectivity_check' => false,
  'ssl_ca_bundle_path' => '/fakepath/nocerts',
- 'append_builtin_cert' => false
+ 'append_builtin_cert' => false,
+ 'max_retries' => 2,
+ 'retry_max_interval' => 2,
+ 'retry_initial_interval' => 0.2,
  })
  plugin.register
  plugin.instance_variable_set(:@running, false)
@@ -56,6 +56,65 @@ describe LogStash::Outputs::Scalyr do
  events
  }
 
+ let(:sample_events_with_severity) {
+   events = []
+   for i in 0..6 do
+     # valid severity - integer
+     e = LogStash::Event.new
+     e.set('source_host', "my host #{i}")
+     e.set('source_file', "my file #{i}")
+     e.set('severity', i)
+     e.set('seq', i)
+     e.set('nested', {'a'=>1, 'b'=>[3,4,5]})
+     e.set('tags', ['t1', 't2', 't3'])
+     events.push(e)
+   end
+   for i in 0..6 do
+     # valid severity - string
+     e = LogStash::Event.new
+     e.set('source_host', "my host #{i}")
+     e.set('source_file', "my file #{i}")
+     e.set('severity', i.to_s)
+     e.set('seq', i)
+     e.set('nested', {'a'=>1, 'b'=>[3,4,5]})
+     e.set('tags', ['t1', 't2', 't3'])
+     events.push(e)
+   end
+
+   # invalid severity values
+   e = LogStash::Event.new
+   e.set('source_host', "my host a")
+   e.set('severity', -1)
+   events.push(e)
+
+   e = LogStash::Event.new
+   e.set('source_host', "my host a")
+   e.set('severity', 7)
+   events.push(e)
+
+   e = LogStash::Event.new
+   e.set('source_host', "my host a")
+   e.set('severity', "invalid")
+   events.push(e)
+
+   events
+ }
+
+ let(:sample_events_with_level) {
+   events = []
+   for i in 0..6 do
+     e = LogStash::Event.new
+     e.set('source_host', "my host #{i}")
+     e.set('source_file', "my file #{i}")
+     e.set('level', i)
+     e.set('seq', i)
+     e.set('nested', {'a'=>1, 'b'=>[3,4,5]})
+     e.set('tags', ['t1', 't2', 't3'])
+     events.push(e)
+   end
+   events
+ }
+
  describe "#build_multi_event_request_array" do
 
  context "test get_stats and send_status" do
@@ -225,6 +284,117 @@ describe LogStash::Outputs::Scalyr do
  end
  end
 
+ context "when severity field is configured" do
+   it "works correctly when severity event attribute is specified" do
+     plugin = LogStash::Outputs::Scalyr.new({
+       'api_write_token' => '1234',
+       'perform_connectivity_check' => false,
+       'severity_field' => 'severity',
+     })
+     allow(plugin).to receive(:send_status).and_return(nil)
+     plugin.register
+     result = plugin.build_multi_event_request_array(sample_events_with_severity)
+     body = JSON.parse(result[0][:body])
+     expect(body['events'].size).to eq(7 + 7 + 3)
+
+     (0..6).each do |index|
+       expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq(nil)
+       expect(body['events'][index]['attrs'].fetch('sev', nil)).to eq(nil)
+       expect(body['events'][index]['sev']).to eq(index)
+     end
+
+     (7..13).each do |index|
+       expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq(nil)
+       expect(body['events'][index]['attrs'].fetch('sev', nil)).to eq(nil)
+       expect(body['events'][index]['sev']).to eq(index - 7)
+     end
+
+     expect(body['events'][14]['attrs'].fetch('severity', nil)).to eq(-1)
+     expect(body['events'][14].key?("sev")).to eq(false)
+     expect(body['events'][14]['sev']).to eq(nil)
+     expect(body['events'][15]['attrs'].fetch('severity', nil)).to eq(7)
+     expect(body['events'][15].key?("sev")).to eq(false)
+     expect(body['events'][15]['sev']).to eq(nil)
+     expect(body['events'][16]['attrs'].fetch('severity', nil)).to eq("invalid")
+     expect(body['events'][16].key?("sev")).to eq(false)
+     expect(body['events'][16]['sev']).to eq(nil)
+   end
+
+   it "works correctly when level event attribute is specified" do
+     plugin = LogStash::Outputs::Scalyr.new({
+       'api_write_token' => '1234',
+       'perform_connectivity_check' => false,
+       'severity_field' => 'level',
+     })
+     allow(plugin).to receive(:send_status).and_return(nil)
+     plugin.register
+     result = plugin.build_multi_event_request_array(sample_events_with_level)
+     body = JSON.parse(result[0][:body])
+     expect(body['events'].size).to eq(7)
+
+     (0..6).each do |index|
+       expect(body['events'][index]['attrs'].fetch('level', nil)).to eq(nil)
+       expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq(nil)
+       expect(body['events'][index]['attrs'].fetch('sev', nil)).to eq(nil)
+       expect(body['events'][index]['sev']).to eq(index)
+     end
+   end
+
+   it "works correctly when severity event attribute is not specified" do
+     plugin = LogStash::Outputs::Scalyr.new({
+       'api_write_token' => '1234',
+       'perform_connectivity_check' => false,
+       'severity_field' => 'severity',
+     })
+     allow(plugin).to receive(:send_status).and_return(nil)
+     plugin.register
+     result = plugin.build_multi_event_request_array(sample_events)
+     body = JSON.parse(result[0][:body])
+     expect(body['events'].size).to eq(3)
+
+     (0..2).each do |index|
+       expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq(nil)
+       expect(body['events'][index]['attrs'].fetch('sev', nil)).to eq(nil)
+       expect(body['events'][index]['sev']).to eq(nil)
+     end
+   end
+
+   it "works correctly when severity event attribute is specified but severity_field config option is not set" do
+     # Since the severity_field config option is not set, the severity field should be treated as a
+     # regular event attribute and not as a special top level Event.sev field
+     plugin = LogStash::Outputs::Scalyr.new({
+       'api_write_token' => '1234',
+       'perform_connectivity_check' => false,
+       'severity_field' => nil,
+     })
+     allow(plugin).to receive(:send_status).and_return(nil)
+     plugin.register
+     result = plugin.build_multi_event_request_array(sample_events_with_severity)
+     body = JSON.parse(result[0][:body])
+     expect(body['events'].size).to eq(7 + 7 + 3)
+
+     (0..6).each do |index|
+       expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq(index)
+       expect(body['events'][index]['sev']).to eq(nil)
+     end
+
+     (7..13).each do |index|
+       expect(body['events'][index]['attrs'].fetch('severity', nil)).to eq((index - 7).to_s)
+       expect(body['events'][index]['sev']).to eq(nil)
+     end
+
+     expect(body['events'][14]['attrs'].fetch('severity', nil)).to eq(-1)
+     expect(body['events'][14].key?("sev")).to eq(false)
+     expect(body['events'][14]['sev']).to eq(nil)
+     expect(body['events'][15]['attrs'].fetch('severity', nil)).to eq(7)
+     expect(body['events'][15].key?("sev")).to eq(false)
+     expect(body['events'][15]['sev']).to eq(nil)
+     expect(body['events'][16]['attrs'].fetch('severity', nil)).to eq("invalid")
+     expect(body['events'][16].key?("sev")).to eq(false)
+     expect(body['events'][16]['sev']).to eq(nil)
+   end
+ end
+
  context "when serverhost_field is missing" do
  it "does not contain log file" do
  plugin = LogStash::Outputs::Scalyr.new({'api_write_token' => '1234', 'perform_connectivity_check' => false})
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-scalyr
  version: !ruby/object:Gem::Version
- version: 0.2.4.beta
+ version: 0.2.5.beta
  platform: ruby
  authors:
  - Edward Chee
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-04-21 00:00:00.000000000 Z
+ date: 2022-06-21 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement