logstash-output-dynatrace 0.5.0 → 0.5.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: f41e7086fbd73db7471823a9bff842ad41783ba20b1f2ba112755f251af28c0a
-   data.tar.gz: 9b645d8c4f9eb4d416dc6c873cceb8779bae7c7745e63f009f2eaee4a5b29e52
+   metadata.gz: 69fbd3982ad32849fe3d185e3edd44f9c3d071cb3f74c31d2f6d32a1e7841df5
+   data.tar.gz: 24bfb2c6599648c80295860e90eb8e6600d75ad4af8ccd5252f42b46b3beb0fc
  SHA512:
-   metadata.gz: 043e6e396b6aa6e47e7dc6f2245440e83e295deeabc866a9bd96786f5d8c0c6b3298d936a231ec05d93f438424e92090f2f1cbff2edfd23fb6e4ebbbdf108471
-   data.tar.gz: 4940d3ab0bea54b9125da346c46157c7e16bf11e4fd529ce7d80fcbc07f7695131035b38ede87c13ca489b1f65815a1004e2666593983ce3fb8800227ad2265e
+   metadata.gz: ec160bc4be7fcdeed4704cba0c084d74cfb812de161a02f72ce4178dd25232505951d9237b23f433f0b195213ad49d6d36e9ac757f7970c0014b8920728536c0
+   data.tar.gz: 3ae1b197743a9b72e2e6530a78187f2d0acf822e4baf3835625ad3d348d62593cd7e9e7100b15475a4ccfb535409d835a652a92d534553abc9685dc391055665
data/README.md CHANGED
@@ -1,6 +1,6 @@
  # Logstash Dynatrace output plugin
 
- [![Travis Build Status](https://app.travis-ci.com/dynatrace-oss/logstash-output-dynatrace.svg)](https://app.travis-ci.com/dynatrace-oss/logstash-output-dynatrace)
+ [![Travis Build Status](https://api.travis-ci.com/dynatrace-oss/logstash-output-dynatrace.svg?branch=main)](https://app.travis-ci.com/dynatrace-oss/logstash-output-dynatrace)
 
  > This project is developed and maintained by Dynatrace R&D.
 
data/lib/logstash/outputs/dynatrace.rb CHANGED
@@ -74,6 +74,9 @@ module LogStash
  # Include body in debug logs when HTTP errors occur. Body may be large and include sensitive data.
  config :debug_include_body, validate: :boolean, default: false
 
+ # Maximum payload size in bytes to send to the Dynatrace API. Batches of events which would be larger than max_payload_size when serialized are split into smaller batches of events.
+ config :max_payload_size, validate: :number, default: 4_500_000
+
  def register
    # ssl_verification_mode config is from mixin but ssl_verify_none is our documented config
    @ssl_verification_mode = 'none' if @ssl_verify_none
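The new option is set like any other plugin setting when the output is constructed. A minimal sketch mirroring the spec further down (the endpoint URL and API token below are placeholders, not real values):

```ruby
# Illustrative only: construct the output with the payload cap lowered to 2 MB.
# The URL and API token are placeholder values.
require 'logstash/outputs/dynatrace'

output = LogStash::Outputs::Dynatrace.new(
  'ingest_endpoint_url' => 'https://example.live.dynatrace.com/api/v2/logs/ingest',
  'api_key'             => 'placeholder-token',
  'max_payload_size'    => 2_000_000
)
output.register
```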
@@ -103,6 +106,35 @@ module LogStash
  end
  end
 
+ class Batcher
+   def initialize(max_batch_size)
+     @max_batch_size = max_batch_size
+     @batch_events_size = 0
+     @serialized_events = []
+   end
+
+   def offer(serialized_event)
+     # 2 square brackets, the length of all previously serialized strings, commas, and the current event size
+     batch_size_bytes = 2 + @batch_events_size + @serialized_events.length + serialized_event.length
+     return false if batch_size_bytes > @max_batch_size
+
+     @serialized_events.push(serialized_event)
+     @batch_events_size += serialized_event.length
+     true
+   end
+
+   def drain_and_serialize
+     out = "[#{@serialized_events.join(',')}]\n"
+     @batch_events_size = 0
+     @serialized_events = []
+     out
+   end
+
+   def empty?
+     @serialized_events.empty?
+   end
+ end
+
  def make_headers
    {
      'User-Agent' => "logstash-output-dynatrace/#{DynatraceConstants::VERSION} logstash/#{LOGSTASH_VERSION}",
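The size check in `Batcher#offer` counts the two enclosing brackets, the bytes already batched, one comma per event already in the batch, and the incoming event. A small standalone sketch of that arithmetic, assuming the `Batcher` class above is loaded (the 25-byte limit and the tiny JSON strings are made up for illustration):

```ruby
batcher = Batcher.new(25)
batcher.offer('{"a":1}')     # => true  (2 + 0  + 0 + 7 =  9 bytes)
batcher.offer('{"b":2}')     # => true  (2 + 7  + 1 + 7 = 17 bytes)
batcher.offer('{"c":3}')     # => true  (2 + 14 + 2 + 7 = 25 bytes)
batcher.offer('{"d":4}')     # => false (2 + 21 + 3 + 7 = 33 bytes > 25)
batcher.drain_and_serialize  # => "[{\"a\":1},{\"b\":2},{\"c\":3}]\n"
```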
@@ -135,7 +167,24 @@ module LogStash
  failures = java.util.concurrent.atomic.AtomicInteger.new(0)
 
  pending = Queue.new
- pending << [events, 0]
+ batcher = Batcher.new(@max_payload_size)
+
+ events.each do |event|
+   serialized_event = LogStash::Json.dump(event.to_hash)
+   if serialized_event.length > @max_payload_size
+     log_params = { size: serialized_event.length }
+     log_params[:body] = serialized_event if @debug_include_body
+     log_warning('Event larger than max_payload_size dropped', log_params)
+     next
+   end
+
+   next if batcher.offer(serialized_event)
+
+   pending << [batcher.drain_and_serialize, 0] unless batcher.empty?
+   batcher.offer(serialized_event)
+ end
+
+ pending << [batcher.drain_and_serialize, 0] unless batcher.empty?
 
  while popped = pending.pop
    break if popped == :done
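In `multi_receive`, each event is now serialized up front, events that can never fit are dropped with a warning, and the batcher is flushed onto the pending queue whenever the next event would overflow it. A simplified standalone restatement of that offer/flush cycle (the `send_payload` helper is hypothetical and stands in for `pending << [payload, 0]`):

```ruby
# Hypothetical, simplified version of the batching loop above.
def flush_in_batches(serialized_events, max_payload_size)
  batcher = Batcher.new(max_payload_size)

  serialized_events.each do |json|
    next if json.length > max_payload_size  # single event can never fit: drop it
    next if batcher.offer(json)             # fits into the current batch

    # Batch is full: flush it, then start a new batch with the current event.
    send_payload(batcher.drain_and_serialize) unless batcher.empty?
    batcher.offer(json)
  end

  # Flush whatever is left after the last event.
  send_payload(batcher.drain_and_serialize) unless batcher.empty?
end
```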
@@ -199,11 +248,10 @@ module LogStash
  end
 
  def send_event(event, attempt)
-   body = event_body(event)
    headers = make_headers
 
    # Create an async request
-   response = client.post(ingest_endpoint_url, body: body, headers: headers)
+   response = client.post(ingest_endpoint_url, body: event, headers: headers)
 
    if response_success?(response)
      [:success, event, attempt]
@@ -231,7 +279,7 @@ module LogStash
  end
  if @debug_include_body
    # body can be big and may have sensitive data
-   log_params[:body] = body
+   log_params[:body] = event
  end
  end
  log_failure('Could not fetch URL', log_params)
@@ -276,9 +324,9 @@ module LogStash
    @logger.error(message, opts)
  end
 
- # Format the HTTP body
- def event_body(event)
-   "#{LogStash::Json.dump(event.map(&:to_hash)).chomp}\n"
+ # This is split into a separate method mostly to help testing
+ def log_warning(message, opts)
+   @logger.warn(message, opts)
  end
  end
  end
data/spec/outputs/dynatrace_spec.rb CHANGED
@@ -123,6 +123,7 @@ describe LogStash::Outputs::Dynatrace do
    .with(ingest_endpoint_url, hash_including(:body, :headers))
    .and_call_original
  allow(subject).to receive(:log_failure).with(any_args)
+ allow(subject).to receive(:log_warning).with(any_args)
  allow(subject).to receive(:log_retryable_response).with(any_args)
  end
 
@@ -132,7 +133,7 @@ describe LogStash::Outputs::Dynatrace do
  end
  end
 
- context 'performing a get' do
+ context 'performing a request' do
    describe 'invoking the request' do
      before do
        subject.multi_receive([event])
@@ -186,6 +187,63 @@ describe LogStash::Outputs::Dynatrace do
    expect(subject).to have_received(:send_event).exactly(3).times
  end
  end
+
+ context 'with more than 4.5MB of events' do
+   before do
+     allow(subject).to receive(:send_event) { |e, att| [:success, e, att] }
+     subject.multi_receive([1, 2].map { |n| LogStash::Event.new({ 'n' => n.to_s * 2_500_001 }) })
+   end
+
+   it 'should split the chunk into multiple requests' do
+     expect(subject).to have_received(:send_event).exactly(2).times
+   end
+ end
+
+ shared_examples('send small and drop large') do
+   it 'should only send the small event' do
+     expect(subject).to have_received(:send_event).exactly(1).times
+   end
+
+   it 'should log a warning' do
+     expect(subject).to have_received(:log_warning)
+       .with('Event larger than max_payload_size dropped', hash_including(:size))
+       .exactly(:once)
+   end
+ end
+
+ context 'with one small event and one too large event' do
+   before do
+     allow(subject).to receive(:send_event) { |e, att| [:success, e, att] }
+     subject.multi_receive([LogStash::Event.new({ 'event' => 'small' }),
+                            LogStash::Event.new({ 'event' => 'n' * 4_500_001 })])
+   end
+
+   include_examples('send small and drop large')
+ end
+
+ context 'with one too large event and one small event' do
+   before do
+     allow(subject).to receive(:send_event) { |e, att| [:success, e, att] }
+     subject.multi_receive([LogStash::Event.new({ 'event' => 'n' * 4_500_001 }),
+                            LogStash::Event.new({ 'event' => 'small' })])
+   end
+
+   include_examples('send small and drop large')
+ end
+ end
+
+ context 'max_payload_size 2MB' do
+   let(:config) { { 'ingest_endpoint_url' => ingest_endpoint_url, 'api_key' => api_key, 'max_payload_size' => 2_000_000 } }
+   subject { LogStash::Outputs::Dynatrace.new(config) }
+
+   before do
+     allow(subject).to receive(:send_event) { |e, att| [:success, e, att] }
+     subject.multi_receive([1, 2].map { |n| LogStash::Event.new({ 'n' => n.to_s * 1_250_000 }) })
+   end
+
+   it 'should split the chunk into multiple requests' do
+     expect(subject).to have_received(:send_event).exactly(2).times
+   end
  end
 
  context 'on retryable unknown exception' do
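For reference, the event sizes used in the specs above line up with the payload caps as follows (rough arithmetic; field names and Logstash metadata add a little overhead on top of these counts):

```ruby
2 * 2_500_001 > 4_500_000  # => true: two ~2.5 MB events overflow the default cap, so two requests
2 * 1_250_000 > 2_000_000  # => true: two ~1.25 MB events overflow a 2 MB cap, so two requests
4_500_001 > 4_500_000      # => true: a single oversized event is dropped and a warning is logged
```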
data/version.yaml CHANGED
@@ -1 +1 @@
- logstash-output-dynatrace: '0.5.0'
+ logstash-output-dynatrace: '0.5.1'
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-dynatrace
  version: !ruby/object:Gem::Version
-   version: 0.5.0
+   version: 0.5.1
  platform: ruby
  authors:
  - Dynatrace Open Source Engineering
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-08-09 00:00:00.000000000 Z
+ date: 2023-09-26 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: logstash-codec-json