logstash-output-newrelic 1.5.0 → 1.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 25593f8efc45dce5d2366a19ce89a4d722f6e5a55fdb3d6d8cad607d551dcbd7
-   data.tar.gz: d729dc031d6276a9dafdd1ae5a7615bd378993437a36b6828c540fc901839c88
+   metadata.gz: b9339803f78539fb9240937234cc69240b289f8050c9ddfd37210f423570d828
+   data.tar.gz: 14746a11f4c482bebe359504f9a66987bb6d500fb6e4b38f2d839a508aa4d62b
  SHA512:
-   metadata.gz: 82c061a523aab278574d91373f8691c1499d0560b04e96dd8a8f178b14438eaceb7236dc62cea8ca6ac15d56e9ffa7acdfc2534338d4ae00306b0ef3e47b9f58
-   data.tar.gz: a6b92e08674d6d67e04d9ca2cb5bc0007c717e43c07c888399f715e4c5de546a58cf8237ae9128b742d5cd96ba7b9d52142e11875d1e02b9649af7e515e5e3a0
+   metadata.gz: 7cf438c882e3017df2597038d8812110d2fd8037c57c3d5e0440e1f11396143fa4aa52b1ad639d617cc32fb0c7ed05ac148bf70088d1e9dfab8b4170bf136f43
+   data.tar.gz: b6c5a7c7acf89da4da50a1e675a9339ccc8f17cd5bce2b4e0662f7be1ee4211ecce1ad0b120036084938bdf388aee3224ab6fdf5b83625856cc0a8caa8dbc38a
@@ -11,7 +11,6 @@ require_relative './config/bigdecimal_patch'
  require_relative './exception/error'

  class LogStash::Outputs::NewRelic < LogStash::Outputs::Base
-   java_import java.util.concurrent.Executors;

    RETRIABLE_CODES = Set[408, 429, 500, 502, 503, 504, 599]

@@ -43,7 +42,14 @@ class LogStash::Outputs::NewRelic < LogStash::Outputs::Base
      'Content-Encoding' => 'gzip',
      'Content-Type' => 'application/json'
    }.merge(auth).freeze
+
+     # We use a semaphore to ensure that at most @concurrent_requests in-flight Logstash requests are being
+     # processed by our plugin at any one time. Without it, because @executor.submit() is asynchronous, an
+     # unbounded number of in-flight requests could accumulate: Logstash would believe our plugin had already
+     # processed each request and would keep reading further requests into memory. That unbounded buffering
+     # is a memory leak and eventually results in an OutOfMemoryError.
    @executor = java.util.concurrent.Executors.newFixedThreadPool(@concurrent_requests)
+     @semaphore = java.util.concurrent.Semaphore.new(@concurrent_requests)
    end

    # Used by tests so that the test run can complete (background threads prevent JVM exit)
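
The comment introduced above describes a classic backpressure fix: the producer must block once the pool is saturated, instead of queueing work without bound. A minimal standalone JRuby sketch of the same pattern; the pool size, the loop, and the sleep stand-in are illustrative, not the plugin's actual code:

  require 'java'

  pool_size = 4  # illustrative; the plugin sizes both objects from @concurrent_requests
  executor  = java.util.concurrent.Executors.newFixedThreadPool(pool_size)
  semaphore = java.util.concurrent.Semaphore.new(pool_size)
  submit    = executor.java_method :submit, [java.lang.Runnable]

  (1..100).each do |batch|
    semaphore.acquire          # blocks the producer once pool_size tasks are in flight
    submit.call do
      begin
        sleep 0.01             # stand-in for compressing and sending one batch
      ensure
        semaphore.release      # always return the permit, even if the work raises
      end
    end
  end
  executor.shutdown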
@@ -97,7 +103,19 @@ class LogStash::Outputs::NewRelic < LogStash::Outputs::Base

    nr_logs = to_nr_logs(logstash_events)

-     package_and_send_recursively(nr_logs)
+     submit_logs_to_be_sent(nr_logs)
+   end
+
+   def submit_logs_to_be_sent(nr_logs)
+     @semaphore.acquire()
+     execute = @executor.java_method :submit, [java.lang.Runnable]
+     execute.call do
+       begin
+         package_and_send_recursively(nr_logs)
+       ensure
+         @semaphore.release()
+       end
+     end
    end

    def package_and_send_recursively(nr_logs)
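
A note on the java_method call retained here: ExecutorService#submit is overloaded (Runnable and Callable variants), so handing JRuby a bare block would leave it to guess the overload. Binding the exact signature coerces the block to a Runnable unambiguously. In isolation, assuming an executor is already in scope:

  submit_runnable = executor.java_method :submit, [java.lang.Runnable]
  future = submit_runnable.call { puts "runs on a pool thread" }  # block becomes a java.lang.Runnable
  future.get  # blocks until the task finishes; submit(Runnable) futures resolve to nil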
@@ -113,27 +131,24 @@ class LogStash::Outputs::NewRelic < LogStash::Outputs::Base
        :logs => nr_logs
      }

-     execute = @executor.java_method :submit, [java.lang.Runnable]
-     execute.call do
-       compressed_payload = StringIO.new
-       gzip = Zlib::GzipWriter.new(compressed_payload)
-       gzip << [payload].to_json
-       gzip.close
-
-       compressed_size = compressed_payload.string.bytesize
-       log_record_count = nr_logs.length
-
-       if compressed_size >= MAX_PAYLOAD_SIZE_BYTES && log_record_count == 1
-         @logger.error("Can't compress record below required maximum packet size and it will be discarded.")
-       elsif compressed_size >= MAX_PAYLOAD_SIZE_BYTES && log_record_count > 1
-         @logger.debug("Compressed payload size (#{compressed_size}) exceeds maximum packet size (1MB) and will be split in two.")
-         split_index = log_record_count / 2
-         package_and_send_recursively(nr_logs[0...split_index])
-         package_and_send_recursively(nr_logs[split_index..-1])
-       else
-         @logger.debug("Payload compressed size: #{compressed_size}")
-         nr_send(compressed_payload.string)
-       end
+     compressed_payload = StringIO.new
+     gzip = Zlib::GzipWriter.new(compressed_payload)
+     gzip << [payload].to_json
+     gzip.close
+
+     compressed_size = compressed_payload.string.bytesize
+     log_record_count = nr_logs.length
+
+     if compressed_size >= MAX_PAYLOAD_SIZE_BYTES && log_record_count == 1
+       @logger.error("Can't compress record below required maximum packet size and it will be discarded.")
+     elsif compressed_size >= MAX_PAYLOAD_SIZE_BYTES && log_record_count > 1
+       @logger.debug("Compressed payload size (#{compressed_size}) exceeds maximum packet size (1MB) and will be split in two.")
+       split_index = log_record_count / 2
+       package_and_send_recursively(nr_logs[0...split_index])
+       package_and_send_recursively(nr_logs[split_index..-1])
+     else
+       @logger.debug("Payload compressed size: #{compressed_size}")
+       nr_send(compressed_payload.string)
      end
    end

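The change above moves compression and splitting out of the executor block (submission now happens once, in submit_logs_to_be_sent), but the algorithm is unchanged: gzip the whole batch, and if the result is at or over the limit with more than one record, halve the batch and recurse on each half; a single record that still cannot fit is dropped with an error, which is what guarantees termination. A self-contained sketch of the measurement and the halving arithmetic, assuming a 10**6-byte limit (the real MAX_PAYLOAD_SIZE_BYTES constant is defined elsewhere in the plugin):

  require 'zlib'
  require 'stringio'
  require 'json'

  MAX_PAYLOAD_SIZE_BYTES = 1_000_000  # assumption matching the "(1MB)" in the log message

  # Gzip a batch the way the plugin does and report its on-the-wire size.
  def gzipped_bytesize(payload)
    io = StringIO.new
    gz = Zlib::GzipWriter.new(io)
    gz << [payload].to_json
    gz.close
    io.string.bytesize
  end

  # Halving a 5-record batch: [0...2] and [2..-1] cover every record exactly once.
  logs = (1..5).to_a
  split_index = logs.length / 2                               # => 2
  left, right = logs[0...split_index], logs[split_index..-1]  # => [1, 2], [3, 4, 5]
  puts gzipped_bytesize({ :logs => logs })                    # tiny batches compress far below the limit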
@@ -161,6 +176,9 @@ class LogStash::Outputs::NewRelic < LogStash::Outputs::Base
    @header.each { |k, v| request[k] = v }
    request.body = payload
    handle_response(http.request(request))
+     if (retries > 0)
+       @logger.warn("Successfully sent logs at retry #{retries}")
+     end
  rescue Error::BadResponseCodeError => e
    @logger.error(e.message)
    if (should_retry(retries) && is_retryable_code(e))
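
The new warn fires on the success path, so operators can tell when a batch only went through after retrying. The full retry loop sits outside this hunk; below is a generic, self-contained sketch of bounded retry with the same success-after-retry warning. This is not the plugin's code: the loop shape, send_with_retries, and MAX_RETRIES (standing in for the max_retries setting) are all illustrative.

  require 'set'

  RETRIABLE_CODES = Set[408, 429, 500, 502, 503, 504, 599]  # mirrors the plugin constant
  MAX_RETRIES = 3                                           # stand-in for the max_retries setting

  def send_with_retries(responses)
    retries = 0
    loop do
      status = responses.shift  # stand-in for the real HTTP round trip
      if status == 200
        warn "Successfully sent logs at retry #{retries}" if retries > 0
        return true
      elsif RETRIABLE_CODES.include?(status) && retries < MAX_RETRIES
        retries += 1            # retriable failure: try again
      else
        return false            # non-retriable code, or retries exhausted
      end
    end
  end

  send_with_retries([500, 500, 200])  # => true, after warning at retry 2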
@@ -1,7 +1,7 @@
  module LogStash
    module Outputs
      module NewRelicVersion
-       VERSION = "1.5.0"
+       VERSION = "1.5.1"
      end
    end
  end
@@ -343,6 +343,28 @@ describe LogStash::Outputs::NewRelic do
        .with { |request| single_gzipped_message(request.body)['message'] == 'Test message 1' })
        .to have_been_made.times(2)
      end
+
+     it "performs the configured number of retries, no more, no less" do
+       @newrelic_output = LogStash::Plugin.lookup("output", "newrelic").new(
+         { "base_uri" => base_uri, "license_key" => api_key, "max_retries" => '3' }
+       )
+       @newrelic_output.register
+       stub_request(:any, base_uri)
+         .to_return(status: 500)
+         .to_return(status: 500)
+         .to_return(status: 500)
+         .to_return(status: 200)
+
+       event1 = LogStash::Event.new({ "message" => "Test message" })
+       @newrelic_output.multi_receive([event1])
+
+       wait_for(a_request(:post, base_uri)
+         .with { |request| single_gzipped_message(request.body)['message'] == 'Test message' })
+         .to have_been_made.at_least_times(3)
+       wait_for(a_request(:post, base_uri)
+         .with { |request| single_gzipped_message(request.body)['message'] == 'Test message' })
+         .to have_been_made.at_most_times(3)
+     end
    end

    context "JSON serialization" do
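
The new spec leans on two WebMock behaviors worth knowing: chained to_return calls serve their responses in order, with the last one repeated once the list is exhausted, and bracketing the assertion with at_least_times and at_most_times pins the request count exactly even though the sends now happen on background pool threads. The chaining in isolation, inside any spec with WebMock loaded (URL illustrative):

  # First request gets 500; the second and every later request gets 200.
  stub_request(:post, "http://example.com/logs")
    .to_return(status: 500)
    .to_return(status: 200)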
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-newrelic
  version: !ruby/object:Gem::Version
-   version: 1.5.0
+   version: 1.5.1
  platform: ruby
  authors:
  - New Relic Logging Team
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-08-14 00:00:00.000000000 Z
+ date: 2023-08-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement