logstash-output-newrelic 1.5.0 → 1.5.2

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 25593f8efc45dce5d2366a19ce89a4d722f6e5a55fdb3d6d8cad607d551dcbd7
- data.tar.gz: d729dc031d6276a9dafdd1ae5a7615bd378993437a36b6828c540fc901839c88
+ metadata.gz: d582474ed8258ecfade02da25a5a32b690d70efb9c12e5e34d51cb0bf229afd6
+ data.tar.gz: 8c2d60d8d9910f5f70286e3898e1e71f8cf7dfc428a0626943808a7b44fdba13
  SHA512:
- metadata.gz: 82c061a523aab278574d91373f8691c1499d0560b04e96dd8a8f178b14438eaceb7236dc62cea8ca6ac15d56e9ffa7acdfc2534338d4ae00306b0ef3e47b9f58
- data.tar.gz: a6b92e08674d6d67e04d9ca2cb5bc0007c717e43c07c888399f715e4c5de546a58cf8237ae9128b742d5cd96ba7b9d52142e11875d1e02b9649af7e515e5e3a0
+ metadata.gz: 7a4b6e04dfccb15df29ebd92429756d2c15a4c22ad470ec008f7e59d9506d0e9c560b86e7ef9ce9657e4952dd46dd8cb4d7f2167769e3eb61a16da002fe47cd6
+ data.tar.gz: e75fa1ff04550653a7e2fcfbda24af1550cb4df68c8ec1949ea0e9bfe56fbdcb6a765855100cf2d7d8842e2ee9711fabdd9363d9bf80e6e1d2546c8e485b8e29
@@ -11,7 +11,6 @@ require_relative './config/bigdecimal_patch'
  require_relative './exception/error'

  class LogStash::Outputs::NewRelic < LogStash::Outputs::Base
- java_import java.util.concurrent.Executors;

  RETRIABLE_CODES = Set[408, 429, 500, 502, 503, 504, 599]

@@ -43,7 +42,14 @@ class LogStash::Outputs::NewRelic < LogStash::Outputs::Base
  'Content-Encoding' => 'gzip',
  'Content-Type' => 'application/json'
  }.merge(auth).freeze
+
+ # We use a semaphore to ensure that at most there are @concurrent_requests inflight Logstash requests being processed
+ # by our plugin at the same time. Without this semaphore, given that @executor.submit() is an asynchronous method, it
+ # would cause that an unbounded amount of inflight requests may be processed by our plugin. Logstash then believes
+ # that our plugin has processed the request, and keeps reading more inflight requests in memory. This causes a memory
+ # leak and results in an OutOfMemoryError.
  @executor = java.util.concurrent.Executors.newFixedThreadPool(@concurrent_requests)
+ @semaphore = java.util.concurrent.Semaphore.new(@concurrent_requests)
  end

  # Used by tests so that the test run can complete (background threads prevent JVM exit)
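The comment added in the hunk above describes a bounded-submission pattern: acquire a semaphore permit before handing a batch to the fixed thread pool, and release it when the batch finishes, so at most @concurrent_requests batches are ever in flight. A minimal standalone JRuby sketch of that pattern follows; MAX_IN_FLIGHT and send_batch are illustrative names, not plugin API.

require 'java'

MAX_IN_FLIGHT = 4
executor  = java.util.concurrent.Executors.newFixedThreadPool(MAX_IN_FLIGHT)
semaphore = java.util.concurrent.Semaphore.new(MAX_IN_FLIGHT)
submit    = executor.java_method :submit, [java.lang.Runnable]

def send_batch(batch)
  puts "sending #{batch.length} records"   # stand-in for the real HTTP call
end

10.times do |i|
  semaphore.acquire                        # blocks the caller once MAX_IN_FLIGHT batches are queued or running
  submit.call do
    begin
      send_batch([i])
    ensure
      semaphore.release                    # always free the slot, even if send_batch raises
    end
  end
end

executor.shutdown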
@@ -97,7 +103,19 @@ class LogStash::Outputs::NewRelic < LogStash::Outputs::Base

  nr_logs = to_nr_logs(logstash_events)

- package_and_send_recursively(nr_logs)
+ submit_logs_to_be_sent(nr_logs)
+ end
+
+ def submit_logs_to_be_sent(nr_logs)
+ @semaphore.acquire()
+ execute = @executor.java_method :submit, [java.lang.Runnable]
+ execute.call do
+ begin
+ package_and_send_recursively(nr_logs)
+ ensure
+ @semaphore.release()
+ end
+ end
  end

  def package_and_send_recursively(nr_logs)
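The next hunk un-nests the existing gzip-and-split logic from the executor block (submission is now bounded upstream) without changing the strategy itself: compress the batch, and if the result reaches the 1 MB cap (MAX_PAYLOAD_SIZE_BYTES), halve the batch and recurse, discarding a lone record that can never fit. A standalone sketch of that strategy, using illustrative names (MAX_BYTES, gzip_bytes, chunks_under_cap) rather than plugin API:

require 'json'
require 'stringio'
require 'zlib'

MAX_BYTES = 1_000_000   # stands in for MAX_PAYLOAD_SIZE_BYTES (1 MB)

def gzip_bytes(records)
  io = StringIO.new
  gz = Zlib::GzipWriter.new(io)
  gz << records.to_json
  gz.close
  io.string.bytesize
end

# Halve the batch until every chunk compresses below the cap; a single record
# that still exceeds the cap cannot be split further and is dropped.
def chunks_under_cap(records)
  return [records] if gzip_bytes(records) < MAX_BYTES
  return [] if records.length == 1
  mid = records.length / 2
  chunks_under_cap(records[0...mid]) + chunks_under_cap(records[mid..-1])
end

puts chunks_under_cap(Array.new(50_000) { { message: rand.to_s * 20 } }).length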
@@ -113,27 +131,24 @@ class LogStash::Outputs::NewRelic < LogStash::Outputs::Base
  :logs => nr_logs
  }

- execute = @executor.java_method :submit, [java.lang.Runnable]
- execute.call do
- compressed_payload = StringIO.new
- gzip = Zlib::GzipWriter.new(compressed_payload)
- gzip << [payload].to_json
- gzip.close
-
- compressed_size = compressed_payload.string.bytesize
- log_record_count = nr_logs.length
-
- if compressed_size >= MAX_PAYLOAD_SIZE_BYTES && log_record_count == 1
- @logger.error("Can't compress record below required maximum packet size and it will be discarded.")
- elsif compressed_size >= MAX_PAYLOAD_SIZE_BYTES && log_record_count > 1
- @logger.debug("Compressed payload size (#{compressed_size}) exceededs maximum packet size (1MB) and will be split in two.")
- split_index = log_record_count / 2
- package_and_send_recursively(nr_logs[0...split_index])
- package_and_send_recursively(nr_logs[split_index..-1])
- else
- @logger.debug("Payload compressed size: #{compressed_size}")
- nr_send(compressed_payload.string)
- end
+ compressed_payload = StringIO.new
+ gzip = Zlib::GzipWriter.new(compressed_payload)
+ gzip << [payload].to_json
+ gzip.close
+
+ compressed_size = compressed_payload.string.bytesize
+ log_record_count = nr_logs.length
+
+ if compressed_size >= MAX_PAYLOAD_SIZE_BYTES && log_record_count == 1
+ @logger.error("Can't compress record below required maximum packet size and it will be discarded.")
+ elsif compressed_size >= MAX_PAYLOAD_SIZE_BYTES && log_record_count > 1
+ @logger.debug("Compressed payload size (#{compressed_size}) exceededs maximum packet size (1MB) and will be split in two.")
+ split_index = log_record_count / 2
+ package_and_send_recursively(nr_logs[0...split_index])
+ package_and_send_recursively(nr_logs[split_index..-1])
+ else
+ @logger.debug("Payload compressed size: #{compressed_size}")
+ nr_send(compressed_payload.string)
  end
  end

@@ -148,10 +163,10 @@ class LogStash::Outputs::NewRelic < LogStash::Outputs::Base
  retry_duration = 1

  begin
- http = Net::HTTP.new(@end_point.host, 443)
+ http = Net::HTTP.new(@end_point.host, @end_point.port || 443)
  request = Net::HTTP::Post.new(@end_point.request_uri)
- http.use_ssl = true
- http.verify_mode = OpenSSL::SSL::VERIFY_PEER
+ http.use_ssl = (@end_point.scheme == 'https')
+ http.verify_mode = @end_point.scheme == 'https' ? OpenSSL::SSL::VERIFY_PEER : OpenSSL::SSL::VERIFY_NONE
  if !@custom_ca_cert.nil?
  store = OpenSSL::X509::Store.new
  ca_cert = OpenSSL::X509::Certificate.new(File.read(@custom_ca_cert))
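With this change the parsed base_uri drives both the port and the TLS setup, which is what lets the new spec exercise a plain-HTTP endpoint such as http://localhost:5000/. A minimal sketch of the same Net::HTTP wiring, using a hypothetical endpoint:

require 'net/http'
require 'openssl'
require 'uri'

end_point = URI.parse('http://localhost:5000/')   # illustrative endpoint, not a New Relic URL

http = Net::HTTP.new(end_point.host, end_point.port || 443)  # URI#port already falls back to 80/443 per scheme
http.use_ssl = (end_point.scheme == 'https')                 # TLS only for https endpoints
http.verify_mode = end_point.scheme == 'https' ? OpenSSL::SSL::VERIFY_PEER : OpenSSL::SSL::VERIFY_NONE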
@@ -161,6 +176,9 @@ class LogStash::Outputs::NewRelic < LogStash::Outputs::Base
  @header.each { |k, v| request[k] = v }
  request.body = payload
  handle_response(http.request(request))
+ if (retries > 0)
+ @logger.warn("Successfully sent logs at retry #{retries}")
+ end
  rescue Error::BadResponseCodeError => e
  @logger.error(e.message)
  if (should_retry(retries) && is_retryable_code(e))
@@ -1,7 +1,7 @@
  module LogStash
  module Outputs
  module NewRelicVersion
- VERSION = "1.5.0"
+ VERSION = "1.5.2"
  end
  end
  end
@@ -7,6 +7,7 @@ require "logstash/event"
  require "thread"
  require "webmock/rspec"
  require "zlib"
+ require "rspec/wait"

  describe LogStash::Outputs::NewRelic do
  let (:base_uri) { "https://testing-example-collector.com" }
@@ -48,6 +49,50 @@ describe LogStash::Outputs::NewRelic do
  })).to have_been_made
  end
  end
+
+ context "check https connection scheme" do
+ it "uses https by default" do
+ stub_request(:any, base_uri).to_return(status: 200)
+
+ event = LogStash::Event.new({:message => "Test message" })
+ @newrelic_output.multi_receive([event])
+
+ wait_for(a_request(:post, base_uri)
+ .with(headers: {
+ "X-License-Key" => license_key,
+ "X-Event-Source" => "logs",
+ "Content-Encoding" => "gzip",
+ })).to have_been_made
+
+ # Check if the requests were made using HTTPS
+ expect(WebMock).to have_requested(:post, base_uri).with { |req| req.uri.scheme == 'https' }
+ expect(WebMock).to have_requested(:post, base_uri).with { |req| req.uri.port == 443 }
+ end
+ end
+
+ context "check http connection scheme" do
+ it "uses http when http config is set" do
+ stub_request(:any, "http://localhost:5000/").to_return(status: 200)
+ @newrelic_output = LogStash::Plugin.lookup("output", "newrelic").new({
+ "base_uri" => "http://localhost:5000/",
+ "license_key" => license_key
+ })
+ @newrelic_output.register
+ event = LogStash::Event.new({:message => "Test message" })
+ @newrelic_output.multi_receive([event])
+
+ wait_for(a_request(:post, "http://localhost:5000/")
+ .with(headers: {
+ "X-License-Key" => license_key,
+ "X-Event-Source" => "logs",
+ "Content-Encoding" => "gzip",
+ })).to have_been_made
+
+ # Check if the requests were made using HTTP to this endpoint
+ expect(WebMock).to have_requested(:post, "http://localhost:5000/").with { |req| req.uri.scheme == 'http' }
+ expect(WebMock).to have_requested(:post, "http://localhost:5000/").with { |req| req.uri.port == 5000 }
+ end
+ end
  end

  describe LogStash::Outputs::NewRelic do
@@ -343,6 +388,28 @@ describe LogStash::Outputs::NewRelic do
  .with { |request| single_gzipped_message(request.body)['message'] == 'Test message 1' })
  .to have_been_made.times(2)
  end
+
+ it "performs the configured amount of retries, no more, no less" do
+ @newrelic_output = LogStash::Plugin.lookup("output", "newrelic").new(
+ { "base_uri" => base_uri, "license_key" => api_key, "max_retries" => '3' }
+ )
+ @newrelic_output.register
+ stub_request(:any, base_uri)
+ .to_return(status: 500)
+ .to_return(status: 500)
+ .to_return(status: 500)
+ .to_return(status: 200)
+
+ event1 = LogStash::Event.new({ "message" => "Test message" })
+ @newrelic_output.multi_receive([event1])
+
+ wait_for(a_request(:post, base_uri)
+ .with { |request| single_gzipped_message(request.body)['message'] == 'Test message' })
+ .to have_been_made.at_least_times(3)
+ wait_for(a_request(:post, base_uri)
+ .with { |request| single_gzipped_message(request.body)['message'] == 'Test message' })
+ .to have_been_made.at_most_times(3)
+ end
  end

  context "JSON serialization" do
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-newrelic
  version: !ruby/object:Gem::Version
- version: 1.5.0
+ version: 1.5.2
  platform: ruby
  authors:
  - New Relic Logging Team
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-08-14 00:00:00.000000000 Z
+ date: 2023-10-11 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement