logstash-input-rabbitmq 3.3.0 → 4.0.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 234eccd39f2d2459784e232bfee4aca303c51956
-  data.tar.gz: 8ff9cd86fbd139dd7a3623316f1b60749a9bfe57
+  metadata.gz: 9e02bbaff26642c67faaa699f6811a8325838265
+  data.tar.gz: f780dae010e2e3c514f373f079a379156c380a90
 SHA512:
-  metadata.gz: 8e7947a49fc82d0066c9d0563b2dd9ca16d5780ffec7bca130b58a5e1776c3ca7bfaf18528ca1e3f4982ae794f0636e0d029d7e330d6ec9e207bd3117813518a
-  data.tar.gz: efb6b29abfdd37f47cf9ea707789561ab5848f2c00e1e59dc66958802527ef793d871b264cc4d9f86b7757f4c96b1243e14bfd4c7017aa346e2fcd222b3589d3
+  metadata.gz: ff78bf33ae7a6efcbf9228121c2674a2cb6f4c9a674609eb9d1fee36d43aa0a69140e722c10aa860f9ce0cd2f417c71aad3d34bfcc1a0786e55fb8e54cb4ad05
+  data.tar.gz: 504f8ef80becb3557fecf7e7da7945624748356c051f40b06a5d8dd3ad476135efa12b05259e0607e15a1b007d4158aacd7ff3b5413d5d951c6f6b71ad8c6b60
data/CHANGELOG.md CHANGED
@@ -1,3 +1,13 @@
+## 4.0.0
+- Ensure the consumer is done processing messages before closing the channel. Fixes shutdown errors.
+- Use an internal queue and a separate thread to speed up processing
+- Disable metadata insertion by default, as it slows processing down significantly
+- Make exchange_type an optional config option when using 'exchange'.
+  When set, the exchange will always be declared
+
+## 3.3.1
+- New dependency requirements for logstash-core for the 5.0 release
+
 ## 3.3.0
 - Fix a regression in 3.2.0 that reinstated behavior that duplicated consumers
 - Always declare exchanges used, the exchange now need not exist before LS starts
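The 4.0.0 entries above boil down to two user-visible options: exchange_type is now optional (and triggers exchange declaration when set), and metadata storage is opt-in via metadata_enabled. As a rough sketch only, this is how those options could be passed to the plugin, using the same settings-hash style the specs later in this diff use; the host, queue, and exchange names are invented for illustration:

    # Illustrative only; "exchange_type" and "metadata_enabled" are the 4.0.0 additions.
    require "logstash/inputs/rabbitmq"

    settings = {
      "host"             => "localhost",   # hypothetical broker address
      "queue"            => "logstash",    # hypothetical queue name
      "exchange"         => "logs",        # the queue is bound to this exchange
      "exchange_type"    => "fanout",      # optional; when set, the exchange is declared
      "metadata_enabled" => true,          # defaults to false in 4.0.0 for performance
      "ack"              => true,
      "prefetch_count"   => 256
    }

    input = LogStash::Inputs::RabbitMQ.new(settings)
    input.register  # connects, declares the queue, and binds (declaring) the exchange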
data/lib/logstash/inputs/rabbitmq.rb CHANGED
@@ -2,6 +2,8 @@
 require 'logstash/plugin_mixins/rabbitmq_connection'
 require 'logstash/inputs/threadable'
 require 'logstash/event'
+java_import java.util.concurrent.ArrayBlockingQueue
+java_import java.util.concurrent.TimeUnit
 
 module LogStash
   module Inputs
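The two java_import lines above bring in the JDK classes behind the new hand-off: the MarchHare consumer callback now only puts raw deliveries onto a bounded ArrayBlockingQueue, and a separate loop polls them off (see internal_queue_consume! further down). A minimal, standalone JRuby sketch of that producer/consumer pattern, with invented names and sizes rather than plugin code:

    # JRuby only: java.util.concurrent classes are used directly.
    java_import java.util.concurrent.ArrayBlockingQueue
    java_import java.util.concurrent.TimeUnit

    queue  = ArrayBlockingQueue.new(512)  # bounded, so a slow consumer applies backpressure
    POISON = []                           # sentinel used to signal shutdown

    producer = Thread.new do
      5.times { |n| queue.put("message #{n}") }  # put blocks while the queue is full
      queue.put(POISON)                          # tell the consuming loop to stop
    end

    loop do
      item = queue.poll(10, TimeUnit::MILLISECONDS)  # nil if nothing arrived in time
      next unless item                               # idle tick; the plugin flushes pending acks here
      break if item == POISON
      puts item
    end
    producer.join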
@@ -18,8 +20,9 @@ module LogStash
     # understanding.
     #
     # The properties of messages received will be stored in the
-    # `[@metadata][rabbitmq_properties]` field. The following
-    # properties may be available (in most cases dependent on whether
+    # `[@metadata][rabbitmq_properties]` field if the `metadata_enabled` setting is enabled.
+    # Note that storing metadata may degrade performance.
+    # The following properties may be available (in most cases dependent on whether
     # they were set by the sender):
     #
     # * app-id
@@ -92,6 +95,8 @@ module LogStash
         "user-id",
       ].map { |s| s.freeze }.freeze
 
+      INTERNAL_QUEUE_POISON=[]
+
       config_name "rabbitmq"
 
       # The default codec for this plugin is JSON. You can override this to suit your particular needs however.
@@ -122,8 +127,7 @@ module LogStash
 
       # Prefetch count. If acknowledgements are enabled with the `ack`
       # option, specifies the number of outstanding unacknowledged
-      # messages allowed. With acknowledgemnts disabled this setting
-      # has no effect.
+      # messages allowed.
       config :prefetch_count, :validate => :number, :default => 256
 
       # Enable message acknowledgements. With acknowledgements
@@ -131,6 +135,9 @@ module LogStash
       # Logstash pipeline will be requeued by the server if Logstash
       # shuts down. Acknowledgements will however hurt the message
       # throughput.
+      #
+      # This will only send an ack back every `prefetch_count` messages.
+      # Working in batches provides a performance boost here.
       config :ack, :validate => :boolean, :default => true
 
       # If true the queue will be passively declared, meaning it must
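The comment added above refers to the batched acknowledgement strategy implemented in internal_queue_consume! later in this diff: rather than acking every delivery, one basic.ack with the multiple flag is sent after prefetch_count messages, and any remainder is flushed when the internal queue goes idle. A self-contained sketch of that idea with a stand-in channel (not plugin code, and not the MarchHare API beyond the ack(tag, multiple) shape the diff itself uses):

    # FakeChannel just records acks so the batching is visible; a real MarchHare
    # channel would send basic.ack to the broker instead.
    FakeChannel = Struct.new(:acks) do
      def ack(delivery_tag, multiple = false)
        acks << [delivery_tag, multiple]
      end
    end

    channel        = FakeChannel.new([])
    prefetch_count = 256
    pending        = 0

    (1..1_000).each do |delivery_tag|    # pretend these are delivery tags of consumed messages
      pending += 1
      if pending >= prefetch_count
        channel.ack(delivery_tag, true)  # multiple = true acks everything up to this tag
        pending = 0
      end
    end

    puts channel.acks.length  # => 3; the 232 leftover messages would be acked on the next idle poll

In the plugin, the same per-batch ack shows up as @hare_info.channel.ack(delivery_tag, true) if @ack in the internal_queue_consume! hunk below.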
@@ -140,10 +147,12 @@ module LogStash
       # (durable etc) must match those of the existing queue.
       config :passive, :validate => :boolean, :default => false
 
-      # The name of the exchange to bind the queue to.
+      # The name of the exchange to bind the queue to. Specify `exchange_type`
+      # as well to declare the exchange if it does not exist.
       config :exchange, :validate => :string
 
-      # The type of the exchange to bind to
+      # The type of the exchange to bind to. Specifying this will cause this plugin
+      # to declare the exchange if it does not exist.
       config :exchange_type, :validate => :string
 
       # The routing key to use when binding a queue to the exchange.
@@ -157,7 +166,12 @@ module LogStash
       # before retrying. Subscribes can fail if the server goes away and then comes back.
       config :subscription_retry_interval_seconds, :validate => :number, :required => true, :default => 5
 
+      # Enable the storage of message headers and properties in `@metadata`. This may impact performance.
+      config :metadata_enabled, :validate => :boolean, :default => false
+
       def register
+        @internal_queue = java.util.concurrent.ArrayBlockingQueue.new(@prefetch_count*2)
+
         connect!
         declare_queue!
         bind_exchange!
@@ -178,7 +192,10 @@ module LogStash
 
       def bind_exchange!
         if @exchange
-          @hare_info.exchange = declare_exchange!(@hare_info.channel, @exchange, @exchange_type, @durable)
+          if @exchange_type # Only declare the exchange if @exchange_type is set!
+            @logger.info? && @logger.info("Declaring exchange '#{@exchange}' with type #{@exchange_type}")
+            @hare_info.exchange = declare_exchange!(@hare_info.channel, @exchange, @exchange_type, @durable)
+          end
           @hare_info.queue.bind(@exchange, :routing_key => @key)
         end
       end
@@ -197,21 +214,8 @@ module LogStash
       end
 
       def consume!
-        # we manually build a consumer here to be able to keep a reference to it
-        # in an @ivar even though we use a blocking version of HB::Queue#subscribe
-
-        # The logic here around resubscription might seem strange, but its predicated on the fact
-        # that we rely on MarchHare to do the reconnection for us with auto_reconnect.
-        # Unfortunately, while MarchHare does the reconnection work it won't re-subscribe the consumer
-        # hence the logic below.
         @consumer = @hare_info.queue.build_consumer(:on_cancellation => Proc.new { on_cancellation }) do |metadata, data|
-          @codec.decode(data) do |event|
-            decorate(event)
-            event["@metadata"]["rabbitmq_headers"] = get_headers(metadata)
-            event["@metadata"]["rabbitmq_properties"] = get_properties(metadata)
-            @output_queue << event if event
-          end
-          @hare_info.channel.ack(metadata.delivery_tag) if @ack
+          @internal_queue.put [metadata, data]
         end
 
         begin
@@ -223,25 +227,67 @@ module LogStash
           retry
         end
 
-        while !stop?
-          sleep 1
+        internal_queue_consume!
+      end
+
+      def internal_queue_consume!
+        i=0
+        last_delivery_tag=nil
+        while true
+          payload = @internal_queue.poll(10, TimeUnit::MILLISECONDS)
+          if !payload # Nothing in the queue
+            if last_delivery_tag # And we have unacked stuff
+              @hare_info.channel.ack(last_delivery_tag, true) if @ack
+              i=0
+              last_delivery_tag = nil
+            end
+            next
+          end
+
+          break if payload == INTERNAL_QUEUE_POISON
+
+          metadata, data = payload
+          @codec.decode(data) do |event|
+            decorate(event)
+            if @metadata_enabled
+              event["@metadata"]["rabbitmq_headers"] = get_headers(metadata)
+              event["@metadata"]["rabbitmq_properties"] = get_properties(metadata)
+            end
+            @output_queue << event if event
+          end
+
+          i += 1
+
+          if i >= @prefetch_count
+            @hare_info.channel.ack(metadata.delivery_tag, true) if @ack
+            i = 0
+            last_delivery_tag = nil
+          else
+            last_delivery_tag = metadata.delivery_tag
+          end
         end
       end
 
       def stop
-        super
+        @internal_queue.put(INTERNAL_QUEUE_POISON)
         shutdown_consumer
         close_connection
       end
 
       def shutdown_consumer
         return unless @consumer
-        @consumer.gracefully_shut_down
+        @hare_info.channel.basic_cancel(@consumer.consumer_tag)
+        until @consumer.terminated?
+          @logger.info("Waiting for rabbitmq consumer to terminate before stopping!", :params => self.params)
+          sleep 1
+        end
       end
 
       def on_cancellation
-        @logger.info("Received basic.cancel from #{rabbitmq_settings[:host]}, shutting down.")
-        stop
+        if !stop? # If this isn't already part of a regular stop
+          @logger.info("Received basic.cancel from #{rabbitmq_settings[:host]}, shutting down.")
+          stop
+        end
       end
 
       private
data/logstash-input-rabbitmq.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-input-rabbitmq'
-  s.version = '3.3.0'
+  s.version = '4.0.0'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Pull events from a RabbitMQ exchange."
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
@@ -71,7 +71,7 @@ describe LogStash::Inputs::RabbitMQ do
   context "with an exchange declared" do
     let(:exchange) { "exchange" }
     let(:key) { "routing key" }
-    let(:rabbitmq_settings) { super.merge("exchange" => exchange, "key" => key) }
+    let(:rabbitmq_settings) { super.merge("exchange" => exchange, "key" => key, "exchange_type" => "fanout") }
 
     before do
       allow(instance).to receive(:declare_exchange!)
@@ -180,7 +180,7 @@ describe "with a live server", :integration => true do
   end
 
   describe "receiving a message with a queue + exchange specified" do
-    let(:config) { super.merge("queue" => queue_name, "exchange" => exchange_name, "exchange_type" => "fanout") }
+    let(:config) { super.merge("queue" => queue_name, "exchange" => exchange_name, "exchange_type" => "fanout", "metadata_enabled" => true) }
     let(:event) { output_queue.pop }
     let(:exchange) { test_channel.exchange(exchange_name, :type => "fanout") }
     let(:exchange_name) { "logstash-input-rabbitmq-#{rand(0xFFFFFFFF)}" }
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-rabbitmq
 version: !ruby/object:Gem::Version
-  version: 3.3.0
+  version: 4.0.0
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-03-11 00:00:00.000000000 Z
+date: 2016-03-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash-core