deimos-ruby 1.0.0.pre.beta25 → 1.0.0.pre.beta26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: cf690c099639314ff468feb6b9f925ca74655f7738e0c3162c4c879254dc707b
-  data.tar.gz: ecad3f7d4c68a824df0a2e28431885d95b896814db40d77383c6e7a162327dd4
+  metadata.gz: c9dbc69d5cb3ef5b93c1d926443dc2be92e295329f49edd0dec7a9e812a4e7a5
+  data.tar.gz: e336c6f5faf5f95c86e4d243ad0c537ba57f6db755097c83195b90bf41a1415e
 SHA512:
-  metadata.gz: c077533375e21f16c3ceb9f81b5fcc5bb8c1a970c35efbf1fa95dbcef28d7f4a18eebafda136f16d98dbfae8b9c920755ac75b453a38d504e489c87bcbf9e2a1
-  data.tar.gz: 425e6caeff23223e41da507a5631a13c6cf2f4d7d3f32b8338c37f2454bb0886702069de05dcd359aa5661ebb62e474db9fd9cb54c0ae1504cfcea3c6ec220ac
+  metadata.gz: 57fea10ed43861b17d8fd31ba1a5d322e12db96ec7de3fff6ea28e314efe10686bc6c7fd0da2f11bdf2bfa8c7510a60bfa17c9bd836a32e3e3ed5e190faa9050
+  data.tar.gz: 3ffd651469987c6a337decd275c78171870ab604599f4c7ca0347ebd8e9493b8fcda7ee4b3fb0012b0cdfbc13ceb7bdd2af51392707012d4559fed05842d7239
data/CHANGELOG.md CHANGED
@@ -7,10 +7,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## UNRELEASED
 
-## 1.0.0-beta25 - 2019-08-28
+## [1.0.0-beta26] - 2019-08-29
+- Recover from Kafka::MessageSizeTooLarge in the DB producer.
+- Shut down sync producers correctly when persistent_connections is true.
+- Notify when messages fail to produce in the DB producer.
+- Delete messages on failure and rely on notification.
+
+## [1.0.0-beta25] - 2019-08-28
 - Fix bug where crashing would cause producers to stay disabled
 
-## 1.0.0-beta24 - 2019-08-26
+## [1.0.0-beta24] - 2019-08-26
 - Reconnect DB backend if database goes away.
 - Sleep only 5 seconds between attempts instead of using exponential backoff.
 - Fix for null payload being Avro-encoded.
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    deimos-ruby (1.0.0.pre.beta25)
+    deimos-ruby (1.0.0.pre.beta26)
       avro-patches (~> 0.3)
       avro_turf (~> 0.8)
       phobos (~> 1.8)
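
For anyone tracking this prerelease line, a minimal Gemfile entry (illustrative only; RubyGems requirements only match prereleases when the requirement itself names one):

```ruby
# Sketch: pin the exact prerelease; a plain '~> 1.0' constraint would not
# resolve to a .pre version.
gem 'deimos-ruby', '1.0.0.pre.beta26'
```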
data/README.md CHANGED
@@ -197,7 +197,8 @@ You can listen to these notifications e.g. as follows:
 end
 ```
 
-The following events are also produced:
+The following events are produced (in addition to the ones already
+produced by Phobos and RubyKafka):
 
 * `produce_error` - sent when an error occurs when producing a message.
   * producer - the class that produced the message
@@ -208,6 +209,14 @@ The following events are also produced:
   * producer - the class that produced the message
   * topic
   * payloads - the unencoded payloads
+* `db_producer.produce` - sent when the DB producer sends messages for the
+  DB backend. Messages that are too large will be caught with this
+  notification - they will be deleted from the table and this notification
+  will be fired with an exception object.
+  * topic
+  * exception_object
+  * messages - the batch of messages (in the form of `Deimos::KafkaMessage`s)
+    that failed - this should have only a single message in the batch.
 
 Similarly:
 ```ruby
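
As a usage sketch for the new event (illustration only, not part of the diff): a subscriber that logs batches dropped by the DB producer. `Deimos.subscribe` and `Deimos.config.logger` both appear elsewhere in this release; the guard assumes `exception_object` is only present when the produce call failed.

```ruby
# Sketch: react to messages the DB producer dropped after a size error.
Deimos.subscribe('db_producer.produce') do |event|
  exception = event.payload[:exception_object]
  next unless exception # assumed absent when the batch produced successfully

  messages = event.payload[:messages] # Deimos::KafkaMessage records
  Deimos.config.logger.error(
    "Dropped #{messages.size} message(s) on #{event.payload[:topic]}: " \
    "#{exception.message}"
  )
end
```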
data/lib/deimos/backends/kafka.rb CHANGED
@@ -6,6 +6,12 @@ module Deimos
     class Kafka < Deimos::PublishBackend
       include Phobos::Producer
 
+      # Shut down the producer if necessary.
+      def self.shutdown_producer
+        producer.sync_producer_shutdown if producer.respond_to?(:sync_producer_shutdown)
+        producer.kafka_client&.close
+      end
+
       # :nodoc:
       def self.execute(producer_class:, messages:)
         Deimos.instrument(
data/lib/deimos/backends/kafka_async.rb CHANGED
@@ -6,6 +6,12 @@ module Deimos
     class KafkaAsync < Deimos::PublishBackend
       include Phobos::Producer
 
+      # Shut down the producer cleanly.
+      def self.shutdown_producer
+        producer.async_producer_shutdown
+        producer.kafka_client&.close
+      end
+
       # :nodoc:
       def self.execute(producer_class:, messages:)
         Deimos.instrument(
data/lib/deimos/kafka_message.rb CHANGED
@@ -14,6 +14,29 @@ module Deimos
       write_attribute(:message, mess ? mess.to_s : nil)
     end
 
+    # @return [Deimos::Consumer]
+    def decoder
+      producer = Deimos::Producer.descendants.find { |c| c.topic == self.topic }
+      return nil unless producer
+
+      consumer = Class.new(Deimos::Consumer)
+      consumer.config.merge!(producer.config)
+      consumer
+    end
+
+    # Decode the message. This assumes for now that we have access to a producer
+    # in the codebase which can decode it.
+    # @param decoder [Deimos::Consumer]
+    # @return [Hash]
+    def decoded_message(decoder=self.decoder)
+      return { key: self.key, message: self.message } unless decoder
+
+      {
+        key: self.key.present? ? decoder.new.decode_key(self.key) : nil,
+        payload: decoder.decoder.decode(self.message)
+      }
+    end
+
     # @return [Hash]
     def phobos_message
       {
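
A usage sketch for the new helpers (illustrative; it assumes a producer class for `'my-topic'` exists in the codebase, which is exactly what `decoder` looks up via `Deimos::Producer.descendants`):

```ruby
# Hypothetical console session against the new KafkaMessage methods.
record = Deimos::KafkaMessage.where(topic: 'my-topic').first

# With a matching producer defined, key and payload come back decoded:
record.decoded_message # => { key: 1, payload: { 'test_id' => 'foo' } }

# With no matching producer, decoder returns nil and the raw encoded
# values are returned instead: { key: ..., message: ... }
```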
data/lib/deimos/utils/db_producer.rb CHANGED
@@ -64,22 +64,17 @@ module Deimos
 
        while messages.any?
          @logger.debug do
-           producer = Deimos::Producer.descendants.find { |c| c.topic == topic }
-           decoded_messages = if producer
-                                consumer = Class.new(Deimos::Consumer)
-                                consumer.config.merge!(producer.config)
-                                messages.map do |message|
-                                  {
-                                    key: message[:key].present? ? consumer.new.decode_key(message[:key]) : nil,
-                                    message: consumer.decoder.decode(message[:payload])
-                                  }
-                                end
-                              else
-                                messages
-                              end
-           "DB producer: Topic #{topic} Producing messages: #{decoded_messages}"
+           decoder = messages.first.decoder
+           "DB producer: Topic #{topic} Producing messages: #{messages.map { |m| m.decoded_message(decoder) }}"
+         end
+         Deimos.instrument('db_producer.produce', topic: topic, messages: messages) do
+           begin
+             produce_messages(messages.map(&:phobos_message))
+           rescue Kafka::BufferOverflow, Kafka::MessageSizeTooLarge, Kafka::RecordListTooLarge
+             messages.each(&:delete)
+             raise
+           end
          end
-         produce_messages(messages.map(&:phobos_message))
          messages.first.class.where(id: messages.map(&:id)).delete_all
          break if messages.size < BATCH_SIZE
 
@@ -105,6 +100,15 @@ module Deimos
         Deimos.config.metrics&.gauge('pending_db_messages_max_wait', time_diff)
       end
 
+      # Shut down the sync producer if we have to. Phobos will automatically
+      # create a new one. We should call this if the producer can be in a bad
+      # state and e.g. we need to clear the buffer.
+      def shutdown_producer
+        if self.class.producer.respond_to?(:sync_producer_shutdown) # Phobos 1.8.3
+          self.class.producer.sync_producer_shutdown
+        end
+      end
+
       # @param batch [Array<Hash>]
       def produce_messages(batch)
         batch_size = batch.size
@@ -119,18 +123,20 @@ module Deimos
             )
             @logger.info("Sent #{group.size} messages to #{@current_topic}")
           end
-        rescue Kafka::BufferOverflow
-          raise if batch_size == 1
+        rescue Kafka::BufferOverflow, Kafka::MessageSizeTooLarge,
+               Kafka::RecordListTooLarge => e
+          if batch_size == 1
+            shutdown_producer
+            raise
+          end
 
-          @logger.error("Buffer overflow when publishing #{batch.size} in groups of #{batch_size}, retrying...")
+          @logger.error("Got error #{e.class.name} when publishing #{batch.size} in groups of #{batch_size}, retrying...")
           if batch_size < 10
             batch_size = 1
           else
             batch_size /= 10
           end
-          if self.class.producer.respond_to?(:sync_producer_shutdown) # Phobos 1.8.3
-            self.class.producer.sync_producer_shutdown
-          end
+          shutdown_producer
           retry
         end
       end
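
To make the retry schedule concrete: on each failure the batch is re-sent in groups a tenth the size, bottoming out at single messages; once a lone message still fails, the producer is shut down and the error propagates to `process_topic`, which deletes the message and fires the notification. A standalone sketch of the progression (illustration only, mirroring the branch above):

```ruby
# Mirrors the rescue branch: divide by 10 until under 10, then drop to 1.
batch_size = 1000
schedule = [batch_size]
until batch_size == 1
  batch_size = batch_size < 10 ? 1 : batch_size / 10
  schedule << batch_size
end
schedule # => [1000, 100, 10, 1]
```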
data/lib/deimos/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Deimos
-  VERSION = '1.0.0-beta25'
+  VERSION = '1.0.0-beta26'
 end
data/lib/deimos.rb CHANGED
@@ -124,11 +124,11 @@ end
 
 at_exit do
   begin
-    Deimos::Backends::KafkaAsync.producer.async_producer_shutdown
-    Deimos::Backends::KafkaAsync.producer.kafka_client&.close
+    Deimos::Backends::KafkaAsync.shutdown_producer
+    Deimos::Backends::Kafka.shutdown_producer
   rescue StandardError => e
     Deimos.config.logger.error(
-      "Error closing async producer on shutdown: #{e.message} #{e.backtrace.join("\n")}"
+      "Error closing producer on shutdown: #{e.message} #{e.backtrace.join("\n")}"
     )
   end
 end
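
The same hooks can be driven by hand if a producer may be left in a bad state before exit, e.g. with persistent connections enabled; a sketch:

```ruby
# Illustration: flush and close both backends manually, mirroring the
# at_exit hook above. Phobos lazily creates fresh producers on next use.
Deimos::Backends::Kafka.shutdown_producer
Deimos::Backends::KafkaAsync.shutdown_producer
```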
data/spec/producer_spec.rb CHANGED
@@ -148,6 +148,15 @@ module ProducerTest
     expect(MyProducer.topic).to have_sent(anything)
   end
 
+  it 'should send messages after a crash' do
+    expect {
+      Deimos.disable_producers do
+        raise 'OH NOES'
+      end
+    }.to raise_error('OH NOES')
+    expect(Deimos).not_to be_producers_disabled
+  end
+
   it 'should produce to a prefixed topic' do
     Deimos.configure { |c| c.producer_topic_prefix = 'prefix.' }
     payload = { 'test_id' => 'foo', 'some_int' => 123 }
data/spec/utils/db_producer_spec.rb CHANGED
@@ -176,6 +176,31 @@ each_db_config(Deimos::Utils::DbProducer) do
       producer.process_topic('my-topic')
     end
 
+    it 'should notify on error' do
+      messages = (1..4).map do |i|
+        Deimos::KafkaMessage.create!(
+          id: i,
+          topic: 'my-topic',
+          message: "mess#{i}",
+          partition_key: "key#{i}"
+        )
+      end
+
+      expect(Deimos::KafkaTopicInfo).to receive(:lock).
+        with('my-topic', 'abc').and_return(true)
+      expect(producer).to receive(:produce_messages).and_raise('OH NOES')
+      expect(producer).to receive(:retrieve_messages).and_return(messages)
+      expect(Deimos::KafkaTopicInfo).to receive(:register_error)
+
+      expect(Deimos::KafkaMessage.count).to eq(4)
+      Deimos.subscribe('db_producer.produce') do |event|
+        expect(event.payload[:exception_object].message).to eq('OH NOES')
+        expect(event.payload[:messages]).to eq(messages)
+      end
+      producer.process_topic('my-topic')
+      expect(Deimos::KafkaMessage.count).to eq(0)
+    end
+
   end
 
   example 'Full integration test' do
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: deimos-ruby
 version: !ruby/object:Gem::Version
-  version: 1.0.0.pre.beta25
+  version: 1.0.0.pre.beta26
 platform: ruby
 authors:
 - Daniel Orner
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-08-28 00:00:00.000000000 Z
+date: 2019-08-29 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: avro-patches