deimos-ruby 1.22.3 → 1.22.5

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 014d4e4a2682f57a22a065431a0de6ae0ca76fd64cb8e754f391f8f40edd8e3f
-  data.tar.gz: b7bec2abc1e5394ec435dc0a0ef86dbb9b26e379cca0db63f448a89c5b4717ad
+  metadata.gz: 997678ce4ed796037a0f554a7d0897a9ab58f810f437573da7684a6c23fa5d40
+  data.tar.gz: e13fc14fb0bf985a02c38ff628a527198aa1e3bbf1eaab52510ea63832a03f37
 SHA512:
-  metadata.gz: 6ac915ad65b58c77d08bf61c70f978f1ffd37ef58ae39d0e03156b778d5a36ba00de227b855b5ccbd0251d6eda1f4ea7e288b7328961d6fd790cc6a4423b9210
-  data.tar.gz: ca1d551a581a1a968746858a3d99aacc9ace8e7f342cfe4526cc9d27aa158360c2f858b1bf9cc1018c1848b26689b239a0b74f0ccbc2d00b085776ed34a52525
+  metadata.gz: 90062c59b953e4fff9b1f5ddb0c5107e08f98028a7e221c78637bd9cb57c8d13c081a7417dedad57274e4b2df470a58d31ed5a1a1fd985f853ecca9ab53527fc
+  data.tar.gz: fa07d49754b91fdfc48fd6edb46a1906f057ddbd9d0b11d0a66d14382ae1907a0ff7fbd2526e4653e6fc74ed53e78db78203fc043621111193fca370e50da777
data/CHANGELOG.md CHANGED
@@ -7,6 +7,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## UNRELEASED
 
+# 1.22.5 - 2023-07-18
+- Fix: Fixed buffer overflow crash with DB producer.
+
+# 1.22.4 - 2023-07-05
+- Feature: Add support for message headers.
+
 # 1.22.3 - 2023-06-13
 
 - Fix: Don't update last_sent to current time on every poll.
data/README.md CHANGED
@@ -123,6 +123,7 @@ class MyProducer < Deimos::Producer
     }
     # You can also publish an array with self.publish_list(payloads)
     # You may specify the topic here with self.publish(payload, topic: 'my-topic')
+    # You may also specify the headers here with self.publish(payload, headers: { 'foo' => 'bar' })
     self.publish(payload)
   end
 
@@ -1171,13 +1172,14 @@ end
 
 # A matcher which allows you to test that a message was sent on the given
 # topic, without having to know which class produced it.
-expect(topic_name).to have_sent(payload, key=nil)
+expect(topic_name).to have_sent(payload, key=nil, partition_key=nil, headers=nil)
 
 # Inspect sent messages
 message = Deimos::Backends::Test.sent_messages[0]
 expect(message).to eq({
   message: {'some-key' => 'some-value'},
   topic: 'my-topic',
+  headers: { 'foo' => 'bar' },
   key: 'my-id'
 })
 ```
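Taken together, the README changes above describe the new headers flow end to end. A minimal sketch of a producer call and its matching test assertion under these changes (`MyProducer` and the header values are assumed for illustration, following the README's own example):

```ruby
# Sketch only: assumes a MyProducer class as defined earlier in the README.
class MyProducer < Deimos::Producer
  # Publish a payload with Kafka headers attached.
  def self.send_with_headers(payload)
    publish(payload, headers: { 'request-id' => '123' })
  end
end

# In an RSpec test, the extended matcher can then assert on headers:
# expect('my-topic').to have_sent(payload, nil, nil, { 'request-id' => '123' })
```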
@@ -37,6 +37,10 @@ module Deimos
        log_message.merge!(
          payloads_count: messages.count
        )
+      when :headers
+        log_message.merge!(
+          payload_headers: messages.map(&:headers)
+        )
       else
         log_message.merge!(
           payloads: messages.map do |message|
@@ -39,11 +39,12 @@ module Deimos
    def self.decoded(messages=[])
      return [] if messages.empty?
 
-      decoder = self.decoder(messages.first.topic)&.new
+      decoder_class = self.decoder(messages.first.topic)
+      decoder = decoder_class&.new
      messages.map do |m|
        {
          key: m.key.present? ? decoder&.decode_key(m.key) || m.key : nil,
-          payload: decoder&.decoder&.decode(m.message) || m.message
+          payload: decoder_class&.decoder&.decode(m.message) || m.message
        }
      end
    end
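For context, the change above fixes a receiver bug: `self.decoder(topic)` returns a class, and the old code chained `&.new` before the second `.decoder` call, so that call landed on an instance that does not define it. A stripped-down sketch of the distinction (the class and methods below are illustrative, not from the gem):

```ruby
# Hypothetical shape of a schema-backend class with a class-level decoder.
class FakeBackend
  def self.decoder            # class-level: returns the payload decoder
    @decoder ||= Object.new
  end

  def decode_key(key)         # instance-level: decodes message keys
    key
  end
end

decoder_class = FakeBackend
decoder = decoder_class&.new

decoder.decode_key('k')       # works: instance method
decoder_class.decoder         # works: class method
# decoder.decoder             # would raise NoMethodError -- the original bug
```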
@@ -7,6 +7,8 @@ module Deimos
    attr_accessor :payload
    # @return [Hash, String, Integer]
    attr_accessor :key
+    # @return [Hash]
+    attr_accessor :headers
    # @return [Integer]
    attr_accessor :partition_key
    # @return [String]
@@ -23,11 +25,12 @@ module Deimos
    # @param topic [String]
    # @param key [String, Integer, Hash]
    # @param partition_key [Integer]
-    def initialize(payload, producer, topic: nil, key: nil, partition_key: nil)
+    def initialize(payload, producer, topic: nil, key: nil, headers: nil, partition_key: nil)
      @payload = payload&.with_indifferent_access
      @producer_name = producer&.name
      @topic = topic
      @key = key
+      @headers = headers&.with_indifferent_access
      @partition_key = partition_key
    end
 
@@ -59,13 +62,14 @@ module Deimos
      {
        topic: @topic,
        key: @encoded_key,
+        headers: @headers,
        partition_key: @partition_key || @encoded_key,
        payload: @encoded_payload,
        metadata: {
          decoded_payload: @payload,
          producer_name: @producer_name
        }
-      }
+      }.delete_if { |k, v| k == :headers && v.nil? }
    end
 
    # @return [Hash]
@@ -73,13 +77,14 @@ module Deimos
      {
        topic: @topic,
        key: @key,
+        headers: @headers,
        partition_key: @partition_key || @key,
        payload: @payload,
        metadata: {
          decoded_payload: @payload,
          producer_name: @producer_name
        }
-      }
+      }.delete_if { |k, v| k == :headers && v.nil? }
    end
 
    # @param other [Message]
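The `delete_if` guard above means the message hash only gains a `:headers` key when headers were actually provided, so existing equality assertions on the full hash keep passing. A rough sketch of the observable behavior, mirroring the new specs further down:

```ruby
# Without headers, the :headers key is stripped entirely.
msg = Deimos::Message.new({ 'v' => 'val' }, nil, key: 'k')
msg.to_h.key?(:headers)
# => false

# With headers, they appear in both to_h and encoded_hash,
# stored with indifferent access.
msg = Deimos::Message.new({ 'v' => 'val' }, nil, key: 'k', headers: { 'foo' => 'bar' })
msg.to_h[:headers]
# => { 'foo' => 'bar' }
```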
@@ -95,9 +95,10 @@ module Deimos
      # Publish the payload to the topic.
      # @param payload [Hash, SchemaClass::Record] with an optional payload_key hash key.
      # @param topic [String] if specifying the topic
+      # @param headers [Hash] if specifying headers
      # @return [void]
-      def publish(payload, topic: self.topic)
-        publish_list([payload], topic: topic)
+      def publish(payload, topic: self.topic, headers: nil)
+        publish_list([payload], topic: topic, headers: headers)
      end
 
      # Publish a list of messages.
@@ -107,8 +108,9 @@ module Deimos
      # @param force_send [Boolean] if true, ignore the configured backend
      # and send immediately to Kafka.
      # @param topic [String] if specifying the topic
+      # @param headers [Hash] if specifying headers
      # @return [void]
-      def publish_list(payloads, sync: nil, force_send: false, topic: self.topic)
+      def publish_list(payloads, sync: nil, force_send: false, topic: self.topic, headers: nil)
        return if Deimos.config.kafka.seed_brokers.blank? ||
                  Deimos.config.producers.disabled ||
                  Deimos.producers_disabled?(self)
@@ -122,7 +124,7 @@ module Deimos
          topic: topic,
          payloads: payloads
        ) do
-          messages = Array(payloads).map { |p| Deimos::Message.new(p.to_h, self) }
+          messages = Array(payloads).map { |p| Deimos::Message.new(p.to_h, self, headers: headers) }
          messages.each { |m| _process_message(m, topic) }
          messages.in_groups_of(MAX_BATCH_SIZE, false) do |batch|
            self.produce_batch(backend_class, batch)
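One consequence of the wiring above: a single `headers:` argument applies to every message in the batch, since each `Deimos::Message` is built with the same hash. A short usage sketch (`MyProducer` and the header values are assumed):

```ruby
# All three payloads below are sent with the same headers.
MyProducer.publish_list(
  [{ 'id' => 1 }, { 'id' => 2 }, { 'id' => 3 }],
  headers: { 'source' => 'backfill' }
)
```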
@@ -133,7 +133,7 @@ module Deimos
      str + "\nAll Messages received:\n#{message_string}"
    end
 
-    RSpec::Matchers.define :have_sent do |msg, key=nil, partition_key=nil|
+    RSpec::Matchers.define :have_sent do |msg, key=nil, partition_key=nil, headers=nil|
      message = if msg.respond_to?(:with_indifferent_access)
                  msg.with_indifferent_access
                else
@@ -147,7 +147,14 @@ module Deimos
          m[:payload]&.with_indifferent_access) &&
          topic == m[:topic] &&
          (key.present? ? key == m[:key] : true) &&
-          (partition_key.present? ? partition_key == m[:partition_key] : true)
+          (partition_key.present? ? partition_key == m[:partition_key] : true) &&
+          if headers.present?
+            hash_matcher.send(:match,
+                              headers&.with_indifferent_access,
+                              m[:headers]&.with_indifferent_access)
+          else
+            true
+          end
      end
    end
 
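Like `key` and `partition_key`, the new `headers` argument is only checked when supplied; and because the matcher arguments stay positional, asserting on headers means passing `nil` placeholders for the arguments before it. A sketch:

```ruby
# nil key/partition_key placeholders keep those checks skipped;
# headers are compared with indifferent access.
expect('my-topic').to have_sent(
  { 'test_id' => 'foo' }, nil, nil, { 'foo' => 'bar' }
)
```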
@@ -97,11 +97,16 @@ module Deimos
      Deimos.instrument('db_producer.produce', topic: @current_topic, messages: compacted_messages) do
        begin
          produce_messages(compacted_messages.map(&:phobos_message))
-        rescue Kafka::BufferOverflow, Kafka::MessageSizeTooLarge, Kafka::RecordListTooLarge
+        rescue Kafka::BufferOverflow, Kafka::MessageSizeTooLarge, Kafka::RecordListTooLarge => e
          delete_messages(messages)
          @logger.error('Message batch too large, deleting...')
-          @logger.error(Deimos::KafkaMessage.decoded(messages))
-          raise
+          begin
+            @logger.error(Deimos::KafkaMessage.decoded(messages))
+          rescue StandardError => logging_exception # rubocop:disable Naming/RescuedExceptionsVariableName
+            @logger.error("Large message details logging failure: #{logging_exception.message}")
+          ensure
+            raise e
+          end
        end
      end
      delete_messages(messages)
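The nested `begin`/`rescue`/`ensure` above is the crash fix from the changelog: decoding the oversized batch for the error log can itself raise (for example, when no key config is set), and before this change that secondary exception replaced the original Kafka one. A stripped-down illustration of the pattern (method names here are illustrative, not from the gem):

```ruby
def handle_overflow(error, messages)
  begin
    # Decoding for diagnostics may fail independently of the send failure.
    logger.error(decode_for_logging(messages))
  rescue StandardError => logging_error
    logger.error("Large message details logging failure: #{logging_error.message}")
  ensure
    raise error # always re-raise the original Kafka exception
  end
end
```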
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Deimos
-  VERSION = '1.22.3'
+  VERSION = '1.22.5'
 end
data/lib/deimos.rb CHANGED
@@ -23,7 +23,6 @@ require 'deimos/utils/schema_class'
 require 'deimos/schema_class/enum'
 require 'deimos/schema_class/record'
 
-require 'deimos/monkey_patches/phobos_producer'
 require 'deimos/monkey_patches/phobos_cli'
 
 require 'deimos/railtie' if defined?(Rails)
data/spec/message_spec.rb CHANGED
@@ -16,4 +16,24 @@ RSpec.describe(Deimos::Message) do
    expect { described_class.new({ a: 1, b: 2 }, nil, key: { c: 3, d: 4 }) }.
      not_to raise_exception
  end
+
+  describe 'headers' do
+    it 'returns nil when not set' do
+      expect(described_class.new({ v: 'val1' }, nil, key: 'key1')).
+        to have_attributes(headers: nil)
+    end
+
+    it 'can set and get headers' do
+      expect(described_class.new({ v: 'val1' }, nil, key: 'key1', headers: { a: 1 })).
+        to have_attributes(headers: { a: 1 })
+    end
+
+    it 'includes headers when converting to Hash' do
+      expect(described_class.new({ v: 'val1' }, nil, key: 'key1', headers: { a: 1 }).to_h).
+        to include(headers: { a: 1 })
+
+      expect(described_class.new({ v: 'val1' }, nil, key: 'key1', headers: { a: 1 }).encoded_hash).
+        to include(headers: { a: 1 })
+    end
+  end
 end
@@ -110,18 +110,20 @@ module ProducerTest
      expect('my-topic').not_to have_sent('test_id' => 'foo2', 'some_int' => 123)
    end
 
-    it 'should allow setting the topic from publish_list' do
+    it 'should allow setting the topic and headers from publish_list' do
      expect(described_class).to receive(:produce_batch).once.with(
        Deimos::Backends::Test,
        [
          Deimos::Message.new({ 'test_id' => 'foo', 'some_int' => 123 },
                              MyProducer,
                              topic: 'a-new-topic',
+                              headers: { 'foo' => 'bar' },
                              partition_key: 'foo',
                              key: 'foo'),
          Deimos::Message.new({ 'test_id' => 'bar', 'some_int' => 124 },
                              MyProducer,
                              topic: 'a-new-topic',
+                              headers: { 'foo' => 'bar' },
                              partition_key: 'bar',
                              key: 'bar')
        ]
@@ -130,9 +132,10 @@ module ProducerTest
      MyProducer.publish_list(
        [{ 'test_id' => 'foo', 'some_int' => 123 },
         { 'test_id' => 'bar', 'some_int' => 124 }],
-        topic: 'a-new-topic'
+        topic: 'a-new-topic',
+        headers: { 'foo' => 'bar' }
      )
-      expect('a-new-topic').to have_sent('test_id' => 'foo', 'some_int' => 123)
+      expect('a-new-topic').to have_sent({ 'test_id' => 'foo', 'some_int' => 123 }, nil, nil, { 'foo' => 'bar' })
      expect('my-topic').not_to have_sent('test_id' => 'foo', 'some_int' => 123)
      expect('my-topic').not_to have_sent('test_id' => 'foo2', 'some_int' => 123)
    end
@@ -2,12 +2,13 @@
 
 each_db_config(Deimos::Utils::DbProducer) do
   let(:producer) do
-    producer = described_class.new
+    producer = described_class.new(logger)
     allow(producer).to receive(:sleep)
     allow(producer).to receive(:producer).and_return(phobos_producer)
     producer
   end
 
+  let(:logger) { nil }
   let(:phobos_producer) do
     pp = instance_double(Phobos::Producer::PublicAPI)
     allow(pp).to receive(:publish_list)
@@ -308,35 +309,6 @@ each_db_config(Deimos::Utils::DbProducer) do
     Deimos.unsubscribe(subscriber)
   end
 
-  it 'should delete messages on buffer overflow' do
-    messages = (1..4).map do |i|
-      Deimos::KafkaMessage.create!(
-        id: i,
-        topic: 'my-topic',
-        message: "mess#{i}",
-        partition_key: "key#{i}"
-      )
-    end
-    (5..8).each do |i|
-      Deimos::KafkaMessage.create!(
-        id: i,
-        topic: 'my-topic2',
-        message: "mess#{i}",
-        partition_key: "key#{i}"
-      )
-    end
-
-    expect(Deimos::KafkaTopicInfo).to receive(:lock).
-      with('my-topic', 'abc').and_return(true)
-    expect(producer).to receive(:produce_messages).and_raise(Kafka::BufferOverflow)
-    expect(producer).to receive(:retrieve_messages).and_return(messages)
-    expect(Deimos::KafkaTopicInfo).to receive(:register_error)
-
-    expect(Deimos::KafkaMessage.count).to eq(8)
-    producer.process_topic('my-topic')
-    expect(Deimos::KafkaMessage.count).to eq(4)
-  end
-
   it 'should retry deletes and not re-publish' do
     messages = (1..4).map do |i|
       Deimos::KafkaMessage.create!(
@@ -388,6 +360,102 @@ each_db_config(Deimos::Utils::DbProducer) do
     expect { producer.delete_messages(messages) }.to raise_exception('OH NOES')
   end
 
+  context 'with buffer overflow exception' do
+    let(:messages) do
+      (1..4).map do |i|
+        Deimos::KafkaMessage.create!(
+          id: i,
+          key: i,
+          topic: 'my-topic',
+          message: { message: "mess#{i}" },
+          partition_key: "key#{i}"
+        )
+      end
+    end
+    let(:logger) do
+      logger = instance_double(Logger)
+      allow(logger).to receive(:error)
+      logger
+    end
+    let(:message_producer) do
+      Deimos.config.schema.backend = :mock
+      Deimos::ActiveRecordProducer.topic('my-topic')
+      Deimos::ActiveRecordProducer.key_config
+      Deimos::ActiveRecordProducer
+    end
+
+    around(:each) do |example|
+      config = Deimos::ActiveRecordProducer.config.clone
+      backend = Deimos.config.schema.backend
+
+      example.run
+    ensure
+      Deimos::ActiveRecordProducer.instance_variable_set(:@config, config)
+      Deimos.config.schema.backend = backend
+    end
+
+    before(:each) do
+      message_producer
+      (5..8).each do |i|
+        Deimos::KafkaMessage.create!(
+          id: i,
+          topic: 'my-topic2',
+          message: "mess#{i}",
+          partition_key: "key#{i}"
+        )
+      end
+      allow(Deimos::KafkaTopicInfo).to receive(:lock).
+        with('my-topic', 'abc').and_return(true)
+      allow(producer).to receive(:produce_messages).and_raise(Kafka::BufferOverflow)
+      allow(producer).to receive(:retrieve_messages).and_return(messages)
+      allow(Deimos::KafkaTopicInfo).to receive(:register_error)
+    end
+
+    it 'should delete messages on buffer overflow' do
+      expect(Deimos::KafkaMessage.count).to eq(8)
+      producer.process_topic('my-topic')
+      expect(Deimos::KafkaMessage.count).to eq(4)
+    end
+
+    it 'should notify on buffer overflow' do
+      subscriber = Deimos.subscribe('db_producer.produce') do |event|
+        expect(event.payload[:exception_object].message).to eq('Kafka::BufferOverflow')
+        expect(event.payload[:messages]).to eq(messages)
+      end
+      producer.process_topic('my-topic')
+      Deimos.unsubscribe(subscriber)
+      expect(logger).to have_received(:error).with('Message batch too large, deleting...')
+      expect(logger).to have_received(:error).with(
+        [
+          { key: '1', payload: 'payload-decoded' },
+          { key: '2', payload: 'payload-decoded' },
+          { key: '3', payload: 'payload-decoded' },
+          { key: '4', payload: 'payload-decoded' }
+        ]
+      )
+    end
+
+    context 'with exception on error logging attempt' do
+      let(:message_producer) do
+        Deimos::ActiveRecordProducer.topic('my-topic')
+        Deimos::ActiveRecordProducer
+      end
+
+      it 'should notify on buffer overflow disregarding decoding exception' do
+        subscriber = Deimos.subscribe('db_producer.produce') do |event|
+          expect(event.payload[:exception_object].message).to eq('Kafka::BufferOverflow')
+          expect(event.payload[:messages]).to eq(messages)
+        end
+        producer.process_topic('my-topic')
+        Deimos.unsubscribe(subscriber)
+        expect(logger).to have_received(:error).with('Message batch too large, deleting...')
+        expect(logger).to have_received(:error).with(
+          'Large message details logging failure: '\
+          'No key config given - if you are not decoding keys, please use `key_config plain: true`'
+        )
+      end
+    end
+  end
 end
 
 describe '#send_pending_metrics' do
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: deimos-ruby
 version: !ruby/object:Gem::Version
-  version: 1.22.3
+  version: 1.22.5
 platform: ruby
 authors:
 - Daniel Orner
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-06-13 00:00:00.000000000 Z
+date: 2023-07-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: avro_turf
@@ -459,7 +459,6 @@ files:
 - lib/deimos/metrics/mock.rb
 - lib/deimos/metrics/provider.rb
 - lib/deimos/monkey_patches/phobos_cli.rb
-- lib/deimos/monkey_patches/phobos_producer.rb
 - lib/deimos/poll_info.rb
 - lib/deimos/producer.rb
 - lib/deimos/railtie.rb
data/lib/deimos/monkey_patches/phobos_producer.rb DELETED
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'phobos/producer'
-
-#@!visibility private
-module Phobos
-  module Producer
-    # :nodoc:
-    class PublicAPI
-      # :nodoc:
-      def publish(topic, payload, key=nil, partition_key=nil)
-        class_producer.publish(topic, payload, key, partition_key)
-      end
-
-      # :nodoc:
-      def async_publish(topic, payload, key=nil, partition_key=nil)
-        class_producer.async_publish(topic, payload, key, partition_key)
-      end
-    end
-
-    # :nodoc:
-    module ClassMethods
-      # :nodoc:
-      class PublicAPI
-        # :nodoc:
-        def publish(topic, payload, key=nil, partition_key=nil)
-          publish_list([{ topic: topic, payload: payload, key: key,
-                          partition_key: partition_key }])
-        end
-
-        # :nodoc:
-        def async_publish(topic, payload, key=nil, partition_key=nil)
-          async_publish_list([{ topic: topic, payload: payload, key: key,
-                                partition_key: partition_key }])
-        end
-
-        private
-
-        # :nodoc:
-        def produce_messages(producer, messages)
-          messages.each do |message|
-            partition_key = message[:partition_key] || message[:key]
-            producer.produce(message[:payload],
-                             topic: message[:topic],
-                             key: message[:key],
-                             partition_key: partition_key)
-          end
-        end
-      end
-    end
-  end
-end