deimos-ruby 1.6.2 → 1.8.0.pre.beta2

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (65)
  1. checksums.yaml +4 -4
  2. data/.circleci/config.yml +9 -0
  3. data/.rubocop.yml +15 -13
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +31 -0
  6. data/Gemfile.lock +43 -36
  7. data/README.md +141 -16
  8. data/Rakefile +1 -1
  9. data/deimos-ruby.gemspec +2 -1
  10. data/docs/ARCHITECTURE.md +144 -0
  11. data/docs/CONFIGURATION.md +27 -0
  12. data/lib/deimos.rb +7 -6
  13. data/lib/deimos/active_record_consume/batch_consumption.rb +159 -0
  14. data/lib/deimos/active_record_consume/batch_slicer.rb +27 -0
  15. data/lib/deimos/active_record_consume/message_consumption.rb +58 -0
  16. data/lib/deimos/active_record_consume/schema_model_converter.rb +52 -0
  17. data/lib/deimos/active_record_consumer.rb +33 -75
  18. data/lib/deimos/active_record_producer.rb +23 -0
  19. data/lib/deimos/batch_consumer.rb +2 -140
  20. data/lib/deimos/config/configuration.rb +28 -10
  21. data/lib/deimos/consume/batch_consumption.rb +150 -0
  22. data/lib/deimos/consume/message_consumption.rb +94 -0
  23. data/lib/deimos/consumer.rb +79 -69
  24. data/lib/deimos/kafka_message.rb +1 -1
  25. data/lib/deimos/kafka_topic_info.rb +1 -1
  26. data/lib/deimos/message.rb +6 -1
  27. data/lib/deimos/metrics/provider.rb +0 -2
  28. data/lib/deimos/poll_info.rb +9 -0
  29. data/lib/deimos/tracing/provider.rb +0 -2
  30. data/lib/deimos/utils/db_poller.rb +149 -0
  31. data/lib/deimos/utils/db_producer.rb +8 -3
  32. data/lib/deimos/utils/deadlock_retry.rb +68 -0
  33. data/lib/deimos/utils/lag_reporter.rb +19 -26
  34. data/lib/deimos/version.rb +1 -1
  35. data/lib/generators/deimos/db_poller/templates/migration +11 -0
  36. data/lib/generators/deimos/db_poller/templates/rails3_migration +16 -0
  37. data/lib/generators/deimos/db_poller_generator.rb +48 -0
  38. data/lib/tasks/deimos.rake +7 -0
  39. data/spec/active_record_batch_consumer_spec.rb +481 -0
  40. data/spec/active_record_consume/batch_slicer_spec.rb +42 -0
  41. data/spec/active_record_consume/schema_model_converter_spec.rb +105 -0
  42. data/spec/active_record_consumer_spec.rb +3 -11
  43. data/spec/active_record_producer_spec.rb +66 -88
  44. data/spec/batch_consumer_spec.rb +24 -7
  45. data/spec/config/configuration_spec.rb +4 -0
  46. data/spec/consumer_spec.rb +8 -8
  47. data/spec/deimos_spec.rb +57 -49
  48. data/spec/handlers/my_batch_consumer.rb +6 -1
  49. data/spec/handlers/my_consumer.rb +6 -1
  50. data/spec/message_spec.rb +19 -0
  51. data/spec/producer_spec.rb +3 -3
  52. data/spec/rake_spec.rb +1 -1
  53. data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc +18 -0
  54. data/spec/schemas/com/my-namespace/Wibble.avsc +43 -0
  55. data/spec/spec_helper.rb +61 -6
  56. data/spec/utils/db_poller_spec.rb +320 -0
  57. data/spec/utils/deadlock_retry_spec.rb +74 -0
  58. data/spec/utils/lag_reporter_spec.rb +29 -22
  59. metadata +55 -20
  60. data/lib/deimos/base_consumer.rb +0 -104
  61. data/lib/deimos/utils/executor.rb +0 -124
  62. data/lib/deimos/utils/platform_schema_validation.rb +0 -0
  63. data/lib/deimos/utils/signal_handler.rb +0 -68
  64. data/spec/utils/executor_spec.rb +0 -53
  65. data/spec/utils/signal_handler_spec.rb +0 -16
data/lib/deimos/active_record_consume/schema_model_converter.rb
@@ -0,0 +1,52 @@
+ # frozen_string_literal: true
+
+ module Deimos
+   module ActiveRecordConsume
+     # Convert a message with a schema to an ActiveRecord model
+     class SchemaModelConverter
+       # Create new converter
+       # @param decoder [SchemaBackends::Base] Incoming message schema.
+       # @param klass [ActiveRecord::Base] Model to map to.
+       def initialize(decoder, klass)
+         @decoder = decoder
+         @klass = klass
+       end
+
+       # Convert a message from a decoded hash to a set of ActiveRecord
+       # attributes. Attributes that don't exist in the model will be ignored.
+       # @param payload [Hash] Decoded message payload.
+       # @return [Hash] Model attributes.
+       def convert(payload)
+         attributes = {}
+         @decoder.schema_fields.each do |field|
+           column = @klass.columns.find { |c| c.name == field.name }
+           next if column.nil?
+           next if %w(updated_at created_at).include?(field.name)
+
+           attributes[field.name] = _coerce_field(column, payload[field.name])
+         end
+         attributes
+       end
+
+       private
+
+       # @param column [ActiveRecord::ConnectionAdapters::Column]
+       # @param val [Object]
+       def _coerce_field(column, val)
+         return nil if val.nil?
+
+         if column.type == :datetime
+           int_val = begin
+             val.is_a?(Integer) ? val : (val.is_a?(String) && Integer(val))
+           rescue StandardError
+             nil
+           end
+
+           return Time.zone.at(int_val) if int_val
+         end
+
+         val
+       end
+     end
+   end
+ end
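
This extracted converter is what both the message-by-message and batch paths of the new ActiveRecordConsumer delegate to. A minimal sketch of what it does, assuming a `Widget` model and the `Deimos.schema_backend` helper (the model, schema, and field names here are illustrative, not part of this diff):

    # Hypothetical usage: map a decoded payload onto Widget's columns,
    # skipping fields the table doesn't have (and created_at/updated_at).
    decoder = Deimos.schema_backend(schema: 'MySchema', namespace: 'com.my-namespace')
    converter = Deimos::ActiveRecordConsume::SchemaModelConverter.new(decoder, Widget)
    attrs = converter.convert('test_id' => 'abc', 'some_int' => 3)
    Widget.create!(attrs)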
data/lib/deimos/active_record_consumer.rb
@@ -1,101 +1,59 @@
  # frozen_string_literal: true
 
+ require 'deimos/active_record_consume/batch_consumption'
+ require 'deimos/active_record_consume/message_consumption'
+ require 'deimos/active_record_consume/schema_model_converter'
  require 'deimos/consumer'
 
  module Deimos
-   # Consumer that automatically saves the payload into the database.
+   # Basic ActiveRecord consumer class. Consumes messages and upserts them to
+   # the database. For tombstones (null payloads), deletes corresponding
+   # records from the database. Can operate in either message-by-message mode
+   # or in batch mode.
+   #
+   # In batch mode, ActiveRecord callbacks will be skipped and messages will
+   # be batched to minimize database calls.
+
+   # To configure batch vs. message mode, change the delivery mode of your
+   # Phobos listener.
+   # Message-by-message -> use `delivery: message` or `delivery: batch`
+   # Batch -> use `delivery: inline_batch`
    class ActiveRecordConsumer < Consumer
+     include ActiveRecordConsume::MessageConsumption
+     include ActiveRecordConsume::BatchConsumption
+
      class << self
        # @param klass [Class < ActiveRecord::Base] the class used to save to the
        # database.
        def record_class(klass)
          config[:record_class] = klass
        end
-     end
 
-     # Find the record specified by the given payload and key.
-     # Default is to use the primary key column and the value of the first
-     # field in the key.
-     # @param klass [Class < ActiveRecord::Base]
-     # @param _payload [Hash]
-     # @param key [Object]
-     # @return [ActiveRecord::Base]
-     def fetch_record(klass, _payload, key)
-       klass.unscoped.where(klass.primary_key => key).first
+       # @param val [Boolean] Turn pre-compaction of the batch on or off. If true,
+       # only the last message for each unique key in a batch is processed.
+       def compacted(val)
+         config[:compacted] = val
+       end
      end
 
-     # Assign a key to a new record.
-     # @param record [ActiveRecord::Base]
-     # @param _payload [Hash]
-     # @param key [Object]
-     def assign_key(record, _payload, key)
-       record[record.class.primary_key] = key
-     end
+     # Setup
+     def initialize
+       @klass = self.class.config[:record_class]
+       @converter = ActiveRecordConsume::SchemaModelConverter.new(self.class.decoder, @klass)
 
-     # :nodoc:
-     def consume(payload, metadata)
-       key = metadata.with_indifferent_access[:key]
-       klass = self.class.config[:record_class]
-       record = fetch_record(klass, (payload || {}).with_indifferent_access, key)
-       if payload.nil?
-         destroy_record(record)
-         return
+       if self.class.config[:key_schema]
+         @key_converter = ActiveRecordConsume::SchemaModelConverter.new(self.class.key_decoder, @klass)
        end
-       if record.blank?
-         record = klass.new
-         assign_key(record, payload, key)
-       end
-       attrs = record_attributes(payload.with_indifferent_access)
-       # don't use attributes= - bypass Rails < 5 attr_protected
-       attrs.each do |k, v|
-         record.send("#{k}=", v)
-       end
-       record.created_at ||= Time.zone.now if record.respond_to?(:created_at)
-       record.updated_at = Time.zone.now if record.respond_to?(:updated_at)
-       record.save!
-     end
 
-     # Destroy a record that received a null payload. Override if you need
-     # to do something other than a straight destroy (e.g. mark as archived).
-     # @param record [ActiveRecord::Base]
-     def destroy_record(record)
-       record&.destroy
+       @compacted = self.class.config[:compacted] != false
      end
 
      # Override this method (with `super`) if you want to add/change the default
      # attributes set to the new/existing record.
      # @param payload [Hash]
-     def record_attributes(payload)
-       klass = self.class.config[:record_class]
-       attributes = {}
-       self.class.decoder.schema_fields.each do |field|
-         column = klass.columns.find { |c| c.name == field.name }
-         next if column.nil?
-         next if %w(updated_at created_at).include?(field.name)
-
-         attributes[field.name] = _coerce_field(column, payload[field.name])
-       end
-       attributes
-     end
-
-     private
-
-     # @param column [ActiveRecord::ConnectionAdapters::Column]
-     # @param val [Object]
-     def _coerce_field(column, val)
-       return nil if val.nil?
-
-       if column.type == :datetime
-         int_val = begin
-           val.is_a?(Integer) ? val : (val.is_a?(String) && Integer(val))
-         rescue StandardError
-           nil
-         end
-
-         return Time.zone.at(int_val) if int_val
-       end
-
-       val
+     # @param _key [String]
+     def record_attributes(payload, _key=nil)
+       @converter.convert(payload)
      end
    end
  end
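
Since both consumption modes now live on one class, the mode is selected by the listener's `delivery` setting rather than by which base class you inherit. A sketch of the two sides, assuming the `Deimos.configure` consumer block documented in this gem's README (the `Widget` model, topic, and group names are illustrative):

    class MyConsumer < Deimos::ActiveRecordConsumer
      schema 'MySchema'
      namespace 'com.my-namespace'
      key_config field: :test_id
      record_class Widget
      compacted false # process every message in a batch, not just the last per key
    end

    Deimos.configure do
      consumer do
        class_name 'MyConsumer'
        topic 'MyTopic'
        group_id 'my-group'
        delivery :inline_batch # batch mode; use :message (or :batch) for message-by-message
      end
    end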
data/lib/deimos/active_record_producer.rb
@@ -59,6 +59,29 @@ module Deimos
            k.to_sym != :payload_key && !fields.map(&:name).include?(k)
          end
        end
+
+       # Query to use when polling the database with the DbPoller. Add
+       # includes, joins, or wheres as necessary, or replace entirely.
+       # @param time_from [Time] the time to start the query from.
+       # @param time_to [Time] the time to end the query.
+       # @param column_name [Symbol] the column name to look for.
+       # @param min_id [Numeric] the minimum ID (i.e. all IDs must be greater
+       # than this value).
+       # @return [ActiveRecord::Relation]
+       def poll_query(time_from:, time_to:, column_name: :updated_at, min_id:)
+         klass = config[:record_class]
+         table = ActiveRecord::Base.connection.quote_table_name(klass.table_name)
+         column = ActiveRecord::Base.connection.quote_column_name(column_name)
+         primary = ActiveRecord::Base.connection.quote_column_name(klass.primary_key)
+         klass.where(
+           "((#{table}.#{column} = ? AND #{table}.#{primary} > ?) \
+             OR #{table}.#{column} > ?) AND #{table}.#{column} <= ?",
+           time_from,
+           min_id,
+           time_from,
+           time_to
+         )
+       end
      end
    end
  end
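
Producer subclasses can narrow or replace this query by overriding it. A sketch with a hypothetical `WidgetProducer` that keeps the default time/ID windowing but adds a scope:

    class WidgetProducer < Deimos::ActiveRecordProducer
      record_class Widget

      # The keyword signature must match the default poll_query so the
      # DbPoller can call it uniformly.
      def self.poll_query(time_from:, time_to:, column_name: :updated_at, min_id:)
        super.where(active: true)
      end
    end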
data/lib/deimos/batch_consumer.rb
@@ -1,145 +1,7 @@
  # frozen_string_literal: true
 
- require 'deimos/base_consumer'
- require 'phobos/batch_handler'
-
  module Deimos
-   # Class to consume batches of messages in a topic
-   # Note: According to the docs, instances of your handler will be created
-   # for every incoming batch of messages. This class should be lightweight.
-   class BatchConsumer < BaseConsumer
-     include Phobos::BatchHandler
-
-     # :nodoc:
-     def around_consume_batch(batch, metadata)
-       payloads = []
-       benchmark = Benchmark.measure do
-         if self.class.config[:key_configured]
-           metadata[:keys] = batch.map do |message|
-             decode_key(message.key)
-           end
-         end
-
-         payloads = batch.map do |message|
-           message.payload ? self.class.decoder.decode(message.payload) : nil
-         end
-         _received_batch(payloads, metadata)
-         _with_error_span(payloads, metadata) do
-           yield payloads, metadata
-         end
-       end
-       _handle_success(benchmark.real, payloads, metadata)
-     end
-
-     # Consume a batch of incoming messages.
-     # @param _payloads [Array<Phobos::BatchMessage>]
-     # @param _metadata [Hash]
-     def consume_batch(_payloads, _metadata)
-       raise NotImplementedError
-     end
-
-     protected
-
-     def _received_batch(payloads, metadata)
-       Deimos.config.logger.info(
-         message: 'Got Kafka batch event',
-         message_ids: _payload_identifiers(payloads, metadata),
-         metadata: metadata.except(:keys)
-       )
-       Deimos.config.logger.debug(
-         message: 'Kafka batch event payloads',
-         payloads: payloads
-       )
-       Deimos.config.metrics&.increment(
-         'handler',
-         tags: %W(
-           status:batch_received
-           topic:#{metadata[:topic]}
-         ))
-       Deimos.config.metrics&.increment(
-         'handler',
-         by: metadata['batch_size'],
-         tags: %W(
-           status:received
-           topic:#{metadata[:topic]}
-         ))
-       if payloads.present?
-         payloads.each { |payload| _report_time_delayed(payload, metadata) }
-       end
-     end
-
-     # @param exception [Throwable]
-     # @param payloads [Array<Hash>]
-     # @param metadata [Hash]
-     def _handle_error(exception, payloads, metadata)
-       Deimos.config.metrics&.increment(
-         'handler',
-         tags: %W(
-           status:batch_error
-           topic:#{metadata[:topic]}
-         ))
-       Deimos.config.logger.warn(
-         message: 'Error consuming message batch',
-         handler: self.class.name,
-         metadata: metadata.except(:keys),
-         message_ids: _payload_identifiers(payloads, metadata),
-         error_message: exception.message,
-         error: exception.backtrace
-       )
-       super
-     end
-
-     # @param time_taken [Float]
-     # @param payloads [Array<Hash>]
-     # @param metadata [Hash]
-     def _handle_success(time_taken, payloads, metadata)
-       Deimos.config.metrics&.histogram('handler', time_taken, tags: %W(
-         time:consume_batch
-         topic:#{metadata[:topic]}
-       ))
-       Deimos.config.metrics&.increment(
-         'handler',
-         tags: %W(
-           status:batch_success
-           topic:#{metadata[:topic]}
-         ))
-       Deimos.config.metrics&.increment(
-         'handler',
-         by: metadata['batch_size'],
-         tags: %W(
-           status:success
-           topic:#{metadata[:topic]}
-         ))
-       Deimos.config.logger.info(
-         message: 'Finished processing Kafka batch event',
-         message_ids: _payload_identifiers(payloads, metadata),
-         time_elapsed: time_taken,
-         metadata: metadata.except(:keys)
-       )
-     end
-
-     # Get payload identifiers (key and message_id if present) for logging.
-     # @param payloads [Array<Hash>]
-     # @param metadata [Hash]
-     # @return [Hash] the identifiers.
-     def _payload_identifiers(payloads, metadata)
-       message_ids = payloads&.map do |payload|
-         if payload.is_a?(Hash) && payload.key?('message_id')
-           payload['message_id']
-         end
-       end
-
-       # Payloads may be nil if preprocessing failed
-       messages = payloads || metadata[:keys] || []
-
-       messages.zip(metadata[:keys] || [], message_ids || []).map do |_, k, m_id|
-         ids = {}
-
-         ids[:key] = k if k.present?
-         ids[:message_id] = m_id if m_id.present?
-
-         ids
-       end
-     end
+   # @deprecated Use Deimos::Consumer with `delivery: inline_batch` configured instead
+   class BatchConsumer < Consumer
    end
  end
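
Existing subclasses keep loading through this shim, but the batch logic now comes from Deimos::Consumer via Consume::BatchConsumption, so migrating off the deprecated class is a one-line superclass change. A sketch (class and model names illustrative):

    # Before: class MyBatchConsumer < Deimos::BatchConsumer
    class MyBatchConsumer < Deimos::Consumer
      # Same override as before; the listener must use `delivery: inline_batch`.
      # metadata[:keys] is populated when a key config is set.
      def consume_batch(payloads, metadata)
        payloads.zip(metadata[:keys]).each do |payload, key|
          Widget.create!(payload.merge('id' => key))
        end
      end
    end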
data/lib/deimos/config/configuration.rb
@@ -47,17 +47,15 @@ module Deimos
        handler_class = listener.handler.constantize
        delivery = listener.delivery
 
-       # Validate that Deimos consumers use proper delivery configs
-       if handler_class < Deimos::BatchConsumer
-         unless delivery == 'inline_batch'
-           raise "BatchConsumer #{listener.handler} must have delivery set to"\
-             ' `inline_batch`'
-         end
-       elsif handler_class < Deimos::Consumer
-         if delivery.present? && !%w(message batch).include?(delivery)
-           raise "Non-batch Consumer #{listener.handler} must have delivery"\
-             ' set to `message` or `batch`'
+       next unless handler_class < Deimos::Consumer
+
+       # Validate that each consumer implements the correct method for its type
+       if delivery == 'inline_batch'
+         if handler_class.instance_method(:consume_batch).owner == Deimos::Consume::BatchConsumption
+           raise "BatchConsumer #{listener.handler} does not implement `consume_batch`"
          end
+       elsif handler_class.instance_method(:consume).owner == Deimos::Consume::MessageConsumption
+         raise "Non-batch Consumer #{listener.handler} does not implement `consume`"
        end
      end
    end
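
The check moves validation from the `delivery` value itself to whether the handler actually overrides the right hook, so a misconfigured consumer now fails when the config is validated. For instance (hypothetical class):

    # With `delivery inline_batch` on its listener, this class is rejected at
    # startup, because consume_batch was never overridden:
    #   "BatchConsumer BadConsumer does not implement `consume_batch`"
    class BadConsumer < Deimos::Consumer
      def consume(payload, metadata) # only the message-by-message hook
        Widget.create!(payload)
      end
    end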
data/lib/deimos/config/configuration.rb
@@ -340,6 +338,26 @@ module Deimos
        setting :heartbeat_interval
      end
 
+     setting_object :db_poller do
+       # Producer class to use for the poller.
+       setting :producer_class
+       # How often to run the poller, in seconds. If a poll takes longer than
+       # this time, the next poll will start immediately after it finishes.
+       setting :run_every, 60
+       # Column to use to find updates. Must have an index on it.
+       setting :timestamp_column, :updated_at
+       # Amount of time, in seconds, to wait before picking up updates, to allow
+       # transactions to complete but still pick up the right records.
+       setting :delay_time, 2
+       # If true, dump the full table rather than incremental changes. Should
+       # only be used for very small tables.
+       setting :full_table, false
+       # If false, start from the current time instead of the beginning of time
+       # if this is the first time running the poller.
+       setting :start_from_beginning, true
+     end
+
      deprecate 'kafka_logger', 'kafka.logger'
      deprecate 'reraise_consumer_errors', 'consumers.reraise_errors'
      deprecate 'schema_registry_url', 'schema.registry_url'
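
These settings feed the new Deimos::Utils::DbPoller (data/lib/deimos/utils/db_poller.rb), which is started via the rake task added in data/lib/tasks/deimos.rake. A sketch of a poller block, using the `Deimos.configure` DSL that `setting_object` generates (the producer name is illustrative):

    Deimos.configure do
      db_poller do
        producer_class 'WidgetProducer'
        run_every 60                 # poll once a minute
        timestamp_column :updated_at # must be indexed
        delay_time 2                 # let in-flight transactions commit
        full_table false             # incremental changes only
        start_from_beginning true    # first run walks the whole history
      end
    end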
data/lib/deimos/consume/batch_consumption.rb
@@ -0,0 +1,150 @@
+ # frozen_string_literal: true
+
+ module Deimos
+   module Consume
+     # Helper methods used by batch consumers, i.e. those with "inline_batch"
+     # delivery. Payloads are decoded, then consumers are invoked with arrays
+     # of messages to be handled at once.
+     module BatchConsumption
+       extend ActiveSupport::Concern
+       include Phobos::BatchHandler
+
+       # :nodoc:
+       def around_consume_batch(batch, metadata)
+         payloads = []
+         benchmark = Benchmark.measure do
+           if self.class.config[:key_configured]
+             metadata[:keys] = batch.map do |message|
+               decode_key(message.key)
+             end
+           end
+           metadata[:first_offset] = batch.first&.offset
+
+           payloads = batch.map do |message|
+             message.payload ? self.class.decoder.decode(message.payload) : nil
+           end
+           _received_batch(payloads, metadata)
+           _with_span do
+             yield payloads, metadata
+           end
+         end
+         _handle_batch_success(benchmark.real, payloads, metadata)
+       rescue StandardError => e
+         _handle_batch_error(e, payloads, metadata)
+       end
+
+       # Consume a batch of incoming messages.
+       # @param _payloads [Array<Phobos::BatchMessage>]
+       # @param _metadata [Hash]
+       def consume_batch(_payloads, _metadata)
+         raise NotImplementedError
+       end
+
+       protected
+
+       def _received_batch(payloads, metadata)
+         Deimos.config.logger.info(
+           message: 'Got Kafka batch event',
+           message_ids: _payload_identifiers(payloads, metadata),
+           metadata: metadata.except(:keys)
+         )
+         Deimos.config.logger.debug(
+           message: 'Kafka batch event payloads',
+           payloads: payloads
+         )
+         Deimos.config.metrics&.increment(
+           'handler',
+           tags: %W(
+             status:batch_received
+             topic:#{metadata[:topic]}
+           ))
+         Deimos.config.metrics&.increment(
+           'handler',
+           by: metadata['batch_size'],
+           tags: %W(
+             status:received
+             topic:#{metadata[:topic]}
+           ))
+         if payloads.present?
+           payloads.each { |payload| _report_time_delayed(payload, metadata) }
+         end
+       end
+
+       # @param exception [Throwable]
+       # @param payloads [Array<Hash>]
+       # @param metadata [Hash]
+       def _handle_batch_error(exception, payloads, metadata)
+         Deimos.config.metrics&.increment(
+           'handler',
+           tags: %W(
+             status:batch_error
+             topic:#{metadata[:topic]}
+           ))
+         Deimos.config.logger.warn(
+           message: 'Error consuming message batch',
+           handler: self.class.name,
+           metadata: metadata.except(:keys),
+           message_ids: _payload_identifiers(payloads, metadata),
+           error_message: exception.message,
+           error: exception.backtrace
+         )
+         _error(exception, payloads, metadata)
+       end
+
+       # @param time_taken [Float]
+       # @param payloads [Array<Hash>]
+       # @param metadata [Hash]
+       def _handle_batch_success(time_taken, payloads, metadata)
+         Deimos.config.metrics&.histogram('handler',
+                                          time_taken,
+                                          tags: %W(
+                                            time:consume_batch
+                                            topic:#{metadata[:topic]}
+                                          ))
+         Deimos.config.metrics&.increment(
+           'handler',
+           tags: %W(
+             status:batch_success
+             topic:#{metadata[:topic]}
+           ))
+         Deimos.config.metrics&.increment(
+           'handler',
+           by: metadata['batch_size'],
+           tags: %W(
+             status:success
+             topic:#{metadata[:topic]}
+           ))
+         Deimos.config.logger.info(
+           message: 'Finished processing Kafka batch event',
+           message_ids: _payload_identifiers(payloads, metadata),
+           time_elapsed: time_taken,
+           metadata: metadata.except(:keys)
+         )
+       end
+
+       # Get payload identifiers (key and message_id if present) for logging.
+       # @param payloads [Array<Hash>]
+       # @param metadata [Hash]
+       # @return [Array<Array>] the identifiers.
+       def _payload_identifiers(payloads, metadata)
+         message_ids = payloads&.map do |payload|
+           if payload.is_a?(Hash) && payload.key?('message_id')
+             payload['message_id']
+           end
+         end
+
+         # Payloads may be nil if preprocessing failed
+         messages = payloads || metadata[:keys] || []
+
+         messages.zip(metadata[:keys] || [], message_ids || []).map do |_, k, m_id|
+           ids = {}
+
+           ids[:key] = k if k.present?
+           ids[:message_id] = m_id if m_id.present?
+
+           ids
+         end
+       end
+     end
+   end
+ end
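
For handler authors, the contract this module provides looks like the following sketch (topic and class names illustrative; the metadata fields are the ones populated by the code above):

    class AuditConsumer < Deimos::Consumer
      def consume_batch(payloads, metadata)
        # payloads: Array<Hash> of decoded messages; nil entries are tombstones.
        # metadata[:keys]: decoded keys, parallel to payloads (when keys are configured).
        # metadata[:first_offset]: offset of the first message in the batch.
        Deimos.config.logger.info(
          message: 'Processing audit batch',
          topic: metadata[:topic],
          first_offset: metadata[:first_offset],
          batch_size: payloads.size
        )
      end
    end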