deimos-temp-fork 0.0.1

Files changed (146)
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +83 -0
  3. data/.gitignore +41 -0
  4. data/.gitmodules +0 -0
  5. data/.rspec +1 -0
  6. data/.rubocop.yml +333 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +349 -0
  10. data/CODE_OF_CONDUCT.md +77 -0
  11. data/Dockerfile +23 -0
  12. data/Gemfile +6 -0
  13. data/Gemfile.lock +286 -0
  14. data/Guardfile +22 -0
  15. data/LICENSE.md +195 -0
  16. data/README.md +1099 -0
  17. data/Rakefile +13 -0
  18. data/bin/deimos +4 -0
  19. data/deimos-ruby.gemspec +44 -0
  20. data/docker-compose.yml +71 -0
  21. data/docs/ARCHITECTURE.md +140 -0
  22. data/docs/CONFIGURATION.md +236 -0
  23. data/docs/DATABASE_BACKEND.md +147 -0
  24. data/docs/INTEGRATION_TESTS.md +52 -0
  25. data/docs/PULL_REQUEST_TEMPLATE.md +35 -0
  26. data/docs/UPGRADING.md +128 -0
  27. data/lib/deimos-temp-fork.rb +95 -0
  28. data/lib/deimos/active_record_consume/batch_consumption.rb +164 -0
  29. data/lib/deimos/active_record_consume/batch_slicer.rb +27 -0
  30. data/lib/deimos/active_record_consume/message_consumption.rb +79 -0
  31. data/lib/deimos/active_record_consume/schema_model_converter.rb +52 -0
  32. data/lib/deimos/active_record_consumer.rb +67 -0
  33. data/lib/deimos/active_record_producer.rb +87 -0
  34. data/lib/deimos/backends/base.rb +32 -0
  35. data/lib/deimos/backends/db.rb +41 -0
  36. data/lib/deimos/backends/kafka.rb +33 -0
  37. data/lib/deimos/backends/kafka_async.rb +33 -0
  38. data/lib/deimos/backends/test.rb +20 -0
  39. data/lib/deimos/batch_consumer.rb +7 -0
  40. data/lib/deimos/config/configuration.rb +381 -0
  41. data/lib/deimos/config/phobos_config.rb +137 -0
  42. data/lib/deimos/consume/batch_consumption.rb +150 -0
  43. data/lib/deimos/consume/message_consumption.rb +94 -0
  44. data/lib/deimos/consumer.rb +104 -0
  45. data/lib/deimos/instrumentation.rb +76 -0
  46. data/lib/deimos/kafka_message.rb +60 -0
  47. data/lib/deimos/kafka_source.rb +128 -0
  48. data/lib/deimos/kafka_topic_info.rb +102 -0
  49. data/lib/deimos/message.rb +79 -0
  50. data/lib/deimos/metrics/datadog.rb +47 -0
  51. data/lib/deimos/metrics/mock.rb +39 -0
  52. data/lib/deimos/metrics/provider.rb +36 -0
  53. data/lib/deimos/monkey_patches/phobos_cli.rb +35 -0
  54. data/lib/deimos/monkey_patches/phobos_producer.rb +51 -0
  55. data/lib/deimos/poll_info.rb +9 -0
  56. data/lib/deimos/producer.rb +224 -0
  57. data/lib/deimos/railtie.rb +8 -0
  58. data/lib/deimos/schema_backends/avro_base.rb +140 -0
  59. data/lib/deimos/schema_backends/avro_local.rb +30 -0
  60. data/lib/deimos/schema_backends/avro_schema_coercer.rb +119 -0
  61. data/lib/deimos/schema_backends/avro_schema_registry.rb +34 -0
  62. data/lib/deimos/schema_backends/avro_validation.rb +21 -0
  63. data/lib/deimos/schema_backends/base.rb +150 -0
  64. data/lib/deimos/schema_backends/mock.rb +42 -0
  65. data/lib/deimos/shared_config.rb +63 -0
  66. data/lib/deimos/test_helpers.rb +360 -0
  67. data/lib/deimos/tracing/datadog.rb +35 -0
  68. data/lib/deimos/tracing/mock.rb +40 -0
  69. data/lib/deimos/tracing/provider.rb +29 -0
  70. data/lib/deimos/utils/db_poller.rb +150 -0
  71. data/lib/deimos/utils/db_producer.rb +243 -0
  72. data/lib/deimos/utils/deadlock_retry.rb +68 -0
  73. data/lib/deimos/utils/inline_consumer.rb +150 -0
  74. data/lib/deimos/utils/lag_reporter.rb +175 -0
  75. data/lib/deimos/utils/schema_controller_mixin.rb +115 -0
  76. data/lib/deimos/version.rb +5 -0
  77. data/lib/generators/deimos/active_record/templates/migration.rb.tt +28 -0
  78. data/lib/generators/deimos/active_record/templates/model.rb.tt +5 -0
  79. data/lib/generators/deimos/active_record_generator.rb +79 -0
  80. data/lib/generators/deimos/db_backend/templates/migration +25 -0
  81. data/lib/generators/deimos/db_backend/templates/rails3_migration +31 -0
  82. data/lib/generators/deimos/db_backend_generator.rb +48 -0
  83. data/lib/generators/deimos/db_poller/templates/migration +11 -0
  84. data/lib/generators/deimos/db_poller/templates/rails3_migration +16 -0
  85. data/lib/generators/deimos/db_poller_generator.rb +48 -0
  86. data/lib/tasks/deimos.rake +34 -0
  87. data/spec/active_record_batch_consumer_spec.rb +481 -0
  88. data/spec/active_record_consume/batch_slicer_spec.rb +42 -0
  89. data/spec/active_record_consume/schema_model_converter_spec.rb +105 -0
  90. data/spec/active_record_consumer_spec.rb +154 -0
  91. data/spec/active_record_producer_spec.rb +85 -0
  92. data/spec/backends/base_spec.rb +10 -0
  93. data/spec/backends/db_spec.rb +54 -0
  94. data/spec/backends/kafka_async_spec.rb +11 -0
  95. data/spec/backends/kafka_spec.rb +11 -0
  96. data/spec/batch_consumer_spec.rb +256 -0
  97. data/spec/config/configuration_spec.rb +248 -0
  98. data/spec/consumer_spec.rb +209 -0
  99. data/spec/deimos_spec.rb +169 -0
  100. data/spec/generators/active_record_generator_spec.rb +56 -0
  101. data/spec/handlers/my_batch_consumer.rb +10 -0
  102. data/spec/handlers/my_consumer.rb +10 -0
  103. data/spec/kafka_listener_spec.rb +55 -0
  104. data/spec/kafka_source_spec.rb +381 -0
  105. data/spec/kafka_topic_info_spec.rb +111 -0
  106. data/spec/message_spec.rb +19 -0
  107. data/spec/phobos.bad_db.yml +73 -0
  108. data/spec/phobos.yml +77 -0
  109. data/spec/producer_spec.rb +498 -0
  110. data/spec/rake_spec.rb +19 -0
  111. data/spec/schema_backends/avro_base_shared.rb +199 -0
  112. data/spec/schema_backends/avro_local_spec.rb +32 -0
  113. data/spec/schema_backends/avro_schema_registry_spec.rb +32 -0
  114. data/spec/schema_backends/avro_validation_spec.rb +24 -0
  115. data/spec/schema_backends/base_spec.rb +33 -0
  116. data/spec/schemas/com/my-namespace/Generated.avsc +71 -0
  117. data/spec/schemas/com/my-namespace/MyNestedSchema.avsc +62 -0
  118. data/spec/schemas/com/my-namespace/MySchema-key.avsc +13 -0
  119. data/spec/schemas/com/my-namespace/MySchema.avsc +18 -0
  120. data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc +18 -0
  121. data/spec/schemas/com/my-namespace/MySchemaWithBooleans.avsc +18 -0
  122. data/spec/schemas/com/my-namespace/MySchemaWithDateTimes.avsc +33 -0
  123. data/spec/schemas/com/my-namespace/MySchemaWithId.avsc +28 -0
  124. data/spec/schemas/com/my-namespace/MySchemaWithUniqueId.avsc +32 -0
  125. data/spec/schemas/com/my-namespace/Wibble.avsc +43 -0
  126. data/spec/schemas/com/my-namespace/Widget.avsc +27 -0
  127. data/spec/schemas/com/my-namespace/WidgetTheSecond.avsc +27 -0
  128. data/spec/schemas/com/my-namespace/request/CreateTopic.avsc +11 -0
  129. data/spec/schemas/com/my-namespace/request/Index.avsc +11 -0
  130. data/spec/schemas/com/my-namespace/request/UpdateRequest.avsc +11 -0
  131. data/spec/schemas/com/my-namespace/response/CreateTopic.avsc +11 -0
  132. data/spec/schemas/com/my-namespace/response/Index.avsc +11 -0
  133. data/spec/schemas/com/my-namespace/response/UpdateResponse.avsc +11 -0
  134. data/spec/spec_helper.rb +267 -0
  135. data/spec/utils/db_poller_spec.rb +320 -0
  136. data/spec/utils/db_producer_spec.rb +514 -0
  137. data/spec/utils/deadlock_retry_spec.rb +74 -0
  138. data/spec/utils/inline_consumer_spec.rb +31 -0
  139. data/spec/utils/lag_reporter_spec.rb +76 -0
  140. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  141. data/spec/utils/schema_controller_mixin_spec.rb +84 -0
  142. data/support/deimos-solo.png +0 -0
  143. data/support/deimos-with-name-next.png +0 -0
  144. data/support/deimos-with-name.png +0 -0
  145. data/support/flipp-logo.png +0 -0
  146. metadata +551 -0
data/lib/deimos/instrumentation.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'active_support/notifications'
+require 'active_support/concern'
+
+# :nodoc:
+module Deimos
+  # Copied from Phobos instrumentation.
+  module Instrumentation
+    extend ActiveSupport::Concern
+    NAMESPACE = 'Deimos'
+
+    # :nodoc:
+    module ClassMethods
+      # :nodoc:
+      def subscribe(event)
+        ActiveSupport::Notifications.subscribe("#{NAMESPACE}.#{event}") do |*args|
+          yield(ActiveSupport::Notifications::Event.new(*args)) if block_given?
+        end
+      end
+
+      # :nodoc:
+      def unsubscribe(subscriber)
+        ActiveSupport::Notifications.unsubscribe(subscriber)
+      end
+
+      # :nodoc:
+      def instrument(event, extra={})
+        ActiveSupport::Notifications.instrument("#{NAMESPACE}.#{event}", extra) do |extra2|
+          yield(extra2) if block_given?
+        end
+      end
+    end
+  end
+
+  include Instrumentation
+
+  # This module listens to events published by RubyKafka.
+  module KafkaListener
+    # Listens for any exceptions that happen during publishing and re-publishes
+    # as a Deimos event.
+    # @param event [ActiveSupport::Notification]
+    def self.send_produce_error(event)
+      exception = event.payload[:exception_object]
+      return if !exception || !exception.respond_to?(:failed_messages)
+
+      messages = exception.failed_messages
+      messages.group_by(&:topic).each do |topic, batch|
+        producer = Deimos::Producer.descendants.find { |c| c.topic == topic }
+        next if batch.empty? || !producer
+
+        decoder = Deimos.schema_backend(schema: producer.config[:schema],
+                                        namespace: producer.config[:namespace])
+        payloads = batch.map { |m| decoder.decode(m.value) }
+
+        Deimos.config.metrics&.increment(
+          'publish_error',
+          tags: %W(topic:#{topic}),
+          by: payloads.size
+        )
+        Deimos.instrument(
+          'produce_error',
+          producer: producer,
+          topic: topic,
+          exception_object: exception,
+          payloads: payloads
+        )
+      end
+    end
+  end
+
+  ActiveSupport::Notifications.subscribe('deliver_messages.producer.kafka') do |*args|
+    event = ActiveSupport::Notifications::Event.new(*args)
+    KafkaListener.send_produce_error(event)
+  end
+end
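Because `Deimos` includes the `Instrumentation` concern into itself, application code can subscribe to the namespaced events it emits, such as the `produce_error` event re-published above. A minimal sketch of a subscriber (the handler body and the `Rails.logger` call are illustrative, not part of the gem):

```ruby
# Subscribe to the Deimos.produce_error event emitted by KafkaListener.
# The block receives an ActiveSupport::Notifications::Event whose payload
# carries :producer, :topic, :exception_object and :payloads (see above).
Deimos.subscribe('produce_error') do |event|
  failed = event.payload[:payloads]
  Rails.logger.error(
    "Failed to publish #{failed.size} message(s) to #{event.payload[:topic]}: " \
    "#{event.payload[:exception_object].message}"
  )
end
```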
data/lib/deimos/kafka_message.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module Deimos
+  # Store Kafka messages into the database.
+  class KafkaMessage < ActiveRecord::Base
+    self.table_name = 'kafka_messages'
+
+    validates_presence_of :topic
+
+    # Ensure it gets turned into a string, e.g. for testing purposes. It
+    # should already be a string.
+    # @param mess [Object]
+    def message=(mess)
+      write_attribute(:message, mess ? mess.to_s : nil)
+    end
+
+    # Decoded payload for this message.
+    # @return [Hash]
+    def decoded_message
+      self.class.decoded([self]).first
+    end
+
+    # Get a decoder to decode a set of messages on the given topic.
+    # @param topic [String]
+    # @return [Deimos::Consumer]
+    def self.decoder(topic)
+      producer = Deimos::Producer.descendants.find { |c| c.topic == topic }
+      return nil unless producer
+
+      consumer = Class.new(Deimos::Consumer)
+      consumer.config.merge!(producer.config)
+      consumer
+    end
+
+    # Decoded payloads for a list of messages.
+    # @param messages [Array<Deimos::KafkaMessage>]
+    # @return [Array<Hash>]
+    def self.decoded(messages=[])
+      return [] if messages.empty?
+
+      decoder = self.decoder(messages.first.topic)&.new
+      messages.map do |m|
+        {
+          key: m.key.present? ? decoder&.decode_key(m.key) || m.key : nil,
+          payload: decoder&.decoder&.decode(m.message) || m.message
+        }
+      end
+    end
+
+    # @return [Hash]
+    def phobos_message
+      {
+        payload: self.message,
+        partition_key: self.partition_key,
+        key: self.key,
+        topic: self.topic
+      }
+    end
+  end
+end
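Assuming the DB backend is enabled so that rows exist in `kafka_messages`, pending messages can be inspected with the class methods above. A sketch (the topic name is hypothetical):

```ruby
# Fetch a batch of stored messages for one topic and decode them via the
# anonymous consumer that KafkaMessage.decoder builds from the matching producer.
batch = Deimos::KafkaMessage.where(topic: 'my-topic').order(:id).limit(10).to_a
Deimos::KafkaMessage.decoded(batch).each do |msg|
  puts "key=#{msg[:key].inspect} payload=#{msg[:payload].inspect}"
end
```

Note that if no producer is registered for the topic, `decoder` returns nil and the raw key and message strings are passed through unchanged.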
data/lib/deimos/kafka_source.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+module Deimos
+  # Represents an object which needs to inform Kafka when it is saved or
+  # bulk imported.
+  module KafkaSource
+    extend ActiveSupport::Concern
+
+    DEPRECATION_WARNING = 'The kafka_producer interface will be deprecated ' \
+                          'in future releases. Please use kafka_producers instead.'
+
+    included do
+      after_create(:send_kafka_event_on_create)
+      after_update(:send_kafka_event_on_update)
+      after_destroy(:send_kafka_event_on_destroy)
+    end
+
+    # Send the newly created model to Kafka.
+    def send_kafka_event_on_create
+      return unless self.persisted?
+      return unless self.class.kafka_config[:create]
+
+      self.class.kafka_producers.each { |p| p.send_event(self) }
+    end
+
+    # Send the newly updated model to Kafka.
+    def send_kafka_event_on_update
+      return unless self.class.kafka_config[:update]
+
+      producers = self.class.kafka_producers
+      fields = producers.flat_map(&:watched_attributes).uniq
+      fields -= ['updated_at']
+      # Only send an event if a field we care about was changed.
+      any_changes = fields.any? do |field|
+        field_change = self.previous_changes[field]
+        field_change.present? && field_change[0] != field_change[1]
+      end
+      return unless any_changes
+
+      producers.each { |p| p.send_event(self) }
+    end
+
+    # Send a deletion (null payload) event to Kafka.
+    def send_kafka_event_on_destroy
+      return unless self.class.kafka_config[:delete]
+
+      self.class.kafka_producers.each { |p| p.publish_list([self.deletion_payload]) }
+    end
+
+    # Payload to send after we are destroyed.
+    # @return [Hash]
+    def deletion_payload
+      { payload_key: self[self.class.primary_key] }
+    end
+
+    # :nodoc:
+    module ClassMethods
+      # @return [Hash]
+      def kafka_config
+        {
+          update: true,
+          delete: true,
+          import: true,
+          create: true
+        }
+      end
+
+      # @return [Array<Deimos::ActiveRecordProducer>] the producers to run.
+      def kafka_producers
+        if self.respond_to?(:kafka_producer)
+          Deimos.config.logger.warn(message: DEPRECATION_WARNING)
+          return [self.kafka_producer]
+        end
+
+        raise NotImplementedError
+      end
+
+      # This is an internal method, part of the activerecord_import gem. It's
+      # the one that actually does the importing, having already normalized
+      # the inputs (arrays, hashes, records etc.)
+      # Basically we want to first do the import, then reload the records
+      # and send them to Kafka.
+      def import_without_validations_or_callbacks(column_names,
+                                                  array_of_attributes,
+                                                  options={})
+        results = super
+        if !self.kafka_config[:import] || array_of_attributes.empty?
+          return results
+        end
+
+        # This will contain an array of hashes, where each hash is the actual
+        # attribute hash that created the object.
+        array_of_hashes = []
+        array_of_attributes.each do |array|
+          array_of_hashes << column_names.zip(array).to_h.with_indifferent_access
+        end
+        hashes_with_id, hashes_without_id = array_of_hashes.partition { |arr| arr[:id].present? }
+
+        self.kafka_producers.each { |p| p.send_events(hashes_with_id) }
+
+        if hashes_without_id.any?
+          if options[:on_duplicate_key_update].present? &&
+             options[:on_duplicate_key_update] != [:updated_at]
+            unique_columns = column_names.map(&:to_s) -
+                             options[:on_duplicate_key_update].map(&:to_s) - %w(id created_at)
+            records = hashes_without_id.map do |hash|
+              self.where(unique_columns.map { |c| [c, hash[c]] }.to_h).first
+            end
+            self.kafka_producers.each { |p| p.send_events(records) }
+          else
+            # re-fill IDs based on what was just entered into the DB.
+            last_id = if self.connection.adapter_name.downcase =~ /sqlite/
+                        self.connection.select_value('select last_insert_rowid()') -
+                          hashes_without_id.size + 1
+                      else # mysql
+                        self.connection.select_value('select LAST_INSERT_ID()')
+                      end
+            hashes_without_id.each_with_index do |attrs, i|
+              attrs[:id] = last_id + i
+            end
+            self.kafka_producers.each { |p| p.send_events(hashes_without_id) }
+          end
+        end
+        results
+      end
+    end
+  end
+end
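A model mixes in the concern and supplies its own `kafka_producers` (the concern raises `NotImplementedError` otherwise), optionally overriding `kafka_config` to change which lifecycle events publish. A hypothetical model, assuming a `WidgetProducer` (a `Deimos::ActiveRecordProducer` subclass) exists:

```ruby
# Hypothetical model that publishes itself on create/update and on
# bulk import via activerecord-import, but not on destroy.
class Widget < ActiveRecord::Base
  include Deimos::KafkaSource

  # Required: the producers that should receive this model's events.
  def self.kafka_producers
    [WidgetProducer]
  end

  # Optional override: keep the defaults but turn off deletion events.
  def self.kafka_config
    super.merge(delete: false)
  end
end
```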
data/lib/deimos/kafka_topic_info.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+module Deimos
+  # Record that keeps track of which topics are being worked on by DbProducers.
+  class KafkaTopicInfo < ActiveRecord::Base
+    self.table_name = 'kafka_topic_info'
+
+    class << self
+      # Lock a topic for the given ID. Returns whether the lock was successful.
+      # @param topic [String]
+      # @param lock_id [String]
+      # @return [Boolean]
+      def lock(topic, lock_id)
+        # Try to create it - it's fine if it already exists
+        begin
+          self.create(topic: topic, last_processed_at: Time.zone.now)
+        rescue ActiveRecord::RecordNotUnique
+          # continue on
+        end
+
+        # Lock the record
+        qtopic = self.connection.quote(topic)
+        qlock_id = self.connection.quote(lock_id)
+        qtable = self.connection.quote_table_name('kafka_topic_info')
+        qnow = self.connection.quote(Time.zone.now.to_s(:db))
+        qfalse = self.connection.quoted_false
+        qtime = self.connection.quote(1.minute.ago.to_s(:db))
+
+        # If a record is marked as error and less than 1 minute old,
+        # we don't want to pick it up even if not currently locked because
+        # we worry we'll run into the same problem again.
+        # Once it's more than 1 minute old, we figure it's OK to try again
+        # so we can pick up any topic that's that old, even if it was
+        # locked by someone, because it's the job of the producer to keep
+        # updating the locked_at timestamp as they work on messages in that
+        # topic. If the locked_at timestamp is that old, chances are that
+        # the producer crashed.
+        sql = <<~SQL
+          UPDATE #{qtable}
+          SET locked_by=#{qlock_id}, locked_at=#{qnow}, error=#{qfalse}
+          WHERE topic=#{qtopic} AND
+           ((locked_by IS NULL AND error=#{qfalse}) OR locked_at < #{qtime})
+        SQL
+        self.connection.update(sql)
+        self.where(locked_by: lock_id, topic: topic).any?
+      end
+
+      # This is called once a producer is finished working on a topic, i.e.
+      # there are no more messages to fetch. It unlocks the topic and
+      # moves on to the next one.
+      # @param topic [String]
+      # @param lock_id [String]
+      def clear_lock(topic, lock_id)
+        self.where(topic: topic, locked_by: lock_id).
+          update_all(locked_by: nil,
+                     locked_at: nil,
+                     error: false,
+                     retries: 0,
+                     last_processed_at: Time.zone.now)
+      end
+
+      # Update all topics that aren't currently locked and have no messages
+      # waiting. It's OK if some messages get inserted in the middle of this
+      # because the point is that at least within a few milliseconds of each
+      # other, it wasn't locked and had no messages, meaning the topic
+      # was in a good state.
+      # @param except_topics [Array<String>] the list of topics we've just
+      # realized had messages in them, meaning all other topics were empty.
+      def ping_empty_topics(except_topics)
+        records = KafkaTopicInfo.where(locked_by: nil).
+          where('topic not in(?)', except_topics)
+        records.each do |info|
+          info.update_attribute(:last_processed_at, Time.zone.now)
+        end
+      end
+
+      # The producer calls this if it gets an error sending messages. This
+      # essentially locks down this topic for 1 minute (for all producers)
+      # and allows the caller to continue to the next topic.
+      # @param topic [String]
+      # @param lock_id [String]
+      def register_error(topic, lock_id)
+        record = self.where(topic: topic, locked_by: lock_id).last
+        attr_hash = { locked_by: nil,
+                      locked_at: Time.zone.now,
+                      error: true,
+                      retries: record.retries + 1 }
+        record.attributes = attr_hash
+        record.save!
+      end
+
+      # Update the locked_at timestamp to indicate that the producer is still
+      # working on those messages and to continue.
+      # @param topic [String]
+      # @param lock_id [String]
+      def heartbeat(topic, lock_id)
+        self.where(topic: topic, locked_by: lock_id).
+          update_all(locked_at: Time.zone.now)
+      end
+    end
+  end
+end
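These locking methods are driven by `Deimos::Utils::DbProducer` (included in this gem); the intended lifecycle looks roughly like the sketch below. The topic name is illustrative, and the real producer interleaves `heartbeat` calls with batch sends rather than calling it once.

```ruby
require 'securerandom'

lock_id = SecureRandom.uuid

# Only proceed if we won the row-level lock for this topic.
if Deimos::KafkaTopicInfo.lock('my-topic', lock_id)
  begin
    # ... send pending messages in batches, calling heartbeat
    # periodically so the lock is not considered stale ...
    Deimos::KafkaTopicInfo.heartbeat('my-topic', lock_id)

    # Done: release the lock and reset the error/retry state.
    Deimos::KafkaTopicInfo.clear_lock('my-topic', lock_id)
  rescue StandardError
    # Mark the topic as errored; lock() skips it for ~1 minute.
    Deimos::KafkaTopicInfo.register_error('my-topic', lock_id)
  end
end
```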
data/lib/deimos/message.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+module Deimos
+  # Basically a struct to hold the message as it's processed.
+  class Message
+    attr_accessor :payload, :key, :partition_key, :encoded_key,
+                  :encoded_payload, :topic, :producer_name
+
+    # @param payload [Hash]
+    # @param producer [Class]
+    def initialize(payload, producer, topic: nil, key: nil, partition_key: nil)
+      @payload = payload&.with_indifferent_access
+      @producer_name = producer&.name
+      @topic = topic
+      @key = key
+      @partition_key = partition_key
+    end
+
+    # Add message_id and timestamp default values if they are in the
+    # schema and don't already have values.
+    # @param fields [Array<String>] existing name fields in the schema.
+    def add_fields(fields)
+      return if @payload.except(:payload_key, :partition_key).blank?
+
+      if fields.include?('message_id')
+        @payload['message_id'] ||= SecureRandom.uuid
+      end
+      if fields.include?('timestamp')
+        @payload['timestamp'] ||= Time.now.in_time_zone.to_s
+      end
+    end
+
+    # @param encoder [Deimos::SchemaBackends::Base]
+    def coerce_fields(encoder)
+      return if payload.nil?
+
+      @payload = encoder.coerce(@payload)
+    end
+
+    # @return [Hash]
+    def encoded_hash
+      {
+        topic: @topic,
+        key: @encoded_key,
+        partition_key: @partition_key || @encoded_key,
+        payload: @encoded_payload,
+        metadata: {
+          decoded_payload: @payload,
+          producer_name: @producer_name
+        }
+      }
+    end
+
+    # @return [Hash]
+    def to_h
+      {
+        topic: @topic,
+        key: @key,
+        partition_key: @partition_key || @key,
+        payload: @payload,
+        metadata: {
+          decoded_payload: @payload,
+          producer_name: @producer_name
+        }
+      }
+    end
+
+    # @param other [Message]
+    # @return [Boolean]
+    def ==(other)
+      self.to_h == other.to_h
+    end
+
+    # @return [Boolean] True if this message is a tombstone
+    def tombstone?
+      payload.nil?
+    end
+  end
+end
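A quick sketch of the struct in use; the producer class and payload fields are hypothetical:

```ruby
# Build a message as the producer pipeline would, then inspect it.
message = Deimos::Message.new(
  { 'test_id' => 'abc123', 'some_int' => 3 },
  MyProducer, # hypothetical Deimos::Producer subclass
  topic: 'my-topic',
  key: 'abc123'
)
message.add_fields(%w(message_id timestamp)) # fills defaults only if the schema has them
message.tombstone?           # => false; true only when the payload is nil
message.to_h[:partition_key] # => 'abc123' (falls back to the key)
```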