deimos-ruby 1.24.2 → 2.0.0.pre.alpha1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. checksums.yaml +4 -4
  2. data/.rubocop_todo.yml +0 -17
  3. data/.tool-versions +1 -0
  4. data/CHANGELOG.md +5 -0
  5. data/README.md +287 -498
  6. data/deimos-ruby.gemspec +4 -4
  7. data/docs/CONFIGURATION.md +133 -226
  8. data/docs/UPGRADING.md +237 -0
  9. data/lib/deimos/active_record_consume/batch_consumption.rb +29 -28
  10. data/lib/deimos/active_record_consume/mass_updater.rb +59 -4
  11. data/lib/deimos/active_record_consume/message_consumption.rb +15 -21
  12. data/lib/deimos/active_record_consumer.rb +36 -21
  13. data/lib/deimos/active_record_producer.rb +28 -9
  14. data/lib/deimos/backends/base.rb +4 -35
  15. data/lib/deimos/backends/kafka.rb +6 -22
  16. data/lib/deimos/backends/kafka_async.rb +6 -22
  17. data/lib/deimos/backends/{db.rb → outbox.rb} +13 -9
  18. data/lib/deimos/config/configuration.rb +116 -379
  19. data/lib/deimos/consume/batch_consumption.rb +24 -124
  20. data/lib/deimos/consume/message_consumption.rb +36 -63
  21. data/lib/deimos/consumer.rb +16 -75
  22. data/lib/deimos/ext/consumer_route.rb +35 -0
  23. data/lib/deimos/ext/producer_middleware.rb +94 -0
  24. data/lib/deimos/ext/producer_route.rb +22 -0
  25. data/lib/deimos/ext/redraw.rb +29 -0
  26. data/lib/deimos/ext/routing_defaults.rb +72 -0
  27. data/lib/deimos/ext/schema_route.rb +70 -0
  28. data/lib/deimos/kafka_message.rb +2 -2
  29. data/lib/deimos/kafka_source.rb +2 -7
  30. data/lib/deimos/kafka_topic_info.rb +1 -1
  31. data/lib/deimos/logging.rb +71 -0
  32. data/lib/deimos/message.rb +2 -11
  33. data/lib/deimos/metrics/datadog.rb +40 -1
  34. data/lib/deimos/metrics/provider.rb +4 -4
  35. data/lib/deimos/producer.rb +39 -116
  36. data/lib/deimos/railtie.rb +6 -0
  37. data/lib/deimos/schema_backends/avro_base.rb +21 -21
  38. data/lib/deimos/schema_backends/avro_schema_registry.rb +1 -2
  39. data/lib/deimos/schema_backends/avro_validation.rb +2 -2
  40. data/lib/deimos/schema_backends/base.rb +19 -12
  41. data/lib/deimos/schema_backends/mock.rb +6 -1
  42. data/lib/deimos/schema_backends/plain.rb +47 -0
  43. data/lib/deimos/schema_class/base.rb +2 -2
  44. data/lib/deimos/schema_class/enum.rb +1 -1
  45. data/lib/deimos/schema_class/record.rb +2 -2
  46. data/lib/deimos/test_helpers.rb +95 -320
  47. data/lib/deimos/tracing/provider.rb +6 -6
  48. data/lib/deimos/transcoder.rb +88 -0
  49. data/lib/deimos/utils/db_poller/base.rb +16 -14
  50. data/lib/deimos/utils/db_poller/state_based.rb +3 -3
  51. data/lib/deimos/utils/db_poller/time_based.rb +4 -4
  52. data/lib/deimos/utils/db_poller.rb +1 -1
  53. data/lib/deimos/utils/deadlock_retry.rb +1 -1
  54. data/lib/deimos/utils/{db_producer.rb → outbox_producer.rb} +16 -47
  55. data/lib/deimos/utils/schema_class.rb +0 -7
  56. data/lib/deimos/version.rb +1 -1
  57. data/lib/deimos.rb +79 -26
  58. data/lib/generators/deimos/{db_backend_generator.rb → outbox_backend_generator.rb} +4 -4
  59. data/lib/generators/deimos/schema_class_generator.rb +0 -1
  60. data/lib/generators/deimos/v2/templates/karafka.rb.tt +149 -0
  61. data/lib/generators/deimos/v2_generator.rb +193 -0
  62. data/lib/tasks/deimos.rake +5 -7
  63. data/spec/active_record_batch_consumer_association_spec.rb +22 -13
  64. data/spec/active_record_batch_consumer_spec.rb +84 -65
  65. data/spec/active_record_consume/batch_consumption_spec.rb +10 -10
  66. data/spec/active_record_consume/batch_slicer_spec.rb +12 -12
  67. data/spec/active_record_consume/mass_updater_spec.rb +137 -0
  68. data/spec/active_record_consumer_spec.rb +29 -13
  69. data/spec/active_record_producer_spec.rb +36 -26
  70. data/spec/backends/base_spec.rb +0 -23
  71. data/spec/backends/kafka_async_spec.rb +1 -3
  72. data/spec/backends/kafka_spec.rb +1 -3
  73. data/spec/backends/{db_spec.rb → outbox_spec.rb} +14 -20
  74. data/spec/batch_consumer_spec.rb +66 -116
  75. data/spec/consumer_spec.rb +53 -147
  76. data/spec/deimos_spec.rb +10 -126
  77. data/spec/kafka_source_spec.rb +19 -52
  78. data/spec/karafka/karafka.rb +69 -0
  79. data/spec/karafka_config/karafka_spec.rb +97 -0
  80. data/spec/logging_spec.rb +25 -0
  81. data/spec/message_spec.rb +9 -9
  82. data/spec/producer_spec.rb +112 -254
  83. data/spec/rake_spec.rb +1 -3
  84. data/spec/schema_backends/avro_validation_spec.rb +1 -1
  85. data/spec/schemas/com/my-namespace/MySchemaWithTitle.avsc +22 -0
  86. data/spec/snapshots/consumers-no-nest.snap +49 -0
  87. data/spec/snapshots/consumers.snap +49 -0
  88. data/spec/snapshots/consumers_and_producers-no-nest.snap +49 -0
  89. data/spec/snapshots/consumers_and_producers.snap +49 -0
  90. data/spec/snapshots/consumers_circular-no-nest.snap +49 -0
  91. data/spec/snapshots/consumers_circular.snap +49 -0
  92. data/spec/snapshots/consumers_complex_types-no-nest.snap +49 -0
  93. data/spec/snapshots/consumers_complex_types.snap +49 -0
  94. data/spec/snapshots/consumers_nested-no-nest.snap +49 -0
  95. data/spec/snapshots/consumers_nested.snap +49 -0
  96. data/spec/snapshots/namespace_folders.snap +49 -0
  97. data/spec/snapshots/namespace_map.snap +49 -0
  98. data/spec/snapshots/producers_with_key-no-nest.snap +49 -0
  99. data/spec/snapshots/producers_with_key.snap +49 -0
  100. data/spec/spec_helper.rb +61 -29
  101. data/spec/utils/db_poller_spec.rb +49 -39
  102. data/spec/utils/{db_producer_spec.rb → outbox_producer_spec.rb} +17 -184
  103. metadata +58 -67
  104. data/lib/deimos/batch_consumer.rb +0 -7
  105. data/lib/deimos/config/phobos_config.rb +0 -163
  106. data/lib/deimos/instrumentation.rb +0 -95
  107. data/lib/deimos/monkey_patches/phobos_cli.rb +0 -35
  108. data/lib/deimos/utils/inline_consumer.rb +0 -158
  109. data/lib/deimos/utils/lag_reporter.rb +0 -186
  110. data/lib/deimos/utils/schema_controller_mixin.rb +0 -129
  111. data/spec/config/configuration_spec.rb +0 -321
  112. data/spec/kafka_listener_spec.rb +0 -55
  113. data/spec/phobos.bad_db.yml +0 -73
  114. data/spec/phobos.yml +0 -77
  115. data/spec/utils/inline_consumer_spec.rb +0 -31
  116. data/spec/utils/lag_reporter_spec.rb +0 -76
  117. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  118. data/spec/utils/schema_controller_mixin_spec.rb +0 -84
  119. /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/migration +0 -0
  120. /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/rails3_migration +0 -0
data/lib/deimos/consume/batch_consumption.rb
@@ -7,151 +7,51 @@ module Deimos
     # of messages to be handled at once
     module BatchConsumption
       extend ActiveSupport::Concern
-      include Phobos::BatchHandler
 
-      # @param batch [Array<String>]
-      # @param metadata [Hash]
-      # @return [void]
-      def around_consume_batch(batch, metadata)
-        payloads = []
-        _with_span do
-          benchmark = Benchmark.measure do
-            if self.class.config[:key_configured]
-              metadata[:keys] = batch.map do |message|
-                decode_key(message.key)
-              end
-            end
-            metadata[:first_offset] = batch.first&.offset
-
-            payloads = batch.map do |message|
-              decode_message(message.payload)
-            end
-            _received_batch(payloads, metadata)
-            yield(payloads, metadata)
-          end
-          _handle_batch_success(benchmark.real, payloads, metadata)
-        end
-      rescue StandardError => e
-        _handle_batch_error(e, payloads, metadata)
-      end
-
-      # Consume a batch of incoming messages.
-      # @param _payloads [Array<Phobos::BatchMessage>]
-      # @param _metadata [Hash]
-      # @return [void]
-      def consume_batch(_payloads, _metadata)
-        raise NotImplementedError
+      def consume_batch
+        raise MissingImplementationError
       end
 
       protected
 
-      # @!visibility private
-      def _received_batch(payloads, metadata)
-        Deimos.config.logger.info(
-          message: 'Got Kafka batch event',
-          message_ids: _payload_identifiers(payloads, metadata),
-          metadata: metadata.except(:keys)
-        )
-        Deimos.config.logger.debug(
-          message: 'Kafka batch event payloads',
-          payloads: payloads
-        )
-        Deimos.config.metrics&.increment(
-          'handler',
-          tags: %W(
-            status:batch_received
-            topic:#{metadata[:topic]}
-          ))
-        Deimos.config.metrics&.increment(
-          'handler',
-          by: metadata[:batch_size],
-          tags: %W(
-            status:received
-            topic:#{metadata[:topic]}
-          ))
-        if payloads.present?
-          payloads.each { |payload| _report_time_delayed(payload, metadata) }
+      def _consume_batch
+        _with_span do
+          begin
+            benchmark = Benchmark.measure do
+              consume_batch
+            end
+            _handle_batch_success(benchmark.real)
+          rescue StandardError => e
+            _handle_batch_error(e)
+          end
         end
       end
 
       # @!visibility private
       # @param exception [Throwable]
-      # @param payloads [Array<Hash>]
-      # @param metadata [Hash]
-      def _handle_batch_error(exception, payloads, metadata)
-        Deimos.config.metrics&.increment(
-          'handler',
-          tags: %W(
-            status:batch_error
-            topic:#{metadata[:topic]}
-          ))
-        Deimos.config.logger.warn(
+      def _handle_batch_error(exception)
+        Deimos::Logging.log_warn(
           message: 'Error consuming message batch',
           handler: self.class.name,
-          metadata: metadata.except(:keys),
-          message_ids: _payload_identifiers(payloads, metadata),
+          metadata: Deimos::Logging.metadata_log_text(messages.metadata),
+          messages: Deimos::Logging.messages_log_text(self.topic.payload_log, messages),
          error_message: exception.message,
          error: exception.backtrace
        )
-        _error(exception, payloads, metadata)
+        _error(exception, messages)
      end
 
      # @!visibility private
      # @param time_taken [Float]
-      # @param payloads [Array<Hash>]
-      # @param metadata [Hash]
-      def _handle_batch_success(time_taken, payloads, metadata)
-        Deimos.config.metrics&.histogram('handler',
-                                         time_taken,
-                                         tags: %W(
-                                           time:consume_batch
-                                           topic:#{metadata[:topic]}
-                                         ))
-        Deimos.config.metrics&.increment(
-          'handler',
-          tags: %W(
-            status:batch_success
-            topic:#{metadata[:topic]}
-          ))
-        Deimos.config.metrics&.increment(
-          'handler',
-          by: metadata[:batch_size],
-          tags: %W(
-            status:success
-            topic:#{metadata[:topic]}
-          ))
-        Deimos.config.logger.info(
-          message: 'Finished processing Kafka batch event',
-          message_ids: _payload_identifiers(payloads, metadata),
-          time_elapsed: time_taken,
-          metadata: metadata.except(:keys)
-        )
+      def _handle_batch_success(time_taken)
+        Deimos::Logging.log_info(
+          {
+            message: 'Finished processing Kafka batch event',
+            time_elapsed: time_taken,
+            metadata: Deimos::Logging.metadata_log_text(messages.metadata)
+          }.merge(Deimos::Logging.messages_log_text(self.topic.payload_log, messages)))
      end
 
-      # @!visibility private
-      # Get payload identifiers (key and message_id if present) for logging.
-      # @param payloads [Array<Hash>]
-      # @param metadata [Hash]
-      # @return [Array<Array>] the identifiers.
-      def _payload_identifiers(payloads, metadata)
-        message_ids = payloads&.map do |payload|
-          if payload.is_a?(Hash) && payload.key?('message_id')
-            payload['message_id']
-          end
-        end
-
-        # Payloads may be nil if preprocessing failed
-        messages = payloads || metadata[:keys] || []
-
-        messages.zip(metadata[:keys] || [], message_ids || []).map do |_, k, m_id|
-          ids = {}
-
-          ids[:key] = k if k.present?
-          ids[:message_id] = m_id if m_id.present?
-
-          ids
-        end
-      end
     end
   end
 end
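For readers migrating consumers, a minimal sketch of a batch consumer against the new API (class, topic and payload fields are hypothetical; it assumes Karafka's `messages.payloads` helper):

    class MyBatchConsumer < Deimos::Consumer
      # v2: consume_batch takes no arguments; the Karafka `messages` object
      # is available on the consumer instance instead.
      def consume_batch
        messages.payloads.each do |payload|
          Widget.create!(name: payload['name']) # hypothetical model/field
        end
      end
    end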
data/lib/deimos/consume/message_consumption.rb
@@ -6,90 +6,63 @@ module Deimos
     # are invoked for every individual message.
     module MessageConsumption
       extend ActiveSupport::Concern
-      include Phobos::Handler
-
-      # @param payload [String]
-      # @param metadata [Hash]
-      # @return [void]
-      def around_consume(payload, metadata)
-        decoded_payload = payload.nil? ? nil : payload.dup
-        new_metadata = metadata.dup
-        benchmark = Benchmark.measure do
-          _with_span do
-            new_metadata[:key] = decode_key(metadata[:key]) if self.class.config[:key_configured]
-            decoded_payload = decode_message(payload)
-            _received_message(decoded_payload, new_metadata)
-            yield(decoded_payload, new_metadata)
-          end
-        end
-        _handle_success(benchmark.real, decoded_payload, new_metadata)
-      rescue StandardError => e
-        _handle_error(e, decoded_payload, new_metadata)
-      end
 
       # Consume incoming messages.
-      # @param _payload [String]
-      # @param _metadata [Hash]
+      # @param _message [Karafka::Messages::Message]
       # @return [void]
-      def consume(_payload, _metadata)
-        raise NotImplementedError
+      def consume_message(_message)
+        raise MissingImplementationError
       end
 
       private
 
-      def _received_message(payload, metadata)
-        Deimos.config.logger.info(
+      def _consume_messages
+        messages.each do |message|
+          begin
+            _with_span do
+              _received_message(message)
+              benchmark = Benchmark.measure do
+                consume_message(message)
+              end
+              _handle_success(message, benchmark.real)
+            rescue StandardError => e
+              _handle_message_error(e, message)
+            end
+          end
+        end
+      end
+
+      def _received_message(message)
+        Deimos::Logging.log_info(
           message: 'Got Kafka event',
-          payload: payload,
-          metadata: metadata
+          payload: message.payload,
+          metadata: Deimos::Logging.metadata_log_text(message.metadata)
         )
-        Deimos.config.metrics&.increment('handler', tags: %W(
-          status:received
-          topic:#{metadata[:topic]}
-        ))
-        _report_time_delayed(payload, metadata)
       end
 
       # @param exception [Throwable]
-      # @param payload [Hash]
-      # @param metadata [Hash]
-      def _handle_error(exception, payload, metadata)
-        Deimos.config.metrics&.increment(
-          'handler',
-          tags: %W(
-            status:error
-            topic:#{metadata[:topic]}
-          )
-        )
-        Deimos.config.logger.warn(
+      # @param message [Karafka::Messages::Message]
+      def _handle_message_error(exception, message)
+        Deimos::Logging.log_warn(
           message: 'Error consuming message',
           handler: self.class.name,
-          metadata: metadata,
-          data: payload,
+          metadata: Deimos::Logging.metadata_log_text(message.metadata),
+          key: message.key,
+          data: message.payload,
           error_message: exception.message,
           error: exception.backtrace
         )
 
-        _error(exception, payload, metadata)
+        _error(exception, Karafka::Messages::Messages.new([message], messages.metadata))
       end
 
-      # @param time_taken [Float]
-      # @param payload [Hash]
-      # @param metadata [Hash]
-      def _handle_success(time_taken, payload, metadata)
-        Deimos.config.metrics&.histogram('handler', time_taken, tags: %W(
-          time:consume
-          topic:#{metadata[:topic]}
-        ))
-        Deimos.config.metrics&.increment('handler', tags: %W(
-          status:success
-          topic:#{metadata[:topic]}
-        ))
-        Deimos.config.logger.info(
+      def _handle_success(message, benchmark)
+        mark_as_consumed(message)
+        Deimos::Logging.log_info(
           message: 'Finished processing Kafka event',
-          payload: payload,
-          time_elapsed: time_taken,
-          metadata: metadata
+          payload: message.payload,
+          time_elapsed: benchmark,
+          metadata: Deimos::Logging.metadata_log_text(message.metadata)
         )
       end
     end
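Correspondingly, a per-message consumer now overrides `consume_message` and receives a `Karafka::Messages::Message`; a minimal sketch (class and field names hypothetical):

    class MyConsumer < Deimos::Consumer
      # v2: one decoded Karafka message instead of (payload, metadata).
      def consume_message(message)
        payload = message.payload # already decoded by the configured transcoder
        key = message.key
        Widget.create!(id: key, name: payload['name']) # hypothetical model
      end
    end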
data/lib/deimos/consumer.rb
@@ -7,62 +7,21 @@ require 'deimos/consume/message_consumption'
 # Note: According to the docs, instances of your handler will be created
 # for every incoming message/batch. This class should be lightweight.
 module Deimos
-  # Basic consumer class. Inherit from this class and override either consume
-  # or consume_batch, depending on the delivery mode of your listener.
-  # `consume` -> use `delivery :message` or `delivery :batch`
-  # `consume_batch` -> use `delivery :inline_batch`
-  class Consumer
+  # Basic consumer class. Inherit from this class and override either consume_message
+  # or consume_batch, depending on the `:batch` config setting.
+  class Consumer < Karafka::BaseConsumer
     include Consume::MessageConsumption
     include Consume::BatchConsumption
     include SharedConfig
 
-    class << self
-      # @return [Deimos::SchemaBackends::Base]
-      def decoder
-        @decoder ||= Deimos.schema_backend(schema: config[:schema],
-                                           namespace: config[:namespace])
-      end
-
-      # @return [Deimos::SchemaBackends::Base]
-      def key_decoder
-        @key_decoder ||= Deimos.schema_backend(schema: config[:key_schema],
-                                               namespace: config[:namespace])
-      end
-    end
-
-    # Helper method to decode an encoded key.
-    # @param key [String]
-    # @return [Object] the decoded key.
-    def decode_key(key)
-      return nil if key.nil?
-
-      config = self.class.config
-      unless config[:key_configured]
-        raise 'No key config given - if you are not decoding keys, please use '\
-              '`key_config plain: true`'
-      end
-
-      if config[:key_field]
-        self.class.decoder.decode_key(key, config[:key_field])
-      elsif config[:key_schema]
-        self.class.key_decoder.decode(key, schema: config[:key_schema])
-      else # no encoding
-        key
+    def consume
+      if self.topic.each_message
+        _consume_messages
+      else
+        _consume_batch
       end
     end
 
-    # Helper method to decode an encoded message.
-    # @param payload [Object]
-    # @return [Object] the decoded message.
-    def decode_message(payload)
-      decoded_payload = payload.nil? ? nil : self.class.decoder.decode(payload)
-      return decoded_payload unless Utils::SchemaClass.use?(self.class.config.to_h)
-
-      Utils::SchemaClass.instance(decoded_payload,
-                                  self.class.config[:schema],
-                                  self.class.config[:namespace])
-    end
-
     private
 
     def _with_span
@@ -75,42 +34,24 @@ module Deimos
       Deimos.config.tracer&.finish(@span)
     end
 
-    def _report_time_delayed(payload, metadata)
-      return if payload.nil? || payload['timestamp'].blank?
-
-      begin
-        time_delayed = Time.now.in_time_zone - payload['timestamp'].to_datetime
-      rescue ArgumentError
-        Deimos.config.logger.info(
-          message: "Error parsing timestamp! #{payload['timestamp']}"
-        )
-        return
-      end
-      Deimos.config.metrics&.histogram('handler', time_delayed, tags: %W(
-        time:time_delayed
-        topic:#{metadata[:topic]}
-      ))
-    end
-
     # Overrideable method to determine if a given error should be considered
     # "fatal" and always be reraised.
     # @param _error [Exception]
-    # @param _payload [Hash]
-    # @param _metadata [Hash]
+    # @param _messages [Array<Karafka::Message>]
     # @return [Boolean]
-    def fatal_error?(_error, _payload, _metadata)
+    def fatal_error?(_error, _messages)
       false
     end
 
     # @param exception [Exception]
-    # @param payload [Hash]
-    # @param metadata [Hash]
-    def _error(exception, payload, metadata)
+    # @param messages [Array<Karafka::Message>]
+    def _error(exception, messages)
       Deimos.config.tracer&.set_error(@span, exception)
 
-      raise if Deimos.config.consumers.reraise_errors ||
-               Deimos.config.consumers.fatal_error&.call(exception, payload, metadata) ||
-               fatal_error?(exception, payload, metadata)
+      raise if self.topic.reraise_errors ||
+               Deimos.config.consumers.fatal_error&.call(exception, messages) ||
+               fatal_error?(exception, messages)
     end
+
   end
 end
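The error-handling hook changes signature as well; a sketch of overriding `fatal_error?` under the new arguments (the error class chosen is hypothetical):

    class MyConsumer < Deimos::Consumer
      # Reraise (crashing the consumer) only for errors we consider unrecoverable.
      def fatal_error?(error, _messages)
        error.is_a?(ActiveRecord::RecordNotUnique)
      end
    end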
data/lib/deimos/ext/consumer_route.rb (new file)
@@ -0,0 +1,35 @@
+module Deimos
+  class ConsumerRoute < Karafka::Routing::Features::Base
+    module Topic
+
+      FIELDS = %i(max_db_batch_size
+                  bulk_import_id_column
+                  replace_associations
+                  bulk_import_id_generator
+                  each_message
+                  reraise_errors
+                  fatal_error
+                  save_associations_first
+      )
+      Config = Struct.new(*FIELDS, keyword_init: true)
+
+      FIELDS.each do |field|
+        define_method(field) do |*args|
+          @deimos_config ||= Config.new(
+            bulk_import_id_column: :bulk_import_id,
+            replace_associations: true,
+            each_message: false,
+            bulk_import_id_generator: proc { SecureRandom.uuid },
+            fatal_error: proc { false }
+          )
+          if args.any?
+            @deimos_config.public_send("#{field}=", args[0])
+          end
+          @deimos_config[field]
+        end
+      end
+    end
+  end
+end
+
+Deimos::ConsumerRoute.activate
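These fields become available on every topic in the Karafka routing DSL. A sketch of how they might be set in karafka.rb (topic, consumer, and error class are hypothetical):

    Karafka::App.routes.draw do
      topic 'my-topic' do
        consumer MyConsumer
        each_message true   # dispatch to consume_message instead of consume_batch
        reraise_errors Rails.env.test?
        fatal_error proc { |e, _messages| e.is_a?(MyFatalError) }
      end
    end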
data/lib/deimos/ext/producer_middleware.rb (new file)
@@ -0,0 +1,94 @@
+module Deimos
+
+  module ProducerMiddleware
+    class << self
+
+      def call(message)
+        Karafka.monitor.instrument(
+          'deimos.encode_message',
+          producer: self,
+          message: message
+        ) do
+          config = Deimos.karafka_config_for(topic: message[:topic])
+          return message if config.nil?
+          return if message[:payload] && !message[:payload].is_a?(Hash) && !message[:payload].is_a?(SchemaClass::Record)
+
+          m = Deimos::Message.new(message[:payload].to_h,
+                                  headers: message[:headers],
+                                  partition_key: message[:partition_key])
+          _process_message(m, message, config)
+          message[:payload] = m.encoded_payload
+          message[:key] = m.encoded_key
+          message[:partition_key] = if m.partition_key
+                                      m.partition_key.to_s
+                                    elsif m.key
+                                      m.key.to_s
+                                    else
+                                      nil
+                                    end
+          message[:topic] = "#{Deimos.config.producers.topic_prefix}#{config.name}"
+
+          validate_key_config(config, message)
+
+          message
+        end
+      end
+
+      def validate_key_config(config, message)
+        if message[:key].nil? && config.deserializers[:key].is_a?(Deimos::Transcoder)
+          raise 'No key given but a key is required! Use `key_config none: true` to avoid using keys.'
+        end
+      end
+
+      # @param message [Deimos::Message]
+      # @param karafka_message [Hash]
+      # @param config [Deimos::ProducerConfig]
+      def _process_message(message, karafka_message, config)
+        encoder = config.deserializers[:payload].backend
+        key_transcoder = config.deserializers[:key]
+        # this violates the Law of Demeter but it has to happen in a very
+        # specific order and requires a bunch of methods on the producer
+        # to work correctly.
+        message.add_fields(encoder.schema_fields.map(&:name))
+        message.key = karafka_message[:key] || _retrieve_key(message.payload, key_transcoder)
+        # need to do this before _coerce_fields because that might result
+        # in an empty payload which is an *error* whereas this is intended.
+        message.payload = nil if message.payload.blank?
+        message.coerce_fields(encoder)
+        message.encoded_key = _encode_key(message.key, config)
+        message.topic = config.name
+        message.encoded_payload = if message.payload.nil?
+                                    nil
+                                  else
+                                    encoder.encode(message.payload,
+                                                   topic: "#{Deimos.config.producers.topic_prefix}#{config.name}-value")
+                                  end
+      end
+
+      # @param key [Object]
+      # @param config [ProducerConfig]
+      # @return [String|Object]
+      def _encode_key(key, config)
+        return nil if key.nil?
+
+        if config.deserializers[:key].respond_to?(:encode_key)
+          config.deserializers[:key].encode_key(key)
+        elsif key
+          config.deserializers[:payload].encode(key)
+        else
+          key
+        end
+      end
+
+      # @param payload [Hash]
+      # @param key_transcoder [Deimos::Transcoder]
+      # @return [String]
+      def _retrieve_key(payload, key_transcoder)
+        key = payload.delete(:payload_key)
+        return key if key || !key_transcoder.respond_to?(:key_field)
+
+        key_transcoder.key_field ? payload[key_transcoder.key_field] : nil
+      end
+    end
+  end
+end
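In normal use this middleware runs inside WaterDrop's producer pipeline rather than being invoked directly, but a hypothetical walk-through (assuming 'my-topic' is configured in the Karafka routes with a schema and key config) illustrates its effect on a produce payload:

    message = { topic: 'my-topic', payload: { 'test_id' => 'id1', 'some_int' => 3 },
                headers: nil, partition_key: nil }
    encoded = Deimos::ProducerMiddleware.call(message)
    # encoded[:payload] now holds the schema-encoded bytes, encoded[:key] the
    # encoded key, and encoded[:topic] includes any configured topic_prefix.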
data/lib/deimos/ext/producer_route.rb (new file)
@@ -0,0 +1,22 @@
+module Deimos
+  class ProducerRoute < Karafka::Routing::Features::Base
+    FIELDS = %i(producer_class payload_log disabled)
+
+    Config = Struct.new(*FIELDS, keyword_init: true)
+    module Topic
+      FIELDS.each do |field|
+        define_method(field) do |*args|
+          active(false) if field == :producer_class
+          @deimos_producer_config ||= Config.new
+          if args.any?
+            @deimos_producer_config.public_send("#{field}=", args[0])
+            _deimos_setup_transcoders if schema && namespace
+          end
+          @deimos_producer_config[field]
+        end
+      end
+    end
+  end
+end
+
+Deimos::ProducerRoute.activate
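A sketch of the producer-side routing these fields enable (topic and producer names hypothetical; payload_log controls how payloads are rendered in logs):

    Karafka::App.routes.draw do
      topic 'my-topic' do
        producer_class MyProducer
        payload_log :keys # assumption: log message keys rather than full payloads
      end
    end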
data/lib/deimos/ext/redraw.rb (new file)
@@ -0,0 +1,29 @@
+# This is for temporary testing until https://github.com/karafka/karafka/pull/2347 is merged and released.
+
+module Karafka
+  module Routing
+    class Builder < Array
+      alias array_clear clear
+
+      def clear
+        @mutex.synchronize do
+          @defaults = EMPTY_DEFAULTS
+          @draws.clear
+          array_clear
+        end
+      end
+
+      def redraw(&block)
+        @mutex.synchronize do
+          @draws.clear
+          array_clear
+        end
+        draw(&block)
+      end
+
+    end
+  end
+end
+
+require 'karafka'
+require 'karafka/routing/builder'
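With this patch applied, a test suite can rebuild the routing tree from scratch rather than appending to earlier draws; a sketch (consumer name hypothetical):

    Karafka::App.routes.redraw do
      topic 'test-topic' do
        consumer TestConsumer
      end
    end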
data/lib/deimos/ext/routing_defaults.rb (new file)
@@ -0,0 +1,72 @@
+# This monkey patch was provided by Maciej, the maintainer of Karafka. This allows
+# configs to override each other on a more granular basis rather than each `configure` call
+# blowing away all fields. It also supports multiple default blocks.
+#
+# Unfortunately this can't be merged into Karafka as of now because it will be a major breaking
+# change. As a compromise, it has been added to the test coverage of Karafka to ensure that
+# other changes don't break this.
+# https://github.com/karafka/karafka/issues/2344
+class Matcher
+  def initialize
+    @applications = []
+  end
+
+  def replay_on(topic_node)
+    @applications.each do |method, kwargs|
+      if kwargs.is_a?(Hash)
+        ref = topic_node.public_send(method)
+
+        kwargs.each do |arg, val|
+          if ref.respond_to?("#{arg}=")
+            ref.public_send("#{arg}=", val)
+          else
+            if ref.respond_to?(:details)
+              ref.details.merge!(kwargs)
+            elsif ref.is_a?(Hash)
+              ref.merge!(kwargs)
+            else
+              raise 'No idea if such case exists, if so, similar handling as config'
+            end
+          end
+        end
+      end
+
+      if kwargs.is_a?(Array) && kwargs.size == 1
+        if topic_node.respond_to?("#{method}=")
+          topic_node.public_send(:"#{method}=", kwargs.first)
+        else
+          topic_node.public_send(method, *kwargs)
+        end
+      end
+    end
+  end
+
+  def method_missing(m, *args, **kwargs)
+    if args.empty?
+      @applications << [m, kwargs]
+    else
+      @applications << [m, args]
+    end
+  end
+end
+
+DEFAULTS = Matcher.new
+
+module Builder
+  def defaults(&block)
+    DEFAULTS.instance_eval(&block) if block
+  end
+end
+
+module ConsumerGroup
+  def topic=(name, &block)
+    k = Matcher.new
+    t = super(name)
+    k.instance_eval(&block) if block
+    DEFAULTS.replay_on(t)
+    k.replay_on(t)
+  end
+end
+
+Karafka::Routing::Builder.prepend Builder
+Karafka::Routing::ConsumerGroup.prepend ConsumerGroup
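A sketch of what the patch enables: `defaults` blocks now merge field-by-field into each topic instead of a later `configure`/`draw` call clobbering earlier settings (topic, schema, and namespace values hypothetical):

    Karafka::App.routes.draw do
      defaults do
        namespace 'com.my-company'
        payload_log :count # assumption: one of the supported payload_log modes
      end

      topic 'my-topic' do
        consumer MyConsumer # inherits the defaults above
        schema 'MySchema'
      end
    end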