deimos-ruby 2.2.0 → 2.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +33 -30
  3. data/CHANGELOG.md +10 -0
  4. data/Gemfile +0 -6
  5. data/deimos-ruby.gemspec +15 -11
  6. data/karafka.rb +7 -4
  7. data/lib/deimos/active_record_consume/batch_consumption.rb +7 -7
  8. data/lib/deimos/active_record_consume/batch_record.rb +2 -2
  9. data/lib/deimos/active_record_consume/message_consumption.rb +6 -5
  10. data/lib/deimos/active_record_consume/schema_model_converter.rb +2 -2
  11. data/lib/deimos/active_record_consumer.rb +1 -0
  12. data/lib/deimos/active_record_producer.rb +4 -2
  13. data/lib/deimos/backends/base.rb +1 -3
  14. data/lib/deimos/backends/outbox.rb +1 -1
  15. data/lib/deimos/config/configuration.rb +88 -75
  16. data/lib/deimos/consume/batch_consumption.rb +5 -5
  17. data/lib/deimos/consume/message_consumption.rb +3 -3
  18. data/lib/deimos/ext/consumer_route.rb +3 -3
  19. data/lib/deimos/ext/producer_metrics_listener.rb +2 -2
  20. data/lib/deimos/ext/producer_middleware.rb +19 -15
  21. data/lib/deimos/ext/producer_route.rb +4 -2
  22. data/lib/deimos/ext/routing_defaults.rb +9 -7
  23. data/lib/deimos/ext/schema_route.rb +22 -15
  24. data/lib/deimos/kafka_message.rb +1 -1
  25. data/lib/deimos/kafka_source.rb +36 -31
  26. data/lib/deimos/kafka_topic_info.rb +1 -1
  27. data/lib/deimos/logging.rb +20 -19
  28. data/lib/deimos/message.rb +1 -1
  29. data/lib/deimos/metrics/minimal_datadog_listener.rb +19 -6
  30. data/lib/deimos/metrics/provider.rb +4 -4
  31. data/lib/deimos/producer.rb +3 -1
  32. data/lib/deimos/railtie.rb +1 -1
  33. data/lib/deimos/schema_backends/avro_base.rb +1 -1
  34. data/lib/deimos/schema_backends/avro_schema_coercer.rb +46 -27
  35. data/lib/deimos/schema_backends/avro_schema_registry.rb +8 -8
  36. data/lib/deimos/schema_backends/base.rb +9 -9
  37. data/lib/deimos/schema_backends/plain.rb +1 -1
  38. data/lib/deimos/schema_backends/proto_base.rb +7 -5
  39. data/lib/deimos/schema_backends/proto_local.rb +0 -2
  40. data/lib/deimos/schema_backends/proto_schema_registry.rb +0 -2
  41. data/lib/deimos/schema_class/base.rb +1 -1
  42. data/lib/deimos/schema_class/record.rb +3 -3
  43. data/lib/deimos/test_helpers.rb +31 -26
  44. data/lib/deimos/tracing/provider.rb +5 -5
  45. data/lib/deimos/transcoder.rb +6 -2
  46. data/lib/deimos/utils/db_poller/base.rb +3 -3
  47. data/lib/deimos/utils/deadlock_retry.rb +2 -2
  48. data/lib/deimos/utils/outbox_producer.rb +14 -14
  49. data/lib/deimos/version.rb +1 -1
  50. data/lib/deimos.rb +4 -4
  51. data/lib/generators/deimos/active_record_generator.rb +2 -1
  52. data/lib/generators/deimos/db_poller_generator.rb +1 -0
  53. data/lib/generators/deimos/outbox_backend_generator.rb +1 -0
  54. data/lib/generators/deimos/schema_class_generator.rb +3 -2
  55. data/lib/generators/deimos/v2_generator.rb +184 -155
  56. data/spec/active_record_batch_consumer_association_spec.rb +6 -2
  57. data/spec/active_record_batch_consumer_spec.rb +83 -106
  58. data/spec/active_record_consume/batch_consumption_spec.rb +27 -28
  59. data/spec/active_record_consume/batch_slicer_spec.rb +4 -12
  60. data/spec/active_record_consume/mass_updater_spec.rb +42 -46
  61. data/spec/active_record_consume/schema_model_converter_spec.rb +1 -1
  62. data/spec/active_record_consumer_spec.rb +7 -5
  63. data/spec/active_record_producer_spec.rb +83 -73
  64. data/spec/backends/outbox_spec.rb +1 -1
  65. data/spec/batch_consumer_spec.rb +20 -20
  66. data/spec/consumer_spec.rb +23 -12
  67. data/spec/gen/sample/v1/sample_pb.rb +3 -3
  68. data/spec/generators/active_record_generator_spec.rb +4 -4
  69. data/spec/generators/schema_class/my_schema_with_circular_reference_spec.rb +2 -1
  70. data/spec/generators/schema_class/my_schema_with_complex_types_spec.rb +9 -2
  71. data/spec/generators/schema_class_generator_spec.rb +5 -5
  72. data/spec/kafka_source_spec.rb +13 -6
  73. data/spec/kafka_topic_info_spec.rb +7 -7
  74. data/spec/karafka/karafka.rb +6 -5
  75. data/spec/karafka_config/karafka_spec.rb +22 -19
  76. data/spec/logging_spec.rb +2 -0
  77. data/spec/producer_spec.rb +25 -20
  78. data/spec/schema_backends/avro_base_shared.rb +8 -8
  79. data/spec/schema_backends/avro_local_spec.rb +5 -6
  80. data/spec/schema_backends/avro_schema_registry_spec.rb +5 -6
  81. data/spec/schema_backends/proto_schema_registry_spec.rb +9 -12
  82. data/spec/schemas/my_namespace/generated.rb +1 -2
  83. data/spec/schemas/my_namespace/my_schema_with_complex_type.rb +5 -8
  84. data/spec/schemas/my_namespace/my_schema_with_union_type.rb +22 -23
  85. data/spec/spec_helper.rb +13 -17
  86. data/spec/utils/db_poller_spec.rb +5 -5
  87. data/spec/utils/deadlock_retry_spec.rb +1 -4
  88. data/spec/utils/outbox_producer_spec.rb +36 -24
  89. metadata +68 -161
  90. data/.ruby-version +0 -1
@@ -6,7 +6,7 @@ require_relative '../tracing/mock'
 require 'active_support/core_ext/object'
 
 # :nodoc:
-module Deimos # rubocop:disable Metrics/ModuleLength
+module Deimos
   include FigTree
 
   # :nodoc:
@@ -24,14 +24,14 @@ module Deimos # rubocop:disable Metrics/ModuleLength
     Deimos.karafka_configs.each do |config|
       transcoder = config.deserializers[:key]
 
-      if transcoder.respond_to?(:key_field) &&
-         transcoder.key_field &&
-         transcoder.backend.supports_key_schemas?
-        transcoder.backend = Deimos.schema_backend(schema: config.schema,
-                                                   namespace: config.namespace,
-                                                   backend: transcoder.backend_type)
-        transcoder.backend.generate_key_schema(transcoder.key_field)
-      end
+      next unless transcoder.respond_to?(:key_field) &&
+                  transcoder.key_field &&
+                  transcoder.backend.supports_key_schemas?
+
+      transcoder.backend = Deimos.schema_backend(schema: config.schema,
+                                                 namespace: config.namespace,
+                                                 backend: transcoder.backend_type)
+      transcoder.backend.generate_key_schema(transcoder.key_field)
     end
   end
 
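This refactor replaces the nested `if` with a `next unless` guard clause; behavior is unchanged. For orientation, this code path runs at startup for any topic whose key transcoder carries a `key_field`. A minimal sketch of such a route, assuming a standard Karafka routing block (topic, schema, and consumer names are hypothetical):

    # karafka.rb -- a field-based key_config gives the key deserializer a
    # key_field, which triggers the key-schema generation shown above.
    Karafka::App.routes.draw do
      topic 'my-topic' do
        consumer MyConsumer
        schema 'MySchema'
        namespace 'com.my-namespace'
        key_config field: :test_id
      end
    end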
@@ -39,37 +39,39 @@ module Deimos # rubocop:disable Metrics/ModuleLength
     # @return [void]
     def load_generated_schema_classes
       if Deimos.config.schema.generated_class_path.nil?
-        raise 'Cannot use schema classes without schema.generated_class_path. Please provide a directory.'
+        raise 'Cannot use schema classes without schema.generated_class_path. ' \
+              'Please provide a directory.'
       end
 
-      Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].sort.each { |f| require f }
+      Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].sort.
+        each { |f| require f }
     rescue LoadError
-      raise 'Cannot load schema classes. Please regenerate classes with rake deimos:generate_schema_models.'
+      raise 'Cannot load schema classes. Please regenerate classes with' \
+            'rake deimos:generate_schema_models.'
     end
 
     # Ensure everything is set up correctly for the DB backend.
     # @!visibility private
     def validate_outbox_backend
-      begin
       require 'activerecord-import'
-      rescue LoadError
-        raise 'Cannot set producers.backend to :outbox without activerecord-import! Please add it to your Gemfile.'
-      end
+    rescue LoadError
+      raise 'Cannot set producers.backend to :outbox without activerecord-import! ' \
+            'Please add it to your Gemfile.'
     end
   end
 
-  # rubocop:enable Metrics/PerceivedComplexity, Metrics/AbcSize
-
   define_settings do
     setting :logger, removed: 'Use "logger" in Karafka setup block.'
     setting :payload_log, removed: 'Use topic.payload_log in Karafka settings'
     setting :phobos_logger, removed: 'Separate logger for Phobos is no longer supported'
 
     setting :kafka do
-      setting :logger, Logger.new(STDOUT), removed: "Karafka uses Rails logger by default"
-      setting :seed_brokers, ['localhost:9092'], removed: 'Use kafka(bootstrap.servers) in Karafka settings'
+      setting :logger, Logger.new(STDOUT), removed: 'Karafka uses Rails logger by default'
+      setting :seed_brokers, ['localhost:9092'], removed: 'Use kafka(bootstrap.servers)' \
+                                                          'in Karafka settings'
       setting :client_id, 'phobos', removed: 'Use client_id in Karafka setup block.'
-      setting :connect_timeout, 15, removed: 'Use kafka(socket.connection.setup.timeout.ms) in Karafka settings'
+      setting :connect_timeout, 15, removed: 'Use kafka(socket.connection.setup.timeout.ms)' \
+                                             'in Karafka settings'
       setting :socket_timeout, 15, removed: 'Use kafka(socket.timeout.ms) in Karafka settings'
 
       setting :ssl do
@@ -77,12 +79,14 @@ module Deimos # rubocop:disable Metrics/ModuleLength
         setting :ca_cert, removed: 'Use kafka(ssl.ca.pem) in Karafka settings'
         setting :client_cert, removed: 'Use kafka(ssl.certificate.pem) in Karafka settings'
         setting :client_cert_key, removed: 'Use kafka(ssl.key.pem) in Karafka settings'
-        setting :verify_hostname, removed: 'Use kafka(ssl.endpoint.identification.algorithm=https) in Karafka settings'
+        setting :verify_hostname, removed: 'Use kafka(ssl.endpoint.identification.algorithm=https)' \
+                                           'in Karafka settings'
         setting :ca_certs_from_system, removed: 'Should not be necessary with librdkafka.'
       end
 
       setting :sasl do
-        setting :enabled, removed: 'Use kafka(security.protocol=sasl_ssl or sasl_plaintext) in Karafka settings'
+        setting :enabled, removed: 'Use kafka(security.protocol=sasl_ssl or sasl_plaintext)' \
+                                   'in Karafka settings'
         setting :gssapi_principal, removed: 'Use kafka(sasl.kerberos.principal) in Karafka settings'
         setting :gssapi_keytab, removed: 'Use kafka(sasl.kerberos.keytab) in Karafka settings'
         setting :plain_authzid, removed: 'No longer needed with rdkafka'
@@ -99,26 +103,28 @@ module Deimos # rubocop:disable Metrics/ModuleLength
     setting :consumers do
       setting :reraise_errors, removed: 'Use topic.reraise_errors in Karafka settings'
       setting :report_lag, removed: "Use Karafka's built in lag reporting"
-      setting(:fatal_error, removed: "Use topic.fatal_error in Karafka settings")
-      setting(:bulk_import_id_generator, removed: "Use topic.bulk_import_id_generator in Karafka settings")
-      setting :save_associations_first, removed: "Use topic.save_associations_first"
-      setting :replace_associations, removed: "Use topic.replace_associations in Karafka settings"
+      setting(:fatal_error, removed: 'Use topic.fatal_error in Karafka settings')
+      setting(:bulk_import_id_generator, removed: 'Use topic.bulk_import_id_generator ' \
+                                                  'in Karafka settings')
+      setting :save_associations_first, removed: 'Use topic.save_associations_first'
+      setting :replace_associations, removed: 'Use topic.replace_associations in Karafka settings'
     end
 
     setting :producers do
-      setting :ack_timeout, removed: "Not supported in rdkafka"
-      setting :required_acks, 1, removed: "Use kafka(request.required.acks) in Karafka settings"
-      setting :max_retries, removed: "Use kafka(message.send.max.retries) in Karafka settings"
-      setting :retry_backoff, removed: "Use kafka(retry.backoff.ms) in Karafka settings"
-      setting :max_buffer_size, removed: "Not relevant with Karafka. You may want to see the queue.buffering.max.messages setting."
-      setting :max_buffer_bytesize, removed: "Not relevant with Karafka."
-      setting :compression_codec, removed: "Use kafka(compression.codec) in Karafka settings"
-      setting :compression_threshold, removed: "Not supported in Karafka."
-      setting :max_queue_size, removed: "Not relevant to Karafka."
-      setting :delivery_threshold, removed: "Not relevant to Karafka."
-      setting :delivery_interval, removed: "Not relevant to Karafka."
-      setting :persistent_connections, removed: "Karafka connections are always persistent."
-      setting :schema_namespace, removed: "Use topic.namespace in Karafka settings"
+      setting :ack_timeout, removed: 'Not supported in rdkafka'
+      setting :required_acks, 1, removed: 'Use kafka(request.required.acks) in Karafka settings'
+      setting :max_retries, removed: 'Use kafka(message.send.max.retries) in Karafka settings'
+      setting :retry_backoff, removed: 'Use kafka(retry.backoff.ms) in Karafka settings'
+      setting :max_buffer_size, removed: 'Not relevant with Karafka. You may want to see ' \
+                                         'the queue.buffering.max.messages setting.'
+      setting :max_buffer_bytesize, removed: 'Not relevant with Karafka.'
+      setting :compression_codec, removed: 'Use kafka(compression.codec) in Karafka settings'
+      setting :compression_threshold, removed: 'Not supported in Karafka.'
+      setting :max_queue_size, removed: 'Not relevant to Karafka.'
+      setting :delivery_threshold, removed: 'Not relevant to Karafka.'
+      setting :delivery_interval, removed: 'Not relevant to Karafka.'
+      setting :persistent_connections, removed: 'Karafka connections are always persistent.'
+      setting :schema_namespace, removed: 'Use topic.namespace in Karafka settings'
 
       # Add a prefix to all topic names. This can be useful if you're using
       # the same Kafka broker for different environments that are producing
@@ -210,47 +216,53 @@ module Deimos # rubocop:disable Metrics/ModuleLength
     end
 
     setting :db_producer do
-      setting :logger, removed: "Use outbox.logger"
-      setting :log_topics, removed: "Use outbox.log_topics"
-      setting :compact_topics, removed: "Use outbox.compact_topics"
+      setting :logger, removed: 'Use outbox.logger'
+      setting :log_topics, removed: 'Use outbox.log_topics'
+      setting :compact_topics, removed: 'Use outbox.compact_topics'
    end
 
    setting_object :producer do
-      setting :class_name, removed: "Use topic.producer_class in Karafka settings."
-      setting :topic, removed: "Use Karafka settings."
-      setting :schema, removed: "Use topic.schema(schema:) in Karafka settings."
-      setting :namespace, removed: "Use topic.schema(namespace:) in Karafka settings."
-      setting :key_config, removed: "Use topic.schema(key_config:) in Karafka settings."
-      setting :use_schema_classes, removed: "Use topic.schema(use_schema_classes:) in Karafka settings."
+      setting :class_name, removed: 'Use topic.producer_class in Karafka settings.'
+      setting :topic, removed: 'Use Karafka settings.'
+      setting :schema, removed: 'Use topic.schema(schema:) in Karafka settings.'
+      setting :namespace, removed: 'Use topic.schema(namespace:) in Karafka settings.'
+      setting :key_config, removed: 'Use topic.schema(key_config:) in Karafka settings.'
+      setting :use_schema_classes, removed: 'Use topic.schema(use_schema_classes:) in ' \
+                                            'Karafka settings.'
    end
 
    setting_object :consumer do
-      setting :class_name, removed: "Use topic.consumer in Karafka settings."
-      setting :topic, removed: "Use Karafka settings."
-      setting :schema, removed: "Use topic.schema(schema:) in Karafka settings."
-      setting :namespace, removed: "Use topic.schema(namespace:) in Karafka settings."
-      setting :key_config, removed: "Use topic.schema(key_config:) in Karafka settings."
-      setting :disabled, removed: "Use topic.active in Karafka settings."
-      setting :use_schema_classes, removed: "Use topic.use_schema_classes in Karafka settings."
-      setting :max_db_batch_size, removed: "Use topic.max_db_batch_size in Karafka settings."
-      setting :bulk_import_id_column, removed: "Use topic.bulk_import_id_column in Karafka settings."
-      setting :replace_associations, removed: "Use topic.replace_associations in Karafka settings."
-      setting :bulk_import_id_generator, removed: "Use topic.bulk_import_id_generator in Karafka settings."
-      setting :save_associations_first, removed: "Use topic.save_associations_first"
-      setting :group_id, removed: "Use kafka(group.id) in Karafka settings."
+      setting :class_name, removed: 'Use topic.consumer in Karafka settings.'
+      setting :topic, removed: 'Use Karafka settings.'
+      setting :schema, removed: 'Use topic.schema(schema:) in Karafka settings.'
+      setting :namespace, removed: 'Use topic.schema(namespace:) in Karafka settings.'
+      setting :key_config, removed: 'Use topic.schema(key_config:) in Karafka settings.'
+      setting :disabled, removed: 'Use topic.active in Karafka settings.'
+      setting :use_schema_classes, removed: 'Use topic.use_schema_classes in Karafka settings.'
+      setting :max_db_batch_size, removed: 'Use topic.max_db_batch_size in Karafka settings.'
+      setting :bulk_import_id_column, removed: 'Use topic.bulk_import_id_column' \
+                                               'in Karafka settings.'
+      setting :replace_associations, removed: 'Use topic.replace_associations in Karafka settings.'
+      setting :bulk_import_id_generator, removed: 'Use topic.bulk_import_id_generator ' \
+                                                  'in Karafka settings.'
+      setting :save_associations_first, removed: 'Use topic.save_associations_first'
+      setting :group_id, removed: 'Use kafka(group.id) in Karafka settings.'
       setting :max_concurrency, removed: "Use Karafka's 'config.concurrency' in the setup block."
-      setting :start_from_beginning, removed: "Use initial_offset in the setup block, or kafka(auto.offset.reset) in topic settings."
-      setting :max_bytes_per_partition, removed: "Use max_messages in the setup block."
-      setting :min_bytes, removed: "Not supported in Karafka."
-      setting :max_wait_time, removed: "Use max_wait_time in the setup block."
-      setting :force_encoding, removed: "Not supported with Karafka."
-      setting :delivery, :batch, removed: "Use batch: true/false in Karafka topic configs."
-      setting :backoff, removed: "Use kafka(retry.backoff.ms) and retry.backoff.max.ms in Karafka settings."
-      setting :session_timeout, removed: "Use kafka(session.timeout.ms) in Karafka settings."
-      setting :offset_commit_interval, removed: "Use kafka(auto.commit.interval.ms) in Karafka settings."
-      setting :offset_commit_threshold, removed: "Not supported with Karafka."
-      setting :offset_retention_time, removed: "Not supported with Karafka."
-      setting :heartbeat_interval, removed: "Use kafka(heartbeat.interval.ms) in Karafka settings."
+      setting :start_from_beginning, removed: 'Use initial_offset in the setup block, or ' \
+                                              'kafka(auto.offset.reset) in topic settings.'
+      setting :max_bytes_per_partition, removed: 'Use max_messages in the setup block.'
+      setting :min_bytes, removed: 'Not supported in Karafka.'
+      setting :max_wait_time, removed: 'Use max_wait_time in the setup block.'
+      setting :force_encoding, removed: 'Not supported with Karafka.'
+      setting :delivery, :batch, removed: 'Use batch: true/false in Karafka topic configs.'
+      setting :backoff, removed: 'Use kafka(retry.backoff.ms) and retry.backoff.max.ms' \
+                                 'in Karafka settings.'
+      setting :session_timeout, removed: 'Use kafka(session.timeout.ms) in Karafka settings.'
+      setting :offset_commit_interval, removed: 'Use kafka(auto.commit.interval.ms) ' \
+                                                'in Karafka settings.'
+      setting :offset_commit_threshold, removed: 'Not supported with Karafka.'
+      setting :offset_retention_time, removed: 'Not supported with Karafka.'
+      setting :heartbeat_interval, removed: 'Use kafka(heartbeat.interval.ms) in Karafka settings.'
    end
 
    setting_object :db_poller do
@@ -290,7 +302,8 @@ module Deimos # rubocop:disable Metrics/ModuleLength
      # Value to set the state_column to if publishing fails - state-based only.
      setting :failed_state
 
-      # Inherited poller class name to use for publishing to multiple kafka topics from a single poller
+      # Inherited poller class name to use for publishing to multiple kafka topics
+      # from a single poller
      setting :poller_class, nil
    end
 
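Every `removed:` marker above turns a legacy Deimos 1.x config key into a hard error whose message names the Karafka-era replacement. A hedged sketch of that migration for one of the settings above (topic name hypothetical; exact block syntax per your Karafka setup):

    # Before -- Deimos 1.x config, now raises 'Use topic.namespace in Karafka settings':
    Deimos.configure do
      producers.schema_namespace 'com.my-namespace'
    end

    # After -- the namespace moves onto the topic in the Karafka routes:
    Karafka::App.routes.draw do
      topic 'my-topic' do
        namespace 'com.my-namespace'
      end
    end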
@@ -18,14 +18,14 @@ module Deimos
 
     def _consume_batch
       _with_span do
-        begin
+
         benchmark = Benchmark.measure do
           consume_batch
         end
         _handle_batch_success(benchmark.real)
-        rescue StandardError => e
+      rescue StandardError => e
         _handle_batch_error(e)
-        end
+
       end
     end
 
@@ -51,9 +51,9 @@ module Deimos
           message: 'Finished processing Kafka batch event',
           time_elapsed: time_taken,
           metadata: Deimos::Logging.metadata_log_text(messages.metadata)
-        }.merge(Deimos::Logging.messages_log_text(self.topic.payload_log, messages)))
+        }.merge(Deimos::Logging.messages_log_text(self.topic.payload_log, messages))
+      )
       end
-
     end
   end
 end
@@ -20,7 +20,7 @@ module Deimos
 
     def _consume_messages
       messages.each do |message|
-        begin
+
         _with_span do
           _received_message(message)
           benchmark = Benchmark.measure do
@@ -30,7 +30,7 @@ module Deimos
         rescue StandardError => e
           _handle_message_error(e, message)
         end
-        end
+
       end
     end
 
@@ -55,7 +55,7 @@ module Deimos
         error_message: exception.message,
         error: exception.backtrace
       )
-    rescue # serialization issues
+    rescue StandardError # serialization issues
       Deimos::Logging.log_warn(
         message: 'Error consuming message',
         handler: self.class.name,
@@ -1,7 +1,8 @@
+# frozen_string_literal: true
+
 module Deimos
   class ConsumerRoute < Karafka::Routing::Features::Base
     module Topic
-
       FIELDS = %i(max_db_batch_size
                   bulk_import_id_column
                   replace_associations
@@ -9,8 +10,7 @@ module Deimos
                   each_message
                   reraise_errors
                   fatal_error
-                  save_associations_first
-      )
+                  save_associations_first).freeze
       Config = Struct.new(*FIELDS, keyword_init: true)
 
       FIELDS.each do |field|
@@ -2,10 +2,10 @@
 
 module Deimos
   class ProducerMetricsListener
-    %i[
+    %i(
       produced_sync
       produced_async
-    ].each do |event_scope|
+    ).each do |event_scope|
       define_method(:"on_message_#{event_scope}") do |event|
         Deimos.config.metrics&.increment(
           'publish',
@@ -1,7 +1,7 @@
-module Deimos
+# frozen_string_literal: true
 
+module Deimos
   module ProducerMiddleware
-
     class << self
 
       def allowed_classes
@@ -31,19 +31,7 @@ module Deimos
                                headers: message[:headers],
                                partition_key: message[:partition_key])
         _process_message(m, message, config)
-        message[:payload] = m.encoded_payload
-        message[:label] = {
-          original_payload: m.payload,
-          original_key: m.key
-        }
-        message[:key] = m.encoded_key
-        message[:partition_key] = if m.partition_key
-                                    m.partition_key.to_s
-                                  elsif m.key
-                                    m.key.to_s
-                                  else
-                                    nil
-                                  end
+        _assign_message(m, message)
         message[:topic] = "#{Deimos.config.producers.topic_prefix}#{config.name}"
 
         validate_key_config(config, message)
@@ -52,6 +40,22 @@ module Deimos
         end
       end
 
+      def _assign_message(deimos_message, message)
+        message[:payload] = deimos_message.encoded_payload
+        message[:label] = {
+          original_payload: deimos_message.payload,
+          original_key: deimos_message.key
+        }
+        message[:key] = deimos_message.encoded_key
+        message[:partition_key] = if deimos_message.partition_key
+                                    deimos_message.partition_key.to_s
+                                  elsif deimos_message.key
+                                    deimos_message.key.to_s
+                                  else
+                                    nil
+                                  end
+      end
+
       def validate_key_config(config, message)
         if message[:key].nil? && config.deserializers[:key].is_a?(Deimos::Transcoder)
           raise 'No key given but a key is required! Use `key_config none: true` to avoid using keys.'
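The extracted `_assign_message` helper is behavior-preserving: it copies the encoded payload and key back onto the Karafka message hash and derives the partition key. A hedged sketch of the resulting hash shape (topic and values hypothetical):

    # Before middleware (as passed to WaterDrop):
    #   { topic: 'my-topic', payload: { 'test_id' => 'abc', 'some_int' => 3 } }
    #
    # After _assign_message, roughly:
    #   message[:payload]        # encoded payload (e.g. Avro bytes)
    #   message[:key]            # encoded key
    #   message[:label]          # { original_payload: ..., original_key: ... }
    #   message[:partition_key]  # partition_key.to_s, else key.to_s, else nil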
@@ -1,6 +1,8 @@
+# frozen_string_literal: true
+
 module Deimos
   class ProducerRoute < Karafka::Routing::Features::Base
-    FIELDS = %i(producer_classes payload_log disabled)
+    FIELDS = %i(producer_classes payload_log disabled).freeze
 
     Config = Struct.new(*FIELDS, keyword_init: true) do
       def producer_class=(val)
@@ -14,7 +16,7 @@ module Deimos
     module Topic
       (FIELDS + [:producer_class]).each do |field|
         define_method(field) do |*args|
-          active(false) if %i(producer_class producer_classes).include?(field)
+          active(false) if %i(producer_class producer_classes).include?(field) && args.any?
           @deimos_producer_config ||= Config.new
           if args.any?
             @deimos_producer_config.public_send("#{field}=", args[0])
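The added `&& args.any?` guard fixes a side effect: these generated methods double as getters, so merely reading `producer_class` off a topic used to call `active(false)` and deactivate it. Only a real setter call (arguments present) should do that. A hedged sketch (names hypothetical):

    topic 'my-topic' do
      producer_class MyProducer   # setter: args present, topic marked inactive for consuming
    end

    # Reading the value back (no args) is now side-effect-free:
    topic_config.producer_class   # => MyProducer; the active flag is untouched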
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 # This monkey patch was provided by Maciej, the maintainer of Karafka. This allows
 # configs to override each other on a more granular basis rather than each `configure` call
 # blowing away all fields. It also supports multiple default blocks.
@@ -46,11 +48,11 @@ class Matcher
   end
 
   def method_missing(m, *args, **kwargs)
-    if args.empty?
-      @applications << [m, kwargs]
-    else
-      @applications << [m, args]
-    end
+    @applications << if args.empty?
+                       [m, kwargs]
+                     else
+                       [m, args]
+                     end
   end
 end
 
@@ -72,5 +74,5 @@ module ConsumerGroup
   end
 end
 
-Karafka::Routing::Builder.prepend Builder
-Karafka::Routing::ConsumerGroup.prepend ConsumerGroup
+Karafka::Routing::Builder.prepend(Builder)
+Karafka::Routing::ConsumerGroup.prepend(ConsumerGroup)
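Per the comment at the top of this file, the patch makes configuration blocks merge field-by-field rather than each `configure` call replacing the whole config, and it allows several `defaults` blocks to stack. A hedged sketch of the intended behavior (settings and topic hypothetical):

    Karafka::App.routes.draw do
      defaults { payload_log :keys }
      defaults { reraise_errors true }   # a second defaults block also applies

      topic 'my-topic' do
        payload_log :count               # overrides only this field;
      end                                # reraise_errors remains true
    end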
@@ -1,6 +1,8 @@
-require "deimos/transcoder"
-require "deimos/ext/producer_middleware"
-require "deimos/schema_backends/plain"
+# frozen_string_literal: true
+
+require 'deimos/transcoder'
+require 'deimos/ext/producer_middleware'
+require 'deimos/schema_backends/plain'
 
 module Deimos
   class SchemaRoute < Karafka::Routing::Features::Base
@@ -9,9 +11,9 @@ module Deimos
       {
         schema: nil,
         namespace: nil,
-        key_config: {none: true},
+        key_config: { none: true },
         schema_backend: nil,
-        use_schema_classes: Deimos.config.schema.use_schema_classes
+        use_schema_classes: nil
       }.each do |field, default|
         define_method(field) do |*args|
           @_deimos_config ||= {}
@@ -23,14 +25,19 @@ module Deimos
           @_deimos_config[:schema][field] || default
         end
       end
-      def _deimos_setup_transcoders
+      def _deimos_setup_transcoders # rubocop:disable Metrics/AbcSize
+        use_classes = if use_schema_classes.nil?
+                        Deimos.config.schema.use_schema_classes
+                      else
+                        use_schema_classes
+                      end
         payload = Transcoder.new(
-          schema: schema,
-          namespace: namespace,
-          backend: schema_backend,
-          use_schema_classes: use_schema_classes,
-          topic: name
-        )
+          schema: schema,
+          namespace: namespace,
+          backend: schema_backend,
+          use_schema_classes: use_classes,
+          topic: name
+        )
 
         key = nil
 
@@ -39,7 +46,7 @@ module Deimos
             schema: schema,
             backend: schema_backend,
             namespace: namespace,
-            use_schema_classes: use_schema_classes,
+            use_schema_classes: use_classes,
             topic: name
           )
           key.backend = Deimos::SchemaBackends::Plain.new(schema: nil, namespace: nil)
@@ -49,7 +56,7 @@ module Deimos
             schema: schema,
             backend: schema_backend,
             namespace: namespace,
-            use_schema_classes: use_schema_classes,
+            use_schema_classes: use_classes,
             key_field: key_config[:field].to_s,
             topic: name
           )
@@ -58,7 +65,7 @@ module Deimos
             schema: key_config[:schema] || schema,
             backend: schema_backend,
             namespace: namespace,
-            use_schema_classes: use_schema_classes,
+            use_schema_classes: use_classes,
             topic: self.name
           )
         else
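The functional change in this file: `use_schema_classes` previously baked in `Deimos.config.schema.use_schema_classes` as its default when the class was loaded, so a global value set afterwards could be silently ignored. Defaulting to `nil` and resolving it inside `_deimos_setup_transcoders` makes the lookup lazy. A hedged sketch (FigTree-style config; names hypothetical):

    Deimos.configure do
      schema.use_schema_classes true   # now read when transcoders are built,
    end                                # not when SchemaRoute is defined

    Karafka::App.routes.draw do
      topic 'my-topic' do
        schema 'MySchema'              # inherits the global true
        use_schema_classes false       # an explicit per-topic value still wins
      end
    end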
@@ -12,7 +12,7 @@ module Deimos
     # @param mess [Object]
     # @return [void]
     def message=(mess)
-      write_attribute(:message, mess ? mess.to_s : nil)
+      write_attribute(:message, mess&.to_s)
     end
 
     # Decoded payload for this message.
@@ -50,9 +50,11 @@ module Deimos
       return unless self.class.kafka_config[:delete]
 
       self.class.kafka_producers.each do |p|
-        generated = p.respond_to?(:generate_deletion_payload) ?
-          p.generate_deletion_payload(self) :
-          self.deletion_payload
+        generated = if p.respond_to?(:generate_deletion_payload)
+                      p.generate_deletion_payload(self)
+                    else
+                      self.deletion_payload
+                    end
         p.publish_list([generated])
       end
     end
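The ternary-to-`if` rewrite is cosmetic, but it highlights the extension point the `respond_to?` check serves: a producer can define `generate_deletion_payload` to customize what is published when a KafkaSource record is destroyed, falling back to the record's own `deletion_payload` otherwise. A hedged sketch (producer and fields hypothetical):

    class MyProducer < Deimos::ActiveRecordProducer
      # Optional hook, detected via respond_to? above.
      def self.generate_deletion_payload(record)
        { test_id: record.test_id, deleted: true }
      end
    end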
@@ -88,7 +90,7 @@ module Deimos
       # @!visibility private
       def import_without_validations_or_callbacks(column_names,
                                                   array_of_attributes,
-                                                  options = {})
+                                                  options={})
         results = super
         if !self.kafka_config[:import] || array_of_attributes.empty?
           return results
@@ -96,40 +98,44 @@ module Deimos
 
         # This will contain an array of hashes, where each hash is the actual
         # attribute hash that created the object.
-        array_of_hashes = []
-        array_of_attributes.each do |array|
-          array_of_hashes << column_names.zip(array).to_h.with_indifferent_access
+        array_of_hashes = array_of_attributes.map do |array|
+          column_names.zip(array).to_h.with_indifferent_access
         end
         hashes_with_id, hashes_without_id = array_of_hashes.partition { |arr| arr[:id].present? }
 
         self.kafka_producers.each { |p| p.send_events(hashes_with_id) }
 
         if hashes_without_id.any?
-          if options[:on_duplicate_key_update].present? &&
-             options[:on_duplicate_key_update] != [:updated_at]
-            unique_columns = column_names.map(&:to_s) -
-                             options[:on_duplicate_key_update].map(&:to_s) - %w(id created_at)
-            records = hashes_without_id.map do |hash|
-              self.where(unique_columns.map { |c| [c, hash[c]] }.to_h).first
-            end
-            self.kafka_producers.each { |p| p.send_events(records) }
-          else
-            # re-fill IDs based on what was just entered into the DB.
-            last_id = if self.connection.adapter_name.downcase =~ /sqlite/
-                        self.connection.select_value('select last_insert_rowid()') -
-                          hashes_without_id.size + 1
-                      else
-                        # mysql
-                        self.connection.select_value('select LAST_INSERT_ID()')
-                      end
-            hashes_without_id.each_with_index do |attrs, i|
-              attrs[:id] = last_id + i
-            end
-            self.kafka_producers.each { |p| p.send_events(hashes_without_id) }
-          end
+          refill_records(column_names, options, hashes_without_id)
         end
         results
       end
+
+      # @!visibility private
+      def refill_records(column_names, options, hashes_without_id)
+        if options[:on_duplicate_key_update].present? &&
+           options[:on_duplicate_key_update] != [:updated_at]
+          unique_columns = column_names.map(&:to_s) -
+                           options[:on_duplicate_key_update].map(&:to_s) - %w(id created_at)
+          records = hashes_without_id.map do |hash|
+            self.where(unique_columns.map { |c| [c, hash[c]] }.to_h).first
+          end
+          self.kafka_producers.each { |p| p.send_events(records) }
+        else
+          # re-fill IDs based on what was just entered into the DB.
+          last_id = if self.connection.adapter_name.downcase =~ /sqlite/
+                      self.connection.select_value('select last_insert_rowid()') -
+                        hashes_without_id.size + 1
+                    else
+                      # mysql
+                      self.connection.select_value('select LAST_INSERT_ID()')
+                    end
+          hashes_without_id.each_with_index do |attrs, i|
+            attrs[:id] = last_id + i
+          end
+          self.kafka_producers.each { |p| p.send_events(hashes_without_id) }
+        end
+      end
     end
 
     # check if any field has value longer than the field limit
@@ -139,10 +145,9 @@ module Deimos
         next if self[col.name].blank?
 
         if self[col.name].to_s.length > col.limit
-          self[col.name] = self[col.name][0..col.limit - 1]
+          self[col.name] = self[col.name][0..(col.limit - 1)]
         end
       end
-      false
     end
   end
 end
@@ -15,7 +15,7 @@ module Deimos
       # @param topic [String]
       # @param lock_id [String]
       # @return [Boolean]
-      def lock(topic, lock_id)
+      def lock(topic, lock_id) # rubocop:disable Naming/PredicateMethod
         # Try to create it - it's fine if it already exists
         begin
           self.create(topic: topic, last_processed_at: Time.zone.now)