deimos-ruby 2.2.0 → 2.2.2

This diff shows the published contents of deimos-ruby as released to a supported public registry, comparing versions 2.2.0 and 2.2.2. It is provided for informational purposes only and reflects the packages exactly as they appear in the registry.
Files changed (90)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +33 -30
  3. data/CHANGELOG.md +10 -0
  4. data/Gemfile +0 -6
  5. data/deimos-ruby.gemspec +15 -11
  6. data/karafka.rb +7 -4
  7. data/lib/deimos/active_record_consume/batch_consumption.rb +7 -7
  8. data/lib/deimos/active_record_consume/batch_record.rb +2 -2
  9. data/lib/deimos/active_record_consume/message_consumption.rb +6 -5
  10. data/lib/deimos/active_record_consume/schema_model_converter.rb +2 -2
  11. data/lib/deimos/active_record_consumer.rb +1 -0
  12. data/lib/deimos/active_record_producer.rb +4 -2
  13. data/lib/deimos/backends/base.rb +1 -3
  14. data/lib/deimos/backends/outbox.rb +1 -1
  15. data/lib/deimos/config/configuration.rb +88 -75
  16. data/lib/deimos/consume/batch_consumption.rb +5 -5
  17. data/lib/deimos/consume/message_consumption.rb +3 -3
  18. data/lib/deimos/ext/consumer_route.rb +3 -3
  19. data/lib/deimos/ext/producer_metrics_listener.rb +2 -2
  20. data/lib/deimos/ext/producer_middleware.rb +19 -15
  21. data/lib/deimos/ext/producer_route.rb +4 -2
  22. data/lib/deimos/ext/routing_defaults.rb +9 -7
  23. data/lib/deimos/ext/schema_route.rb +22 -15
  24. data/lib/deimos/kafka_message.rb +1 -1
  25. data/lib/deimos/kafka_source.rb +36 -31
  26. data/lib/deimos/kafka_topic_info.rb +1 -1
  27. data/lib/deimos/logging.rb +20 -19
  28. data/lib/deimos/message.rb +1 -1
  29. data/lib/deimos/metrics/minimal_datadog_listener.rb +19 -6
  30. data/lib/deimos/metrics/provider.rb +4 -4
  31. data/lib/deimos/producer.rb +3 -1
  32. data/lib/deimos/railtie.rb +1 -1
  33. data/lib/deimos/schema_backends/avro_base.rb +1 -1
  34. data/lib/deimos/schema_backends/avro_schema_coercer.rb +46 -27
  35. data/lib/deimos/schema_backends/avro_schema_registry.rb +8 -8
  36. data/lib/deimos/schema_backends/base.rb +9 -9
  37. data/lib/deimos/schema_backends/plain.rb +1 -1
  38. data/lib/deimos/schema_backends/proto_base.rb +7 -5
  39. data/lib/deimos/schema_backends/proto_local.rb +0 -2
  40. data/lib/deimos/schema_backends/proto_schema_registry.rb +0 -2
  41. data/lib/deimos/schema_class/base.rb +1 -1
  42. data/lib/deimos/schema_class/record.rb +3 -3
  43. data/lib/deimos/test_helpers.rb +31 -26
  44. data/lib/deimos/tracing/provider.rb +5 -5
  45. data/lib/deimos/transcoder.rb +6 -2
  46. data/lib/deimos/utils/db_poller/base.rb +3 -3
  47. data/lib/deimos/utils/deadlock_retry.rb +2 -2
  48. data/lib/deimos/utils/outbox_producer.rb +14 -14
  49. data/lib/deimos/version.rb +1 -1
  50. data/lib/deimos.rb +4 -4
  51. data/lib/generators/deimos/active_record_generator.rb +2 -1
  52. data/lib/generators/deimos/db_poller_generator.rb +1 -0
  53. data/lib/generators/deimos/outbox_backend_generator.rb +1 -0
  54. data/lib/generators/deimos/schema_class_generator.rb +3 -2
  55. data/lib/generators/deimos/v2_generator.rb +184 -155
  56. data/spec/active_record_batch_consumer_association_spec.rb +6 -2
  57. data/spec/active_record_batch_consumer_spec.rb +83 -106
  58. data/spec/active_record_consume/batch_consumption_spec.rb +27 -28
  59. data/spec/active_record_consume/batch_slicer_spec.rb +4 -12
  60. data/spec/active_record_consume/mass_updater_spec.rb +42 -46
  61. data/spec/active_record_consume/schema_model_converter_spec.rb +1 -1
  62. data/spec/active_record_consumer_spec.rb +7 -5
  63. data/spec/active_record_producer_spec.rb +83 -73
  64. data/spec/backends/outbox_spec.rb +1 -1
  65. data/spec/batch_consumer_spec.rb +20 -20
  66. data/spec/consumer_spec.rb +23 -12
  67. data/spec/gen/sample/v1/sample_pb.rb +3 -3
  68. data/spec/generators/active_record_generator_spec.rb +4 -4
  69. data/spec/generators/schema_class/my_schema_with_circular_reference_spec.rb +2 -1
  70. data/spec/generators/schema_class/my_schema_with_complex_types_spec.rb +9 -2
  71. data/spec/generators/schema_class_generator_spec.rb +5 -5
  72. data/spec/kafka_source_spec.rb +13 -6
  73. data/spec/kafka_topic_info_spec.rb +7 -7
  74. data/spec/karafka/karafka.rb +6 -5
  75. data/spec/karafka_config/karafka_spec.rb +22 -19
  76. data/spec/logging_spec.rb +2 -0
  77. data/spec/producer_spec.rb +25 -20
  78. data/spec/schema_backends/avro_base_shared.rb +8 -8
  79. data/spec/schema_backends/avro_local_spec.rb +5 -6
  80. data/spec/schema_backends/avro_schema_registry_spec.rb +5 -6
  81. data/spec/schema_backends/proto_schema_registry_spec.rb +9 -12
  82. data/spec/schemas/my_namespace/generated.rb +1 -2
  83. data/spec/schemas/my_namespace/my_schema_with_complex_type.rb +5 -8
  84. data/spec/schemas/my_namespace/my_schema_with_union_type.rb +22 -23
  85. data/spec/spec_helper.rb +13 -17
  86. data/spec/utils/db_poller_spec.rb +5 -5
  87. data/spec/utils/deadlock_retry_spec.rb +1 -4
  88. data/spec/utils/outbox_producer_spec.rb +36 -24
  89. metadata +68 -161
  90. data/.ruby-version +0 -1
data/lib/deimos/utils/db_poller/base.rb CHANGED
@@ -11,7 +11,7 @@ module Deimos
     # Base poller class for retrieving and publishing messages.
     class Base
 
-      FATAL_CODES = %i(invalid_msg_size msg_size_too_large)
+      FATAL_CODES = %i(invalid_msg_size msg_size_too_large).freeze
       # @return [Integer]
       BATCH_SIZE = 1000
 
@@ -106,6 +106,7 @@ module Deimos
         raise Deimos::MissingImplementationError
       end
 
+      # rubocop:disable Naming/PredicateMethod
       # @param exception [Exception]
       # @param batch [Array<ActiveRecord::Base>]
       # @param status [PollStatus]
@@ -124,7 +125,6 @@ module Deimos
         end
       end
 
-      # rubocop:disable Metrics/AbcSize
       # @param batch [Array<ActiveRecord::Base>]
       # @param status [PollStatus]
       # @return [Boolean]
@@ -163,7 +163,7 @@ module Deimos
         end
         true
       end
-      # rubocop:enable Metrics/AbcSize
+      # rubocop:enable Naming/PredicateMethod
 
       # Publish batch using the configured producers
       # @param batch [Array<ActiveRecord::Base>]
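For context on the cop being swapped here: Naming/PredicateMethod flags methods that return a boolean but are not named with a trailing `?`. Renaming these poller methods would change a public API, so the cop is disabled around them instead. A hypothetical sketch of what the cop wants (not from the gem):

```ruby
# Offense: returns true/false but is not named as a predicate.
def process_batch(batch)
  batch.all?(&:valid?)
end

# Compliant: the trailing ? signals the boolean return.
def batch_valid?(batch)
  batch.all?(&:valid?)
end
```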
data/lib/deimos/utils/deadlock_retry.rb CHANGED
@@ -46,8 +46,8 @@ module Deimos
       raise if count <= 0
 
       Deimos::Logging.log_warn(
-        message: 'Deadlock encountered when trying to execute query. '\
-          "Retrying. #{count} attempt(s) remaining",
+        message: 'Deadlock encountered when trying to execute query. ' \
+                 "Retrying. #{count} attempt(s) remaining",
         tags: tags
       )
 
data/lib/deimos/utils/outbox_producer.rb CHANGED
@@ -14,10 +14,10 @@ module Deimos
     # @return [Integer]
     MAX_DELETE_ATTEMPTS = 3
     # @return [Array<Symbol>]
-    FATAL_CODES = %i(invalid_msg_size msg_size_too_large)
+    FATAL_CODES = %i(invalid_msg_size msg_size_too_large).freeze
 
     # @param logger [Logger]
-    def initialize(logger=Logger.new(STDOUT))
+    def initialize(logger=Logger.new($stdout))
       @id = SecureRandom.uuid
       @logger = logger
       @logger.push_tags("OutboxProducer #{@id}") if @logger.respond_to?(:push_tags)
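Swapping `STDOUT` for `$stdout` (RuboCop's Style/GlobalStdStream) matters because the global variable can be reassigned, for example to capture log output in tests, while the constant always points at the original stream. A sketch under that assumption (not from the gem's test suite):

```ruby
require 'logger'
require 'stringio'

captured = StringIO.new
$stdout = captured                 # redirect the global stream
Logger.new($stdout).error('boom')  # written to the StringIO, not the terminal
$stdout = STDOUT                   # restore the real stream
captured.string                    # => "...ERROR -- : boom\n"
```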
@@ -81,13 +81,14 @@ module Deimos
81
81
 
82
82
  KafkaTopicInfo.clear_lock(@current_topic, @id)
83
83
  rescue StandardError => e
84
- @logger.error("Error processing messages for topic #{@current_topic}: #{e.class.name}: #{e.message} #{e.backtrace.join("\n")}")
84
+ @logger.error('Error processing messages for topic ' \
85
+ "#{@current_topic}: #{e.class.name}: #{e.message} #{e.backtrace.join("\n")}")
85
86
  KafkaTopicInfo.register_error(@current_topic, @id)
86
87
  end
87
88
 
88
89
  # Process a single batch in a topic.
89
90
  # @return [void]
90
- def process_topic_batch
91
+ def process_topic_batch # rubocop:disable Naming/PredicateMethod
91
92
  messages = retrieve_messages
92
93
  return false if messages.empty?
93
94
 
@@ -95,9 +96,9 @@ module Deimos
       compacted_messages = compact_messages(messages)
       log_messages(compacted_messages)
       Karafka.monitor.instrument('deimos.outbox.produce', topic: @current_topic, messages: compacted_messages) do
-        begin
+
         produce_messages(compacted_messages.map(&:karafka_message))
-        rescue WaterDrop::Errors::ProduceManyError => e
+      rescue WaterDrop::Errors::ProduceManyError => e
         if FATAL_CODES.include?(e.cause.try(:code))
           @logger.error('Message batch too large, deleting...')
           delete_messages(messages)
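The removed `begin` is redundant: since Ruby 2.6, `do...end` blocks act as implicit begin blocks, so a `rescue` clause can sit directly inside them, which is exactly the shape this hunk (and the `produce_messages` one below) moves to. A standalone illustration:

```ruby
[1, 0].each do |n|
  puts 10 / n
rescue ZeroDivisionError  # no explicit begin needed inside the block
  puts 'skipping zero'
end
# prints "10", then "skipping zero"
```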
@@ -106,7 +107,7 @@ module Deimos
           Deimos.log_error("Got error #{e.cause.class.name} when publishing #{batch_size} messages, retrying...")
           retry
         end
-        end
+
       end
       delete_messages(messages)
       Deimos.config.metrics&.increment(
@@ -203,13 +204,12 @@ module Deimos
     def produce_messages(batch)
       batch_size = batch.size
       current_index = 0
-      begin
-        batch[current_index..-1].in_groups_of(batch_size, false).each do |group|
-          @logger.debug("Publishing #{group.size} messages to #{@current_topic}")
-          Karafka.producer.produce_many_sync(group)
-          current_index += group.size
-          @logger.info("Sent #{group.size} messages to #{@current_topic}")
-        end
+
+      batch[current_index..-1].in_groups_of(batch_size, false).each do |group|
+        @logger.debug("Publishing #{group.size} messages to #{@current_topic}")
+        Karafka.producer.produce_many_sync(group)
+        current_index += group.size
+        @logger.info("Sent #{group.size} messages to #{@current_topic}")
       end
     end
 
data/lib/deimos/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Deimos
-  VERSION = '2.2.0'
+  VERSION = '2.2.2'
 end
data/lib/deimos.rb CHANGED
@@ -50,7 +50,7 @@ module Deimos
     deimos.batch_consumption.invalid_records
     deimos.batch_consumption.valid_records
     deimos.outbox.produce
-  )
+  ).freeze
 
   class << self
 
@@ -66,7 +66,7 @@ module Deimos
 
     # @param schema [String, Symbol]
     # @param namespace [String]
-    # @return [Deimos::SchemaBackends::Base]
+    # @return [Class<Deimos::SchemaBackends::Base>]
     def schema_backend(schema:, namespace:, backend: Deimos.config.schema.backend)
       if config.schema.use_schema_classes
         # Initialize an instance of the provided schema
@@ -170,9 +170,9 @@ module Deimos
     # @return [Karafka::Routing::Topic,nil]
     def karafka_config_for(topic: nil, producer: nil)
       if topic
-        karafka_configs.find { |t| t.name == topic}
+        karafka_configs.find { |t| t.name == topic }
       elsif producer
-        karafka_configs.find { |t| t.producer_classes&.include?(producer)}
+        karafka_configs.find { |t| t.producer_classes&.include?(producer) }
       end
     end
 
data/lib/generators/deimos/active_record_generator.rb CHANGED
@@ -10,6 +10,7 @@ module Deimos
   # Generator for ActiveRecord model and migration.
   class ActiveRecordGenerator < Rails::Generators::Base
     include Rails::Generators::Migration
+
     if Rails.version < '4'
       extend(ActiveRecord::Generators::Migration)
     else
@@ -47,7 +48,7 @@ module Deimos
     # @return [String]
     def schema
       last_dot = self.full_schema.rindex('.')
-      self.full_schema[last_dot + 1..-1]
+      self.full_schema[(last_dot + 1)..-1]
     end
 
     # @return [String]
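The added parentheses are for readability only (in the spirit of RuboCop's Lint/AmbiguousRange); `full_schema[last_dot + 1..-1]` already parsed as intended. Assuming a dotted schema name, the method splits off the final segment:

```ruby
full_schema = 'com.my-namespace.MySchema'
last_dot = full_schema.rindex('.')  # => 16
full_schema[(last_dot + 1)..-1]     # => "MySchema"
```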
data/lib/generators/deimos/db_poller_generator.rb CHANGED
@@ -8,6 +8,7 @@ module Deimos
   # Generate the database backend migration.
   class DbPollerGenerator < Rails::Generators::Base
     include Rails::Generators::Migration
+
     if Rails.version < '4'
       extend(ActiveRecord::Generators::Migration)
     else
data/lib/generators/deimos/outbox_backend_generator.rb CHANGED
@@ -8,6 +8,7 @@ module Deimos
   # Generate the database backend migration.
   class OutboxBackendGenerator < Rails::Generators::Base
     include Rails::Generators::Migration
+
     if Rails.version < '4'
       extend(ActiveRecord::Generators::Migration)
     else
data/lib/generators/deimos/schema_class_generator.rb CHANGED
@@ -75,7 +75,7 @@ module Deimos
     elsif schema.respond_to?(:items)
       [schema.items]
     elsif schema.respond_to?(:schemas)
-      schema.schemas.reject { |s| s.class == Avro::Schema::PrimitiveSchema }
+      schema.schemas.reject { |s| s.instance_of?(Avro::Schema::PrimitiveSchema) }
     else
       []
     end
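Comparing classes with `==` works, but RuboCop's Style/ClassEqualityComparison prefers `instance_of?`, which asks the same question idiomatically. Note it is stricter than `is_a?`:

```ruby
1.instance_of?(Integer)  # => true  (exact class)
1.instance_of?(Numeric)  # => false (ancestor, not the exact class)
1.is_a?(Numeric)         # => true  (matches ancestors too)
```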
@@ -172,6 +172,7 @@ module Deimos
     Deimos.karafka_configs.each do |config|
       schema_name = config.schema
       next if schema_name.nil?
+
       namespace = config.namespace || Deimos.config.producers.schema_namespace
       key_schema_name = config.key_config[:schema]
 
@@ -215,7 +216,7 @@ module Deimos
     _set_instance_variables(schema, key_config)
 
     temp = schema.is_a?(Avro::Schema::RecordSchema) ? _record_class_template : _enum_class_template
-    res = ERB.new(temp, nil, '-')
+    res = ERB.new(temp, trim_mode: '-')
     res.result(binding)
   end
 
data/lib/generators/deimos/v2_generator.rb CHANGED
@@ -20,186 +20,215 @@ module Deimos
         end
       end
 
-        source_root File.expand_path('v2/templates', __dir__)
+      source_root File.expand_path('v2/templates', __dir__)
 
-        no_commands do
-          def deimos_config
-            Deimos.config
-          end
+      no_commands do
+        def deimos_config
+          Deimos.config
+        end
 
-          def deimos_configs
-            configs = {
-              producers: %i(topic_prefix disabled backend),
-              schema: %i(backend registry_url user password path generated_class_path use_schema_classes
-                         nest_child_schemas use_full_namespace schema_namespace_map),
-              db_producer: %i(log_topics compact_topics),
-            }
-
-            response = {}
-            configs.each do |group, settings|
-              group_setting = deimos_config.send(group)
-              next if settings.all? { |s| group_setting.default_value?(s)}
-
-              response[group] = {}
-              settings.each do |field|
-                unless group_setting.default_value?(field.to_sym)
-                  response[group][field.to_s] = group_setting.send(field.to_sym)
+        def deimos_configs
+          configs = {
+            producers: %i(topic_prefix disabled backend),
+            schema: %i(backend registry_url user password path generated_class_path use_schema_classes
+                       nest_child_schemas use_full_namespace schema_namespace_map),
+            db_producer: %i(log_topics compact_topics)
+          }
+
+          response = {}
+          configs.each do |group, settings|
+            group_setting = deimos_config.send(group)
+            next if settings.all? { |s| group_setting.default_value?(s) }
+
+            response[group] = {}
+            settings.each do |field|
+              unless group_setting.default_value?(field.to_sym)
+                response[group][field.to_s] = group_setting.send(field.to_sym)
               end
            end
-            response
          end
+          response
+        end
 
-          def setup_configs
-            configs = {}
-            configs[:client_id] = if deimos_config.kafka.client_id && deimos_config.kafka.client_id != 'phobos'
-                                    deimos_config.kafka.client_id
+        def setup_configs
+          configs = {}
+          configs[:client_id] = if deimos_config.kafka.client_id &&
+                                   deimos_config.kafka.client_id != 'phobos'
+                                  deimos_config.kafka.client_id
+                                else
+                                  subclass = Rails::Application.subclasses.first
+                                  if subclass
+                                    subclass.name.gsub('::Application', '').underscore
                                  else
-                                    Rails::Application.subclasses.first&.name&.gsub('::Application', '')&.underscore
+                                    nil
                                  end
-            if deimos_config.consumer_objects.any? { |c| c.max_concurrency.present? }
-              configs[:concurrency] = deimos_config.consumer_objects.map(&:max_concurrency).compact.max
-            end
-            if deimos_config.consumer_objects.any? { |c| c.max_wait_time.present? }
-              configs[:max_wait_time] = deimos_config.consumer_objects.map(&:max_wait_time).compact.max
-            end
-            configs.compact
+                                end
+          if deimos_config.consumer_objects.any? { |c| c.max_concurrency.present? }
+            configs[:concurrency] = deimos_config.consumer_objects.map(&:max_concurrency).compact.max
          end
-
-          def default_kafka_configs
-            configs = {}
-            configs["bootstrap.servers"] = deimos_config.kafka.seed_brokers.join(',')
-            configs["socket.connection.setup.timeout.ms"] = deimos_config.kafka.connect_timeout * 1000
-            configs["socket.timeout.ms"] = deimos_config.kafka.socket_timeout * 1000
-            configs["security.protocol"] = if deimos_config.kafka.ssl.enabled
-                                             "ssl"
-                                           elsif deimos_config.kafka.sasl.enabled
-                                             if deimos_config.kafka.sasl.enforce_ssl
-                                               "sasl_ssl"
-                                             else
-                                               "sasl_plain"
-                                             end
-                                           end
-            configs["ssl.ca.location"] = deimos_config.kafka.ssl.ca_cert
-            configs["ssl.certificate.location"] = deimos_config.kafka.ssl.client_cert
-            configs["ssl.key.location"] = deimos_config.kafka.ssl.client_cert_key
-            configs["ssl.endpoint.identification.algorithm"] = "https" if deimos_config.kafka.ssl.verify_hostname
-            configs["sasl.kerberos.principal"] = deimos_config.kafka.sasl.gssapi_principal
-            configs["sasl.kerberos.keytab"] = deimos_config.kafka.sasl.gssapi_keytab
-            configs["sasl.username"] = deimos_config.kafka.sasl.plain_username || deimos_config.kafka.sasl.scram_username
-            configs["sasl.password"] = deimos_config.kafka.sasl.plain_password || deimos_config.kafka.sasl.scram_password
-            configs["sasl.mechanisms"] = deimos_config.kafka.sasl.scram_mechanism
-            configs["request.required.acks"] = deimos_config.producers.required_acks
-            configs["message.send.max.retries"] = deimos_config.producers.max_retries
-            configs["retry.backoff.ms"] = deimos_config.producers.retry_backoff * 1000 if deimos_config.producers.retry_backoff
-            configs["compression.codec"] = deimos_config.producers.compression_codec
-            configs.compact
+          if deimos_config.consumer_objects.any? { |c| c.max_wait_time.present? }
+            configs[:max_wait_time] = deimos_config.consumer_objects.map(&:max_wait_time).compact.max
          end
+          configs.compact
+        end
 
-          def default_configs
-            {
-              payload_log: deimos_config.payload_log,
-              reraise_errors: deimos_config.consumers.reraise_errors,
-              replace_associations: deimos_config.consumers.replace_associations,
-              namespace: deimos_config.producers.schema_namespace,
-              use_schema_classes: deimos_config.schema.use_schema_classes
-            }.compact
+        def default_kafka_configs
+          configs = {}
+          configs['bootstrap.servers'] = deimos_config.kafka.seed_brokers.join(',')
+          configs['socket.connection.setup.timeout.ms'] = deimos_config.kafka.connect_timeout * 1000
+          configs['socket.timeout.ms'] = deimos_config.kafka.socket_timeout * 1000
+          configs['security.protocol'] = if deimos_config.kafka.ssl.enabled
+                                           'ssl'
+                                         elsif deimos_config.kafka.sasl.enabled
+                                           if deimos_config.kafka.sasl.enforce_ssl
+                                             'sasl_ssl'
+                                           else
+                                             'sasl_plain'
+                                           end
+                                         end
+          configs['ssl.ca.location'] = deimos_config.kafka.ssl.ca_cert
+          configs['ssl.certificate.location'] = deimos_config.kafka.ssl.client_cert
+          configs['ssl.key.location'] = deimos_config.kafka.ssl.client_cert_key
+          if deimos_config.kafka.ssl.verify_hostname
+            configs['ssl.endpoint.identification.algorithm'] = 'https'
          end
-
-          def consumer_configs
-            deimos_config.consumer_objects.group_by(&:group_id).map do |group_id, consumers|
-              [group_id, consumers.map do |consumer|
-                kafka_configs = {}
-                kafka_configs["auto.offset.reset"] = consumer.start_from_beginning ? 'earliest' : 'latest'
-                kafka_configs["session.timeout.ms"] = consumer.session_timeout * 1000 unless consumer.default_value?(:session_timeout)
-                kafka_configs["auto.commit.interval.ms"] = consumer.offset_commit_interval * 1000 unless consumer.default_value?(:offset_commit_interval)
-                kafka_configs["heartbeat.interval.ms"] = consumer.heartbeat_interval * 1000 unless consumer.default_value?(:heartbeat_interval)
-                configs = {
-                  kafka: kafka_configs.compact,
-                  topic: consumer.topic,
-                  consumer: ProcString.new(consumer.class_name),
-                  schema: consumer.schema,
-                  namespace: consumer.namespace,
-                  key_config: consumer.key_config,
-                }
-                configs[:use_schema_classes] = consumer.use_schema_classes unless consumer.default_value?(:use_schema_classes)
-                configs[:max_db_batch_size] = consumer.max_db_batch_size unless consumer.default_value?(:max_db_batch_size)
-                configs[:bulk_import_id_column] = consumer.bulk_import_id_column unless consumer.default_value?(:bulk_import_id_column)
-                configs[:replace_associations] = consumer.replace_associations unless consumer.default_value?(:replace_associations)
-                configs[:save_associations_first] = consumer.save_associations_first unless consumer.default_value?(:save_associations_first)
-                configs[:active] = false if consumer.disabled
-                configs[:each_message] = true unless consumer.delivery.to_s == 'inline_batch'
-                configs
-              end]
-            end.to_h
+          configs['sasl.kerberos.principal'] = deimos_config.kafka.sasl.gssapi_principal
+          configs['sasl.kerberos.keytab'] = deimos_config.kafka.sasl.gssapi_keytab
+          configs['sasl.username'] = deimos_config.kafka.sasl.plain_username ||
+                                     deimos_config.kafka.sasl.scram_username
+          configs['sasl.password'] = deimos_config.kafka.sasl.plain_password ||
+                                     deimos_config.kafka.sasl.scram_password
+          configs['sasl.mechanisms'] = deimos_config.kafka.sasl.scram_mechanism
+          configs['request.required.acks'] = deimos_config.producers.required_acks
+          configs['message.send.max.retries'] = deimos_config.producers.max_retries
+          if deimos_config.producers.retry_backoff
+            configs['retry.backoff.ms'] = deimos_config.producers.retry_backoff * 1000
          end
+          configs['compression.codec'] = deimos_config.producers.compression_codec
+          configs.compact
+        end
 
-          def producer_configs
-            deimos_config.producer_objects.map do |producer|
-              {
-                topic: producer.topic,
-                producer_class: ProcString.new(producer.class_name),
-                schema: producer.schema,
-                namespace: producer.namespace || deimos_config.producers.schema_namespace,
-                key_config: producer.key_config,
-                use_schema_classes: producer.use_schema_classes
-              }.compact
-            end
-          end
+        def default_configs
+          {
+            payload_log: deimos_config.payload_log,
+            reraise_errors: deimos_config.consumers.reraise_errors,
+            replace_associations: deimos_config.consumers.replace_associations,
+            namespace: deimos_config.producers.schema_namespace,
+            use_schema_classes: deimos_config.schema.use_schema_classes
+          }.compact
+        end
 
-          def rename_consumer_methods
-            deimos_config.consumer_objects.each do |consumer|
-              consumer.class_name.constantize
-              file = Object.const_source_location(consumer.class_name)[0]
-              if file.to_s.include?(Rails.root.to_s)
-                gsub_file(file, /([\t ]+)def consume\((\w+)(, *(\w+)?)\)/,
-                          "\\1def consume_message(message)\n\\1 \\2 = message.payload\n\\1 \\4 = message.metadata")
-                gsub_file(file, /([\t ]+)def consume_batch\((\w+)(, *(\w+)?)\)/,
-                          "\\1def consume_batch\n\\1 \\2 = messages.payloads\n\\1 \\4 = messages.metadata")
-                gsub_file(file, /def record_attributes\((\w+)\)/,
-                          "def record_attributes(\\1, key)")
+        def consumer_configs
+          deimos_config.consumer_objects.group_by(&:group_id).map { |group_id, consumers|
+            [group_id, consumers.map do |consumer|
+              kafka_configs = {}
+              kafka_configs['auto.offset.reset'] = consumer.start_from_beginning ? 'earliest' : 'latest'
+              unless consumer.default_value?(:session_timeout)
+                kafka_configs['session.timeout.ms'] = consumer.session_timeout * 1000
              end
-            end
-          end
+              unless consumer.default_value?(:offset_commit_interval)
+                kafka_configs['auto.commit.interval.ms'] = consumer.offset_commit_interval * 1000
+              end
+              unless consumer.default_value?(:heartbeat_interval)
+                kafka_configs['heartbeat.interval.ms'] = consumer.heartbeat_interval * 1000
+              end
+              configs = {
+                kafka: kafka_configs.compact,
+                topic: consumer.topic,
+                consumer: ProcString.new(consumer.class_name),
+                schema: consumer.schema,
+                namespace: consumer.namespace,
+                key_config: consumer.key_config
+              }
+              unless consumer.default_value?(:use_schema_classes)
+                configs[:use_schema_classes] = consumer.use_schema_classes
+              end
+              unless consumer.default_value?(:max_db_batch_size)
+                configs[:max_db_batch_size] = consumer.max_db_batch_size
+              end
+              unless consumer.default_value?(:bulk_import_id_column)
+                configs[:bulk_import_id_column] = consumer.bulk_import_id_column
+              end
+              unless consumer.default_value?(:replace_associations)
+                configs[:replace_associations] = consumer.replace_associations
+              end
+              unless consumer.default_value?(:save_associations_first)
+                configs[:save_associations_first] = consumer.save_associations_first
+              end
+              configs[:active] = false if consumer.disabled
+              configs[:each_message] = true unless consumer.delivery.to_s == 'inline_batch'
+              configs
+            end]
+          }.to_h
+        end
 
-          def fix_specs
-            Dir["*/**/*_spec.rb"].each do |file|
-              gsub_file(file, /,\s*call_original: true/, "")
-              gsub_file(file, 'Deimos::Backends::Test.sent_messages', "Deimos::TestHelpers.sent_messages")
-            end
+        def producer_configs
+          deimos_config.producer_objects.map do |producer|
+            {
+              topic: producer.topic,
+              producer_class: ProcString.new(producer.class_name),
+              schema: producer.schema,
+              namespace: producer.namespace || deimos_config.producers.schema_namespace,
+              key_config: producer.key_config,
+              use_schema_classes: producer.use_schema_classes
+            }.compact
          end
+        end
 
-          def process_all_files
-            template('karafka.rb.tt', "karafka.rb", force: true)
-            rename_consumer_methods
-            fix_specs
-            insert_into_file("Gemfile", " gem 'karafka-testing'\n", after: "group :test do\n")
-            # to avoid inserting multiple times, just in case there isn't a single group :test
-            insert_into_file("Gemfile", " gem 'karafka-testing'\n", after: /group .*test.* do\n/)
+        def rename_consumer_methods
+          deimos_config.consumer_objects.each do |consumer|
+            consumer.class_name.constantize
+            file = Object.const_source_location(consumer.class_name)[0]
+            next unless file.to_s.include?(Rails.root.to_s)
+
+            gsub_file(file, /([\t ]+)def consume\((\w+)(, *(\w+)?)\)/,
+                      "\\1def consume_message(message)\n\\1 \\2 = message.payload\n\\1 \\4 = message.metadata")
+            gsub_file(file, /([\t ]+)def consume_batch\((\w+)(, *(\w+)?)\)/,
+                      "\\1def consume_batch\n\\1 \\2 = messages.payloads\n\\1 \\4 = messages.metadata")
+            gsub_file(file, /def record_attributes\((\w+)\)/,
+                      'def record_attributes(\\1, key)')
          end
-
        end
 
-          desc 'Generate and update app files for version 2.0'
-          # @return [void]
-          def generate
-            process_all_files
-            say "Generation complete! You are safe to remove the existing initializer that configures Deimos.", :green
-            print_warnings
+        def fix_specs
+          Dir['*/**/*_spec.rb'].each do |file|
+            gsub_file(file, /,\s*call_original: true/, '')
+            gsub_file(file, 'Deimos::Backends::Test.sent_messages',
+                      'Deimos::TestHelpers.sent_messages')
+          end
        end
 
-          def print_warnings
-            say "Note: The following settings cannot be determined by the generator:", :yellow
-            say "* logger / phobos_logger (dynamic object, cannot be printed out)", :yellow
-            say "* kafka.sasl.oauth_token_provider", :yellow
-            say "* producers.max_buffer_size", :yellow
-            say "* metrics", :yellow
-            say "* tracer", :yellow
-            say "* consumers.bulk_import_id_generator", :yellow
-            say "* consumer.fatal_error", :yellow
-            say "* consumer.backoff (only handles minimum, not maximum)", :yellow
-            say "For more information, see https://github.com/flipp-oss/deimos/blob/master/docs/UPGRADING.md", :yellow
+        def process_all_files
+          template('karafka.rb.tt', 'karafka.rb', force: true)
+          rename_consumer_methods
+          fix_specs
+          insert_into_file('Gemfile', " gem 'karafka-testing'\n", after: "group :test do\n")
+          # to avoid inserting multiple times, just in case there isn't a single group :test
+          insert_into_file('Gemfile', " gem 'karafka-testing'\n", after: /group .*test.* do\n/)
        end
+
+      end
+
+      desc 'Generate and update app files for version 2.0'
+      # @return [void]
+      def generate
+        process_all_files
+        say('Generation complete! You are safe to remove the existing initializer that configures Deimos.', :green)
+        print_warnings
+      end
+
+      def print_warnings
+        say('Note: The following settings cannot be determined by the generator:', :yellow)
+        say('* logger / phobos_logger (dynamic object, cannot be printed out)', :yellow)
+        say('* kafka.sasl.oauth_token_provider', :yellow)
+        say('* producers.max_buffer_size', :yellow)
+        say('* metrics', :yellow)
+        say('* tracer', :yellow)
+        say('* consumers.bulk_import_id_generator', :yellow)
+        say('* consumer.fatal_error', :yellow)
+        say('* consumer.backoff (only handles minimum, not maximum)', :yellow)
+        say('For more information, see https://github.com/flipp-oss/deimos/blob/master/docs/UPGRADING.md', :yellow)
+      end
     end
   end
 end
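Putting the `gsub_file` patterns in `rename_consumer_methods` together, the generator rewrites V1-style consumer hooks roughly as follows (`MyConsumer` and its body are a hypothetical example, not from the gem):

```ruby
# Before the generator runs (Deimos V1 API):
class MyConsumer < Deimos::Consumer
  def consume(payload, metadata)
    Widget.create!(payload)
  end
end

# After rename_consumer_methods applies its substitutions:
class MyConsumer < Deimos::Consumer
  def consume_message(message)
    payload = message.payload
    metadata = message.metadata
    Widget.create!(payload)
  end
end
```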
data/spec/active_record_batch_consumer_association_spec.rb CHANGED
@@ -1,5 +1,6 @@
 # frozen_string_literal: true
 
+# rubocop:disable Lint/ConstantDefinitionInBlock
 module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
   describe Deimos::ActiveRecordConsumer,
            'Batch Consumer with MySQL handling associations',
@@ -179,8 +180,8 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
       expect {
         publish_batch([{ key: 2,
                          payload: { test_id: 'xyz', some_int: 5, title: 'Widget Title' } }])
-      }.to raise_error('Create bulk_import_id on the widgets table. Run rails g deimos:bulk_import_id {table}'\
-                       ' to create the migration.')
+      }.to raise_error('Create bulk_import_id on the widgets table. Run rails g deimos:bulk_import_id {table} ' \
+                       'to create the migration.')
     end
   end
 
@@ -204,6 +205,7 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
 
     context 'with one-to-many relationship in association and default bulk_import_id' do
       let(:replace_associations) { false }
+
       before(:each) do
         consumer_class.record_attributes_proc = proc do |payload|
           {
@@ -243,6 +245,7 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
 
     context 'with replace_associations on' do
      let(:replace_associations) { true }
+
      before(:each) do
        consumer_class.record_attributes_proc = proc do |payload|
          {
@@ -318,3 +321,4 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
     end
   end
 end
+# rubocop:enable Lint/ConstantDefinitionInBlock
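The new disable/enable pair exists because Lint/ConstantDefinitionInBlock flags constants (including classes and modules) defined inside blocks: they are not scoped to the block but land on the enclosing module, so they can leak between spec files. A minimal illustration:

```ruby
RSpec.describe 'widgets' do
  WIDGET_LIMIT = 10 # actually defined on Object, visible to every other spec
end
```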