deimos-ruby 1.24.3 → 2.0.0.pre.alpha1

Files changed (118)
  1. checksums.yaml +4 -4
  2. data/.rubocop_todo.yml +0 -17
  3. data/.tool-versions +1 -0
  4. data/CHANGELOG.md +1 -1
  5. data/README.md +287 -498
  6. data/deimos-ruby.gemspec +4 -4
  7. data/docs/CONFIGURATION.md +133 -227
  8. data/docs/UPGRADING.md +237 -0
  9. data/lib/deimos/active_record_consume/batch_consumption.rb +28 -29
  10. data/lib/deimos/active_record_consume/message_consumption.rb +15 -21
  11. data/lib/deimos/active_record_consumer.rb +36 -26
  12. data/lib/deimos/active_record_producer.rb +28 -9
  13. data/lib/deimos/backends/base.rb +4 -35
  14. data/lib/deimos/backends/kafka.rb +6 -22
  15. data/lib/deimos/backends/kafka_async.rb +6 -22
  16. data/lib/deimos/backends/{db.rb → outbox.rb} +13 -9
  17. data/lib/deimos/config/configuration.rb +116 -385
  18. data/lib/deimos/consume/batch_consumption.rb +24 -124
  19. data/lib/deimos/consume/message_consumption.rb +36 -63
  20. data/lib/deimos/consumer.rb +16 -75
  21. data/lib/deimos/ext/consumer_route.rb +35 -0
  22. data/lib/deimos/ext/producer_middleware.rb +94 -0
  23. data/lib/deimos/ext/producer_route.rb +22 -0
  24. data/lib/deimos/ext/redraw.rb +29 -0
  25. data/lib/deimos/ext/routing_defaults.rb +72 -0
  26. data/lib/deimos/ext/schema_route.rb +70 -0
  27. data/lib/deimos/kafka_message.rb +2 -2
  28. data/lib/deimos/kafka_source.rb +2 -7
  29. data/lib/deimos/kafka_topic_info.rb +1 -1
  30. data/lib/deimos/logging.rb +71 -0
  31. data/lib/deimos/message.rb +2 -11
  32. data/lib/deimos/metrics/datadog.rb +40 -1
  33. data/lib/deimos/metrics/provider.rb +4 -4
  34. data/lib/deimos/producer.rb +39 -116
  35. data/lib/deimos/railtie.rb +6 -0
  36. data/lib/deimos/schema_backends/avro_base.rb +21 -21
  37. data/lib/deimos/schema_backends/avro_schema_registry.rb +1 -2
  38. data/lib/deimos/schema_backends/avro_validation.rb +2 -2
  39. data/lib/deimos/schema_backends/base.rb +19 -12
  40. data/lib/deimos/schema_backends/mock.rb +6 -1
  41. data/lib/deimos/schema_backends/plain.rb +47 -0
  42. data/lib/deimos/schema_class/base.rb +2 -2
  43. data/lib/deimos/schema_class/enum.rb +1 -1
  44. data/lib/deimos/schema_class/record.rb +2 -2
  45. data/lib/deimos/test_helpers.rb +95 -320
  46. data/lib/deimos/tracing/provider.rb +6 -6
  47. data/lib/deimos/transcoder.rb +88 -0
  48. data/lib/deimos/utils/db_poller/base.rb +16 -14
  49. data/lib/deimos/utils/db_poller/state_based.rb +3 -3
  50. data/lib/deimos/utils/db_poller/time_based.rb +4 -4
  51. data/lib/deimos/utils/db_poller.rb +1 -1
  52. data/lib/deimos/utils/deadlock_retry.rb +1 -1
  53. data/lib/deimos/utils/{db_producer.rb → outbox_producer.rb} +16 -47
  54. data/lib/deimos/utils/schema_class.rb +0 -7
  55. data/lib/deimos/version.rb +1 -1
  56. data/lib/deimos.rb +79 -26
  57. data/lib/generators/deimos/{db_backend_generator.rb → outbox_backend_generator.rb} +4 -4
  58. data/lib/generators/deimos/schema_class_generator.rb +0 -1
  59. data/lib/generators/deimos/v2/templates/karafka.rb.tt +149 -0
  60. data/lib/generators/deimos/v2_generator.rb +193 -0
  61. data/lib/tasks/deimos.rake +5 -7
  62. data/spec/active_record_batch_consumer_association_spec.rb +22 -13
  63. data/spec/active_record_batch_consumer_spec.rb +84 -65
  64. data/spec/active_record_consume/batch_consumption_spec.rb +10 -10
  65. data/spec/active_record_consume/batch_slicer_spec.rb +12 -12
  66. data/spec/active_record_consumer_spec.rb +29 -13
  67. data/spec/active_record_producer_spec.rb +36 -26
  68. data/spec/backends/base_spec.rb +0 -23
  69. data/spec/backends/kafka_async_spec.rb +1 -3
  70. data/spec/backends/kafka_spec.rb +1 -3
  71. data/spec/backends/{db_spec.rb → outbox_spec.rb} +14 -20
  72. data/spec/batch_consumer_spec.rb +66 -116
  73. data/spec/consumer_spec.rb +53 -147
  74. data/spec/deimos_spec.rb +10 -126
  75. data/spec/kafka_source_spec.rb +19 -52
  76. data/spec/karafka/karafka.rb +69 -0
  77. data/spec/karafka_config/karafka_spec.rb +97 -0
  78. data/spec/logging_spec.rb +25 -0
  79. data/spec/message_spec.rb +9 -9
  80. data/spec/producer_spec.rb +112 -254
  81. data/spec/rake_spec.rb +1 -3
  82. data/spec/schema_backends/avro_validation_spec.rb +1 -1
  83. data/spec/schemas/com/my-namespace/MySchemaWithTitle.avsc +22 -0
  84. data/spec/snapshots/consumers-no-nest.snap +49 -0
  85. data/spec/snapshots/consumers.snap +49 -0
  86. data/spec/snapshots/consumers_and_producers-no-nest.snap +49 -0
  87. data/spec/snapshots/consumers_and_producers.snap +49 -0
  88. data/spec/snapshots/consumers_circular-no-nest.snap +49 -0
  89. data/spec/snapshots/consumers_circular.snap +49 -0
  90. data/spec/snapshots/consumers_complex_types-no-nest.snap +49 -0
  91. data/spec/snapshots/consumers_complex_types.snap +49 -0
  92. data/spec/snapshots/consumers_nested-no-nest.snap +49 -0
  93. data/spec/snapshots/consumers_nested.snap +49 -0
  94. data/spec/snapshots/namespace_folders.snap +49 -0
  95. data/spec/snapshots/namespace_map.snap +49 -0
  96. data/spec/snapshots/producers_with_key-no-nest.snap +49 -0
  97. data/spec/snapshots/producers_with_key.snap +49 -0
  98. data/spec/spec_helper.rb +61 -29
  99. data/spec/utils/db_poller_spec.rb +49 -39
  100. data/spec/utils/{db_producer_spec.rb → outbox_producer_spec.rb} +17 -184
  101. metadata +58 -67
  102. data/lib/deimos/batch_consumer.rb +0 -7
  103. data/lib/deimos/config/phobos_config.rb +0 -164
  104. data/lib/deimos/instrumentation.rb +0 -95
  105. data/lib/deimos/monkey_patches/phobos_cli.rb +0 -35
  106. data/lib/deimos/utils/inline_consumer.rb +0 -158
  107. data/lib/deimos/utils/lag_reporter.rb +0 -186
  108. data/lib/deimos/utils/schema_controller_mixin.rb +0 -129
  109. data/spec/config/configuration_spec.rb +0 -329
  110. data/spec/kafka_listener_spec.rb +0 -55
  111. data/spec/phobos.bad_db.yml +0 -73
  112. data/spec/phobos.yml +0 -77
  113. data/spec/utils/inline_consumer_spec.rb +0 -31
  114. data/spec/utils/lag_reporter_spec.rb +0 -76
  115. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  116. data/spec/utils/schema_controller_mixin_spec.rb +0 -84
  117. /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/migration +0 -0
  118. /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/rails3_migration +0 -0
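The headline change in this release is the move from Phobos to Karafka: per-producer and per-consumer configuration leaves the `Deimos.configure` block and moves into Karafka's setup block and topic routes, as the diff of `data/lib/deimos/config/configuration.rb` below spells out setting by setting. As a rough orientation only, here is a minimal sketch of where a few common v1 settings land; the class, topic, and schema names are hypothetical, and the route calls follow the wording of the `removed:` messages in the diff rather than a confirmed v2 API (see data/docs/UPGRADING.md in this release for the authoritative mapping):

    # karafka.rb -- illustrative sketch only
    class KarafkaApp < Karafka::App
      setup do |config|
        config.client_id = 'my_app'                              # was kafka.client_id
        config.kafka = { 'bootstrap.servers': 'localhost:9092' } # was kafka.seed_brokers
      end

      routes.draw do
        topic 'my-topic' do
          consumer MyConsumer                 # was consumer.class_name
          schema(schema: 'MySchema',          # was consumer.schema
                 namespace: 'com.my-company', # was consumer.namespace
                 key_config: { field: :id })  # was consumer.key_config
          batch true                          # was delivery :inline_batch
        end
      end
    end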
data/lib/deimos/config/configuration.rb
@@ -1,332 +1,121 @@
 # frozen_string_literal: true
 
 require 'fig_tree'
-require_relative 'phobos_config'
 require_relative '../metrics/mock'
 require_relative '../tracing/mock'
-require 'active_support/core_ext/numeric'
+require 'active_support/core_ext/object'
 
 # :nodoc:
 module Deimos # rubocop:disable Metrics/ModuleLength
   include FigTree
 
-  # :nodoc:
-  class FigTree::ConfigStruct
-    include Deimos::PhobosConfig
-  end
-
   # :nodoc:
   after_configure do
-    Phobos.configure(self.config.phobos_config)
     if self.config.schema.use_schema_classes
       load_generated_schema_classes
     end
-    self.config.producer_objects.each do |producer|
-      configure_producer_or_consumer(producer)
-    end
-    self.config.consumer_objects.each do |consumer|
-      configure_producer_or_consumer(consumer)
-    end
-    validate_consumers
-    validate_db_backend if self.config.producers.backend == :db
+    generate_key_schemas
+    validate_outbox_backend if self.config.producers.backend == :outbox
   end
 
-  # Loads generated classes
-  # @return [void]
-  def self.load_generated_schema_classes
-    if Deimos.config.schema.generated_class_path.nil?
-      raise 'Cannot use schema classes without schema.generated_class_path. Please provide a directory.'
-    end
+  class << self
 
-    Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].sort.each { |f| require f }
-  rescue LoadError
-    raise 'Cannot load schema classes. Please regenerate classes with rake deimos:generate_schema_models.'
-  end
+    def generate_key_schemas
+      Deimos.karafka_configs.each do |config|
+        transcoder = config.deserializers[:key]
 
-  # Ensure everything is set up correctly for the DB backend.
-  # @!visibility private
-  def self.validate_db_backend
-    begin
-      require 'activerecord-import'
-    rescue LoadError
-      raise 'Cannot set producers.backend to :db without activerecord-import! Please add it to your Gemfile.'
-    end
-    if Deimos.config.producers.required_acks != :all
-      raise 'Cannot set producers.backend to :db unless producers.required_acks is set to ":all"!'
+        if transcoder.respond_to?(:key_field) && transcoder.key_field
+          transcoder.backend = Deimos.schema_backend(schema: config.schema,
+                                                     namespace: config.namespace)
+          transcoder.backend.generate_key_schema(transcoder.key_field)
+        end
+      end
     end
-  end
-
-  # Validate that consumers are configured correctly, including their
-  # delivery mode.
-  # @!visibility private
-  def self.validate_consumers
-    Phobos.config.listeners.each do |listener|
-      handler_class = listener.handler.constantize
-      delivery = listener.delivery
 
-      next unless handler_class < Deimos::Consumer
-
-      # Validate that each consumer implements the correct method for its type
-      if delivery == 'inline_batch'
-        if handler_class.instance_method(:consume_batch).owner == Deimos::Consume::BatchConsumption
-          raise "BatchConsumer #{listener.handler} does not implement `consume_batch`"
-        end
-      elsif handler_class.instance_method(:consume).owner == Deimos::Consume::MessageConsumption
-        raise "Non-batch Consumer #{listener.handler} does not implement `consume`"
+    # Loads generated classes
+    # @return [void]
+    def load_generated_schema_classes
+      if Deimos.config.schema.generated_class_path.nil?
+        raise 'Cannot use schema classes without schema.generated_class_path. Please provide a directory.'
       end
+
+      Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].sort.each { |f| require f }
+    rescue LoadError
+      raise 'Cannot load schema classes. Please regenerate classes with rake deimos:generate_schema_models.'
    end
-  end
 
-  # @!visibility private
-  # @param kafka_config [FigTree::ConfigStruct]
-  # rubocop:disable Metrics/PerceivedComplexity, Metrics/AbcSize
-  def self.configure_producer_or_consumer(kafka_config)
-    klass = kafka_config.class_name.constantize
-    klass.class_eval do
-      topic(kafka_config.topic) if kafka_config.topic.present? && klass.respond_to?(:topic)
-      schema(kafka_config.schema) if kafka_config.schema.present?
-      namespace(kafka_config.namespace) if kafka_config.namespace.present?
-      key_config(**kafka_config.key_config) if kafka_config.key_config.present?
-      schema_class_config(kafka_config.use_schema_classes) if kafka_config.use_schema_classes.present?
-      if kafka_config.respond_to?(:bulk_import_id_column) # consumer
-        klass.config.merge!(
-          bulk_import_id_column: kafka_config.bulk_import_id_column,
-          replace_associations: if kafka_config.replace_associations.nil?
-                                  Deimos.config.consumers.replace_associations
-                                else
-                                  kafka_config.replace_associations
-                                end,
-          bulk_import_id_generator: kafka_config.bulk_import_id_generator ||
-                                    Deimos.config.consumers.bulk_import_id_generator,
-          save_associations_first: kafka_config.save_associations_first
-        )
+    # Ensure everything is set up correctly for the DB backend.
+    # @!visibility private
+    def validate_outbox_backend
+      begin
+        require 'activerecord-import'
+      rescue LoadError
+        raise 'Cannot set producers.backend to :outbox without activerecord-import! Please add it to your Gemfile.'
      end
    end
  end
+
  # rubocop:enable Metrics/PerceivedComplexity, Metrics/AbcSize
 
  define_settings do
-
-    # @return [Logger]
-    setting :logger, Logger.new(STDOUT)
-
-    # @return [Symbol]
-    setting :payload_log, :full
-
-    # @return [Logger]
-    setting :phobos_logger, default_proc: proc { Deimos.config.logger.clone }
+    setting :logger, removed: 'Use "logger" in Karafka setup block.'
+    setting :payload_log, removed: 'Use topic.payload_log in Karafka settings'
+    setting :phobos_logger, removed: 'Separate logger for Phobos is no longer supported'
 
     setting :kafka do
-
-      # @return [Logger]
-      setting :logger, default_proc: proc { Deimos.config.logger.clone }
-
-      # URL of the seed broker.
-      # @return [Array<String>]
-      setting :seed_brokers, ['localhost:9092']
-
-      # Identifier for this application.
-      # @return [String]
-      setting :client_id, 'phobos'
-
-      # The socket timeout for connecting to the broker, in seconds.
-      # @return [Integer]
-      setting :connect_timeout, 15
-
-      # The socket timeout for reading and writing to the broker, in seconds.
-      # @return [Integer]
-      setting :socket_timeout, 15
+      setting :logger, removed: "Karafka uses Rails logger by default"
+      setting :seed_brokers, ['localhost:9092'], removed: 'Use kafka(bootstrap.servers) in Karafka settings'
+      setting :client_id, 'phobos', removed: 'Use client_id in Karafka setup block.'
+      setting :connect_timeout, 15, removed: 'Use kafka(socket.connection.setup.timeout.ms) in Karafka settings'
+      setting :socket_timeout, 15, removed: 'Use kafka(socket.timeout.ms) in Karafka settings'
 
       setting :ssl do
-        # Whether SSL is enabled on the brokers.
-        # @return [Boolean]
-        setting :enabled
-
-        # a PEM encoded CA cert, a file path to the cert, or an Array of certs,
-        # to use with an SSL connection.
-        # @return [String|Array<String>]
-        setting :ca_cert
-
-        # a PEM encoded client cert to use with an SSL connection, or a file path
-        # to the cert.
-        # @return [String]
-        setting :client_cert
-
-        # a PEM encoded client cert key to use with an SSL connection.
-        # @return [String]
-        setting :client_cert_key
-
-        # Verify certificate hostname if supported (ruby >= 2.4.0)
-        setting :verify_hostname, true
-
-        # Use CA certs from system. This is useful to have enabled for Confluent Cloud
-        # @return [Boolean]
-        setting :ca_certs_from_system, false
+        setting :enabled, removed: 'Use kafka(security.protocol=ssl) in Karafka settings'
+        setting :ca_cert, removed: 'Use kafka(ssl.ca.pem) in Karafka settings'
+        setting :client_cert, removed: 'Use kafka(ssl.certificate.pem) in Karafka settings'
+        setting :client_cert_key, removed: 'Use kafka(ssl.key.pem) in Karafka settings'
+        setting :verify_hostname, removed: 'Use kafka(ssl.endpoint.identification.algorithm=https) in Karafka settings'
+        setting :ca_certs_from_system, removed: 'Should not be necessary with librdkafka.'
      end
 
      setting :sasl do
-        # Whether SASL is enabled on the brokers.
-        # @return [Boolean]
-        setting :enabled
-
-        # A KRB5 principal.
-        # @return [String]
-        setting :gssapi_principal
-
-        # A KRB5 keytab filepath.
-        # @return [String]
-        setting :gssapi_keytab
-
-        # Plain authorization ID. It needs to default to '' in order for it to work.
-        # This is because Phobos expects it to be truthy for using plain SASL.
-        # @return [String]
-        setting :plain_authzid, ''
-
-        # Plain username.
-        # @return [String]
-        setting :plain_username
-
-        # Plain password.
-        # @return [String]
-        setting :plain_password
-
-        # SCRAM username.
-        # @return [String]
-        setting :scram_username
-
-        # SCRAM password.
-        # @return [String]
-        setting :scram_password
-
-        # Scram mechanism, either "sha256" or "sha512".
-        # @return [String]
-        setting :scram_mechanism
-
-        # Whether to enforce SSL with SASL.
-        # @return [Boolean]
-        setting :enforce_ssl
-
-        # OAuthBearer Token Provider instance that implements
-        # method token. See {Sasl::OAuth#initialize}.
-        # @return [Object]
-        setting :oauth_token_provider
+        setting :enabled, removed: 'Use kafka(security.protocol=sasl_ssl or sasl_plaintext) in Karafka settings'
+        setting :gssapi_principal, removed: 'Use kafka(sasl.kerberos.principal) in Karafka settings'
+        setting :gssapi_keytab, removed: 'Use kafka(sasl.kerberos.keytab) in Karafka settings'
+        setting :plain_authzid, removed: 'No longer needed with rdkafka'
+        setting :plain_username, removed: 'Use kafka(sasl.username) in Karafka settings'
+        setting :plain_password, removed: 'Use kafka(sasl.password) in Karafka settings'
+        setting :scram_username, removed: 'Use kafka(sasl.username) in Karafka settings'
+        setting :scram_password, removed: 'Use kafka(sasl.password) in Karafka settings'
+        setting :scram_mechanism, removed: 'Use kafka(sasl.mechanisms) in Karafka settings'
+        setting :enforce_ssl, removed: 'Use kafka(security.protocol=sasl_ssl) in Karafka settings'
+        setting :oauth_token_provider, removed: 'See rdkafka configs for details'
      end
    end
 
     setting :consumers do
-
-      # Number of seconds after which, if a client hasn't contacted the Kafka cluster,
-      # it will be kicked out of the group.
-      # @return [Integer]
-      setting :session_timeout, 300
-
-      # Interval between offset commits, in seconds.
-      # @return [Integer]
-      setting :offset_commit_interval, 10
-
-      # Number of messages that can be processed before their offsets are committed.
-      # If zero, offset commits are not triggered by message processing
-      # @return [Integer]
-      setting :offset_commit_threshold, 0
-
-      # Interval between heartbeats; must be less than the session window.
-      # @return [Integer]
-      setting :heartbeat_interval, 10
-
-      # Minimum and maximum number of milliseconds to back off after a consumer
-      # error.
-      setting :backoff, (1000..60_000)
-
-      # By default, consumer errors will be consumed and logged to
-      # the metrics provider.
-      # Set this to true to force the error to be raised.
-      # @return [Boolean]
-      setting :reraise_errors
-
-      # @return [Boolean]
-      setting :report_lag
-
-      # Block taking an exception, payload and metadata and returning
-      # true if this should be considered a fatal error and false otherwise.
-      # Not needed if reraise_errors is set to true.
-      # @return [Block]
-      setting(:fatal_error, proc { false })
-
-      # The default function to generate a bulk ID for bulk consumers
-      # @return [Block]
-      setting(:bulk_import_id_generator, proc { SecureRandom.uuid })
-
-      # If true, multi-table consumers will blow away associations rather than appending to them.
-      # Applies to all consumers unless specified otherwise
-      # @return [Boolean]
-      setting :replace_associations, true
+      setting :reraise_errors, removed: 'Use topic.reraise_errors in Karafka settings'
+      setting :report_lag, removed: "Use Karafka's built in lag reporting"
+      setting(:fatal_error, removed: "Use topic.fatal_error in Karafka settings")
+      setting(:bulk_import_id_generator, removed: "Use topic.bulk_import_id_generator in Karafka settings")
+      setting :save_associations_first, removed: "Use topic.save_associations_first"
+      setting :replace_associations, removed: "Use topic.replace_associations in Karafka settings"
    end
 
     setting :producers do
-      # Number of seconds a broker can wait for replicas to acknowledge
-      # a write before responding with a timeout.
-      # @return [Integer]
-      setting :ack_timeout, 5
-
-      # Number of replicas that must acknowledge a write, or `:all`
-      # if all in-sync replicas must acknowledge.
-      # @return [Integer|Symbol]
-      setting :required_acks, 1
-
-      # Number of retries that should be attempted before giving up sending
-      # messages to the cluster. Does not include the original attempt.
-      # @return [Integer]
-      setting :max_retries, 2
-
-      # Number of seconds to wait between retries.
-      # @return [Integer]
-      setting :retry_backoff, 1
-
-      # Number of messages allowed in the buffer before new writes will
-      # raise {BufferOverflow} exceptions.
-      # @return [Integer]
-      setting :max_buffer_size, 10_000
-
-      # Maximum size of the buffer in bytes. Attempting to produce messages
-      # when the buffer reaches this size will result in {BufferOverflow} being raised.
-      # @return [Integer]
-      setting :max_buffer_bytesize, 10_000_000
-
-      # Name of the compression codec to use, or nil if no compression should be performed.
-      # Valid codecs: `:snappy` and `:gzip`
-      # @return [Symbol]
-      setting :compression_codec
-
-      # Number of messages that needs to be in a message set before it should be compressed.
-      # Note that message sets are per-partition rather than per-topic or per-producer.
-      # @return [Integer]
-      setting :compression_threshold, 1
-
-      # Maximum number of messages allowed in the queue. Only used for async_producer.
-      # @return [Integer]
-      setting :max_queue_size, 10_000
-
-      # If greater than zero, the number of buffered messages that will automatically
-      # trigger a delivery. Only used for async_producer.
-      # @return [Integer]
-      setting :delivery_threshold, 0
-
-      # if greater than zero, the number of seconds between automatic message
-      # deliveries. Only used for async_producer.
-      # @return [Integer]
-      setting :delivery_interval, 0
-
-      # Set this to true to keep the producer connection between publish calls.
-      # This can speed up subsequent messages by around 30%, but it does mean
-      # that you need to manually call sync_producer_shutdown before exiting,
-      # similar to async_producer_shutdown.
-      # @return [Boolean]
-      setting :persistent_connections, false
-
-      # Default namespace for all producers. Can remain nil. Individual
-      # producers can override.
-      # @return [String]
-      setting :schema_namespace
+      setting :ack_timeout, removed: "Not supported in rdkafka"
+      setting :required_acks, 1, removed: "Use kafka(request.required.acks) in Karafka settings"
+      setting :max_retries, removed: "Use kafka(message.send.max.retries) in Karafka settings"
+      setting :retry_backoff, removed: "Use kafka(retry.backoff.ms) in Karafka settings"
+      setting :max_buffer_size, removed: "Not relevant with Karafka. You may want to see the queue.buffering.max.messages setting."
+      setting :max_buffer_bytesize, removed: "Not relevant with Karafka."
+      setting :compression_codec, removed: "Use kafka(compression.codec) in Karafka settings"
+      setting :compression_threshold, removed: "Not supported in Karafka."
+      setting :max_queue_size, removed: "Not relevant to Karafka."
+      setting :delivery_threshold, removed: "Not relevant to Karafka."
+      setting :delivery_interval, removed: "Not relevant to Karafka."
+      setting :persistent_connections, removed: "Karafka connections are always persistent."
+      setting :schema_namespace, removed: "Use topic.namespace in Karafka settings"
 
       # Add a prefix to all topic names. This can be useful if you're using
       # the same Kafka broker for different environments that are producing
@@ -371,9 +160,9 @@ module Deimos # rubocop:disable Metrics/ModuleLength
       # @return [String]
       setting :generated_class_path, 'app/lib/schema_classes'
 
-      # Set to true to use the generated schema classes in your application
+      # Set to true to use the generated schema classes in your application.
       # @return [Boolean]
-      setting :use_schema_classes, false
+      setting :use_schema_classes
 
       # Set to false to generate child schemas as their own files.
       # @return [Boolean]
@@ -398,10 +187,10 @@ module Deimos # rubocop:disable Metrics/ModuleLength
     # @return [Tracing::Provider]
     setting :tracer, default_proc: proc { Tracing::Mock.new }
 
-    setting :db_producer do
+    setting :outbox do
 
       # @return [Logger]
-      setting :logger, default_proc: proc { Deimos.config.logger }
+      setting :logger, default_proc: proc { Karafka.logger }
 
       # @return [Symbol|Array<String>] A list of topics to log all messages, or
       # :all to log all topics.
@@ -413,91 +202,48 @@ module Deimos # rubocop:disable Metrics/ModuleLength
 
     end
 
+    setting :db_producer do
+      setting :logger, removed: "Use outbox.logger"
+      setting :log_topics, removed: "Use outbox.log_topics"
+      setting :compact_topics, removed: "Use outbox.compact_topics"
+    end
+
     setting_object :producer do
-      # Producer class.
-      # @return [String]
-      setting :class_name
-      # Topic to produce to.
-      # @return [String]
-      setting :topic
-      # Schema of the data in the topic.
-      # @return [String]
-      setting :schema
-      # Optional namespace to access the schema.
-      # @return [String]
-      setting :namespace
-      # Key configuration (see docs).
-      # @return [Hash]
-      setting :key_config
-      # Configure the usage of generated schema classes for this producer
-      # @return [Boolean]
-      setting :use_schema_classes
-      # If true, and using the multi-table feature of ActiveRecordConsumers, replace associations
-      # instead of appending to them.
-      # @return [Boolean]
-      setting :replace_associations
+      setting :class_name, removed: "Use topic.producer_class in Karafka settings."
+      setting :topic, removed: "Use Karafka settings."
+      setting :schema, removed: "Use topic.schema(schema:) in Karafka settings."
+      setting :namespace, removed: "Use topic.schema(namespace:) in Karafka settings."
+      setting :key_config, removed: "Use topic.schema(key_config:) in Karafka settings."
+      setting :use_schema_classes, removed: "Use topic.schema(use_schema_classes:) in Karafka settings."
    end
 
     setting_object :consumer do
-      # Consumer class.
-      # @return [String]
-      setting :class_name
-      # Topic to read from.
-      # @return [String]
-      setting :topic
-      # Schema of the data in the topic.
-      # @return [String]
-      setting :schema
-      # Optional namespace to access the schema.
-      # @return [String]
-      setting :namespace
-      # Key configuration (see docs).
-      # @return [Hash]
-      setting :key_config
-      # Set to true to ignore the consumer in the Phobos config and not actually start up a
-      # listener.
-      # @return [Boolean]
-      setting :disabled, false
-      # Configure the usage of generated schema classes for this consumer
-      # @return [Boolean]
-      setting :use_schema_classes
-      # Optional maximum limit for batching database calls to reduce the load on the db.
-      # @return [Integer]
-      setting :max_db_batch_size
-      # Column to use for bulk imports, for multi-table feature.
-      # @return [String]
-      setting :bulk_import_id_column, :bulk_import_id
-      # If true, multi-table consumers will blow away associations rather than appending to them.
-      # @return [Boolean]
-      setting :replace_associations, nil
-
-      # The default function to generate a bulk ID for this consumer
-      # Uses the consumers proc defined in the consumers config by default unless
-      # specified for individual consumers
-      # @return [Block]
-      setting :bulk_import_id_generator, nil
-
-      # If enabled save associated records prior to saving the main record class
-      # This will also set foreign keys for associated records
-      # @return [Boolean]
-      setting :save_associations_first, false
-
-      # These are the phobos "listener" configs. See CONFIGURATION.md for more
-      # info.
-      setting :group_id
-      setting :max_concurrency, 1
-      setting :start_from_beginning, true
-      setting :max_bytes_per_partition, 500.kilobytes
-      setting :min_bytes, 1
-      setting :max_wait_time, 5
-      setting :force_encoding
-      setting :delivery, :batch
-      setting :backoff
-      setting :session_timeout, 300
-      setting :offset_commit_interval, 10
-      setting :offset_commit_threshold, 0
-      setting :offset_retention_time
-      setting :heartbeat_interval, 10
+      setting :class_name, removed: "Use topic.consumer in Karafka settings."
+      setting :topic, removed: "Use Karafka settings."
+      setting :schema, removed: "Use topic.schema(schema:) in Karafka settings."
+      setting :namespace, removed: "Use topic.schema(namespace:) in Karafka settings."
+      setting :key_config, removed: "Use topic.schema(key_config:) in Karafka settings."
+      setting :disabled, removed: "Use topic.active in Karafka settings."
+      setting :use_schema_classes, removed: "Use topic.use_schema_classes in Karafka settings."
+      setting :max_db_batch_size, removed: "Use topic.max_db_batch_size in Karafka settings."
+      setting :bulk_import_id_column, removed: "Use topic.bulk_import_id_column in Karafka settings."
+      setting :replace_associations, removed: "Use topic.replace_associations in Karafka settings."
+      setting :bulk_import_id_generator, removed: "Use topic.bulk_import_id_generator in Karafka settings."
+      setting :save_associations_first, removed: "Use topic.save_associations_first"
+      setting :group_id, removed: "Use kafka(group.id) in Karafka settings."
+      setting :max_concurrency, removed: "Use Karafka's 'config.concurrency' in the setup block."
+      setting :start_from_beginning, removed: "Use initial_offset in the setup block, or kafka(auto.offset.reset) in topic settings."
+      setting :max_bytes_per_partition, removed: "Use max_messages in the setup block."
+      setting :min_bytes, removed: "Not supported in Karafka."
+      setting :max_wait_time, removed: "Use max_wait_time in the setup block."
+      setting :force_encoding, removed: "Not supported with Karafka."
+      setting :delivery, :batch, removed: "Use batch: true/false in Karafka topic configs."
+      setting :backoff, removed: "Use kafka(retry.backoff.ms) and retry.backoff.max.ms in Karafka settings."
+      setting :session_timeout, removed: "Use kafka(session.timeout.ms) in Karafka settings."
+      setting :offset_commit_interval, removed: "Use kafka(auto.commit.interval.ms) in Karafka settings."
+      setting :offset_commit_threshold, removed: "Not supported with Karafka."
+      setting :offset_retention_time, removed: "Not supported with Karafka."
+      setting :heartbeat_interval, removed: "Use kafka(heartbeat.interval.ms) in Karafka settings."
    end
 
     setting_object :db_poller do
@@ -541,20 +287,5 @@ module Deimos # rubocop:disable Metrics/ModuleLength
       setting :poller_class, nil
     end
 
-    deprecate 'kafka_logger', 'kafka.logger'
-    deprecate 'reraise_consumer_errors', 'consumers.reraise_errors'
-    deprecate 'schema_registry_url', 'schema.registry_url'
-    deprecate 'seed_broker', 'kafka.seed_brokers'
-    deprecate 'schema_path', 'schema.path'
-    deprecate 'producer_schema_namespace', 'producers.schema_namespace'
-    deprecate 'producer_topic_prefix', 'producers.topic_prefix'
-    deprecate 'disable_producers', 'producers.disabled'
-    deprecate 'ssl_enabled', 'kafka.ssl.enabled'
-    deprecate 'ssl_ca_cert', 'kafka.ssl.ca_cert'
-    deprecate 'ssl_client_cert', 'kafka.ssl.client_cert'
-    deprecate 'ssl_client_cert_key', 'kafka.ssl.client_cert_key'
-    deprecate 'publish_backend', 'producers.backend'
-    deprecate 'report_lag', 'consumers.report_lag'
-
   end
 end
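Note that `validate_db_backend` survives as `validate_outbox_backend`: the `:db` producer backend is renamed to `:outbox` (matching the `backends/db.rb → outbox.rb` rename in the file list) and still requires `activerecord-import`, while the v1 check on `producers.required_acks` is dropped. A minimal sketch of opting in, assuming the v1-style `Deimos.configure` block, which this file retains:

    # Gemfile
    gem 'activerecord-import' # required when producers.backend is :outbox

    # Deimos initializer
    Deimos.configure do
      producers.backend = :outbox # was :db in 1.x
    end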