deimos-ruby 1.24.2 → 2.0.0.pre.alpha1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop_todo.yml +0 -17
- data/.tool-versions +1 -0
- data/CHANGELOG.md +5 -0
- data/README.md +287 -498
- data/deimos-ruby.gemspec +4 -4
- data/docs/CONFIGURATION.md +133 -226
- data/docs/UPGRADING.md +237 -0
- data/lib/deimos/active_record_consume/batch_consumption.rb +29 -28
- data/lib/deimos/active_record_consume/mass_updater.rb +59 -4
- data/lib/deimos/active_record_consume/message_consumption.rb +15 -21
- data/lib/deimos/active_record_consumer.rb +36 -21
- data/lib/deimos/active_record_producer.rb +28 -9
- data/lib/deimos/backends/base.rb +4 -35
- data/lib/deimos/backends/kafka.rb +6 -22
- data/lib/deimos/backends/kafka_async.rb +6 -22
- data/lib/deimos/backends/{db.rb → outbox.rb} +13 -9
- data/lib/deimos/config/configuration.rb +116 -379
- data/lib/deimos/consume/batch_consumption.rb +24 -124
- data/lib/deimos/consume/message_consumption.rb +36 -63
- data/lib/deimos/consumer.rb +16 -75
- data/lib/deimos/ext/consumer_route.rb +35 -0
- data/lib/deimos/ext/producer_middleware.rb +94 -0
- data/lib/deimos/ext/producer_route.rb +22 -0
- data/lib/deimos/ext/redraw.rb +29 -0
- data/lib/deimos/ext/routing_defaults.rb +72 -0
- data/lib/deimos/ext/schema_route.rb +70 -0
- data/lib/deimos/kafka_message.rb +2 -2
- data/lib/deimos/kafka_source.rb +2 -7
- data/lib/deimos/kafka_topic_info.rb +1 -1
- data/lib/deimos/logging.rb +71 -0
- data/lib/deimos/message.rb +2 -11
- data/lib/deimos/metrics/datadog.rb +40 -1
- data/lib/deimos/metrics/provider.rb +4 -4
- data/lib/deimos/producer.rb +39 -116
- data/lib/deimos/railtie.rb +6 -0
- data/lib/deimos/schema_backends/avro_base.rb +21 -21
- data/lib/deimos/schema_backends/avro_schema_registry.rb +1 -2
- data/lib/deimos/schema_backends/avro_validation.rb +2 -2
- data/lib/deimos/schema_backends/base.rb +19 -12
- data/lib/deimos/schema_backends/mock.rb +6 -1
- data/lib/deimos/schema_backends/plain.rb +47 -0
- data/lib/deimos/schema_class/base.rb +2 -2
- data/lib/deimos/schema_class/enum.rb +1 -1
- data/lib/deimos/schema_class/record.rb +2 -2
- data/lib/deimos/test_helpers.rb +95 -320
- data/lib/deimos/tracing/provider.rb +6 -6
- data/lib/deimos/transcoder.rb +88 -0
- data/lib/deimos/utils/db_poller/base.rb +16 -14
- data/lib/deimos/utils/db_poller/state_based.rb +3 -3
- data/lib/deimos/utils/db_poller/time_based.rb +4 -4
- data/lib/deimos/utils/db_poller.rb +1 -1
- data/lib/deimos/utils/deadlock_retry.rb +1 -1
- data/lib/deimos/utils/{db_producer.rb → outbox_producer.rb} +16 -47
- data/lib/deimos/utils/schema_class.rb +0 -7
- data/lib/deimos/version.rb +1 -1
- data/lib/deimos.rb +79 -26
- data/lib/generators/deimos/{db_backend_generator.rb → outbox_backend_generator.rb} +4 -4
- data/lib/generators/deimos/schema_class_generator.rb +0 -1
- data/lib/generators/deimos/v2/templates/karafka.rb.tt +149 -0
- data/lib/generators/deimos/v2_generator.rb +193 -0
- data/lib/tasks/deimos.rake +5 -7
- data/spec/active_record_batch_consumer_association_spec.rb +22 -13
- data/spec/active_record_batch_consumer_spec.rb +84 -65
- data/spec/active_record_consume/batch_consumption_spec.rb +10 -10
- data/spec/active_record_consume/batch_slicer_spec.rb +12 -12
- data/spec/active_record_consume/mass_updater_spec.rb +137 -0
- data/spec/active_record_consumer_spec.rb +29 -13
- data/spec/active_record_producer_spec.rb +36 -26
- data/spec/backends/base_spec.rb +0 -23
- data/spec/backends/kafka_async_spec.rb +1 -3
- data/spec/backends/kafka_spec.rb +1 -3
- data/spec/backends/{db_spec.rb → outbox_spec.rb} +14 -20
- data/spec/batch_consumer_spec.rb +66 -116
- data/spec/consumer_spec.rb +53 -147
- data/spec/deimos_spec.rb +10 -126
- data/spec/kafka_source_spec.rb +19 -52
- data/spec/karafka/karafka.rb +69 -0
- data/spec/karafka_config/karafka_spec.rb +97 -0
- data/spec/logging_spec.rb +25 -0
- data/spec/message_spec.rb +9 -9
- data/spec/producer_spec.rb +112 -254
- data/spec/rake_spec.rb +1 -3
- data/spec/schema_backends/avro_validation_spec.rb +1 -1
- data/spec/schemas/com/my-namespace/MySchemaWithTitle.avsc +22 -0
- data/spec/snapshots/consumers-no-nest.snap +49 -0
- data/spec/snapshots/consumers.snap +49 -0
- data/spec/snapshots/consumers_and_producers-no-nest.snap +49 -0
- data/spec/snapshots/consumers_and_producers.snap +49 -0
- data/spec/snapshots/consumers_circular-no-nest.snap +49 -0
- data/spec/snapshots/consumers_circular.snap +49 -0
- data/spec/snapshots/consumers_complex_types-no-nest.snap +49 -0
- data/spec/snapshots/consumers_complex_types.snap +49 -0
- data/spec/snapshots/consumers_nested-no-nest.snap +49 -0
- data/spec/snapshots/consumers_nested.snap +49 -0
- data/spec/snapshots/namespace_folders.snap +49 -0
- data/spec/snapshots/namespace_map.snap +49 -0
- data/spec/snapshots/producers_with_key-no-nest.snap +49 -0
- data/spec/snapshots/producers_with_key.snap +49 -0
- data/spec/spec_helper.rb +61 -29
- data/spec/utils/db_poller_spec.rb +49 -39
- data/spec/utils/{db_producer_spec.rb → outbox_producer_spec.rb} +17 -184
- metadata +58 -67
- data/lib/deimos/batch_consumer.rb +0 -7
- data/lib/deimos/config/phobos_config.rb +0 -163
- data/lib/deimos/instrumentation.rb +0 -95
- data/lib/deimos/monkey_patches/phobos_cli.rb +0 -35
- data/lib/deimos/utils/inline_consumer.rb +0 -158
- data/lib/deimos/utils/lag_reporter.rb +0 -186
- data/lib/deimos/utils/schema_controller_mixin.rb +0 -129
- data/spec/config/configuration_spec.rb +0 -321
- data/spec/kafka_listener_spec.rb +0 -55
- data/spec/phobos.bad_db.yml +0 -73
- data/spec/phobos.yml +0 -77
- data/spec/utils/inline_consumer_spec.rb +0 -31
- data/spec/utils/lag_reporter_spec.rb +0 -76
- data/spec/utils/platform_schema_validation_spec.rb +0 -0
- data/spec/utils/schema_controller_mixin_spec.rb +0 -84
- /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/migration +0 -0
- /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/rails3_migration +0 -0
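
The headline change in 2.0.0 is the move from the Phobos runtime to Karafka: per-producer and per-consumer settings leave `Deimos.configure` and move into Karafka's routing DSL, as the `removed:` hints throughout the configuration diff below spell out. As a rough sketch of where those settings land — every class name, topic, schema, and group id here is an invented placeholder mapped from those hints, not copied from the package:

```ruby
# Hypothetical karafka.rb sketch; MyConsumer, MySchema, and all names below
# are invented examples inferred from the `removed:` hints in the diff.
class KarafkaApp < Karafka::App
  setup do |config|
    config.client_id = 'my-app'                              # was kafka.client_id
    config.kafka = { 'bootstrap.servers': 'localhost:9092' } # was kafka.seed_brokers
  end

  routes.draw do
    topic 'my-topic' do
      consumer MyConsumer                   # was consumer.class_name
      schema(schema: 'MySchema',            # was consumer.schema
             namespace: 'com.my-namespace', # was consumer.namespace
             key_config: { field: 'id' })   # was consumer.key_config
      kafka('group.id': 'my-group')         # was consumer.group_id
    end
  end
end
```

The full mapping ships in docs/UPGRADING.md (new in this release), and the v2 generator (lib/generators/deimos/v2_generator.rb with its karafka.rb.tt template) can emit a starter `karafka.rb` from an existing v1 config.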
data/lib/deimos/config/configuration.rb

@@ -1,331 +1,121 @@
 # frozen_string_literal: true
 
 require 'fig_tree'
-require_relative 'phobos_config'
 require_relative '../metrics/mock'
 require_relative '../tracing/mock'
-require 'active_support/core_ext/
+require 'active_support/core_ext/object'
 
 # :nodoc:
 module Deimos # rubocop:disable Metrics/ModuleLength
   include FigTree
 
-  # :nodoc:
-  class FigTree::ConfigStruct
-    include Deimos::PhobosConfig
-  end
-
   # :nodoc:
   after_configure do
-    Phobos.configure(self.config.phobos_config)
     if self.config.schema.use_schema_classes
       load_generated_schema_classes
     end
-
-
-    end
-    self.config.consumer_objects.each do |consumer|
-      configure_producer_or_consumer(consumer)
-    end
-    validate_consumers
-    validate_db_backend if self.config.producers.backend == :db
+    generate_key_schemas
+    validate_outbox_backend if self.config.producers.backend == :outbox
   end
 
-
-  # @return [void]
-  def self.load_generated_schema_classes
-    if Deimos.config.schema.generated_class_path.nil?
-      raise 'Cannot use schema classes without schema.generated_class_path. Please provide a directory.'
-    end
+  class << self
 
-
-
-
-  end
+    def generate_key_schemas
+      Deimos.karafka_configs.each do |config|
+        transcoder = config.deserializers[:key]
 
-
-
-
-
-
-
-        raise 'Cannot set producers.backend to :db without activerecord-import! Please add it to your Gemfile.'
-      end
-      if Deimos.config.producers.required_acks != :all
-        raise 'Cannot set producers.backend to :db unless producers.required_acks is set to ":all"!'
+        if transcoder.respond_to?(:key_field) && transcoder.key_field
+          transcoder.backend = Deimos.schema_backend(schema: config.schema,
+                                                     namespace: config.namespace)
+          transcoder.backend.generate_key_schema(transcoder.key_field)
+        end
+      end
     end
-  end
-
-  # Validate that consumers are configured correctly, including their
-  # delivery mode.
-  # @!visibility private
-  def self.validate_consumers
-    Phobos.config.listeners.each do |listener|
-      handler_class = listener.handler.constantize
-      delivery = listener.delivery
-
-      next unless handler_class < Deimos::Consumer
 
-
-
-
-
-
-      elsif handler_class.instance_method(:consume).owner == Deimos::Consume::MessageConsumption
-        raise "Non-batch Consumer #{listener.handler} does not implement `consume`"
+    # Loads generated classes
+    # @return [void]
+    def load_generated_schema_classes
+      if Deimos.config.schema.generated_class_path.nil?
+        raise 'Cannot use schema classes without schema.generated_class_path. Please provide a directory.'
       end
+
+      Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].sort.each { |f| require f }
+    rescue LoadError
+      raise 'Cannot load schema classes. Please regenerate classes with rake deimos:generate_schema_models.'
    end
-  end
 
-
-
-
-
-
-
-
-      schema(kafka_config.schema) if kafka_config.schema.present?
-      namespace(kafka_config.namespace) if kafka_config.namespace.present?
-      key_config(**kafka_config.key_config) if kafka_config.key_config.present?
-      schema_class_config(kafka_config.use_schema_classes) if kafka_config.use_schema_classes.present?
-      if kafka_config.respond_to?(:bulk_import_id_column) # consumer
-        klass.config.merge!(
-          bulk_import_id_column: kafka_config.bulk_import_id_column,
-          replace_associations: if kafka_config.replace_associations.nil?
-                                  Deimos.config.consumers.replace_associations
-                                else
-                                  kafka_config.replace_associations
-                                end,
-          bulk_import_id_generator: kafka_config.bulk_import_id_generator ||
-            Deimos.config.consumers.bulk_import_id_generator
-        )
+    # Ensure everything is set up correctly for the DB backend.
+    # @!visibility private
+    def validate_outbox_backend
+      begin
+        require 'activerecord-import'
+      rescue LoadError
+        raise 'Cannot set producers.backend to :outbox without activerecord-import! Please add it to your Gemfile.'
      end
    end
  end
+
  # rubocop:enable Metrics/PerceivedComplexity, Metrics/AbcSize
 
  define_settings do
-
-
-    setting :
-
-    # @return [Symbol]
-    setting :payload_log, :full
-
-    # @return [Logger]
-    setting :phobos_logger, default_proc: proc { Deimos.config.logger.clone }
+    setting :logger, removed: 'Use "logger" in Karafka setup block.'
+    setting :payload_log, removed: 'Use topic.payload_log in Karafka settings'
+    setting :phobos_logger, removed: 'Separate logger for Phobos is no longer supported'
 
    setting :kafka do
-
-
-      setting :
-
-
-      # @return [Array<String>]
-      setting :seed_brokers, ['localhost:9092']
-
-      # Identifier for this application.
-      # @return [String]
-      setting :client_id, 'phobos'
-
-      # The socket timeout for connecting to the broker, in seconds.
-      # @return [Integer]
-      setting :connect_timeout, 15
-
-      # The socket timeout for reading and writing to the broker, in seconds.
-      # @return [Integer]
-      setting :socket_timeout, 15
+      setting :logger, removed: "Karafka uses Rails logger by default"
+      setting :seed_brokers, ['localhost:9092'], removed: 'Use kafka(bootstrap.servers) in Karafka settings'
+      setting :client_id, 'phobos', removed: 'Use client_id in Karafka setup block.'
+      setting :connect_timeout, 15, removed: 'Use kafka(socket.connection.setup.timeout.ms) in Karafka settings'
+      setting :socket_timeout, 15, removed: 'Use kafka(socket.timeout.ms) in Karafka settings'
 
      setting :ssl do
-
-
-        setting :
-
-
-
-        # @return [String|Array<String>]
-        setting :ca_cert
-
-        # a PEM encoded client cert to use with an SSL connection, or a file path
-        # to the cert.
-        # @return [String]
-        setting :client_cert
-
-        # a PEM encoded client cert key to use with an SSL connection.
-        # @return [String]
-        setting :client_cert_key
-
-        # Verify certificate hostname if supported (ruby >= 2.4.0)
-        setting :verify_hostname, true
-
-        # Use CA certs from system. This is useful to have enabled for Confluent Cloud
-        # @return [Boolean]
-        setting :ca_certs_from_system, false
+        setting :enabled, removed: 'Use kafka(security.protocol=ssl) in Karafka settings'
+        setting :ca_cert, removed: 'Use kafka(ssl.ca.pem) in Karafka settings'
+        setting :client_cert, removed: 'Use kafka(ssl.certificate.pem) in Karafka settings'
+        setting :client_cert_key, removed: 'Use kafka(ssl.key.pem) in Karafka settings'
+        setting :verify_hostname, removed: 'Use kafka(ssl.endpoint.identification.algorithm=https) in Karafka settings'
+        setting :ca_certs_from_system, removed: 'Should not be necessary with librdkafka.'
      end
 
      setting :sasl do
-
-
-        setting :
-
-
-
-        setting :
-
-
-
-        setting :
-
-        # Plain authorization ID. It needs to default to '' in order for it to work.
-        # This is because Phobos expects it to be truthy for using plain SASL.
-        # @return [String]
-        setting :plain_authzid, ''
-
-        # Plain username.
-        # @return [String]
-        setting :plain_username
-
-        # Plain password.
-        # @return [String]
-        setting :plain_password
-
-        # SCRAM username.
-        # @return [String]
-        setting :scram_username
-
-        # SCRAM password.
-        # @return [String]
-        setting :scram_password
-
-        # Scram mechanism, either "sha256" or "sha512".
-        # @return [String]
-        setting :scram_mechanism
-
-        # Whether to enforce SSL with SASL.
-        # @return [Boolean]
-        setting :enforce_ssl
-
-        # OAuthBearer Token Provider instance that implements
-        # method token. See {Sasl::OAuth#initialize}.
-        # @return [Object]
-        setting :oauth_token_provider
+        setting :enabled, removed: 'Use kafka(security.protocol=sasl_ssl or sasl_plaintext) in Karafka settings'
+        setting :gssapi_principal, removed: 'Use kafka(sasl.kerberos.principal) in Karafka settings'
+        setting :gssapi_keytab, removed: 'Use kafka(sasl.kerberos.keytab) in Karafka settings'
+        setting :plain_authzid, removed: 'No longer needed with rdkafka'
+        setting :plain_username, removed: 'Use kafka(sasl.username) in Karafka settings'
+        setting :plain_password, removed: 'Use kafka(sasl.password) in Karafka settings'
+        setting :scram_username, removed: 'Use kafka(sasl.username) in Karafka settings'
+        setting :scram_password, removed: 'Use kafka(sasl.password) in Karafka settings'
+        setting :scram_mechanism, removed: 'Use kafka(sasl.mechanisms) in Karafka settings'
+        setting :enforce_ssl, removed: 'Use kafka(security.protocol=sasl_ssl) in Karafka settings'
+        setting :oauth_token_provider, removed: 'See rdkafka configs for details'
      end
    end
 
    setting :consumers do
-
-
-
-
-      setting :
-
-      # Interval between offset commits, in seconds.
-      # @return [Integer]
-      setting :offset_commit_interval, 10
-
-      # Number of messages that can be processed before their offsets are committed.
-      # If zero, offset commits are not triggered by message processing
-      # @return [Integer]
-      setting :offset_commit_threshold, 0
-
-      # Interval between heartbeats; must be less than the session window.
-      # @return [Integer]
-      setting :heartbeat_interval, 10
-
-      # Minimum and maximum number of milliseconds to back off after a consumer
-      # error.
-      setting :backoff, (1000..60_000)
-
-      # By default, consumer errors will be consumed and logged to
-      # the metrics provider.
-      # Set this to true to force the error to be raised.
-      # @return [Boolean]
-      setting :reraise_errors
-
-      # @return [Boolean]
-      setting :report_lag
-
-      # Block taking an exception, payload and metadata and returning
-      # true if this should be considered a fatal error and false otherwise.
-      # Not needed if reraise_errors is set to true.
-      # @return [Block]
-      setting(:fatal_error, proc { false })
-
-      # The default function to generate a bulk ID for bulk consumers
-      # @return [Block]
-      setting(:bulk_import_id_generator, proc { SecureRandom.uuid })
-
-      # If true, multi-table consumers will blow away associations rather than appending to them.
-      # Applies to all consumers unless specified otherwise
-      # @return [Boolean]
-      setting :replace_associations, true
+      setting :reraise_errors, removed: 'Use topic.reraise_errors in Karafka settings'
+      setting :report_lag, removed: "Use Karafka's built in lag reporting"
+      setting(:fatal_error, removed: "Use topic.fatal_error in Karafka settings")
+      setting(:bulk_import_id_generator, removed: "Use topic.bulk_import_id_generator in Karafka settings")
+      setting :save_associations_first, removed: "Use topic.save_associations_first"
+      setting :replace_associations, removed: "Use topic.replace_associations in Karafka settings"
    end
 
    setting :producers do
-
-
-
-      setting :
-
-
-
-
-      setting :
-
-
-
-
-      setting :max_retries, 2
-
-      # Number of seconds to wait between retries.
-      # @return [Integer]
-      setting :retry_backoff, 1
-
-      # Number of messages allowed in the buffer before new writes will
-      # raise {BufferOverflow} exceptions.
-      # @return [Integer]
-      setting :max_buffer_size, 10_000
-
-      # Maximum size of the buffer in bytes. Attempting to produce messages
-      # when the buffer reaches this size will result in {BufferOverflow} being raised.
-      # @return [Integer]
-      setting :max_buffer_bytesize, 10_000_000
-
-      # Name of the compression codec to use, or nil if no compression should be performed.
-      # Valid codecs: `:snappy` and `:gzip`
-      # @return [Symbol]
-      setting :compression_codec
-
-      # Number of messages that needs to be in a message set before it should be compressed.
-      # Note that message sets are per-partition rather than per-topic or per-producer.
-      # @return [Integer]
-      setting :compression_threshold, 1
-
-      # Maximum number of messages allowed in the queue. Only used for async_producer.
-      # @return [Integer]
-      setting :max_queue_size, 10_000
-
-      # If greater than zero, the number of buffered messages that will automatically
-      # trigger a delivery. Only used for async_producer.
-      # @return [Integer]
-      setting :delivery_threshold, 0
-
-      # if greater than zero, the number of seconds between automatic message
-      # deliveries. Only used for async_producer.
-      # @return [Integer]
-      setting :delivery_interval, 0
-
-      # Set this to true to keep the producer connection between publish calls.
-      # This can speed up subsequent messages by around 30%, but it does mean
-      # that you need to manually call sync_producer_shutdown before exiting,
-      # similar to async_producer_shutdown.
-      # @return [Boolean]
-      setting :persistent_connections, false
-
-      # Default namespace for all producers. Can remain nil. Individual
-      # producers can override.
-      # @return [String]
-      setting :schema_namespace
+      setting :ack_timeout, removed: "Not supported in rdkafka"
+      setting :required_acks, 1, removed: "Use kafka(request.required.acks) in Karafka settings"
+      setting :max_retries, removed: "Use kafka(message.send.max.retries) in Karafka settings"
+      setting :retry_backoff, removed: "Use kafka(retry.backoff.ms) in Karafka settings"
+      setting :max_buffer_size, removed: "Not relevant with Karafka. You may want to see the queue.buffering.max.messages setting."
+      setting :max_buffer_bytesize, removed: "Not relevant with Karafka."
+      setting :compression_codec, removed: "Use kafka(compression.codec) in Karafka settings"
+      setting :compression_threshold, removed: "Not supported in Karafka."
+      setting :max_queue_size, removed: "Not relevant to Karafka."
+      setting :delivery_threshold, removed: "Not relevant to Karafka."
+      setting :delivery_interval, removed: "Not relevant to Karafka."
+      setting :persistent_connections, removed: "Karafka connections are always persistent."
+      setting :schema_namespace, removed: "Use topic.namespace in Karafka settings"
 
      # Add a prefix to all topic names. This can be useful if you're using
      # the same Kafka broker for different environments that are producing
@@ -370,9 +160,9 @@ module Deimos # rubocop:disable Metrics/ModuleLength
      # @return [String]
      setting :generated_class_path, 'app/lib/schema_classes'
 
-      # Set to true to use the generated schema classes in your application
+      # Set to true to use the generated schema classes in your application.
      # @return [Boolean]
-      setting :use_schema_classes
+      setting :use_schema_classes
 
      # Set to false to generate child schemas as their own files.
      # @return [Boolean]
@@ -397,10 +187,10 @@ module Deimos # rubocop:disable Metrics/ModuleLength
    # @return [Tracing::Provider]
    setting :tracer, default_proc: proc { Tracing::Mock.new }
 
-    setting :
+    setting :outbox do
 
      # @return [Logger]
-      setting :logger, default_proc: proc {
+      setting :logger, default_proc: proc { Karafka.logger }
 
      # @return [Symbol|Array<String>] A list of topics to log all messages, or
      # :all to log all topics.
@@ -412,86 +202,48 @@ module Deimos # rubocop:disable Metrics/ModuleLength
 
    end
 
+    setting :db_producer do
+      setting :logger, removed: "Use outbox.logger"
+      setting :log_topics, removed: "Use outbox.log_topics"
+      setting :compact_topics, removed: "Use outbox.compact_topics"
+    end
+
    setting_object :producer do
-
-
-      setting :
-
-
-      setting :topic
-      # Schema of the data in the topic.
-      # @return [String]
-      setting :schema
-      # Optional namespace to access the schema.
-      # @return [String]
-      setting :namespace
-      # Key configuration (see docs).
-      # @return [Hash]
-      setting :key_config
-      # Configure the usage of generated schema classes for this producer
-      # @return [Boolean]
-      setting :use_schema_classes
-      # If true, and using the multi-table feature of ActiveRecordConsumers, replace associations
-      # instead of appending to them.
-      # @return [Boolean]
-      setting :replace_associations
+      setting :class_name, removed: "Use topic.producer_class in Karafka settings."
+      setting :topic, removed: "Use Karafka settings."
+      setting :schema, removed: "Use topic.schema(schema:) in Karafka settings."
+      setting :namespace, removed: "Use topic.schema(namespace:) in Karafka settings."
+      setting :key_config, removed: "Use topic.schema(key_config:) in Karafka settings."
+      setting :use_schema_classes, removed: "Use topic.schema(use_schema_classes:) in Karafka settings."
    end
 
    setting_object :consumer do
-
-
-      setting :
-
-
-      setting :topic
-
-
-      setting :
-
-
-      setting :
-
-
-      setting :
-
-
-
-      setting :
-
-
-      setting :
-
-
-      setting :
-
-      # @return [String]
-      setting :bulk_import_id_column, :bulk_import_id
-      # If true, multi-table consumers will blow away associations rather than appending to them.
-      # @return [Boolean]
-      setting :replace_associations, nil
-
-      # The default function to generate a bulk ID for this consumer
-      # Uses the consumers proc defined in the consumers config by default unless
-      # specified for individual consumers
-      # @return [Block]
-      setting :bulk_import_id_generator, nil
-
-      # These are the phobos "listener" configs. See CONFIGURATION.md for more
-      # info.
-      setting :group_id
-      setting :max_concurrency, 1
-      setting :start_from_beginning, true
-      setting :max_bytes_per_partition, 500.kilobytes
-      setting :min_bytes, 1
-      setting :max_wait_time, 5
-      setting :force_encoding
-      setting :delivery, :batch
-      setting :backoff
-      setting :session_timeout, 300
-      setting :offset_commit_interval, 10
-      setting :offset_commit_threshold, 0
-      setting :offset_retention_time
-      setting :heartbeat_interval, 10
+      setting :class_name, removed: "Use topic.consumer in Karafka settings."
+      setting :topic, removed: "Use Karafka settings."
+      setting :schema, removed: "Use topic.schema(schema:) in Karafka settings."
+      setting :namespace, removed: "Use topic.schema(namespace:) in Karafka settings."
+      setting :key_config, removed: "Use topic.schema(key_config:) in Karafka settings."
+      setting :disabled, removed: "Use topic.active in Karafka settings."
+      setting :use_schema_classes, removed: "Use topic.use_schema_classes in Karafka settings."
+      setting :max_db_batch_size, removed: "Use topic.max_db_batch_size in Karafka settings."
+      setting :bulk_import_id_column, removed: "Use topic.bulk_import_id_column in Karafka settings."
+      setting :replace_associations, removed: "Use topic.replace_associations in Karafka settings."
+      setting :bulk_import_id_generator, removed: "Use topic.bulk_import_id_generator in Karafka settings."
+      setting :save_associations_first, removed: "Use topic.save_associations_first"
+      setting :group_id, removed: "Use kafka(group.id) in Karafka settings."
+      setting :max_concurrency, removed: "Use Karafka's 'config.concurrency' in the setup block."
+      setting :start_from_beginning, removed: "Use initial_offset in the setup block, or kafka(auto.offset.reset) in topic settings."
+      setting :max_bytes_per_partition, removed: "Use max_messages in the setup block."
+      setting :min_bytes, removed: "Not supported in Karafka."
+      setting :max_wait_time, removed: "Use max_wait_time in the setup block."
+      setting :force_encoding, removed: "Not supported with Karafka."
+      setting :delivery, :batch, removed: "Use batch: true/false in Karafka topic configs."
+      setting :backoff, removed: "Use kafka(retry.backoff.ms) and retry.backoff.max.ms in Karafka settings."
+      setting :session_timeout, removed: "Use kafka(session.timeout.ms) in Karafka settings."
+      setting :offset_commit_interval, removed: "Use kafka(auto.commit.interval.ms) in Karafka settings."
+      setting :offset_commit_threshold, removed: "Not supported with Karafka."
+      setting :offset_retention_time, removed: "Not supported with Karafka."
+      setting :heartbeat_interval, removed: "Use kafka(heartbeat.interval.ms) in Karafka settings."
    end
 
    setting_object :db_poller do
@@ -535,20 +287,5 @@ module Deimos # rubocop:disable Metrics/ModuleLength
      setting :poller_class, nil
    end
 
-    deprecate 'kafka_logger', 'kafka.logger'
-    deprecate 'reraise_consumer_errors', 'consumers.reraise_errors'
-    deprecate 'schema_registry_url', 'schema.registry_url'
-    deprecate 'seed_broker', 'kafka.seed_brokers'
-    deprecate 'schema_path', 'schema.path'
-    deprecate 'producer_schema_namespace', 'producers.schema_namespace'
-    deprecate 'producer_topic_prefix', 'producers.topic_prefix'
-    deprecate 'disable_producers', 'producers.disabled'
-    deprecate 'ssl_enabled', 'kafka.ssl.enabled'
-    deprecate 'ssl_ca_cert', 'kafka.ssl.ca_cert'
-    deprecate 'ssl_client_cert', 'kafka.ssl.client_cert'
-    deprecate 'ssl_client_cert_key', 'kafka.ssl.client_cert_key'
-    deprecate 'publish_backend', 'producers.backend'
-    deprecate 'report_lag', 'consumers.report_lag'
-
  end
 end
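
The diff above also completes the rename of the `db` producer backend to `outbox` (`validate_db_backend` → `validate_outbox_backend`, with the v1 `required_acks` precondition dropped; only the activerecord-import check remains). A minimal sketch of opting in under 2.x — assuming the fig_tree-style `Deimos.configure` block and activerecord-import in the Gemfile:

```ruby
# Sketch only: backend selection per the validate_outbox_backend check above.
Deimos.configure do
  producers.backend :outbox # was :db in 1.x; configuration now fails fast
                            # if activerecord-import cannot be required
end
```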