deimos-ruby 1.3.0.pre.beta5 → 1.4.0.pre.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -12,7 +12,7 @@ module Deimos
       @schema = schema
       @namespace = namespace
       @schema_store = schema_store ||
-                      AvroTurf::SchemaStore.new(path: Deimos.config.schema_path)
+                      AvroTurf::SchemaStore.new(path: Deimos.config.schema.path)
     end
 
     # @param schema [String]
@@ -27,7 +27,7 @@ module Deimos
     # @return [AvroTurf]
     def avro_turf
       @avro_turf ||= AvroTurf.new(
-        schemas_path: Deimos.config.schema_path,
+        schemas_path: Deimos.config.schema.path,
         schema_store: @schema_store
       )
       @avro_turf
@@ -37,8 +37,8 @@ module Deimos
     def avro_turf_messaging
       @avro_turf_messaging ||= AvroTurf::Messaging.new(
         schema_store: @schema_store,
-        registry_url: Deimos.config.schema_registry_url,
-        schemas_path: Deimos.config.schema_path,
+        registry_url: Deimos.config.schema.registry_url,
+        schemas_path: Deimos.config.schema.path,
         namespace: @namespace
       )
     end
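
The three hunks above all migrate the flat schema settings to the nested `schema` namespace. For migration context, a minimal sketch of the new style (the path value is hypothetical); the old flat keys keep working through the deprecation shims registered at the end of this diff:

    Deimos.configure do
      schema do
        registry_url 'http://localhost:8081'  # was config.schema_registry_url
        path 'app/schemas'                    # was config.schema_path
      end
    end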
@@ -20,11 +20,6 @@ module Deimos
             message
           end
           Deimos::KafkaMessage.import(records)
-          Deimos.config.metrics&.increment(
-            'db_producer.insert',
-            tags: %W(topic:#{producer_class.topic}),
-            by: records.size
-          )
         end
       end
     end
@@ -89,8 +89,8 @@ module Deimos
     def _handle_error(exception, payload, metadata)
       Deimos.config.tracer&.set_error(@span, exception)
 
-      raise if Deimos.config.reraise_consumer_errors ||
-               Deimos.config.fatal_error_block.call(exception, payload, metadata) ||
+      raise if Deimos.config.consumers.reraise_errors ||
+               Deimos.config.consumers.fatal_error&.call(exception, payload, metadata) ||
                fatal_error?(exception, payload, metadata)
     end
 
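
The error-handling settings move into the `consumers` namespace, and the fatal-error block is now called nil-safely (`&.call`) under its new name `fatal_error`. A sketch of configuring it under the new names (the error class is hypothetical):

    Deimos.configure do
      consumers do
        reraise_errors false
        # Treat only this hypothetical error as fatal; everything else is logged.
        fatal_error(proc { |exception, _payload, _metadata| exception.is_a?(MyApp::UnrecoverableError) })
      end
    end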
@@ -0,0 +1,258 @@
+# frozen_string_literal: true
+
+require 'active_support/concern'
+
+module Deimos
+  # Module to allow configuration. Loosely based off of the dry-configuration
+  # gem but with several advantages:
+  # - Works with Ruby 2.3.
+  # - More succinct syntax using method_missing, so you do not need to write
+  #   "config.whatever" and can just write "whatever".
+  # - Use nested blocks:
+  #     Deimos.configure do
+  #       config.kafka.ssl do
+  #         enabled true
+  #         ca_cert_file 'my_file'
+  #       end
+  #     end
+  # - Allows for arrays of configurations:
+  #     Deimos.configure do |config|
+  #       config.producer do
+  #         class_name 'MyProducer'
+  #         topic 'MyTopic'
+  #       end
+  #     end
+  # - Allows `configure` to be called multiple times without crashing.
+  # - Allows lazily setting default values by passing a proc as a default:
+  #     Deimos.configure do |config|
+  #       setting :my_val, default_proc: proc { MyDefault.calculated_value }
+  #     end
+  # - Support for defining deprecated configurations which automatically
+  #   forward to the new ones.
+  # - Support for configuration callbacks.
+  module Configurable
+    extend ActiveSupport::Concern
+
+    ConfigValue = Struct.new(:value, :default_value, :default_proc, :deprecation) do
+
+      # Reset value back to default.
+      def reset!
+        if self.value.is_a?(ConfigStruct)
+          self.value.reset!
+        elsif self.default_proc
+          self.value = self.default_proc.call
+        else
+          self.value = self.default_value
+        end
+      end
+
+      # :nodoc:
+      def clone_and_reset
+        setting = ConfigValue.new(self.value, self.default_value,
+                                  self.default_proc, self.deprecation)
+        setting.reset!
+        setting
+      end
+
+    end
+
+    # Class that defines and keeps the configuration values.
+    class ConfigStruct
+      include ActiveSupport::Callbacks
+
+      define_callbacks :configure
+
+      # @param name [String]
+      def initialize(name)
+        @name = name
+        @settings = {}
+        @setting_objects = {}
+        @setting_templates = {}
+      end
+
+      # Reset config back to default values.
+      def reset!
+        @setting_objects = @setting_templates.map { |k, _| [k, []] }.to_h
+        @settings.values.each(&:reset!)
+      end
+
+      # Mark a configuration as deprecated and replaced with the new config.
+      # @param old_config [String]
+      # @param new_config [String]
+      def deprecate(old_config, new_config)
+        @settings[old_config.to_sym] ||= ConfigValue.new
+        @settings[old_config.to_sym].deprecation = new_config
+      end
+
+      # :nodoc:
+      def inspect
+        "#{@name}: #{@settings.inspect} #{@setting_objects.inspect}"
+      end
+
+      # @return [Hash]
+      def to_h
+        @settings.map { |k, v| [k, v.value] }.to_h
+      end
+
+      # :nodoc:
+      def clone_and_reset
+        new_config = self.clone
+        new_config.setting_objects = new_config.setting_objects.clone
+        new_config.settings = new_config.settings.map { |k, v| [k, v.clone_and_reset] }.to_h
+        new_config
+      end
+
+      # Define a setting template for an array of objects via a block:
+      #   setting_object :producer do
+      #     setting :topic
+      #     setting :class_name
+      #   end
+      # This will create the `producer` method to define these values as well
+      # as the `producer_objects` method to retrieve them.
+      # @param name [Symbol]
+      def setting_object(name, &block)
+        new_config = ConfigStruct.new("#{@name}.#{name}")
+        @setting_objects[name] = []
+        @setting_templates[name] = new_config
+        new_config.instance_eval(&block)
+      end
+
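To make the template semantics concrete, a sketch of the producer template described in the comment above (Deimos defines this template in the config file later in this diff; names are placeholders):

    Deimos.configure do |config|
      config.producer do                        # builds a new object from the template
        class_name 'MyProducer'
        topic 'MyTopic'
      end
    end
    Deimos.config.producer_objects.first.topic  # => 'MyTopic'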
+      # Define a setting with the given name.
+      # @param name [Symbol]
+      # @param default_value [Object]
+      # @param default_proc [Proc]
+      def setting(name, default_value=nil, default_proc: nil, &block)
+        if block_given?
+          # Create a nested setting
+          setting_config = @settings[name]&.value || ConfigStruct.new("#{@name}.#{name}")
+          setting = ConfigValue.new
+          setting.value = setting_config
+          @settings[name] = setting
+          setting_config.instance_eval(&block)
+        else
+          setting = ConfigValue.new
+          setting.default_proc = default_proc
+          setting.default_value = default_value
+          setting.reset!
+          @settings[name] = setting
+        end
+      end
+
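For reference, the three call forms of `setting`, each drawn from how it is used later in this diff:

    setting :client_id, 'phobos'                         # eager default value
    setting :phobos_logger,
            default_proc: proc { Deimos.config.logger }  # lazy default, evaluated by reset!
    setting :kafka do                                    # nested ConfigStruct
      setting :seed_brokers, ['localhost:9092']
    end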
+      # :nodoc:
+      def respond_to_missing?(method, include_all=true)
+        method = method.to_s.sub(/=$/, '')
+        method.ends_with?('objects') ||
+          @setting_templates.key?(method.to_sym) ||
+          @settings.key?(method.to_sym) ||
+          super
+      end
+
+      # :nodoc:
+      def method_missing(method, *args, &block)
+        config_key = method.to_s.sub(/=$/, '').to_sym
+
+        # Return the list of setting objects with the given name
+        if config_key.to_s.end_with?('objects')
+          return _setting_object_method(config_key)
+        end
+
+        # Define a new setting object with the given name
+        if @setting_templates.key?(config_key) && block_given?
+          return _new_setting_object_method(config_key, &block)
+        end
+
+        setting = @settings[config_key]
+
+        if setting&.deprecation
+          return _deprecated_config_method(method, *args)
+        end
+
+        return super unless setting
+
+        if block_given?
+          return _block_config_method(config_key, &block)
+        end
+
+        _default_config_method(config_key, *args)
+      end
+
+      protected
+
+      # Only for the clone method
+      attr_accessor :settings, :setting_objects
+
+      private
+
+      def _deprecated_config_method(method, *args)
+        config_key = method.to_s.sub(/=$/, '').to_sym
+        new_config = @settings[config_key].deprecation
+        equals = method.to_s.end_with?('=') ? '=' : ''
+        ActiveSupport::Deprecation.warn("config.#{config_key}#{equals} is deprecated - use config.#{new_config}#{equals}")
+        obj = self
+        messages = new_config.split('.')
+        messages[0..-2].each do |message|
+          obj = obj.send(message)
+        end
+        if args.length.positive?
+          obj.send(messages[-1], args[0])
+        else
+          obj.send(messages[-1])
+        end
+      end
+
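Concretely, with the `deprecate 'schema_path', 'schema.path'` mapping registered at the bottom of this diff, touching the old key warns and forwards to the new one:

    Deimos.config.schema_path = 'app/schemas'
    # DEPRECATION WARNING: config.schema_path= is deprecated - use config.schema.path=
    Deimos.config.schema.path  # => 'app/schemas'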
+      # Get or set a value.
+      def _default_config_method(config_key, *args)
+        if args.length.positive?
+          # Set the value
+          @settings[config_key].value = args[0]
+        else
+          # Get the value
+          @settings[config_key].value
+        end
+      end
+
+      # Define a new setting object and use the passed block to define values.
+      def _new_setting_object_method(config_key, &block)
+        new_config = @setting_templates[config_key].clone_and_reset
+        new_config.instance_eval(&block)
+        @setting_objects[config_key] << new_config
+      end
+
+      # Return a setting object.
+      def _setting_object_method(config_key)
+        key = config_key.to_s.sub(/_objects$/, '').to_sym
+        @setting_objects[key]
+      end
+
+      # Define new values inside a block.
+      def _block_config_method(config_key, &block)
+        unless @settings[config_key].value.is_a?(ConfigStruct)
+          raise "Block called for #{config_key} but it is not a nested config!"
+        end
+
+        @settings[config_key].value.instance_eval(&block)
+      end
+    end
+
+    # :nodoc:
+    module ClassMethods
+      # Pass the configuration into a block.
+      def configure(&block)
+        config.run_callbacks(:configure) do
+          config.instance_eval(&block)
+        end
+      end
+
+      # @return [ConfigStruct]
+      def config
+        @config ||= ConfigStruct.new('config')
+      end
+
+      # Pass a block to run after configuration is done.
+      def after_configure(&block)
+        mod = self
+        config.class.set_callback(:configure, :after,
+                                  proc { mod.instance_eval(&block) })
+      end
+    end
+  end
+end
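
Since the concern is self-contained, any class or module can include it; a minimal sketch with a hypothetical class and settings:

    class MyService
      include Deimos::Configurable
    end

    MyService.configure do
      setting :host, 'localhost'
      setting :retries, default_proc: proc { 3 }
    end

    MyService.config.host                          # => 'localhost'
    MyService.configure { host 'kafka.internal' }  # safe to call again
    MyService.config.host                          # => 'kafka.internal'
    MyService.config.retries                       # => 3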
@@ -0,0 +1,354 @@
+# frozen_string_literal: true
+
+require_relative 'phobos_config'
+require_relative 'configurable'
+require_relative '../metrics/mock'
+require_relative '../tracing/mock'
+
+# :nodoc:
+module Deimos
+  include Configurable
+
+  # :nodoc:
+  class Configurable::ConfigStruct
+    include Deimos::PhobosConfig
+  end
+
+  # :nodoc:
+  after_configure do
+    Phobos.configure(self.config.phobos_config)
+    self.config.producer_objects.each do |producer|
+      configure_producer_or_consumer(producer)
+    end
+    self.config.consumer_objects.each do |consumer|
+      configure_producer_or_consumer(consumer)
+    end
+    validate_consumers
+    validate_db_backend if self.config.producers.backend == :db
+  end
+
+  # Ensure everything is set up correctly for the DB backend.
+  def self.validate_db_backend
+    begin
+      require 'activerecord-import'
+    rescue LoadError
+      raise 'Cannot set producers.backend to :db without activerecord-import! Please add it to your Gemfile.'
+    end
+    if Deimos.config.producers.required_acks != :all
+      raise 'Cannot set producers.backend to :db unless producers.required_acks is set to ":all"!'
+    end
+  end
+
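As the validation above implies, opting into the DB backend requires activerecord-import plus all-replica acknowledgement; a sketch:

    # Gemfile
    gem 'activerecord-import'

    Deimos.configure do
      producers do
        backend :db
        required_acks :all
      end
    end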
+  # Validate that consumers are configured correctly, including their
+  # delivery mode.
+  def self.validate_consumers
+    Phobos.config.listeners.each do |listener|
+      handler_class = listener.handler.constantize
+      delivery = listener.delivery
+
+      # Validate that Deimos consumers use proper delivery configs
+      if handler_class < Deimos::BatchConsumer
+        unless delivery == 'inline_batch'
+          raise "BatchConsumer #{listener.handler} must have delivery set to"\
+                ' `inline_batch`'
+        end
+      elsif handler_class < Deimos::Consumer
+        if delivery.present? && !%w(message batch).include?(delivery)
+          raise "Non-batch Consumer #{listener.handler} must have delivery"\
+                ' set to `message` or `batch`'
+        end
+      end
+    end
+  end
+
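In practice this means a `Deimos::BatchConsumer` subclass must set its delivery to `inline_batch`, or the validation above raises; a sketch using the `consumer` setting object defined later in this file (class and topic names are hypothetical):

    Deimos.configure do
      consumer do
        class_name 'MyBatchConsumer'  # < Deimos::BatchConsumer
        topic 'my-topic'
        delivery 'inline_batch'
      end
    end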
+  # @param kafka_config [Configurable::ConfigStruct]
+  def self.configure_producer_or_consumer(kafka_config)
+    klass = kafka_config.class_name.constantize
+    klass.class_eval do
+      topic(kafka_config.topic) if kafka_config.topic.present? && klass.respond_to?(:topic)
+      schema(kafka_config.schema) if kafka_config.schema.present?
+      namespace(kafka_config.namespace) if kafka_config.namespace.present?
+      key_config(kafka_config.key_config) if kafka_config.key_config.present?
+    end
+  end
+
+  configure do
+
+    # @return [Logger]
+    setting :logger, Logger.new(STDOUT)
+
+    # @return [Logger]
+    setting :phobos_logger, default_proc: proc { Deimos.config.logger }
+
+    setting :kafka do
+
+      # @return [Logger]
+      setting :logger, default_proc: proc { Deimos.config.logger }
+
+      # URL of the seed broker.
+      # @return [Array<String>]
+      setting :seed_brokers, ['localhost:9092']
+
+      # Identifier for this application.
+      # @return [String]
+      setting :client_id, 'phobos'
+
+      # The socket timeout for connecting to the broker, in seconds.
+      # @return [Integer]
+      setting :connect_timeout, 15
+
+      # The socket timeout for reading and writing to the broker, in seconds.
+      # @return [Integer]
+      setting :socket_timeout, 15
+
+      setting :ssl do
+        # Whether SSL is enabled on the brokers.
+        # @return [Boolean]
+        setting :enabled
+
+        # A PEM-encoded CA cert, a file path to the cert, or an Array of certs,
+        # to use with an SSL connection.
+        # @return [String|Array<String>]
+        setting :ca_cert
+
+        # A PEM-encoded client cert to use with an SSL connection, or a file path
+        # to the cert.
+        # @return [String]
+        setting :client_cert
+
+        # A PEM-encoded client cert key to use with an SSL connection.
+        # @return [String]
+        setting :client_cert_key
+
+        # Verify certificate hostname if supported (Ruby >= 2.4.0).
+        setting :verify_hostname, true
+      end
+    end
+
+    setting :consumers do
+
+      # Number of seconds after which, if a client hasn't contacted the Kafka cluster,
+      # it will be kicked out of the group.
+      # @return [Integer]
+      setting :session_timeout, 300
+
+      # Interval between offset commits, in seconds.
+      # @return [Integer]
+      setting :offset_commit_interval, 10
+
+      # Number of messages that can be processed before their offsets are committed.
+      # If zero, offset commits are not triggered by message processing.
+      # @return [Integer]
+      setting :offset_commit_threshold, 0
+
+      # Interval between heartbeats; must be less than the session window.
+      # @return [Integer]
+      setting :heartbeat_interval, 10
+
+      # Minimum and maximum number of milliseconds to back off after a consumer
+      # error.
+      setting :backoff, (1000..60_000)
+
+      # By default, consumer errors will be consumed and logged to
+      # the metrics provider.
+      # Set this to true to force the error to be raised.
+      # @return [Boolean]
+      setting :reraise_errors
+
+      # @return [Boolean]
+      setting :report_lag
+
+      # Block taking an exception, payload and metadata and returning
+      # true if this should be considered a fatal error and false otherwise.
+      # Not needed if reraise_errors is set to true.
+      # @return [Block]
+      setting(:fatal_error, proc { false })
+    end
+
+    setting :producers do
+      # Number of seconds a broker can wait for replicas to acknowledge
+      # a write before responding with a timeout.
+      # @return [Integer]
+      setting :ack_timeout, 5
+
+      # Number of replicas that must acknowledge a write, or `:all`
+      # if all in-sync replicas must acknowledge.
+      # @return [Integer|Symbol]
+      setting :required_acks, 1
+
+      # Number of retries that should be attempted before giving up sending
+      # messages to the cluster. Does not include the original attempt.
+      # @return [Integer]
+      setting :max_retries, 2
+
+      # Number of seconds to wait between retries.
+      # @return [Integer]
+      setting :retry_backoff, 1
+
+      # Number of messages allowed in the buffer before new writes will
+      # raise {BufferOverflow} exceptions.
+      # @return [Integer]
+      setting :max_buffer_size, 10_000
+
+      # Maximum size of the buffer in bytes. Attempting to produce messages
+      # when the buffer reaches this size will result in {BufferOverflow} being raised.
+      # @return [Integer]
+      setting :max_buffer_bytesize, 10_000_000
+
+      # Name of the compression codec to use, or nil if no compression should be performed.
+      # Valid codecs: `:snappy` and `:gzip`.
+      # @return [Symbol]
+      setting :compression_codec
+
+      # Number of messages that need to be in a message set before it should be compressed.
+      # Note that message sets are per-partition rather than per-topic or per-producer.
+      # @return [Integer]
+      setting :compression_threshold, 1
+
+      # Maximum number of messages allowed in the queue. Only used for async_producer.
+      # @return [Integer]
+      setting :max_queue_size, 10_000
+
+      # If greater than zero, the number of buffered messages that will automatically
+      # trigger a delivery. Only used for async_producer.
+      # @return [Integer]
+      setting :delivery_threshold, 0
+
+      # If greater than zero, the number of seconds between automatic message
+      # deliveries. Only used for async_producer.
+      # @return [Integer]
+      setting :delivery_interval, 0
+
+      # Set this to true to keep the producer connection between publish calls.
+      # This can speed up subsequent messages by around 30%, but it does mean
+      # that you need to manually call sync_producer_shutdown before exiting,
+      # similar to async_producer_shutdown.
+      # @return [Boolean]
+      setting :persistent_connections, false
+
+      # Default namespace for all producers. Can remain nil. Individual
+      # producers can override.
+      # @return [String]
+      setting :schema_namespace
+
+      # Add a prefix to all topic names. This can be useful if you're using
+      # the same Kafka broker for different environments that are producing
+      # the same topics.
+      # @return [String]
+      setting :topic_prefix
+
+      # Disable all actual message producing. Generally more useful to use
+      # the `disable_producers` method instead.
+      # @return [Boolean]
+      setting :disabled
+
+      # Currently can be set to :db, :kafka, or :kafka_async. If using Kafka
+      # directly, a good pattern is to set to async in your user-facing app, and
+      # sync in your consumers or delayed workers.
+      # @return [Symbol]
+      setting :backend, :kafka_async
+    end
+
+    setting :schema do
+      # URL of the Confluent schema registry.
+      # @return [String]
+      setting :registry_url, 'http://localhost:8081'
+
+      # Local path to look for schemas in.
+      # @return [String]
+      setting :path
+    end
+
+    # The configured metrics provider.
+    # @return [Metrics::Provider]
+    setting :metrics, Metrics::Mock.new
+
+    # The configured tracing / APM provider.
+    # @return [Tracing::Provider]
+    setting :tracer, Tracing::Mock.new
+
+    setting :db_producer do
+
+      # @return [Logger]
+      setting :logger, default_proc: proc { Deimos.config.logger }
+
+      # @return [Symbol|Array<String>] A list of topics to log all messages for,
+      #   or :all to log all topics.
+      setting :log_topics, []
+
+      # @return [Symbol|Array<String>] A list of topics to compact messages for
+      #   before sending, or :all to compact all keyed messages.
+      setting :compact_topics, []
+
+    end
+
+    setting_object :producer do
+      # Producer class.
+      # @return [String]
+      setting :class_name
+      # Topic to produce to.
+      # @return [String]
+      setting :topic
+      # Schema of the data in the topic.
+      # @return [String]
+      setting :schema
+      # Optional namespace to access the schema.
+      # @return [String]
+      setting :namespace
+      # Key configuration (see docs).
+      # @return [Hash]
+      setting :key_config
+    end
+
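Each `producer` block declared through this template is applied to the named class by `configure_producer_or_consumer` above; a sketch with hypothetical names:

    Deimos.configure do
      producer do
        class_name 'MyProducer'
        topic 'MyTopic'
        schema 'MySchema'
        namespace 'com.my-company'
        key_config field: 'my_id'
      end
    end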
+    setting_object :consumer do
+      # Consumer class.
+      # @return [String]
+      setting :class_name
+      # Topic to read from.
+      # @return [String]
+      setting :topic
+      # Schema of the data in the topic.
+      # @return [String]
+      setting :schema
+      # Optional namespace to access the schema.
+      # @return [String]
+      setting :namespace
+      # Key configuration (see docs).
+      # @return [Hash]
+      setting :key_config
+
+      # These are the Phobos "listener" configs. See CONFIGURATION.md for more
+      # info.
+      setting :group_id
+      setting :max_concurrency
+      setting :start_from_beginning
+      setting :max_bytes_per_partition
+      setting :min_bytes
+      setting :max_wait_time
+      setting :force_encoding
+      setting :delivery
+      setting :backoff
+      setting :session_timeout
+      setting :offset_commit_interval
+      setting :offset_commit_threshold
+      setting :offset_retention_time
+      setting :heartbeat_interval
+    end
+
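Consumer blocks additionally accept the Phobos listener settings inline; a sketch showing a few of them (values hypothetical):

    Deimos.configure do
      consumer do
        class_name 'MyConsumer'
        topic 'my-topic'
        key_config plain: true
        group_id 'my-consumer-group'
        start_from_beginning true
        max_wait_time 5
      end
    end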
+    deprecate 'kafka_logger', 'kafka.logger'
+    deprecate 'reraise_consumer_errors', 'consumers.reraise_errors'
+    deprecate 'schema_registry_url', 'schema.registry_url'
+    deprecate 'seed_broker', 'kafka.seed_brokers'
+    deprecate 'schema_path', 'schema.path'
+    deprecate 'producer_schema_namespace', 'producers.schema_namespace'
+    deprecate 'producer_topic_prefix', 'producers.topic_prefix'
+    deprecate 'disable_producers', 'producers.disabled'
+    deprecate 'ssl_enabled', 'kafka.ssl.enabled'
+    deprecate 'ssl_ca_cert', 'kafka.ssl.ca_cert'
+    deprecate 'ssl_client_cert', 'kafka.ssl.client_cert'
+    deprecate 'ssl_client_cert_key', 'kafka.ssl.client_cert_key'
+    deprecate 'publish_backend', 'producers.backend'
+    deprecate 'report_lag', 'consumers.report_lag'
+
+  end
+end