deimos-ruby 1.3.0.pre.beta5 → 1.4.0.pre.beta1

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between versions as published.
@@ -0,0 +1,134 @@
+ # frozen_string_literal: true
+
+ require 'active_support/core_ext/array'
+
+ module Deimos
+   # Module to handle phobos.yml as well as outputting the configuration to save
+   # to Phobos itself.
+   module PhobosConfig
+     extend ActiveSupport::Concern
+
+     # @return [Hash]
+     def to_h
+       (FIELDS + [:handler]).map { |f|
+         val = self.send(f)
+         if f == :backoff && val
+           [:backoff, _backoff(val)]
+         elsif val.present?
+           [f, val]
+         end
+       }.to_h
+     end
+
+     # :nodoc:
+     def reset!
+       super
+       Phobos.configure(self.phobos_config)
+     end
+
+     # Create a hash representing the config that Phobos expects.
+     # @return [Hash]
+     def phobos_config
+       p_config = {
+         logger: Logger.new(STDOUT),
+         custom_logger: self.phobos_logger,
+         custom_kafka_logger: self.kafka.logger,
+         kafka: {
+           client_id: self.kafka.client_id,
+           connect_timeout: self.kafka.connect_timeout,
+           socket_timeout: self.kafka.socket_timeout,
+           ssl_verify_hostname: self.kafka.ssl.verify_hostname,
+           seed_brokers: Array.wrap(self.kafka.seed_brokers)
+         },
+         producer: {
+           ack_timeout: self.producers.ack_timeout,
+           required_acks: self.producers.required_acks,
+           max_retries: self.producers.max_retries,
+           retry_backoff: self.producers.retry_backoff,
+           max_buffer_size: self.producers.max_buffer_size,
+           max_buffer_bytesize: self.producers.max_buffer_bytesize,
+           compression_codec: self.producers.compression_codec,
+           compression_threshold: self.producers.compression_threshold,
+           max_queue_size: self.producers.max_queue_size,
+           delivery_threshold: self.producers.delivery_threshold,
+           delivery_interval: self.producers.delivery_interval
+         },
+         consumer: {
+           session_timeout: self.consumers.session_timeout,
+           offset_commit_interval: self.consumers.offset_commit_interval,
+           offset_commit_threshold: self.consumers.offset_commit_threshold,
+           heartbeat_interval: self.consumers.heartbeat_interval
+         },
+         backoff: _backoff(self.consumers.backoff.to_a)
+       }
+
+       p_config[:listeners] = self.consumer_objects.map do |consumer|
+         hash = consumer.to_h.reject do |k, _|
+           %i(class_name schema namespace key_config backoff).include?(k)
+         end
+         hash = hash.map { |k, v| [k, v.is_a?(Symbol) ? v.to_s : v] }.to_h
+         hash[:handler] = consumer.class_name
+         if consumer.backoff
+           hash[:backoff] = _backoff(consumer.backoff.to_a)
+         end
+         hash
+       end
+
+       if self.kafka.ssl.enabled
+         %w(ca_cert client_cert client_cert_key).each do |key|
+           next if self.kafka.ssl.send(key).blank?
+
+           p_config[:kafka]["ssl_#{key}".to_sym] = ssl_var_contents(self.kafka.ssl.send(key))
+         end
+       end
+       p_config
+     end
+
+     # @param key [String]
+     # @return [String]
+     def ssl_var_contents(key)
+       File.exist?(key) ? File.read(key) : key
+     end
+
+     # Legacy method to parse Phobos config file
+     def phobos_config_file=(file)
+       pconfig = YAML.load(ERB.new(File.read(File.expand_path(file))).result). # rubocop:disable Security/YAMLLoad
+         with_indifferent_access
+       self.logger&.warn('phobos.yml is deprecated - use direct configuration instead.')
+       pconfig[:kafka].each do |k, v|
+         if k.starts_with?('ssl')
+           k = k.sub('ssl_', '')
+           self.kafka.ssl.send("#{k}=", v)
+         else
+           self.kafka.send("#{k}=", v)
+         end
+       end
+       pconfig[:producer].each do |k, v|
+         self.producers.send("#{k}=", v)
+       end
+       pconfig[:consumer].each do |k, v|
+         self.consumers.send("#{k}=", v)
+       end
+       self.consumers.backoff = pconfig[:backoff][:min_ms]..pconfig[:backoff][:max_ms]
+       pconfig[:listeners].each do |listener_hash|
+         self.consumer do
+           listener_hash.each do |k, v|
+             k = 'class_name' if k == 'handler'
+             send(k, v)
+           end
+         end
+       end
+     end
+
+     private
+
+     # @param values [Array<Integer>]
+     # @return [Hash<Integer>]
+     def _backoff(values)
+       {
+         min_ms: values[0],
+         max_ms: values[-1]
+       }
+     end
+   end
+ end
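Together with the renames later in this diff, this new file shows configuration moving from flat attributes onto a nested settings tree (kafka, producers, consumers) that phobos_config translates into the hash Phobos expects. A minimal sketch of the new style, assuming a host app (the broker address and cert path here are illustrative):

    Deimos.configure do |config|
      config.kafka.seed_brokers = ['broker1:9092']  # Array.wrap above also tolerates a bare string
      config.kafka.ssl.enabled = true
      config.kafka.ssl.ca_cert = '/etc/ssl/ca.pem'  # a path or literal contents (see ssl_var_contents)
      config.producers.required_acks = :all
      config.consumers.backoff = (1_000..60_000)    # _backoff converts the range to { min_ms:, max_ms: }
    end

When the config is reset, reset! above re-runs Phobos.configure with the freshly generated hash.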
@@ -14,6 +14,12 @@ module Deimos
       write_attribute(:message, mess ? mess.to_s : nil)
     end

+    # Decoded payload for this message.
+    # @return [Hash]
+    def decoded_message
+      self.class.decoded([self]).first
+    end
+
     # Get a decoder to decode a set of messages on the given topic.
     # @param topic [String]
     # @return [Deimos::Consumer]
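The new decoded_message delegates to the existing class-level decoded helper for a single record. A usage sketch, assuming (as the surrounding context suggests) the hunk lands on the Deimos::KafkaMessage model used by the DB backend:

    message = Deimos::KafkaMessage.last
    message.decoded_message  # => decoded payload Hash for this record
    # equivalent to: Deimos::KafkaMessage.decoded([message]).first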
@@ -63,7 +63,7 @@ module Deimos
       def config
         @config ||= {
           encode_key: true,
-          namespace: Deimos.config.producer_schema_namespace
+          namespace: Deimos.config.producers.schema_namespace
         }
       end

@@ -76,7 +76,7 @@ module Deimos
           return
         end
         # accessor
-        "#{Deimos.config.producer_topic_prefix}#{config[:topic]}"
+        "#{Deimos.config.producers.topic_prefix}#{config[:topic]}"
       end

       # Override the default partition key (which is the payload key).
@@ -100,8 +100,8 @@ module Deimos
       # @param force_send [Boolean] if true, ignore the configured backend
       #   and send immediately to Kafka.
       def publish_list(payloads, sync: nil, force_send: false)
-        return if Deimos.config.seed_broker.blank? ||
-                  Deimos.config.disable_producers ||
+        return if Deimos.config.kafka.seed_brokers.blank? ||
+                  Deimos.config.producers.disabled ||
                   Deimos.producers_disabled?(self)

         backend_class = determine_backend_class(sync, force_send)
@@ -126,7 +126,7 @@ module Deimos
         backend = if force_send
                     :kafka
                   else
-                    Deimos.config.publish_backend
+                    Deimos.config.producers.backend
                   end
         if backend == :kafka_async && sync
           backend = :kafka
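The producer hunks above are pure renames from the old flat config to the new nested tree. A migration sketch, with the old names taken from the removed lines:

    Deimos.configure do |config|
      # config.producer_schema_namespace  ->  config.producers.schema_namespace
      # config.producer_topic_prefix      ->  config.producers.topic_prefix
      # config.seed_broker (String)       ->  config.kafka.seed_brokers (Array)
      # config.disable_producers          ->  config.producers.disabled
      # config.publish_backend            ->  config.producers.backend
      config.producers.backend = :kafka_async
    end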
@@ -61,7 +61,6 @@ module Deimos
       encoder = Deimos::AvroDataEncoder.new(schema: schema,
                                             namespace: decoder.namespace)
       encoder.schema_store = decoder.schema_store
-      payload = payload.respond_to?(:stringify_keys) ? payload.stringify_keys : payload
       encoder.encode_local(payload)
     end
     payload
@@ -77,9 +76,8 @@
     config.before(:suite) do
       Deimos.configure do |fr_config|
         fr_config.logger = Logger.new(STDOUT)
-        fr_config.seed_broker ||= 'test_broker'
-        fr_config.tracer = Deimos::Tracing::Mock.new
-        fr_config.metrics = Deimos::Metrics::Mock.new
+        fr_config.consumers.reraise_errors = true
+        fr_config.kafka.seed_brokers ||= ['test_broker']
       end
     end

@@ -469,12 +467,6 @@
       allow(klass).to receive(:decoder) do
         create_decoder(klass.config[:schema], klass.config[:namespace])
       end
-
-      if klass.config[:key_schema] # rubocop:disable Style/GuardClause
-        allow(klass).to receive(:key_decoder) do
-          create_decoder(klass.config[:key_schema], klass.config[:namespace])
-        end
-      end
     end
   end
 end
@@ -93,11 +93,6 @@ module Deimos
         end
       end
       Deimos::KafkaMessage.where(id: messages.map(&:id)).delete_all
-      Deimos.config.metrics&.increment(
-        'db_producer.process',
-        tags: %W(topic:#{@current_topic}),
-        by: messages.size
-      )
       return false if batch_size < BATCH_SIZE

       KafkaTopicInfo.heartbeat(@current_topic, @id) # keep alive
@@ -153,28 +153,28 @@ module Deimos
 end

 ActiveSupport::Notifications.subscribe('start_process_message.consumer.kafka') do |*args|
-  next unless Deimos.config.report_lag
+  next unless Deimos.config.consumers.report_lag

   event = ActiveSupport::Notifications::Event.new(*args)
   Deimos::Utils::LagReporter.message_processed(event.payload)
 end

 ActiveSupport::Notifications.subscribe('start_process_batch.consumer.kafka') do |*args|
-  next unless Deimos.config.report_lag
+  next unless Deimos.config.consumers.report_lag

   event = ActiveSupport::Notifications::Event.new(*args)
   Deimos::Utils::LagReporter.message_processed(event.payload)
 end

 ActiveSupport::Notifications.subscribe('seek.consumer.kafka') do |*args|
-  next unless Deimos.config.report_lag
+  next unless Deimos.config.consumers.report_lag

   event = ActiveSupport::Notifications::Event.new(*args)
   Deimos::Utils::LagReporter.offset_seek(event.payload)
 end

 ActiveSupport::Notifications.subscribe('heartbeat.consumer.kafka') do |*args|
-  next unless Deimos.config.report_lag
+  next unless Deimos.config.consumers.report_lag

   event = ActiveSupport::Notifications::Event.new(*args)
   Deimos::Utils::LagReporter.heartbeat(event.payload)
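All four notification subscriptions now gate on the renamed consumers.report_lag flag, so a host app enables lag reporting like this (sketch):

    Deimos.configure do |config|
      config.consumers.report_lag = true  # was: config.report_lag
    end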
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Deimos
-   VERSION = '1.3.0-beta5'
+   VERSION = '1.4.0-beta1'
  end
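The version string '1.4.0-beta1' is published to RubyGems as 1.4.0.pre.beta1, so trying this prerelease requires an explicit pin (Bundler ignores prereleases for plain version requirements):

    # Gemfile
    gem 'deimos-ruby', '1.4.0.pre.beta1'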
data/lib/deimos.rb CHANGED
@@ -4,6 +4,7 @@ require 'avro-patches'
  require 'avro_turf'
  require 'phobos'
  require 'deimos/version'
+ require 'deimos/config/configuration'
  require 'deimos/avro_data_encoder'
  require 'deimos/avro_data_decoder'
  require 'deimos/producer'
@@ -11,7 +12,6 @@ require 'deimos/active_record_producer'
  require 'deimos/active_record_consumer'
  require 'deimos/consumer'
  require 'deimos/batch_consumer'
- require 'deimos/configuration'
  require 'deimos/instrumentation'
  require 'deimos/utils/lag_reporter'

@@ -40,48 +40,10 @@ require 'erb'
 # Parent module.
 module Deimos
   class << self
-    attr_accessor :config
-
-    # Configure Deimos.
-    def configure
-      first_time_config = self.config.nil?
-      self.config ||= Configuration.new
-      old_config = self.config.dup
-      yield(config)
-
-      # Don't re-configure Phobos every time
-      if first_time_config || config.phobos_config_changed?(old_config)
-
-        file = config.phobos_config_file
-        phobos_config = YAML.load(ERB.new(File.read(File.expand_path(file))).result)
-
-        configure_kafka_for_phobos(phobos_config)
-        configure_loggers(phobos_config)
-
-        Phobos.configure(phobos_config)
-
-        validate_consumers
-      end
-
-      validate_db_backend if self.config.publish_backend == :db
-    end
-
-    # Ensure everything is set up correctly for the DB backend.
-    def validate_db_backend
-      begin
-        require 'activerecord-import'
-      rescue LoadError
-        raise 'Cannot set publish_backend to :db without activerecord-import! Please add it to your Gemfile.'
-      end
-      if Phobos.config.producer_hash[:required_acks] != :all
-        raise 'Cannot set publish_backend to :db unless required_acks is set to ":all" in phobos.yml!'
-      end
-    end
-
     # Start the DB producers to send Kafka messages.
     # @param thread_count [Integer] the number of threads to start.
     def start_db_backend!(thread_count: 1)
-      if self.config.publish_backend != :db
+      if self.config.producers.backend != :db
         raise('Publish backend is not set to :db, exiting')
       end

@@ -99,52 +61,6 @@ module Deimos
       signal_handler = Deimos::Utils::SignalHandler.new(executor)
       signal_handler.run!
     end
-
-    # @param phobos_config [Hash]
-    def configure_kafka_for_phobos(phobos_config)
-      if config.ssl_enabled
-        %w(ssl_ca_cert ssl_client_cert ssl_client_cert_key).each do |key|
-          next if config.send(key).blank?
-
-          phobos_config['kafka'][key] = ssl_var_contents(config.send(key))
-        end
-      end
-      phobos_config['kafka']['seed_brokers'] = config.seed_broker if config.seed_broker
-    end
-
-    # @param phobos_config [Hash]
-    def configure_loggers(phobos_config)
-      phobos_config['custom_logger'] = config.phobos_logger
-      phobos_config['custom_kafka_logger'] = config.kafka_logger
-    end
-
-    # @param filename [String] a file to read, or the contents of the SSL var
-    # @return [String] the contents of the file
-    def ssl_var_contents(filename)
-      File.exist?(filename) ? File.read(filename) : filename
-    end
-
-    # Validate that consumers are configured correctly, including their
-    # delivery mode.
-    def validate_consumers
-      Phobos.config.listeners.each do |listener|
-        handler_class = listener.handler.constantize
-        delivery = listener.delivery
-
-        # Validate that Deimos consumers use proper delivery configs
-        if handler_class < Deimos::BatchConsumer
-          unless delivery == 'inline_batch'
-            raise "BatchConsumer #{listener.handler} must have delivery set to"\
-              ' `inline_batch`'
-          end
-        elsif handler_class < Deimos::Consumer
-          if delivery.present? && !%w(message batch).include?(delivery)
-            raise "Non-batch Consumer #{listener.handler} must have delivery"\
-              ' set to `message` or `batch`'
-          end
-        end
-      end
-    end
   end
 end

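With configure, validate_db_backend, the Phobos wiring helpers, and validate_consumers deleted here, their replacements presumably live in the newly required deimos/config/configuration together with the PhobosConfig module at the top of this diff; the validations themselves are not shown in this diff. From the caller's side only the setting name changes (sketch):

    Deimos.configure do |config|
      config.producers.backend = :db  # was: config.publish_backend = :db
    end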
@@ -7,7 +7,7 @@ namespace :deimos do
   desc 'Starts Deimos in the rails environment'
   task start: :environment do
     Deimos.configure do |config|
-      config.publish_backend = :kafka if config.publish_backend == :kafka_async
+      config.producers.backend = :kafka_sync if config.producers.backend == :kafka_async
     end
     ENV['DEIMOS_RAKE_TASK'] = 'true'
     STDOUT.sync = true
@@ -4,11 +4,6 @@ each_db_config(Deimos::Backends::Db) do
   include_context 'with publish_backend'

   it 'should save to the database' do
-    expect(Deimos.config.metrics).to receive(:increment).with(
-      'db_producer.insert',
-      tags: %w(topic:my-topic),
-      by: 3
-    )
     described_class.publish(producer_class: MyProducer, messages: messages)
     records = Deimos::KafkaMessage.all
     expect(records.size).to eq(3)
@@ -50,7 +50,7 @@ module ConsumerTest

   describe 'when reraising errors is disabled' do
     before(:each) do
-      Deimos.configure { |config| config.reraise_consumer_errors = false }
+      Deimos.configure { |config| config.consumers.reraise_errors = false }
     end

     it 'should not fail when before_consume_batch fails' do
@@ -0,0 +1,121 @@
+ # frozen_string_literal: true
+
+ # :nodoc:
+ class MyConfig
+   include Deimos::Configurable
+
+   configure do
+     setting :set1
+     setting :set2, 'hi mom'
+     setting :group do
+       setting :set3, default_proc: proc { false }
+       setting :set5, (proc { 5 })
+     end
+
+     setting_object :listy do
+       setting :list1, 10
+       setting :list2, 5
+     end
+   end
+ end
+
+ describe Deimos::Configurable do
+   it 'should configure correctly with default values' do
+     expect(MyConfig.config.set1).to be_nil
+     expect(MyConfig.config.set2).to eq('hi mom')
+     expect(MyConfig.config.group.set3).to eq(false)
+     expect(MyConfig.config.listy_objects).to be_empty
+     expect { MyConfig.config.blah }.to raise_error(NameError)
+     expect { MyConfig.config.group.set4 }.to raise_error(NameError)
+   end
+
+   it "should raise error when setting configs that don't exist" do
+     expect { MyConfig.configure { set15 'some_value' } }.to raise_error(NameError)
+   end
+
+   it 'should add values' do
+     MyConfig.configure do |config|
+       config.set1 = 5 # config.x syntax
+       set2 nil # method_missing syntax
+       config.group.set3 = true
+     end
+
+     # second configure should not blow anything away
+     MyConfig.configure do
+       listy do
+         list1 0
+         list2 1
+       end
+       listy do
+         list1 100
+         list2 200
+       end
+     end
+
+     expect(MyConfig.config.set1).to eq(5)
+     expect(MyConfig.config.set2).to be_nil
+     expect(MyConfig.config.group.set3).to eq(true)
+     expect(MyConfig.config.listy_objects.map(&:to_h)).
+       to eq([
+               { list1: 0, list2: 1 },
+               { list1: 100, list2: 200 }
+             ])
+
+     # test reset!
+     MyConfig.config.reset!
+     expect(MyConfig.config.set1).to be_nil
+     expect(MyConfig.config.set2).to eq('hi mom')
+     expect(MyConfig.config.group.set3).to eq(false)
+     expect(MyConfig.config.listy_objects).to be_empty
+   end
+
+   it 'should add with block syntax' do
+     MyConfig.configure do
+       group do
+         set5(proc { 10 })
+       end
+     end
+     expect(MyConfig.config.group.set5.call).to eq(10)
+   end
+
+   it 'should add or redefine settings' do
+     MyConfig.configure do
+       setting :group do
+         setting :set6, 15
+         setting :set5, (proc { 15 })
+       end
+       setting_object :notey do
+         setting :note_title, 'some-title'
+       end
+     end
+
+     expect(MyConfig.config.group.set6).to eq(15)
+     expect(MyConfig.config.group.set5.call).to eq(15)
+     expect(MyConfig.config.listy_objects).to be_empty
+     expect(MyConfig.config.notey_objects).to be_empty
+
+     MyConfig.configure do
+       notey do
+         note_title 'hi mom'
+       end
+       listy do
+         list1 0
+       end
+     end
+     expect(MyConfig.config.notey_objects.size).to eq(1)
+     expect(MyConfig.config.notey_objects.first.note_title).to eq('hi mom')
+     expect(MyConfig.config.listy_objects.size).to eq(1)
+     expect(MyConfig.config.listy_objects.first.list1).to eq(0)
+
+     # This should not remove any keys
+     MyConfig.configure do
+       setting :group do
+         setting :set6, 20
+       end
+     end
+     expect(MyConfig.config.group.set6).to eq(20)
+     expect(MyConfig.config.group.set5.call).to eq(15)
+
+   end
+
+ end
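The spec above doubles as documentation for the new Configurable DSL. Distilled into a hypothetical AppConfig class (the name is invented here; the DSL calls are the ones the spec exercises):

    class AppConfig
      include Deimos::Configurable

      configure do
        setting :set2, 'hi mom'                        # scalar setting with a default
        setting :group do                              # nested group of settings
          setting :set3, default_proc: proc { false }
        end
        setting_object :listy do                       # repeatable objects, read back via config.listy_objects
          setting :list1, 10
        end
      end
    end

    AppConfig.configure { |config| config.group.set3 = true }
    AppConfig.config.group.set3  # => true
    AppConfig.config.reset!      # restores defaults and clears setting_objects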