deimos-ruby 1.4.0.pre.beta7 → 1.5.0.pre.beta2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +3 -0
  3. data/CHANGELOG.md +13 -0
  4. data/Gemfile.lock +140 -58
  5. data/README.md +38 -11
  6. data/Rakefile +2 -2
  7. data/deimos-ruby.gemspec +3 -2
  8. data/docs/CONFIGURATION.md +1 -0
  9. data/docs/DATABASE_BACKEND.md +1 -1
  10. data/lib/deimos/active_record_consumer.rb +11 -12
  11. data/lib/deimos/active_record_producer.rb +2 -2
  12. data/lib/deimos/backends/base.rb +32 -0
  13. data/lib/deimos/backends/db.rb +6 -1
  14. data/lib/deimos/backends/kafka.rb +1 -1
  15. data/lib/deimos/backends/kafka_async.rb +1 -1
  16. data/lib/deimos/backends/test.rb +20 -0
  17. data/lib/deimos/base_consumer.rb +7 -7
  18. data/lib/deimos/batch_consumer.rb +0 -1
  19. data/lib/deimos/config/configuration.rb +4 -0
  20. data/lib/deimos/consumer.rb +0 -2
  21. data/lib/deimos/kafka_source.rb +1 -1
  22. data/lib/deimos/kafka_topic_info.rb +1 -1
  23. data/lib/deimos/message.rb +7 -7
  24. data/lib/deimos/producer.rb +10 -12
  25. data/lib/deimos/schema_backends/avro_base.rb +108 -0
  26. data/lib/deimos/schema_backends/avro_local.rb +30 -0
  27. data/lib/deimos/{schema_coercer.rb → schema_backends/avro_schema_coercer.rb} +39 -51
  28. data/lib/deimos/schema_backends/avro_schema_registry.rb +34 -0
  29. data/lib/deimos/schema_backends/avro_validation.rb +21 -0
  30. data/lib/deimos/schema_backends/base.rb +130 -0
  31. data/lib/deimos/schema_backends/mock.rb +42 -0
  32. data/lib/deimos/test_helpers.rb +42 -168
  33. data/lib/deimos/utils/db_producer.rb +5 -0
  34. data/lib/deimos/version.rb +1 -1
  35. data/lib/deimos.rb +22 -6
  36. data/lib/tasks/deimos.rake +1 -1
  37. data/spec/active_record_consumer_spec.rb +7 -0
  38. data/spec/{publish_backend_spec.rb → backends/base_spec.rb} +1 -1
  39. data/spec/backends/db_spec.rb +5 -0
  40. data/spec/batch_consumer_spec.rb +0 -8
  41. data/spec/config/configuration_spec.rb +20 -20
  42. data/spec/consumer_spec.rb +0 -1
  43. data/spec/deimos_spec.rb +0 -4
  44. data/spec/kafka_source_spec.rb +8 -0
  45. data/spec/producer_spec.rb +23 -37
  46. data/spec/rake_spec.rb +19 -0
  47. data/spec/schema_backends/avro_base_shared.rb +174 -0
  48. data/spec/schema_backends/avro_local_spec.rb +32 -0
  49. data/spec/schema_backends/avro_schema_registry_spec.rb +32 -0
  50. data/spec/schema_backends/avro_validation_spec.rb +24 -0
  51. data/spec/schema_backends/base_spec.rb +29 -0
  52. data/spec/spec_helper.rb +6 -0
  53. data/spec/utils/db_producer_spec.rb +10 -0
  54. metadata +56 -33
  55. data/lib/deimos/avro_data_coder.rb +0 -89
  56. data/lib/deimos/avro_data_decoder.rb +0 -36
  57. data/lib/deimos/avro_data_encoder.rb +0 -51
  58. data/lib/deimos/monkey_patches/schema_store.rb +0 -19
  59. data/lib/deimos/publish_backend.rb +0 -30
  60. data/spec/avro_data_decoder_spec.rb +0 -18
  61. data/spec/avro_data_encoder_spec.rb +0 -37
  62. data/spec/updateable_schema_store_spec.rb +0 -36
data/lib/deimos/test_helpers.rb CHANGED
@@ -2,7 +2,6 @@
 
 require 'active_support/concern'
 require 'active_support/core_ext'
-require 'avro_turf'
 require 'deimos/tracing/mock'
 require 'deimos/metrics/mock'
 
@@ -14,139 +13,55 @@ module Deimos
     extend ActiveSupport::Concern
 
     class << self
+      # for backwards compatibility
       # @return [Array<Hash>]
       def sent_messages
-        @sent_messages ||= []
+        Deimos::Backends::Test.sent_messages
       end
     end
 
     included do
-      # @param encoder_schema [String]
-      # @param namespace [String]
-      # @return [Deimos::AvroDataEncoder]
-      def create_encoder(encoder_schema, namespace)
-        encoder = Deimos::AvroDataEncoder.new(schema: encoder_schema,
-                                              namespace: namespace)
-
-        # we added and_wrap_original to RSpec 2 but the regular block
-        # syntax wasn't working for some reason - block wasn't being passed
-        # to the method
-        block = proc do |m, *args|
-          m.call(*args)
-          args[0]
-        end
-        allow(encoder).to receive(:encode_local).and_wrap_original(&block)
-        allow(encoder).to receive(:encode) do |payload, schema: nil, topic: nil|
-          encoder.encode_local(payload, schema: schema)
-        end
-
-        block = proc do |m, *args|
-          m.call(*args)&.values&.first
-        end
-        allow(encoder).to receive(:encode_key).and_wrap_original(&block)
-        encoder
-      end
-
-      # @param decoder_schema [String]
-      # @param namespace [String]
-      # @return [Deimos::AvroDataDecoder]
-      def create_decoder(decoder_schema, namespace)
-        decoder = Deimos::AvroDataDecoder.new(schema: decoder_schema,
-                                              namespace: namespace)
-        allow(decoder).to receive(:decode_local) { |payload| payload }
-        allow(decoder).to receive(:decode) do |payload, schema: nil|
-          schema ||= decoder.schema
-          if schema && decoder.namespace
-            # Validate against local schema.
-            encoder = Deimos::AvroDataEncoder.new(schema: schema,
-                                                  namespace: decoder.namespace)
-            encoder.schema_store = decoder.schema_store
-            payload = payload.respond_to?(:stringify_keys) ? payload.stringify_keys : payload
-            encoder.encode_local(payload)
-          end
-          payload
-        end
-        allow(decoder).to receive(:decode_key) do |payload, _key_id|
-          payload.values.first
-        end
-        decoder
-      end
 
       RSpec.configure do |config|
 
         config.before(:suite) do
-          Deimos.configure do |fr_config|
-            fr_config.logger = Logger.new(STDOUT)
-            fr_config.consumers.reraise_errors = true
-            fr_config.kafka.seed_brokers ||= ['test_broker']
+          Deimos.configure do |d_config|
+            d_config.logger = Logger.new(STDOUT)
+            d_config.consumers.reraise_errors = true
+            d_config.kafka.seed_brokers ||= ['test_broker']
+            d_config.schema.backend = Deimos.schema_backend_class.mock_backend
           end
        end
-
      end
+
      before(:each) do
        client = double('client').as_null_object
        allow(client).to receive(:time) do |*_args, &block|
          block.call
        end
+        Deimos.configure { |c| c.producers.backend = :test }
+        Deimos::Backends::Test.sent_messages.clear
      end
    end
 
-    # Stub all already-loaded producers and consumers for unit testing purposes.
+    # @deprecated
    def stub_producers_and_consumers!
-      Deimos::TestHelpers.sent_messages.clear
-
-      allow(Deimos::Producer).to receive(:produce_batch) do |_, batch|
-        Deimos::TestHelpers.sent_messages.concat(batch.map(&:to_h))
-      end
-
-      Deimos::Producer.descendants.each do |klass|
-        next if klass == Deimos::ActiveRecordProducer # "abstract" class
-
-        stub_producer(klass)
-      end
-
-      Deimos::Consumer.descendants.each do |klass|
-        # TODO: remove this when ActiveRecordConsumer uses batching
-        next if klass == Deimos::ActiveRecordConsumer # "abstract" class
-
-        stub_consumer(klass)
-      end
-
-      Deimos::BatchConsumer.descendants.each do |klass|
-        next if klass == Deimos::ActiveRecordConsumer # "abstract" class
-
-        stub_batch_consumer(klass)
-      end
+      warn('stub_producers_and_consumers! is no longer necessary and this method will be removed in 3.0')
    end
 
-    # Stub a given producer class.
-    # @param klass [Class < Deimos::Producer]
-    def stub_producer(klass)
-      allow(klass).to receive(:encoder) do
-        create_encoder(klass.config[:schema], klass.config[:namespace])
-      end
-      allow(klass).to receive(:key_encoder) do
-        create_encoder(klass.config[:key_schema], klass.config[:namespace])
-      end
+    # @deprecated
+    def stub_producer(_klass)
+      warn('Stubbing producers is no longer necessary and this method will be removed in 3.0')
    end
 
-    # Stub a given consumer class.
-    # @param klass [Class < Deimos::Consumer]
-    def stub_consumer(klass)
-      _stub_base_consumer(klass)
-      klass.class_eval do
-        alias_method(:old_consume, :consume) unless self.instance_methods.include?(:old_consume)
-      end
-      allow_any_instance_of(klass).to receive(:consume) do |instance, payload, metadata|
-        metadata[:key] = klass.new.decode_key(metadata[:key]) if klass.config[:key_configured]
-        instance.old_consume(payload, metadata)
-      end
+    # @deprecated
+    def stub_consumer(_klass)
+      warn('Stubbing consumers is no longer necessary and this method will be removed in 3.0')
    end
 
-    # Stub a given batch consumer class.
-    # @param klass [Class < Deimos::BatchConsumer]
-    def stub_batch_consumer(klass)
-      _stub_base_consumer(klass)
+    # @deprecated
+    def stub_batch_consumer(_klass)
+      warn('Stubbing batch consumers is no longer necessary and this method will be removed in 3.0')
    end
 
    # get the difference of 2 hashes.
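With the encoder/decoder stubs gone, specs exercise real producer code against the :test backend and assert on what it recorded. A minimal sketch of the new flow (MyProducer, its topic, and its fields are hypothetical):

    # Hypothetical producer spec; the suite-level hooks above have already
    # selected the mock schema backend and the :test producer backend.
    RSpec.describe MyProducer do
      it 'records published messages in the test backend' do
        described_class.publish('test_id' => 'abc', 'some_int' => 3)

        # Messages land in memory instead of Kafka.
        expect(Deimos::Backends::Test.sent_messages.size).to eq(1)
        expect('my-topic').to have_sent('test_id' => 'abc', 'some_int' => 3)
      end
    end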
@@ -166,7 +81,7 @@ module Deimos
 
     # :nodoc:
     def _frk_failure_message(topic, message, key=nil, partition_key=nil, was_negated=false)
-      messages = Deimos::TestHelpers.sent_messages.
+      messages = Deimos::Backends::Test.sent_messages.
        select { |m| m[:topic] == topic }.
        map { |m| m.except(:topic) }
      message_string = ''
@@ -200,7 +115,7 @@ module Deimos
     msg
   end
   match do |topic|
-    Deimos::TestHelpers.sent_messages.any? do |m|
+    Deimos::Backends::Test.sent_messages.any? do |m|
      hash_matcher = RSpec::Matchers::BuiltIn::Match.new(message)
      hash_matcher.send(:match,
                        message,
@@ -231,7 +146,7 @@ module Deimos
     # Clear all sent messages - e.g. if we want to check that
     # particular messages were sent or not sent after a point in time.
     def clear_kafka_messages!
-      Deimos::TestHelpers.sent_messages.clear
+      Deimos::Backends::Test.sent_messages.clear
    end
 
    # test that a message was sent on the given topic.
@@ -241,14 +156,14 @@ module Deimos
     # @param key [String|Integer]
     # @return [Boolean]
     def was_message_sent?(message, topic, key=nil)
-      Deimos::TestHelpers.sent_messages.any? do |m|
+      Deimos::Backends::Test.sent_messages.any? do |m|
        message == m[:payload] && m[:topic] == topic &&
          (key.present? ? m[:key] == key : true)
      end
    end
 
    # Test that a given handler will consume a given payload correctly, i.e.
-    # that the Avro schema is correct. If
+    # that the schema is correct. If
    # a block is given, that block will be executed when `consume` is called.
    # Otherwise it will just confirm that `consume` is called at all.
    # @param handler_class_or_topic [Class|String] Class which inherits from
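For example (producer, topic, and payload hypothetical):

    MyProducer.publish('test_id' => 'id1')
    was_message_sent?({ 'test_id' => 'id1' }, 'my-topic') # => true

    clear_kafka_messages!
    was_message_sent?({ 'test_id' => 'id1' }, 'my-topic') # => false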
@@ -300,33 +215,18 @@ module Deimos
       ).send(:process_message, payload)
     end
 
-    # Check to see that a given message will fail due to Avro errors.
+    # Check to see that a given message will fail due to validation errors.
    # @param handler_class [Class]
    # @param payload [Hash]
    def test_consume_invalid_message(handler_class, payload)
-      handler = handler_class.new
-      allow(handler_class).to receive(:new).and_return(handler)
-      listener = double('listener',
-                        handler_class: handler_class,
-                        encoding: nil)
-      message = double('message',
-                       key: _key_from_consumer(handler_class),
-                       partition_key: nil,
-                       partition: 1,
-                       offset: 1,
-                       value: payload)
-
      expect {
-        Phobos::Actions::ProcessMessage.new(
-          listener: listener,
-          message: message,
-          listener_metadata: { topic: 'my-topic' }
-        ).send(:process_message, payload)
-      }.to raise_error(Avro::SchemaValidator::ValidationError)
+        handler_class.decoder.validate(payload,
+                                       schema: handler_class.decoder.schema)
+      }.to raise_error
    end
 
    # Test that a given handler will consume a given batch payload correctly,
-    # i.e. that the Avro schema is correct. If
+    # i.e. that the schema is correct. If
    # a block is given, that block will be executed when `consume` is called.
    # Otherwise it will just confirm that `consume` is called at all.
    # @param handler_class_or_topic [Class|String] Class which inherits from
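The rewritten helper validates directly against the consumer's schema backend instead of driving a full Phobos message cycle. In a spec it now reduces to a single call (the consumer class and bad field below are hypothetical):

    it 'rejects a payload that fails schema validation' do
      # MyConsumer's schema does not define 'bad_field', so validate raises.
      test_consume_invalid_message(MyConsumer, 'bad_field' => 'oops')
    end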
@@ -384,7 +284,7 @@ module Deimos
       action.send(:execute)
     end
 
-    # Check to see that a given message will fail due to Avro errors.
+    # Check to see that a given message will fail due to validation errors.
    # @param handler_class [Class]
    # @param payloads [Array<Hash>]
    def test_consume_batch_invalid_message(handler_class, payloads)
@@ -418,33 +318,21 @@ module Deimos
       allow(action).to receive(:handle_error) { |e| raise e }
 
       expect { action.send(:execute) }.
-        to raise_error(Avro::SchemaValidator::ValidationError)
-    end
-
-    # @param schema1 [String|Hash] a file path, JSON string, or
-    #   hash representing a schema.
-    # @param schema2 [String|Hash] a file path, JSON string, or
-    #   hash representing a schema.
-    # @return [Boolean] true if the schemas are compatible, false otherwise.
-    def self.schemas_compatible?(schema1, schema2)
-      json1, json2 = [schema1, schema2].map do |schema|
-        if schema.is_a?(String)
-          schema = File.read(schema) unless schema.strip.starts_with?('{') # file path
-          MultiJson.load(schema)
-        else
-          schema
-        end
-      end
-      avro_schema1 = Avro::Schema.real_parse(json1, {})
-      avro_schema2 = Avro::Schema.real_parse(json2, {})
-      Avro::SchemaCompatibility.mutual_read?(avro_schema1, avro_schema2)
+        to raise_error
    end
 
    private
 
    def _key_from_consumer(consumer)
-      if consumer.config[:key_field] || consumer.config[:key_schema]
-        { 'test' => 1 }
+      if consumer.config[:key_field]
+        { consumer.config[:key_field] => 1 }
+      elsif consumer.config[:key_schema]
+        backend = consumer.decoder
+        old_schema = backend.schema
+        backend.schema = consumer.config[:key_schema]
+        key = backend.schema_fields.map { |field| [field.name, 1] }.to_h
+        backend.schema = old_schema
+        key
      elsif consumer.config[:no_keys]
        nil
      else
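Concretely: a consumer whose key schema defines fields id and region (hypothetical names) now gets the stub key { 'id' => 1, 'region' => 1 } derived from its schema backend, rather than the old hard-coded { 'test' => 1 }.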
@@ -461,19 +349,5 @@ module Deimos
 
       handler.handler.constantize
     end
-
-    # Stub shared methods between consumers/batch consumers
-    # @param [Class < Deimos::BaseConsumer] klass Consumer class to stub
-    def _stub_base_consumer(klass)
-      allow(klass).to receive(:decoder) do
-        create_decoder(klass.config[:schema], klass.config[:namespace])
-      end
-
-      if klass.config[:key_schema] # rubocop:disable Style/GuardClause
-        allow(klass).to receive(:key_decoder) do
-          create_decoder(klass.config[:key_schema], klass.config[:namespace])
-        end
-      end
-    end
  end
 end
data/lib/deimos/utils/db_producer.rb CHANGED
@@ -93,6 +93,11 @@ module Deimos
         end
       end
       Deimos::KafkaMessage.where(id: messages.map(&:id)).delete_all
+      Deimos.config.metrics&.increment(
+        'db_producer.process',
+        tags: %W(topic:#{@current_topic}),
+        by: messages.size
+      )
      return false if batch_size < BATCH_SIZE
 
      KafkaTopicInfo.heartbeat(@current_topic, @id) # keep alive
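The counter rides on whatever metrics provider is configured; with none, the safe-navigation call is a no-op. A rough sketch of wiring up the bundled mock provider and what the emission amounts to (topic and count hypothetical):

    Deimos.configure do |config|
      # Swap for a real provider (e.g. Datadog) outside of tests.
      config.metrics = Deimos::Metrics::Mock.new
    end

    # Equivalent hand-rolled emission: one counter per processed batch,
    # tagged with the topic and incremented by the batch size.
    Deimos.config.metrics&.increment(
      'db_producer.process',
      tags: %W(topic:my-topic),
      by: 100
    )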
data/lib/deimos/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Deimos
-  VERSION = '1.4.0-beta7'
+  VERSION = '1.5.0-beta2'
 end
data/lib/deimos.rb CHANGED
@@ -1,12 +1,8 @@
 # frozen_string_literal: true
 
-require 'avro-patches'
-require 'avro_turf'
 require 'phobos'
 require 'deimos/version'
 require 'deimos/config/configuration'
-require 'deimos/avro_data_encoder'
-require 'deimos/avro_data_decoder'
 require 'deimos/producer'
 require 'deimos/active_record_producer'
 require 'deimos/active_record_consumer'
@@ -15,16 +11,19 @@ require 'deimos/batch_consumer'
 require 'deimos/instrumentation'
 require 'deimos/utils/lag_reporter'
 
-require 'deimos/publish_backend'
+require 'deimos/backends/base'
 require 'deimos/backends/kafka'
 require 'deimos/backends/kafka_async'
+require 'deimos/backends/test'
+
+require 'deimos/schema_backends/base'
 
 require 'deimos/monkey_patches/ruby_kafka_heartbeat'
-require 'deimos/monkey_patches/schema_store'
 require 'deimos/monkey_patches/phobos_producer'
 require 'deimos/monkey_patches/phobos_cli'
 
 require 'deimos/railtie' if defined?(Rails)
+
 if defined?(ActiveRecord)
   require 'deimos/kafka_source'
   require 'deimos/kafka_topic_info'
@@ -33,6 +32,7 @@ if defined?(ActiveRecord)
   require 'deimos/utils/executor.rb'
   require 'deimos/utils/db_producer.rb'
 end
+
 require 'deimos/utils/inline_consumer'
 require 'yaml'
 require 'erb'
@@ -40,6 +40,22 @@ require 'erb'
 # Parent module.
 module Deimos
   class << self
+    # @return [Class < Deimos::SchemaBackends::Base]
+    def schema_backend_class
+      backend = Deimos.config.schema.backend.to_s
+
+      require "deimos/schema_backends/#{backend}"
+
+      "Deimos::SchemaBackends::#{backend.classify}".constantize
+    end
+
+    # @param schema [String|Symbol]
+    # @param namespace [String]
+    # @return [Deimos::SchemaBackends::Base]
+    def schema_backend(schema:, namespace:)
+      schema_backend_class.new(schema: schema, namespace: namespace)
+    end
+
    # Start the DB producers to send Kafka messages.
    # @param thread_count [Integer] the number of threads to start.
    def start_db_backend!(thread_count: 1)
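This replaces the hard-wired Avro encoder/decoder pair with a pluggable lookup: the configured symbol (:avro_local, :avro_schema_registry, :avro_validation, or :mock, per the new schema_backends directory) is required and resolved into a backend class. A hedged usage sketch (schema name and namespace hypothetical):

    Deimos.configure do |config|
      config.schema.backend = :avro_local
    end

    # :avro_local => require 'deimos/schema_backends/avro_local'
    #             => Deimos::SchemaBackends::AvroLocal
    backend = Deimos.schema_backend(schema: 'MySchema', namespace: 'com.my-namespace')
    encoded = backend.encode('test_id' => 'abc', 'some_int' => 3)
    decoded = backend.decode(encoded)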
data/lib/tasks/deimos.rake CHANGED
@@ -7,7 +7,7 @@ namespace :deimos do
   desc 'Starts Deimos in the rails environment'
   task start: :environment do
     Deimos.configure do |config|
-      config.producers.backend = :kafka_sync if config.producers.backend == :kafka_async
+      config.producers.backend = :kafka if config.producers.backend == :kafka_async
    end
    ENV['DEIMOS_RAKE_TASK'] = 'true'
    STDOUT.sync = true
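The :kafka_sync symbol no longer exists; the synchronous backend is now plain :kafka. Configurations that referenced the old name should switch to one of the current backends:

    Deimos.configure do |config|
      # Producer backends in 1.5: :kafka, :kafka_async, :db, :test
      config.producers.backend = :kafka
    end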
data/spec/active_record_consumer_spec.rb CHANGED
@@ -119,5 +119,12 @@ module ActiveRecordProducerTest
       expect(Widget.find_by_test_id('id2').some_int).to eq(4)
     end
 
+    it 'should coerce int values to datetimes' do
+      column = Widget.columns.find { |c| c.name == 'some_datetime_int' }
+      expect(MyConsumer.new.send(:_coerce_field, column, 1_579_046_400)).to eq('2020-01-14 19:00:00 -0500')
+      expect(MyConsumer.new.send(:_coerce_field, column, '1579046400')).to eq('2020-01-14 19:00:00 -0500')
+      expect(MyConsumer.new.send(:_coerce_field, column, 'some-other-val')).to eq('some-other-val')
+    end
+
  end
 end
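The new expectations pin down the coercion rule: values bound for a datetime column that look like Unix timestamps (integers or digit-only strings) are converted, and everything else passes through. A behavioural sketch, not the gem's exact implementation (the printed times assume an EST zone, matching the spec):

    # Sketch of the rule the spec exercises.
    def coerce_datetime(value)
      if value.is_a?(Integer) || value.to_s.match?(/\A\d+\z/)
        Time.at(value.to_i) # 1_579_046_400 => 2020-01-14 19:00:00 -0500
      else
        value               # 'some-other-val' passes through unchanged
      end
    end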
data/spec/{publish_backend_spec.rb → backends/base_spec.rb} CHANGED
@@ -1,6 +1,6 @@
 # frozen_string_literal: true
 
-RSpec.describe Deimos::PublishBackend do
+RSpec.describe Deimos::Backends::Base do
   include_context 'with publish_backend'
   it 'should call execute' do
     expect(described_class).to receive(:execute).
data/spec/backends/db_spec.rb CHANGED
@@ -4,6 +4,11 @@ each_db_config(Deimos::Backends::Db) do
   include_context 'with publish_backend'
 
   it 'should save to the database' do
+    expect(Deimos.config.metrics).to receive(:increment).with(
+      'db_producer.insert',
+      tags: %w(topic:my-topic),
+      by: 3
+    )
    described_class.publish(producer_class: MyProducer, messages: messages)
    records = Deimos::KafkaMessage.all
    expect(records.size).to eq(3)
data/spec/batch_consumer_spec.rb CHANGED
@@ -80,11 +80,6 @@ module ConsumerTest
     end
 
     it 'should decode payloads for all messages in the batch' do
-      expect_any_instance_of(Deimos::AvroDataDecoder).
-        to receive(:decode).with(batch[0])
-      expect_any_instance_of(Deimos::AvroDataDecoder).
-        to receive(:decode).with(batch[1])
-
      test_consume_batch('my_batch_consume_topic', batch) do |received, _metadata|
        # Mock decoder simply returns the payload
        expect(received).to eq(batch)
@@ -110,7 +105,6 @@ module ConsumerTest
         key_config plain: true
       end
       stub_const('ConsumerTest::MyBatchConsumer', consumer_class)
-      stub_batch_consumer(consumer_class)
 
      test_consume_batch('my_batch_consume_topic', batch, keys: [1, 2]) do |_received, metadata|
        expect(metadata[:keys]).to eq([1, 2])
@@ -132,7 +126,6 @@ module ConsumerTest
         end
       end
       stub_const('ConsumerTest::MyBatchConsumer', consumer_class)
-      stub_batch_consumer(consumer_class)
      allow(Deimos.config.metrics).to receive(:histogram)
    end
 
@@ -215,7 +208,6 @@ module ConsumerTest
         end
       end
       stub_const('ConsumerTest::MyBatchConsumer', consumer_class)
-      stub_batch_consumer(consumer_class)
      allow(Deimos.config.metrics).to receive(:histogram)
    end
 
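All three setups drop the stub_batch_consumer call; test_consume_batch now drives the real consumer through the mock schema backend. A hypothetical spec body:

    it 'consumes a batch without any stubbing' do
      batch = [{ 'test_id' => 'id1' }, { 'test_id' => 'id2' }]
      test_consume_batch(MyBatchConsumer, batch) do |payloads, _metadata|
        expect(payloads).to eq(batch)
      end
    end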
data/spec/config/configuration_spec.rb CHANGED
@@ -5,7 +5,7 @@ end
 describe Deimos, 'configuration' do
   it 'should configure with deprecated fields' do
     logger = Logger.new(nil)
-    Deimos.configure do
+    described_class.configure do
      kafka_logger logger
      reraise_consumer_errors true
      schema_registry_url 'http://schema.registry'
@@ -22,25 +22,25 @@ describe Deimos, 'configuration' do
       report_lag true
     end
 
-    expect(Deimos.config.kafka.logger).to eq(logger)
-    expect(Deimos.config.consumers.reraise_errors).to eq(true)
-    expect(Deimos.config.schema.registry_url).to eq('http://schema.registry')
-    expect(Deimos.config.kafka.seed_brokers).to eq('whatever')
-    expect(Deimos.config.producers.schema_namespace).to eq('namespace')
-    expect(Deimos.config.producers.topic_prefix).to eq('prefix')
-    expect(Deimos.config.producers.disabled).to eq(true)
-    expect(Deimos.config.kafka.ssl.enabled).to eq(true)
-    expect(Deimos.config.kafka.ssl.ca_cert).to eq('cert')
-    expect(Deimos.config.kafka.ssl.client_cert).to eq('cert')
-    expect(Deimos.config.kafka.ssl.client_cert_key).to eq('key')
-    expect(Deimos.config.producers.backend).to eq('db')
-    expect(Deimos.config.consumers.report_lag).to eq(true)
+    expect(described_class.config.kafka.logger).to eq(logger)
+    expect(described_class.config.consumers.reraise_errors).to eq(true)
+    expect(described_class.config.schema.registry_url).to eq('http://schema.registry')
+    expect(described_class.config.kafka.seed_brokers).to eq('whatever')
+    expect(described_class.config.producers.schema_namespace).to eq('namespace')
+    expect(described_class.config.producers.topic_prefix).to eq('prefix')
+    expect(described_class.config.producers.disabled).to eq(true)
+    expect(described_class.config.kafka.ssl.enabled).to eq(true)
+    expect(described_class.config.kafka.ssl.ca_cert).to eq('cert')
+    expect(described_class.config.kafka.ssl.client_cert).to eq('cert')
+    expect(described_class.config.kafka.ssl.client_cert_key).to eq('key')
+    expect(described_class.config.producers.backend).to eq('db')
+    expect(described_class.config.consumers.report_lag).to eq(true)
  end
 
  it 'reads existing Phobos config YML files' do
-    Deimos.config.reset!
-    Deimos.configure { |c| c.phobos_config_file = File.join(File.dirname(__FILE__), '..', 'phobos.yml') }
-    expect(Deimos.config.phobos_config).to match(
+    described_class.config.reset!
+    described_class.configure { |c| c.phobos_config_file = File.join(File.dirname(__FILE__), '..', 'phobos.yml') }
+    expect(described_class.config.phobos_config).to match(
      logger: an_instance_of(Logger),
      backoff: { min_ms: 1000, max_ms: 60_000 },
      consumer: {
@@ -112,8 +112,8 @@ describe Deimos, 'configuration' do
   specify '#phobos_config' do
     logger1 = Logger.new(nil)
     logger2 = Logger.new(nil)
-    Deimos.config.reset!
-    Deimos.configure do
+    described_class.config.reset!
+    described_class.configure do
      phobos_logger logger1
      kafka do
        logger logger2
@@ -169,7 +169,7 @@ describe Deimos, 'configuration' do
       end
     end
 
-    expect(Deimos.config.phobos_config).
+    expect(described_class.config.phobos_config).
      to match(
        logger: an_instance_of(Logger),
        backoff: { min_ms: 5, max_ms: 10 },
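Beyond the described_class cleanup, the spec documents the mapping from deprecated flat settings to the nested config they now alias, e.g.:

    # Deprecated flat form...
    Deimos.configure do
      reraise_consumer_errors true
    end

    # ...is equivalent to the nested form it delegates to:
    Deimos.configure do |config|
      config.consumers.reraise_errors = true
    end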
data/spec/consumer_spec.rb CHANGED
@@ -150,7 +150,6 @@ module ConsumerTest
       end
     end
     stub_const('ConsumerTest::MyConsumer', consumer_class)
-    stub_consumer(consumer_class)
  end
 
  it 'should consume a message' do
data/spec/deimos_spec.rb CHANGED
@@ -67,10 +67,6 @@ describe Deimos do
   end
 
   describe '#start_db_backend!' do
-    before(:each) do
-      allow(described_class).to receive(:run_db_backend)
-    end
-
    it 'should start if backend is db and thread_count is > 0' do
      signal_handler = instance_double(Deimos::Utils::SignalHandler)
      allow(signal_handler).to receive(:run!)
data/spec/kafka_source_spec.rb CHANGED
@@ -89,6 +89,14 @@ module KafkaSourceSpec
       expect('my-topic-the-second').to have_sent(nil, 1)
     end
 
+    it 'should not call generate_payload but still publish a nil payload for deletion' do
+      widget = Widget.create!(widget_id: '808', name: 'delete_me!')
+      expect(Deimos::ActiveRecordProducer).not_to receive(:generate_payload)
+      widget.destroy
+      expect('my-topic').to have_sent(nil, widget.id)
+      expect('my-topic-the-second').to have_sent(nil, widget.id)
+    end
+
    it 'should send events on import' do
      widgets = (1..3).map do |i|
        Widget.new(widget_id: i, name: "Widget #{i}")
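The new example pins the deletion path: destroying a KafkaSource record publishes a nil-payload tombstone keyed by the record's id, without ever calling generate_payload. In application terms:

    widget = Widget.create!(widget_id: '808', name: 'delete_me!')
    widget.destroy
    # Each configured topic receives key: widget.id, payload: nil,
    # a tombstone; log-compacted topics eventually drop the record.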