deimos-ruby 1.24.3 → 2.0.0.pre.alpha1
- checksums.yaml +4 -4
- data/.rubocop_todo.yml +0 -17
- data/.tool-versions +1 -0
- data/CHANGELOG.md +1 -1
- data/README.md +287 -498
- data/deimos-ruby.gemspec +4 -4
- data/docs/CONFIGURATION.md +133 -227
- data/docs/UPGRADING.md +237 -0
- data/lib/deimos/active_record_consume/batch_consumption.rb +28 -29
- data/lib/deimos/active_record_consume/message_consumption.rb +15 -21
- data/lib/deimos/active_record_consumer.rb +36 -26
- data/lib/deimos/active_record_producer.rb +28 -9
- data/lib/deimos/backends/base.rb +4 -35
- data/lib/deimos/backends/kafka.rb +6 -22
- data/lib/deimos/backends/kafka_async.rb +6 -22
- data/lib/deimos/backends/{db.rb → outbox.rb} +13 -9
- data/lib/deimos/config/configuration.rb +116 -385
- data/lib/deimos/consume/batch_consumption.rb +24 -124
- data/lib/deimos/consume/message_consumption.rb +36 -63
- data/lib/deimos/consumer.rb +16 -75
- data/lib/deimos/ext/consumer_route.rb +35 -0
- data/lib/deimos/ext/producer_middleware.rb +94 -0
- data/lib/deimos/ext/producer_route.rb +22 -0
- data/lib/deimos/ext/redraw.rb +29 -0
- data/lib/deimos/ext/routing_defaults.rb +72 -0
- data/lib/deimos/ext/schema_route.rb +70 -0
- data/lib/deimos/kafka_message.rb +2 -2
- data/lib/deimos/kafka_source.rb +2 -7
- data/lib/deimos/kafka_topic_info.rb +1 -1
- data/lib/deimos/logging.rb +71 -0
- data/lib/deimos/message.rb +2 -11
- data/lib/deimos/metrics/datadog.rb +40 -1
- data/lib/deimos/metrics/provider.rb +4 -4
- data/lib/deimos/producer.rb +39 -116
- data/lib/deimos/railtie.rb +6 -0
- data/lib/deimos/schema_backends/avro_base.rb +21 -21
- data/lib/deimos/schema_backends/avro_schema_registry.rb +1 -2
- data/lib/deimos/schema_backends/avro_validation.rb +2 -2
- data/lib/deimos/schema_backends/base.rb +19 -12
- data/lib/deimos/schema_backends/mock.rb +6 -1
- data/lib/deimos/schema_backends/plain.rb +47 -0
- data/lib/deimos/schema_class/base.rb +2 -2
- data/lib/deimos/schema_class/enum.rb +1 -1
- data/lib/deimos/schema_class/record.rb +2 -2
- data/lib/deimos/test_helpers.rb +95 -320
- data/lib/deimos/tracing/provider.rb +6 -6
- data/lib/deimos/transcoder.rb +88 -0
- data/lib/deimos/utils/db_poller/base.rb +16 -14
- data/lib/deimos/utils/db_poller/state_based.rb +3 -3
- data/lib/deimos/utils/db_poller/time_based.rb +4 -4
- data/lib/deimos/utils/db_poller.rb +1 -1
- data/lib/deimos/utils/deadlock_retry.rb +1 -1
- data/lib/deimos/utils/{db_producer.rb → outbox_producer.rb} +16 -47
- data/lib/deimos/utils/schema_class.rb +0 -7
- data/lib/deimos/version.rb +1 -1
- data/lib/deimos.rb +79 -26
- data/lib/generators/deimos/{db_backend_generator.rb → outbox_backend_generator.rb} +4 -4
- data/lib/generators/deimos/schema_class_generator.rb +0 -1
- data/lib/generators/deimos/v2/templates/karafka.rb.tt +149 -0
- data/lib/generators/deimos/v2_generator.rb +193 -0
- data/lib/tasks/deimos.rake +5 -7
- data/spec/active_record_batch_consumer_association_spec.rb +22 -13
- data/spec/active_record_batch_consumer_spec.rb +84 -65
- data/spec/active_record_consume/batch_consumption_spec.rb +10 -10
- data/spec/active_record_consume/batch_slicer_spec.rb +12 -12
- data/spec/active_record_consumer_spec.rb +29 -13
- data/spec/active_record_producer_spec.rb +36 -26
- data/spec/backends/base_spec.rb +0 -23
- data/spec/backends/kafka_async_spec.rb +1 -3
- data/spec/backends/kafka_spec.rb +1 -3
- data/spec/backends/{db_spec.rb → outbox_spec.rb} +14 -20
- data/spec/batch_consumer_spec.rb +66 -116
- data/spec/consumer_spec.rb +53 -147
- data/spec/deimos_spec.rb +10 -126
- data/spec/kafka_source_spec.rb +19 -52
- data/spec/karafka/karafka.rb +69 -0
- data/spec/karafka_config/karafka_spec.rb +97 -0
- data/spec/logging_spec.rb +25 -0
- data/spec/message_spec.rb +9 -9
- data/spec/producer_spec.rb +112 -254
- data/spec/rake_spec.rb +1 -3
- data/spec/schema_backends/avro_validation_spec.rb +1 -1
- data/spec/schemas/com/my-namespace/MySchemaWithTitle.avsc +22 -0
- data/spec/snapshots/consumers-no-nest.snap +49 -0
- data/spec/snapshots/consumers.snap +49 -0
- data/spec/snapshots/consumers_and_producers-no-nest.snap +49 -0
- data/spec/snapshots/consumers_and_producers.snap +49 -0
- data/spec/snapshots/consumers_circular-no-nest.snap +49 -0
- data/spec/snapshots/consumers_circular.snap +49 -0
- data/spec/snapshots/consumers_complex_types-no-nest.snap +49 -0
- data/spec/snapshots/consumers_complex_types.snap +49 -0
- data/spec/snapshots/consumers_nested-no-nest.snap +49 -0
- data/spec/snapshots/consumers_nested.snap +49 -0
- data/spec/snapshots/namespace_folders.snap +49 -0
- data/spec/snapshots/namespace_map.snap +49 -0
- data/spec/snapshots/producers_with_key-no-nest.snap +49 -0
- data/spec/snapshots/producers_with_key.snap +49 -0
- data/spec/spec_helper.rb +61 -29
- data/spec/utils/db_poller_spec.rb +49 -39
- data/spec/utils/{db_producer_spec.rb → outbox_producer_spec.rb} +17 -184
- metadata +58 -67
- data/lib/deimos/batch_consumer.rb +0 -7
- data/lib/deimos/config/phobos_config.rb +0 -164
- data/lib/deimos/instrumentation.rb +0 -95
- data/lib/deimos/monkey_patches/phobos_cli.rb +0 -35
- data/lib/deimos/utils/inline_consumer.rb +0 -158
- data/lib/deimos/utils/lag_reporter.rb +0 -186
- data/lib/deimos/utils/schema_controller_mixin.rb +0 -129
- data/spec/config/configuration_spec.rb +0 -329
- data/spec/kafka_listener_spec.rb +0 -55
- data/spec/phobos.bad_db.yml +0 -73
- data/spec/phobos.yml +0 -77
- data/spec/utils/inline_consumer_spec.rb +0 -31
- data/spec/utils/lag_reporter_spec.rb +0 -76
- data/spec/utils/platform_schema_validation_spec.rb +0 -0
- data/spec/utils/schema_controller_mixin_spec.rb +0 -84
- /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/migration +0 -0
- /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/rails3_migration +0 -0
data/spec/spec_helper.rb
CHANGED
@@ -14,6 +14,7 @@ require 'handlers/my_batch_consumer'
 require 'handlers/my_consumer'
 require 'rspec/rails'
 require 'rspec/snapshot'
+require 'karafka/testing/rspec/helpers'
 require "trilogy_adapter/connection"
 ActiveRecord::Base.public_send :extend, TrilogyAdapter::Connection
 Dir['./spec/schemas/**/*.rb'].sort.each { |f| require f }
@@ -23,9 +24,33 @@ SCHEMA_CLASS_SETTINGS = { off: false, on: true }.freeze
 
 class DeimosApp < Rails::Application
 end
+DeimosApp.initializer("setup_root_dir", before: "karafka.require_karafka_boot_file") do
+  ENV['KARAFKA_ROOT_DIR'] = "#{Rails.root}/spec/karafka"
+end
 DeimosApp.initialize!
 
-
+module Helpers
+
+  def set_karafka_config(method, val)
+    Deimos.karafka_configs.each { |c| c.send(method.to_sym, val) }
+  end
+
+  def register_consumer(klass, schema, namespace='com.my-namespace', key_config:{none: true}, configs: {})
+    Karafka::App.routes.redraw do
+      topic 'my-topic' do
+        consumer klass
+        schema schema
+        namespace namespace
+        key_config key_config
+        configs.each do |k, v|
+          public_send(k, v)
+        end
+      end
+    end
+  end
+end
+
+# Helpers for Executor/OutboxProducer
 module TestRunners
   # Execute a block until it stops failing. This is helpful for testing threads
   # where we need to wait for them to continue but don't want to rely on
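A minimal usage sketch of the new register_consumer helper defined above (not part of the diff; MyConsumer is a hypothetical consumer class, and the call simply uses the helper's signature as shown in the hunk):

  RSpec.describe MyConsumer do
    before(:each) do
      # Wires MyConsumer onto 'my-topic' with the given schema, namespace and key config
      # via the Karafka routing DSL inside the helper.
      register_consumer(MyConsumer, 'MySchema', 'com.my-namespace', key_config: { none: true })
    end
  end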
@@ -73,11 +98,7 @@ module DbConfigs
   # @param topic [String]
   # @param key [String]
   def build_message(payload, topic, key)
-
-                  topic: topic, key: key)
-    message.encoded_payload = message.payload
-    message.encoded_key = message.key
-    message
+    { payload: payload, topic: topic, key: key}
   end
 
   DB_OPTIONS = [
@@ -85,12 +106,12 @@ module DbConfigs
       adapter: 'postgresql',
       port: 5432,
       username: 'postgres',
-      password: '
+      password: 'password',
       database: 'postgres',
       host: ENV['PG_HOST'] || 'localhost'
     },
     {
-      adapter: '
+      adapter: 'trilogy',
       port: 3306,
       username: 'root',
       database: 'test',
@@ -123,14 +144,14 @@ module DbConfigs
   end
 
   # :nodoc:
-  def 
-    migration_class_name = '
+  def run_outbox_backend_migration
+    migration_class_name = 'OutboxBackendMigration'
    migration_version = '[5.2]'
    migration = ERB.new(
-      File.read('lib/generators/deimos/
+      File.read('lib/generators/deimos/outbox_backend/templates/migration')
    ).result(binding)
    eval(migration) # rubocop:disable Security/Eval
-    ActiveRecord::Migration.new.run(
+    ActiveRecord::Migration.new.run(OutboxBackendMigration, direction: :up)
  end
 
  # :nodoc:
@@ -147,7 +168,7 @@ module DbConfigs
  # Set up the given database.
  def setup_db(options)
    ActiveRecord::Base.establish_connection(options)
-
+    run_outbox_backend_migration
    run_db_poller_migration
 
    ActiveRecord::Base.descendants.each do |klass|
@@ -163,7 +184,10 @@ end
 RSpec.configure do |config|
   config.extend(DbConfigs)
   include DbConfigs
+  config.include Karafka::Testing::RSpec::Helpers
+
   config.include TestRunners
+  config.include Helpers
   config.full_backtrace = true
 
   config.snapshot_dir = "spec/snapshots"
@@ -199,13 +223,11 @@ RSpec.configure do |config|
   config.before(:each) do
     Deimos.config.reset!
     Deimos.configure do |deimos_config|
-      deimos_config.producers.backend = :
+      deimos_config.producers.backend = :kafka
       deimos_config.schema.nest_child_schemas = true
-      deimos_config.phobos_config_file = File.join(File.dirname(__FILE__), 'phobos.yml')
       deimos_config.schema.path = File.join(File.expand_path(__dir__), 'schemas')
       deimos_config.consumers.reraise_errors = true
       deimos_config.schema.registry_url = ENV['SCHEMA_REGISTRY'] || 'http://localhost:8081'
-      deimos_config.kafka.seed_brokers = ENV['KAFKA_SEED_BROKER'] || 'localhost:9092'
       deimos_config.logger = Logger.new('/dev/null')
       deimos_config.logger.level = Logger::INFO
       deimos_config.schema.backend = :avro_validation
@@ -213,6 +235,10 @@ RSpec.configure do |config|
     end
   end
 
+  config.after(:each) do
+    Deimos::EVENT_TYPES.each { |type| Karafka.monitor.notifications_bus.clear(type) }
+  end
+
   config.around(:each) do |example|
     use_cleaner = !example.metadata[:integration]
 
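An illustrative sketch of why the after(:each) hook above is needed (not part of the diff; it reuses the 'deimos.outbox.produce' event name and the notifications-bus calls that appear in the outbox producer spec further down). A subscription made inside one example would otherwise leak into later examples, so the bus is cleared per event type:

  Karafka.monitor.subscribe('deimos.outbox.produce') do |event|
    # inspect the payload published by the outbox producer
    puts event.payload[:messages].inspect
  end
  # ... exercise code that publishes through the outbox backend ...
  Karafka.monitor.notifications_bus.clear('deimos.outbox.produce')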
@@ -262,26 +288,32 @@ end
 
 RSpec.shared_context('with publish_backend') do
   before(:each) do
-    producer_class = Class.new(Deimos::Producer)
-      schema 'MySchema'
-      namespace 'com.my-namespace'
-      topic 'my-topic'
-      key_config field: 'test_id'
-    end
+    producer_class = Class.new(Deimos::Producer)
     stub_const('MyProducer', producer_class)
 
-
-      schema 'MySchema'
-      namespace 'com.my-namespace'
-      topic 'my-topic'
-      key_config none: true
-    end
+    producer_class_no_key = Class.new(Deimos::Producer)
     stub_const('MyNoKeyProducer', producer_class)
+
+    Karafka::App.routes.redraw do
+      topic 'my-topic-no-key' do
+        schema 'MySchema'
+        namespace 'com.my-namespace'
+        key_config none: true
+        producer_class producer_class_no_key
+      end
+      topic 'my-topic' do
+        schema 'MySchema'
+        namespace 'com.my-namespace'
+        key_config field: 'test_id'
+        producer_class producer_class
+      end
+    end
+
   end
 
   let(:messages) do
     (1..3).map do |i|
-      build_message({ foo: i }, 'my-topic', "foo#{i}")
+      build_message({ test_id: "foo#{i}", some_int: i }, 'my-topic', "foo#{i}")
     end
   end
 end
data/spec/utils/db_poller_spec.rb
CHANGED
@@ -20,21 +20,27 @@ each_db_config(Deimos::Utils::DbPoller::Base) do
   describe '#start!' do
 
     before(:each) do
-      producer_class = Class.new(Deimos::Producer)
-        schema 'MySchema'
-        namespace 'com.my-namespace'
-        topic 'my-topic'
-        key_config field: 'test_id'
-      end
+      producer_class = Class.new(Deimos::Producer)
       stub_const('MyProducer', producer_class)
 
-      producer_class = Class.new(Deimos::Producer)
-        schema 'MySchemaWithId'
-        namespace 'com.my-namespace'
-        topic 'my-topic'
-        key_config plain: true
-      end
+      producer_class = Class.new(Deimos::Producer)
       stub_const('MyProducerWithID', producer_class)
+
+      Karafka::App.routes.redraw do
+        topic 'my-topic' do
+          schema 'MySchema'
+          namespace 'com.my-namespace'
+          key_config field: 'test_id'
+          producer_class MyProducer
+        end
+        topic 'my-topic-with-id' do
+          schema 'MySchemaWithId'
+          namespace 'com.my-namespace'
+          key_config plain: true
+          producer_class MyProducerWithID
+        end
+      end
+
     end
 
     it 'should raise an error if no pollers configured' do
@@ -76,12 +82,7 @@ each_db_config(Deimos::Utils::DbPoller::Base) do
    let(:config) { Deimos.config.db_poller_objects.first.dup }
 
    before(:each) do
-      Widget.delete_all
      producer_class = Class.new(Deimos::ActiveRecordProducer) do
-        schema 'MySchemaWithId'
-        namespace 'com.my-namespace'
-        topic 'my-topic-with-id'
-        key_config none: true
        record_class Widget
 
        # :nodoc:
@@ -91,6 +92,16 @@ each_db_config(Deimos::Utils::DbPoller::Base) do
        end
      end
      stub_const('MyProducer', producer_class)
+      Widget.delete_all
+      Karafka::App.routes.redraw do
+        topic 'my-topic-with-id' do
+          schema 'MySchemaWithId'
+          namespace 'com.my-namespace'
+          key_config none: true
+          producer_class MyProducer
+        end
+      end
+
      Deimos.configure do
        db_poller do
          producer_class 'MyProducer'
@@ -195,7 +206,9 @@ each_db_config(Deimos::Utils::DbPoller::Base) do
      before(:each) { config.skip_too_large_messages = true }
 
      it 'should skip and move on' do
-
+        rdkafka_error = instance_double(Rdkafka::RdkafkaError, code: :msg_size_too_large)
+        error = WaterDrop::Errors::ProduceManyError.new(nil, nil)
+        allow(error).to receive(:cause).and_return(rdkafka_error)
        allow(poller).to receive(:sleep)
        allow(poller).to receive(:process_batch) do
          raise error
@@ -331,7 +344,7 @@ each_db_config(Deimos::Utils::DbPoller::Base) do
      end
 
      it 'should send events across multiple batches' do
-        allow(Deimos
+        allow(Deimos::Logging).to receive(:log_info)
        allow(MyProducer).to receive(:poll_query).and_call_original
        expect(poller).to receive(:process_and_touch_info).ordered.
          with([widgets[0], widgets[1], widgets[2]], anything).and_call_original
@@ -376,7 +389,7 @@ each_db_config(Deimos::Utils::DbPoller::Base) do
                time_to: time_value(secs: 120), # yes this is weird but it's because of travel_to
                column_name: :updated_at,
                min_id: last_widget.id)
-        expect(Deimos
+        expect(Deimos::Logging).to have_received(:log_info).
          with('Poll MyProducer: ["my-topic-with-id"] complete at 2015-05-05 00:59:58 -0400 (3 batches, 0 errored batches, 7 processed messages)')
      end
 
@@ -398,7 +411,7 @@ each_db_config(Deimos::Utils::DbPoller::Base) do
    describe 'errors' do
      before(:each) do
        poller.config.retries = 0
-        allow(Deimos
+        allow(Deimos::Logging).to receive(:log_info)
      end
 
      after(:each) do
@@ -428,7 +441,7 @@ each_db_config(Deimos::Utils::DbPoller::Base) do
        info = Deimos::PollInfo.last
        expect(info.last_sent.in_time_zone).to eq(time_value(mins: -61, secs: 30))
        expect(info.last_sent_id).to eq(widgets[6].id)
-        expect(Deimos
+        expect(Deimos::Logging).to have_received(:log_info).
          with('Poll MyProducer: ["my-topic-with-id"] complete at 2015-05-05 00:59:58 -0400 (2 batches, 1 errored batches, 7 processed messages)')
      end
    end
@@ -449,10 +462,6 @@ each_db_config(Deimos::Utils::DbPoller::Base) do
    before(:each) do
      Widget.delete_all
      producer_class = Class.new(Deimos::ActiveRecordProducer) do
-        schema 'MySchemaWithId'
-        namespace 'com.my-namespace'
-        topic 'my-topic-with-id'
-        key_config none: true
        record_class Widget
 
        # :nodoc:
@@ -461,20 +470,22 @@ each_db_config(Deimos::Utils::DbPoller::Base) do
      end
    end
    stub_const('ProducerOne', producer_class)
+    stub_const('ProducerTwo', producer_class)
 
-
-
-
-
-
-
-
-
-
-
+    Karafka::App.routes.redraw do
+      topic 'my-topic-with-id' do
+        schema 'MySchemaWithId'
+        namespace 'com.my-namespace'
+        key_config none: true
+        producer_class ProducerOne
+      end
+      topic 'my-topic-with-id2' do
+        schema 'MySchemaWithId'
+        namespace 'com.my-namespace'
+        key_config none: true
+        producer_class ProducerTwo
      end
    end
-    stub_const('ProducerTwo', producer_class)
 
    poller_class = Class.new(Deimos::Utils::DbPoller::StateBased) do
      def self.producers
@@ -513,8 +524,7 @@ each_db_config(Deimos::Utils::DbPoller::Base) do
      expect(Deimos::Utils::DbPoller::MultiProducerPoller).to receive(:poll_query).at_least(:once)
      poller.process_updates
 
-      expect(ProducerOne).to have_received(:send_events).with(widgets)
-      expect(ProducerTwo).to have_received(:send_events).with(widgets)
+      expect(ProducerOne).to have_received(:send_events).twice.with(widgets)
      expect(widgets.map(&:reload).map(&:publish_status)).to eq(%w(PUBLISHED PUBLISHED PUBLISHED))
    end
 
data/spec/utils/{db_producer_spec.rb → outbox_producer_spec.rb}
RENAMED
@@ -1,23 +1,17 @@
 # frozen_string_literal: true
 
-each_db_config(Deimos::Utils::
+each_db_config(Deimos::Utils::OutboxProducer) do
   let(:producer) do
     producer = described_class.new(logger)
     allow(producer).to receive(:sleep)
-    allow(producer).to receive(:producer).and_return(phobos_producer)
     producer
   end
 
-  let(:logger) { nil }
-  let(:phobos_producer) do
-    pp = instance_double(Phobos::Producer::PublicAPI)
-    allow(pp).to receive(:publish_list)
-    pp
-  end
+  let(:logger) { instance_double(Logger, error: nil, info: nil, debug: nil )}
 
   before(:each) do
-    stub_const('Deimos::Utils::
-    stub_const('Deimos::Utils::
+    stub_const('Deimos::Utils::OutboxProducer::BATCH_SIZE', 2)
+    stub_const('Deimos::Utils::OutboxProducer::DELETE_BATCH_SIZE', 1)
   end
 
   specify '#process_next_messages' do
@@ -47,7 +41,7 @@ each_db_config(Deimos::Utils::DbProducer) do
          message: 'blah',
          key: "key#{i}")
      end
-      stub_const('Deimos::Utils::
+      stub_const('Deimos::Utils::OutboxProducer::BATCH_SIZE', 5)
      producer.current_topic = 'topic1'
      messages = producer.retrieve_messages
      expect(messages.size).to eq(3)
@@ -58,71 +52,8 @@ each_db_config(Deimos::Utils::DbProducer) do
 
    it 'should produce normally' do
      batch = ['A'] * 1000
-      expect(
-      expect(Deimos.config.metrics).to receive(:increment).with('publish',
-                                                                tags: %w(status:success topic:),
-                                                                by: 1000).once
-      producer.produce_messages(batch)
-    end
-
-    it 'should split the batch size on buffer overflow' do
-      class_producer = double(Phobos::Producer::ClassMethods::PublicAPI, # rubocop:disable RSpec/VerifiedDoubles
-                              sync_producer_shutdown: nil)
-      allow(producer.class).to receive(:producer).and_return(class_producer)
-      expect(class_producer).to receive(:sync_producer_shutdown).twice
-      count = 0
-      allow(phobos_producer).to receive(:publish_list) do
-        count += 1
-        raise Kafka::BufferOverflow if count < 3
-      end
-      allow(Deimos.config.metrics).to receive(:increment)
-      batch = ['A'] * 1000
+      expect(Karafka.producer).to receive(:produce_many_sync).with(batch).once
      producer.produce_messages(batch)
-      expect(phobos_producer).to have_received(:publish_list).with(batch)
-      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 100)
-      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 10).exactly(100).times
-      expect(Deimos.config.metrics).to have_received(:increment).with('publish',
-                                                                      tags: %w(status:success topic:),
-                                                                      by: 10).exactly(100).times
-    end
-
-    it "should raise an error if it can't split any more" do
-      allow(phobos_producer).to receive(:publish_list) do
-        raise Kafka::BufferOverflow
-      end
-      expect(Deimos.config.metrics).not_to receive(:increment)
-      batch = ['A'] * 1000
-      expect { producer.produce_messages(batch) }.to raise_error(Kafka::BufferOverflow)
-      expect(phobos_producer).to have_received(:publish_list).with(batch)
-      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 100).once
-      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 10).once
-      expect(phobos_producer).to have_received(:publish_list).with(['A']).once
-    end
-
-    it 'should not resend batches of sent messages' do
-      allow(phobos_producer).to receive(:publish_list) do |group|
-        raise Kafka::BufferOverflow if group.any?('A') && group.size >= 1000
-        raise Kafka::BufferOverflow if group.any?('BIG') && group.size >= 10
-      end
-      allow(Deimos.config.metrics).to receive(:increment)
-      batch = ['A'] * 450 + ['BIG'] * 550
-      producer.produce_messages(batch)
-
-      expect(phobos_producer).to have_received(:publish_list).with(batch)
-      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 100).exactly(4).times
-      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 50 + ['BIG'] * 50)
-      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 10).exactly(5).times
-      expect(phobos_producer).to have_received(:publish_list).with(['BIG'] * 1).exactly(550).times
-
-      expect(Deimos.config.metrics).to have_received(:increment).with('publish',
-                                                                      tags: %w(status:success topic:),
-                                                                      by: 100).exactly(4).times
-      expect(Deimos.config.metrics).to have_received(:increment).with('publish',
-                                                                      tags: %w(status:success topic:),
-                                                                      by: 10).exactly(5).times
-      expect(Deimos.config.metrics).to have_received(:increment).with('publish',
-                                                                      tags: %w(status:success topic:),
-                                                                      by: 1).exactly(550).times
    end
 
  describe '#compact_messages' do
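An illustrative sketch of the call the outbox producer now delegates to (not from the diff; topic, key and payload values are made up). Karafka.producer is a WaterDrop producer, and produce_many_sync takes an array of message hashes:

  Karafka.producer.produce_many_sync(
    [
      { topic: 'my-topic', payload: '{"test_id":"foo1","some_int":1}', key: 'foo1' },
      { topic: 'my-topic', payload: '{"test_id":"foo2","some_int":2}', key: 'foo2' }
    ]
  )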
@@ -149,17 +80,17 @@ each_db_config(Deimos::Utils::DbProducer) do
    let(:deduped_batch) { batch[1..2] }
 
    it 'should dedupe messages when :all is set' do
-      Deimos.configure { |c| c.
+      Deimos.configure { |c| c.outbox.compact_topics = :all }
      expect(producer.compact_messages(batch)).to eq(deduped_batch)
    end
 
    it 'should dedupe messages when topic is included' do
-      Deimos.configure { |c| c.
+      Deimos.configure { |c| c.outbox.compact_topics = %w(my-topic my-topic2) }
      expect(producer.compact_messages(batch)).to eq(deduped_batch)
    end
 
    it 'should not dedupe messages when topic is not included' do
-      Deimos.configure { |c| c.
+      Deimos.configure { |c| c.outbox.compact_topics = %w(my-topic3 my-topic2) }
      expect(producer.compact_messages(batch)).to eq(batch)
    end
 
@@ -176,13 +107,13 @@ each_db_config(Deimos::Utils::DbProducer) do
          message: 'BBB'
        }
      ].map { |h| Deimos::KafkaMessage.create!(h) }
-      Deimos.configure { |c| c.
+      Deimos.configure { |c| c.outbox.compact_topics = :all }
      expect(producer.compact_messages(unkeyed_batch)).to eq(unkeyed_batch)
-      Deimos.configure { |c| c.
+      Deimos.configure { |c| c.outbox.compact_topics = [] }
    end
 
    it 'should compact messages when all messages are unique' do
-      Deimos.configure { |c| c.
+      Deimos.configure { |c| c.outbox.compact_topics = %w(my-topic my-topic2) }
      expect(producer.compact_messages(deduped_batch)).to eq(deduped_batch)
    end
  end
@@ -228,7 +159,7 @@ each_db_config(Deimos::Utils::DbProducer) do
                             }
                           ])
      expect(Deimos.config.metrics).to receive(:increment).ordered.with(
-        '
+        'outbox.process',
        tags: %w(topic:my-topic),
        by: 2
      )
@@ -249,7 +180,7 @@ each_db_config(Deimos::Utils::DbProducer) do
                             }
                           ])
      expect(Deimos.config.metrics).to receive(:increment).ordered.with(
-        '
+        'outbox.process',
        tags: %w(topic:my-topic),
        by: 2
      )
@@ -263,13 +194,11 @@ each_db_config(Deimos::Utils::DbProducer) do
    end
 
    it 'should register an error if it gets an error' do
-      allow(producer).to receive(:shutdown_producer)
      expect(producer).to receive(:retrieve_messages).and_raise('OH NOES')
      expect(Deimos::KafkaTopicInfo).to receive(:register_error).
        with('my-topic', 'abc')
      expect(producer).not_to receive(:produce_messages)
      producer.process_topic('my-topic')
-      expect(producer).to have_received(:shutdown_producer)
    end
 
    it 'should move on if it gets a partial batch' do
@@ -299,14 +228,14 @@ each_db_config(Deimos::Utils::DbProducer) do
      expect(Deimos::KafkaTopicInfo).to receive(:register_error)
 
      expect(Deimos::KafkaMessage.count).to eq(4)
-
+      Karafka.monitor.subscribe('deimos.outbox.produce') do |event|
        expect(event.payload[:exception_object].message).to eq('OH NOES')
        expect(event.payload[:messages]).to eq(messages)
      end
      producer.process_topic('my-topic')
      # don't delete for regular errors
      expect(Deimos::KafkaMessage.count).to eq(4)
-
+      Karafka.monitor.notifications_bus.clear('deimos.outbox.produce')
    end
 
    it 'should retry deletes and not re-publish' do
@@ -340,7 +269,7 @@ each_db_config(Deimos::Utils::DbProducer) do
        with('my-topic', 'abc').and_return(true)
      expect(producer).to receive(:retrieve_messages).ordered.and_return(messages)
      expect(producer).to receive(:retrieve_messages).ordered.and_return([])
-      expect(
+      expect(Karafka.producer).to receive(:produce_many_sync).once.with(messages.map(&:karafka_message))
 
      expect(Deimos::KafkaMessage.count).to eq(8)
      producer.process_topic('my-topic')
@@ -360,102 +289,6 @@ each_db_config(Deimos::Utils::DbProducer) do
      expect { producer.delete_messages(messages) }.to raise_exception('OH NOES')
    end
 
-    context 'with buffer overflow exception' do
-      let(:messages) do
-        (1..4).map do |i|
-          Deimos::KafkaMessage.create!(
-            id: i,
-            key: i,
-            topic: 'my-topic',
-            message: { message: "mess#{i}" },
-            partition_key: "key#{i}"
-          )
-        end
-      end
-      let(:logger) do
-        logger = instance_double(Logger)
-        allow(logger).to receive(:error)
-        logger
-      end
-      let(:message_producer) do
-        Deimos.config.schema.backend = :mock
-        Deimos::ActiveRecordProducer.topic('my-topic')
-        Deimos::ActiveRecordProducer.key_config
-        Deimos::ActiveRecordProducer
-      end
-
-      around(:each) do |example|
-        config = Deimos::ActiveRecordProducer.config.clone
-        backend = Deimos.config.schema.backend
-
-        example.run
-      ensure
-        Deimos::ActiveRecordProducer.instance_variable_set(:@config, config)
-        Deimos.config.schema.backend = backend
-      end
-
-      before(:each) do
-        message_producer
-        (5..8).each do |i|
-          Deimos::KafkaMessage.create!(
-            id: i,
-            topic: 'my-topic2',
-            message: "mess#{i}",
-            partition_key: "key#{i}"
-          )
-        end
-        allow(Deimos::KafkaTopicInfo).to receive(:lock).
-          with('my-topic', 'abc').and_return(true)
-        allow(producer).to receive(:produce_messages).and_raise(Kafka::BufferOverflow)
-        allow(producer).to receive(:retrieve_messages).and_return(messages)
-        allow(Deimos::KafkaTopicInfo).to receive(:register_error)
-      end
-
-      it 'should delete messages on buffer overflow' do
-        expect(Deimos::KafkaMessage.count).to eq(8)
-        producer.process_topic('my-topic')
-        expect(Deimos::KafkaMessage.count).to eq(4)
-      end
-
-      it 'should notify on buffer overflow' do
-        subscriber = Deimos.subscribe('db_producer.produce') do |event|
-          expect(event.payload[:exception_object].message).to eq('Kafka::BufferOverflow')
-          expect(event.payload[:messages]).to eq(messages)
-        end
-        producer.process_topic('my-topic')
-        Deimos.unsubscribe(subscriber)
-        expect(logger).to have_received(:error).with('Message batch too large, deleting...')
-        expect(logger).to have_received(:error).with(
-          [
-            { key: '1', payload: 'payload-decoded' },
-            { key: '2', payload: 'payload-decoded' },
-            { key: '3', payload: 'payload-decoded' },
-            { key: '4', payload: 'payload-decoded' }
-          ]
-        )
-      end
-
-      context 'with exception on error logging attempt' do
-        let(:message_producer) do
-          Deimos::ActiveRecordProducer.topic('my-topic')
-          Deimos::ActiveRecordProducer
-        end
-
-        it 'should notify on buffer overflow disregarding decoding exception' do
-          subscriber = Deimos.subscribe('db_producer.produce') do |event|
-            expect(event.payload[:exception_object].message).to eq('Kafka::BufferOverflow')
-            expect(event.payload[:messages]).to eq(messages)
-          end
-          producer.process_topic('my-topic')
-          Deimos.unsubscribe(subscriber)
-          expect(logger).to have_received(:error).with('Message batch too large, deleting...')
-          expect(logger).to have_received(:error).with(
-            'Large message details logging failure: '\
-            'No key config given - if you are not decoding keys, please use `key_config plain: true`'
-          )
-        end
-      end
-    end
  end
 
  describe '#send_pending_metrics' do