deimos-ruby 1.24.3 → 2.0.0.pre.alpha1
- checksums.yaml +4 -4
- data/.rubocop_todo.yml +0 -17
- data/.tool-versions +1 -0
- data/CHANGELOG.md +1 -1
- data/README.md +287 -498
- data/deimos-ruby.gemspec +4 -4
- data/docs/CONFIGURATION.md +133 -227
- data/docs/UPGRADING.md +237 -0
- data/lib/deimos/active_record_consume/batch_consumption.rb +28 -29
- data/lib/deimos/active_record_consume/message_consumption.rb +15 -21
- data/lib/deimos/active_record_consumer.rb +36 -26
- data/lib/deimos/active_record_producer.rb +28 -9
- data/lib/deimos/backends/base.rb +4 -35
- data/lib/deimos/backends/kafka.rb +6 -22
- data/lib/deimos/backends/kafka_async.rb +6 -22
- data/lib/deimos/backends/{db.rb → outbox.rb} +13 -9
- data/lib/deimos/config/configuration.rb +116 -385
- data/lib/deimos/consume/batch_consumption.rb +24 -124
- data/lib/deimos/consume/message_consumption.rb +36 -63
- data/lib/deimos/consumer.rb +16 -75
- data/lib/deimos/ext/consumer_route.rb +35 -0
- data/lib/deimos/ext/producer_middleware.rb +94 -0
- data/lib/deimos/ext/producer_route.rb +22 -0
- data/lib/deimos/ext/redraw.rb +29 -0
- data/lib/deimos/ext/routing_defaults.rb +72 -0
- data/lib/deimos/ext/schema_route.rb +70 -0
- data/lib/deimos/kafka_message.rb +2 -2
- data/lib/deimos/kafka_source.rb +2 -7
- data/lib/deimos/kafka_topic_info.rb +1 -1
- data/lib/deimos/logging.rb +71 -0
- data/lib/deimos/message.rb +2 -11
- data/lib/deimos/metrics/datadog.rb +40 -1
- data/lib/deimos/metrics/provider.rb +4 -4
- data/lib/deimos/producer.rb +39 -116
- data/lib/deimos/railtie.rb +6 -0
- data/lib/deimos/schema_backends/avro_base.rb +21 -21
- data/lib/deimos/schema_backends/avro_schema_registry.rb +1 -2
- data/lib/deimos/schema_backends/avro_validation.rb +2 -2
- data/lib/deimos/schema_backends/base.rb +19 -12
- data/lib/deimos/schema_backends/mock.rb +6 -1
- data/lib/deimos/schema_backends/plain.rb +47 -0
- data/lib/deimos/schema_class/base.rb +2 -2
- data/lib/deimos/schema_class/enum.rb +1 -1
- data/lib/deimos/schema_class/record.rb +2 -2
- data/lib/deimos/test_helpers.rb +95 -320
- data/lib/deimos/tracing/provider.rb +6 -6
- data/lib/deimos/transcoder.rb +88 -0
- data/lib/deimos/utils/db_poller/base.rb +16 -14
- data/lib/deimos/utils/db_poller/state_based.rb +3 -3
- data/lib/deimos/utils/db_poller/time_based.rb +4 -4
- data/lib/deimos/utils/db_poller.rb +1 -1
- data/lib/deimos/utils/deadlock_retry.rb +1 -1
- data/lib/deimos/utils/{db_producer.rb → outbox_producer.rb} +16 -47
- data/lib/deimos/utils/schema_class.rb +0 -7
- data/lib/deimos/version.rb +1 -1
- data/lib/deimos.rb +79 -26
- data/lib/generators/deimos/{db_backend_generator.rb → outbox_backend_generator.rb} +4 -4
- data/lib/generators/deimos/schema_class_generator.rb +0 -1
- data/lib/generators/deimos/v2/templates/karafka.rb.tt +149 -0
- data/lib/generators/deimos/v2_generator.rb +193 -0
- data/lib/tasks/deimos.rake +5 -7
- data/spec/active_record_batch_consumer_association_spec.rb +22 -13
- data/spec/active_record_batch_consumer_spec.rb +84 -65
- data/spec/active_record_consume/batch_consumption_spec.rb +10 -10
- data/spec/active_record_consume/batch_slicer_spec.rb +12 -12
- data/spec/active_record_consumer_spec.rb +29 -13
- data/spec/active_record_producer_spec.rb +36 -26
- data/spec/backends/base_spec.rb +0 -23
- data/spec/backends/kafka_async_spec.rb +1 -3
- data/spec/backends/kafka_spec.rb +1 -3
- data/spec/backends/{db_spec.rb → outbox_spec.rb} +14 -20
- data/spec/batch_consumer_spec.rb +66 -116
- data/spec/consumer_spec.rb +53 -147
- data/spec/deimos_spec.rb +10 -126
- data/spec/kafka_source_spec.rb +19 -52
- data/spec/karafka/karafka.rb +69 -0
- data/spec/karafka_config/karafka_spec.rb +97 -0
- data/spec/logging_spec.rb +25 -0
- data/spec/message_spec.rb +9 -9
- data/spec/producer_spec.rb +112 -254
- data/spec/rake_spec.rb +1 -3
- data/spec/schema_backends/avro_validation_spec.rb +1 -1
- data/spec/schemas/com/my-namespace/MySchemaWithTitle.avsc +22 -0
- data/spec/snapshots/consumers-no-nest.snap +49 -0
- data/spec/snapshots/consumers.snap +49 -0
- data/spec/snapshots/consumers_and_producers-no-nest.snap +49 -0
- data/spec/snapshots/consumers_and_producers.snap +49 -0
- data/spec/snapshots/consumers_circular-no-nest.snap +49 -0
- data/spec/snapshots/consumers_circular.snap +49 -0
- data/spec/snapshots/consumers_complex_types-no-nest.snap +49 -0
- data/spec/snapshots/consumers_complex_types.snap +49 -0
- data/spec/snapshots/consumers_nested-no-nest.snap +49 -0
- data/spec/snapshots/consumers_nested.snap +49 -0
- data/spec/snapshots/namespace_folders.snap +49 -0
- data/spec/snapshots/namespace_map.snap +49 -0
- data/spec/snapshots/producers_with_key-no-nest.snap +49 -0
- data/spec/snapshots/producers_with_key.snap +49 -0
- data/spec/spec_helper.rb +61 -29
- data/spec/utils/db_poller_spec.rb +49 -39
- data/spec/utils/{db_producer_spec.rb → outbox_producer_spec.rb} +17 -184
- metadata +58 -67
- data/lib/deimos/batch_consumer.rb +0 -7
- data/lib/deimos/config/phobos_config.rb +0 -164
- data/lib/deimos/instrumentation.rb +0 -95
- data/lib/deimos/monkey_patches/phobos_cli.rb +0 -35
- data/lib/deimos/utils/inline_consumer.rb +0 -158
- data/lib/deimos/utils/lag_reporter.rb +0 -186
- data/lib/deimos/utils/schema_controller_mixin.rb +0 -129
- data/spec/config/configuration_spec.rb +0 -329
- data/spec/kafka_listener_spec.rb +0 -55
- data/spec/phobos.bad_db.yml +0 -73
- data/spec/phobos.yml +0 -77
- data/spec/utils/inline_consumer_spec.rb +0 -31
- data/spec/utils/lag_reporter_spec.rb +0 -76
- data/spec/utils/platform_schema_validation_spec.rb +0 -0
- data/spec/utils/schema_controller_mixin_spec.rb +0 -84
- /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/migration +0 -0
- /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/rails3_migration +0 -0
data/lib/generators/deimos/v2_generator.rb
ADDED
@@ -0,0 +1,193 @@
+# frozen_string_literal: true
+
+# THINGS TO REMEMBER
+# logger
+# fatal_error
+# bulk_import_id_generator
+
+require 'rails/generators'
+require 'rails/version'
+
+# Generates a new consumer.
+module Deimos
+  module Generators
+    # Generator for ActiveRecord model and migration.
+    class V2Generator < Rails::Generators::Base
+
+      class ProcString < String
+        def inspect
+          self.to_s
+        end
+      end
+
+      source_root File.expand_path('v2/templates', __dir__)
+
+      no_commands do
+        def deimos_config
+          Deimos.config
+        end
+
+        def deimos_configs
+          configs = {
+            producers: %i(topic_prefix disabled backend),
+            schema: %i(backend registry_url user password path generated_class_path use_schema_classes
+                       nest_child_schemas use_full_namespace schema_namespace_map),
+            db_producer: %i(log_topics compact_topics),
+          }
+
+          response = {}
+          configs.each do |group, settings|
+            group_setting = deimos_config.send(group)
+            next if settings.all? { |s| group_setting.default_value?(s)}
+
+            response[group] = {}
+            settings.each do |field|
+              unless group_setting.default_value?(field.to_sym)
+                response[group][field.to_s] = group_setting.send(field.to_sym)
+              end
+            end
+          end
+          response
+        end
+
+        def setup_configs
+          configs = {}
+          configs[:client_id] = if deimos_config.kafka.client_id && deimos_config.kafka.client_id != 'phobos'
+                                  deimos_config.kafka.client_id
+                                else
+                                  Rails::Application.subclasses.first&.name&.gsub('::Application', '')&.underscore
+                                end
+          if deimos_config.consumer_objects.any? { |c| c.max_concurrency.present? }
+            configs[:concurrency] = deimos_config.consumer_objects.map(&:max_concurrency).compact.max
+          end
+          if deimos_config.consumer_objects.any? { |c| c.max_wait_time.present? }
+            configs[:max_wait_time] = deimos_config.consumer_objects.map(&:max_wait_time).compact.max
+          end
+          configs.compact
+        end
+
+        def default_kafka_configs
+          configs = {}
+          configs["bootstrap.servers"] = deimos_config.kafka.seed_brokers.join(',')
+          configs["socket.connection.setup.timeout.ms"] = deimos_config.kafka.connect_timeout * 1000
+          configs["socket.timeout.ms"] = deimos_config.kafka.socket_timeout * 1000
+          configs["security.protocol"] = if deimos_config.kafka.ssl.enabled
+                                           "ssl"
+                                         elsif deimos_config.kafka.sasl.enabled
+                                           if deimos_config.kafka.sasl.enforce_ssl
+                                             "sasl_ssl"
+                                           else
+                                             "sasl_plain"
+                                           end
+                                         end
+          configs["ssl.ca.pem"] = deimos_config.kafka.ssl.ca_cert
+          configs["ssl.certificate.pem"] = deimos_config.kafka.ssl.client_cert
+          configs["ssl.key.pem"] = deimos_config.kafka.ssl.client_cert_key
+          configs["ssl.endpoint.identification.algorithm"] = "https" if deimos_config.kafka.ssl.verify_hostname
+          configs["sasl.kerberos.principal"] = deimos_config.kafka.sasl.gssapi_principal
+          configs["sasl.kerberos.keytab"] = deimos_config.kafka.sasl.gssapi_keytab
+          configs["sasl.username"] = deimos_config.kafka.sasl.plain_username || deimos_config.kafka.sasl.scram_username
+          configs["sasl.password"] = deimos_config.kafka.sasl.plain_password || deimos_config.kafka.sasl.scram_password
+          configs["sasl.mechanisms"] = deimos_config.kafka.sasl.scram_mechanism
+          configs["request.required.acks"] = deimos_config.producers.required_acks
+          configs["message.send.max.retries"] = deimos_config.producers.max_retries
+          configs["retry.backoff.ms"] = deimos_config.producers.retry_backoff * 1000 if deimos_config.producers.retry_backoff
+          configs["compression.codec"] = deimos_config.producers.compression_codec
+          configs.compact
+        end
+
+        def default_configs
+          {
+            payload_log: deimos_config.payload_log,
+            reraise_errors: deimos_config.consumers.reraise_errors,
+            replace_associations: deimos_config.consumers.replace_associations,
+            namespace: deimos_config.producers.schema_namespace,
+            use_schema_classes: deimos_config.schema.use_schema_classes
+          }.compact
+        end
+
+        def consumer_configs
+          deimos_config.consumer_objects.group_by(&:group_id).map do |group_id, consumers|
+            [group_id, consumers.map do |consumer|
+              kafka_configs = {}
+              kafka_configs["auto.offset.reset"] = consumer.start_from_beginning ? 'earliest' : 'latest'
+              kafka_configs["session.timeout.ms"] = consumer.session_timeout * 1000 unless consumer.default_value?(:session_timeout)
+              kafka_configs["auto.commit.interval.ms"] = consumer.offset_commit_interval * 1000 unless consumer.default_value?(:offset_commit_interval)
+              kafka_configs["heartbeat.interval.ms"] = consumer.heartbeat_interval * 1000 unless consumer.default_value?(:heartbeat_interval)
+              configs = {
+                kafka: kafka_configs.compact,
+                topic: consumer.topic,
+                consumer: ProcString.new(consumer.class_name),
+                schema: consumer.schema,
+                namespace: consumer.namespace,
+                key_config: consumer.key_config,
+              }
+              configs[:use_schema_classes] = consumer.use_schema_classes unless consumer.default_value?(:use_schema_classes)
+              configs[:max_db_batch_size] = consumer.max_db_batch_size unless consumer.default_value?(:max_db_batch_size)
+              configs[:bulk_import_id_column] = consumer.bulk_import_id_column unless consumer.default_value?(:bulk_import_id_column)
+              configs[:replace_associations] = consumer.replace_associations unless consumer.default_value?(:replace_associations)
+              configs[:active] = false if consumer.disabled
+              configs[:each_message] = true unless consumer.delivery.to_s == 'inline_batch'
+              configs
+            end]
+          end.to_h
+        end
+
+        def producer_configs
+          deimos_config.producer_objects.map do |producer|
+            {
+              topic: producer.topic,
+              producer_class: ProcString.new(producer.class_name),
+              schema: producer.schema,
+              namespace: producer.namespace || deimos_config.producers.schema_namespace,
+              key_config: producer.key_config,
+              use_schema_classes: producer.use_schema_classes
+            }.compact
+          end
+        end
+
+        def rename_consumer_methods
+          deimos_config.consumer_objects.each do |consumer|
+            consumer.class_name.constantize
+            file = Object.const_source_location(consumer.class_name)[0]
+            if file.to_s.include?(Rails.root.to_s)
+              gsub_file(file, /([\t ]+)def consume\((\w+)(, *(\w+)?)\)/,
+                        "\\1def consume_message(message)\n\\1 \\2 = message.payload\n\\1 \\4 = message.metadata")
+              gsub_file(file, /([\t ]+)def consume_batch\((\w+)(, *(\w+)?)\)/,
+                        "\\1def consume_batch\n\\1 \\2 = messages.payloads\n\\1 \\4 = messages.metadata")
+              gsub_file(file, /def record_attributes\((\w+)\)/,
+                        "def record_attributes(\\1, key)")
+            end
+          end
+        end
+
+        def fix_specs
+          Dir["*/**/*_spec.rb"].each do |file|
+            gsub_file(file, /,\s*call_original: true/, "")
+            gsub_file(file, 'Deimos::Backends::Test.sent_messages', "Deimos::TestHelpers.sent_messages")
+          end
+        end
+
+        def process_all_files
+          template('karafka.rb.tt', "karafka.rb", force: true)
+          rename_consumer_methods
+          fix_specs
+        end
+
+      end
+
+      desc 'Generate and update app files for version 2.0'
+      # @return [void]
+      def generate
+        process_all_files
+        say "Generation complete! You are safe to remove the existing initializer that configures Deimos.", :green
+        say "Note: The following settings cannot be determined by the generator:", :yellow
+        say "* logger / phobos_logger (dynamic object, cannot be printed out)", :yellow
+        say "* kafka.sasl.oauth_token_provider", :yellow
+        say "* producers.max_buffer_size", :yellow
+        say "* consumer.backoff (only handles minimum, not maximum)", :yellow
+        say "For more information, see https://github.com/flipp-oss/deimos/blob/master/docs/UPGRADING.md", :yellow
+      end
+    end
+  end
+end
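
The rename_consumer_methods rewrite above is easier to read as a before/after pair. The sketch below is illustrative only (variable names and body are placeholders); it shows what the first gsub_file regex and its replacement string do to an application consumer:

    # 1.x signature matched by the regex:
    def consume(payload, metadata)
      # handle one decoded payload
    end

    # 2.x form produced by the replacement string: a single Karafka message
    # argument, with payload and metadata read off it.
    def consume_message(message)
      payload = message.payload
      metadata = message.metadata
      # handle one decoded payload
    end
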
data/lib/tasks/deimos.rake
CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require 'phobos'
-require 'phobos/cli'
 require 'generators/deimos/schema_class_generator'
 require 'optparse'
 
@@ -15,17 +13,17 @@ namespace :deimos do
     ENV['DEIMOS_TASK_NAME'] = 'consumer'
     STDOUT.sync = true
     Rails.logger.info('Running deimos:start rake task.')
-
+    Karafka::Server.run
   end
 
   desc 'Starts the Deimos database producer'
-  task
+  task outbox: :environment do
     ENV['DEIMOS_RAKE_TASK'] = 'true'
-    ENV['DEIMOS_TASK_NAME'] = '
+    ENV['DEIMOS_TASK_NAME'] = 'outbox'
     STDOUT.sync = true
-    Rails.logger.info('Running deimos:
+    Rails.logger.info('Running deimos:outbox rake task.')
    thread_count = ENV['THREAD_COUNT'].to_i.zero? ? 1 : ENV['THREAD_COUNT'].to_i
-    Deimos.
+    Deimos.start_outbox_backend!(thread_count: thread_count)
   end
 
   task db_poller: :environment do
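
With this rename, the outbox loop is started with rake deimos:outbox (THREAD_COUNT still selects the number of threads) instead of the old database-producer task, and deimos:start now boots Karafka::Server directly. As a hedged sketch of the producer-side setting that pairs with the task: the :outbox backend name is inferred from the db.rb → outbox.rb rename in the file list above and is not shown in this hunk:

    # assumption: the backend formerly configured as :db is now :outbox
    Deimos.configure do
      producers.backend :outbox
    end
    # then drain queued messages with: bundle exec rake deimos:outbox THREAD_COUNT=2
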
data/spec/active_record_batch_consumer_association_spec.rb
CHANGED
@@ -74,9 +74,21 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
   end
 
   prepend_before(:each) do
-    consumer_class.config[:bulk_import_id_column] = :bulk_import_id
     stub_const('MyBatchConsumer', consumer_class)
     stub_const('ConsumerTest::MyBatchConsumer', consumer_class)
+    klass = consumer_class
+    col = bulk_import_id_column
+    rep = replace_associations
+    Karafka::App.routes.redraw do
+      topic 'my-topic' do
+        consumer klass
+        schema 'MySchemaWithTitle'
+        namespace 'com.my-namespace'
+        key_config plain: true
+        bulk_import_id_column col
+        replace_associations rep
+      end
+    end
   end
 
   # Helper to publish a list of messages and call the consumer
@@ -84,16 +96,16 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
     keys = messages.map { |m| m[:key] }
     payloads = messages.map { |m| m[:payload] }
 
-    test_consume_batch(MyBatchConsumer, payloads, keys: keys
+    test_consume_batch(MyBatchConsumer, payloads, keys: keys)
   end
 
+  let(:bulk_import_id_column) { :bulk_import_id }
+  let(:replace_associations) { true }
+
   let(:consumer_class) do
     klass = Class.new(described_class) do
       cattr_accessor :record_attributes_proc
       cattr_accessor :should_consume_proc
-      schema 'MySchema'
-      namespace 'com.my-namespace'
-      key_config plain: true
       record_class Widget
 
       def should_consume?(record, associations)
@@ -154,7 +166,6 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
 
   context 'when association configured in consumer without model changes' do
     before(:each) do
-      consumer_class.config[:bulk_import_id_column] = :bulk_import_id
       ActiveRecord::Base.connection.remove_column(:widgets, :bulk_import_id)
       Widget.reset_column_information
     end
@@ -164,6 +175,7 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
     end
 
     it 'should raise error when bulk_import_id is not found' do
+      set_karafka_config(:reraise_errors, true)
      expect {
        publish_batch([{ key: 2,
                         payload: { test_id: 'xyz', some_int: 5, title: 'Widget Title' } }])
@@ -173,10 +185,8 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
   end
 
   context 'with one-to-one relation in association and custom bulk_import_id' do
-
-
-    consumer_class.config[:replace_associations] = false
-    end
+    let(:bulk_import_id_column) { :custom_id }
+    let(:replace_associations) { false }
 
     before(:all) do
       ActiveRecord::Base.connection.add_column(:widgets, :custom_id, :string, if_not_exists: true)
@@ -193,8 +203,8 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
   end
 
   context 'with one-to-many relationship in association and default bulk_import_id' do
+    let(:replace_associations) { false }
    before(:each) do
-      consumer_class.config[:replace_associations] = false
      consumer_class.record_attributes_proc = proc do |payload|
        {
          test_id: payload['test_id'],
@@ -214,7 +224,6 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
    end
 
    it 'should save item to widget and associated details' do
-      consumer_class.config[:replace_associations] = false
      publish_batch([{ key: 2,
                       payload: { test_id: 'xyz', some_int: 5, title: 'Widget Title' } }])
      expect(Widget.count).to eq(2)
@@ -233,8 +242,8 @@ module ActiveRecordBatchConsumerTest # rubocop:disable Metrics/ModuleLength
  end
 
  context 'with replace_associations on' do
+    let(:replace_associations) { true }
    before(:each) do
-      consumer_class.config[:replace_associations] = true
      consumer_class.record_attributes_proc = proc do |payload|
        {
          test_id: payload['test_id'],
data/spec/active_record_batch_consumer_spec.rb
CHANGED
@@ -34,7 +34,11 @@ module ActiveRecordBatchConsumerTest
   prepend_before(:each) do
     stub_const('MyBatchConsumer', consumer_class)
     stub_const('ConsumerTest::MyBatchConsumer', consumer_class)
-
+    register_consumer(MyBatchConsumer,
+                      'MySchema',
+                      key_config: {plain: true},
+                      configs: {bulk_import_id_column: :bulk_import_id})
+    Widget.delete_all
   end
 
   around(:each) do |ex|
@@ -50,9 +54,6 @@ module ActiveRecordBatchConsumerTest
   # Basic uncompacted consumer
   let(:consumer_class) do
     Class.new(described_class) do
-      schema 'MySchema'
-      namespace 'com.my-namespace'
-      key_config plain: true
       record_class Widget
       compacted false
     end
@@ -68,7 +69,7 @@ module ActiveRecordBatchConsumerTest
     keys = messages.map { |m| m[:key] }
     payloads = messages.map { |m| m[:payload] }
 
-    test_consume_batch(MyBatchConsumer, payloads, keys: keys
+    test_consume_batch(MyBatchConsumer, payloads, keys: keys)
   end
 
   describe 'consume_batch' do
@@ -82,10 +83,6 @@ module ActiveRecordBatchConsumerTest
       end
     end
 
-    it 'should handle an empty batch' do
-      expect { publish_batch([]) }.not_to raise_error
-    end
-
     it 'should create records from a batch' do
       publish_batch(
         [
@@ -257,12 +254,16 @@ module ActiveRecordBatchConsumerTest
   end
 
   describe 'compacted mode' do
+    before(:each) do
+      register_consumer(consumer_class,
+                        'MySchema',
+                        key_config: {plain: true})
+
+    end
+
     # Create a compacted consumer
     let(:consumer_class) do
       Class.new(described_class) do
-        schema 'MySchema'
-        namespace 'com.my-namespace'
-        key_config plain: true
         record_class Widget
 
         # :no-doc:
@@ -302,11 +303,14 @@ module ActiveRecordBatchConsumerTest
   end
 
   describe 'compound keys' do
+    before(:each) do
+      register_consumer(consumer_class,
+                        'MySchema',
+                        key_config: {schema: 'MySchemaCompound_key'})
+    end
+
     let(:consumer_class) do
       Class.new(described_class) do
-        schema 'MySchema'
-        namespace 'com.my-namespace'
-        key_config schema: 'MySchemaCompound_key'
         record_class Widget
         compacted false
 
@@ -353,13 +357,10 @@ module ActiveRecordBatchConsumerTest
   end
 
   describe 'no keys' do
-
-
-
-
-      key_config none: true
-      record_class Widget
-    end
+    before(:each) do
+      register_consumer(consumer_class,
+                        'MySchema',
+                        key_config: {none: true})
     end
 
     it 'should handle unkeyed topics' do
@@ -385,11 +386,13 @@ module ActiveRecordBatchConsumerTest
   end
 
   describe 'soft deletion' do
+    before(:each) do
+      register_consumer(consumer_class,
+                        'MySchema',
+                        key_config: {plain: true})
+    end
     let(:consumer_class) do
       Class.new(described_class) do
-        schema 'MySchema'
-        namespace 'com.my-namespace'
-        key_config plain: true
         record_class Widget
         compacted false
 
@@ -451,11 +454,13 @@ module ActiveRecordBatchConsumerTest
   end
 
   describe 'skipping records' do
+    before(:each) do
+      register_consumer(consumer_class,
+                        'MySchema',
+                        key_config: {plain: true})
+    end
    let(:consumer_class) do
      Class.new(described_class) do
-        schema 'MySchema'
-        namespace 'com.my-namespace'
-        key_config plain: true
        record_class Widget
 
        # Sample customization: Skipping records
@@ -471,9 +476,9 @@ module ActiveRecordBatchConsumerTest
      publish_batch(
        [
          { key: 1, # Record that consumer can decide to skip
-            payload: { test_id: 'skipme' } },
+            payload: { test_id: 'skipme', some_int: 3 } },
          { key: 2,
-            payload: { test_id: 'abc123' } }
+            payload: { test_id: 'abc123', some_int: 3 } }
        ]
      )
 
@@ -484,11 +489,13 @@ module ActiveRecordBatchConsumerTest
 
   describe 'pre processing' do
     context 'with uncompacted messages' do
+      before(:each) do
+        register_consumer(consumer_class,
+                          'MySchema',
+                          key_config: {plain: true})
+      end
      let(:consumer_class) do
        Class.new(described_class) do
-          schema 'MySchema'
-          namespace 'com.my-namespace'
-          key_config plain: true
          record_class Widget
          compacted false
 
@@ -527,9 +534,11 @@ module ActiveRecordBatchConsumerTest
    context 'with a global bulk_import_id_generator' do
 
      before(:each) do
-
-
-
+        register_consumer(consumer_class,
+                          'MySchema',
+                          key_config: {plain: true},
+                          configs: {bulk_import_id_generator: proc { 'global' }}
+        )
      end
 
      it 'should call the default bulk_import_id_generator proc' do
@@ -559,10 +568,17 @@ module ActiveRecordBatchConsumerTest
    context 'with a class defined bulk_import_id_generator' do
 
      before(:each) do
-
-
+        Karafka::App.routes.clear
+        Karafka::App.routes.draw do
+          defaults do
+            bulk_import_id_generator(proc { 'global'})
+          end
        end
-        consumer_class
+        register_consumer(consumer_class,
+                          'MySchema',
+                          key_config: {plain: true},
+                          configs: {bulk_import_id_generator: proc { 'custom' }}
+        )
      end
 
      it 'should call the default bulk_import_id_generator proc' do
@@ -593,11 +609,13 @@ module ActiveRecordBatchConsumerTest
 
   describe 'should_consume?' do
 
+    before(:each) do
+      register_consumer(consumer_class,
+                        'MySchema',
+                        key_config: {plain: true})
+    end
    let(:consumer_class) do
      Class.new(described_class) do
-        schema 'MySchema'
-        namespace 'com.my-namespace'
-        key_config plain: true
        record_class Widget
        compacted false
 
@@ -609,9 +627,8 @@ module ActiveRecordBatchConsumerTest
          nil
        end
 
-
-
-        payload[:consumer].process_invalid_records(payload[:records])
+        Karafka.monitor.subscribe('deimos.batch_consumption.invalid_records') do |event|
+          event[:consumer].process_invalid_records(event[:records])
        end
 
      end
@@ -649,11 +666,13 @@ module ActiveRecordBatchConsumerTest
   describe 'post processing' do
 
     context 'with uncompacted messages' do
+      before(:each) do
+        register_consumer(consumer_class,
+                          'MySchema',
+                          key_config: {plain: true})
+      end
      let(:consumer_class) do
        Class.new(described_class) do
-          schema 'MySchema'
-          namespace 'com.my-namespace'
-          key_config plain: true
          record_class Widget
          compacted false
 
@@ -673,13 +692,11 @@ module ActiveRecordBatchConsumerTest
            Widget.find_by(id: attrs['id'], test_id: attrs['test_id']).update!(some_int: attrs['some_int'])
          end
 
-
-          payload = ActiveSupport::Notifications::Event.new(*args).payload
+          Karafka.monitor.subscribe('deimos.batch_consumption.invalid_records') do |payload|
            payload[:consumer].process_invalid_records(payload[:records])
          end
 
-
-          payload = ActiveSupport::Notifications::Event.new(*args).payload
+          Karafka.monitor.subscribe('deimos.batch_consumption.valid_records') do |payload|
            payload[:consumer].process_valid_records(payload[:records])
          end
 
@@ -707,11 +724,13 @@ module ActiveRecordBatchConsumerTest
    end
 
    context 'with compacted messages' do
+      before(:each) do
+        register_consumer(consumer_class,
+                          'MySchema',
+                          key_config: {plain: true})
+      end
      let(:consumer_class) do
        Class.new(described_class) do
-          schema 'MySchema'
-          namespace 'com.my-namespace'
-          key_config plain: true
          record_class Widget
          compacted true
 
@@ -731,13 +750,11 @@ module ActiveRecordBatchConsumerTest
            Widget.find_by(id: attrs['id'], test_id: attrs['test_id']).update!(some_int: attrs['some_int'])
          end
 
-
-          payload = ActiveSupport::Notifications::Event.new(*args).payload
+          Karafka.monitor.subscribe('deimos.batch_consumption.invalid_records') do |payload|
            payload[:consumer].process_invalid_records(payload[:records])
          end
 
-
-          payload = ActiveSupport::Notifications::Event.new(*args).payload
+          Karafka.monitor.subscribe('deimos.batch_consumption.valid_records') do |payload|
            payload[:consumer].process_valid_records(payload[:records])
          end
 
@@ -765,11 +782,13 @@ module ActiveRecordBatchConsumerTest
    end
 
    context 'with post processing errors' do
+      before(:each) do
+        register_consumer(consumer_class,
+                          'MySchema',
+                          key_config: {plain: true})
+      end
      let(:consumer_class) do
        Class.new(described_class) do
-          schema 'MySchema'
-          namespace 'com.my-namespace'
-          key_config plain: true
          record_class Widget
          compacted false
 
@@ -777,15 +796,15 @@ module ActiveRecordBatchConsumerTest
            raise StandardError, 'Something went wrong'
          end
 
-
-
-            payload[:consumer].process_valid_records(payload[:records])
+          Karafka.monitor.subscribe('deimos.batch_consumption.valid_records') do |event|
+            event[:consumer].process_valid_records(event[:records])
          end
 
        end
      end
 
      it 'should save records if an exception occurs in post processing' do
+        set_karafka_config(:reraise_errors, true)
        Widget.create!(id: 1, test_id: 'abc', some_int: 1)
        Widget.create!(id: 2, test_id: 'def', some_int: 2)
 
data/spec/active_record_consume/batch_consumption_spec.rb
CHANGED
@@ -9,11 +9,11 @@ RSpec.describe Deimos::ActiveRecordConsume::BatchConsumption do
   describe 'upsert_records' do
     let(:records) do
       [
-        Deimos::Message.new({ v: 1 },
-        Deimos::Message.new({ v: 2 },
-        Deimos::Message.new({ v: 3 },
-        Deimos::Message.new({ v: 4 },
-        Deimos::Message.new({ v: 5 },
+        Deimos::Message.new({ v: 1 }, key: 1),
+        Deimos::Message.new({ v: 2 }, key: 2),
+        Deimos::Message.new({ v: 3 }, key: 3),
+        Deimos::Message.new({ v: 4 }, key: 4),
+        Deimos::Message.new({ v: 5 }, key: 5)
       ]
     end
 
@@ -56,11 +56,11 @@ RSpec.describe Deimos::ActiveRecordConsume::BatchConsumption do
   describe 'remove_records' do
     let(:records) do
       [
-        Deimos::Message.new(nil,
-        Deimos::Message.new(nil,
-        Deimos::Message.new(nil,
-        Deimos::Message.new(nil,
-        Deimos::Message.new(nil,
+        Deimos::Message.new(nil, key: 1),
+        Deimos::Message.new(nil, key: 2),
+        Deimos::Message.new(nil, key: 3),
+        Deimos::Message.new(nil, key: 4),
+        Deimos::Message.new(nil, key: 5)
      ]
    end
 
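
Taken together, these spec changes show the V2 configuration model: schema, namespace and key_config move off the consumer class and into Karafka routing. The following is a consolidated, non-authoritative sketch of a consumer topic block under that model, reusing only the DSL keywords that appear in the redraw blocks above (topic name, schema and consumer class are the spec's own placeholders):

    class KarafkaApp < Karafka::App
      routes.draw do
        topic 'my-topic' do
          consumer MyBatchConsumer
          schema 'MySchemaWithTitle'
          namespace 'com.my-namespace'
          key_config plain: true
          bulk_import_id_column :bulk_import_id
          replace_associations false
        end
      end
    end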