dionysus-rb 0.1.0
- checksums.yaml +7 -0
- data/.circleci/config.yml +61 -0
- data/.github/workflows/ci.yml +77 -0
- data/.gitignore +12 -0
- data/.rspec +3 -0
- data/.rubocop.yml +175 -0
- data/.rubocop_todo.yml +53 -0
- data/CHANGELOG.md +227 -0
- data/Gemfile +10 -0
- data/Gemfile.lock +258 -0
- data/LICENSE.txt +21 -0
- data/README.md +1206 -0
- data/Rakefile +10 -0
- data/assets/logo.svg +51 -0
- data/bin/console +11 -0
- data/bin/karafka_health_check +14 -0
- data/bin/outbox_worker_health_check +12 -0
- data/bin/setup +8 -0
- data/dionysus-rb.gemspec +64 -0
- data/docker-compose.yml +44 -0
- data/lib/dionysus/checks/health_check.rb +50 -0
- data/lib/dionysus/checks.rb +7 -0
- data/lib/dionysus/consumer/batch_events_publisher.rb +33 -0
- data/lib/dionysus/consumer/config.rb +97 -0
- data/lib/dionysus/consumer/deserializer.rb +231 -0
- data/lib/dionysus/consumer/dionysus_event.rb +42 -0
- data/lib/dionysus/consumer/karafka_consumer_generator.rb +56 -0
- data/lib/dionysus/consumer/params_batch_processor.rb +65 -0
- data/lib/dionysus/consumer/params_batch_transformations/remove_duplicates_strategy.rb +54 -0
- data/lib/dionysus/consumer/params_batch_transformations.rb +4 -0
- data/lib/dionysus/consumer/persistor.rb +157 -0
- data/lib/dionysus/consumer/registry.rb +84 -0
- data/lib/dionysus/consumer/synced_data/assign_columns_from_synced_data.rb +27 -0
- data/lib/dionysus/consumer/synced_data/assign_columns_from_synced_data_job.rb +26 -0
- data/lib/dionysus/consumer/synced_data.rb +4 -0
- data/lib/dionysus/consumer/synchronizable_model.rb +93 -0
- data/lib/dionysus/consumer/workers_group.rb +18 -0
- data/lib/dionysus/consumer.rb +36 -0
- data/lib/dionysus/monitor.rb +48 -0
- data/lib/dionysus/producer/base_responder.rb +46 -0
- data/lib/dionysus/producer/config.rb +104 -0
- data/lib/dionysus/producer/deleted_record_serializer.rb +17 -0
- data/lib/dionysus/producer/genesis/performed.rb +11 -0
- data/lib/dionysus/producer/genesis/stream_job.rb +13 -0
- data/lib/dionysus/producer/genesis/streamer/base_job.rb +44 -0
- data/lib/dionysus/producer/genesis/streamer/standard_job.rb +43 -0
- data/lib/dionysus/producer/genesis/streamer.rb +40 -0
- data/lib/dionysus/producer/genesis.rb +62 -0
- data/lib/dionysus/producer/karafka_responder_generator.rb +133 -0
- data/lib/dionysus/producer/key.rb +14 -0
- data/lib/dionysus/producer/model_serializer.rb +105 -0
- data/lib/dionysus/producer/outbox/active_record_publishable.rb +74 -0
- data/lib/dionysus/producer/outbox/datadog_latency_reporter.rb +26 -0
- data/lib/dionysus/producer/outbox/datadog_latency_reporter_job.rb +11 -0
- data/lib/dionysus/producer/outbox/datadog_latency_reporter_scheduler.rb +47 -0
- data/lib/dionysus/producer/outbox/datadog_tracer.rb +32 -0
- data/lib/dionysus/producer/outbox/duplicates_filter.rb +26 -0
- data/lib/dionysus/producer/outbox/event_name.rb +26 -0
- data/lib/dionysus/producer/outbox/health_check.rb +48 -0
- data/lib/dionysus/producer/outbox/latency_tracker.rb +43 -0
- data/lib/dionysus/producer/outbox/model.rb +117 -0
- data/lib/dionysus/producer/outbox/producer.rb +26 -0
- data/lib/dionysus/producer/outbox/publishable.rb +106 -0
- data/lib/dionysus/producer/outbox/publisher.rb +131 -0
- data/lib/dionysus/producer/outbox/records_processor.rb +56 -0
- data/lib/dionysus/producer/outbox/runner.rb +120 -0
- data/lib/dionysus/producer/outbox/tombstone_publisher.rb +22 -0
- data/lib/dionysus/producer/outbox.rb +103 -0
- data/lib/dionysus/producer/partition_key.rb +42 -0
- data/lib/dionysus/producer/registry/validator.rb +32 -0
- data/lib/dionysus/producer/registry.rb +165 -0
- data/lib/dionysus/producer/serializer.rb +52 -0
- data/lib/dionysus/producer/suppressor.rb +18 -0
- data/lib/dionysus/producer.rb +121 -0
- data/lib/dionysus/railtie.rb +9 -0
- data/lib/dionysus/rb/version.rb +5 -0
- data/lib/dionysus/rb.rb +8 -0
- data/lib/dionysus/support/rspec/outbox_publishable.rb +78 -0
- data/lib/dionysus/topic_name.rb +15 -0
- data/lib/dionysus/utils/default_message_filter.rb +25 -0
- data/lib/dionysus/utils/exponential_backoff.rb +7 -0
- data/lib/dionysus/utils/karafka_datadog_listener.rb +20 -0
- data/lib/dionysus/utils/karafka_sentry_listener.rb +9 -0
- data/lib/dionysus/utils/null_error_handler.rb +6 -0
- data/lib/dionysus/utils/null_event_bus.rb +5 -0
- data/lib/dionysus/utils/null_hermes_event_producer.rb +5 -0
- data/lib/dionysus/utils/null_instrumenter.rb +7 -0
- data/lib/dionysus/utils/null_lock_client.rb +13 -0
- data/lib/dionysus/utils/null_model_factory.rb +5 -0
- data/lib/dionysus/utils/null_mutex_provider.rb +7 -0
- data/lib/dionysus/utils/null_retry_provider.rb +7 -0
- data/lib/dionysus/utils/null_tracer.rb +5 -0
- data/lib/dionysus/utils/null_transaction_provider.rb +15 -0
- data/lib/dionysus/utils/sidekiq_batched_job_distributor.rb +24 -0
- data/lib/dionysus/utils.rb +6 -0
- data/lib/dionysus/version.rb +7 -0
- data/lib/dionysus-rb.rb +3 -0
- data/lib/dionysus.rb +133 -0
- data/lib/tasks/dionysus.rake +18 -0
- data/log/development.log +0 -0
- data/sig/dionysus/rb.rbs +6 -0
- metadata +585 -0
data/lib/dionysus/consumer/deserializer.rb
@@ -0,0 +1,231 @@
# frozen_string_literal: true

require "active_support/core_ext/array/wrap"

class Dionysus::Consumer::Deserializer
  attr_reader :data

  def initialize(data)
    @data = data.to_a
  end

  def deserialize
    data.map { |serialized_payload| SerializedRecord.new(serialized_payload) }.map do |serialized_record|
      transformed_payload = DeserializedRecord.new
      transformed_payload = populate_attributes_with_relationships(serialized_record, transformed_payload)

      transformed_payload = assign_attributes(serialized_record, transformed_payload)

      serialized_record.expected_has_many_relationships.each do |relationship_name|
        deserialize_has_many_relationship(serialized_record, relationship_name, transformed_payload)
      end

      serialized_record.expected_has_one_relationships.each do |relationship_name|
        deserialize_has_one_relationship(serialized_record, relationship_name, transformed_payload)
      end

      transformed_payload
    end
  end

  private

  def populate_attributes_with_relationships(serialized_record, transformed_payload)
    serialized_record.links.each do |relationship_name, value|
      transformed_payload.populate_attributes_with_relationship(relationship_name, value)
    end

    transformed_payload
  end

  def assign_attributes(serialized_record, transformed_payload)
    transformed_payload.synced_id = serialized_record.id
    transformed_payload.synced_created_at = serialized_record.created_at if serialized_record.has_created_at?
    transformed_payload.synced_updated_at = serialized_record.updated_at if serialized_record.has_updated_at?
    transformed_payload.synced_canceled_at = serialized_record.canceled_at if serialized_record.has_canceled_at?

    serialized_record.plain_attributes.each do |attribute, value|
      transformed_payload.attributes[attribute] = value
    end

    transformed_payload
  end

  def deserialize_has_many_relationship(serialized_record, relationship_name, transformed_payload)
    value = serialized_record[relationship_name]
    deserialized_relationship = (value && Dionysus::Consumer::Deserializer.new(value).deserialize) || nil
    relationship_model_name = serialized_record.model_name_for_relationship(relationship_name)
    transformed_payload.has_many << [relationship_model_name, deserialized_relationship]
    transformed_payload.delete(relationship_name)
    transformed_payload
  end

  def deserialize_has_one_relationship(serialized_record, relationship_name, transformed_payload)
    value = serialized_record[relationship_name]
    deserialized_relationship = Dionysus::Consumer::Deserializer.new(Array.wrap(value)).deserialize
    relationship_model_name = serialized_record.model_name_for_relationship(relationship_name)
    transformed_payload.has_one << [relationship_model_name, deserialized_relationship.first]
    transformed_payload.delete(relationship_name)
    transformed_payload
  end

  class DeserializedRecord < SimpleDelegator
    def initialize
      super(canonical_format)
    end

    def transformed_payload
      __getobj__
    end

    def attributes
      transformed_payload.fetch(:attributes)
    end

    def has_many
      transformed_payload.fetch(:has_many)
    end

    def has_one
      transformed_payload.fetch(:has_one)
    end

    def synced_id
      attributes.fetch("synced_id") { "synced_id not found in #{attributes}! Something is very wrong." }
    end

    def synced_id=(val)
      attributes["synced_id"] = val
    end

    def synced_created_at
      attributes["synced_created_at"]
    end

    def synced_created_at=(val)
      attributes["synced_created_at"] = val
    end

    def synced_updated_at
      attributes["synced_updated_at"]
    end

    def synced_updated_at=(val)
      attributes["synced_updated_at"] = val
    end

    def synced_canceled_at
      attributes["synced_canceled_at"]
    end

    def synced_canceled_at=(val)
      attributes["synced_canceled_at"] = val
    end

    def populate_attributes_with_relationship(relationship_name, value)
      if value.respond_to?(:to_hash)
        attributes["synced_#{relationship_name}_id"] = value["id"]
        attributes["synced_#{relationship_name}_type"] = value["type"]
      elsif value.respond_to?(:to_ary)
        relationship_name = ActiveSupport::Inflector.singularize(relationship_name)
        attributes["synced_#{relationship_name}_ids"] = value
      else
        attributes["synced_#{relationship_name}_id"] = value
      end
    end

    def has_synced_canceled_at?
      attributes.key?("synced_canceled_at")
    end

    private

    def canonical_format
      { attributes: {}, has_many: [], has_one: [] }
    end
  end

  class SerializedRecord
    RESERVED_ATTRIBUTES = %w[links id created_at updated_at canceled_at].freeze
    private_constant :RESERVED_ATTRIBUTES

    attr_reader :payload
    private :payload

    delegate :key?, :[], to: :payload

    def initialize(payload)
      @payload = payload
    end

    def plain_attributes
      payload.except(*RESERVED_ATTRIBUTES, *expected_has_many_relationships, *expected_has_one_relationships)
    end

    def expected_has_many_relationships
      to_many_foreign_keys.keys
    end

    def expected_has_one_relationships
      to_one_foreign_keys.keys
    end

    def model_name_for_relationship(relationship_name)
      if polymorphic_relationship?(relationship_name) && links[relationship_name].key?("type")
        links[relationship_name]["type"]
      else
        relationship_name
      end
    end

    def id
      payload["id"]
    end

    def created_at
      payload["created_at"]
    end

    def updated_at
      payload["updated_at"]
    end

    def canceled_at
      payload["canceled_at"]
    end

    def has?(key)
      key?(key)
    end

    def has_created_at?
      has?("created_at")
    end

    def has_updated_at?
      has?("updated_at")
    end

    def has_canceled_at?
      has?("canceled_at")
    end

    def links
      payload["links"].to_h
    end

    private

    def to_one_foreign_keys
      links.reject { |_, value| value.respond_to?(:to_ary) }
    end

    def to_many_foreign_keys
      links.select { |_, value| value.respond_to?(:to_ary) }
    end

    def polymorphic_relationship?(relationship_name)
      links[relationship_name]&.respond_to?(:to_hash)
    end
  end
end
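A minimal usage sketch for the deserializer above, assuming the gem and ActiveSupport are loaded. The payload below is a hypothetical example of the expected shape (reserved keys plus a "links" hash), not taken from the gem:

# Hypothetical payload; "links" values that respond to to_ary become
# synced_*_ids, scalar links become synced_*_id.
payload = [
  {
    "id" => 1,
    "name" => "Villa Saganaki",
    "created_at" => "2023-01-01T12:00:00Z",
    "updated_at" => "2023-01-02T12:00:00Z",
    "links" => { "account" => 10, "bookings" => [100, 101] }
  }
]

record = Dionysus::Consumer::Deserializer.new(payload).deserialize.first

record.synced_id                         # => 1
record.attributes["name"]                # => "Villa Saganaki"
record.attributes["synced_account_id"]   # => 10
record.attributes["synced_booking_ids"]  # => [100, 101]
record.has_one                           # => [["account", nil]] (no embedded record in this payload)
record.has_many                          # => [["bookings", nil]]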
data/lib/dionysus/consumer/dionysus_event.rb
@@ -0,0 +1,42 @@
# frozen_string_literal: true

class Dionysus::Consumer::DionysusEvent
  attr_reader :event_name, :model_name, :transformed_data, :local_changes

  def initialize(event_name, model_name, transformed_data, aggregate_root: true)
    @event_name = event_name.to_s
    @model_name = model_name.to_s
    @transformed_data = transformed_data
    @local_changes = {}
    @aggregate_root = aggregate_root
  end

  def created?
    event_name.end_with?("created")
  end

  def updated?
    event_name.end_with?("updated")
  end

  def destroyed?
    event_name.end_with?("destroyed")
  end

  def generic_event?
    created? || updated? || destroyed?
  end

  def aggregate_root?
    @aggregate_root == true
  end

  def to_h
    {
      event_name: event_name,
      model_name: model_name,
      transformed_data: transformed_data,
      local_changes: local_changes
    }
  end
end
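A short sketch of the value object above; the event and model names are placeholders:

event = Dionysus::Consumer::DionysusEvent.new("rental_updated", "rental", [])

event.updated?        # => true
event.generic_event?  # => true
event.aggregate_root? # => true (the default)
event.to_h
# => { event_name: "rental_updated", model_name: "rental", transformed_data: [], local_changes: {} }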
data/lib/dionysus/consumer/karafka_consumer_generator.rb
@@ -0,0 +1,56 @@
# frozen_string_literal: true

class Dionysus::Consumer::KarafkaConsumerGenerator
  TOMBSTONE = nil

  def generate(config, topic)
    base_class = topic.consumer_base_class || config.consumer_base_class

    consumer_klass = Class.new(base_class) do
      define_method :consume do
        config.retry_provider.retry do
          processed_events = Concurrent::Array.new
          config.instrumenter.instrument("dionysus.consume.#{topic}") do
            batch_number = 0

            if topic.concurrency
              workers = Dionysus::Consumer::WorkersGroup.new
              messages.each do |batch|
                batch_number += 1 # cannot use each_with_index on params_batch
                worker = Thread.new do
                  Thread.current.report_on_exception = true
                  Thread.current.abort_on_exception = true
                  processed_events.concat(process_batch(config, topic, batch, batch_number))
                end
                workers << worker
              end
              workers.work
            else
              final_params_batch = topic.params_batch_transformation&.call(messages) || messages
              final_params_batch.each do |batch|
                batch_number += 1 # cannot use each_with_index on params_batch
                processed_events.concat(process_batch(config, topic, batch, batch_number))
              end
            end
          end
          Dionysus::Consumer::BatchEventsPublisher.new(config, topic).publish(processed_events)
        end
      end

      private

      define_method :process_batch do |configuration, current_topic, batch, batch_number|
        configuration.transaction_provider.transaction do
          Dionysus::Consumer::ParamsBatchProcessor.new(configuration, current_topic).process(batch, batch_number)
        end
      end
    end

    consumer_klass_name = "#{topic.to_s.classify}Consumer"

    Dionysus.send(:remove_const, consumer_klass_name) if Dionysus.const_defined?(consumer_klass_name)
    Dionysus.const_set(consumer_klass_name, consumer_klass)
    consumer_klass
  end
end
data/lib/dionysus/consumer/params_batch_processor.rb
@@ -0,0 +1,65 @@
# frozen_string_literal: true

class Dionysus::Consumer::ParamsBatchProcessor
  attr_reader :config, :topic
  private :config, :topic

  def initialize(config, topic)
    @config = config
    @topic = topic
  end

  def process(batch, batch_number)
    processed_events = []
    instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}") do
      payload = batch.payload.to_h
      metadata = batch.metadata
      message = payload["message"].to_a

      with_mutex(metadata, config) do
        message.each do |current_event|
          event_name = current_event["event"]
          data = Array.wrap(current_event["data"])
          model_name = current_event["model_name"]

          transformed_data = nil
          config.instrumenter.instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.deserialize") do
            transformed_data = Dionysus::Consumer::Deserializer.new(data).deserialize
          end

          if ignore_message?(topic: topic, message: message, transformed_data: transformed_data)
            notify_about_ignored_message(topic: topic, message: message, transformed_data: transformed_data)
            next
          end

          dionysus_event = Dionysus::Consumer::DionysusEvent.new(event_name, model_name, transformed_data)
          config.instrumenter.instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist",
            dionysus_event.to_h.except(:transformed_data, :local_changes)) do
            config.transaction_provider.connection_pool.with_connection do
              Dionysus::Consumer::Persistor.new(config, topic).persist(dionysus_event, batch_number)
            end
          end
          processed_events << dionysus_event
        end
      end
    end
    processed_events
  end

  private

  delegate :message_filter, to: :config
  delegate :ignore_message?, :notify_about_ignored_message, to: :message_filter

  def instrument(label, options = {}, &block)
    config.instrumenter.instrument(label, options, &block)
  end

  def with_mutex(metadata, config, &block)
    message_key = metadata.key || SecureRandom.uuid
    config.processing_mutex_provider.send(config.processing_mutex_method_name, "Dionysus-#{message_key}", &block)
  end
end
data/lib/dionysus/consumer/params_batch_transformations/remove_duplicates_strategy.rb
@@ -0,0 +1,54 @@
# frozen_string_literal: true

class Dionysus::Consumer::ParamsBatchTransformations::RemoveDuplicatesStrategy
  def call(params_batch)
    return params_batch if duplicates_removal_not_applicable?(params_batch)

    Karafka::Messages::Messages.new(transform_messages_array(params_batch), params_batch.metadata)
  end

  private

  # The idea is the following:
  # 1. Group messages by event and id - for a given model we can expect unique messages for _created and _deleted
  #    events, but we can have multiple _updated events, so this is where we are interested in removing duplicates.
  # 2. Take the entry with the greatest updated_at in each group (max_by) - this way we keep the most recent update.
  # 3. Flatten the array of arrays, as each group will have a single item.
  # It is safer to apply this just to the _updated events for a given model, as otherwise we could change the order
  # of the messages for a different type, which might not be desirable.
  def transform_messages_array(params_batch)
    params_batch
      .to_a
      .group_by { |batch| grouping_key_by_event_and_id(batch) }
      .map { |_, group| group.max_by { |batch| updated_at_from_batch(batch) } }
      .flatten
  end

  def grouping_key_by_event_and_id(batch)
    [
      batch.payload.fetch("message").first.fetch("event"),
      batch.payload.fetch("message").first["data"].first.fetch("id", nil)
    ].join
  end

  def updated_at_from_batch(batch)
    timestamp = batch.payload.fetch("message", []).first.to_h.fetch("data", []).first.to_h.fetch("updated_at", nil)

    return timestamp.to_datetime if timestamp.respond_to?(:to_datetime)

    Time.current
  end

  def duplicates_removal_not_applicable?(params_batch)
    any_message_containing_more_than_one_event?(params_batch) || any_event_containing_more_than_one_item(params_batch)
  end

  def any_message_containing_more_than_one_event?(params_batch)
    params_batch.any? { |batch| batch.payload.to_h.fetch("message", []).size != 1 }
  end

  def any_event_containing_more_than_one_item(params_batch)
    params_batch.any? { |batch| batch.payload.to_h.fetch("message", []).first.to_h.fetch("data", []).size != 1 }
  end
end
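The comment in transform_messages_array is the heart of the strategy. A standalone sketch of the same grouping idea on plain hashes (the keys below are an assumption standing in for the Karafka message payloads, so this runs without Karafka):

require "time"

batches = [
  { "event" => "rental_updated", "id" => 1, "updated_at" => "2023-05-01T10:00:00Z" },
  { "event" => "rental_updated", "id" => 1, "updated_at" => "2023-05-01T12:00:00Z" },
  { "event" => "rental_created", "id" => 2, "updated_at" => "2023-05-01T09:00:00Z" }
]

# Group by event + id, then keep only the entry with the latest updated_at per group.
deduplicated = batches
  .group_by { |batch| [batch["event"], batch["id"]] }
  .map { |_, group| group.max_by { |batch| Time.parse(batch["updated_at"]) } }

deduplicated.size # => 2 (only the latest rental_updated for id 1 is kept)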
data/lib/dionysus/consumer/persistor.rb
@@ -0,0 +1,157 @@
# frozen_string_literal: true

class Dionysus::Consumer::Persistor
  attr_reader :config, :topic
  private :config, :topic

  def initialize(config, topic)
    @config = config
    @topic = topic
  end

  def persist(dionysus_event, batch_number)
    if dionysus_event.generic_event?
      if dionysus_event.created? && topic.options[:import] == true
        persist_via_dionysus_create(dionysus_event, batch_number)
      elsif dionysus_event.destroyed? && topic.options[:import] == true
        persist_via_dionysus_destroy(dionysus_event, batch_number)
      else
        persist_standard_event(dionysus_event, batch_number)
      end
    else
      log_unknown_event_type(dionysus_event)
    end
  end

  private

  def persist_via_dionysus_create(dionysus_event, batch_number)
    model_klass = find_model_klass(dionysus_event) or return
    config.instrumenter.instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.dionysus_import") do
      model_klass.dionysus_import(dionysus_event.transformed_data)
    end
  end

  def persist_via_dionysus_destroy(dionysus_event, batch_number)
    model_klass = find_model_klass(dionysus_event) or return
    config.instrumenter.instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.dionysus_destroy") do
      model_klass.dionysus_destroy(dionysus_event.transformed_data)
    end
  end

  def find_model_klass(dionysus_event)
    config.model_factory.for_model(dionysus_event.model_name)
  end

  def persist_standard_event(dionysus_event, batch_number)
    Array.wrap(dionysus_event.transformed_data).each do |deseralized_record|
      model_klass = find_model_klass(dionysus_event) or return
      attributes = deseralized_record.attributes
      has_one_relationships = deseralized_record.has_one
      has_many_relationships = deseralized_record.has_many
      synced_id = deseralized_record.synced_id

      if synced_id.nil?
        Dionysus.logger.error("[Dionysus] synced_id nil for #{deseralized_record}, that should never happen!")
        next
      end

      record = Dionysus::Consumer::SynchronizableModel.new(config,
        model_klass.find_or_initialize_by(config.synced_id_attribute => synced_id))
      event_updated_at = deseralized_record.synced_updated_at || deseralized_record.synced_created_at

      next unless record.persist_with_dionysus?(event_updated_at)

      record.assign_attributes_from_dionysus(attributes)
      if dionysus_event.destroyed?
        instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.remove_with_dionysus") do
          record.remove_with_dionysus(deseralized_record) if dionysus_event.aggregate_root?
        end
      else
        instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.restore_with_dionysus") do
          record.restore_with_dionysus if record.restorable?(deseralized_record)
        end
      end

      dionysus_event.local_changes[[dionysus_event.model_name, synced_id]] = record.changes if record.changes.present?

      instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.save") do
        record.save unless record.destroyed?
      end

      instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.persist_to_many_relationships") do
        has_many_relationships.each do |relationship_name, relationship_records|
          persist_to_many_relationship(dionysus_event, relationship_name, record, relationship_records,
            batch_number)
        end
      end

      instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.persist_to_one_relationships") do
        has_one_relationships.each do |relationship_name, relationship_record|
          persist_to_one_relationship(dionysus_event, relationship_name, record, relationship_record,
            batch_number)
        end
      end
    end
  end

  def log_unknown_event_type(dionysus_event)
    Dionysus.logger.debug("[Dionysus] unknown event type #{dionysus_event.event_name}")
  end

  def persist_to_one_relationship(original_event, relationship_name, parent_model_record, record, batch_number)
    instrumentation_arguments = {
      event_name: original_event.event_name,
      parent_model_record: parent_model_record.model_name.to_s,
      relationship_name: relationship_name
    }
    instrument(
      "dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.persist_to_one_relationship.#{relationship_name}", instrumentation_arguments
    ) do
      return if parent_model_record.nil? || record.nil?

      records = Array.wrap(record)
      dionysus_event = Dionysus::Consumer::DionysusEvent.new(original_event.event_name,
        relationship_name, records, aggregate_root: false)

      instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.persist_to_one_relationship.#{relationship_name}.persist") do
        persist(dionysus_event, batch_number)
        original_event.local_changes.merge!(dionysus_event.local_changes)
      end

      instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.persist_to_one_relationship.#{relationship_name}.resolve_to_one_association") do
        parent_model_record.resolve_to_one_association(relationship_name, record.synced_id)
      end
    end
  end

  def persist_to_many_relationship(original_event, relationship_name, parent_model_record, records, batch_number)
    instrumentation_arguments = {
      event_name: original_event.event_name,
      parent_model_record: parent_model_record.model_name.to_s,
      relationship_name: relationship_name
    }
    instrument(
      "dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.persist_to_many_relationship.#{relationship_name}", instrumentation_arguments
    ) do
      return if parent_model_record.nil? || records.nil?

      dionysus_event = Dionysus::Consumer::DionysusEvent.new(original_event.event_name,
        relationship_name, records, aggregate_root: false)

      instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.persist_to_many_relationship.#{relationship_name}.persist") do
        persist(dionysus_event, batch_number)
        original_event.local_changes.merge!(dionysus_event.local_changes)
      end

      synced_ids_of_related_records = records.map(&:synced_id)
      instrument("dionysus.consume.#{topic}.batch_number_#{batch_number}.persist.persist_to_many_relationship.#{relationship_name}.resolve_to_many_association") do
        parent_model_record.resolve_to_many_association(relationship_name, synced_ids_of_related_records)
      end
    end
  end

  def instrument(label, options = {}, &block)
    config.instrumenter.instrument(label, options, &block)
  end
end
data/lib/dionysus/consumer/registry.rb
@@ -0,0 +1,84 @@
# frozen_string_literal: true

class Dionysus::Consumer::Registry
  attr_reader :container
  private :container

  def initialize
    @container = {}
  end

  def namespace(namespace, &block)
    registration = Registration.new(namespace)
    registration.instance_eval(&block)
    container[namespace] = registration
  end

  def registrations
    container
  end

  class Registration
    attr_reader :namespace, :topics, :deserializer_klass, :consumers

    def initialize(namespace)
      @namespace = namespace
      @topics = []
      @deserializer_klass = nil
      @consumers = []
    end

    def deserializer(deserializer_klass)
      @deserializer_klass = deserializer_klass
    end

    def topic(name, options = {}, &block)
      new_topic = Topic.new(namespace, name, deserializer_klass, options, &block)
      consumer = Dionysus::Consumer::KarafkaConsumerGenerator.new.generate(
        Dionysus::Consumer.configuration, new_topic
      )
      consumers << consumer
      new_topic.consumer = consumer
      topics << new_topic
    end

    class Topic
      attr_reader :namespace, :name, :deserializer_klass, :options, :extensions_block

      attr_accessor :consumer

      def initialize(namespace, name, deserializer_klass, options = {}, &block)
        @namespace = namespace
        @name = name
        @deserializer_klass = deserializer_klass
        @options = options
        @extensions_block = block
      end

      def to_s
        Dionysus::TopicName.new(namespace, name).to_s
      end

      def sidekiq_worker
        options.fetch(:worker, nil)
      end

      def sidekiq_backend?
        options.fetch(:sidekiq, false)
      end

      def consumer_base_class
        options.fetch(:consumer_base_class, nil)
      end

      def concurrency
        options.fetch(:concurrency, nil)
      end

      def params_batch_transformation
        options.fetch(:params_batch_transformation,
          Dionysus::Consumer::ParamsBatchTransformations::RemoveDuplicatesStrategy.new)
      end
    end
  end
end
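A hypothetical registration showing how the DSL above fits together. The namespace, topic names and options are placeholders, and the topic call only works once Dionysus::Consumer.configuration has been set up (it immediately generates a Karafka consumer class for each topic):

registry = Dionysus::Consumer::Registry.new

registry.namespace :v3 do
  deserializer Dionysus::Consumer::Deserializer

  topic :rentals, sidekiq: true
  topic :bookings, concurrency: true
end

registration = registry.registrations.fetch(:v3)
registration.topics.first.sidekiq_backend? # => true
registration.consumers                     # => the generated consumer classes, one per topic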