dionysus-rb 0.1.0

Files changed (102)
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +61 -0
  3. data/.github/workflows/ci.yml +77 -0
  4. data/.gitignore +12 -0
  5. data/.rspec +3 -0
  6. data/.rubocop.yml +175 -0
  7. data/.rubocop_todo.yml +53 -0
  8. data/CHANGELOG.md +227 -0
  9. data/Gemfile +10 -0
  10. data/Gemfile.lock +258 -0
  11. data/LICENSE.txt +21 -0
  12. data/README.md +1206 -0
  13. data/Rakefile +10 -0
  14. data/assets/logo.svg +51 -0
  15. data/bin/console +11 -0
  16. data/bin/karafka_health_check +14 -0
  17. data/bin/outbox_worker_health_check +12 -0
  18. data/bin/setup +8 -0
  19. data/dionysus-rb.gemspec +64 -0
  20. data/docker-compose.yml +44 -0
  21. data/lib/dionysus/checks/health_check.rb +50 -0
  22. data/lib/dionysus/checks.rb +7 -0
  23. data/lib/dionysus/consumer/batch_events_publisher.rb +33 -0
  24. data/lib/dionysus/consumer/config.rb +97 -0
  25. data/lib/dionysus/consumer/deserializer.rb +231 -0
  26. data/lib/dionysus/consumer/dionysus_event.rb +42 -0
  27. data/lib/dionysus/consumer/karafka_consumer_generator.rb +56 -0
  28. data/lib/dionysus/consumer/params_batch_processor.rb +65 -0
  29. data/lib/dionysus/consumer/params_batch_transformations/remove_duplicates_strategy.rb +54 -0
  30. data/lib/dionysus/consumer/params_batch_transformations.rb +4 -0
  31. data/lib/dionysus/consumer/persistor.rb +157 -0
  32. data/lib/dionysus/consumer/registry.rb +84 -0
  33. data/lib/dionysus/consumer/synced_data/assign_columns_from_synced_data.rb +27 -0
  34. data/lib/dionysus/consumer/synced_data/assign_columns_from_synced_data_job.rb +26 -0
  35. data/lib/dionysus/consumer/synced_data.rb +4 -0
  36. data/lib/dionysus/consumer/synchronizable_model.rb +93 -0
  37. data/lib/dionysus/consumer/workers_group.rb +18 -0
  38. data/lib/dionysus/consumer.rb +36 -0
  39. data/lib/dionysus/monitor.rb +48 -0
  40. data/lib/dionysus/producer/base_responder.rb +46 -0
  41. data/lib/dionysus/producer/config.rb +104 -0
  42. data/lib/dionysus/producer/deleted_record_serializer.rb +17 -0
  43. data/lib/dionysus/producer/genesis/performed.rb +11 -0
  44. data/lib/dionysus/producer/genesis/stream_job.rb +13 -0
  45. data/lib/dionysus/producer/genesis/streamer/base_job.rb +44 -0
  46. data/lib/dionysus/producer/genesis/streamer/standard_job.rb +43 -0
  47. data/lib/dionysus/producer/genesis/streamer.rb +40 -0
  48. data/lib/dionysus/producer/genesis.rb +62 -0
  49. data/lib/dionysus/producer/karafka_responder_generator.rb +133 -0
  50. data/lib/dionysus/producer/key.rb +14 -0
  51. data/lib/dionysus/producer/model_serializer.rb +105 -0
  52. data/lib/dionysus/producer/outbox/active_record_publishable.rb +74 -0
  53. data/lib/dionysus/producer/outbox/datadog_latency_reporter.rb +26 -0
  54. data/lib/dionysus/producer/outbox/datadog_latency_reporter_job.rb +11 -0
  55. data/lib/dionysus/producer/outbox/datadog_latency_reporter_scheduler.rb +47 -0
  56. data/lib/dionysus/producer/outbox/datadog_tracer.rb +32 -0
  57. data/lib/dionysus/producer/outbox/duplicates_filter.rb +26 -0
  58. data/lib/dionysus/producer/outbox/event_name.rb +26 -0
  59. data/lib/dionysus/producer/outbox/health_check.rb +48 -0
  60. data/lib/dionysus/producer/outbox/latency_tracker.rb +43 -0
  61. data/lib/dionysus/producer/outbox/model.rb +117 -0
  62. data/lib/dionysus/producer/outbox/producer.rb +26 -0
  63. data/lib/dionysus/producer/outbox/publishable.rb +106 -0
  64. data/lib/dionysus/producer/outbox/publisher.rb +131 -0
  65. data/lib/dionysus/producer/outbox/records_processor.rb +56 -0
  66. data/lib/dionysus/producer/outbox/runner.rb +120 -0
  67. data/lib/dionysus/producer/outbox/tombstone_publisher.rb +22 -0
  68. data/lib/dionysus/producer/outbox.rb +103 -0
  69. data/lib/dionysus/producer/partition_key.rb +42 -0
  70. data/lib/dionysus/producer/registry/validator.rb +32 -0
  71. data/lib/dionysus/producer/registry.rb +165 -0
  72. data/lib/dionysus/producer/serializer.rb +52 -0
  73. data/lib/dionysus/producer/suppressor.rb +18 -0
  74. data/lib/dionysus/producer.rb +121 -0
  75. data/lib/dionysus/railtie.rb +9 -0
  76. data/lib/dionysus/rb/version.rb +5 -0
  77. data/lib/dionysus/rb.rb +8 -0
  78. data/lib/dionysus/support/rspec/outbox_publishable.rb +78 -0
  79. data/lib/dionysus/topic_name.rb +15 -0
  80. data/lib/dionysus/utils/default_message_filter.rb +25 -0
  81. data/lib/dionysus/utils/exponential_backoff.rb +7 -0
  82. data/lib/dionysus/utils/karafka_datadog_listener.rb +20 -0
  83. data/lib/dionysus/utils/karafka_sentry_listener.rb +9 -0
  84. data/lib/dionysus/utils/null_error_handler.rb +6 -0
  85. data/lib/dionysus/utils/null_event_bus.rb +5 -0
  86. data/lib/dionysus/utils/null_hermes_event_producer.rb +5 -0
  87. data/lib/dionysus/utils/null_instrumenter.rb +7 -0
  88. data/lib/dionysus/utils/null_lock_client.rb +13 -0
  89. data/lib/dionysus/utils/null_model_factory.rb +5 -0
  90. data/lib/dionysus/utils/null_mutex_provider.rb +7 -0
  91. data/lib/dionysus/utils/null_retry_provider.rb +7 -0
  92. data/lib/dionysus/utils/null_tracer.rb +5 -0
  93. data/lib/dionysus/utils/null_transaction_provider.rb +15 -0
  94. data/lib/dionysus/utils/sidekiq_batched_job_distributor.rb +24 -0
  95. data/lib/dionysus/utils.rb +6 -0
  96. data/lib/dionysus/version.rb +7 -0
  97. data/lib/dionysus-rb.rb +3 -0
  98. data/lib/dionysus.rb +133 -0
  99. data/lib/tasks/dionysus.rake +18 -0
  100. data/log/development.log +0 -0
  101. data/sig/dionysus/rb.rbs +6 -0
  102. metadata +585 -0
data/lib/dionysus/consumer/synced_data/assign_columns_from_synced_data.rb
@@ -0,0 +1,27 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Consumer::SyncedData::AssignColumnsFromSyncedData
+   attr_reader :config
+   private :config
+
+   def initialize(config: Dionysus::Consumer.configuration)
+     @config = config
+   end
+
+   def call(collection, columns)
+     collection.each { |record| record.update!(hash_of_attributes(record, columns)) }
+   end
+
+   private
+
+   delegate :resolve_synced_data_hash_proc, to: :config
+
+   def hash_of_attributes(record, columns)
+     columns
+       .to_h { |column| [column, fetch_value_from_synced_data(record, column)] }
+   end
+
+   def fetch_value_from_synced_data(record, column)
+     resolve_synced_data_hash_proc.call(record).stringify_keys[column]
+   end
+ end
data/lib/dionysus/consumer/synced_data/assign_columns_from_synced_data_job.rb
@@ -0,0 +1,26 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Consumer::SyncedData::AssignColumnsFromSyncedDataJob
+   include Sidekiq::Worker
+
+   sidekiq_options queue: Dionysus::Consumer::Config.default_sidekiq_queue
+
+   def self.enqueue(model_klass, columns, batch_size: 1000)
+     primary_key = model_klass.primary_key
+     model_klass.select(:id).find_in_batches(batch_size: batch_size).with_index do |records, index|
+       Dionysus.logger.info "[AssignColumnsFromSyncedDataJob] enqueue batch: #{index}"
+       model_name = model_klass.model_name.to_s
+       ids = records.map { |r| r.public_send(primary_key) }
+
+       set(queue: Dionysus::Consumer.configuration.sidekiq_queue)
+         .perform_async(model_name, ids, columns)
+     end
+   end
+
+   def perform(model_name, ids, columns)
+     model_klass = model_name.constantize
+     collection = model_klass.where(model_klass.primary_key => ids)
+
+     Dionysus::Consumer::SyncedData::AssignColumnsFromSyncedData.new.call(collection, columns)
+   end
+ end
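Taken together, the two classes above let a consumer backfill real database columns from the raw synced-data payload. A hedged usage sketch follows; the `Rental` model and the column names are hypothetical and not part of this gem:

```ruby
# Hypothetical backfill: copy "name" and "status" out of the stored synced
# data payload into real columns on Rental, 500 records per Sidekiq job.
Dionysus::Consumer::SyncedData::AssignColumnsFromSyncedDataJob.enqueue(
  Rental,               # hypothetical ActiveRecord model synced by Dionysus
  ["name", "status"],   # columns to assign from the synced data hash
  batch_size: 500
)
```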
data/lib/dionysus/consumer/synced_data.rb
@@ -0,0 +1,4 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Consumer::SyncedData
+ end
data/lib/dionysus/consumer/synchronizable_model.rb
@@ -0,0 +1,93 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Consumer::SynchronizableModel < SimpleDelegator
+   attr_reader :config
+   private :config
+
+   def initialize(config, model)
+     @config = config
+     super(model)
+   end
+
+   def model
+     __getobj__
+   end
+
+   def synced_at
+     if respond_to?(synced_updated_at_timestamp_attribute)
+       public_send(synced_updated_at_timestamp_attribute)
+     else
+       public_send(synced_created_at_timestamp_attribute)
+     end
+   end
+
+   def persist_with_dionysus?(event_updated_at)
+     (synced_at && event_updated_at && event_updated_at >= synced_at) || synced_at.nil? || event_updated_at.nil?
+   end
+
+   def assign_attributes_from_dionysus(attributes)
+     public_send("#{synced_data_attribute}=", attributes)
+     reverse_mapping = config.attributes_mapping_for_model(model.model_name).to_a.map(&:reverse).to_h
+
+     assignable_attributes = extract_assignable_attributes(attributes)
+       .map { |key, value| apply_mapping(reverse_mapping, key, value) }
+       .select { |attribute, _| respond_to?("#{attribute}=") }
+       .to_h
+
+     assign_attributes(assignable_attributes)
+   end
+
+   def remove_with_dionysus(deseralized_record)
+     if soft_deleteable_but_cannot_be_soft_deleted_via_attribute_assignment?(deseralized_record)
+       public_send(soft_delete_strategy)
+     elsif (deseralized_record.has_synced_canceled_at? && !respond_to?("#{soft_deleted_at_timestamp_attribute}=")) ||
+         !deseralized_record.has_synced_canceled_at?
+
+       destroy
+     end
+   end
+
+   def restore_with_dionysus
+     model.public_send("#{soft_deleted_at_timestamp_attribute}=", nil)
+   end
+
+   def restorable?(deseralized_record)
+     respond_to?("#{soft_deleted_at_timestamp_attribute}=") && !deseralized_record.has_synced_canceled_at?
+   end
+
+   private
+
+   delegate :soft_delete_strategy, :synced_created_at_timestamp_attribute,
+     :synced_updated_at_timestamp_attribute, :soft_deleted_at_timestamp_attribute,
+     :synced_data_attribute, to: :config
+
+   def soft_deleteable_but_cannot_be_soft_deleted_via_attribute_assignment?(deseralized_record)
+     (respond_to?(soft_delete_strategy) && !deseralized_record.has_synced_canceled_at?) ||
+       (respond_to?(soft_delete_strategy) && !respond_to?("#{soft_deleted_at_timestamp_attribute}="))
+   end
+
+   def extract_assignable_attributes(attributes)
+     attributes.clone.tap do |hash|
+       if synced_created_at_timestamp_attribute.to_s != "synced_created_at"
+         hash[synced_created_at_timestamp_attribute] =
+           hash["synced_created_at"]
+       end
+       if synced_updated_at_timestamp_attribute.to_s != "synced_updated_at"
+         hash[synced_updated_at_timestamp_attribute] =
+           hash["synced_updated_at"]
+       end
+       if soft_deleted_at_timestamp_attribute.to_s != "synced_canceled_at"
+         hash[soft_deleted_at_timestamp_attribute] =
+           hash["synced_canceled_at"]
+       end
+     end
+   end
+
+   def apply_mapping(reverse_mapping, key, value)
+     if reverse_mapping.key?(key.to_sym)
+       [reverse_mapping[key.to_sym], value]
+     else
+       [key, value]
+     end
+   end
+ end
data/lib/dionysus/consumer/workers_group.rb
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Consumer::WorkersGroup
+   attr_reader :workers
+   private :workers
+
+   def initialize
+     @workers = []
+   end
+
+   def <<(worker)
+     workers << worker
+   end
+
+   def work
+     workers.map(&:join)
+   end
+ end
data/lib/dionysus/consumer.rb
@@ -0,0 +1,36 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Consumer
+   def self.configuration
+     @configuration ||= Dionysus::Consumer::Config.new
+   end
+
+   def self.configure
+     yield configuration
+   end
+
+   def self.registry
+     configuration.registry
+   end
+
+   def self.declare(&config)
+     registry = Dionysus::Consumer::Registry.new
+     registry.instance_eval(&config)
+
+     Dionysus.inject_routing!(registry)
+
+     configure do |configuration|
+       configuration.registry = registry
+     end
+   end
+
+   def self.reset!
+     return if registry.nil?
+
+     registry.registrations.values.flat_map(&:consumers).each do |consumer_class|
+       Dionysus.send(:remove_const, consumer_class.name.demodulize.to_sym) if consumer_class.name
+     end
+     @configuration = nil
+     Dionysus.inject_routing!(nil)
+   end
+ end
data/lib/dionysus/monitor.rb
@@ -0,0 +1,48 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Monitor < Dry::Monitor::Notifications
+   EVENTS = %w[
+     outbox_producer.started
+     outbox_producer.stopped
+     outbox_producer.shutting_down
+     outbox_producer.error
+     outbox_producer.publishing_failed
+     outbox_producer.published
+     outbox_producer.processing_topic
+     outbox_producer.processed_topic
+     outbox_producer.lock_exists_for_topic
+     outbox_producer.heartbeat
+   ].freeze
+
+   private_constant :EVENTS
+
+   def initialize
+     super(:dionysus)
+     EVENTS.each { |event| register_event(event) }
+   end
+
+   def subscribe(event)
+     return super if events.include?(event.to_s)
+
+     raise UnknownEventError.new(events, event)
+   end
+
+   def events
+     EVENTS
+   end
+
+   class UnknownEventError < StandardError
+     attr_reader :available_events, :current_event
+     private :available_events, :current_event
+
+     def initialize(available_events, current_event)
+       super()
+       @available_events = available_events
+       @current_event = current_event
+     end
+
+     def message
+       "unknown event: #{current_event}, the available events are: #{available_events.join(", ")}"
+     end
+   end
+ end
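The monitor only accepts the outbox-producer events listed above; anything else raises `UnknownEventError`. A minimal sketch, assuming the host application holds on to a `Dionysus::Monitor` instance (how the instance is wired into the outbox runner is not shown in this diff):

```ruby
monitor = Dionysus::Monitor.new

# Valid subscription: the event name is on the EVENTS allow-list.
monitor.subscribe("outbox_producer.published") do |event|
  puts "outbox batch published: #{event.inspect}"
end

# Unknown events fail fast instead of being silently ignored.
monitor.subscribe("outbox_producer.oops")
# => raises Dionysus::Monitor::UnknownEventError
```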
data/lib/dionysus/producer/base_responder.rb
@@ -0,0 +1,46 @@
+ # frozen_string_literal: true
+
+ # Based on the responder concept from Karafka 1.4
+ class Dionysus::Producer::BaseResponder
+   class << self
+     attr_accessor :topics
+
+     def topic(topic_name)
+       self.topics ||= {}
+       self.topics[topic_name] = topic_name.to_s
+     end
+
+     def call(*data)
+       new.call(*data)
+     end
+   end
+
+   attr_reader :messages_buffer
+
+   def initialize
+     @messages_buffer = Hash.new { |h, k| h[k] = [] }
+   end
+
+   def call(*data)
+     respond(*data)
+     deliver
+   end
+
+   private
+
+   def deliver
+     messages_buffer.each_value do |data_elements|
+       data_elements.each do |data, options|
+         Karafka.producer.produce_sync(payload: data, **options)
+       end
+     end
+   end
+
+   def respond(*_data)
+     raise "implement me"
+   end
+
+   def respond_to(topic, data, options = {})
+     messages_buffer[topic] << [data.to_json, options.merge(topic: topic.to_s)]
+   end
+ end
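In the gem, concrete responders are generated by `Dionysus::Producer::KarafkaResponderGenerator` (listed in the files above but not shown in this diff). The subclass below is only a hand-written sketch of the contract, with a hypothetical topic name and payload:

```ruby
# Hypothetical responder: override #respond and buffer messages per topic;
# #call then serializes each payload to JSON and produces it synchronously
# via Karafka.producer.
class ExampleRentalsResponder < Dionysus::Producer::BaseResponder
  topic :v3_rentals

  private

  def respond(records)
    respond_to(:v3_rentals, { "message" => records }, key: "rental:1", partition_key: "1")
  end
end

ExampleRentalsResponder.call([{ "id" => 1, "name" => "Villa" }])
```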
data/lib/dionysus/producer/config.rb
@@ -0,0 +1,104 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Producer::Config
+   attr_accessor :registry, :outbox_model, :database_connection_provider, :transaction_provider,
+     :datadog_statsd_client
+
+   attr_writer :instrumenter, :event_bus, :soft_delete_column, :default_partition_key, :outbox_worker_sleep_seconds,
+     :lock_client, :lock_expiry_time, :error_handler, :outbox_publishing_batch_size,
+     :transactional_outbox_enabled, :sidekiq_queue, :publisher_service_name,
+     :genesis_consistency_safety_delay, :hermes_event_producer, :publish_after_commit, :outbox_worker_publishing_delay,
+     :high_priority_sidekiq_queue, :observers_inline_maximum_size, :remove_consecutive_duplicates_before_publishing
+
+   def self.default_sidekiq_queue
+     :dionysus
+   end
+
+   def self.high_priority_sidekiq_queue
+     :dionysus_high_priority
+   end
+
+   def instrumenter
+     @instrumenter || Dionysus::Utils::NullInstrumenter
+   end
+
+   def event_bus
+     @event_bus || Dionysus::Utils::NullEventBus
+   end
+
+   def soft_delete_column
+     @soft_delete_column || "canceled_at"
+   end
+
+   def default_partition_key
+     @default_partition_key || :account_id
+   end
+
+   def outbox_worker_sleep_seconds
+     return BigDecimal("0.2") if @outbox_worker_sleep_seconds.nil?
+
+     @outbox_worker_sleep_seconds.to_d
+   end
+
+   def lock_client
+     @lock_client || Dionysus::Utils::NullLockClient
+   end
+
+   def lock_expiry_time
+     @lock_expiry_time || 10_000
+   end
+
+   def error_handler
+     @error_handler || Dionysus::Utils::NullErrorHandler
+   end
+
+   def outbox_publishing_batch_size
+     @outbox_publishing_batch_size || 100
+   end
+
+   def transactional_outbox_enabled
+     return @transactional_outbox_enabled if defined?(@transactional_outbox_enabled)
+
+     true
+   end
+
+   def sidekiq_queue
+     @sidekiq_queue || self.class.default_sidekiq_queue
+   end
+
+   def publisher_service_name
+     @publisher_service_name || Karafka.producer.id
+   end
+
+   def genesis_consistency_safety_delay
+     @genesis_consistency_safety_delay || 60.seconds
+   end
+
+   def hermes_event_producer
+     @hermes_event_producer || Dionysus::Utils::NullHermesEventProducer
+   end
+
+   def publish_after_commit
+     return @publish_after_commit if defined?(@publish_after_commit)
+
+     false
+   end
+
+   def outbox_worker_publishing_delay
+     (@outbox_worker_publishing_delay || 0).to_d.seconds
+   end
+
+   def high_priority_sidekiq_queue
+     @high_priority_sidekiq_queue || self.class.high_priority_sidekiq_queue
+   end
+
+   def observers_inline_maximum_size
+     @observers_inline_maximum_size || 1000
+   end
+
+   def remove_consecutive_duplicates_before_publishing
+     return @remove_consecutive_duplicates_before_publishing if defined?(@remove_consecutive_duplicates_before_publishing)
+
+     false
+   end
+ end
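Most of the readers above fall back to a conservative default: null objects for instrumentation, locking and error handling, the `:dionysus` queue, and a 100-record outbox batch. A sketch of overriding a few of them, assuming `Dionysus::Producer.configure` yields this config object the same way `Dionysus::Consumer.configure` does (the producer entry point is not shown in this diff); the `DionysusOutbox` model name is hypothetical:

```ruby
Dionysus::Producer.configure do |config|
  config.outbox_model = DionysusOutbox            # hypothetical outbox AR model
  config.database_connection_provider = ActiveRecord::Base
  config.transaction_provider = ActiveRecord::Base
  config.outbox_publishing_batch_size = 250       # default: 100
  config.publish_after_commit = true              # default: false
  config.sidekiq_queue = :dionysus_critical       # default: :dionysus
end
```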
data/lib/dionysus/producer/deleted_record_serializer.rb
@@ -0,0 +1,17 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Producer::DeletedRecordSerializer < Dionysus::Producer::ModelSerializer
+   def as_json
+     super.merge(primary_key => primary_key_value)
+   end
+
+   private
+
+   def primary_key
+     record.class.primary_key
+   end
+
+   def primary_key_value
+     record.public_send(primary_key)
+   end
+ end
data/lib/dionysus/producer/genesis/performed.rb
@@ -0,0 +1,11 @@
+ # frozen_string_literal: true
+
+ if defined?(Hermes)
+   class Dionysus::Producer::Genesis::Performed < Hermes::BaseEvent
+     attribute :model, Dry.Types::Strict::String
+     attribute :service, Dry.Types::Strict::String
+     attribute :topic, Dry.Types::Strict::String
+     attribute :start_at, Dry.Types::Nominal::DateTime
+     attribute :end_at, Dry.Types::Nominal::DateTime
+   end
+ end
data/lib/dionysus/producer/genesis/stream_job.rb
@@ -0,0 +1,13 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Producer::Genesis::StreamJob
+   include Sidekiq::Worker
+
+   sidekiq_options queue: Dionysus::Producer::Config.default_sidekiq_queue
+
+   def perform(topic, model_klass, from, to, number_of_days, streamer_job)
+     Dionysus::Producer::Genesis::Streamer
+       .new(job_class: streamer_job.constantize)
+       .stream(topic, model_klass.constantize, from, to, number_of_days: number_of_days)
+   end
+ end
data/lib/dionysus/producer/genesis/streamer/base_job.rb
@@ -0,0 +1,44 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Producer::Genesis::Streamer::BaseJob
+   include Sidekiq::Worker
+
+   sidekiq_options queue: Dionysus::Producer::Config.default_sidekiq_queue
+
+   ONE_DAY_IN_SECONDS = 60 * 60 * 24
+   BATCH_SIZE = 1000
+
+   def self.enqueue(relation, model_class, topic, number_of_days: 1, batch_size: BATCH_SIZE)
+     distributor = Dionysus::Utils::SidekiqBatchedJobDistributor.new(
+       batch_size: batch_size,
+       units_count: relation.count,
+       time_range_in_seconds: (ONE_DAY_IN_SECONDS * number_of_days)
+     )
+
+     relation.in_batches(of: batch_size).lazy.each_with_index do |batch_relation, batch_number|
+       distributor.enqueue_batch(
+         self,
+         Dionysus::Producer.configuration.sidekiq_queue,
+         batch_number,
+         batch_relation.pluck(model_class.primary_key).sort,
+         model_class.to_s,
+         topic
+       )
+     end
+   end
+
+   def perform(ids, resource_name, topic)
+     model_class = resource_name.constantize
+     primary_key_column = model_class.primary_key
+
+     model_class
+       .where(primary_key_column => ids)
+       .find_each { |entity| call(entity, topic) }
+   end
+
+   private
+
+   def call(_item, _topic)
+     raise "implement me!"
+   end
+ end
data/lib/dionysus/producer/genesis/streamer/standard_job.rb
@@ -0,0 +1,43 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Producer::Genesis::Streamer::StandardJob < Dionysus::Producer::Genesis::Streamer::BaseJob
+   DESTROYED_EVENT_TYPE = "destroyed"
+   UPDATED_EVENT_TYPE = "updated"
+   GENESIS_TOPIC_SUFFIX = "genesis"
+
+   private
+
+   delegate :configuration, :outbox_model, :outbox_publisher, to: Dionysus::Producer
+   delegate :outbox_model, to: :configuration
+
+   def call(item, topic)
+     publishable = Dionysus::Producer::Outbox::Publishable.new(item)
+     outbox_record = Dionysus::Producer.configuration.outbox_model.new(
+       resource_class: publishable.model_name.to_s,
+       resource_id: publishable.publishable_id,
+       event_name: event_name_for(publishable),
+       topic: topic
+     )
+
+     options = {}
+     options[:genesis_only] = true if genesis_only?(topic)
+
+     outbox_publisher.publish(outbox_record, options)
+   end
+
+   def event_name_for(publishable)
+     Dionysus::Producer::Outbox::EventName
+       .new(publishable.resource_name)
+       .for_event_type(event_type(publishable))
+   end
+
+   def event_type(publishable)
+     return DESTROYED_EVENT_TYPE if publishable.soft_deletable? && publishable.soft_deleted?
+
+     UPDATED_EVENT_TYPE
+   end
+
+   def genesis_only?(topic)
+     topic.ends_with?(GENESIS_TOPIC_SUFFIX)
+   end
+ end
data/lib/dionysus/producer/genesis/streamer.rb
@@ -0,0 +1,40 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Producer::Genesis::Streamer
+   attr_reader :job_class, :config
+   private :job_class, :config
+
+   def initialize(job_class: Dionysus::Producer::Genesis::Streamer::StandardJob,
+       config: Dionysus::Producer.configuration)
+     @job_class = job_class
+     @config = config
+   end
+
+   def stream(topic, model_class, from, to, options = {})
+     resources = fetch_resources(model_class, from, to, options)
+     job_class.enqueue(
+       resources,
+       model_class,
+       topic,
+       number_of_days: options.fetch(:number_of_days, 1),
+       batch_size: options.fetch(:batch_size, 1000)
+     )
+   end
+
+   private
+
+   delegate :soft_delete_column, to: :config
+
+   def fetch_resources(resource_class, from, to, options_hash)
+     records = resource_class
+     records = resource_class.where("updated_at BETWEEN ? AND ?", from, to) if from.present? && to.present?
+     if visible_only?(options_hash) && records.column_names.include?(soft_delete_column.to_s)
+       records = records.where(soft_delete_column => nil)
+     end
+     records
+   end
+
+   def visible_only?(options_hash)
+     options_hash.fetch(:visible_only, false)
+   end
+ end
data/lib/dionysus/producer/genesis.rb
@@ -0,0 +1,62 @@
+ # frozen_string_literal: true
+
+ class Dionysus::Producer::Genesis
+   attr_reader :config
+   private :config
+
+   def initialize(config: Dionysus::Producer.configuration)
+     @config = config
+   end
+
+   def stream(topic:, model:, number_of_days:, from: nil, to: nil,
+       streamer_job: Dionysus::Producer::Genesis::Streamer::StandardJob)
+     ensure_genesis_not_executed_for_model_that_is_only_a_dependency(model, topic)
+     enqueue_stream_model_for_topic(topic, model, from, to, number_of_days, streamer_job)
+     publish_genesis_performed(model: model, topic: topic, number_of_days: number_of_days)
+   end
+
+   private
+
+   delegate :publisher_service_name, :genesis_consistency_safety_delay, :hermes_event_producer, :sidekiq_queue,
+     to: :config
+   delegate :responders_for_model_for_topic, :responders_for_dependency_parent_for_topic,
+     to: Dionysus::Producer
+
+   def ensure_genesis_not_executed_for_model_that_is_only_a_dependency(model, topic)
+     if responders_for_model_for_topic(model,
+         topic).empty? && responders_for_dependency_parent_for_topic(model, topic).any?
+       raise CannotExecuteGenesisForModelThatIsOnlyDependency.new(model, topic)
+     end
+   end
+
+   def enqueue_stream_model_for_topic(topic, model, from, to, number_of_days, streamer_job)
+     Dionysus::Producer::Genesis::StreamJob
+       .set(queue: sidekiq_queue)
+       .perform_async(topic.to_s, model.to_s, from.as_json, to.as_json, number_of_days.to_i, streamer_job.to_s)
+   end
+
+   def publish_genesis_performed(model:, topic:, number_of_days:)
+     event = Dionysus::Producer::Genesis::Performed.new(
+       model: model.to_s,
+       service: publisher_service_name,
+       topic: topic,
+       start_at: Time.current,
+       end_at: Time.current + number_of_days.days + genesis_consistency_safety_delay
+     )
+     hermes_event_producer.publish(event)
+   end
+
+   class CannotExecuteGenesisForModelThatIsOnlyDependency < StandardError
+     attr_reader :model, :topic
+     private :model, :topic
+     def initialize(model, topic)
+       super()
+       @model = model
+       @topic = topic
+     end
+
+     def message
+       "Cannot execute genesis for model #{model}, #{topic} because that is only a dependency. Execute it for parent instead"
+     end
+   end
+ end
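A usage sketch of the entry point above: it refuses to stream a model that is registered only as a dependency, enqueues `StreamJob` on the configured Sidekiq queue, and announces the run through the (optional) Hermes event producer. The model and topic names here are hypothetical:

```ruby
# Re-publish every Rental to a genesis topic, spreading the Sidekiq batches
# over roughly one day.
Dionysus::Producer::Genesis.new.stream(
  topic: "v3_rentals_genesis",  # hypothetical topic registered for Rental
  model: Rental,                # hypothetical publishable model
  number_of_days: 1
)
```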