deimos-temp-fork 0.0.1

Files changed (146)
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +83 -0
  3. data/.gitignore +41 -0
  4. data/.gitmodules +0 -0
  5. data/.rspec +1 -0
  6. data/.rubocop.yml +333 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +349 -0
  10. data/CODE_OF_CONDUCT.md +77 -0
  11. data/Dockerfile +23 -0
  12. data/Gemfile +6 -0
  13. data/Gemfile.lock +286 -0
  14. data/Guardfile +22 -0
  15. data/LICENSE.md +195 -0
  16. data/README.md +1099 -0
  17. data/Rakefile +13 -0
  18. data/bin/deimos +4 -0
  19. data/deimos-ruby.gemspec +44 -0
  20. data/docker-compose.yml +71 -0
  21. data/docs/ARCHITECTURE.md +140 -0
  22. data/docs/CONFIGURATION.md +236 -0
  23. data/docs/DATABASE_BACKEND.md +147 -0
  24. data/docs/INTEGRATION_TESTS.md +52 -0
  25. data/docs/PULL_REQUEST_TEMPLATE.md +35 -0
  26. data/docs/UPGRADING.md +128 -0
  27. data/lib/deimos-temp-fork.rb +95 -0
  28. data/lib/deimos/active_record_consume/batch_consumption.rb +164 -0
  29. data/lib/deimos/active_record_consume/batch_slicer.rb +27 -0
  30. data/lib/deimos/active_record_consume/message_consumption.rb +79 -0
  31. data/lib/deimos/active_record_consume/schema_model_converter.rb +52 -0
  32. data/lib/deimos/active_record_consumer.rb +67 -0
  33. data/lib/deimos/active_record_producer.rb +87 -0
  34. data/lib/deimos/backends/base.rb +32 -0
  35. data/lib/deimos/backends/db.rb +41 -0
  36. data/lib/deimos/backends/kafka.rb +33 -0
  37. data/lib/deimos/backends/kafka_async.rb +33 -0
  38. data/lib/deimos/backends/test.rb +20 -0
  39. data/lib/deimos/batch_consumer.rb +7 -0
  40. data/lib/deimos/config/configuration.rb +381 -0
  41. data/lib/deimos/config/phobos_config.rb +137 -0
  42. data/lib/deimos/consume/batch_consumption.rb +150 -0
  43. data/lib/deimos/consume/message_consumption.rb +94 -0
  44. data/lib/deimos/consumer.rb +104 -0
  45. data/lib/deimos/instrumentation.rb +76 -0
  46. data/lib/deimos/kafka_message.rb +60 -0
  47. data/lib/deimos/kafka_source.rb +128 -0
  48. data/lib/deimos/kafka_topic_info.rb +102 -0
  49. data/lib/deimos/message.rb +79 -0
  50. data/lib/deimos/metrics/datadog.rb +47 -0
  51. data/lib/deimos/metrics/mock.rb +39 -0
  52. data/lib/deimos/metrics/provider.rb +36 -0
  53. data/lib/deimos/monkey_patches/phobos_cli.rb +35 -0
  54. data/lib/deimos/monkey_patches/phobos_producer.rb +51 -0
  55. data/lib/deimos/poll_info.rb +9 -0
  56. data/lib/deimos/producer.rb +224 -0
  57. data/lib/deimos/railtie.rb +8 -0
  58. data/lib/deimos/schema_backends/avro_base.rb +140 -0
  59. data/lib/deimos/schema_backends/avro_local.rb +30 -0
  60. data/lib/deimos/schema_backends/avro_schema_coercer.rb +119 -0
  61. data/lib/deimos/schema_backends/avro_schema_registry.rb +34 -0
  62. data/lib/deimos/schema_backends/avro_validation.rb +21 -0
  63. data/lib/deimos/schema_backends/base.rb +150 -0
  64. data/lib/deimos/schema_backends/mock.rb +42 -0
  65. data/lib/deimos/shared_config.rb +63 -0
  66. data/lib/deimos/test_helpers.rb +360 -0
  67. data/lib/deimos/tracing/datadog.rb +35 -0
  68. data/lib/deimos/tracing/mock.rb +40 -0
  69. data/lib/deimos/tracing/provider.rb +29 -0
  70. data/lib/deimos/utils/db_poller.rb +150 -0
  71. data/lib/deimos/utils/db_producer.rb +243 -0
  72. data/lib/deimos/utils/deadlock_retry.rb +68 -0
  73. data/lib/deimos/utils/inline_consumer.rb +150 -0
  74. data/lib/deimos/utils/lag_reporter.rb +175 -0
  75. data/lib/deimos/utils/schema_controller_mixin.rb +115 -0
  76. data/lib/deimos/version.rb +5 -0
  77. data/lib/generators/deimos/active_record/templates/migration.rb.tt +28 -0
  78. data/lib/generators/deimos/active_record/templates/model.rb.tt +5 -0
  79. data/lib/generators/deimos/active_record_generator.rb +79 -0
  80. data/lib/generators/deimos/db_backend/templates/migration +25 -0
  81. data/lib/generators/deimos/db_backend/templates/rails3_migration +31 -0
  82. data/lib/generators/deimos/db_backend_generator.rb +48 -0
  83. data/lib/generators/deimos/db_poller/templates/migration +11 -0
  84. data/lib/generators/deimos/db_poller/templates/rails3_migration +16 -0
  85. data/lib/generators/deimos/db_poller_generator.rb +48 -0
  86. data/lib/tasks/deimos.rake +34 -0
  87. data/spec/active_record_batch_consumer_spec.rb +481 -0
  88. data/spec/active_record_consume/batch_slicer_spec.rb +42 -0
  89. data/spec/active_record_consume/schema_model_converter_spec.rb +105 -0
  90. data/spec/active_record_consumer_spec.rb +154 -0
  91. data/spec/active_record_producer_spec.rb +85 -0
  92. data/spec/backends/base_spec.rb +10 -0
  93. data/spec/backends/db_spec.rb +54 -0
  94. data/spec/backends/kafka_async_spec.rb +11 -0
  95. data/spec/backends/kafka_spec.rb +11 -0
  96. data/spec/batch_consumer_spec.rb +256 -0
  97. data/spec/config/configuration_spec.rb +248 -0
  98. data/spec/consumer_spec.rb +209 -0
  99. data/spec/deimos_spec.rb +169 -0
  100. data/spec/generators/active_record_generator_spec.rb +56 -0
  101. data/spec/handlers/my_batch_consumer.rb +10 -0
  102. data/spec/handlers/my_consumer.rb +10 -0
  103. data/spec/kafka_listener_spec.rb +55 -0
  104. data/spec/kafka_source_spec.rb +381 -0
  105. data/spec/kafka_topic_info_spec.rb +111 -0
  106. data/spec/message_spec.rb +19 -0
  107. data/spec/phobos.bad_db.yml +73 -0
  108. data/spec/phobos.yml +77 -0
  109. data/spec/producer_spec.rb +498 -0
  110. data/spec/rake_spec.rb +19 -0
  111. data/spec/schema_backends/avro_base_shared.rb +199 -0
  112. data/spec/schema_backends/avro_local_spec.rb +32 -0
  113. data/spec/schema_backends/avro_schema_registry_spec.rb +32 -0
  114. data/spec/schema_backends/avro_validation_spec.rb +24 -0
  115. data/spec/schema_backends/base_spec.rb +33 -0
  116. data/spec/schemas/com/my-namespace/Generated.avsc +71 -0
  117. data/spec/schemas/com/my-namespace/MyNestedSchema.avsc +62 -0
  118. data/spec/schemas/com/my-namespace/MySchema-key.avsc +13 -0
  119. data/spec/schemas/com/my-namespace/MySchema.avsc +18 -0
  120. data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc +18 -0
  121. data/spec/schemas/com/my-namespace/MySchemaWithBooleans.avsc +18 -0
  122. data/spec/schemas/com/my-namespace/MySchemaWithDateTimes.avsc +33 -0
  123. data/spec/schemas/com/my-namespace/MySchemaWithId.avsc +28 -0
  124. data/spec/schemas/com/my-namespace/MySchemaWithUniqueId.avsc +32 -0
  125. data/spec/schemas/com/my-namespace/Wibble.avsc +43 -0
  126. data/spec/schemas/com/my-namespace/Widget.avsc +27 -0
  127. data/spec/schemas/com/my-namespace/WidgetTheSecond.avsc +27 -0
  128. data/spec/schemas/com/my-namespace/request/CreateTopic.avsc +11 -0
  129. data/spec/schemas/com/my-namespace/request/Index.avsc +11 -0
  130. data/spec/schemas/com/my-namespace/request/UpdateRequest.avsc +11 -0
  131. data/spec/schemas/com/my-namespace/response/CreateTopic.avsc +11 -0
  132. data/spec/schemas/com/my-namespace/response/Index.avsc +11 -0
  133. data/spec/schemas/com/my-namespace/response/UpdateResponse.avsc +11 -0
  134. data/spec/spec_helper.rb +267 -0
  135. data/spec/utils/db_poller_spec.rb +320 -0
  136. data/spec/utils/db_producer_spec.rb +514 -0
  137. data/spec/utils/deadlock_retry_spec.rb +74 -0
  138. data/spec/utils/inline_consumer_spec.rb +31 -0
  139. data/spec/utils/lag_reporter_spec.rb +76 -0
  140. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  141. data/spec/utils/schema_controller_mixin_spec.rb +84 -0
  142. data/support/deimos-solo.png +0 -0
  143. data/support/deimos-with-name-next.png +0 -0
  144. data/support/deimos-with-name.png +0 -0
  145. data/support/flipp-logo.png +0 -0
  146. metadata +551 -0
data/lib/deimos/active_record_consume/batch_consumption.rb
@@ -0,0 +1,164 @@
+ # frozen_string_literal: true
+
+ require 'deimos/active_record_consume/batch_slicer'
+ require 'deimos/utils/deadlock_retry'
+ require 'deimos/message'
+
+ module Deimos
+   module ActiveRecordConsume
+     # Methods for consuming batches of messages and saving them to the database
+     # in bulk ActiveRecord operations.
+     module BatchConsumption
+       # Handle a batch of Kafka messages. Batches are split into "slices",
+       # which are groups of independent messages that can be processed together
+       # in a single database operation.
+       # If two messages in a batch have the same key, we cannot process them
+       # in the same operation as they would interfere with each other. Thus
+       # they are split into separate slices.
+       # @param payloads [Array<Hash>] Decoded payloads.
+       # @param metadata [Hash] Information about the batch, including keys.
+       def consume_batch(payloads, metadata)
+         messages = payloads.
+           zip(metadata[:keys]).
+           map { |p, k| Deimos::Message.new(p, nil, key: k) }
+
+         tags = %W(topic:#{metadata[:topic]})
+
+         Deimos.instrument('ar_consumer.consume_batch', tags) do
+           # The entire batch should be treated as one transaction so that if
+           # any message fails, the whole thing is rolled back, or retried
+           # if there is a deadlock.
+           Deimos::Utils::DeadlockRetry.wrap(tags) do
+             if @compacted || self.class.config[:no_keys]
+               update_database(compact_messages(messages))
+             else
+               uncompacted_update(messages)
+             end
+           end
+         end
+       end
+
+       # Get the unique key for the ActiveRecord instance from the incoming key.
+       # Override this method (with super) to customize the set of attributes that
+       # uniquely identifies each record in the database.
+       # @param key [String] The encoded key.
+       # @return [Hash] The key attributes.
+       def record_key(key)
+         decoded_key = decode_key(key)
+
+         if decoded_key.nil?
+           {}
+         elsif decoded_key.is_a?(Hash)
+           @key_converter.convert(decoded_key)
+         else
+           { @klass.primary_key => decoded_key }
+         end
+       end
+
+       protected
+
+       # Perform database operations for a batch of messages without compaction.
+       # All messages are split into slices containing only unique keys, and
+       # each slice is handled as its own batch.
+       # @param messages [Array<Message>] List of messages.
+       def uncompacted_update(messages)
+         BatchSlicer.
+           slice(messages).
+           each(&method(:update_database))
+       end
+
+       # Perform database operations for a group of messages.
+       # All messages with payloads are passed to upsert_records.
+       # All tombstone messages are passed to remove_records.
+       # @param messages [Array<Message>] List of messages.
+       def update_database(messages)
+         # Find all upserted records (i.e. those that have a payload) and all
+         # deleted records (no payload).
+         removed, upserted = messages.partition(&:tombstone?)
+
+         upsert_records(upserted) if upserted.any?
+         remove_records(removed) if removed.any?
+       end
+
+       # Upsert any non-deleted records.
+       # @param messages [Array<Message>] List of messages for a group of
+       # records to either be updated or inserted.
+       def upsert_records(messages)
+         key_cols = key_columns(messages)
+
+         # Create upsert attributes from the payload plus the key attributes
+         upserts = messages.map do |m|
+           attrs = if self.method(:record_attributes).parameters.size == 2
+                     record_attributes(m.payload, m.key)
+                   else
+                     record_attributes(m.payload)
+                   end
+
+           attrs&.merge(record_key(m.key))
+         end
+
+         # If an overridden record_attributes indicated no record, skip it
+         upserts.compact!
+
+         options = if key_cols.empty?
+                     {} # Can't upsert with no key, just do a regular insert
+                   else
+                     {
+                       on_duplicate_key_update: {
+                         # conflict_target must explicitly list the columns for
+                         # Postgres and SQLite. Not required for MySQL, but this
+                         # ensures consistent behaviour.
+                         conflict_target: key_cols,
+                         columns: :all
+                       }
+                     }
+                   end
+
+         @klass.import!(upserts, options)
+       end
+
+       # Delete any records with a tombstone.
+       # @param messages [Array<Message>] List of messages for a group of
+       # deleted records.
+       def remove_records(messages)
+         clause = deleted_query(messages)
+
+         clause.delete_all
+       end
+
+       # Create an ActiveRecord relation that matches all of the passed
+       # records. Used for bulk deletion.
+       # @param records [Array<Message>] List of messages.
+       # @return [ActiveRecord::Relation] Matching relation.
+       def deleted_query(records)
+         keys = records.
+           map { |m| record_key(m.key)[@klass.primary_key] }.
+           reject(&:nil?)
+
+         @klass.unscoped.where(@klass.primary_key => keys)
+       end
+
+       # Get the set of attribute names that uniquely identify messages in the
+       # batch. Requires at least one record.
+       # @param records [Array<Message>] Non-empty list of messages.
+       # @return [Array<String>] List of attribute names.
+       # @raise If records is empty.
+       def key_columns(records)
+         raise 'Cannot determine key from empty batch' if records.empty?
+
+         first_key = records.first.key
+         record_key(first_key).keys
+       end
+
+       # Compact a batch of messages, taking only the last message for each
+       # unique key.
+       # @param batch [Array<Message>] Batch of messages.
+       # @return [Array<Message>] Compacted batch.
+       def compact_messages(batch)
+         return batch unless batch.first&.key.present?
+
+         batch.reverse.uniq(&:key).reverse!
+       end
+     end
+   end
+ end
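For intuition about the two code paths above, here is a small sketch (not part of the gem's diff; the payloads and keys are made up) showing how a batch containing duplicate keys is treated. Compacted mode keeps only the last message per key, while uncompacted mode relies on BatchSlicer (shown in the next file) to keep duplicates apart; in either case, update_database then partitions each group into tombstones and upserts.

require 'deimos/message'

# Hypothetical batch: two updates for key 1, one update for key 2, and a
# tombstone (nil payload) for key 3.
batch = [
  Deimos::Message.new({ 'name' => 'first' },  nil, key: 1),
  Deimos::Message.new({ 'name' => 'second' }, nil, key: 1),
  Deimos::Message.new({ 'name' => 'third' },  nil, key: 2),
  Deimos::Message.new(nil,                    nil, key: 3)
]

# Compacted mode (the default): same logic as compact_messages - only the
# last message for each key survives, so one update_database call covers
# the whole batch.
compacted = batch.reverse.uniq(&:key).reverse
compacted.map(&:key) # => [1, 2, 3] (key 1 keeps the 'second' payload)

# What update_database does with each group: tombstones are deleted,
# everything else is upserted in bulk.
removed, upserted = compacted.partition(&:tombstone?)
removed.map(&:key)  # => [3]
upserted.map(&:key) # => [1, 2]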
data/lib/deimos/active_record_consume/batch_slicer.rb
@@ -0,0 +1,27 @@
+ # frozen_string_literal: true
+
+ module Deimos
+   module ActiveRecordConsume
+     # Helper class for breaking down batches into independent groups for
+     # processing
+     class BatchSlicer
+       # Split the batch into a series of independent slices. Each slice contains
+       # messages that can be processed in any order (i.e. they have distinct
+       # keys). Messages with the same key will be separated into different
+       # slices that maintain the correct order.
+       # E.g. Given messages A1, A2, B1, C1, C2, C3, they will be sliced as:
+       # [[A1, B1, C1], [A2, C2], [C3]]
+       def self.slice(messages)
+         ops = messages.group_by(&:key)
+
+         # Find maximum depth
+         depth = ops.values.map(&:length).max || 0
+
+         # Generate slices for each depth
+         depth.times.map do |i|
+           ops.values.map { |arr| arr.dig(i) }.compact
+         end
+       end
+     end
+   end
+ end
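As a quick check of the A1/A2/B1/C1/C2/C3 example in the comment above, this sketch (using a simple Struct that responds to key instead of real Deimos::Message objects) reproduces the documented slicing:

require 'deimos/active_record_consume/batch_slicer'

# Stand-in for a message: anything responding to #key works with the slicer.
Msg = Struct.new(:label, :key)

batch = [
  Msg.new('A1', 'A'), Msg.new('A2', 'A'),
  Msg.new('B1', 'B'),
  Msg.new('C1', 'C'), Msg.new('C2', 'C'), Msg.new('C3', 'C')
]

slices = Deimos::ActiveRecordConsume::BatchSlicer.slice(batch)
slices.map { |slice| slice.map(&:label) }
# => [["A1", "B1", "C1"], ["A2", "C2"], ["C3"]]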
data/lib/deimos/active_record_consume/message_consumption.rb
@@ -0,0 +1,79 @@
+ # frozen_string_literal: true
+
+ module Deimos
+   module ActiveRecordConsume
+     # Methods for consuming individual messages and saving them to the database
+     # as ActiveRecord instances.
+     module MessageConsumption
+       # Find the record specified by the given payload and key.
+       # Default is to use the primary key column and the value of the first
+       # field in the key.
+       # @param klass [Class < ActiveRecord::Base]
+       # @param _payload [Hash]
+       # @param key [Object]
+       # @return [ActiveRecord::Base]
+       def fetch_record(klass, _payload, key)
+         klass.unscoped.where(klass.primary_key => key).first
+       end
+
+       # Assign a key to a new record.
+       # @param record [ActiveRecord::Base]
+       # @param _payload [Hash]
+       # @param key [Object]
+       def assign_key(record, _payload, key)
+         record[record.class.primary_key] = key
+       end
+
+       # :nodoc:
+       def consume(payload, metadata)
+         unless self.process_message?(payload)
+           Deimos.config.logger.debug(
+             message: 'Skipping processing of message',
+             payload: payload,
+             metadata: metadata
+           )
+           return
+         end
+
+         key = metadata.with_indifferent_access[:key]
+         klass = self.class.config[:record_class]
+         record = fetch_record(klass, (payload || {}).with_indifferent_access, key)
+         if payload.nil?
+           destroy_record(record)
+           return
+         end
+         if record.blank?
+           record = klass.new
+           assign_key(record, payload, key)
+         end
+
+         # for backwards compatibility
+         # TODO next major release we should deprecate this
+         attrs = if self.method(:record_attributes).parameters.size == 2
+                   record_attributes(payload.with_indifferent_access, key)
+                 else
+                   record_attributes(payload.with_indifferent_access)
+                 end
+         # don't use attributes= - bypass Rails < 5 attr_protected
+         attrs.each do |k, v|
+           record.send("#{k}=", v)
+         end
+         save_record(record)
+       end
+
+       # @param record [ActiveRecord::Base]
+       def save_record(record)
+         record.created_at ||= Time.zone.now if record.respond_to?(:created_at)
+         record.updated_at = Time.zone.now if record.respond_to?(:updated_at)
+         record.save!
+       end
+
+       # Destroy a record that received a null payload. Override if you need
+       # to do something other than a straight destroy (e.g. mark as archived).
+       # @param record [ActiveRecord::Base]
+       def destroy_record(record)
+         record&.destroy
+       end
+     end
+   end
+ end
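These per-message hooks are meant to be overridden in a consumer subclass. A minimal sketch, assuming a hypothetical Widget model (the usual schema/topic configuration is omitted for brevity), that skips unwanted payloads and archives instead of deleting on tombstones:

class MyWidgetConsumer < Deimos::ActiveRecordConsumer
  record_class Widget

  # Skip messages we don't care about; the offset is still committed.
  def process_message?(payload)
    payload['status'] != 'ignore'
  end

  # On a null payload, mark the record as archived rather than destroying it.
  def destroy_record(record)
    record&.update(archived: true)
  end
end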
data/lib/deimos/active_record_consume/schema_model_converter.rb
@@ -0,0 +1,52 @@
+ # frozen_string_literal: true
+
+ module Deimos
+   module ActiveRecordConsume
+     # Convert a message with a schema to an ActiveRecord model
+     class SchemaModelConverter
+       # Create new converter
+       # @param decoder [SchemaBackends::Base] Incoming message schema.
+       # @param klass [ActiveRecord::Base] Model to map to.
+       def initialize(decoder, klass)
+         @decoder = decoder
+         @klass = klass
+       end
+
+       # Convert a message from a decoded hash to a set of ActiveRecord
+       # attributes. Attributes that don't exist in the model will be ignored.
+       # @param payload [Hash] Decoded message payload.
+       # @return [Hash] Model attributes.
+       def convert(payload)
+         attributes = {}
+         @decoder.schema_fields.each do |field|
+           column = @klass.columns.find { |c| c.name == field.name }
+           next if column.nil?
+           next if %w(updated_at created_at).include?(field.name)
+
+           attributes[field.name] = _coerce_field(column, payload[field.name])
+         end
+         attributes
+       end
+
+       private
+
+       # @param column [ActiveRecord::ConnectionAdapters::Column]
+       # @param val [Object]
+       def _coerce_field(column, val)
+         return nil if val.nil?
+
+         if column.type == :datetime
+           int_val = begin
+             val.is_a?(Integer) ? val : (val.is_a?(String) && Integer(val))
+           rescue StandardError
+             nil
+           end
+
+           return Time.zone.at(int_val) if int_val
+         end
+
+         val
+       end
+     end
+   end
+ end
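For illustration, a converter built for a hypothetical Widget model (with decoder standing in for a schema backend instance, e.g. self.class.decoder inside a consumer) maps only the fields that match columns, skips the locally managed timestamps, and coerces epoch values on datetime columns:

converter = Deimos::ActiveRecordConsume::SchemaModelConverter.new(decoder, Widget)

converter.convert({
  'name'         => 'widget-1',
  'some_int'     => 3,
  'updated_at'   => 1_579_000_000, # ignored: timestamps are set by save_record
  'not_a_column' => 'dropped'      # ignored: no matching column on Widget
})
# => { 'name' => 'widget-1', 'some_int' => 3 }
# An integer or numeric string hitting a :datetime column would come back
# as Time.zone.at(value) instead.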
data/lib/deimos/active_record_consumer.rb
@@ -0,0 +1,67 @@
+ # frozen_string_literal: true
+
+ require 'deimos/active_record_consume/batch_consumption'
+ require 'deimos/active_record_consume/message_consumption'
+ require 'deimos/active_record_consume/schema_model_converter'
+ require 'deimos/consumer'
+
+ module Deimos
+   # Basic ActiveRecord consumer class. Consumes messages and upserts them to
+   # the database. For tombstones (null payloads), deletes corresponding
+   # records from the database. Can operate in either message-by-message mode
+   # or in batch mode.
+   #
+   # In batch mode, ActiveRecord callbacks will be skipped and messages will
+   # be batched to minimize database calls.
+   #
+   # To configure batch vs. message mode, change the delivery mode of your
+   # Phobos listener.
+   # Message-by-message -> use `delivery: message` or `delivery: batch`
+   # Batch -> use `delivery: inline_batch`
+   class ActiveRecordConsumer < Consumer
+     include ActiveRecordConsume::MessageConsumption
+     include ActiveRecordConsume::BatchConsumption
+
+     class << self
+       # @param klass [Class < ActiveRecord::Base] the class used to save to the
+       # database.
+       def record_class(klass)
+         config[:record_class] = klass
+       end
+
+       # @param val [Boolean] Turn pre-compaction of the batch on or off. If true,
+       # only the last message for each unique key in a batch is processed.
+       def compacted(val)
+         config[:compacted] = val
+       end
+     end
+
+     # Setup
+     def initialize
+       @klass = self.class.config[:record_class]
+       @converter = ActiveRecordConsume::SchemaModelConverter.new(self.class.decoder, @klass)
+
+       if self.class.config[:key_schema]
+         @key_converter = ActiveRecordConsume::SchemaModelConverter.new(self.class.key_decoder, @klass)
+       end
+
+       @compacted = self.class.config[:compacted] != false
+     end
+
+     # Override this method (with `super`) if you want to add/change the default
+     # attributes set on the new/existing record.
+     # @param payload [Hash]
+     # @param _key [String]
+     def record_attributes(payload, _key=nil)
+       @converter.convert(payload)
+     end
+
+     # Override this method to conditionally save records.
+     # @param payload [Hash] The Kafka message as a hash.
+     # @return [Boolean] if true, the record is created/updated.
+     # If false, record processing is skipped but the message offset is still committed.
+     def process_message?(_payload)
+       true
+     end
+   end
+ end
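Putting the pieces together, a typical subclass might look like the sketch below (Widget and the derived description field are hypothetical; schema, namespace and topic are configured the same way as for any other Deimos consumer and are omitted here):

class WidgetConsumer < Deimos::ActiveRecordConsumer
  record_class Widget
  compacted false # process every message instead of only the last one per key

  # Add to the attributes derived from the schema before saving.
  def record_attributes(payload, _key=nil)
    super.merge('description' => "#{payload['name']} (synced from Kafka)")
  end
end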
data/lib/deimos/active_record_producer.rb
@@ -0,0 +1,87 @@
+ # frozen_string_literal: true
+
+ require 'deimos/producer'
+
+ module Deimos
+   # Class which automatically produces a record when given an ActiveRecord
+   # instance or a list of them. Just call `send_events` on a list of records
+   # and they will be auto-published. You can override `generate_payload`
+   # to make changes to the payload before it's published.
+   #
+   # You can also call this with a list of hashes representing attributes.
+   # This is common when using activerecord-import.
+   class ActiveRecordProducer < Producer
+     class << self
+       # Indicate the class this producer is working on.
+       # @param klass [Class]
+       # @param refetch [Boolean] if true, and we are given a hash instead of
+       # a record object, refetch the record to pass into the `generate_payload`
+       # method.
+       def record_class(klass, refetch: true)
+         config[:record_class] = klass
+         config[:refetch_record] = refetch
+       end
+
+       # @param record [ActiveRecord::Base]
+       # @param force_send [Boolean]
+       def send_event(record, force_send: false)
+         send_events([record], force_send: force_send)
+       end
+
+       # @param records [Array<ActiveRecord::Base>]
+       # @param force_send [Boolean]
+       def send_events(records, force_send: false)
+         primary_key = config[:record_class]&.primary_key
+         messages = records.map do |record|
+           if record.respond_to?(:attributes)
+             attrs = record.attributes.with_indifferent_access
+           else
+             attrs = record.with_indifferent_access
+             if config[:refetch_record] && attrs[primary_key]
+               record = config[:record_class].find(attrs[primary_key])
+             end
+           end
+           generate_payload(attrs, record).with_indifferent_access
+         end
+         self.publish_list(messages, force_send: force_send)
+       end
+
+       # Generate the payload, given a list of attributes or a record.
+       # Can be overridden or added to by subclasses.
+       # @param attributes [Hash]
+       # @param _record [ActiveRecord::Base] May be nil if refetch_record
+       # is not set.
+       # @return [Hash]
+       def generate_payload(attributes, _record)
+         fields = self.encoder.schema_fields
+         payload = attributes.stringify_keys
+         payload.delete_if do |k, _|
+           k.to_sym != :payload_key && !fields.map(&:name).include?(k)
+         end
+       end
+
+       # Query to use when polling the database with the DbPoller. Add
+       # includes, joins, or wheres as necessary, or replace entirely.
+       # @param time_from [Time] the time to start the query from.
+       # @param time_to [Time] the time to end the query at.
+       # @param column_name [Symbol] the column name to look for.
+       # @param min_id [Numeric] the minimum ID (i.e. all IDs must be greater
+       # than this value).
+       # @return [ActiveRecord::Relation]
+       def poll_query(time_from:, time_to:, column_name: :updated_at, min_id:)
+         klass = config[:record_class]
+         table = ActiveRecord::Base.connection.quote_table_name(klass.table_name)
+         column = ActiveRecord::Base.connection.quote_column_name(column_name)
+         primary = ActiveRecord::Base.connection.quote_column_name(klass.primary_key)
+         klass.where(
+           "((#{table}.#{column} = ? AND #{table}.#{primary} > ?) \
+           OR #{table}.#{column} > ?) AND #{table}.#{column} <= ?",
+           time_from,
+           min_id,
+           time_from,
+           time_to
+         )
+       end
+     end
+   end
+ end
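And a matching producer sketch (again with a hypothetical Widget model and field names; the schema/topic configuration is the usual Deimos producer setup and is omitted). Records, relations, or attribute hashes can all be passed; hashes are refetched by primary key because refetch defaults to true:

class WidgetProducer < Deimos::ActiveRecordProducer
  record_class Widget

  # Derive a field that isn't a straight copy of a column.
  def self.generate_payload(attributes, record)
    super.merge('full_name' => "#{attributes['name']} #{attributes['suffix']}")
  end
end

WidgetProducer.send_event(Widget.first)
WidgetProducer.send_events(Widget.where(archived: false))
WidgetProducer.send_events([{ id: 1 }, { id: 2 }]) # refetched via Widget.find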