deimos-ruby 1.16.3 → 1.16.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/CHANGELOG.md +5 -0
- data/Gemfile +6 -0
- data/README.md +5 -0
- data/lib/deimos/active_record_consume/batch_consumption.rb +7 -2
- data/lib/deimos/active_record_consume/batch_slicer.rb +2 -0
- data/lib/deimos/active_record_consume/message_consumption.rb +8 -4
- data/lib/deimos/active_record_consumer.rb +7 -4
- data/lib/deimos/active_record_producer.rb +3 -0
- data/lib/deimos/backends/base.rb +4 -2
- data/lib/deimos/backends/kafka.rb +1 -0
- data/lib/deimos/backends/kafka_async.rb +1 -0
- data/lib/deimos/config/configuration.rb +4 -0
- data/lib/deimos/config/phobos_config.rb +2 -1
- data/lib/deimos/consume/batch_consumption.rb +8 -1
- data/lib/deimos/consume/message_consumption.rb +4 -1
- data/lib/deimos/instrumentation.rb +11 -4
- data/lib/deimos/kafka_message.rb +1 -0
- data/lib/deimos/kafka_source.rb +5 -0
- data/lib/deimos/kafka_topic_info.rb +4 -0
- data/lib/deimos/message.rb +19 -2
- data/lib/deimos/metrics/datadog.rb +2 -1
- data/lib/deimos/metrics/mock.rb +2 -2
- data/lib/deimos/metrics/provider.rb +6 -0
- data/lib/deimos/monkey_patches/phobos_cli.rb +1 -1
- data/lib/deimos/monkey_patches/phobos_producer.rb +1 -0
- data/lib/deimos/producer.rb +12 -6
- data/lib/deimos/schema_backends/base.rb +31 -17
- data/lib/deimos/schema_backends/mock.rb +2 -2
- data/lib/deimos/schema_class/base.rb +9 -5
- data/lib/deimos/schema_class/enum.rb +4 -2
- data/lib/deimos/schema_class/record.rb +5 -5
- data/lib/deimos/shared_config.rb +6 -2
- data/lib/deimos/test_helpers.rb +21 -4
- data/lib/deimos/tracing/datadog.rb +1 -1
- data/lib/deimos/tracing/mock.rb +4 -3
- data/lib/deimos/tracing/provider.rb +5 -0
- data/lib/deimos/utils/db_poller.rb +9 -1
- data/lib/deimos/utils/db_producer.rb +14 -2
- data/lib/deimos/utils/deadlock_retry.rb +3 -0
- data/lib/deimos/utils/inline_consumer.rb +14 -6
- data/lib/deimos/utils/lag_reporter.rb +11 -0
- data/lib/deimos/utils/schema_controller_mixin.rb +8 -0
- data/lib/deimos/version.rb +1 -1
- data/lib/deimos.rb +3 -2
- data/lib/generators/deimos/active_record_generator.rb +1 -1
- data/lib/generators/deimos/db_backend_generator.rb +1 -0
- data/lib/generators/deimos/db_poller_generator.rb +1 -0
- data/lib/generators/deimos/schema_class/templates/schema_record.rb.tt +1 -1
- data/lib/generators/deimos/schema_class_generator.rb +12 -3
- data/rbs_collection.lock.yaml +176 -0
- data/rbs_collection.yaml +15 -0
- data/sig/avro.rbs +14 -0
- data/sig/defs.rbs +1859 -0
- data/sig/fig_tree.rbs +2 -0
- data/spec/snapshots/consumers-no-nest.snap +1 -1
- data/spec/snapshots/consumers.snap +1 -1
- data/spec/snapshots/consumers_and_producers-no-nest.snap +3 -3
- data/spec/snapshots/consumers_and_producers.snap +3 -3
- data/spec/snapshots/consumers_circular-no-nest.snap +1 -1
- data/spec/snapshots/consumers_circular.snap +1 -1
- data/spec/snapshots/consumers_complex_types-no-nest.snap +1 -1
- data/spec/snapshots/consumers_complex_types.snap +1 -1
- data/spec/snapshots/consumers_nested-no-nest.snap +1 -1
- data/spec/snapshots/consumers_nested.snap +1 -1
- data/spec/snapshots/namespace_folders.snap +3 -3
- data/spec/snapshots/producers_with_key-no-nest.snap +1 -1
- data/spec/snapshots/producers_with_key.snap +1 -1
- metadata +7 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7412853b7b878c8bcfbfa130f21b5d2c3d46b4d741f1f6adad9edcb358843a4b
+  data.tar.gz: 5672157db0da4ea4a67893d7a792737e858962a2011cd097fc4af23a9bea60f0
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 444d6e14343916242caaa612d1e1c731ce45423ef41dc5e2cf4d9a85e8d618851001d52cf30ef2c2db060b2631bcaa4daa921ddd66eed93636d5d09ac5ad47e2
+  data.tar.gz: 253428dc65fc2d86108671a9e03dd85e9a9afa97681dd562efe2a6ec7d61317002a375705ccc7e0b267da31041a0d4c5b2fce960b7e73195dfdad651c5e77b09
data/.gitignore
CHANGED
data/CHANGELOG.md
CHANGED
@@ -7,6 +7,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

 ## UNRELEASED

+# 1.16.4 - 2022-09-09
+
+- Now generates RBS types.
+- Use `allocate` instead of `new` in `tombstone` to avoid issues with required fields in `initialize`.
+
 # 1.16.3 - 2022-09-08

 - Add the `tombstone` method to schema classes.
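The `allocate` change noted above relies on a plain Ruby mechanism: `Class#allocate` creates an instance without running `initialize`, so a tombstone (key only, no payload fields) can be built even when a generated `initialize` requires every schema field. A minimal sketch using a hypothetical schema class, not the actual Deimos-generated code:

```ruby
# Hypothetical schema class for illustration; not the Deimos-generated code.
class MySchemaRecord
  attr_accessor :test_id, :some_int

  # Generated initializers can require every schema field.
  def initialize(test_id:, some_int:)
    @test_id = test_id
    @some_int = some_int
  end

  # Sketch of the tombstone pattern: allocate skips initialize entirely,
  # so the required keyword arguments above are never enforced.
  def self.tombstone(key)
    record = allocate
    record.test_id = key
    record
  end
end

# MySchemaRecord.new(some_int: 3)  # would raise ArgumentError: missing keyword: :test_id
tombstone = MySchemaRecord.tombstone('abc')
tombstone.test_id # => "abc"
```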
data/Gemfile
CHANGED
@@ -4,3 +4,9 @@ source 'https://rubygems.org'

 # Specify your gem's dependencies in boilerplate.gemspec
 gemspec
+
+if !ENV['CI'] || ENV['CI'] == ''
+  # TODO: once all PRs are merged, add this to gemspec as a development dependency
+  gem 'sord', git: 'git@github.com:dorner/sord.git', ref: 'local-develop'
+end
+
data/README.md
CHANGED
@@ -1198,6 +1198,11 @@ decoded = Deimos.decode(schema: 'MySchema', namespace: 'com.my-namespace', paylo

 Bug reports and pull requests are welcome on GitHub at https://github.com/flipp-oss/deimos .

+You can/should re-generate RBS types when methods or classes change by running the following:
+
+    rbs collection install # if you haven't done it recently
+    bundle exec sord --hide-private --no-sord-comments sig/defs.rbs --tags 'override:Override'
+
 ### Linting

 Deimos uses Rubocop to lint the code. Please run Rubocop on your code
data/lib/deimos/active_record_consume/batch_consumption.rb
CHANGED
@@ -15,8 +15,9 @@ module Deimos
 # If two messages in a batch have the same key, we cannot process them
 # in the same operation as they would interfere with each other. Thus
 # they are split
-# @param payloads [Array<Hash
+# @param payloads [Array<Hash,Deimos::SchemaClass::Record>] Decoded payloads
 # @param metadata [Hash] Information about batch, including keys.
+# @return [void]
 def consume_batch(payloads, metadata)
   messages = payloads.
     zip(metadata[:keys]).
@@ -59,6 +60,7 @@ module Deimos
 # All messages are split into slices containing only unique keys, and
 # each slice is handles as its own batch.
 # @param messages [Array<Message>] List of messages.
+# @return [void]
 def uncompacted_update(messages)
   BatchSlicer.
     slice(messages).
@@ -69,6 +71,7 @@ module Deimos
 # All messages with payloads are passed to upsert_records.
 # All tombstones messages are passed to remove_records.
 # @param messages [Array<Message>] List of messages.
+# @return [void]
 def update_database(messages)
   # Find all upserted records (i.e. that have a payload) and all
   # deleted record (no payload)
@@ -81,6 +84,7 @@ module Deimos
 # Upsert any non-deleted records
 # @param messages [Array<Message>] List of messages for a group of
 # records to either be updated or inserted.
+# @return [void]
 def upsert_records(messages)
   key_cols = key_columns(messages)

@@ -119,6 +123,7 @@ module Deimos
 # Delete any records with a tombstone.
 # @param messages [Array<Message>] List of messages for a group of
 # deleted records.
+# @return [void]
 def remove_records(messages)
   clause = deleted_query(messages)

@@ -128,7 +133,7 @@ module Deimos
 # Create an ActiveRecord relation that matches all of the passed
 # records. Used for bulk deletion.
 # @param records [Array<Message>] List of messages.
-# @return ActiveRecord::Relation Matching relation.
+# @return [ActiveRecord::Relation] Matching relation.
 def deleted_query(records)
   keys = records.
     map { |m| record_key(m.key)[@klass.primary_key] }.
data/lib/deimos/active_record_consume/batch_slicer.rb
CHANGED
@@ -11,6 +11,8 @@ module Deimos
 # slices that maintain the correct order.
 # E.g. Given messages A1, A2, B1, C1, C2, C3, they will be sliced as:
 # [[A1, B1, C1], [A2, C2], [C3]]
+# @param messages [Array<Message>]
+# @return [Array<Array<Message>>]
 def self.slice(messages)
   ops = messages.group_by(&:key)

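The slicing behavior documented above (group messages by key, then emit one slice per "round" so no slice contains two messages with the same key) can be reproduced in a few lines of plain Ruby. This is an illustrative sketch, not the gem's actual implementation:

```ruby
# Illustrative sketch of key-based slicing; Msg is a stand-in for Deimos::Message.
Msg = Struct.new(:key, :id)

def slice_by_key(messages)
  groups = messages.group_by(&:key).values       # e.g. [[A1, A2], [B1], [C1, C2, C3]]
  rounds = groups.map(&:size).max || 0
  # Take the i-th message of every group into slice i, dropping gaps.
  (0...rounds).map { |i| groups.map { |g| g[i] }.compact }
end

msgs = [Msg.new('A', 1), Msg.new('A', 2), Msg.new('B', 1),
        Msg.new('C', 1), Msg.new('C', 2), Msg.new('C', 3)]
slice_by_key(msgs).map { |slice| slice.map { |m| "#{m.key}#{m.id}" } }
# => [["A1", "B1", "C1"], ["A2", "C2"], ["C3"]]
```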
data/lib/deimos/active_record_consume/message_consumption.rb
CHANGED
@@ -8,8 +8,8 @@ module Deimos
 # Find the record specified by the given payload and key.
 # Default is to use the primary key column and the value of the first
 # field in the key.
-# @param klass [Class
-# @param _payload [Hash
+# @param klass [Class<ActiveRecord::Base>]
+# @param _payload [Hash,Deimos::SchemaClass::Record]
 # @param key [Object]
 # @return [ActiveRecord::Base]
 def fetch_record(klass, _payload, key)
@@ -19,14 +19,16 @@ module Deimos

 # Assign a key to a new record.
 # @param record [ActiveRecord::Base]
-# @param _payload [Hash
+# @param _payload [Hash,Deimos::SchemaClass::Record]
 # @param key [Object]
+# @return [void]
 def assign_key(record, _payload, key)
   record[record.class.primary_key] = key
 end

-# @param payload [Hash
+# @param payload [Hash,Deimos::SchemaClass::Record] Decoded payloads
 # @param metadata [Hash] Information about batch, including keys.
+# @return [void]
 def consume(payload, metadata)
   unless self.process_message?(payload)
     Deimos.config.logger.debug(
@@ -64,6 +66,7 @@ module Deimos
 end

 # @param record [ActiveRecord::Base]
+# @return [void]
 def save_record(record)
   record.created_at ||= Time.zone.now if record.respond_to?(:created_at)
   record.updated_at = Time.zone.now if record.respond_to?(:updated_at)
@@ -73,6 +76,7 @@ module Deimos
 # Destroy a record that received a null payload. Override if you need
 # to do something other than a straight destroy (e.g. mark as archived).
 # @param record [ActiveRecord::Base]
+# @return [void]
 def destroy_record(record)
   record&.destroy
 end
data/lib/deimos/active_record_consumer.rb
CHANGED
@@ -23,14 +23,16 @@ module Deimos
 include ActiveRecordConsume::BatchConsumption

 class << self
-  # param klass [Class
+  # @param klass [Class<ActiveRecord::Base>] the class used to save to the
   # database.
+  # @return [void]
   def record_class(klass)
     config[:record_class] = klass
   end

-  # param val [Boolean] Turn pre-compaction of the batch on or off. If true,
+  # @param val [Boolean] Turn pre-compaction of the batch on or off. If true,
   # only the last message for each unique key in a batch is processed.
+  # @return [void]
   def compacted(val)
     config[:compacted] = val
   end
@@ -50,14 +52,15 @@ module Deimos

 # Override this method (with `super`) if you want to add/change the default
 # attributes set to the new/existing record.
-# @param payload [Hash
+# @param payload [Hash,Deimos::SchemaClass::Record]
 # @param _key [String]
+# @return [Hash]
 def record_attributes(payload, _key=nil)
   @converter.convert(payload)
 end

 # Override this message to conditionally save records
-# @param _payload [Hash
+# @param _payload [Hash,Deimos::SchemaClass::Record] The kafka message
 # @return [Boolean] if true, record is created/update.
 # If false, record processing is skipped but message offset is still committed.
 def process_message?(_payload)
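For context, the class-level `record_class` and `compacted` settings documented above are called from a consumer subclass. A rough usage sketch; the `Widget` model, the checksum attribute, and the `active` payload field are made up for illustration:

```ruby
# Rough usage sketch; Widget and the overridden method bodies are illustrative only.
class MyConsumer < Deimos::ActiveRecordConsumer
  record_class Widget   # the ActiveRecord class to upsert into
  compacted false       # process every message, not just the last one per key

  # Optionally customize the attributes written to the record.
  def record_attributes(payload, _key=nil)
    super.merge(checksum: 'some_checksum')
  end

  # Optionally skip records without failing the batch.
  def process_message?(payload)
    payload[:active]
  end
end
```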
@@ -17,6 +17,7 @@ module Deimos
|
|
17
17
|
# @param refetch [Boolean] if true, and we are given a hash instead of
|
18
18
|
# a record object, refetch the record to pass into the `generate_payload`
|
19
19
|
# method.
|
20
|
+
# @return [void]
|
20
21
|
def record_class(klass, refetch: true)
|
21
22
|
config[:record_class] = klass
|
22
23
|
config[:refetch_record] = refetch
|
@@ -24,12 +25,14 @@ module Deimos
|
|
24
25
|
|
25
26
|
# @param record [ActiveRecord::Base]
|
26
27
|
# @param force_send [Boolean]
|
28
|
+
# @return [void]
|
27
29
|
def send_event(record, force_send: false)
|
28
30
|
send_events([record], force_send: force_send)
|
29
31
|
end
|
30
32
|
|
31
33
|
# @param records [Array<ActiveRecord::Base>]
|
32
34
|
# @param force_send [Boolean]
|
35
|
+
# @return [void]
|
33
36
|
def send_events(records, force_send: false)
|
34
37
|
primary_key = config[:record_class]&.primary_key
|
35
38
|
messages = records.map do |record|
|
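A minimal usage sketch for the producer API above. The topic, schema, namespace, and `Widget` model are placeholders; only `record_class`, `send_event`, and `send_events` come from the methods documented in this diff, and the class-level topic/schema settings follow the producer configuration described in the gem's README:

```ruby
# Placeholder names throughout; a sketch, not a drop-in producer.
class WidgetProducer < Deimos::ActiveRecordProducer
  topic 'Widgets'
  schema 'MySchema'
  namespace 'com.my-namespace'
  key_config field: :id
  record_class Widget
end

widget = Widget.create!(name: 'Demo')
WidgetProducer.send_event(widget)                   # publish one record
WidgetProducer.send_events(Widget.limit(10).to_a)   # publish a batch of records
```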
data/lib/deimos/backends/base.rb
CHANGED
@@ -5,8 +5,9 @@ module Deimos
 # Abstract class for all publish backends.
 class Base
   class << self
-    # @param producer_class [Class
+    # @param producer_class [Class<Deimos::Producer>]
     # @param messages [Array<Deimos::Message>]
+    # @return [void]
     def publish(producer_class:, messages:)
       Deimos.config.logger.info(
         message: 'Publishing messages',
@@ -21,8 +22,9 @@ module Deimos
       execute(producer_class: producer_class, messages: messages)
     end

-    # @param producer_class [Class
+    # @param producer_class [Class<Deimos::Producer>]
     # @param messages [Array<Deimos::Message>]
+    # @return [void]
     def execute(producer_class:, messages:)
       raise NotImplementedError
     end
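As the hunks above show, a publish backend only has to implement `execute(producer_class:, messages:)`; `publish` handles logging and then delegates to it. A hedged sketch of a custom backend, where writing messages to a local log file is an invented example and only the `execute` signature is taken from the diff:

```ruby
require 'json'

# Sketch of a custom publish backend; the file-logging body is illustrative only.
class LogBackend < Deimos::Backends::Base
  class << self
    # Receives the producer class and the batch of Deimos::Message objects.
    def execute(producer_class:, messages:)
      File.open('deimos_messages.log', 'a') do |f|
        messages.each do |message|
          f.puts({ producer: producer_class.name, payload: message.payload }.to_json)
        end
      end
    end
  end
end
```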
data/lib/deimos/config/configuration.rb
CHANGED
@@ -32,6 +32,7 @@ module Deimos
 end

 # Loads generated classes
+# @return [void]
 def self.load_generated_schema_classes
   if Deimos.config.schema.generated_class_path.nil?
     raise 'Cannot use schema classes without schema.generated_class_path. Please provide a directory.'
@@ -43,6 +44,7 @@ module Deimos
 end

 # Ensure everything is set up correctly for the DB backend.
+# @!visibility private
 def self.validate_db_backend
   begin
     require 'activerecord-import'
@@ -56,6 +58,7 @@ module Deimos

 # Validate that consumers are configured correctly, including their
 # delivery mode.
+# @!visibility private
 def self.validate_consumers
   Phobos.config.listeners.each do |listener|
     handler_class = listener.handler.constantize
@@ -74,6 +77,7 @@ module Deimos
   end
 end

+# @!visibility private
 # @param kafka_config [FigTree::ConfigStruct]
 def self.configure_producer_or_consumer(kafka_config)
   klass = kafka_config.class_name.constantize
data/lib/deimos/config/phobos_config.rb
CHANGED
@@ -20,7 +20,7 @@ module Deimos
   }.to_h
 end

-#
+# @return [void]
 def reset!
   super
   Phobos.configure(self.phobos_config)
@@ -115,6 +115,7 @@ module Deimos
 end

 # Legacy method to parse Phobos config file
+# @!visibility private
 def phobos_config_file=(file)
   pconfig = YAML.load(ERB.new(File.read(File.expand_path(file))).result). # rubocop:disable Security/YAMLLoad
     with_indifferent_access
data/lib/deimos/consume/batch_consumption.rb
CHANGED
@@ -9,7 +9,9 @@ module Deimos
 extend ActiveSupport::Concern
 include Phobos::BatchHandler

-#
+# @param batch [Array<String>]
+# @param metadata [Hash]
+# @return [void]
 def around_consume_batch(batch, metadata)
   payloads = []
   _with_span do
@@ -36,12 +38,14 @@ module Deimos
 # Consume a batch of incoming messages.
 # @param _payloads [Array<Phobos::BatchMessage>]
 # @param _metadata [Hash]
+# @return [void]
 def consume_batch(_payloads, _metadata)
   raise NotImplementedError
 end

 protected

+# @!visibility private
 def _received_batch(payloads, metadata)
   Deimos.config.logger.info(
     message: 'Got Kafka batch event',
@@ -70,6 +74,7 @@ module Deimos
   end
 end

+# @!visibility private
 # @param exception [Throwable]
 # @param payloads [Array<Hash>]
 # @param metadata [Hash]
@@ -91,6 +96,7 @@ module Deimos
   _error(exception, payloads, metadata)
 end

+# @!visibility private
 # @param time_taken [Float]
 # @param payloads [Array<Hash>]
 # @param metadata [Hash]
@@ -122,6 +128,7 @@ module Deimos
   )
 end

+# @!visibility private
 # Get payload identifiers (key and message_id if present) for logging.
 # @param payloads [Array<Hash>]
 # @param metadata [Hash]
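For reference, a batch consumer only overrides `consume_batch`; the `around_consume_batch` wrapper documented above handles decoding and instrumentation. A hedged sketch, assuming the topic is configured for batch delivery in the Deimos config; the method body is invented and only the `consume_batch(payloads, metadata)` signature comes from the diff:

```ruby
# Sketch of a batch-mode consumer; body is illustrative only.
class MyBatchConsumer < Deimos::Consumer
  def consume_batch(payloads, metadata)
    # payloads arrive already decoded; metadata is a Hash of batch information.
    payloads.each do |payload|
      Deimos.config.logger.info(message: 'Got batch item', payload: payload)
    end
  end
end
```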
data/lib/deimos/consume/message_consumption.rb
CHANGED
@@ -8,7 +8,9 @@ module Deimos
 extend ActiveSupport::Concern
 include Phobos::Handler

-#
+# @param payload [String]
+# @param metadata [Hash]
+# @return [void]
 def around_consume(payload, metadata)
   decoded_payload = payload.nil? ? nil : payload.dup
   new_metadata = metadata.dup
@@ -28,6 +30,7 @@ module Deimos
 # Consume incoming messages.
 # @param _payload [String]
 # @param _metadata [Hash]
+# @return [void]
 def consume(_payload, _metadata)
   raise NotImplementedError
 end
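The message-by-message path mirrors this: subclass `Deimos::Consumer` and override `consume`. A hedged sketch with an invented body; only the `consume(payload, metadata)` signature comes from the diff above:

```ruby
# Sketch of a message-mode consumer; body is illustrative only.
class MyConsumer < Deimos::Consumer
  def consume(payload, metadata)
    # payload arrives already decoded; a nil payload is a tombstone.
    return if payload.nil?

    Deimos.config.logger.info(message: 'Got message', key: metadata[:key], payload: payload)
  end
end
```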
data/lib/deimos/instrumentation.rb
CHANGED
@@ -8,23 +8,29 @@ module Deimos
 # Copied from Phobos instrumentation.
 module Instrumentation
   extend ActiveSupport::Concern
+
+  # @return [String]
   NAMESPACE = 'Deimos'

   # :nodoc:
   module ClassMethods
-    #
+    # @param event [String]
+    # @return [void]
     def subscribe(event)
       ActiveSupport::Notifications.subscribe("#{NAMESPACE}.#{event}") do |*args|
         yield(ActiveSupport::Notifications::Event.new(*args)) if block_given?
       end
     end

-    #
+    # @param subscriber [ActiveSupport::Subscriber]
+    # @return [void]
     def unsubscribe(subscriber)
       ActiveSupport::Notifications.unsubscribe(subscriber)
     end

-    #
+    # @param event [String]
+    # @param extra [Hash]
+    # @return [void]
    def instrument(event, extra={})
       ActiveSupport::Notifications.instrument("#{NAMESPACE}.#{event}", extra) do |extra2|
         yield(extra2) if block_given?
@@ -39,7 +45,8 @@ module Deimos
 module KafkaListener
   # Listens for any exceptions that happen during publishing and re-publishes
   # as a Deimos event.
-  # @param event [ActiveSupport::
+  # @param event [ActiveSupport::Notifications::Event]
+  # @return [void]
   def self.send_produce_error(event)
     exception = event.payload[:exception_object]
     return if !exception || !exception.respond_to?(:failed_messages)
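Since `instrument` and `subscribe` above are thin wrappers that prefix event names with the `Deimos` namespace before handing them to ActiveSupport::Notifications, events can also be observed with the notifications API directly. A small sketch; `my_event` is an arbitrary example name, not a built-in Deimos event:

```ruby
require 'active_support/notifications'

# Listen for a namespaced event, exactly as the subscribe wrapper above does.
ActiveSupport::Notifications.subscribe('Deimos.my_event') do |*args|
  event = ActiveSupport::Notifications::Event.new(*args)
  puts "Got #{event.name}: #{event.payload.inspect}"
end

# Fire the event around a block of work, as the instrument wrapper above does.
ActiveSupport::Notifications.instrument('Deimos.my_event', records: 3) do
  # work being timed; the notification fires when the block returns
end
```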
data/lib/deimos/kafka_message.rb
CHANGED
data/lib/deimos/kafka_source.rb
CHANGED
@@ -6,6 +6,7 @@ module Deimos
 module KafkaSource
   extend ActiveSupport::Concern

+  # @return [String]
   DEPRECATION_WARNING = 'The kafka_producer interface will be deprecated ' \
     'in future releases. Please use kafka_producers instead.'

@@ -16,6 +17,7 @@ module Deimos
 end

 # Send the newly created model to Kafka.
+# @return [void]
 def send_kafka_event_on_create
   return unless self.persisted?
   return unless self.class.kafka_config[:create]
@@ -24,6 +26,7 @@ module Deimos
 end

 # Send the newly updated model to Kafka.
+# @return [void]
 def send_kafka_event_on_update
   return unless self.class.kafka_config[:update]

@@ -41,6 +44,7 @@ module Deimos
 end

 # Send a deletion (null payload) event to Kafka.
+# @return [void]
 def send_kafka_event_on_destroy
   return unless self.class.kafka_config[:delete]

@@ -80,6 +84,7 @@ module Deimos
 # the inputs (arrays, hashes, records etc.)
 # Basically we want to first do the import, then reload the records
 # and send them to Kafka.
+# @!visibility private
 def import_without_validations_or_callbacks(column_names,
                                             array_of_attributes,
                                             options={})
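The callbacks above fire for models that include `Deimos::KafkaSource`. A rough sketch of how a model opts in; `Widget` and `WidgetProducer` are placeholders, while the `kafka_config` keys (`:create`, `:update`, `:delete`) and `kafka_producers` are the hooks referenced by the module above:

```ruby
# Rough sketch; Widget and WidgetProducer are placeholders.
class Widget < ActiveRecord::Base
  include Deimos::KafkaSource

  # Which lifecycle events should be published (keys checked by the module above).
  def self.kafka_config
    { create: true, update: true, delete: true }
  end

  # The producers used to publish this model's changes.
  def self.kafka_producers
    [WidgetProducer]
  end
end
```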
data/lib/deimos/kafka_topic_info.rb
CHANGED
@@ -50,6 +50,7 @@ module Deimos
 # moves on to the next one.
 # @param topic [String]
 # @param lock_id [String]
+# @return [void]
 def clear_lock(topic, lock_id)
   self.where(topic: topic, locked_by: lock_id).
     update_all(locked_by: nil,
@@ -66,6 +67,7 @@ module Deimos
 # was in a good state.
 # @param except_topics [Array<String>] the list of topics we've just
 # realized had messages in them, meaning all other topics were empty.
+# @return [void]
 def ping_empty_topics(except_topics)
   records = KafkaTopicInfo.where(locked_by: nil).
     where('topic not in(?)', except_topics)
@@ -79,6 +81,7 @@ module Deimos
 # and allows the caller to continue to the next topic.
 # @param topic [String]
 # @param lock_id [String]
+# @return [void]
 def register_error(topic, lock_id)
   record = self.where(topic: topic, locked_by: lock_id).last
   attr_hash = { locked_by: nil,
@@ -93,6 +96,7 @@ module Deimos
 # working on those messages and to continue.
 # @param topic [String]
 # @param lock_id [String]
+# @return [void]
 def heartbeat(topic, lock_id)
   self.where(topic: topic, locked_by: lock_id).
     update_all(locked_at: Time.zone.now)
data/lib/deimos/message.rb
CHANGED
@@ -3,11 +3,26 @@
 module Deimos
   # Basically a struct to hold the message as it's processed.
   class Message
-
-
+    # @return [Hash]
+    attr_accessor :payload
+    # @return [Hash, String, Integer]
+    attr_accessor :key
+    # @return [Integer]
+    attr_accessor :partition_key
+    # @return [String]
+    attr_accessor :encoded_key
+    # @return [String]
+    attr_accessor :encoded_payload
+    # @return [String]
+    attr_accessor :topic
+    # @return [String]
+    attr_accessor :producer_name

     # @param payload [Hash]
     # @param producer [Class]
+    # @param topic [String]
+    # @param key [String, Integer, Hash]
+    # @param partition_key [Integer]
     def initialize(payload, producer, topic: nil, key: nil, partition_key: nil)
       @payload = payload&.with_indifferent_access
       @producer_name = producer&.name
@@ -19,6 +34,7 @@ module Deimos
     # Add message_id and timestamp default values if they are in the
     # schema and don't already have values.
     # @param fields [Array<String>] existing name fields in the schema.
+    # @return [void]
     def add_fields(fields)
       return if @payload.except(:payload_key, :partition_key).blank?

@@ -31,6 +47,7 @@ module Deimos
     end

     # @param encoder [Deimos::SchemaBackends::Base]
+    # @return [void]
     def coerce_fields(encoder)
       return if payload.nil?

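The accessors and constructor documented above make `Deimos::Message` straightforward to build in isolation, which can be handy when poking at it in a console or a test. A small sketch with arbitrary payload, topic, and key values; the producer argument is nil here, whereas normally the producer class passes itself:

```ruby
# Building a message by hand; all values are arbitrary examples.
message = Deimos::Message.new(
  { 'test_id' => 'id1', 'some_int' => 3 },
  nil,                  # producer class; nil leaves producer_name unset
  topic: 'MyTopic',
  key: 'id1',
  partition_key: 1
)
message.payload # => {"test_id"=>"id1", "some_int"=>3} (with indifferent access)
message.key     # => "id1"
```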
data/lib/deimos/metrics/datadog.rb
CHANGED
@@ -6,7 +6,8 @@ module Deimos
 module Metrics
   # A Metrics wrapper class for Datadog.
   class Datadog < Metrics::Provider
-    #
+    # @param config [Hash]
+    # @param logger [Logger]
     def initialize(config, logger)
       raise 'Metrics config must specify host_ip' if config[:host_ip].nil?
       raise 'Metrics config must specify host_port' if config[:host_port].nil?
data/lib/deimos/metrics/mock.rb
CHANGED
@@ -5,8 +5,8 @@ require 'deimos/metrics/provider'
 module Deimos
   module Metrics
     # A mock Metrics wrapper which just logs the metrics
-    class Mock
-    #
+    class Mock < Provider
+      # @param logger [Logger,nil]
      def initialize(logger=nil)
        @logger = logger || Logger.new(STDOUT)
        @logger.info('MockMetricsProvider initialized')
data/lib/deimos/metrics/provider.rb
CHANGED
@@ -7,20 +7,25 @@ module Deimos
 # Send an counter increment metric
 # @param metric_name [String] The name of the counter metric
 # @param options [Hash] Any additional options, e.g. :tags
+# @return [void]
 def increment(metric_name, options={})
   raise NotImplementedError
 end

 # Send an counter increment metric
 # @param metric_name [String] The name of the counter metric
+# @param count [Integer]
 # @param options [Hash] Any additional options, e.g. :tags
+# @return [void]
 def gauge(metric_name, count, options={})
   raise NotImplementedError
 end

 # Send an counter increment metric
 # @param metric_name [String] The name of the counter metric
+# @param count [Integer]
 # @param options [Hash] Any additional options, e.g. :tags
+# @return [void]
 def histogram(metric_name, count, options={})
   raise NotImplementedError
 end
@@ -28,6 +33,7 @@ module Deimos
 # Time a yielded block, and send a timer metric
 # @param metric_name [String] The name of the metric
 # @param options [Hash] Any additional options, e.g. :tags
+# @return [void]
 def time(metric_name, options={})
   raise NotImplementedError
 end