deimos-ruby 1.7.0.pre.beta1 → 1.8.0.pre.beta1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +13 -0
- data/Gemfile.lock +8 -2
- data/README.md +69 -15
- data/deimos-ruby.gemspec +2 -0
- data/docs/ARCHITECTURE.md +144 -0
- data/docs/CONFIGURATION.md +4 -0
- data/lib/deimos.rb +6 -6
- data/lib/deimos/active_record_consume/batch_consumption.rb +159 -0
- data/lib/deimos/active_record_consume/batch_slicer.rb +27 -0
- data/lib/deimos/active_record_consume/message_consumption.rb +58 -0
- data/lib/deimos/active_record_consume/schema_model_converter.rb +52 -0
- data/lib/deimos/active_record_consumer.rb +33 -75
- data/lib/deimos/batch_consumer.rb +2 -142
- data/lib/deimos/config/configuration.rb +8 -10
- data/lib/deimos/consume/batch_consumption.rb +148 -0
- data/lib/deimos/consume/message_consumption.rb +93 -0
- data/lib/deimos/consumer.rb +79 -72
- data/lib/deimos/kafka_message.rb +1 -1
- data/lib/deimos/message.rb +6 -1
- data/lib/deimos/utils/db_poller.rb +6 -6
- data/lib/deimos/utils/db_producer.rb +6 -2
- data/lib/deimos/utils/deadlock_retry.rb +68 -0
- data/lib/deimos/utils/lag_reporter.rb +19 -26
- data/lib/deimos/version.rb +1 -1
- data/spec/active_record_batch_consumer_spec.rb +481 -0
- data/spec/active_record_consume/batch_slicer_spec.rb +42 -0
- data/spec/active_record_consume/schema_model_converter_spec.rb +105 -0
- data/spec/active_record_consumer_spec.rb +3 -11
- data/spec/batch_consumer_spec.rb +23 -7
- data/spec/config/configuration_spec.rb +4 -0
- data/spec/consumer_spec.rb +6 -6
- data/spec/deimos_spec.rb +57 -49
- data/spec/handlers/my_batch_consumer.rb +6 -1
- data/spec/handlers/my_consumer.rb +6 -1
- data/spec/message_spec.rb +19 -0
- data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc +18 -0
- data/spec/schemas/com/my-namespace/Wibble.avsc +43 -0
- data/spec/spec_helper.rb +17 -0
- data/spec/utils/db_poller_spec.rb +2 -2
- data/spec/utils/deadlock_retry_spec.rb +74 -0
- data/spec/utils/lag_reporter_spec.rb +29 -22
- metadata +57 -16
- data/lib/deimos/base_consumer.rb +0 -100
- data/lib/deimos/utils/executor.rb +0 -124
- data/lib/deimos/utils/platform_schema_validation.rb +0 -0
- data/lib/deimos/utils/signal_handler.rb +0 -68
- data/spec/utils/executor_spec.rb +0 -53
- data/spec/utils/signal_handler_spec.rb +0 -16
@@ -0,0 +1,148 @@
+# frozen_string_literal: true
+
+module Deimos
+  module Consume
+    # Helper methods used by batch consumers, i.e. those with "inline_batch"
+    # delivery. Payloads are decoded then consumers are invoked with arrays
+    # of messages to be handled at once
+    module BatchConsumption
+      include Phobos::BatchHandler
+
+      # :nodoc:
+      def around_consume_batch(batch, metadata)
+        payloads = []
+        benchmark = Benchmark.measure do
+          if self.class.config[:key_configured]
+            metadata[:keys] = batch.map do |message|
+              decode_key(message.key)
+            end
+          end
+
+          payloads = batch.map do |message|
+            message.payload ? self.class.decoder.decode(message.payload) : nil
+          end
+          _received_batch(payloads, metadata)
+          _with_span do
+            yield payloads, metadata
+          end
+        end
+        _handle_batch_success(benchmark.real, payloads, metadata)
+      rescue StandardError => e
+        _handle_batch_error(e, payloads, metadata)
+      end
+
+      # Consume a batch of incoming messages.
+      # @param _payloads [Array<Phobos::BatchMessage>]
+      # @param _metadata [Hash]
+      def consume_batch(_payloads, _metadata)
+        raise NotImplementedError
+      end
+
+      protected
+
+      def _received_batch(payloads, metadata)
+        Deimos.config.logger.info(
+          message: 'Got Kafka batch event',
+          message_ids: _payload_identifiers(payloads, metadata),
+          metadata: metadata.except(:keys)
+        )
+        Deimos.config.logger.debug(
+          message: 'Kafka batch event payloads',
+          payloads: payloads
+        )
+        Deimos.config.metrics&.increment(
+          'handler',
+          tags: %W(
+            status:batch_received
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.metrics&.increment(
+          'handler',
+          by: metadata['batch_size'],
+          tags: %W(
+            status:received
+            topic:#{metadata[:topic]}
+          ))
+        if payloads.present?
+          payloads.each { |payload| _report_time_delayed(payload, metadata) }
+        end
+      end
+
+      # @param exception [Throwable]
+      # @param payloads [Array<Hash>]
+      # @param metadata [Hash]
+      def _handle_batch_error(exception, payloads, metadata)
+        Deimos.config.metrics&.increment(
+          'handler',
+          tags: %W(
+            status:batch_error
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.logger.warn(
+          message: 'Error consuming message batch',
+          handler: self.class.name,
+          metadata: metadata.except(:keys),
+          message_ids: _payload_identifiers(payloads, metadata),
+          error_message: exception.message,
+          error: exception.backtrace
+        )
+        _error(exception, payloads, metadata)
+      end
+
+      # @param time_taken [Float]
+      # @param payloads [Array<Hash>]
+      # @param metadata [Hash]
+      def _handle_batch_success(time_taken, payloads, metadata)
+        Deimos.config.metrics&.histogram('handler',
+                                         time_taken,
+                                         tags: %W(
+                                           time:consume_batch
+                                           topic:#{metadata[:topic]}
+                                         ))
+        Deimos.config.metrics&.increment(
+          'handler',
+          tags: %W(
+            status:batch_success
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.metrics&.increment(
+          'handler',
+          by: metadata['batch_size'],
+          tags: %W(
+            status:success
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.logger.info(
+          message: 'Finished processing Kafka batch event',
+          message_ids: _payload_identifiers(payloads, metadata),
+          time_elapsed: time_taken,
+          metadata: metadata.except(:keys)
+        )
+      end
+
+      # Get payload identifiers (key and message_id if present) for logging.
+      # @param payloads [Array<Hash>]
+      # @param metadata [Hash]
+      # @return [Array<Array>] the identifiers.
+      def _payload_identifiers(payloads, metadata)
+        message_ids = payloads&.map do |payload|
+          if payload.is_a?(Hash) && payload.key?('message_id')
+            payload['message_id']
+          end
+        end
+
+        # Payloads may be nil if preprocessing failed
+        messages = payloads || metadata[:keys] || []
+
+        messages.zip(metadata[:keys] || [], message_ids || []).map do |_, k, m_id|
+          ids = {}
+
+          ids[:key] = k if k.present?
+          ids[:message_id] = m_id if m_id.present?
+
+          ids
+        end
+      end
+    end
+  end
+end
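This module is mixed into Deimos::Consumer, so an application opts in by giving its listener inline_batch delivery and overriding consume_batch. A minimal sketch, assuming the consumer has a key_config set (so metadata[:keys] is populated); the MyBatchConsumer class and Widget model are hypothetical:

class MyBatchConsumer < Deimos::Consumer
  def consume_batch(payloads, metadata)
    # payloads is an array of decoded hashes; nil entries are tombstones.
    # With a key configured, metadata[:keys] lines up with payloads by index.
    payloads.zip(metadata[:keys]).each do |payload, key|
      next if payload.nil?

      Widget.find_or_initialize_by(id: key).update!(name: payload['name'])
    end
  end
end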
data/lib/deimos/consume/message_consumption.rb
ADDED
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module Deimos
+  module Consume
+    # Methods used by message-by-message (non-batch) consumers. These consumers
+    # are invoked for every individual message.
+    module MessageConsumption
+      include Phobos::Handler
+
+      # :nodoc:
+      def around_consume(payload, metadata)
+        decoded_payload = payload.dup
+        new_metadata = metadata.dup
+        benchmark = Benchmark.measure do
+          _with_span do
+            new_metadata[:key] = decode_key(metadata[:key]) if self.class.config[:key_configured]
+            decoded_payload = payload ? self.class.decoder.decode(payload) : nil
+            _received_message(decoded_payload, new_metadata)
+            yield decoded_payload, new_metadata
+          end
+        end
+        _handle_success(benchmark.real, decoded_payload, new_metadata)
+      rescue StandardError => e
+        _handle_error(e, decoded_payload, new_metadata)
+      end
+
+      # Consume incoming messages.
+      # @param _payload [String]
+      # @param _metadata [Hash]
+      def consume(_payload, _metadata)
+        raise NotImplementedError
+      end
+
+      private
+
+      def _received_message(payload, metadata)
+        Deimos.config.logger.info(
+          message: 'Got Kafka event',
+          payload: payload,
+          metadata: metadata
+        )
+        Deimos.config.metrics&.increment('handler', tags: %W(
+                                           status:received
+                                           topic:#{metadata[:topic]}
+                                         ))
+        _report_time_delayed(payload, metadata)
+      end
+
+      # @param exception [Throwable]
+      # @param payload [Hash]
+      # @param metadata [Hash]
+      def _handle_error(exception, payload, metadata)
+        Deimos.config.metrics&.increment(
+          'handler',
+          tags: %W(
+            status:error
+            topic:#{metadata[:topic]}
+          )
+        )
+        Deimos.config.logger.warn(
+          message: 'Error consuming message',
+          handler: self.class.name,
+          metadata: metadata,
+          data: payload,
+          error_message: exception.message,
+          error: exception.backtrace
+        )
+
+        _error(exception, payload, metadata)
+      end
+
+      # @param time_taken [Float]
+      # @param payload [Hash]
+      # @param metadata [Hash]
+      def _handle_success(time_taken, payload, metadata)
+        Deimos.config.metrics&.histogram('handler', time_taken, tags: %W(
+                                           time:consume
+                                           topic:#{metadata[:topic]}
+                                         ))
+        Deimos.config.metrics&.increment('handler', tags: %W(
+                                           status:success
+                                           topic:#{metadata[:topic]}
+                                         ))
+        Deimos.config.logger.info(
+          message: 'Finished processing Kafka event',
+          payload: payload,
+          time_elapsed: time_taken,
+          metadata: metadata
+        )
+      end
+    end
+  end
+end
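For message-by-message delivery the consumer overrides consume instead. A minimal sketch along the same lines, assuming key_config is set so metadata[:key] arrives decoded; MyConsumer, Widget and the name field are hypothetical:

class MyConsumer < Deimos::Consumer
  def consume(payload, metadata)
    return if payload.nil? # tombstone - the record with this key was deleted upstream

    widget = Widget.find_or_initialize_by(id: metadata[:key])
    widget.update!(name: payload['name'])
  end
end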
data/lib/deimos/consumer.rb
CHANGED
@@ -1,97 +1,104 @@
 # frozen_string_literal: true

-require 'deimos/…
-require 'deimos/…
-require 'phobos/handler'
-require 'active_support/all'
+require 'deimos/consume/batch_consumption'
+require 'deimos/consume/message_consumption'

-# Class to consume messages coming from …
+# Class to consume messages coming from a Kafka topic
 # Note: According to the docs, instances of your handler will be created
-# for every incoming message. This class should be lightweight.
+# for every incoming message/batch. This class should be lightweight.
 module Deimos
-  # …
-  …
+  # Basic consumer class. Inherit from this class and override either consume
+  # or consume_batch, depending on the delivery mode of your listener.
+  # `consume` -> use `delivery :message` or `delivery :batch`
+  # `consume_batch` -> use `delivery :inline_batch`
+  class Consumer
+    include Consume::MessageConsumption
+    include Consume::BatchConsumption
+    include SharedConfig

-  …
+    class << self
+      # @return [Deimos::SchemaBackends::Base]
+      def decoder
+        @decoder ||= Deimos.schema_backend(schema: config[:schema],
+                                           namespace: config[:namespace])
+      end
+
+      # @return [Deimos::SchemaBackends::Base]
+      def key_decoder
+        @key_decoder ||= Deimos.schema_backend(schema: config[:key_schema],
+                                               namespace: config[:namespace])
       end
-      _handle_success(benchmark.real, decoded_payload, new_metadata)
-    rescue StandardError => e
-      _handle_error(e, decoded_payload, new_metadata)
     end

-    # …
-    # @param …
-    # @…
-    def …
-    …
+    # Helper method to decode an encoded key.
+    # @param key [String]
+    # @return [Object] the decoded key.
+    def decode_key(key)
+      return nil if key.nil?
+
+      config = self.class.config
+      unless config[:key_configured]
+        raise 'No key config given - if you are not decoding keys, please use '\
+              '`key_config plain: true`'
+      end
+
+      if config[:key_field]
+        self.class.decoder.decode_key(key, config[:key_field])
+      elsif config[:key_schema]
+        self.class.key_decoder.decode(key, schema: config[:key_schema])
+      else # no encoding
+        key
+      end
     end

     private

-    def …
-      Deimos.config.…
-      …
-        metadata: metadata
+    def _with_span
+      @span = Deimos.config.tracer&.start(
+        'deimos-consumer',
+        resource: self.class.name.gsub('::', '-')
       )
-      …
+      yield
+    ensure
+      Deimos.config.tracer&.finish(@span)
+    end
+
+    def _report_time_delayed(payload, metadata)
+      return if payload.nil? || payload['timestamp'].blank?
+
+      begin
+        time_delayed = Time.now.in_time_zone - payload['timestamp'].to_datetime
+      rescue ArgumentError
+        Deimos.config.logger.info(
+          message: "Error parsing timestamp! #{payload['timestamp']}"
+        )
+        return
+      end
+      Deimos.config.metrics&.histogram('handler', time_delayed, tags: %W(
+                                         time:time_delayed
                                          topic:#{metadata[:topic]}
                                        ))
-      _report_time_delayed(payload, metadata)
     end

-    # …
-    # …
-    # @param …
-    …
-          topic:#{metadata[:topic]}
-        )
-      )
-      Deimos.config.logger.warn(
-        message: 'Error consuming message',
-        handler: self.class.name,
-        metadata: metadata,
-        data: payload,
-        error_message: exception.message,
-        error: exception.backtrace
-      )
-      super
+    # Overrideable method to determine if a given error should be considered
+    # "fatal" and always be reraised.
+    # @param _error [Exception]
+    # @param _payload [Hash]
+    # @param _metadata [Hash]
+    # @return [Boolean]
+    def fatal_error?(_error, _payload, _metadata)
+      false
     end

-    # @param …
+    # @param exception [Exception]
     # @param payload [Hash]
     # @param metadata [Hash]
-    def …
-      Deimos.config.…
-      …
-        status:success
-        topic:#{metadata[:topic]}
-      ))
-      Deimos.config.logger.info(
-        message: 'Finished processing Kafka event',
-        payload: payload,
-        time_elapsed: time_taken,
-        metadata: metadata
-      )
+    def _error(exception, payload, metadata)
+      Deimos.config.tracer&.set_error(@span, exception)
+
+      raise if Deimos.config.consumers.reraise_errors ||
+               Deimos.config.consumers.fatal_error&.call(exception, payload, metadata) ||
+               fatal_error?(exception, payload, metadata)
     end
   end
 end
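With this refactor, error handling is centralized in _error: exceptions are swallowed unless reraise_errors is on, the new global Deimos.config.consumers.fatal_error callback returns true, or the consumer's own fatal_error? hook does. A sketch of overriding that hook; the class name and the choice of fatal exception are hypothetical:

class MyConsumer < Deimos::Consumer
  # Always reraise database errors, even when reraise_errors is disabled.
  def fatal_error?(error, _payload, _metadata)
    error.is_a?(ActiveRecord::StatementInvalid)
  end

  def consume(payload, metadata)
    # normal handling ...
  end
end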
data/lib/deimos/kafka_message.rb
CHANGED
data/lib/deimos/message.rb
CHANGED
@@ -10,7 +10,7 @@ module Deimos
     # @param producer [Class]
     def initialize(payload, producer, topic: nil, key: nil, partition_key: nil)
       @payload = payload&.with_indifferent_access
-      @producer_name = producer
+      @producer_name = producer&.name
       @topic = topic
       @key = key
       @partition_key = partition_key
@@ -70,5 +70,10 @@ module Deimos
     def ==(other)
       self.to_h == other.to_h
     end
+
+    # @return [Boolean] True if this message is a tombstone
+    def tombstone?
+      payload.nil?
+    end
   end
 end
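Two small behaviour changes: Message now stores the producer's class name rather than the class itself, and a nil payload can be queried directly via tombstone?. A quick sketch (MyProducer is a hypothetical producer class):

msg = Deimos::Message.new(nil, MyProducer, topic: 'my-topic', key: '123')
msg.tombstone? # => true - a nil payload marks a deletion for this key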
data/lib/deimos/utils/db_poller.rb
CHANGED
@@ -1,8 +1,8 @@
 # frozen_string_literal: true

 require 'deimos/poll_info'
-require '…
-require '…
+require 'sigurd/executor'
+require 'sigurd/signal_handler'

 module Deimos
   module Utils
@@ -22,10 +22,10 @@ module Deimos
         pollers = Deimos.config.db_poller_objects.map do |poller_config|
           self.new(poller_config)
         end
-        executor = …
-        …
-        …
-        signal_handler = …
+        executor = Sigurd::Executor.new(pollers,
+                                        sleep_seconds: 5,
+                                        logger: Deimos.config.logger)
+        signal_handler = Sigurd::SignalHandler.new(executor)
         signal_handler.run!
       end