deimos-ruby 1.6.1 → 1.8.0.pre.beta1

Files changed (67)
  1. checksums.yaml +4 -4
  2. data/.circleci/config.yml +9 -0
  3. data/.rubocop.yml +15 -13
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +30 -0
  6. data/Gemfile.lock +87 -80
  7. data/README.md +139 -15
  8. data/Rakefile +1 -1
  9. data/deimos-ruby.gemspec +3 -2
  10. data/docs/ARCHITECTURE.md +144 -0
  11. data/docs/CONFIGURATION.md +27 -0
  12. data/lib/deimos.rb +7 -6
  13. data/lib/deimos/active_record_consume/batch_consumption.rb +159 -0
  14. data/lib/deimos/active_record_consume/batch_slicer.rb +27 -0
  15. data/lib/deimos/active_record_consume/message_consumption.rb +58 -0
  16. data/lib/deimos/active_record_consume/schema_model_converter.rb +52 -0
  17. data/lib/deimos/active_record_consumer.rb +33 -75
  18. data/lib/deimos/active_record_producer.rb +23 -0
  19. data/lib/deimos/batch_consumer.rb +2 -140
  20. data/lib/deimos/config/configuration.rb +28 -10
  21. data/lib/deimos/consume/batch_consumption.rb +148 -0
  22. data/lib/deimos/consume/message_consumption.rb +93 -0
  23. data/lib/deimos/consumer.rb +79 -69
  24. data/lib/deimos/kafka_message.rb +1 -1
  25. data/lib/deimos/kafka_source.rb +29 -23
  26. data/lib/deimos/kafka_topic_info.rb +1 -1
  27. data/lib/deimos/message.rb +6 -1
  28. data/lib/deimos/metrics/provider.rb +0 -2
  29. data/lib/deimos/poll_info.rb +9 -0
  30. data/lib/deimos/tracing/provider.rb +0 -2
  31. data/lib/deimos/utils/db_poller.rb +149 -0
  32. data/lib/deimos/utils/db_producer.rb +8 -3
  33. data/lib/deimos/utils/deadlock_retry.rb +68 -0
  34. data/lib/deimos/utils/lag_reporter.rb +19 -26
  35. data/lib/deimos/version.rb +1 -1
  36. data/lib/generators/deimos/db_poller/templates/migration +11 -0
  37. data/lib/generators/deimos/db_poller/templates/rails3_migration +16 -0
  38. data/lib/generators/deimos/db_poller_generator.rb +48 -0
  39. data/lib/tasks/deimos.rake +7 -0
  40. data/spec/active_record_batch_consumer_spec.rb +481 -0
  41. data/spec/active_record_consume/batch_slicer_spec.rb +42 -0
  42. data/spec/active_record_consume/schema_model_converter_spec.rb +105 -0
  43. data/spec/active_record_consumer_spec.rb +22 -11
  44. data/spec/active_record_producer_spec.rb +66 -88
  45. data/spec/batch_consumer_spec.rb +23 -7
  46. data/spec/config/configuration_spec.rb +4 -0
  47. data/spec/consumer_spec.rb +8 -8
  48. data/spec/deimos_spec.rb +57 -49
  49. data/spec/handlers/my_batch_consumer.rb +6 -1
  50. data/spec/handlers/my_consumer.rb +6 -1
  51. data/spec/kafka_source_spec.rb +53 -0
  52. data/spec/message_spec.rb +19 -0
  53. data/spec/producer_spec.rb +3 -3
  54. data/spec/rake_spec.rb +1 -1
  55. data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc +18 -0
  56. data/spec/schemas/com/my-namespace/Wibble.avsc +43 -0
  57. data/spec/spec_helper.rb +61 -6
  58. data/spec/utils/db_poller_spec.rb +320 -0
  59. data/spec/utils/deadlock_retry_spec.rb +74 -0
  60. data/spec/utils/lag_reporter_spec.rb +29 -22
  61. metadata +61 -20
  62. data/lib/deimos/base_consumer.rb +0 -104
  63. data/lib/deimos/utils/executor.rb +0 -124
  64. data/lib/deimos/utils/platform_schema_validation.rb +0 -0
  65. data/lib/deimos/utils/signal_handler.rb +0 -68
  66. data/spec/utils/executor_spec.rb +0 -53
  67. data/spec/utils/signal_handler_spec.rb +0 -16

data/lib/deimos/batch_consumer.rb
@@ -1,145 +1,7 @@
 # frozen_string_literal: true
 
-require 'deimos/base_consumer'
-require 'phobos/batch_handler'
-
 module Deimos
-  # Class to consume batches of messages in a topic
-  # Note: According to the docs, instances of your handler will be created
-  # for every incoming batch of messages. This class should be lightweight.
-  class BatchConsumer < BaseConsumer
-    include Phobos::BatchHandler
-
-    # :nodoc:
-    def around_consume_batch(batch, metadata)
-      payloads = []
-      benchmark = Benchmark.measure do
-        if self.class.config[:key_configured]
-          metadata[:keys] = batch.map do |message|
-            decode_key(message.key)
-          end
-        end
-
-        payloads = batch.map do |message|
-          message.payload ? self.class.decoder.decode(message.payload) : nil
-        end
-        _received_batch(payloads, metadata)
-        _with_error_span(payloads, metadata) do
-          yield payloads, metadata
-        end
-      end
-      _handle_success(benchmark.real, payloads, metadata)
-    end
-
-    # Consume a batch of incoming messages.
-    # @param _payloads [Array<Phobos::BatchMessage>]
-    # @param _metadata [Hash]
-    def consume_batch(_payloads, _metadata)
-      raise NotImplementedError
-    end
-
-    protected
-
-    def _received_batch(payloads, metadata)
-      Deimos.config.logger.info(
-        message: 'Got Kafka batch event',
-        message_ids: _payload_identifiers(payloads, metadata),
-        metadata: metadata.except(:keys)
-      )
-      Deimos.config.logger.debug(
-        message: 'Kafka batch event payloads',
-        payloads: payloads
-      )
-      Deimos.config.metrics&.increment(
-        'handler',
-        tags: %W(
-          status:batch_received
-          topic:#{metadata[:topic]}
-        ))
-      Deimos.config.metrics&.increment(
-        'handler',
-        by: metadata['batch_size'],
-        tags: %W(
-          status:received
-          topic:#{metadata[:topic]}
-        ))
-      if payloads.present?
-        payloads.each { |payload| _report_time_delayed(payload, metadata) }
-      end
-    end
-
-    # @param exception [Throwable]
-    # @param payloads [Array<Hash>]
-    # @param metadata [Hash]
-    def _handle_error(exception, payloads, metadata)
-      Deimos.config.metrics&.increment(
-        'handler',
-        tags: %W(
-          status:batch_error
-          topic:#{metadata[:topic]}
-        ))
-      Deimos.config.logger.warn(
-        message: 'Error consuming message batch',
-        handler: self.class.name,
-        metadata: metadata.except(:keys),
-        message_ids: _payload_identifiers(payloads, metadata),
-        error_message: exception.message,
-        error: exception.backtrace
-      )
-      super
-    end
-
-    # @param time_taken [Float]
-    # @param payloads [Array<Hash>]
-    # @param metadata [Hash]
-    def _handle_success(time_taken, payloads, metadata)
-      Deimos.config.metrics&.histogram('handler', time_taken, tags: %W(
-        time:consume_batch
-        topic:#{metadata[:topic]}
-      ))
-      Deimos.config.metrics&.increment(
-        'handler',
-        tags: %W(
-          status:batch_success
-          topic:#{metadata[:topic]}
-        ))
-      Deimos.config.metrics&.increment(
-        'handler',
-        by: metadata['batch_size'],
-        tags: %W(
-          status:success
-          topic:#{metadata[:topic]}
-        ))
-      Deimos.config.logger.info(
-        message: 'Finished processing Kafka batch event',
-        message_ids: _payload_identifiers(payloads, metadata),
-        time_elapsed: time_taken,
-        metadata: metadata.except(:keys)
-      )
-    end
-
-    # Get payload identifiers (key and message_id if present) for logging.
-    # @param payloads [Array<Hash>]
-    # @param metadata [Hash]
-    # @return [Hash] the identifiers.
-    def _payload_identifiers(payloads, metadata)
-      message_ids = payloads&.map do |payload|
-        if payload.is_a?(Hash) && payload.key?('message_id')
-          payload['message_id']
-        end
-      end
-
-      # Payloads may be nil if preprocessing failed
-      messages = payloads || metadata[:keys] || []
-
-      messages.zip(metadata[:keys] || [], message_ids || []).map do |_, k, m_id|
-        ids = {}
-
-        ids[:key] = k if k.present?
-        ids[:message_id] = m_id if m_id.present?
-
-        ids
-      end
-    end
+  # @deprecated Use Deimos::Consumer with `delivery: inline_batch` configured instead
+  class BatchConsumer < Consumer
   end
 end
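
BatchConsumer is now a deprecated shim: the batch-handling logic moved into Deimos::Consumer via the new Consume::BatchConsumption mixin, so existing subclasses keep working but should migrate. A minimal migration sketch (MyBatchConsumer and Widget are hypothetical; the consumer block follows the configuration style documented in the gem's README):

    # Before (1.6): the dedicated parent class implied batch delivery.
    class MyBatchConsumer < Deimos::BatchConsumer
      def consume_batch(payloads, metadata)
        payloads.each { |payload| Widget.create!(payload) }
      end
    end

    # After (1.8): inherit from Deimos::Consumer; the batch path is chosen
    # purely through the `delivery` setting.
    class MyBatchConsumer < Deimos::Consumer
      def consume_batch(payloads, metadata)
        payloads.each { |payload| Widget.create!(payload) }
      end
    end

    Deimos.configure do
      consumer do
        class_name 'MyBatchConsumer'
        topic 'my-topic'
        group_id 'my-group'
        delivery :inline_batch # routes to consume_batch instead of consume
      end
    end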

data/lib/deimos/config/configuration.rb
@@ -47,17 +47,15 @@ module Deimos
       handler_class = listener.handler.constantize
       delivery = listener.delivery
 
-      # Validate that Deimos consumers use proper delivery configs
-      if handler_class < Deimos::BatchConsumer
-        unless delivery == 'inline_batch'
-          raise "BatchConsumer #{listener.handler} must have delivery set to"\
-            ' `inline_batch`'
-        end
-      elsif handler_class < Deimos::Consumer
-        if delivery.present? && !%w(message batch).include?(delivery)
-          raise "Non-batch Consumer #{listener.handler} must have delivery"\
-            ' set to `message` or `batch`'
+      next unless handler_class < Deimos::Consumer
+
+      # Validate that each consumer implements the correct method for its type
+      if delivery == 'inline_batch'
+        if handler_class.instance_method(:consume_batch).owner == Deimos::Consume::BatchConsumption
+          raise "BatchConsumer #{listener.handler} does not implement `consume_batch`"
         end
+      elsif handler_class.instance_method(:consume).owner == Deimos::Consume::MessageConsumption
+        raise "Non-batch Consumer #{listener.handler} does not implement `consume`"
       end
     end
   end
@@ -340,6 +338,26 @@ module Deimos
       setting :heartbeat_interval
     end
 
+    setting_object :db_poller do
+      # Producer class to use for the poller.
+      setting :producer_class
+      # How often to run the poller, in seconds. If the poll takes longer than this
+      # time, it will run again immediately and the timeout
+      # will be pushed to the next e.g. 1 minute.
+      setting :run_every, 60
+      # Column to use to find updates. Must have an index on it.
+      setting :timestamp_column, :updated_at
+      # Amount of time, in seconds, to wait before catching updates, to allow transactions
+      # to complete but still pick up the right records.
+      setting :delay_time, 2
+      # If true, dump the full table rather than incremental changes. Should
+      # only be used for very small tables.
+      setting :full_table, false
+      # If false, start from the current time instead of the beginning of time
+      # if this is the first time running the poller.
+      setting :start_from_beginning, true
+    end
+
     deprecate 'kafka_logger', 'kafka.logger'
     deprecate 'reraise_consumer_errors', 'consumers.reraise_errors'
     deprecate 'schema_registry_url', 'schema.registry_url'
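
These settings drive the new poller in lib/deimos/utils/db_poller.rb, which reads rows whose timestamp_column has advanced and publishes them through the configured producer. A hedged configuration sketch (MyProducer is hypothetical; the block syntax mirrors the setting_object definition above):

    Deimos.configure do
      db_poller do
        producer_class 'MyProducer'   # an ActiveRecordProducer subclass
        run_every 60                  # poll once a minute
        timestamp_column :updated_at  # must be indexed
        delay_time 2                  # let in-flight transactions commit
        full_table false              # incremental changes only
        start_from_beginning true     # first run starts at the epoch
      end
    end

The generator added in this release (lib/generators/deimos/db_poller_generator.rb) creates the backing poll_info table, and the new task in lib/tasks/deimos.rake runs the configured pollers.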

data/lib/deimos/consume/batch_consumption.rb
@@ -0,0 +1,148 @@
+# frozen_string_literal: true
+
+module Deimos
+  module Consume
+    # Helper methods used by batch consumers, i.e. those with "inline_batch"
+    # delivery. Payloads are decoded then consumers are invoked with arrays
+    # of messages to be handled at once
+    module BatchConsumption
+      include Phobos::BatchHandler
+
+      # :nodoc:
+      def around_consume_batch(batch, metadata)
+        payloads = []
+        benchmark = Benchmark.measure do
+          if self.class.config[:key_configured]
+            metadata[:keys] = batch.map do |message|
+              decode_key(message.key)
+            end
+          end
+
+          payloads = batch.map do |message|
+            message.payload ? self.class.decoder.decode(message.payload) : nil
+          end
+          _received_batch(payloads, metadata)
+          _with_span do
+            yield payloads, metadata
+          end
+        end
+        _handle_batch_success(benchmark.real, payloads, metadata)
+      rescue StandardError => e
+        _handle_batch_error(e, payloads, metadata)
+      end
+
+      # Consume a batch of incoming messages.
+      # @param _payloads [Array<Phobos::BatchMessage>]
+      # @param _metadata [Hash]
+      def consume_batch(_payloads, _metadata)
+        raise NotImplementedError
+      end
+
+      protected
+
+      def _received_batch(payloads, metadata)
+        Deimos.config.logger.info(
+          message: 'Got Kafka batch event',
+          message_ids: _payload_identifiers(payloads, metadata),
+          metadata: metadata.except(:keys)
+        )
+        Deimos.config.logger.debug(
+          message: 'Kafka batch event payloads',
+          payloads: payloads
+        )
+        Deimos.config.metrics&.increment(
+          'handler',
+          tags: %W(
+            status:batch_received
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.metrics&.increment(
+          'handler',
+          by: metadata['batch_size'],
+          tags: %W(
+            status:received
+            topic:#{metadata[:topic]}
+          ))
+        if payloads.present?
+          payloads.each { |payload| _report_time_delayed(payload, metadata) }
+        end
+      end
+
+      # @param exception [Throwable]
+      # @param payloads [Array<Hash>]
+      # @param metadata [Hash]
+      def _handle_batch_error(exception, payloads, metadata)
+        Deimos.config.metrics&.increment(
+          'handler',
+          tags: %W(
+            status:batch_error
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.logger.warn(
+          message: 'Error consuming message batch',
+          handler: self.class.name,
+          metadata: metadata.except(:keys),
+          message_ids: _payload_identifiers(payloads, metadata),
+          error_message: exception.message,
+          error: exception.backtrace
+        )
+        _error(exception, payloads, metadata)
+      end
+
+      # @param time_taken [Float]
+      # @param payloads [Array<Hash>]
+      # @param metadata [Hash]
+      def _handle_batch_success(time_taken, payloads, metadata)
+        Deimos.config.metrics&.histogram('handler',
+                                         time_taken,
+                                         tags: %W(
+                                           time:consume_batch
+                                           topic:#{metadata[:topic]}
+                                         ))
+        Deimos.config.metrics&.increment(
+          'handler',
+          tags: %W(
+            status:batch_success
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.metrics&.increment(
+          'handler',
+          by: metadata['batch_size'],
+          tags: %W(
+            status:success
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.logger.info(
+          message: 'Finished processing Kafka batch event',
+          message_ids: _payload_identifiers(payloads, metadata),
+          time_elapsed: time_taken,
+          metadata: metadata.except(:keys)
+        )
+      end
+
+      # Get payload identifiers (key and message_id if present) for logging.
+      # @param payloads [Array<Hash>]
+      # @param metadata [Hash]
+      # @return [Array<Array>] the identifiers.
+      def _payload_identifiers(payloads, metadata)
+        message_ids = payloads&.map do |payload|
+          if payload.is_a?(Hash) && payload.key?('message_id')
+            payload['message_id']
+          end
+        end
+
+        # Payloads may be nil if preprocessing failed
+        messages = payloads || metadata[:keys] || []
+
+        messages.zip(metadata[:keys] || [], message_ids || []).map do |_, k, m_id|
+          ids = {}
+
+          ids[:key] = k if k.present?
+          ids[:message_id] = m_id if m_id.present?
+
+          ids
+        end
+      end
+    end
+  end
+end
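
A sketch of a handler built on this module (Widget and the payload fields are hypothetical). As around_consume_batch above shows, metadata[:keys] is only populated when a key_config is declared, and it lines up positionally with the decoded payloads:

    class MyBatchConsumer < Deimos::Consumer
      def consume_batch(payloads, metadata)
        # Pair each decoded payload with its decoded key.
        payloads.zip(metadata[:keys]).each do |payload, key|
          Widget.find_or_initialize_by(id: key).update!(name: payload['name'])
        end
      end
    end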

data/lib/deimos/consume/message_consumption.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module Deimos
+  module Consume
+    # Methods used by message-by-message (non-batch) consumers. These consumers
+    # are invoked for every individual message.
+    module MessageConsumption
+      include Phobos::Handler
+
+      # :nodoc:
+      def around_consume(payload, metadata)
+        decoded_payload = payload.dup
+        new_metadata = metadata.dup
+        benchmark = Benchmark.measure do
+          _with_span do
+            new_metadata[:key] = decode_key(metadata[:key]) if self.class.config[:key_configured]
+            decoded_payload = payload ? self.class.decoder.decode(payload) : nil
+            _received_message(decoded_payload, new_metadata)
+            yield decoded_payload, new_metadata
+          end
+        end
+        _handle_success(benchmark.real, decoded_payload, new_metadata)
+      rescue StandardError => e
+        _handle_error(e, decoded_payload, new_metadata)
+      end
+
+      # Consume incoming messages.
+      # @param _payload [String]
+      # @param _metadata [Hash]
+      def consume(_payload, _metadata)
+        raise NotImplementedError
+      end
+
+      private
+
+      def _received_message(payload, metadata)
+        Deimos.config.logger.info(
+          message: 'Got Kafka event',
+          payload: payload,
+          metadata: metadata
+        )
+        Deimos.config.metrics&.increment('handler', tags: %W(
+          status:received
+          topic:#{metadata[:topic]}
+        ))
+        _report_time_delayed(payload, metadata)
+      end
+
+      # @param exception [Throwable]
+      # @param payload [Hash]
+      # @param metadata [Hash]
+      def _handle_error(exception, payload, metadata)
+        Deimos.config.metrics&.increment(
+          'handler',
+          tags: %W(
+            status:error
+            topic:#{metadata[:topic]}
+          )
+        )
+        Deimos.config.logger.warn(
+          message: 'Error consuming message',
+          handler: self.class.name,
+          metadata: metadata,
+          data: payload,
+          error_message: exception.message,
+          error: exception.backtrace
+        )
+
+        _error(exception, payload, metadata)
+      end
+
+      # @param time_taken [Float]
+      # @param payload [Hash]
+      # @param metadata [Hash]
+      def _handle_success(time_taken, payload, metadata)
+        Deimos.config.metrics&.histogram('handler', time_taken, tags: %W(
+          time:consume
+          topic:#{metadata[:topic]}
+        ))
+        Deimos.config.metrics&.increment('handler', tags: %W(
+          status:success
+          topic:#{metadata[:topic]}
+        ))
+        Deimos.config.logger.info(
+          message: 'Finished processing Kafka event',
+          payload: payload,
+          time_elapsed: time_taken,
+          metadata: metadata
+        )
+      end
+    end
+  end
+end
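
The corresponding handler shape for message-by-message delivery (model and field names are hypothetical). By the time consume runs, around_consume has already decoded both the payload and, when a key_config is given, metadata[:key]:

    class MyConsumer < Deimos::Consumer
      def consume(payload, metadata)
        Widget.find_or_initialize_by(id: metadata[:key])
              .update!(name: payload['name'])
      end
    end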

data/lib/deimos/consumer.rb
@@ -1,94 +1,104 @@
 # frozen_string_literal: true
 
-require 'deimos/base_consumer'
-require 'deimos/shared_config'
-require 'phobos/handler'
-require 'active_support/all'
+require 'deimos/consume/batch_consumption'
+require 'deimos/consume/message_consumption'
 
-# Class to consume messages coming from the pipeline topic
+# Class to consume messages coming from a Kafka topic
 # Note: According to the docs, instances of your handler will be created
-# for every incoming message. This class should be lightweight.
+# for every incoming message/batch. This class should be lightweight.
 module Deimos
-  # Parent consumer class.
-  class Consumer < BaseConsumer
-    include Phobos::Handler
+  # Basic consumer class. Inherit from this class and override either consume
+  # or consume_batch, depending on the delivery mode of your listener.
+  # `consume` -> use `delivery :message` or `delivery :batch`
+  # `consume_batch` -> use `delivery :inline_batch`
+  class Consumer
+    include Consume::MessageConsumption
+    include Consume::BatchConsumption
+    include SharedConfig
 
-    # :nodoc:
-    def around_consume(payload, metadata)
-      _received_message(payload, metadata)
-      benchmark = Benchmark.measure do
-        _with_error_span(payload, metadata) do
-          new_metadata = metadata.dup
-          new_metadata[:key] = decode_key(metadata[:key]) if self.class.config[:key_configured]
-          decoded_payload = payload ? self.class.decoder.decode(payload) : nil
-          yield decoded_payload, new_metadata
-        end
+    class << self
+      # @return [Deimos::SchemaBackends::Base]
+      def decoder
+        @decoder ||= Deimos.schema_backend(schema: config[:schema],
+                                           namespace: config[:namespace])
+      end
+
+      # @return [Deimos::SchemaBackends::Base]
+      def key_decoder
+        @key_decoder ||= Deimos.schema_backend(schema: config[:key_schema],
+                                               namespace: config[:namespace])
       end
-      _handle_success(benchmark.real, payload, metadata)
     end
 
-    # Consume incoming messages.
-    # @param _payload [String]
-    # @param _metadata [Hash]
-    def consume(_payload, _metadata)
-      raise NotImplementedError
+    # Helper method to decode an encoded key.
+    # @param key [String]
+    # @return [Object] the decoded key.
+    def decode_key(key)
+      return nil if key.nil?
+
+      config = self.class.config
+      unless config[:key_configured]
+        raise 'No key config given - if you are not decoding keys, please use '\
+          '`key_config plain: true`'
+      end
+
+      if config[:key_field]
+        self.class.decoder.decode_key(key, config[:key_field])
+      elsif config[:key_schema]
+        self.class.key_decoder.decode(key, schema: config[:key_schema])
+      else # no encoding
+        key
+      end
     end
 
     private
 
-    def _received_message(payload, metadata)
-      Deimos.config.logger.info(
-        message: 'Got Kafka event',
-        payload: payload,
-        metadata: metadata
+    def _with_span
+      @span = Deimos.config.tracer&.start(
+        'deimos-consumer',
+        resource: self.class.name.gsub('::', '-')
       )
-      Deimos.config.metrics&.increment('handler', tags: %W(
-        status:received
+      yield
+    ensure
+      Deimos.config.tracer&.finish(@span)
+    end
+
+    def _report_time_delayed(payload, metadata)
+      return if payload.nil? || payload['timestamp'].blank?
+
+      begin
+        time_delayed = Time.now.in_time_zone - payload['timestamp'].to_datetime
+      rescue ArgumentError
+        Deimos.config.logger.info(
+          message: "Error parsing timestamp! #{payload['timestamp']}"
+        )
+        return
+      end
+      Deimos.config.metrics&.histogram('handler', time_delayed, tags: %W(
+        time:time_delayed
         topic:#{metadata[:topic]}
       ))
-      _report_time_delayed(payload, metadata)
     end
 
-    # @param exception [Throwable]
-    # @param payload [Hash]
-    # @param metadata [Hash]
-    def _handle_error(exception, payload, metadata)
-      Deimos.config.metrics&.increment(
-        'handler',
-        tags: %W(
-          status:error
-          topic:#{metadata[:topic]}
-        )
-      )
-      Deimos.config.logger.warn(
-        message: 'Error consuming message',
-        handler: self.class.name,
-        metadata: metadata,
-        data: payload,
-        error_message: exception.message,
-        error: exception.backtrace
-      )
-      super
+    # Overrideable method to determine if a given error should be considered
+    # "fatal" and always be reraised.
+    # @param _error [Exception]
+    # @param _payload [Hash]
+    # @param _metadata [Hash]
+    # @return [Boolean]
+    def fatal_error?(_error, _payload, _metadata)
+      false
     end
 
-    # @param time_taken [Float]
+    # @param exception [Exception]
     # @param payload [Hash]
     # @param metadata [Hash]
-    def _handle_success(time_taken, payload, metadata)
-      Deimos.config.metrics&.histogram('handler', time_taken, tags: %W(
-        time:consume
-        topic:#{metadata[:topic]}
-      ))
-      Deimos.config.metrics&.increment('handler', tags: %W(
-        status:success
-        topic:#{metadata[:topic]}
-      ))
-      Deimos.config.logger.info(
-        message: 'Finished processing Kafka event',
-        payload: payload,
-        time_elapsed: time_taken,
-        metadata: metadata
-      )
+    def _error(exception, payload, metadata)
+      Deimos.config.tracer&.set_error(@span, exception)
+
+      raise if Deimos.config.consumers.reraise_errors ||
+               Deimos.config.consumers.fatal_error&.call(exception, payload, metadata) ||
+               fatal_error?(exception, payload, metadata)
    end
   end
 end
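
Error handling is now centralized in _error: exceptions are recorded on the trace span and reraised only when configured. Two hedged ways to mark an error as fatal, both checked in the raise condition above (ActiveRecord::RecordNotUnique is only an illustrative error class, and the configure-block syntax assumes the fig_tree style used elsewhere in the gem):

    # Globally, for all consumers, via the consumers.fatal_error proc:
    Deimos.configure do
      consumers.fatal_error = proc do |error, _payload, _metadata|
        error.is_a?(ActiveRecord::RecordNotUnique)
      end
    end

    # Or per consumer, by overriding the fatal_error? hook:
    class MyConsumer < Deimos::Consumer
      def fatal_error?(error, _payload, _metadata)
        error.is_a?(ActiveRecord::RecordNotUnique)
      end
    end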