deimos-temp-fork 0.0.1

Files changed (146)
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +83 -0
  3. data/.gitignore +41 -0
  4. data/.gitmodules +0 -0
  5. data/.rspec +1 -0
  6. data/.rubocop.yml +333 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +349 -0
  10. data/CODE_OF_CONDUCT.md +77 -0
  11. data/Dockerfile +23 -0
  12. data/Gemfile +6 -0
  13. data/Gemfile.lock +286 -0
  14. data/Guardfile +22 -0
  15. data/LICENSE.md +195 -0
  16. data/README.md +1099 -0
  17. data/Rakefile +13 -0
  18. data/bin/deimos +4 -0
  19. data/deimos-ruby.gemspec +44 -0
  20. data/docker-compose.yml +71 -0
  21. data/docs/ARCHITECTURE.md +140 -0
  22. data/docs/CONFIGURATION.md +236 -0
  23. data/docs/DATABASE_BACKEND.md +147 -0
  24. data/docs/INTEGRATION_TESTS.md +52 -0
  25. data/docs/PULL_REQUEST_TEMPLATE.md +35 -0
  26. data/docs/UPGRADING.md +128 -0
  27. data/lib/deimos-temp-fork.rb +95 -0
  28. data/lib/deimos/active_record_consume/batch_consumption.rb +164 -0
  29. data/lib/deimos/active_record_consume/batch_slicer.rb +27 -0
  30. data/lib/deimos/active_record_consume/message_consumption.rb +79 -0
  31. data/lib/deimos/active_record_consume/schema_model_converter.rb +52 -0
  32. data/lib/deimos/active_record_consumer.rb +67 -0
  33. data/lib/deimos/active_record_producer.rb +87 -0
  34. data/lib/deimos/backends/base.rb +32 -0
  35. data/lib/deimos/backends/db.rb +41 -0
  36. data/lib/deimos/backends/kafka.rb +33 -0
  37. data/lib/deimos/backends/kafka_async.rb +33 -0
  38. data/lib/deimos/backends/test.rb +20 -0
  39. data/lib/deimos/batch_consumer.rb +7 -0
  40. data/lib/deimos/config/configuration.rb +381 -0
  41. data/lib/deimos/config/phobos_config.rb +137 -0
  42. data/lib/deimos/consume/batch_consumption.rb +150 -0
  43. data/lib/deimos/consume/message_consumption.rb +94 -0
  44. data/lib/deimos/consumer.rb +104 -0
  45. data/lib/deimos/instrumentation.rb +76 -0
  46. data/lib/deimos/kafka_message.rb +60 -0
  47. data/lib/deimos/kafka_source.rb +128 -0
  48. data/lib/deimos/kafka_topic_info.rb +102 -0
  49. data/lib/deimos/message.rb +79 -0
  50. data/lib/deimos/metrics/datadog.rb +47 -0
  51. data/lib/deimos/metrics/mock.rb +39 -0
  52. data/lib/deimos/metrics/provider.rb +36 -0
  53. data/lib/deimos/monkey_patches/phobos_cli.rb +35 -0
  54. data/lib/deimos/monkey_patches/phobos_producer.rb +51 -0
  55. data/lib/deimos/poll_info.rb +9 -0
  56. data/lib/deimos/producer.rb +224 -0
  57. data/lib/deimos/railtie.rb +8 -0
  58. data/lib/deimos/schema_backends/avro_base.rb +140 -0
  59. data/lib/deimos/schema_backends/avro_local.rb +30 -0
  60. data/lib/deimos/schema_backends/avro_schema_coercer.rb +119 -0
  61. data/lib/deimos/schema_backends/avro_schema_registry.rb +34 -0
  62. data/lib/deimos/schema_backends/avro_validation.rb +21 -0
  63. data/lib/deimos/schema_backends/base.rb +150 -0
  64. data/lib/deimos/schema_backends/mock.rb +42 -0
  65. data/lib/deimos/shared_config.rb +63 -0
  66. data/lib/deimos/test_helpers.rb +360 -0
  67. data/lib/deimos/tracing/datadog.rb +35 -0
  68. data/lib/deimos/tracing/mock.rb +40 -0
  69. data/lib/deimos/tracing/provider.rb +29 -0
  70. data/lib/deimos/utils/db_poller.rb +150 -0
  71. data/lib/deimos/utils/db_producer.rb +243 -0
  72. data/lib/deimos/utils/deadlock_retry.rb +68 -0
  73. data/lib/deimos/utils/inline_consumer.rb +150 -0
  74. data/lib/deimos/utils/lag_reporter.rb +175 -0
  75. data/lib/deimos/utils/schema_controller_mixin.rb +115 -0
  76. data/lib/deimos/version.rb +5 -0
  77. data/lib/generators/deimos/active_record/templates/migration.rb.tt +28 -0
  78. data/lib/generators/deimos/active_record/templates/model.rb.tt +5 -0
  79. data/lib/generators/deimos/active_record_generator.rb +79 -0
  80. data/lib/generators/deimos/db_backend/templates/migration +25 -0
  81. data/lib/generators/deimos/db_backend/templates/rails3_migration +31 -0
  82. data/lib/generators/deimos/db_backend_generator.rb +48 -0
  83. data/lib/generators/deimos/db_poller/templates/migration +11 -0
  84. data/lib/generators/deimos/db_poller/templates/rails3_migration +16 -0
  85. data/lib/generators/deimos/db_poller_generator.rb +48 -0
  86. data/lib/tasks/deimos.rake +34 -0
  87. data/spec/active_record_batch_consumer_spec.rb +481 -0
  88. data/spec/active_record_consume/batch_slicer_spec.rb +42 -0
  89. data/spec/active_record_consume/schema_model_converter_spec.rb +105 -0
  90. data/spec/active_record_consumer_spec.rb +154 -0
  91. data/spec/active_record_producer_spec.rb +85 -0
  92. data/spec/backends/base_spec.rb +10 -0
  93. data/spec/backends/db_spec.rb +54 -0
  94. data/spec/backends/kafka_async_spec.rb +11 -0
  95. data/spec/backends/kafka_spec.rb +11 -0
  96. data/spec/batch_consumer_spec.rb +256 -0
  97. data/spec/config/configuration_spec.rb +248 -0
  98. data/spec/consumer_spec.rb +209 -0
  99. data/spec/deimos_spec.rb +169 -0
  100. data/spec/generators/active_record_generator_spec.rb +56 -0
  101. data/spec/handlers/my_batch_consumer.rb +10 -0
  102. data/spec/handlers/my_consumer.rb +10 -0
  103. data/spec/kafka_listener_spec.rb +55 -0
  104. data/spec/kafka_source_spec.rb +381 -0
  105. data/spec/kafka_topic_info_spec.rb +111 -0
  106. data/spec/message_spec.rb +19 -0
  107. data/spec/phobos.bad_db.yml +73 -0
  108. data/spec/phobos.yml +77 -0
  109. data/spec/producer_spec.rb +498 -0
  110. data/spec/rake_spec.rb +19 -0
  111. data/spec/schema_backends/avro_base_shared.rb +199 -0
  112. data/spec/schema_backends/avro_local_spec.rb +32 -0
  113. data/spec/schema_backends/avro_schema_registry_spec.rb +32 -0
  114. data/spec/schema_backends/avro_validation_spec.rb +24 -0
  115. data/spec/schema_backends/base_spec.rb +33 -0
  116. data/spec/schemas/com/my-namespace/Generated.avsc +71 -0
  117. data/spec/schemas/com/my-namespace/MyNestedSchema.avsc +62 -0
  118. data/spec/schemas/com/my-namespace/MySchema-key.avsc +13 -0
  119. data/spec/schemas/com/my-namespace/MySchema.avsc +18 -0
  120. data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc +18 -0
  121. data/spec/schemas/com/my-namespace/MySchemaWithBooleans.avsc +18 -0
  122. data/spec/schemas/com/my-namespace/MySchemaWithDateTimes.avsc +33 -0
  123. data/spec/schemas/com/my-namespace/MySchemaWithId.avsc +28 -0
  124. data/spec/schemas/com/my-namespace/MySchemaWithUniqueId.avsc +32 -0
  125. data/spec/schemas/com/my-namespace/Wibble.avsc +43 -0
  126. data/spec/schemas/com/my-namespace/Widget.avsc +27 -0
  127. data/spec/schemas/com/my-namespace/WidgetTheSecond.avsc +27 -0
  128. data/spec/schemas/com/my-namespace/request/CreateTopic.avsc +11 -0
  129. data/spec/schemas/com/my-namespace/request/Index.avsc +11 -0
  130. data/spec/schemas/com/my-namespace/request/UpdateRequest.avsc +11 -0
  131. data/spec/schemas/com/my-namespace/response/CreateTopic.avsc +11 -0
  132. data/spec/schemas/com/my-namespace/response/Index.avsc +11 -0
  133. data/spec/schemas/com/my-namespace/response/UpdateResponse.avsc +11 -0
  134. data/spec/spec_helper.rb +267 -0
  135. data/spec/utils/db_poller_spec.rb +320 -0
  136. data/spec/utils/db_producer_spec.rb +514 -0
  137. data/spec/utils/deadlock_retry_spec.rb +74 -0
  138. data/spec/utils/inline_consumer_spec.rb +31 -0
  139. data/spec/utils/lag_reporter_spec.rb +76 -0
  140. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  141. data/spec/utils/schema_controller_mixin_spec.rb +84 -0
  142. data/support/deimos-solo.png +0 -0
  143. data/support/deimos-with-name-next.png +0 -0
  144. data/support/deimos-with-name.png +0 -0
  145. data/support/flipp-logo.png +0 -0
  146. metadata +551 -0
data/lib/deimos/config/phobos_config.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+require 'active_support/core_ext/array'
+
+module Deimos
+  # Module to handle phobos.yml as well as outputting the configuration to save
+  # to Phobos itself.
+  module PhobosConfig
+    extend ActiveSupport::Concern
+
+    # @return [Hash]
+    def to_h
+      (FIELDS + [:handler]).map { |f|
+        val = self.send(f)
+        if f == :backoff && val
+          [:backoff, _backoff(val)]
+        elsif val.present?
+          [f, val]
+        end
+      }.to_h
+    end
+
+    # :nodoc:
+    def reset!
+      super
+      Phobos.configure(self.phobos_config)
+    end
+
+    # Create a hash representing the config that Phobos expects.
+    # @return [Hash]
+    def phobos_config
+      p_config = {
+        logger: Logger.new(STDOUT),
+        custom_logger: self.phobos_logger,
+        custom_kafka_logger: self.kafka.logger,
+        kafka: {
+          client_id: self.kafka.client_id,
+          connect_timeout: self.kafka.connect_timeout,
+          socket_timeout: self.kafka.socket_timeout,
+          ssl_verify_hostname: self.kafka.ssl.verify_hostname,
+          seed_brokers: Array.wrap(self.kafka.seed_brokers)
+        },
+        producer: {
+          ack_timeout: self.producers.ack_timeout,
+          required_acks: self.producers.required_acks,
+          max_retries: self.producers.max_retries,
+          retry_backoff: self.producers.retry_backoff,
+          max_buffer_size: self.producers.max_buffer_size,
+          max_buffer_bytesize: self.producers.max_buffer_bytesize,
+          compression_codec: self.producers.compression_codec,
+          compression_threshold: self.producers.compression_threshold,
+          max_queue_size: self.producers.max_queue_size,
+          delivery_threshold: self.producers.delivery_threshold,
+          delivery_interval: self.producers.delivery_interval
+        },
+        consumer: {
+          session_timeout: self.consumers.session_timeout,
+          offset_commit_interval: self.consumers.offset_commit_interval,
+          offset_commit_threshold: self.consumers.offset_commit_threshold,
+          heartbeat_interval: self.consumers.heartbeat_interval
+        },
+        backoff: _backoff(self.consumers.backoff.to_a)
+      }
+
+      p_config[:listeners] = self.consumer_objects.map do |consumer|
+        next nil if consumer.disabled
+
+        hash = consumer.to_h.reject do |k, _|
+          %i(class_name schema namespace key_config backoff disabled).include?(k)
+        end
+        hash = hash.map { |k, v| [k, v.is_a?(Symbol) ? v.to_s : v] }.to_h
+        hash[:handler] = consumer.class_name
+        if consumer.backoff
+          hash[:backoff] = _backoff(consumer.backoff.to_a)
+        end
+        hash
+      end
+      p_config[:listeners].compact!
+
+      if self.kafka.ssl.enabled
+        %w(ca_cert client_cert client_cert_key).each do |key|
+          next if self.kafka.ssl.send(key).blank?
+
+          p_config[:kafka]["ssl_#{key}".to_sym] = ssl_var_contents(self.kafka.ssl.send(key))
+        end
+      end
+      p_config
+    end
+
+    # @param key [String]
+    # @return [String]
+    def ssl_var_contents(key)
+      File.exist?(key) ? File.read(key) : key
+    end
+
+    # Legacy method to parse a Phobos config file.
+    def phobos_config_file=(file)
+      pconfig = YAML.load(ERB.new(File.read(File.expand_path(file))).result). # rubocop:disable Security/YAMLLoad
+        with_indifferent_access
+      self.logger&.warn('phobos.yml is deprecated - use direct configuration instead.')
+      pconfig[:kafka].each do |k, v|
+        if k.starts_with?('ssl')
+          k = k.sub('ssl_', '')
+          self.kafka.ssl.send("#{k}=", v)
+        else
+          self.kafka.send("#{k}=", v)
+        end
+      end
+      pconfig[:producer].each do |k, v|
+        self.producers.send("#{k}=", v)
+      end
+      pconfig[:consumer].each do |k, v|
+        self.consumers.send("#{k}=", v)
+      end
+      self.consumers.backoff = pconfig[:backoff][:min_ms]..pconfig[:backoff][:max_ms]
+      pconfig[:listeners].each do |listener_hash|
+        self.consumer do
+          listener_hash.each do |k, v|
+            k = 'class_name' if k == 'handler'
+            send(k, v)
+          end
+        end
+      end
+    end
+
+    private
+
+    # @param values [Array<Integer>]
+    # @return [Hash]
+    def _backoff(values)
+      {
+        min_ms: values[0],
+        max_ms: values[-1]
+      }
+    end
+  end
+end
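
The `phobos_config` method above is the bridge between Deimos settings and the hash Phobos expects: the kafka/producer/consumer sections are copied field by field, each non-disabled `consumer` block becomes a listener whose `class_name` is renamed to `handler`, and a backoff range is split into `min_ms`/`max_ms` by `_backoff`. A minimal sketch of that mapping, assuming the block-style configuration DSL described in data/docs/CONFIGURATION.md (broker, topic, and class names are illustrative):

Deimos.configure do
  kafka.seed_brokers ['localhost:9092'] # wrapped via Array.wrap in phobos_config
  consumers.backoff(1..60)              # _backoff turns this into { min_ms: 1, max_ms: 60 }

  consumer do
    class_name 'MyApp::MyConsumer'      # emitted to Phobos as the listener's :handler
    topic 'MyTopic'
    group_id 'my-group'
    schema 'MySchema'                   # schema-related keys are stripped from the listener hash
    namespace 'com.my-namespace'
    key_config field: :id
  end
end

# reset! then hands the generated hash to Phobos:
# Phobos.configure(Deimos.config.phobos_config)
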
data/lib/deimos/consume/batch_consumption.rb
@@ -0,0 +1,150 @@
+# frozen_string_literal: true
+
+module Deimos
+  module Consume
+    # Helper methods used by batch consumers, i.e. those with "inline_batch"
+    # delivery. Payloads are decoded, then consumers are invoked with arrays
+    # of messages to be handled at once.
+    module BatchConsumption
+      extend ActiveSupport::Concern
+      include Phobos::BatchHandler
+
+      # :nodoc:
+      def around_consume_batch(batch, metadata)
+        payloads = []
+        benchmark = Benchmark.measure do
+          if self.class.config[:key_configured]
+            metadata[:keys] = batch.map do |message|
+              decode_key(message.key)
+            end
+          end
+          metadata[:first_offset] = batch.first&.offset
+
+          payloads = batch.map do |message|
+            message.payload ? self.class.decoder.decode(message.payload) : nil
+          end
+          _received_batch(payloads, metadata)
+          _with_span do
+            yield payloads, metadata
+          end
+        end
+        _handle_batch_success(benchmark.real, payloads, metadata)
+      rescue StandardError => e
+        _handle_batch_error(e, payloads, metadata)
+      end
+
+      # Consume a batch of incoming messages.
+      # @param _payloads [Array<Phobos::BatchMessage>]
+      # @param _metadata [Hash]
+      def consume_batch(_payloads, _metadata)
+        raise NotImplementedError
+      end
+
+      protected
+
+      def _received_batch(payloads, metadata)
+        Deimos.config.logger.info(
+          message: 'Got Kafka batch event',
+          message_ids: _payload_identifiers(payloads, metadata),
+          metadata: metadata.except(:keys)
+        )
+        Deimos.config.logger.debug(
+          message: 'Kafka batch event payloads',
+          payloads: payloads
+        )
+        Deimos.config.metrics&.increment(
+          'handler',
+          tags: %W(
+            status:batch_received
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.metrics&.increment(
+          'handler',
+          by: metadata['batch_size'],
+          tags: %W(
+            status:received
+            topic:#{metadata[:topic]}
+          ))
+        if payloads.present?
+          payloads.each { |payload| _report_time_delayed(payload, metadata) }
+        end
+      end
+
+      # @param exception [Exception]
+      # @param payloads [Array<Hash>]
+      # @param metadata [Hash]
+      def _handle_batch_error(exception, payloads, metadata)
+        Deimos.config.metrics&.increment(
+          'handler',
+          tags: %W(
+            status:batch_error
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.logger.warn(
+          message: 'Error consuming message batch',
+          handler: self.class.name,
+          metadata: metadata.except(:keys),
+          message_ids: _payload_identifiers(payloads, metadata),
+          error_message: exception.message,
+          error: exception.backtrace
+        )
+        _error(exception, payloads, metadata)
+      end
+
+      # @param time_taken [Float]
+      # @param payloads [Array<Hash>]
+      # @param metadata [Hash]
+      def _handle_batch_success(time_taken, payloads, metadata)
+        Deimos.config.metrics&.histogram('handler',
+                                         time_taken,
+                                         tags: %W(
+                                           time:consume_batch
+                                           topic:#{metadata[:topic]}
+                                         ))
+        Deimos.config.metrics&.increment(
+          'handler',
+          tags: %W(
+            status:batch_success
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.metrics&.increment(
+          'handler',
+          by: metadata['batch_size'],
+          tags: %W(
+            status:success
+            topic:#{metadata[:topic]}
+          ))
+        Deimos.config.logger.info(
+          message: 'Finished processing Kafka batch event',
+          message_ids: _payload_identifiers(payloads, metadata),
+          time_elapsed: time_taken,
+          metadata: metadata.except(:keys)
+        )
+      end
+
+      # Get payload identifiers (key and message_id if present) for logging.
+      # @param payloads [Array<Hash>]
+      # @param metadata [Hash]
+      # @return [Array<Array>] the identifiers.
+      def _payload_identifiers(payloads, metadata)
+        message_ids = payloads&.map do |payload|
+          if payload.is_a?(Hash) && payload.key?('message_id')
+            payload['message_id']
+          end
+        end
+
+        # Payloads may be nil if preprocessing failed
+        messages = payloads || metadata[:keys] || []
+
+        messages.zip(metadata[:keys] || [], message_ids || []).map do |_, k, m_id|
+          ids = {}
+
+          ids[:key] = k if k.present?
+          ids[:message_id] = m_id if m_id.present?
+
+          ids
+        end
+      end
+    end
+  end
+end
1
+ # frozen_string_literal: true
2
+
3
+ module Deimos
4
+ module Consume
5
+ # Methods used by message-by-message (non-batch) consumers. These consumers
6
+ # are invoked for every individual message.
7
+ module MessageConsumption
8
+ extend ActiveSupport::Concern
9
+ include Phobos::Handler
10
+
11
+ # :nodoc:
12
+ def around_consume(payload, metadata)
13
+ decoded_payload = payload.nil? ? nil : payload.dup
14
+ new_metadata = metadata.dup
15
+ benchmark = Benchmark.measure do
16
+ _with_span do
17
+ new_metadata[:key] = decode_key(metadata[:key]) if self.class.config[:key_configured]
18
+ decoded_payload = payload ? self.class.decoder.decode(payload) : nil
19
+ _received_message(decoded_payload, new_metadata)
20
+ yield decoded_payload, new_metadata
21
+ end
22
+ end
23
+ _handle_success(benchmark.real, decoded_payload, new_metadata)
24
+ rescue StandardError => e
25
+ _handle_error(e, decoded_payload, new_metadata)
26
+ end
27
+
28
+ # Consume incoming messages.
29
+ # @param _payload [String]
30
+ # @param _metadata [Hash]
31
+ def consume(_payload, _metadata)
32
+ raise NotImplementedError
33
+ end
34
+
35
+ private
36
+
37
+ def _received_message(payload, metadata)
38
+ Deimos.config.logger.info(
39
+ message: 'Got Kafka event',
40
+ payload: payload,
41
+ metadata: metadata
42
+ )
43
+ Deimos.config.metrics&.increment('handler', tags: %W(
44
+ status:received
45
+ topic:#{metadata[:topic]}
46
+ ))
47
+ _report_time_delayed(payload, metadata)
48
+ end
49
+
50
+ # @param exception [Throwable]
51
+ # @param payload [Hash]
52
+ # @param metadata [Hash]
53
+ def _handle_error(exception, payload, metadata)
54
+ Deimos.config.metrics&.increment(
55
+ 'handler',
56
+ tags: %W(
57
+ status:error
58
+ topic:#{metadata[:topic]}
59
+ )
60
+ )
61
+ Deimos.config.logger.warn(
62
+ message: 'Error consuming message',
63
+ handler: self.class.name,
64
+ metadata: metadata,
65
+ data: payload,
66
+ error_message: exception.message,
67
+ error: exception.backtrace
68
+ )
69
+
70
+ _error(exception, payload, metadata)
71
+ end
72
+
73
+ # @param time_taken [Float]
74
+ # @param payload [Hash]
75
+ # @param metadata [Hash]
76
+ def _handle_success(time_taken, payload, metadata)
77
+ Deimos.config.metrics&.histogram('handler', time_taken, tags: %W(
78
+ time:consume
79
+ topic:#{metadata[:topic]}
80
+ ))
81
+ Deimos.config.metrics&.increment('handler', tags: %W(
82
+ status:success
83
+ topic:#{metadata[:topic]}
84
+ ))
85
+ Deimos.config.logger.info(
86
+ message: 'Finished processing Kafka event',
87
+ payload: payload,
88
+ time_elapsed: time_taken,
89
+ metadata: metadata
90
+ )
91
+ end
92
+ end
93
+ end
94
+ end
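
A matching sketch for the message-by-message side, i.e. a handler that `around_consume` invokes once per event (class, model, and field names are hypothetical):

class MyConsumer < Deimos::Consumer
  # payload arrives already decoded by self.class.decoder, and
  # metadata[:key] has been decoded when a key_config is set.
  def consume(payload, metadata)
    return if payload.nil? # tombstone

    Widget.create!(id: metadata[:key], name: payload['name'])
  end
end
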
data/lib/deimos/consumer.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'deimos/consume/batch_consumption'
+require 'deimos/consume/message_consumption'
+
+# Class to consume messages coming from a Kafka topic.
+# Note: according to the docs, instances of your handler will be created
+# for every incoming message/batch. This class should be lightweight.
+module Deimos
+  # Basic consumer class. Inherit from this class and override either consume
+  # or consume_batch, depending on the delivery mode of your listener.
+  # `consume` -> use `delivery :message` or `delivery :batch`
+  # `consume_batch` -> use `delivery :inline_batch`
+  class Consumer
+    include Consume::MessageConsumption
+    include Consume::BatchConsumption
+    include SharedConfig
+
+    class << self
+      # @return [Deimos::SchemaBackends::Base]
+      def decoder
+        @decoder ||= Deimos.schema_backend(schema: config[:schema],
+                                           namespace: config[:namespace])
+      end
+
+      # @return [Deimos::SchemaBackends::Base]
+      def key_decoder
+        @key_decoder ||= Deimos.schema_backend(schema: config[:key_schema],
+                                               namespace: config[:namespace])
+      end
+    end
+
+    # Helper method to decode an encoded key.
+    # @param key [String]
+    # @return [Object] the decoded key.
+    def decode_key(key)
+      return nil if key.nil?
+
+      config = self.class.config
+      unless config[:key_configured]
+        raise 'No key config given - if you are not decoding keys, please use '\
+              '`key_config plain: true`'
+      end
+
+      if config[:key_field]
+        self.class.decoder.decode_key(key, config[:key_field])
+      elsif config[:key_schema]
+        self.class.key_decoder.decode(key, schema: config[:key_schema])
+      else # no encoding
+        key
+      end
+    end
+
+    private
+
+    def _with_span
+      @span = Deimos.config.tracer&.start(
+        'deimos-consumer',
+        resource: self.class.name.gsub('::', '-')
+      )
+      yield
+    ensure
+      Deimos.config.tracer&.finish(@span)
+    end
+
+    def _report_time_delayed(payload, metadata)
+      return if payload.nil? || payload['timestamp'].blank?
+
+      begin
+        time_delayed = Time.now.in_time_zone - payload['timestamp'].to_datetime
+      rescue ArgumentError
+        Deimos.config.logger.info(
+          message: "Error parsing timestamp! #{payload['timestamp']}"
+        )
+        return
+      end
+      Deimos.config.metrics&.histogram('handler', time_delayed, tags: %W(
+                                         time:time_delayed
+                                         topic:#{metadata[:topic]}
+                                       ))
+    end
+
+    # Overridable method to determine if a given error should be considered
+    # "fatal" and always be reraised.
+    # @param _error [Exception]
+    # @param _payload [Hash]
+    # @param _metadata [Hash]
+    # @return [Boolean]
+    def fatal_error?(_error, _payload, _metadata)
+      false
+    end
+
+    # @param exception [Exception]
+    # @param payload [Hash]
+    # @param metadata [Hash]
+    def _error(exception, payload, metadata)
+      Deimos.config.tracer&.set_error(@span, exception)
+
+      raise if Deimos.config.consumers.reraise_errors ||
+               Deimos.config.consumers.fatal_error&.call(exception, payload, metadata) ||
+               fatal_error?(exception, payload, metadata)
+    end
+  end
+end
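
`decode_key` picks its strategy from the consumer's `key_config`: `plain: true` returns the key untouched, `field:` extracts a single field through the payload decoder, and `schema:` decodes against a dedicated key schema. A sketch of those options together with a `fatal_error?` override, assuming the class-level DSL provided by SharedConfig (schema names and the error class are illustrative):

class MyConsumer < Deimos::Consumer
  schema 'MySchema'
  namespace 'com.my-namespace'

  # One of:
  #   key_config plain: true        # decode_key returns the raw key
  #   key_config field: :id         # decoder.decode_key(key, :id)
  key_config schema: 'MySchema-key' # key_decoder.decode(key, schema: 'MySchema-key')

  # Treat validation failures as fatal so _error always reraises them,
  # even when consumers.reraise_errors is disabled.
  def fatal_error?(error, _payload, _metadata)
    error.is_a?(ActiveRecord::RecordInvalid)
  end

  def consume(payload, metadata)
    # metadata[:key] is now the hash decoded via MySchema-key
  end
end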