sbmt-kafka_consumer 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. checksums.yaml +7 -0
  2. data/.rspec +3 -0
  3. data/.rubocop.yml +34 -0
  4. data/Appraisals +23 -0
  5. data/CHANGELOG.md +292 -0
  6. data/Gemfile +5 -0
  7. data/LICENSE +21 -0
  8. data/README.md +296 -0
  9. data/Rakefile +12 -0
  10. data/config.ru +9 -0
  11. data/dip.yml +84 -0
  12. data/docker-compose.yml +68 -0
  13. data/exe/kafka_consumer +16 -0
  14. data/lefthook-local.dip_example.yml +4 -0
  15. data/lefthook.yml +6 -0
  16. data/lib/generators/kafka_consumer/concerns/configuration.rb +30 -0
  17. data/lib/generators/kafka_consumer/consumer/USAGE +24 -0
  18. data/lib/generators/kafka_consumer/consumer/consumer_generator.rb +41 -0
  19. data/lib/generators/kafka_consumer/consumer/templates/consumer.rb.erb +9 -0
  20. data/lib/generators/kafka_consumer/consumer/templates/consumer_group.yml.erb +13 -0
  21. data/lib/generators/kafka_consumer/inbox_consumer/USAGE +22 -0
  22. data/lib/generators/kafka_consumer/inbox_consumer/inbox_consumer_generator.rb +48 -0
  23. data/lib/generators/kafka_consumer/inbox_consumer/templates/consumer_group.yml.erb +22 -0
  24. data/lib/generators/kafka_consumer/install/USAGE +9 -0
  25. data/lib/generators/kafka_consumer/install/install_generator.rb +22 -0
  26. data/lib/generators/kafka_consumer/install/templates/Kafkafile +3 -0
  27. data/lib/generators/kafka_consumer/install/templates/kafka_consumer.yml +59 -0
  28. data/lib/sbmt/kafka_consumer/app_initializer.rb +13 -0
  29. data/lib/sbmt/kafka_consumer/base_consumer.rb +104 -0
  30. data/lib/sbmt/kafka_consumer/cli.rb +55 -0
  31. data/lib/sbmt/kafka_consumer/client_configurer.rb +73 -0
  32. data/lib/sbmt/kafka_consumer/config/auth.rb +56 -0
  33. data/lib/sbmt/kafka_consumer/config/consumer.rb +16 -0
  34. data/lib/sbmt/kafka_consumer/config/consumer_group.rb +9 -0
  35. data/lib/sbmt/kafka_consumer/config/deserializer.rb +15 -0
  36. data/lib/sbmt/kafka_consumer/config/kafka.rb +32 -0
  37. data/lib/sbmt/kafka_consumer/config/metrics.rb +10 -0
  38. data/lib/sbmt/kafka_consumer/config/probes/endpoints.rb +13 -0
  39. data/lib/sbmt/kafka_consumer/config/probes/liveness_probe.rb +11 -0
  40. data/lib/sbmt/kafka_consumer/config/probes/readiness_probe.rb +10 -0
  41. data/lib/sbmt/kafka_consumer/config/probes.rb +8 -0
  42. data/lib/sbmt/kafka_consumer/config/topic.rb +14 -0
  43. data/lib/sbmt/kafka_consumer/config.rb +76 -0
  44. data/lib/sbmt/kafka_consumer/inbox_consumer.rb +129 -0
  45. data/lib/sbmt/kafka_consumer/instrumentation/base_monitor.rb +25 -0
  46. data/lib/sbmt/kafka_consumer/instrumentation/chainable_monitor.rb +31 -0
  47. data/lib/sbmt/kafka_consumer/instrumentation/listener_helper.rb +47 -0
  48. data/lib/sbmt/kafka_consumer/instrumentation/liveness_listener.rb +71 -0
  49. data/lib/sbmt/kafka_consumer/instrumentation/logger_listener.rb +44 -0
  50. data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_loader.rb +23 -0
  51. data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer.rb +106 -0
  52. data/lib/sbmt/kafka_consumer/instrumentation/readiness_listener.rb +38 -0
  53. data/lib/sbmt/kafka_consumer/instrumentation/sentry_tracer.rb +103 -0
  54. data/lib/sbmt/kafka_consumer/instrumentation/tracer.rb +18 -0
  55. data/lib/sbmt/kafka_consumer/instrumentation/tracing_monitor.rb +17 -0
  56. data/lib/sbmt/kafka_consumer/instrumentation/yabeda_metrics_listener.rb +186 -0
  57. data/lib/sbmt/kafka_consumer/probes/host.rb +75 -0
  58. data/lib/sbmt/kafka_consumer/probes/probe.rb +33 -0
  59. data/lib/sbmt/kafka_consumer/railtie.rb +31 -0
  60. data/lib/sbmt/kafka_consumer/routing/karafka_v1_consumer_mapper.rb +12 -0
  61. data/lib/sbmt/kafka_consumer/routing/karafka_v2_consumer_mapper.rb +9 -0
  62. data/lib/sbmt/kafka_consumer/serialization/base_deserializer.rb +19 -0
  63. data/lib/sbmt/kafka_consumer/serialization/json_deserializer.rb +18 -0
  64. data/lib/sbmt/kafka_consumer/serialization/null_deserializer.rb +13 -0
  65. data/lib/sbmt/kafka_consumer/serialization/protobuf_deserializer.rb +27 -0
  66. data/lib/sbmt/kafka_consumer/server.rb +35 -0
  67. data/lib/sbmt/kafka_consumer/simple_logging_consumer.rb +11 -0
  68. data/lib/sbmt/kafka_consumer/testing/shared_contexts/with_sbmt_karafka_consumer.rb +61 -0
  69. data/lib/sbmt/kafka_consumer/testing.rb +5 -0
  70. data/lib/sbmt/kafka_consumer/types.rb +15 -0
  71. data/lib/sbmt/kafka_consumer/version.rb +7 -0
  72. data/lib/sbmt/kafka_consumer/yabeda_configurer.rb +91 -0
  73. data/lib/sbmt/kafka_consumer.rb +59 -0
  74. data/rubocop/rspec.yml +29 -0
  75. data/sbmt-kafka_consumer.gemspec +70 -0
  76. metadata +571 -0
data/lib/sbmt/kafka_consumer/instrumentation/logger_listener.rb
@@ -0,0 +1,44 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       class LoggerListener < Karafka::Instrumentation::LoggerListener
+         include ListenerHelper
+         CUSTOM_ERROR_TYPES = %w[consumer.base.consume_one consumer.inbox.consume_one].freeze
+
+         def on_error_occurred(event)
+           type = event[:type]
+           error = event[:error]
+
+           # catch only consumer-specific errors here
+           # and let the default handler process the rest
+           return super unless CUSTOM_ERROR_TYPES.include?(type)
+
+           tags = {}
+           tags[:status] = event[:status] if type == "consumer.inbox.consume_one"
+
+           logger.tagged(
+             type: type,
+             **tags
+           ) do
+             logger.error(error_message(error))
+             log_backtrace(error)
+           end
+         end
+
+         # BaseConsumer events
+         def on_consumer_consumed_one(event)
+           logger.info("Successfully consumed message in #{event.payload[:time]} ms")
+         end
+
+         # InboxConsumer events
+         def on_consumer_inbox_consumed_one(event)
+           logger.tagged(status: event[:status]) do
+             logger.info("Successfully consumed message with uuid: #{event[:message_uuid]} in #{event.payload[:time]} ms")
+           end
+         end
+       end
+     end
+   end
+ end
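Usage note: Karafka's notifications bus derives handler names from event ids ("consumer.consumed_one" dispatches to on_consumer_consumed_one), so a listener like this only needs to be subscribed once. A minimal sketch, assuming a booted Karafka app with the default monitor:

```ruby
# Minimal subscription sketch: once subscribed, the monitor calls
# on_consumer_consumed_one / on_error_occurred etc. on this object
# whenever the matching event is instrumented.
listener = Sbmt::KafkaConsumer::Instrumentation::LoggerListener.new
Karafka.monitor.subscribe(listener)
```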
data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_loader.rb
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ require "opentelemetry"
+ require "opentelemetry-common"
+ require "opentelemetry-instrumentation-base"
+
+ require_relative "open_telemetry_tracer"
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       class OpenTelemetryLoader < OpenTelemetry::Instrumentation::Base
+         install do |_config|
+           OpenTelemetryTracer.enabled = true
+         end
+
+         present do
+           defined?(OpenTelemetryTracer)
+         end
+       end
+     end
+   end
+ end
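Because OpenTelemetryLoader subclasses OpenTelemetry::Instrumentation::Base, it registers itself with the OTel instrumentation registry, and its install block (which flips OpenTelemetryTracer.enabled) runs through the standard SDK configuration flow. A hedged sketch of the host-app side:

```ruby
# Sketch, assuming the host app configures the OpenTelemetry SDK itself;
# use_all installs every registered instrumentation, including this loader,
# which in turn enables OpenTelemetryTracer.
require "opentelemetry/sdk"

OpenTelemetry::SDK.configure do |c|
  c.use_all
end
```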
data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer.rb
@@ -0,0 +1,106 @@
+ # frozen_string_literal: true
+
+ require_relative "tracer"
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       class OpenTelemetryTracer < ::Sbmt::KafkaConsumer::Instrumentation::Tracer
+         class << self
+           def enabled?
+             !!@enabled
+           end
+
+           attr_writer :enabled
+         end
+
+         def enabled?
+           self.class.enabled?
+         end
+
+         def trace(&block)
+           return handle_consumed_one(&block) if @event_id == "consumer.consumed_one"
+           return handle_inbox_consumed_one(&block) if @event_id == "consumer.inbox.consumed_one"
+           return handle_error(&block) if @event_id == "error.occurred"
+
+           yield
+         end
+
+         def handle_consumed_one
+           return yield unless enabled?
+
+           consumer = @payload[:caller]
+           message = @payload[:message]
+
+           parent_context = ::OpenTelemetry.propagation.extract(message.headers, getter: ::OpenTelemetry::Context::Propagation.text_map_getter)
+           span_context = ::OpenTelemetry::Trace.current_span(parent_context).context
+           links = [::OpenTelemetry::Trace::Link.new(span_context)] if span_context.valid?
+
+           ::OpenTelemetry::Context.with_current(parent_context) do
+             tracer.in_span("consume #{message.topic}", links: links, attributes: consumer_attrs(consumer, message), kind: :consumer) do
+               yield
+             end
+           end
+         end
+
+         def handle_inbox_consumed_one
+           return yield unless enabled?
+
+           inbox_name = @payload[:inbox_name]
+           event_name = @payload[:event_name]
+           status = @payload[:status]
+
+           inbox_attributes = {
+             "inbox.inbox_name" => inbox_name,
+             "inbox.event_name" => event_name,
+             "inbox.status" => status
+           }.compact
+
+           tracer.in_span("inbox #{inbox_name} process", attributes: inbox_attributes, kind: :consumer) do
+             yield
+           end
+         end
+
+         def handle_error
+           return yield unless enabled?
+
+           current_span = OpenTelemetry::Trace.current_span
+           current_span&.status = OpenTelemetry::Trace::Status.error
+
+           yield
+         end
+
+         private
+
+         def tracer
+           ::Sbmt::KafkaConsumer::Instrumentation::OpenTelemetryLoader.instance.tracer
+         end
+
+         def consumer_attrs(consumer, message)
+           attributes = {
+             "messaging.system" => "kafka",
+             "messaging.destination" => message.topic,
+             "messaging.destination_kind" => "topic",
+             "messaging.kafka.consumer_group" => consumer.topic.consumer_group.id,
+             "messaging.kafka.partition" => message.partition,
+             "messaging.kafka.offset" => message.offset
+           }
+
+           message_key = extract_message_key(message.key)
+           attributes["messaging.kafka.message_key"] = message_key if message_key
+
+           attributes.compact
+         end
+
+         def extract_message_key(key)
+           # skip re-encoding if the key is already valid UTF-8
+           return key if key.nil? || (key.encoding == Encoding::UTF_8 && key.valid_encoding?)
+
+           key.encode(Encoding::UTF_8)
+         rescue Encoding::UndefinedConversionError
+           nil
+         end
+       end
+     end
+   end
+ end
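handle_consumed_one only links the consume span to an upstream trace when the producer injected trace context into the message headers. A producer-side sketch using the standard OTel propagation API (the producer call and topic are illustrative, not part of this gem):

```ruby
# Illustrative producer side: inject the current trace context into the
# message headers so propagation.extract in handle_consumed_one can link
# the consume span back to this trace.
headers = {}
OpenTelemetry.propagation.inject(headers)

payload = {order_id: 1}.to_json
Karafka.producer.produce_sync(topic: "orders", payload: payload, headers: headers)
```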
data/lib/sbmt/kafka_consumer/instrumentation/readiness_listener.rb
@@ -0,0 +1,38 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       class ReadinessListener
+         include ListenerHelper
+         include KafkaConsumer::Probes::Probe
+
+         def initialize
+           setup_subscription
+         end
+
+         def on_app_running(_event)
+           @ready = true
+         end
+
+         def on_app_stopping(_event)
+           @ready = false
+         end
+
+         def probe(_env)
+           ready? ? probe_ok(ready: true) : probe_error(ready: false)
+         end
+
+         private
+
+         def ready?
+           @ready
+         end
+
+         def setup_subscription
+           Karafka::App.monitor.subscribe(self)
+         end
+       end
+     end
+   end
+ end
data/lib/sbmt/kafka_consumer/instrumentation/sentry_tracer.rb
@@ -0,0 +1,103 @@
+ # frozen_string_literal: true
+
+ require "sentry-ruby"
+ require_relative "tracer"
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       class SentryTracer < ::Sbmt::KafkaConsumer::Instrumentation::Tracer
+         CONSUMER_ERROR_TYPES = %w[
+           consumer.base.consume_one
+           consumer.inbox.consume_one
+         ].freeze
+
+         def trace(&block)
+           return handle_consumed_one(&block) if @event_id == "consumer.consumed_one"
+           return handle_error(&block) if @event_id == "error.occurred"
+
+           yield
+         end
+
+         def handle_consumed_one
+           return yield unless ::Sentry.initialized?
+
+           consumer = @payload[:caller]
+           message = @payload[:message]
+           trace_id = @payload[:trace_id]
+
+           scope, transaction = start_transaction(trace_id, consumer, message)
+
+           begin
+             yield
+           rescue
+             finish_transaction(transaction, 500)
+             raise
+           end
+
+           finish_transaction(transaction, 200)
+           scope.clear
+         end
+
+         def handle_error
+           return yield unless ::Sentry.initialized?
+
+           exception = @payload[:error]
+           return yield unless exception.respond_to?(:message)
+
+           ::Sentry.with_scope do |scope|
+             if detailed_logging_enabled?
+               message = @payload[:message]
+               if message.present?
+                 contexts = {
+                   payload: message_payload(message),
+                   metadata: message.metadata
+                 }
+                 scope.set_contexts(contexts: contexts)
+               end
+             end
+             ::Sentry.capture_exception(exception)
+           end
+
+           yield
+         end
+
+         private
+
+         def start_transaction(trace_id, consumer, message)
+           scope = ::Sentry.get_current_scope
+           scope.set_tags(trace_id: trace_id, topic: message.topic, offset: message.offset)
+           scope.set_transaction_name("Sbmt/KafkaConsumer/#{consumer.class.name}")
+
+           transaction = ::Sentry.start_transaction(name: scope.transaction_name, op: "kafka-consumer")
+
+           scope.set_span(transaction) if transaction
+
+           [scope, transaction]
+         end
+
+         def finish_transaction(transaction, status)
+           return unless transaction
+
+           transaction.set_http_status(status)
+           transaction.finish
+         end
+
+         def detailed_logging_enabled?
+           consumer = @payload[:caller]
+           event_type = @payload[:type]
+
+           CONSUMER_ERROR_TYPES.include?(event_type) && consumer.send(:log_payload?)
+         end
+
+         def message_payload(message)
+           message.payload
+         rescue => _ex
+           # accessing payload may raise a deserialization error,
+           # so fall back to raw_payload in that case
+           message.raw_payload
+         end
+       end
+     end
+   end
+ end
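Every handler here bails out unless ::Sentry.initialized?, so transactions and error capture only appear once the host app initializes Sentry, along these lines:

```ruby
# Host-app initialization sketch (the DSN env var is an assumption);
# without this, SentryTracer yields straight through and reports nothing.
Sentry.init do |config|
  config.dsn = ENV["SENTRY_DSN"]
  config.traces_sample_rate = 1.0 # sample all kafka-consumer transactions
end
```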
data/lib/sbmt/kafka_consumer/instrumentation/tracer.rb
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       class Tracer
+         def initialize(event_id, payload)
+           @event_id = event_id
+           @payload = payload
+         end
+
+         def trace(&block)
+           yield
+         end
+       end
+     end
+   end
+ end
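Tracer is the strategy base class the two tracers above subclass: it captures the event id and payload, and trace must always yield so the instrumented block still runs. A hypothetical subclass following the same pattern:

```ruby
# Hypothetical example (not part of the gem): time one event type and
# pass everything else through untouched, as the base class does.
class TimingTracer < Sbmt::KafkaConsumer::Instrumentation::Tracer
  def trace(&block)
    return yield unless @event_id == "consumer.consumed_one"

    started_at = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    begin
      yield
    ensure
      elapsed_ms = (Process.clock_gettime(Process::CLOCK_MONOTONIC) - started_at) * 1000
      Sbmt::KafkaConsumer.logger.debug("consumed_one took #{elapsed_ms.round(1)} ms")
    end
  end
end
```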
data/lib/sbmt/kafka_consumer/instrumentation/tracing_monitor.rb
@@ -0,0 +1,17 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       class TracingMonitor < ChainableMonitor
+         def initialize
+           tracers = []
+           tracers << OpenTelemetryTracer if defined?(OpenTelemetryTracer)
+           tracers << SentryTracer if defined?(SentryTracer)
+
+           super(tracers)
+         end
+       end
+     end
+   end
+ end
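TracingMonitor builds its tracer chain from whichever tracer classes got loaded (the railtie below requires them conditionally). A sketch of how such a monitor is handed to Karafka, as an assumption about what the gem's boot code does internally:

```ruby
# Wiring sketch (assumption: the gem performs the equivalent during boot):
# installing TracingMonitor as the Karafka monitor routes every instrumented
# event through the OpenTelemetry/Sentry tracer chain.
Karafka::App.setup do |config|
  config.monitor = Sbmt::KafkaConsumer::Instrumentation::TracingMonitor.new
end
```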
data/lib/sbmt/kafka_consumer/instrumentation/yabeda_metrics_listener.rb
@@ -0,0 +1,186 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       class YabedaMetricsListener
+         include ListenerHelper
+
+         delegate :logger, to: ::Sbmt::KafkaConsumer
+
+         def on_statistics_emitted(event)
+           # statistics.emitted is executed in the main rdkafka thread,
+           # so we report asynchronously to avoid hanging that thread
+           report_rdkafka_stats(event)
+         end
+
+         def on_consumer_consumed(event)
+           # batch processed
+           consumer = event[:caller]
+
+           Yabeda.kafka_consumer.batch_size
+             .measure(
+               consumer_base_tags(consumer),
+               consumer.messages.count
+             )
+
+           Yabeda.kafka_consumer.process_batch_latency
+             .measure(
+               consumer_base_tags(consumer),
+               time_elapsed_sec(event)
+             )
+
+           Yabeda.kafka_consumer.time_lag
+             .set(
+               consumer_base_tags(consumer),
+               consumer.messages.metadata.consumption_lag
+             )
+         end
+
+         def on_consumer_consumed_one(event)
+           # one message processed by any consumer
+
+           consumer = event[:caller]
+           Yabeda.kafka_consumer.process_messages
+             .increment(consumer_base_tags(consumer))
+           Yabeda.kafka_consumer.process_message_latency
+             .measure(
+               consumer_base_tags(consumer),
+               time_elapsed_sec(event)
+             )
+         end
+
+         def on_consumer_inbox_consumed_one(event)
+           # one message processed by InboxConsumer
+           Yabeda
+             .kafka_consumer
+             .inbox_consumes
+             .increment(consumer_inbox_tags(event))
+         end
+
+         def on_error_occurred(event)
+           caller = event[:caller]
+
+           return unless caller.respond_to?(:messages)
+
+           # caller is a BaseConsumer subclass
+           case event[:type]
+           when "consumer.revoked.error"
+             Yabeda.kafka_consumer.leave_group_errors
+               .increment(consumer_base_tags(caller))
+           when "consumer.consume.error"
+             Yabeda.kafka_consumer.process_batch_errors
+               .increment(consumer_base_tags(caller))
+           when "consumer.base.consume_one"
+             Yabeda.kafka_consumer.process_message_errors
+               .increment(consumer_base_tags(caller))
+           when "consumer.inbox.consume_one"
+             Yabeda.kafka_consumer.inbox_consumes
+               .increment(consumer_inbox_tags(event))
+           end
+         end
+
+         private
+
+         def consumer_base_tags(consumer)
+           {
+             client: Karafka::App.config.client_id,
+             group_id: consumer.topic.consumer_group.id,
+             topic: consumer.messages.metadata.topic,
+             partition: consumer.messages.metadata.partition
+           }
+         end
+
+         def consumer_inbox_tags(event)
+           caller = event[:caller]
+
+           consumer_base_tags(caller)
+             .merge(inbox_tags(event))
+         end
+
+         def report_rdkafka_stats(event, async: true)
+           thread = Thread.new do
+             # https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md
+             stats = event.payload[:statistics]
+             consumer_group_id = event.payload[:consumer_group_id]
+             consumer_group_stats = stats["cgrp"]
+             broker_stats = stats["brokers"]
+             topic_stats = stats["topics"]
+
+             report_broker_stats(broker_stats)
+             report_consumer_group_stats(consumer_group_id, consumer_group_stats)
+             report_topic_stats(consumer_group_id, topic_stats)
+           rescue => e
+             logger.error("exception happened while reporting rdkafka metrics: #{e.message}")
+             logger.error(e.backtrace&.join("\n"))
+           end
+
+           thread.join unless async
+         end
+
+         def report_broker_stats(brokers)
+           brokers.each_value do |broker_statistics|
+             # skip bootstrap nodes
+             next if broker_statistics["nodeid"] == -1
+
+             broker_tags = {
+               client: Karafka::App.config.client_id,
+               broker: broker_statistics["nodename"]
+             }
+
+             Yabeda.kafka_api.calls
+               .increment(broker_tags, by: broker_statistics["tx"])
+             Yabeda.kafka_api.latency
+               .measure(broker_tags, broker_statistics["rtt"]["avg"])
+             Yabeda.kafka_api.request_size
+               .measure(broker_tags, broker_statistics["txbytes"])
+             Yabeda.kafka_api.response_size
+               .measure(broker_tags, broker_statistics["rxbytes"])
+             Yabeda.kafka_api.errors
+               .increment(broker_tags, by: broker_statistics["txerrs"] + broker_statistics["rxerrs"])
+           end
+         end
+
+         def report_consumer_group_stats(group_id, group_stats)
+           return if group_stats.blank?
+
+           cg_tags = {
+             client: Karafka::App.config.client_id,
+             group_id: group_id,
+             state: group_stats["state"]
+           }
+
+           Yabeda.kafka_consumer.consumer_group_rebalances
+             .increment(cg_tags, by: group_stats["rebalance_cnt"])
+         end
+
+         def report_topic_stats(group_id, topic_stats)
+           return if topic_stats.blank?
+
+           topic_stats.each do |topic_name, topic_values|
+             topic_values["partitions"].each do |partition_name, partition_statistics|
+               next if partition_name == "-1"
+
+               # skip partitions until lag info is available
+               offset_lag = partition_statistics["consumer_lag"]
+               next if offset_lag == -1
+
+               Yabeda.kafka_consumer.offset_lag
+                 .set({
+                   client: Karafka::App.config.client_id,
+                   group_id: group_id,
+                   topic: topic_name,
+                   partition: partition_name
+                 },
+                   offset_lag)
+             end
+           end
+         end
+
+         def time_elapsed_sec(event)
+           (event.payload[:time] || 0) / 1000.0
+         end
+       end
+     end
+   end
+ end
1
+ # frozen_string_literal: true
2
+
3
+ module Sbmt
4
+ module KafkaConsumer
5
+ module Probes
6
+ class Host
7
+ class << self
8
+ def run_async
9
+ config = Sbmt::KafkaConsumer::Config.new
10
+ if config.probes[:port] == config.metrics[:port]
11
+ start_on_single_port(config)
12
+ else
13
+ start_on_different_ports(config)
14
+ end
15
+ end
16
+
17
+ private
18
+
19
+ def health_check_app(config)
20
+ ::HttpHealthCheck::RackApp.configure do |c|
21
+ c.logger Rails.logger unless Rails.env.production?
22
+
23
+ liveness = config[:liveness]
24
+ if liveness[:enabled]
25
+ c.probe liveness[:path], Sbmt::KafkaConsumer::Instrumentation::LivenessListener.new(
26
+ timeout_sec: liveness[:timeout]
27
+ )
28
+ end
29
+
30
+ readiness = config[:readiness]
31
+ if readiness[:enabled]
32
+ c.probe readiness[:path], Sbmt::KafkaConsumer::Instrumentation::ReadinessListener.new
33
+ end
34
+ end
35
+ end
36
+
37
+ def start_on_single_port(config)
38
+ app = health_check_app(config.probes[:endpoints])
39
+ middlewares = defined?(Yabeda) ? {::Yabeda::Prometheus::Exporter => {path: config.metrics[:path]}} : {}
40
+ start_webrick(app, middlewares: middlewares, port: config.probes[:port])
41
+ end
42
+
43
+ def start_on_different_ports(config)
44
+ ::HttpHealthCheck.run_server_async(
45
+ port: config.probes[:port],
46
+ rack_app: health_check_app(config.probes[:endpoints])
47
+ )
48
+ if defined?(Yabeda)
49
+ start_webrick(
50
+ Yabeda::Prometheus::Mmap::Exporter::NOT_FOUND_HANDLER,
51
+ middlewares: {::Yabeda::Prometheus::Exporter => {path: config.metrics[:path]}},
52
+ port: config.metrics[:port]
53
+ )
54
+ end
55
+ end
56
+
57
+ def start_webrick(app, middlewares:, port:)
58
+ Thread.new do
59
+ ::Rack::Handler::WEBrick.run(
60
+ ::Rack::Builder.new do
61
+ middlewares.each do |middleware, options|
62
+ use middleware, **options
63
+ end
64
+ run app
65
+ end,
66
+ Host: "0.0.0.0",
67
+ Port: port
68
+ )
69
+ end
70
+ end
71
+ end
72
+ end
73
+ end
74
+ end
75
+ end
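Host.run_async serves the probes (and optionally Prometheus metrics) over plain HTTP from a background WEBrick thread, so the endpoints can be checked directly. Port and paths come from the config; the values below are assumptions:

```ruby
# Quick smoke check from a console (port/path values are illustrative;
# the real ones come from the probes/metrics sections of kafka_consumer.yml).
require "net/http"

puts Net::HTTP.get_response(URI("http://127.0.0.1:9394/liveness")).code  # "200" when alive
puts Net::HTTP.get_response(URI("http://127.0.0.1:9394/readiness")).code # "200" once the app is running
```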
data/lib/sbmt/kafka_consumer/probes/probe.rb
@@ -0,0 +1,33 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Probes
+       module Probe
+         HEADERS = {"Content-Type" => "application/json"}.freeze
+
+         def call(env)
+           with_error_handler { probe(env) }
+         end
+
+         def meta
+           {}
+         end
+
+         def probe_ok(extra_meta = {})
+           [200, HEADERS, [meta.merge(extra_meta).to_json]]
+         end
+
+         def probe_error(extra_meta = {})
+           [500, HEADERS, [meta.merge(extra_meta).to_json]]
+         end
+
+         def with_error_handler
+           yield
+         rescue => error
+           probe_error(error_class: error.class.name, error_message: error.message)
+         end
+       end
+     end
+   end
+ end
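The mixin turns any object with a probe(env) method into a Rack endpoint: call wraps it with error handling, and probe_ok/probe_error produce JSON Rack triplets. A hypothetical custom probe:

```ruby
# Hypothetical probe (not part of the gem) built on the mixin: #call,
# JSON rendering, and the rescue-to-500 behavior all come from Probes::Probe.
class DiskSpaceProbe
  include Sbmt::KafkaConsumer::Probes::Probe

  def probe(_env)
    free_enough = true # placeholder for a real disk-space check
    free_enough ? probe_ok(disk: "ok") : probe_error(disk: "full")
  end
end
```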
data/lib/sbmt/kafka_consumer/railtie.rb
@@ -0,0 +1,31 @@
+ # frozen_string_literal: true
+
+ require "rails/railtie"
+
+ module Sbmt
+   module KafkaConsumer
+     class Railtie < Rails::Railtie
+       initializer "sbmt_kafka_consumer_yabeda.configure_rails_initialization" do
+         YabedaConfigurer.configure
+       end
+
+       # must be consistent with the sbmt_karafka initializer's name
+       initializer "sbmt_kafka_consumer_karafka_init.configure_rails_initialization",
+         before: "karafka.require_karafka_boot_file" do
+         # skip loading the native karafka.rb boot file, because we use a custom init process
+         Karafka.instance_eval do
+           def boot_file; false; end
+         end
+       end
+
+       initializer "sbmt_kafka_consumer_opentelemetry_init.configure_rails_initialization",
+         after: "opentelemetry.configure" do
+         require "sbmt/kafka_consumer/instrumentation/open_telemetry_loader" if defined?(::OpenTelemetry)
+       end
+
+       config.after_initialize do
+         require "sbmt/kafka_consumer/instrumentation/sentry_tracer" if defined?(::Sentry)
+       end
+     end
+   end
+ end
data/lib/sbmt/kafka_consumer/routing/karafka_v1_consumer_mapper.rb
@@ -0,0 +1,12 @@
+ module Sbmt
+   module KafkaConsumer
+     module Routing
+       class KarafkaV1ConsumerMapper < Karafka::Routing::ConsumerMapper
+         def call(raw_consumer_group_name)
+           client_id = ActiveSupport::Inflector.underscore(Karafka::App.config.client_id).tr("/", "_")
+           "#{client_id}_#{raw_consumer_group_name}"
+         end
+       end
+     end
+   end
+ end
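For reference, the mapper reproduces karafka v1's group-naming scheme by prefixing group names with the underscored client id. Assuming a client_id of "Shp/OrdersApp" (illustrative):

```ruby
# Illustrative result, assuming Karafka::App.config.client_id == "Shp/OrdersApp":
mapper = Sbmt::KafkaConsumer::Routing::KarafkaV1ConsumerMapper.new
mapper.call("orders")
# => "shp_orders_app_orders"  (underscored client id, "/" -> "_", then prefixed)
```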