sbmt-kafka_consumer 2.0.0

Files changed (76)
  1. checksums.yaml +7 -0
  2. data/.rspec +3 -0
  3. data/.rubocop.yml +34 -0
  4. data/Appraisals +23 -0
  5. data/CHANGELOG.md +292 -0
  6. data/Gemfile +5 -0
  7. data/LICENSE +21 -0
  8. data/README.md +296 -0
  9. data/Rakefile +12 -0
  10. data/config.ru +9 -0
  11. data/dip.yml +84 -0
  12. data/docker-compose.yml +68 -0
  13. data/exe/kafka_consumer +16 -0
  14. data/lefthook-local.dip_example.yml +4 -0
  15. data/lefthook.yml +6 -0
  16. data/lib/generators/kafka_consumer/concerns/configuration.rb +30 -0
  17. data/lib/generators/kafka_consumer/consumer/USAGE +24 -0
  18. data/lib/generators/kafka_consumer/consumer/consumer_generator.rb +41 -0
  19. data/lib/generators/kafka_consumer/consumer/templates/consumer.rb.erb +9 -0
  20. data/lib/generators/kafka_consumer/consumer/templates/consumer_group.yml.erb +13 -0
  21. data/lib/generators/kafka_consumer/inbox_consumer/USAGE +22 -0
  22. data/lib/generators/kafka_consumer/inbox_consumer/inbox_consumer_generator.rb +48 -0
  23. data/lib/generators/kafka_consumer/inbox_consumer/templates/consumer_group.yml.erb +22 -0
  24. data/lib/generators/kafka_consumer/install/USAGE +9 -0
  25. data/lib/generators/kafka_consumer/install/install_generator.rb +22 -0
  26. data/lib/generators/kafka_consumer/install/templates/Kafkafile +3 -0
  27. data/lib/generators/kafka_consumer/install/templates/kafka_consumer.yml +59 -0
  28. data/lib/sbmt/kafka_consumer/app_initializer.rb +13 -0
  29. data/lib/sbmt/kafka_consumer/base_consumer.rb +104 -0
  30. data/lib/sbmt/kafka_consumer/cli.rb +55 -0
  31. data/lib/sbmt/kafka_consumer/client_configurer.rb +73 -0
  32. data/lib/sbmt/kafka_consumer/config/auth.rb +56 -0
  33. data/lib/sbmt/kafka_consumer/config/consumer.rb +16 -0
  34. data/lib/sbmt/kafka_consumer/config/consumer_group.rb +9 -0
  35. data/lib/sbmt/kafka_consumer/config/deserializer.rb +15 -0
  36. data/lib/sbmt/kafka_consumer/config/kafka.rb +32 -0
  37. data/lib/sbmt/kafka_consumer/config/metrics.rb +10 -0
  38. data/lib/sbmt/kafka_consumer/config/probes/endpoints.rb +13 -0
  39. data/lib/sbmt/kafka_consumer/config/probes/liveness_probe.rb +11 -0
  40. data/lib/sbmt/kafka_consumer/config/probes/readiness_probe.rb +10 -0
  41. data/lib/sbmt/kafka_consumer/config/probes.rb +8 -0
  42. data/lib/sbmt/kafka_consumer/config/topic.rb +14 -0
  43. data/lib/sbmt/kafka_consumer/config.rb +76 -0
  44. data/lib/sbmt/kafka_consumer/inbox_consumer.rb +129 -0
  45. data/lib/sbmt/kafka_consumer/instrumentation/base_monitor.rb +25 -0
  46. data/lib/sbmt/kafka_consumer/instrumentation/chainable_monitor.rb +31 -0
  47. data/lib/sbmt/kafka_consumer/instrumentation/listener_helper.rb +47 -0
  48. data/lib/sbmt/kafka_consumer/instrumentation/liveness_listener.rb +71 -0
  49. data/lib/sbmt/kafka_consumer/instrumentation/logger_listener.rb +44 -0
  50. data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_loader.rb +23 -0
  51. data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer.rb +106 -0
  52. data/lib/sbmt/kafka_consumer/instrumentation/readiness_listener.rb +38 -0
  53. data/lib/sbmt/kafka_consumer/instrumentation/sentry_tracer.rb +103 -0
  54. data/lib/sbmt/kafka_consumer/instrumentation/tracer.rb +18 -0
  55. data/lib/sbmt/kafka_consumer/instrumentation/tracing_monitor.rb +17 -0
  56. data/lib/sbmt/kafka_consumer/instrumentation/yabeda_metrics_listener.rb +186 -0
  57. data/lib/sbmt/kafka_consumer/probes/host.rb +75 -0
  58. data/lib/sbmt/kafka_consumer/probes/probe.rb +33 -0
  59. data/lib/sbmt/kafka_consumer/railtie.rb +31 -0
  60. data/lib/sbmt/kafka_consumer/routing/karafka_v1_consumer_mapper.rb +12 -0
  61. data/lib/sbmt/kafka_consumer/routing/karafka_v2_consumer_mapper.rb +9 -0
  62. data/lib/sbmt/kafka_consumer/serialization/base_deserializer.rb +19 -0
  63. data/lib/sbmt/kafka_consumer/serialization/json_deserializer.rb +18 -0
  64. data/lib/sbmt/kafka_consumer/serialization/null_deserializer.rb +13 -0
  65. data/lib/sbmt/kafka_consumer/serialization/protobuf_deserializer.rb +27 -0
  66. data/lib/sbmt/kafka_consumer/server.rb +35 -0
  67. data/lib/sbmt/kafka_consumer/simple_logging_consumer.rb +11 -0
  68. data/lib/sbmt/kafka_consumer/testing/shared_contexts/with_sbmt_karafka_consumer.rb +61 -0
  69. data/lib/sbmt/kafka_consumer/testing.rb +5 -0
  70. data/lib/sbmt/kafka_consumer/types.rb +15 -0
  71. data/lib/sbmt/kafka_consumer/version.rb +7 -0
  72. data/lib/sbmt/kafka_consumer/yabeda_configurer.rb +91 -0
  73. data/lib/sbmt/kafka_consumer.rb +59 -0
  74. data/rubocop/rspec.yml +29 -0
  75. data/sbmt-kafka_consumer.gemspec +70 -0
  76. metadata +571 -0

data/lib/sbmt/kafka_consumer/client_configurer.rb
@@ -0,0 +1,73 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::ClientConfigurer
+   def self.configure!(**opts)
+     config = Sbmt::KafkaConsumer::Config.new
+     Karafka::App.setup do |karafka_config|
+       karafka_config.monitor = config.monitor_class.classify.constantize.new
+       karafka_config.logger = Sbmt::KafkaConsumer.logger
+       karafka_config.deserializer = config.deserializer_class.classify.constantize.new
+
+       karafka_config.client_id = config.client_id
+       karafka_config.consumer_mapper = config.consumer_mapper_class.classify.constantize.new
+       karafka_config.kafka = config.to_kafka_options
+
+       karafka_config.pause_timeout = config.pause_timeout * 1_000 if config.pause_timeout.present?
+       karafka_config.pause_max_timeout = config.pause_max_timeout * 1_000 if config.pause_max_timeout.present?
+       karafka_config.max_wait_time = config.max_wait_time * 1_000 if config.max_wait_time.present?
+       karafka_config.shutdown_timeout = config.shutdown_timeout * 1_000 if config.shutdown_timeout.present?
+
+       karafka_config.pause_with_exponential_backoff = config.pause_with_exponential_backoff if config.pause_with_exponential_backoff.present?
+
+       karafka_config.concurrency = opts[:concurrency] || config.concurrency
+
+       # Do not validate topic naming consistency
+       # see https://github.com/karafka/karafka/wiki/FAQ#why-am-i-seeing-a-needs-to-be-consistent-namespacing-style-error
+       karafka_config.strict_topics_namespacing = false
+
+       # Recreate consumers with each batch. This allows Rails code reloading to work in
+       # development mode. Otherwise the Karafka process would not be aware of code changes.
+       karafka_config.consumer_persistence = !Rails.env.development?
+     end
+
+     Karafka.monitor.subscribe(config.logger_listener_class.classify.constantize.new)
+     Karafka.monitor.subscribe(config.metrics_listener_class.classify.constantize.new)
+
+     target_consumer_groups = if opts[:consumer_groups].blank?
+       config.consumer_groups
+     else
+       config.consumer_groups.select do |group|
+         opts[:consumer_groups].include?(group.id)
+       end
+     end
+
+     raise "No configured consumer groups found, exiting" if target_consumer_groups.blank?
+
+     # Clear routes in case the CLI runner tries to reconfigure them after the railtie
+     # initializer has already executed and done the same; otherwise we'd get a
+     # duplicate-routes error from sbmt-karafka's internal config validation.
+     Karafka::App.routes.clear
+     Karafka::App.routes.draw do
+       target_consumer_groups.each do |cg|
+         consumer_group cg.name do
+           cg.topics.each do |t|
+             topic t.name do
+               active t.active
+               manual_offset_management t.manual_offset_management
+               consumer t.consumer.consumer_klass
+               deserializer t.deserializer.instantiate if t.deserializer.klass.present?
+               kafka t.kafka_options if t.kafka_options.present?
+             end
+           end
+         end
+       end
+     end
+   end
+
+   def self.routes
+     Karafka::App.routes.map do |cg|
+       topics = cg.topics.map { |t| {name: t.name, deserializer: t.deserializer} }
+       {group: cg.id, topics: topics}
+     end
+   end
+ end
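
For context, a hedged sketch of how this configurer is driven (the CLI and railtie are the real entry points); the concurrency value and the "orders" group id below are invented:

    # Configure Karafka from kafka_consumer.yml, optionally narrowing the run
    # to specific consumer groups and overriding concurrency.
    Sbmt::KafkaConsumer::ClientConfigurer.configure!(
      concurrency: 8,               # falls back to config.concurrency when omitted
      consumer_groups: ["orders"]   # group ids; blank means all configured groups
    )

    # Inspect what was drawn into Karafka's routing:
    Sbmt::KafkaConsumer::ClientConfigurer.routes
    # => [{group: "...", topics: [{name: "...", deserializer: ...}]}, ...]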

data/lib/sbmt/kafka_consumer/config/auth.rb
@@ -0,0 +1,56 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config::Auth < Dry::Struct
+   transform_keys(&:to_sym)
+
+   AVAILABLE_AUTH_KINDS = %w[plaintext sasl_plaintext].freeze
+   DEFAULT_AUTH_KIND = "plaintext"
+
+   AVAILABLE_SASL_MECHANISMS = %w[PLAIN SCRAM-SHA-256 SCRAM-SHA-512].freeze
+   DEFAULT_SASL_MECHANISM = "SCRAM-SHA-512"
+
+   attribute :kind, Sbmt::KafkaConsumer::Types::Strict::String
+     .default(DEFAULT_AUTH_KIND)
+     .enum(*AVAILABLE_AUTH_KINDS)
+   attribute? :sasl_mechanism, Sbmt::KafkaConsumer::Types::Strict::String
+     .default(DEFAULT_SASL_MECHANISM)
+     .enum(*AVAILABLE_SASL_MECHANISMS)
+   attribute? :sasl_username, Sbmt::KafkaConsumer::Types::Strict::String
+   attribute? :sasl_password, Sbmt::KafkaConsumer::Types::Strict::String
+
+   def to_kafka_options
+     ensure_options_are_valid
+
+     opts = {}
+
+     case kind
+     when "sasl_plaintext"
+       opts.merge!(
+         "security.protocol": kind,
+         "sasl.mechanism": sasl_mechanism,
+         "sasl.username": sasl_username,
+         "sasl.password": sasl_password
+       )
+     when "plaintext"
+       opts[:"security.protocol"] = kind
+     else
+       raise Anyway::Config::ValidationError, "unknown auth kind: #{kind}"
+     end
+
+     opts.symbolize_keys
+   end
+
+   private
+
+   def ensure_options_are_valid
+     raise Anyway::Config::ValidationError, "unknown auth kind: #{kind}" unless AVAILABLE_AUTH_KINDS.include?(kind)
+
+     case kind
+     when "sasl_plaintext"
+       raise Anyway::Config::ValidationError, "sasl_username is required for #{kind} auth kind" if sasl_username.blank?
+       raise Anyway::Config::ValidationError, "sasl_password is required for #{kind} auth kind" if sasl_password.blank?
+       raise Anyway::Config::ValidationError, "sasl_mechanism is required for #{kind} auth kind" if sasl_mechanism.blank?
+       raise Anyway::Config::ValidationError, "invalid sasl_mechanism for #{kind} auth kind, available options are: [#{AVAILABLE_SASL_MECHANISMS.join(",")}]" unless AVAILABLE_SASL_MECHANISMS.include?(sasl_mechanism)
+     end
+   end
+ end
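
For illustration, the rdkafka options this struct yields (credentials invented; sasl_mechanism falls back to the SCRAM-SHA-512 default):

    auth = Sbmt::KafkaConsumer::Config::Auth.new(
      kind: "sasl_plaintext",
      sasl_username: "user",     # hypothetical credentials
      sasl_password: "secret"
    )
    auth.to_kafka_options
    # => {"security.protocol": "sasl_plaintext", "sasl.mechanism": "SCRAM-SHA-512",
    #     "sasl.username": "user", "sasl.password": "secret"}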

data/lib/sbmt/kafka_consumer/config/consumer.rb
@@ -0,0 +1,16 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config::Consumer < Dry::Struct
+   transform_keys(&:to_sym)
+
+   attribute :klass, Sbmt::KafkaConsumer::Types::Strict::String
+   attribute :init_attrs, Sbmt::KafkaConsumer::Types::ConfigAttrs.optional.default({}.freeze)
+
+   def consumer_klass
+     target_klass = klass.constantize
+
+     return target_klass.consumer_klass if init_attrs.blank?
+
+     target_klass.consumer_klass(**init_attrs)
+   end
+ end

data/lib/sbmt/kafka_consumer/config/consumer_group.rb
@@ -0,0 +1,9 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config::ConsumerGroup < Dry::Struct
+   transform_keys(&:to_sym)
+
+   attribute :id, Sbmt::KafkaConsumer::Types::Strict::String
+   attribute :name, Sbmt::KafkaConsumer::Types::Strict::String
+   attribute :topics, Sbmt::KafkaConsumer::Types.Array(Sbmt::KafkaConsumer::Types::ConfigTopic)
+ end

data/lib/sbmt/kafka_consumer/config/deserializer.rb
@@ -0,0 +1,15 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config::Deserializer < Dry::Struct
+   transform_keys(&:to_sym)
+
+   attribute :klass, Sbmt::KafkaConsumer::Types::Strict::String
+     .optional
+     .default(Sbmt::KafkaConsumer::Serialization::NullDeserializer.to_s.freeze)
+   attribute :init_attrs, Sbmt::KafkaConsumer::Types::ConfigAttrs.optional.default({}.freeze)
+
+   def instantiate
+     return klass.constantize.new if init_attrs.blank?
+     klass.constantize.new(**init_attrs)
+   end
+ end
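
A short sketch of how this resolves at routing time, assuming the bundled deserializers accept zero-arg construction when init_attrs is empty:

    # Default: klass falls back to the NullDeserializer.
    Sbmt::KafkaConsumer::Config::Deserializer.new.instantiate
    # => #<Sbmt::KafkaConsumer::Serialization::NullDeserializer ...>

    # Explicit class; JsonDeserializer ships with this gem.
    Sbmt::KafkaConsumer::Config::Deserializer.new(
      klass: "Sbmt::KafkaConsumer::Serialization::JsonDeserializer"
    ).instantiate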

data/lib/sbmt/kafka_consumer/config/kafka.rb
@@ -0,0 +1,32 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config::Kafka < Dry::Struct
+   transform_keys(&:to_sym)
+
+   # srv1:port1,srv2:port2,...
+   SERVERS_REGEXP = /^[a-z\d.\-:]+(,[a-z\d.\-:]+)*$/.freeze
+
+   attribute :servers, Sbmt::KafkaConsumer::Types::String.constrained(format: SERVERS_REGEXP)
+
+   # defaults are rdkafka's
+   # see https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md
+   attribute :heartbeat_timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(5)
+   attribute :session_timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(30)
+   attribute :reconnect_timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(3)
+   attribute :connect_timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(5)
+   attribute :socket_timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(30)
+
+   attribute :kafka_options, Sbmt::KafkaConsumer::Types::ConfigAttrs.optional.default({}.freeze)
+
+   def to_kafka_options
+     # root-level options take precedence over those in kafka_options
+     kafka_options.merge(
+       "bootstrap.servers": servers,
+       "heartbeat.interval.ms": heartbeat_timeout * 1_000,
+       "session.timeout.ms": session_timeout * 1_000,
+       "reconnect.backoff.max.ms": reconnect_timeout * 1_000,
+       "socket.connection.setup.timeout.ms": connect_timeout * 1_000,
+       "socket.timeout.ms": socket_timeout * 1_000
+     ).symbolize_keys
+   end
+ end
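
For illustration, how the second-based timeouts above map onto rdkafka's millisecond options (broker addresses invented):

    kafka = Sbmt::KafkaConsumer::Config::Kafka.new(servers: "kafka-1:9092,kafka-2:9092")
    kafka.to_kafka_options
    # => {"bootstrap.servers": "kafka-1:9092,kafka-2:9092",
    #     "heartbeat.interval.ms": 5000, "session.timeout.ms": 30000,
    #     "reconnect.backoff.max.ms": 3000,
    #     "socket.connection.setup.timeout.ms": 5000, "socket.timeout.ms": 30000}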

data/lib/sbmt/kafka_consumer/config/metrics.rb
@@ -0,0 +1,10 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config::Metrics < Dry::Struct
+   transform_keys(&:to_sym)
+
+   attribute? :port, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional
+   attribute :path, Sbmt::KafkaConsumer::Types::Strict::String
+     .optional
+     .default("/metrics")
+ end

data/lib/sbmt/kafka_consumer/config/probes/endpoints.rb
@@ -0,0 +1,13 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config::Probes::Endpoints < Dry::Struct
+   transform_keys(&:to_sym)
+
+   attribute :liveness, Sbmt::KafkaConsumer::Config::Probes::LivenessProbe.optional.default(
+     Sbmt::KafkaConsumer::Config::Probes::LivenessProbe.new.freeze
+   )
+
+   attribute :readiness, Sbmt::KafkaConsumer::Config::Probes::ReadinessProbe.optional.default(
+     Sbmt::KafkaConsumer::Config::Probes::ReadinessProbe.new.freeze
+   )
+ end

data/lib/sbmt/kafka_consumer/config/probes/liveness_probe.rb
@@ -0,0 +1,11 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config::Probes::LivenessProbe < Dry::Struct
+   transform_keys(&:to_sym)
+
+   attribute :enabled, Sbmt::KafkaConsumer::Types::Bool.optional.default(true)
+   attribute :path, Sbmt::KafkaConsumer::Types::Strict::String
+     .optional
+     .default("/liveness")
+   attribute :timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(10)
+ end

data/lib/sbmt/kafka_consumer/config/probes/readiness_probe.rb
@@ -0,0 +1,10 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config::Probes::ReadinessProbe < Dry::Struct
+   transform_keys(&:to_sym)
+
+   attribute :enabled, Sbmt::KafkaConsumer::Types::Bool.optional.default(true)
+   attribute :path, Sbmt::KafkaConsumer::Types::Strict::String
+     .optional
+     .default("/readiness/kafka_consumer")
+ end

data/lib/sbmt/kafka_consumer/config/probes.rb
@@ -0,0 +1,8 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config::Probes < Dry::Struct
+   transform_keys(&:to_sym)
+
+   attribute :port, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(9394)
+   attribute :endpoints, Endpoints.optional.default(Endpoints.new.freeze)
+ end

data/lib/sbmt/kafka_consumer/config/topic.rb
@@ -0,0 +1,14 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config::Topic < Dry::Struct
+   transform_keys(&:to_sym)
+
+   attribute :name, Sbmt::KafkaConsumer::Types::Strict::String
+   attribute :consumer, Sbmt::KafkaConsumer::Types::ConfigConsumer
+   attribute :deserializer, Sbmt::KafkaConsumer::Types::ConfigDeserializer
+     .optional
+     .default(Sbmt::KafkaConsumer::Config::Deserializer.new.freeze)
+   attribute :active, Sbmt::KafkaConsumer::Types::Bool.optional.default(true)
+   attribute :manual_offset_management, Sbmt::KafkaConsumer::Types::Bool.optional.default(true)
+   attribute? :kafka_options, Sbmt::KafkaConsumer::Types::ConfigAttrs.optional.default({}.freeze)
+ end

data/lib/sbmt/kafka_consumer/config.rb
@@ -0,0 +1,76 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::Config < Anyway::Config
+   config_name :kafka_consumer
+
+   class << self
+     def coerce_to(struct)
+       lambda do |raw_attrs|
+         struct.new(**raw_attrs)
+       rescue Dry::Types::SchemaError => e
+         raise_validation_error "cannot parse #{struct}: #{e.message}"
+       end
+     end
+
+     def coerce_to_array_of(struct)
+       lambda do |raw_attrs|
+         raw_attrs.keys.map do |obj_title|
+           coerce_to(struct)
+             .call(**raw_attrs.fetch(obj_title)
+                              .merge(id: obj_title))
+         end
+       end
+     end
+   end
+
+   attr_config :client_id,
+     :pause_timeout, :pause_max_timeout, :pause_with_exponential_backoff,
+     :max_wait_time, :shutdown_timeout,
+     concurrency: 4, auth: {}, kafka: {}, consumer_groups: {}, probes: {}, metrics: {},
+     deserializer_class: "::Sbmt::KafkaConsumer::Serialization::NullDeserializer",
+     monitor_class: "::Sbmt::KafkaConsumer::Instrumentation::TracingMonitor",
+     logger_class: "::Sbmt::KafkaConsumer::Logger",
+     logger_listener_class: "::Sbmt::KafkaConsumer::Instrumentation::LoggerListener",
+     metrics_listener_class: "::Sbmt::KafkaConsumer::Instrumentation::YabedaMetricsListener",
+     consumer_mapper_class: "::Sbmt::KafkaConsumer::Routing::KarafkaV1ConsumerMapper"
+
+   required :client_id
+
+   on_load :validate_consumer_groups
+   on_load :set_default_metrics_port
+
+   coerce_types client_id: :string,
+     pause_timeout: :integer,
+     pause_max_timeout: :integer,
+     pause_with_exponential_backoff: :boolean,
+     max_wait_time: :integer,
+     shutdown_timeout: :integer,
+     concurrency: :integer
+
+   coerce_types kafka: coerce_to(Kafka)
+   coerce_types auth: coerce_to(Auth)
+   coerce_types probes: coerce_to(Probes)
+   coerce_types metrics: coerce_to(Metrics)
+   coerce_types consumer_groups: coerce_to_array_of(ConsumerGroup)
+
+   def to_kafka_options
+     kafka.to_kafka_options
+       .merge(auth.to_kafka_options)
+   end
+
+   private
+
+   def validate_consumer_groups
+     consumer_groups.each do |cg|
+       raise_validation_error "consumer group #{cg.id} must have at least one topic defined" if cg.topics.blank?
+       cg.topics.each do |t|
+         raise_validation_error "topic #{cg.id}.topics.name[#{t.name}] contains invalid consumer class: no const #{t.consumer.klass} defined" unless t.consumer.klass.safe_constantize
+         raise_validation_error "topic #{cg.id}.topics.name[#{t.name}] contains invalid deserializer class: no const #{t.deserializer.klass} defined" unless t.deserializer&.klass&.safe_constantize
+       end
+     end
+   end
+
+   def set_default_metrics_port
+     self.metrics = metrics.new(port: probes.port) unless metrics.port
+   end
+ end
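
A hedged sketch of the config file this class loads (via anyway_config's config_name, i.e. config/kafka_consumer.yml in a Rails app). All names below are invented; the install generator's kafka_consumer.yml template is the canonical example:

    # config/kafka_consumer.yml (sketch):
    #
    #   client_id: my-app
    #   auth:
    #     kind: plaintext
    #   kafka:
    #     servers: kafka-1:9092
    #   consumer_groups:
    #     orders:                 # the hash key becomes ConsumerGroup#id
    #       name: orders_group
    #       topics:
    #         - name: orders
    #           consumer:
    #             klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer"
    #
    # The coercions above then expose typed structs:
    config = Sbmt::KafkaConsumer::Config.new
    config.client_id                  # => "my-app"
    config.consumer_groups.first.id   # => "orders"
    config.to_kafka_options           # transport options merged with auth options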

data/lib/sbmt/kafka_consumer/inbox_consumer.rb
@@ -0,0 +1,129 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     class InboxConsumer < BaseConsumer
+       IDEMPOTENCY_HEADER_NAME = "Idempotency-Key"
+       DEFAULT_SOURCE = "KAFKA"
+
+       def self.consumer_klass(inbox_item:, event_name: nil, skip_on_error: false, name: nil)
+         Class.new(self) do
+           const_set(:INBOX_ITEM_CLASS_NAME, inbox_item)
+           const_set(:EVENT_NAME, event_name)
+           const_set(:SKIP_ON_ERROR, skip_on_error)
+
+           def self.name
+             superclass.name
+           end
+         end
+       end
+
+       def extra_message_attrs(_message)
+         {}
+       end
+
+       private
+
+       def process_message(message)
+         logger.tagged(inbox_name: inbox_name, event_name: event_name) do
+           ::Sbmt::KafkaConsumer.monitor.instrument(
+             "consumer.inbox.consumed_one", caller: self,
+             message: message,
+             message_uuid: message_uuid(message),
+             inbox_name: inbox_name,
+             event_name: event_name,
+             status: "success"
+           ) do
+             process_inbox_item(message)
+           end
+         end
+       end
+
+       def process_inbox_item(message)
+         result = Sbmt::Outbox::CreateInboxItem.call(
+           inbox_item_class,
+           attributes: message_attrs(message)
+         )
+
+         if result.failure?
+           raise "Failed consuming message for #{inbox_name}, message_uuid: #{message_uuid(message)}: #{result}"
+         end
+
+         item = result.success
+         item.track_metrics_after_consume if item.respond_to?(:track_metrics_after_consume)
+       rescue ActiveRecord::RecordNotUnique
+         instrument_error("Skipped duplicate message for #{inbox_name}, message_uuid: #{message_uuid(message)}", message, "duplicate")
+       rescue => ex
+         if skip_on_error
+           logger.warn("skipping unprocessable message for #{inbox_name}, message_uuid: #{message_uuid(message)}")
+           instrument_error(ex, message, "skipped")
+         else
+           instrument_error(ex, message)
+         end
+         raise ex
+       end
+
+       def message_attrs(message)
+         attrs = {
+           proto_payload: message.raw_payload,
+           options: {
+             headers: message.metadata.headers.dup,
+             group_id: topic.consumer_group.id,
+             topic: message.metadata.topic,
+             partition: message.metadata.partition,
+             source: DEFAULT_SOURCE
+           }
+         }
+
+         if message_uuid(message)
+           attrs[:uuid] = message_uuid(message)
+         end
+
+         # if the message has no uuid, one is generated later in Sbmt::Outbox::CreateInboxItem
+
+         attrs[:event_key] = if message.metadata.key.present?
+           message.metadata.key
+         elsif inbox_item_class.respond_to?(:event_key)
+           inbox_item_class.event_key(message)
+         else
+           # if the message has no partitioning key, fall back to the offset,
+           # which is unique and monotonically increasing within the partition
+           message.offset
+         end
+
+         attrs[:event_name] = event_name if inbox_item_class.has_attribute?(:event_name)
+
+         attrs.merge(extra_message_attrs(message))
+       end
+
+       def message_uuid(message)
+         message.metadata.headers.fetch(IDEMPOTENCY_HEADER_NAME, nil).presence
+       end
+
+       def inbox_item_class
+         @inbox_item_class ||= self.class::INBOX_ITEM_CLASS_NAME.constantize
+       end
+
+       def event_name
+         @event_name ||= self.class::EVENT_NAME
+       end
+
+       def inbox_name
+         inbox_item_class.box_name
+       end
+
+       def instrument_error(error, message, status = "failure")
+         ::Sbmt::KafkaConsumer.monitor.instrument(
+           "error.occurred",
+           error: error,
+           caller: self,
+           message: message,
+           inbox_name: inbox_name,
+           event_name: event_name,
+           status: status,
+           type: "consumer.inbox.consume_one"
+         )
+       end
+     end
+   end
+ end
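
For context, a hedged sketch of how consumer_klass bakes config into an anonymous subclass; OrderInboxItem is a hypothetical Sbmt::Outbox inbox item class name:

    # Equivalent to what a topic's klass/init_attrs config resolves to
    # via Config::Consumer#consumer_klass.
    klass = Sbmt::KafkaConsumer::InboxConsumer.consumer_klass(
      inbox_item: "OrderInboxItem",   # hypothetical inbox item class
      event_name: "order_created"     # optional; stored in EVENT_NAME
    )
    klass::INBOX_ITEM_CLASS_NAME  # => "OrderInboxItem"
    klass.name                    # => "Sbmt::KafkaConsumer::InboxConsumer" (delegates to superclass)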

data/lib/sbmt/kafka_consumer/instrumentation/base_monitor.rb
@@ -0,0 +1,25 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       class BaseMonitor < Karafka::Instrumentation::Monitor
+         # Karafka consuming is built around batch processing,
+         # so we need these per-message custom events
+         SBMT_KAFKA_CONSUMER_EVENTS = %w[
+           consumer.consumed_one
+           consumer.inbox.consumed_one
+         ].freeze
+
+         def initialize
+           super
+           SBMT_KAFKA_CONSUMER_EVENTS.each { |event_id| notifications_bus.register_event(event_id) }
+         end
+
+         def instrument(_event_id, _payload = EMPTY_HASH, &block)
+           super
+         end
+       end
+     end
+   end
+ end
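
Listeners receive these custom events through Karafka's usual convention: an on_ method named after the event with dots replaced by underscores (the same convention LivenessListener uses below for connection_listener.fetch_loop). A hedged sketch:

    # A hypothetical listener counting per-message events registered above.
    class CountingListener
      def on_consumer_consumed_one(_event)
        @count = (@count || 0) + 1
      end
    end

    Karafka.monitor.subscribe(CountingListener.new)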

data/lib/sbmt/kafka_consumer/instrumentation/chainable_monitor.rb
@@ -0,0 +1,31 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       class ChainableMonitor < BaseMonitor
+         attr_reader :monitors
+
+         def initialize(monitors = [])
+           super()
+
+           @monitors = monitors
+         end
+
+         def instrument(event_id, payload = EMPTY_HASH, &block)
+           return super if monitors.empty?
+
+           chain = monitors.map { |monitor| monitor.new(event_id, payload) }
+           traverse_chain = proc do
+             if chain.empty?
+               super
+             else
+               chain.shift.trace(&traverse_chain)
+             end
+           end
+           traverse_chain.call
+         end
+       end
+     end
+   end
+ end
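
Chain entries are tracer-like classes built with .new(event_id, payload) and responding to trace(&block), where the block continues the chain (this is the shape TracingMonitor presumably feeds in). A hedged sketch of a conforming monitor, assuming only that interface:

    # A hypothetical chain member that logs how long each event took.
    class ElapsedLogger
      def initialize(event_id, payload)
        @event_id = event_id
        @payload = payload
      end

      def trace
        started_at = Process.clock_gettime(Process::CLOCK_MONOTONIC)
        yield   # continue the chain (and, at the end, the actual instrumentation)
      ensure
        elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - started_at
        Sbmt::KafkaConsumer.logger.info("#{@event_id} took #{elapsed.round(3)}s")
      end
    end

    monitor = Sbmt::KafkaConsumer::Instrumentation::ChainableMonitor.new([ElapsedLogger])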

data/lib/sbmt/kafka_consumer/instrumentation/listener_helper.rb
@@ -0,0 +1,47 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       module ListenerHelper
+         delegate :logger, to: ::Sbmt::KafkaConsumer
+
+         private
+
+         def consumer_tags(event)
+           message = event[:message]
+           {
+             topic: message.metadata.topic,
+             partition: message.metadata.partition
+           }
+         end
+
+         def inbox_tags(event)
+           {
+             inbox_name: event[:inbox_name],
+             event_name: event[:event_name],
+             status: event[:status]
+           }
+         end
+
+         def error_message(error)
+           if error.respond_to?(:message)
+             error.message
+           elsif error.respond_to?(:failure)
+             error.failure
+           else
+             error.to_s
+           end
+         end
+
+         def log_backtrace(error)
+           if error.respond_to?(:backtrace)
+             logger.error(error.backtrace.join("\n"))
+           elsif error.respond_to?(:trace)
+             logger.error(error.trace)
+           end
+         end
+       end
+     end
+   end
+ end

data/lib/sbmt/kafka_consumer/instrumentation/liveness_listener.rb
@@ -0,0 +1,71 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Instrumentation
+       class LivenessListener
+         include ListenerHelper
+         include KafkaConsumer::Probes::Probe
+
+         def initialize(timeout_sec: 10)
+           @consumer_groups = Karafka::App.routes.map(&:name)
+           @timeout_sec = timeout_sec
+           @polls = {}
+
+           setup_subscription
+         end
+
+         def probe(_env)
+           now = current_time
+           timed_out_polls = select_timed_out_polls(now)
+           return probe_ok groups: meta_from_polls(polls, now) if timed_out_polls.empty?
+
+           probe_error failed_groups: meta_from_polls(timed_out_polls, now)
+         end
+
+         def on_connection_listener_fetch_loop(event)
+           consumer_group = event.payload[:subscription_group].consumer_group
+           polls[consumer_group.name] = current_time
+         end
+
+         private
+
+         attr_reader :polls, :timeout_sec, :consumer_groups
+
+         def current_time
+           Time.now.utc
+         end
+
+         def select_timed_out_polls(now)
+           raise "consumer_groups are empty. Please set them up" if consumer_groups.empty?
+
+           consumer_groups.each_with_object({}) do |group, hash|
+             last_poll_at = polls[group]
+             next if last_poll_at && last_poll_at + timeout_sec >= now
+
+             hash[group] = last_poll_at
+           end
+         end
+
+         def meta_from_polls(polls, now)
+           polls.each_with_object({}) do |(group, last_poll_at), hash|
+             if last_poll_at.nil?
+               hash[group] = {had_poll: false}
+               next
+             end
+
+             hash[group] = {
+               had_poll: true,
+               last_poll_at: last_poll_at,
+               seconds_since_last_poll: (now - last_poll_at).to_i
+             }
+           end
+         end
+
+         def setup_subscription
+           Karafka::App.monitor.subscribe(self)
+         end
+       end
+     end
+   end
+ end
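
A hedged usage sketch; the probe_ok/probe_error response format comes from Probes::Probe (not shown in this diff), so assume a Rack-style response served by the probes host:

    # Subscribes itself to Karafka's monitor on construction and records the last
    # fetch-loop poll per consumer group; probe(env) then reports any group that
    # has not polled within timeout_sec.
    listener = Sbmt::KafkaConsumer::Instrumentation::LivenessListener.new(timeout_sec: 30)

    # Invoked by the probes HTTP endpoint (see probes/host.rb); env is Rack-style.
    listener.probe({})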