sbmt-kafka_producer 2.0.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/.rubocop.yml +34 -0
- data/Appraisals +24 -0
- data/CHANGELOG.md +166 -0
- data/Gemfile +5 -0
- data/LICENSE +21 -0
- data/README.md +167 -0
- data/Rakefile +12 -0
- data/dip.yml +67 -0
- data/docker-compose.yml +15 -0
- data/lefthook-local.dip_example.yml +4 -0
- data/lefthook.yml +6 -0
- data/lib/generators/kafka_producer/concerns/configuration.rb +30 -0
- data/lib/generators/kafka_producer/install/USAGE +8 -0
- data/lib/generators/kafka_producer/install/install_generator.rb +18 -0
- data/lib/generators/kafka_producer/install/templates/kafka_producer.yml +36 -0
- data/lib/generators/kafka_producer/outbox_producer/USAGE +9 -0
- data/lib/generators/kafka_producer/outbox_producer/outbox_producer_generator.rb +24 -0
- data/lib/generators/kafka_producer/producer/USAGE +10 -0
- data/lib/generators/kafka_producer/producer/producer_generator.rb +18 -0
- data/lib/generators/kafka_producer/producer/templates/producer.rb.erb +11 -0
- data/lib/sbmt/kafka_producer/base_producer.rb +103 -0
- data/lib/sbmt/kafka_producer/config/auth.rb +62 -0
- data/lib/sbmt/kafka_producer/config/kafka.rb +37 -0
- data/lib/sbmt/kafka_producer/config/producer.rb +51 -0
- data/lib/sbmt/kafka_producer/error_tracker.rb +31 -0
- data/lib/sbmt/kafka_producer/instrumentation/open_telemetry_loader.rb +23 -0
- data/lib/sbmt/kafka_producer/instrumentation/open_telemetry_tracer.rb +58 -0
- data/lib/sbmt/kafka_producer/instrumentation/tracing_middleware.rb +15 -0
- data/lib/sbmt/kafka_producer/instrumentation/yabeda_metrics_listener.rb +88 -0
- data/lib/sbmt/kafka_producer/kafka_client_factory.rb +61 -0
- data/lib/sbmt/kafka_producer/logger.rb +25 -0
- data/lib/sbmt/kafka_producer/outbox_producer.rb +11 -0
- data/lib/sbmt/kafka_producer/outbox_transport_factory.rb +13 -0
- data/lib/sbmt/kafka_producer/railtie.rb +16 -0
- data/lib/sbmt/kafka_producer/testing/configure_producer_client.rb +13 -0
- data/lib/sbmt/kafka_producer/testing.rb +5 -0
- data/lib/sbmt/kafka_producer/types.rb +12 -0
- data/lib/sbmt/kafka_producer/version.rb +7 -0
- data/lib/sbmt/kafka_producer/yabeda_configurer.rb +62 -0
- data/lib/sbmt/kafka_producer.rb +42 -0
- data/rubocop/rspec.yml +29 -0
- data/sbmt-kafka_producer.gemspec +59 -0
- metadata +427 -0
data/lib/generators/kafka_producer/producer/producer_generator.rb

```diff
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require "rails/generators"
+
+module KafkaProducer
+  module Generators
+    class ProducerGenerator < Rails::Generators::NamedBase
+      source_root File.expand_path("templates", __dir__)
+
+      argument :producer_type, type: :string, banner: "sync/async"
+      argument :topic, type: :string, banner: "topic"
+
+      def insert_producer_class
+        template "producer.rb.erb", "app/producers/#{file_path}_producer.rb"
+      end
+    end
+  end
+end
```
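The generator is normally run from the command line; as a rough sketch (the `order`, `sync`, and `orders` arguments are illustrative, not part of the gem), an equivalent programmatic invocation inside a Rails app would be:

```ruby
require "rails/generators"

# Illustrative arguments: a producer named "order", sync publishing, topic "orders".
# The CLI equivalent would be `rails generate kafka_producer:producer order sync orders`.
Rails::Generators.invoke("kafka_producer:producer", %w[order sync orders])
```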
data/lib/generators/kafka_producer/producer/templates/producer.rb.erb

```diff
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+<%- module_namespacing do -%>
+class <%= "#{name.classify}Producer" %> < Sbmt::KafkaProducer::BaseProducer
+  option :topic, default: -> { "<%= topic %>" }
+
+  def publish(payload, **options)
+    <%= ("#{producer_type}_publish") %>(payload, options)
+  end
+end
+<%- end -%>
```
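For illustration, with the hypothetical arguments above (name `order`, type `sync`, topic `orders`) the template would expand to roughly this file:

```ruby
# app/producers/order_producer.rb (hypothetical generated output)
# frozen_string_literal: true

class OrderProducer < Sbmt::KafkaProducer::BaseProducer
  option :topic, default: -> { "orders" }

  def publish(payload, **options)
    sync_publish(payload, options)
  end
end
```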
data/lib/sbmt/kafka_producer/base_producer.rb

```diff
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    class BaseProducer
+      extend Dry::Initializer
+
+      option :client, default: -> { KafkaClientFactory.default_client }
+      option :topic
+
+      def sync_publish!(payload, options = {})
+        around_publish do
+          client.produce_sync(payload: payload, **options.merge(topic: topic))
+        end
+        true
+      end
+
+      def sync_publish(payload, options = {})
+        sync_publish!(payload, options)
+        true
+      rescue WaterDrop::Errors::ProduceError => e
+        log_error(e)
+        false
+      end
+
+      def async_publish!(payload, options = {})
+        around_publish do
+          client.produce_async(payload: payload, **options.merge(topic: topic))
+        end
+        true
+      end
+
+      def async_publish(payload, options = {})
+        async_publish!(payload, options)
+        true
+      rescue WaterDrop::Errors::ProduceError => e
+        log_error(e)
+        false
+      end
+
+      private
+
+      def logger
+        ::Sbmt::KafkaProducer.logger
+      end
+
+      def around_publish
+        with_sentry_transaction { yield }
+      end
+
+      def with_sentry_transaction
+        return yield unless defined?(::Sentry)
+        return yield unless ::Sentry.initialized?
+
+        transaction = ::Sentry.start_transaction(
+          name: "Karafka/#{self.class.name}",
+          op: "kafka-producer"
+        )
+
+        # Tracing is disabled by config
+        return yield unless transaction
+
+        result = nil
+        transaction.with_child_span do |span|
+          span.set_data(:topic, topic)
+          result = yield
+        end
+
+        transaction.finish
+        result
+      end
+
+      def ignore_kafka_errors?
+        config.ignore_kafka_error.to_s == "true"
+      end
+
+      def log_error(error)
+        return true if ignore_kafka_errors?
+
+        logger.error "KAFKA ERROR: #{format_exception_error(error)}\n#{error.backtrace.join("\n")}"
+        ErrorTracker.error(error)
+      end
+
+      def format_exception_error(error)
+        text = "#{format_exception_error(error.cause)}. " if with_cause?(error)
+
+        if error.respond_to?(:message)
+          "#{text}#{error.class.name} #{error.message}"
+        else
+          "#{text}#{error}"
+        end
+      end
+
+      def with_cause?(error)
+        error.respond_to?(:cause) && error.cause.present?
+      end
+
+      def config
+        Config::Producer
+      end
+    end
+  end
+end
```
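A minimal usage sketch (the topic and payload are illustrative): the bang variants let `WaterDrop::Errors::ProduceError` propagate, while the non-bang variants rescue it, log via `log_error`, and return `false` instead.

```ruby
producer = Sbmt::KafkaProducer::BaseProducer.new(topic: "orders") # illustrative topic

producer.sync_publish!('{"id":42}', key: "42") # raises WaterDrop::Errors::ProduceError on failure
producer.async_publish('{"id":42}')            # => true, or false after the error is logged
```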
data/lib/sbmt/kafka_producer/config/auth.rb

```diff
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    module Config
+      class Auth < Dry::Struct
+        transform_keys(&:to_sym)
+
+        AVAILABLE_AUTH_KINDS = %w[plaintext sasl_plaintext].freeze
+        DEFAULT_AUTH_KIND = "plaintext"
+
+        AVAILABLE_SASL_MECHANISMS = %w[PLAIN SCRAM-SHA-256 SCRAM-SHA-512].freeze
+        DEFAULT_SASL_MECHANISM = "SCRAM-SHA-512"
+
+        attribute :kind, Sbmt::KafkaProducer::Types::Strict::String
+          .default(DEFAULT_AUTH_KIND)
+          .enum(*AVAILABLE_AUTH_KINDS)
+        attribute? :sasl_mechanism, Sbmt::KafkaProducer::Types::Strict::String
+          .default(DEFAULT_SASL_MECHANISM)
+          .enum(*AVAILABLE_SASL_MECHANISMS)
+        attribute? :sasl_username, Sbmt::KafkaProducer::Types::Strict::String
+        attribute? :sasl_password, Sbmt::KafkaProducer::Types::Strict::String
+
+        def to_kafka_options
+          ensure_options_are_valid
+
+          opts = {}
+
+          case kind
+          when "sasl_plaintext"
+            opts.merge!(
+              "security.protocol": kind,
+              "sasl.mechanism": sasl_mechanism,
+              "sasl.username": sasl_username,
+              "sasl.password": sasl_password
+            )
+          when "plaintext"
+            opts[:"security.protocol"] = kind
+          else
+            raise Anyway::Config::ValidationError, "unknown auth kind: #{kind}"
+          end
+
+          opts.symbolize_keys
+        end
+
+        private
+
+        def ensure_options_are_valid
+          raise Anyway::Config::ValidationError, "unknown auth kind: #{kind}" unless AVAILABLE_AUTH_KINDS.include?(kind)
+
+          case kind
+          when "sasl_plaintext"
+            raise Anyway::Config::ValidationError, "sasl_username is required for #{kind} auth kind" if sasl_username.blank?
+            raise Anyway::Config::ValidationError, "sasl_password is required for #{kind} auth kind" if sasl_password.blank?
+            raise Anyway::Config::ValidationError, "sasl_mechanism is required for #{kind} auth kind" if sasl_mechanism.blank?
+            raise Anyway::Config::ValidationError, "invalid sasl_mechanism for #{kind} auth kind, available options are: [#{AVAILABLE_SASL_MECHANISMS.join(",")}]" unless AVAILABLE_SASL_MECHANISMS.include?(sasl_mechanism)
+          end
+        end
+      end
+    end
+  end
+end
```
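A sketch of the mapping (credential values are illustrative): `to_kafka_options` validates the struct and emits librdkafka-style keys, with the SASL mechanism defaulting to SCRAM-SHA-512.

```ruby
auth = Sbmt::KafkaProducer::Config::Auth.new(
  kind: "sasl_plaintext",
  sasl_username: "user",
  sasl_password: "secret"
)

auth.to_kafka_options
# => {:"security.protocol"=>"sasl_plaintext",
#     :"sasl.mechanism"=>"SCRAM-SHA-512",  # the default mechanism
#     :"sasl.username"=>"user",
#     :"sasl.password"=>"secret"}
```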
data/lib/sbmt/kafka_producer/config/kafka.rb

```diff
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    module Config
+      class Kafka < Dry::Struct
+        transform_keys(&:to_sym)
+
+        # srv1:port1,srv2:port2,...
+        SERVERS_REGEXP = /^[a-z\d.\-:]+(,[a-z\d.\-:]+)*$/.freeze
+
+        attribute :servers, Sbmt::KafkaProducer::Types::String.constrained(format: SERVERS_REGEXP)
+
+        # defaults are rdkafka's
+        # see https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md
+        attribute :connect_timeout, Sbmt::KafkaProducer::Types::Coercible::Integer.optional.default(1)
+        attribute :ack_timeout, Sbmt::KafkaProducer::Types::Coercible::Integer.optional.default(1)
+        attribute :required_acks, Sbmt::KafkaProducer::Types::Coercible::Integer.optional.default(-1)
+        attribute :max_retries, Sbmt::KafkaProducer::Types::Coercible::Integer.optional.default(2)
+        attribute :retry_backoff, Sbmt::KafkaProducer::Types::Coercible::Integer.optional.default(1)
+
+        attribute :kafka_config, Sbmt::KafkaProducer::Types::ConfigAttrs.optional.default({}.freeze)
+
+        def to_kafka_options
+          kafka_config.merge(
+            "bootstrap.servers": servers,
+            "socket.connection.setup.timeout.ms": connect_timeout.to_f * 1000,
+            "request.timeout.ms": ack_timeout.to_f * 1000,
+            "request.required.acks": required_acks,
+            "message.send.max.retries": max_retries,
+            "retry.backoff.ms": retry_backoff.to_f * 1000
+          ).symbolize_keys
+        end
+      end
+    end
+  end
+end
```
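For illustration (server address is hypothetical), timeouts are given in seconds and converted into librdkafka's millisecond keys:

```ruby
kafka = Sbmt::KafkaProducer::Config::Kafka.new(servers: "kafka:9092", connect_timeout: 2)

kafka.to_kafka_options
# => {:"bootstrap.servers"=>"kafka:9092",
#     :"socket.connection.setup.timeout.ms"=>2000.0,
#     :"request.timeout.ms"=>1000.0,
#     :"request.required.acks"=>-1,
#     :"message.send.max.retries"=>2,
#     :"retry.backoff.ms"=>1000.0}
```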
data/lib/sbmt/kafka_producer/config/producer.rb

```diff
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    module Config
+      class Producer < Anyway::Config
+        class << self
+          # Make it possible to access a singleton config instance
+          # via class methods (i.e., without explicitly calling `instance`)
+          delegate_missing_to :instance
+
+          def coerce_to(struct)
+            lambda do |raw_attrs|
+              struct.new(**raw_attrs)
+            rescue Dry::Types::SchemaError => e
+              raise_validation_error "cannot parse #{struct}: #{e.message}"
+            end
+          end
+
+          private
+
+          # Returns a singleton config instance
+          def instance
+            @instance ||= new
+          end
+        end
+
+        config_name :kafka_producer
+
+        attr_config :ignore_kafka_error, :deliver, :wait_on_queue_full,
+          :max_payload_size, :max_wait_timeout, :wait_timeout,
+          :wait_on_queue_full_timeout,
+          auth: {}, kafka: {},
+          logger_class: "::Sbmt::KafkaProducer::Logger",
+          metrics_listener_class: "::Sbmt::KafkaProducer::Instrumentation::YabedaMetricsListener"
+
+        coerce_types ignore_kafka_error: :boolean,
+          deliver: :boolean, wait_on_queue_full: :boolean,
+          max_payload_size: :integer, max_wait_timeout: :integer,
+          wait_timeout: :float, wait_on_queue_full_timeout: :float
+        coerce_types kafka: coerce_to(Kafka)
+        coerce_types auth: coerce_to(Auth)
+
+        def to_kafka_options
+          kafka.to_kafka_options
+            .merge(auth.to_kafka_options)
+        end
+      end
+    end
+  end
+end
```
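Because the class inherits from Anyway::Config with `config_name :kafka_producer`, settings are read from `config/kafka_producer.yml` (the install generator's template) or from `KAFKA_PRODUCER_*` environment variables, and `delegate_missing_to :instance` exposes them as class methods. A sketch, assuming the hypothetical environment in the comments:

```ruby
# Hypothetical environment for illustration (Anyway Config's double-underscore nesting):
#   KAFKA_PRODUCER_KAFKA__SERVERS=kafka:9092
#   KAFKA_PRODUCER_AUTH__KIND=plaintext

config = Sbmt::KafkaProducer::Config::Producer
config.kafka.servers     # => "kafka:9092" (coerced into Config::Kafka)
config.to_kafka_options  # kafka options merged with auth options
```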
data/lib/sbmt/kafka_producer/error_tracker.rb

```diff
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    class ErrorTracker
+      class << self
+        def error(arr)
+          logging(:error, arr)
+        end
+
+        private
+
+        def logging(level, arr)
+          return unless defined?(::Sentry)
+
+          sentry_logging(level, arr) if ::Sentry.initialized?
+        end
+
+        def sentry_logging(level, arr)
+          Sentry.with_scope do |_scope|
+            if arr.is_a?(Exception)
+              Sentry.capture_exception(arr, level: level)
+            else
+              Sentry.capture_message(arr, level: level)
+            end
+          end
+        end
+      end
+    end
+  end
+end
```
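A sketch of the intended use (this is what `BaseProducer#log_error` calls): the tracker is a no-op unless Sentry is both defined and initialized.

```ruby
begin
  raise "kafka delivery failed" # illustrative failure
rescue => e
  # Forwards to Sentry.capture_exception(e, level: :error) when Sentry is initialized;
  # silently does nothing otherwise.
  Sbmt::KafkaProducer::ErrorTracker.error(e)
end
```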
data/lib/sbmt/kafka_producer/instrumentation/open_telemetry_loader.rb

```diff
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require "opentelemetry"
+require "opentelemetry-common"
+require "opentelemetry-instrumentation-base"
+
+require_relative "open_telemetry_tracer"
+
+module Sbmt
+  module KafkaProducer
+    module Instrumentation
+      class OpenTelemetryLoader < ::OpenTelemetry::Instrumentation::Base
+        install do |_config|
+          OpenTelemetryTracer.enabled = true
+        end
+
+        present do
+          defined?(OpenTelemetryTracer)
+        end
+      end
+    end
+  end
+end
```
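The loader plugs into OpenTelemetry's instrumentation registry; a sketch of enabling it from an application initializer (assumes the opentelemetry-sdk gem is installed):

```ruby
require "opentelemetry/sdk"

OpenTelemetry::SDK.configure do |c|
  # Runs the `install` block above, which flips OpenTelemetryTracer.enabled to true.
  c.use "Sbmt::KafkaProducer::Instrumentation::OpenTelemetryLoader"
end
```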
data/lib/sbmt/kafka_producer/instrumentation/open_telemetry_tracer.rb

```diff
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    module Instrumentation
+      class OpenTelemetryTracer
+        class << self
+          def enabled?
+            !!@enabled
+          end
+
+          attr_writer :enabled
+        end
+
+        def enabled?
+          self.class.enabled?
+        end
+
+        def call(message)
+          return message unless enabled?
+
+          topic = message[:topic]
+          attributes = {
+            "messaging.system" => "kafka",
+            "messaging.destination" => topic,
+            "messaging.destination_kind" => "topic"
+          }
+
+          message_key = extract_message_key(message[:key])
+          attributes["messaging.kafka.message_key"] = message_key if message_key
+
+          message[:headers] ||= {}
+
+          tracer.in_span("#{topic} publish", attributes: attributes, kind: :producer) do
+            ::OpenTelemetry.propagation.inject(message[:headers])
+          end
+
+          message
+        end
+
+        private
+
+        def tracer
+          ::Sbmt::KafkaProducer::Instrumentation::OpenTelemetryLoader.instance.tracer
+        end
+
+        def extract_message_key(key)
+          # skip encode if already valid utf8
+          return key if key.nil? || (key.encoding == Encoding::UTF_8 && key.valid_encoding?)
+
+          key.encode(Encoding::UTF_8)
+        rescue Encoding::UndefinedConversionError
+          nil
+        end
+      end
+    end
+  end
+end
```
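A sketch of the middleware contract: `call` receives the WaterDrop message hash, opens a producer span, and injects trace-context headers in place (the actual header values depend on the installed tracer provider; the message content is illustrative).

```ruby
Sbmt::KafkaProducer::Instrumentation::OpenTelemetryTracer.enabled = true

message = {topic: "orders", payload: "{}", key: "42"} # illustrative message
traced = Sbmt::KafkaProducer::Instrumentation::OpenTelemetryTracer.new.call(message)

traced[:headers] # e.g. {"traceparent" => "00-..."} once an SDK tracer provider is configured
```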
data/lib/sbmt/kafka_producer/instrumentation/tracing_middleware.rb

```diff
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    module Instrumentation
+      class TracingMiddleware < ::WaterDrop::Middleware
+        def initialize
+          super
+
+          append(OpenTelemetryTracer.new) if defined?(OpenTelemetryTracer)
+        end
+      end
+    end
+  end
+end
```
data/lib/sbmt/kafka_producer/instrumentation/yabeda_metrics_listener.rb

```diff
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    module Instrumentation
+      class YabedaMetricsListener
+        DEFAULT_CLIENT = {client: "waterdrop"}.freeze
+        def on_statistics_emitted(event)
+          # https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md
+          stats = event.payload[:statistics]
+          broker_stats = stats["brokers"]
+
+          report_broker_stats(broker_stats)
+        end
+
+        def on_error_occurred(event)
+          tags = {topic: event[:topic]}.merge!(DEFAULT_CLIENT) if event.payload.include?(:topic)
+
+          case event[:type]
+          when "message.produce_sync", "message.produce_async"
+            Yabeda.kafka_producer.produce_errors
+              .increment(produce_base_tags(event))
+          when "librdkafka.dispatch_error"
+            Yabeda.kafka_producer.deliver_errors
+              .increment(tags)
+          end
+        end
+
+        %i[produced_sync produced_async].each do |event_scope|
+          define_method(:"on_message_#{event_scope}") do |event|
+            Yabeda.kafka_producer.produced_messages
+              .increment(produce_base_tags(event))
+
+            Yabeda.kafka_producer.message_size
+              .measure(produce_base_tags(event), event[:message].to_s.bytesize)
+
+            Yabeda.kafka_producer.deliver_latency
+              .measure(produce_base_tags(event), event[:time])
+          end
+        end
+
+        def on_message_buffered(event)
+          Yabeda.kafka_producer.buffer_size
+            .measure(DEFAULT_CLIENT, event[:buffer].size)
+        end
+
+        def on_message_acknowledged(event)
+          tag = {topic: event[:topic]}.merge!(DEFAULT_CLIENT)
+
+          Yabeda.kafka_producer.deliver_messages
+            .increment(tag)
+        end
+
+        private
+
+        def produce_base_tags(event)
+          {
+            client: DEFAULT_CLIENT[:client],
+            topic: event[:message][:topic]
+          }
+        end
+
+        def report_broker_stats(brokers)
+          brokers.each_value do |broker_statistics|
+            # Skip bootstrap nodes
+            next if broker_statistics["nodeid"] == -1
+
+            broker_tags = {
+              client: DEFAULT_CLIENT[:client],
+              broker: broker_statistics["nodename"]
+            }
+
+            Yabeda.kafka_api.calls
+              .increment(broker_tags, by: broker_statistics["tx"])
+            Yabeda.kafka_api.latency
+              .measure(broker_tags, broker_statistics["rtt"]["avg"])
+            Yabeda.kafka_api.request_size
+              .measure(broker_tags, broker_statistics["txbytes"])
+            Yabeda.kafka_api.response_size
+              .measure(broker_tags, broker_statistics["rxbytes"])
+            Yabeda.kafka_api.errors
+              .increment(broker_tags, by: broker_statistics["txerrs"] + broker_statistics["rxerrs"])
+          end
+        end
+      end
+    end
+  end
+end
```
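The listener is subscribed to a producer's monitor (KafkaClientFactory does this automatically via `metrics_listener_class`); a manual sketch:

```ruby
producer = WaterDrop::Producer.new

# WaterDrop's monitor fans instrumentation events (message.produced_sync,
# statistics.emitted, error.occurred, ...) out to subscribed listeners.
producer.monitor.subscribe(
  Sbmt::KafkaProducer::Instrumentation::YabedaMetricsListener.new
)
```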
data/lib/sbmt/kafka_producer/kafka_client_factory.rb

```diff
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    class KafkaClientFactory
+      class << self
+        def default_client
+          @default_client ||= ConnectionPool::Wrapper.new do
+            WaterDrop::Producer.new do |config|
+              configure_client(config)
+            end
+          end
+        end
+
+        def build(kafka = {})
+          return default_client if kafka.empty?
+
+          ConnectionPool::Wrapper.new do
+            WaterDrop::Producer.new do |config|
+              configure_client(config, kafka)
+            end
+          end
+        end
+
+        private
+
+        def configure_client(kafka_config, kafka_options = {})
+          kafka_config.logger = config.logger_class.classify.constantize.new
+          kafka_config.kafka = config.to_kafka_options.merge(custom_kafka_config(kafka_options)).symbolize_keys
+
+          kafka_config.middleware = Instrumentation::TracingMiddleware.new
+
+          kafka_config.deliver = config.deliver if config.deliver.present?
+          kafka_config.wait_on_queue_full = config.wait_on_queue_full if config.wait_on_queue_full.present?
+          kafka_config.max_payload_size = config.max_payload_size if config.max_payload_size.present?
+          kafka_config.max_wait_timeout = config.max_wait_timeout if config.max_wait_timeout.present?
+          kafka_config.wait_timeout = config.wait_timeout if config.wait_timeout.present?
+          kafka_config.wait_on_queue_full_timeout = config.wait_on_queue_full_timeout if config.wait_on_queue_full_timeout.present?
+
+          kafka_config.monitor.subscribe(config.metrics_listener_class.classify.constantize.new)
+        end
+
+        def custom_kafka_config(kafka_options)
+          result = {}
+
+          result["socket.connection.setup.timeout.ms"] = kafka_options["connect_timeout"].to_f * 1000 if kafka_options.key?("connect_timeout")
+          result["request.timeout.ms"] = kafka_options["ack_timeout"].to_f * 1000 if kafka_options.key?("ack_timeout")
+          result["request.required.acks"] = kafka_options["required_acks"] if kafka_options.key?("required_acks")
+          result["message.send.max.retries"] = kafka_options["max_retries"] if kafka_options.key?("max_retries")
+          result["retry.backoff.ms"] = kafka_options["retry_backoff"].to_f * 1000 if kafka_options.key?("retry_backoff")
+
+          result
+        end
+
+        def config
+          Config::Producer
+        end
+      end
+    end
+  end
+end
```
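A sketch of per-producer overrides (values illustrative): `build` expects string keys, and `custom_kafka_config` converts second-based timeouts into the corresponding `*.ms` keys.

```ruby
client = Sbmt::KafkaProducer::KafkaClientFactory.build(
  "required_acks" => 1,
  "retry_backoff" => 2 # seconds; becomes "retry.backoff.ms" => 2000.0
)

client.produce_sync(topic: "orders", payload: "{}") # illustrative produce call
```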
data/lib/sbmt/kafka_producer/logger.rb

```diff
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    class Logger
+      delegate :logger, to: :Rails
+
+      %i[
+        debug
+        info
+        warn
+        error
+        fatal
+      ].each do |log_level|
+        define_method log_level do |*args|
+          logger.send(log_level, *args)
+        end
+      end
+
+      def add(...)
+        logger.add(...)
+      end
+    end
+  end
+end
```
data/lib/sbmt/kafka_producer/outbox_transport_factory.rb

```diff
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    class OutboxTransportFactory
+      class << self
+        def build(topic:, kafka: {})
+          OutboxProducer.new(topic: topic, client: KafkaClientFactory.build(kafka))
+        end
+      end
+    end
+  end
+end
```
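A sketch of what an outbox integration ultimately gets from the factory (topic and options illustrative):

```ruby
transport = Sbmt::KafkaProducer::OutboxTransportFactory.build(
  topic: "orders",
  kafka: {"required_acks" => -1} # optional per-transport client overrides
)
# => an OutboxProducer bound to a dedicated WaterDrop client
```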
data/lib/sbmt/kafka_producer/railtie.rb

```diff
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    class Railtie < Rails::Railtie
+      initializer "sbmt_kafka_producer_yabeda.configure_rails_initialization" do
+        YabedaConfigurer.configure
+      end
+
+      initializer "sbmt_kafka_producer_opentelemetry_init.configure_rails_initialization",
+        after: "opentelemetry.configure" do
+        require "sbmt/kafka_producer/instrumentation/open_telemetry_loader" if defined?(::OpenTelemetry)
+      end
+    end
+  end
+end
```
data/lib/sbmt/kafka_producer/testing/configure_producer_client.rb

```diff
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+  config.before(:each) do
+    allow(Sbmt::KafkaProducer::KafkaClientFactory)
+      .to receive(:default_client)
+      .and_return(instance_double(WaterDrop::Producer, {produce_sync: true, produce_async: true}))
+
+    allow(Sbmt::KafkaProducer::KafkaClientFactory)
+      .to receive(:build)
+      .and_return(instance_double(WaterDrop::Producer, {produce_sync: true, produce_async: true}))
+  end
+end
```
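With these stubs active (loaded via `require "sbmt/kafka_producer/testing"` in spec setup), producer specs need no real broker; a sketch with a hypothetical OrderProducer:

```ruby
RSpec.describe OrderProducer do # hypothetical producer class
  it "reports successful publishing without a real broker" do
    expect(described_class.new.publish('{"id":42}')).to be(true)
  end
end
```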
data/lib/sbmt/kafka_producer/types.rb

```diff
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaProducer
+    module Types
+      include Dry.Types
+
+      ConfigAttrs = Dry::Types["hash"].constructor { |hsh| hsh.deep_symbolize_keys }
+      ConfigProducer = Types.Constructor(Config::Producer)
+    end
+  end
+end
```
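`ConfigAttrs` deep-symbolizes arbitrary rdkafka config hashes before they are merged into the client options; a small sketch:

```ruby
Sbmt::KafkaProducer::Types::ConfigAttrs["compression.codec" => "lz4"]
# => {:"compression.codec" => "lz4"}
```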
|