sbmt-kafka_consumer 2.4.1 → 2.6.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +16 -0
- data/README.md +1 -0
- data/lib/sbmt/kafka_consumer/base_consumer.rb +24 -3
- data/lib/sbmt/kafka_consumer/config/probes/liveness_probe.rb +1 -0
- data/lib/sbmt/kafka_consumer/instrumentation/base_monitor.rb +2 -0
- data/lib/sbmt/kafka_consumer/instrumentation/liveness_listener.rb +23 -3
- data/lib/sbmt/kafka_consumer/instrumentation/logger_listener.rb +8 -0
- data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer.rb +16 -0
- data/lib/sbmt/kafka_consumer/instrumentation/sentry_tracer.rb +7 -1
- data/lib/sbmt/kafka_consumer/probes/host.rb +1 -1
- data/lib/sbmt/kafka_consumer/probes/probe.rb +1 -0
- data/lib/sbmt/kafka_consumer/version.rb +1 -1
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 167a1f001609eaac9e7cfb33fbd19422a89711d099dedd4b4bdc96b63731562a
+  data.tar.gz: fe081b40a7c1d2462b95874bb9c089b7604db7c7b15e7717ce904462b90383e3
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: de1d6a79ee8fa4eb55830b186037b17b5d900cb6fba4b19556dbc78d62da7d9d8a70dd1b5c829a99dfb88ea60b78ed49ec61c75b00d377311af693167924f4b8
+  data.tar.gz: d2fd8c167da6f471d00a5cad5542fdc0cd5815a3f0680a1bd9404203ce982bddc3b3d50ce524448113cdf97f9d5cebc56c2e23820c5d53a242f2a98e698d60fd
data/CHANGELOG.md
CHANGED
@@ -13,6 +13,22 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 
 ### Fixed
 
+## [2.6.0] - 2024-07-01
+
+### Added
+
+- Added instrumentation for methods `process_message` and `mark_as_consumed!`
+
+### Fixed
+
+- From `do_consume(message)` to `yield`
+
+## [2.5.0] - 2024-06-24
+
+### Added
+
+- Added option `max_error_count` for liveness probes, which is triggered when `librdkafka.error`
+
 ## [2.4.1] - 2024-06-15
 
 ### Fixed
data/lib/sbmt/kafka_consumer/base_consumer.rb
CHANGED
@@ -52,7 +52,7 @@ module Sbmt
           "consumer.consumed_one",
           caller: self, message: message, trace_id: trace_id
         ) do
-          do_consume(message)
+          yield
         rescue SkipUndeserializableMessage => ex
           instrument_error(ex, message)
           logger.warn("skipping undeserializable message: #{ex.message}")
@@ -87,6 +87,23 @@ module Sbmt
         end
       end
 
+      def with_common_instrumentation(name, message)
+        @trace_id = SecureRandom.base58
+
+        logger.tagged(
+          trace_id: trace_id
+        ) do
+          ::Sbmt::KafkaConsumer.monitor.instrument(
+            "consumer.#{name}",
+            caller: self,
+            message: message,
+            trace_id: trace_id
+          ) do
+            yield
+          end
+        end
+      end
+
       def do_consume(message)
         log_message(message) if log_payload?
 
@@ -94,9 +111,13 @@ module Sbmt
         # so we trigger it explicitly to catch undeserializable message early
         message.payload
 
-        call_middlewares(message, middlewares) { process_message(message) }
+        with_common_instrumentation("process_message", message) do
+          call_middlewares(message, middlewares) { process_message(message) }
+        end
 
-        mark_as_consumed
+        with_common_instrumentation("mark_as_consumed", message) do
+          mark_as_consumed!(message)
+        end
       end
 
       def skip_on_error
data/lib/sbmt/kafka_consumer/config/probes/liveness_probe.rb
CHANGED
@@ -8,4 +8,5 @@ class Sbmt::KafkaConsumer::Config::Probes::LivenessProbe < Dry::Struct
     .optional
     .default("/liveness")
   attribute :timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(10)
+  attribute :max_error_count, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(10)
 end
data/lib/sbmt/kafka_consumer/instrumentation/liveness_listener.rb
CHANGED
@@ -7,9 +7,14 @@ module Sbmt
        include ListenerHelper
        include KafkaConsumer::Probes::Probe
 
-       def initialize(timeout_sec: 10)
+       ERROR_TYPE = "Liveness probe error"
+
+       def initialize(timeout_sec: 10, max_error_count: 10)
          @consumer_groups = Karafka::App.routes.map(&:name)
          @timeout_sec = timeout_sec
+         @max_error_count = max_error_count
+         @error_count = 0
+         @error_backtrace = nil
          @polls = {}
 
          setup_subscription
@@ -18,9 +23,14 @@ module Sbmt
        def probe(_env)
          now = current_time
          timed_out_polls = select_timed_out_polls(now)
-         return probe_ok groups: meta_from_polls(polls, now) if timed_out_polls.empty?
 
-
+         if timed_out_polls.empty? && @error_count < @max_error_count
+           probe_ok groups: meta_from_polls(polls, now) if timed_out_polls.empty?
+         elsif @error_count >= @max_error_count
+           probe_error error_type: ERROR_TYPE, failed_librdkafka: {error_count: @error_count, error_backtrace: @error_backtrace}
+         else
+           probe_error error_type: ERROR_TYPE, failed_groups: meta_from_polls(timed_out_polls, now)
+         end
        end
 
        def on_connection_listener_fetch_loop(event)
@@ -28,6 +38,16 @@ module Sbmt
          polls[consumer_group.name] = current_time
        end
 
+       def on_error_occurred(event)
+         type = event[:type]
+
+         return unless type == "librdkafka.error"
+         error = event[:error]
+
+         @error_backtrace ||= (error.backtrace || []).join("\n")
+         @error_count += 1
+       end
+
        private
 
        attr_reader :polls, :timeout_sec, :consumer_groups
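The probe above counts only `librdkafka.error` events and keeps answering "ok" until `max_error_count` is reached. A minimal, self-contained sketch of that threshold behaviour (a simplified stand-in for illustration, not the gem's actual class; the event is assumed to be a hash with `:type` and `:error`, as in the handler above):

# Simplified illustration of the counting logic added to LivenessListener.
# Only "librdkafka.error" events are counted; the first backtrace is kept.
class LivenessErrorCounter
  def initialize(max_error_count: 10)
    @max_error_count = max_error_count
    @error_count = 0
    @error_backtrace = nil
  end

  def on_error_occurred(event)
    return unless event[:type] == "librdkafka.error"

    error = event[:error]
    @error_backtrace ||= (error.backtrace || []).join("\n")
    @error_count += 1
  end

  # Mirrors the probe branch: the listener stays "ok" until the threshold is hit.
  def healthy?
    @error_count < @max_error_count
  end
end

counter = LivenessErrorCounter.new(max_error_count: 3)
3.times { counter.on_error_occurred({type: "librdkafka.error", error: StandardError.new("broker down")}) }
counter.healthy? # => false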
data/lib/sbmt/kafka_consumer/instrumentation/logger_listener.rb
CHANGED
@@ -32,6 +32,14 @@ module Sbmt
          logger.info("Successfully consumed message in #{event.payload[:time]} ms")
        end
 
+       def on_consumer_mark_as_consumed(event)
+         logger.info("Processing message in #{event.payload[:time]} ms")
+       end
+
+       def on_consumer_process_message(event)
+         logger.info("Commit offset in #{event.payload[:time]} ms")
+       end
+
        # InboxConsumer events
        def on_consumer_inbox_consumed_one(event)
          logger.tagged(status: event[:status]) do
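The two new handlers follow the gem's listener convention: an `on_consumer_<event>` method that reads timing from `event.payload[:time]`. A hypothetical listener written in the same style, with a plain stub standing in for the monitor's event object (inside the gem these events come from `with_common_instrumentation` in base_consumer.rb, shown above):

require "logger"

# Hypothetical listener following the same naming convention as LoggerListener.
# Event is a stub for the monitor's event object; only #payload is used here.
Event = Struct.new(:payload)

class TimingListener
  def initialize(logger: Logger.new($stdout))
    @logger = logger
  end

  def on_consumer_process_message(event)
    @logger.info("process_message took #{event.payload[:time]} ms")
  end

  def on_consumer_mark_as_consumed(event)
    @logger.info("mark_as_consumed took #{event.payload[:time]} ms")
  end
end

listener = TimingListener.new
listener.on_consumer_process_message(Event.new({time: 12.5}))
listener.on_consumer_mark_as_consumed(Event.new({time: 0.8}))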
data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer.rb
CHANGED
@@ -6,6 +6,11 @@ module Sbmt
   module KafkaConsumer
     module Instrumentation
       class OpenTelemetryTracer < ::Sbmt::KafkaConsumer::Instrumentation::Tracer
+        CONSUMED_EVENTS = %w[
+          consumer.process_message
+          consumer.mark_as_consumed
+        ].freeze
+
         class << self
           def enabled?
             !!@enabled
@@ -22,6 +27,7 @@ module Sbmt
          return handle_consumed_one(&block) if @event_id == "consumer.consumed_one"
          return handle_consumed_batch(&block) if @event_id == "consumer.consumed_batch"
          return handle_inbox_consumed_one(&block) if @event_id == "consumer.inbox.consumed_one"
+         return handle_common_event(&block) if CONSUMED_EVENTS.include?(@event_id)
          return handle_error(&block) if @event_id == "error.occurred"
 
          yield
@@ -79,6 +85,16 @@ module Sbmt
           end
         end
 
+        def handle_common_event(&block)
+          return yield unless enabled?
+
+          if @payload[:inbox_name].present?
+            handle_inbox_consumed_one(&block)
+          else
+            handle_consumed_one(&block)
+          end
+        end
+
         def handle_error
           return yield unless enabled?
 
data/lib/sbmt/kafka_consumer/instrumentation/sentry_tracer.rb
CHANGED
@@ -13,8 +13,14 @@ module Sbmt
          consumer.inbox.consume_one
        ].freeze
 
+       EVENTS = %w[
+         consumer.consumed_one
+         consumer.process_message
+         consumer.mark_as_consumed
+       ].freeze
+
        def trace(&block)
-         return handle_consumed_one(&block) if @event_id
+         return handle_consumed_one(&block) if EVENTS.include?(@event_id)
          return handle_consumed_batch(&block) if @event_id == "consumer.consumed_batch"
          return handle_error(&block) if @event_id == "error.occurred"
 
data/lib/sbmt/kafka_consumer/probes/host.rb
CHANGED
@@ -23,7 +23,7 @@ module Sbmt
          liveness = config[:liveness]
          if liveness[:enabled]
            c.probe liveness[:path], Sbmt::KafkaConsumer::Instrumentation::LivenessListener.new(
-             timeout_sec: liveness[:timeout]
+             timeout_sec: liveness[:timeout], max_error_count: liveness[:max_error_count]
            )
          end
 
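With this change the liveness endpoint forwards `max_error_count` from the probe config into the listener; both the struct attribute and the listener keyword default to 10. A wiring sketch under assumptions: it presumes an application where the gem and Karafka routing are already booted (the listener subscribes itself on initialize), and a liveness hash shaped like the keys host.rb reads; the values shown are illustrative.

# Assumed config shape, mirroring the keys host.rb reads
# (liveness[:enabled], [:path], [:timeout], [:max_error_count]);
# in the gem the real values come from its probes configuration.
liveness = {enabled: true, path: "/liveness", timeout: 15, max_error_count: 20}

if liveness[:enabled]
  listener = Sbmt::KafkaConsumer::Instrumentation::LivenessListener.new(
    timeout_sec: liveness[:timeout],
    max_error_count: liveness[:max_error_count]
  )
  # host.rb mounts this listener at liveness[:path] via `c.probe`.
end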
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: sbmt-kafka_consumer
 version: !ruby/object:Gem::Version
-  version: 2.4.1
+  version: 2.6.0
 platform: ruby
 authors:
 - Sbermarket Ruby-Platform Team
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-
+date: 2024-07-01 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails
@@ -570,7 +570,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.5.
+rubygems_version: 3.5.3
 signing_key:
 specification_version: 4
 summary: Ruby gem for consuming Kafka messages
|