sbmt-kafka_consumer 2.5.0 → 2.6.1

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 6e41656d16c2a1b741868c2d079e4e83c3df397e5e7491680e417738231afe71
4
- data.tar.gz: '09117d5de7194d45bf44235f731adfd4e6f60cc18d6c613c1fe2a3800048a89c'
3
+ metadata.gz: '08fdb32c69e94b244084454a41b71743a059d11fa1ae35e564ca32a3ee87dafd'
4
+ data.tar.gz: b3e4365734439f273a5e42ebe6135d53627afb22a375a0d7d557d38d13a70cd5
5
5
  SHA512:
6
- metadata.gz: 0ce2e28e58baf91e4e1612d02c37767399e643ed99c78f26adfcae1e23e77aab8f1af8bda64c6f91b16456e069b24f7db53812843347924b124b9e890d8f4e69
7
- data.tar.gz: 3447e5c1c06c3f636d4c5f37070a9f349714aea586710ba7c87d2cd3248ba5ad686bfca28d1550ec0af86d18cd556315945596c7b7c069fa31a4d0db2e1425b9
6
+ metadata.gz: 0744b29e11d89164bf95d0573990bf694cf3e1d09e3861402449d86f142701d2e3982fc7928bc903102bd832e03494960335df5208b3fb3d5a403f60cd9cd93a
7
+ data.tar.gz: 30c0c560a036a1febcf5af0cd66c08dc25c04ec8b5973c8f58892589c16aed4b40ca9d0358d6340228f2b715ea025afc7e2debcb1b962db6c458095f0ab48c7f
data/CHANGELOG.md CHANGED
@@ -13,6 +13,22 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
13
13
 
14
14
  ### Fixed
15
15
 
16
+ ## [2.6.1] - 2024-07-05
17
+
18
+ ### Fixed
19
+
20
+ - Use the current `trace_id` instead of creating a new one
21
+
22
+ ## [2.6.0] - 2024-07-01
23
+
24
+ ### Added
25
+
26
+ - Added instrumentation for methods `process_message` and `mark_as_consumed!`
27
+
28
+ ### Fixed
29
+
30
+ - Changed the explicit `do_consume(message)` call to `yield` inside the `consumer.consumed_one` instrumentation block
31
+
16
32
  ## [2.5.0] - 2024-06-24
17
33
 
18
34
  ### Added
@@ -3,8 +3,6 @@
3
3
  module Sbmt
4
4
  module KafkaConsumer
5
5
  class BaseConsumer < Karafka::BaseConsumer
6
- attr_reader :trace_id
7
-
8
6
  def self.consumer_klass(skip_on_error: false, middlewares: [])
9
7
  Class.new(self) do
10
8
  const_set(:SKIP_ON_ERROR, skip_on_error)
@@ -41,8 +39,6 @@ module Sbmt
41
39
  private
42
40
 
43
41
  def with_instrumentation(message)
44
- @trace_id = SecureRandom.base58
45
-
46
42
  logger.tagged(
47
43
  trace_id: trace_id,
48
44
  topic: message.metadata.topic, partition: message.metadata.partition,
@@ -52,7 +48,7 @@ module Sbmt
52
48
  "consumer.consumed_one",
53
49
  caller: self, message: message, trace_id: trace_id
54
50
  ) do
55
- do_consume(message)
51
+ yield
56
52
  rescue SkipUndeserializableMessage => ex
57
53
  instrument_error(ex, message)
58
54
  logger.warn("skipping undeserializable message: #{ex.message}")
@@ -69,8 +65,6 @@ module Sbmt
69
65
  end
70
66
 
71
67
  def with_batch_instrumentation(messages)
72
- @trace_id = SecureRandom.base58
73
-
74
68
  logger.tagged(
75
69
  trace_id: trace_id,
76
70
  first_offset: messages.first.metadata.offset,
@@ -87,6 +81,21 @@ module Sbmt
87
81
  end
88
82
  end
89
83
 
84
+ def with_common_instrumentation(name, message)
85
+ logger.tagged(
86
+ trace_id: trace_id
87
+ ) do
88
+ ::Sbmt::KafkaConsumer.monitor.instrument(
89
+ "consumer.#{name}",
90
+ caller: self,
91
+ message: message,
92
+ trace_id: trace_id
93
+ ) do
94
+ yield
95
+ end
96
+ end
97
+ end
98
+
90
99
  def do_consume(message)
91
100
  log_message(message) if log_payload?
92
101
 
@@ -94,9 +103,13 @@ module Sbmt
94
103
  # so we trigger it explicitly to catch undeserializable message early
95
104
  message.payload
96
105
 
97
- call_middlewares(message, middlewares) { process_message(message) }
106
+ with_common_instrumentation("process_message", message) do
107
+ call_middlewares(message, middlewares) { process_message(message) }
108
+ end
98
109
 
99
- mark_as_consumed!(message)
110
+ with_common_instrumentation("mark_as_consumed", message) do
111
+ mark_as_consumed!(message)
112
+ end
100
113
  end
101
114
 
102
115
  def skip_on_error
@@ -152,6 +165,10 @@ module Sbmt
152
165
  end
153
166
  traverse_chain.call
154
167
  end
168
+
169
+ def trace_id
170
+ @trace_id ||= SecureRandom.base58
171
+ end
155
172
  end
156
173
  end
157
174
  end
@@ -10,6 +10,8 @@ module Sbmt
10
10
  consumer.consumed_one
11
11
  consumer.inbox.consumed_one
12
12
  consumer.consumed_batch
13
+ consumer.process_message
14
+ consumer.mark_as_consumed
13
15
  ].freeze
14
16
 
15
17
  def initialize
@@ -32,6 +32,14 @@ module Sbmt
32
32
  logger.info("Successfully consumed message in #{event.payload[:time]} ms")
33
33
  end
34
34
 
35
+ def on_consumer_mark_as_consumed(event)
36
+ logger.info("Commit offset in #{event.payload[:time]} ms")
37
+ end
38
+
39
+ def on_consumer_process_message(event)
40
+ logger.info("Processing message in #{event.payload[:time]} ms")
41
+ end
42
+
35
43
  # InboxConsumer events
36
44
  def on_consumer_inbox_consumed_one(event)
37
45
  logger.tagged(status: event[:status]) do
@@ -6,6 +6,11 @@ module Sbmt
6
6
  module KafkaConsumer
7
7
  module Instrumentation
8
8
  class OpenTelemetryTracer < ::Sbmt::KafkaConsumer::Instrumentation::Tracer
9
+ CONSUMED_EVENTS = %w[
10
+ consumer.process_message
11
+ consumer.mark_as_consumed
12
+ ].freeze
13
+
9
14
  class << self
10
15
  def enabled?
11
16
  !!@enabled
@@ -22,6 +27,7 @@ module Sbmt
22
27
  return handle_consumed_one(&block) if @event_id == "consumer.consumed_one"
23
28
  return handle_consumed_batch(&block) if @event_id == "consumer.consumed_batch"
24
29
  return handle_inbox_consumed_one(&block) if @event_id == "consumer.inbox.consumed_one"
30
+ return handle_common_event(&block) if CONSUMED_EVENTS.include?(@event_id)
25
31
  return handle_error(&block) if @event_id == "error.occurred"
26
32
 
27
33
  yield
@@ -79,6 +85,16 @@ module Sbmt
79
85
  end
80
86
  end
81
87
 
88
+ def handle_common_event(&block)
89
+ return yield unless enabled?
90
+
91
+ if @payload[:inbox_name].present?
92
+ handle_inbox_consumed_one(&block)
93
+ else
94
+ handle_consumed_one(&block)
95
+ end
96
+ end
97
+
82
98
  def handle_error
83
99
  return yield unless enabled?
84
100
 
@@ -13,8 +13,14 @@ module Sbmt
13
13
  consumer.inbox.consume_one
14
14
  ].freeze
15
15
 
16
+ EVENTS = %w[
17
+ consumer.consumed_one
18
+ consumer.process_message
19
+ consumer.mark_as_consumed
20
+ ].freeze
21
+
16
22
  def trace(&block)
17
- return handle_consumed_one(&block) if @event_id == "consumer.consumed_one"
23
+ return handle_consumed_one(&block) if EVENTS.include?(@event_id)
18
24
  return handle_consumed_batch(&block) if @event_id == "consumer.consumed_batch"
19
25
  return handle_error(&block) if @event_id == "error.occurred"
20
26
 
@@ -2,6 +2,6 @@
2
2
 
3
3
  module Sbmt
4
4
  module KafkaConsumer
5
- VERSION = "2.5.0"
5
+ VERSION = "2.6.1"
6
6
  end
7
7
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: sbmt-kafka_consumer
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.5.0
4
+ version: 2.6.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Sbermarket Ruby-Platform Team
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2024-06-25 00:00:00.000000000 Z
11
+ date: 2024-07-08 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: rails