sbmt-kafka_consumer 2.0.1 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +1 -1
- data/Appraisals +1 -2
- data/CHANGELOG.md +13 -0
- data/README.md +31 -0
- data/lib/sbmt/kafka_consumer/base_consumer.rb +35 -2
- data/lib/sbmt/kafka_consumer/instrumentation/base_monitor.rb +1 -0
- data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer.rb +31 -0
- data/lib/sbmt/kafka_consumer/instrumentation/sentry_tracer.rb +46 -16
- data/lib/sbmt/kafka_consumer/testing/shared_contexts/with_sbmt_karafka_consumer.rb +22 -9
- data/lib/sbmt/kafka_consumer/version.rb +1 -1
- data/sbmt-kafka_consumer.gemspec +1 -1
- metadata +5 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8e14fbca0f8cb5daec037424c3d443f57c68ff8e58e6490fcd265b64d9746ceb
+  data.tar.gz: d3077ea80569c187a3d1fa2046ee92ddd6fcd256fb7ac908dedcee31ca6bf467
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b39990b437807097d4142638166a2f6655bc90b8e1a7c1e2ef4fc86238fcc46482fb318753e2e186f09b95949b943225baf76238656141d4393086bb5be5fa7a
+  data.tar.gz: c6696572ef9a933faa0142cb24a51853e4a8961c4f9646081e3601d709e74df3c9f86fbca31ffa6493132f69ff75f06b715d956bbab0ad1aaecf4398a2b8830d
data/.rubocop.yml
CHANGED
data/Appraisals
CHANGED
@@ -3,11 +3,10 @@
 # See compatibility table at https://www.fastruby.io/blog/ruby/rails/versions/compatibility-table.html
 
 versions_map = {
-  "5.2" => %w[2.7],
   "6.0" => %w[2.7],
   "6.1" => %w[2.7 3.0],
   "7.0" => %w[3.1],
-  "7.1" => %w[3.2]
+  "7.1" => %w[3.2 3.3]
 }
 
 current_ruby_version = RUBY_VERSION.split(".").first(2).join(".")
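For context on how this map is consumed (a sketch of the common Appraisals DSL pattern; the gem's actual block further down the file may differ):

```ruby
# Each Rails version is appraised only on the Ruby versions mapped to it.
versions_map.each do |rails_version, ruby_versions|
  next unless ruby_versions.include?(current_ruby_version)

  appraise "rails-#{rails_version}" do
    gem "rails", "~> #{rails_version}.0"
  end
end
```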
data/CHANGELOG.md
CHANGED
@@ -13,6 +13,19 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 
 ### Fixed
 
+## [2.2.0] - 2024-05-13
+
+### Changed
+
+- Drop support for Rails 5.2
+- Add support for Ruby 3.3
+
+## [2.1.0] - 2024-05-13
+
+### Added
+
+- Implemented method `export_batch` for processing messages in batches
+
 ## [2.0.1] - 2024-05-08
 
 ### Fixed
data/README.md
CHANGED
@@ -231,6 +231,22 @@ require_relative "config/environment"
 some-extra-configuration
 ```
 
+### `Export batch`
+
+To process messages in batches, add an `export_batch` method to the consumer:
+
+```ruby
+# app/consumers/some_consumer.rb
+class SomeConsumer < Sbmt::KafkaConsumer::BaseConsumer
+  def export_batch(messages)
+    # some code
+  end
+end
+```
+__CAUTION__:
+- ⚠️ Inbox does not support batch insertion.
+- ⚠️ If you want to use this feature, you need to process the batch atomically (e.g. insert it into ClickHouse in a single request).
+
 ## CLI
 
 Run the following command to execute a server
@@ -259,6 +275,7 @@ Also pay attention to the number of processes of the server:
 
 To test your consumer with RSpec, please use [this shared context](./lib/sbmt/kafka_consumer/testing/shared_contexts/with_sbmt_karafka_consumer.rb)
 
+### for payload
 ```ruby
 require "sbmt/kafka_consumer/testing"
 
@@ -272,6 +289,20 @@ RSpec.describe OrderCreatedConsumer do
 end
 ```
 
+### for payloads
+```ruby
+require "sbmt/kafka_consumer/testing"
+
+RSpec.describe OrderCreatedConsumer do
+  include_context "with sbmt karafka consumer"
+
+  it "works" do
+    publish_to_sbmt_karafka_batch(payloads, deserializer: deserializer)
+    expect { consume_with_sbmt_karafka }.to change(Order, :count).by(1)
+  end
+end
+```
+
 ## Development
 
 1. Prepare environment
data/lib/sbmt/kafka_consumer/base_consumer.rb
CHANGED
@@ -17,12 +17,26 @@ module Sbmt
 
       def consume
         ::Rails.application.executor.wrap do
-          messages.each do |message|
-            with_instrumentation(message) { do_consume(message) }
+          if export_batch?
+            with_batch_instrumentation(messages) do
+              export_batch(messages)
+              mark_as_consumed!(messages.last)
+            end
+          else
+            messages.each do |message|
+              with_instrumentation(message) { do_consume(message) }
+            end
           end
         end
       end
 
+      def export_batch?
+        if @export_batch_memoized.nil?
+          @export_batch_memoized = respond_to?(:export_batch)
+        end
+        @export_batch_memoized
+      end
+
       private
 
      def with_instrumentation(message)
@@ -53,6 +67,25 @@ module Sbmt
        end
      end
 
+      def with_batch_instrumentation(messages)
+        @trace_id = SecureRandom.base58
+
+        logger.tagged(
+          trace_id: trace_id,
+          first_offset: messages.first.metadata.offset,
+          last_offset: messages.last.metadata.offset
+        ) do
+          ::Sbmt::KafkaConsumer.monitor.instrument(
+            "consumer.consumed_batch",
+            caller: self,
+            messages: messages,
+            trace_id: trace_id
+          ) do
+            yield
+          end
+        end
+      end
+
      def do_consume(message)
        log_message(message) if log_payload?
 
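For orientation (not part of the diff): a consumer written against this new hook might look like the sketch below. `ClickhouseWriter` and the payload shape are hypothetical; what is real is the dispatch rule — because the class responds to `export_batch`, `consume` takes the batch branch and commits the offset of the last message only after the whole batch succeeds.

```ruby
# Hypothetical consumer exercising the batch path added above.
class OrdersExportConsumer < Sbmt::KafkaConsumer::BaseConsumer
  def export_batch(messages)
    rows = messages.map(&:payload)
    # The README warns the batch must be processed atomically: one request,
    # so a mid-batch crash cannot leave partially committed work.
    ClickhouseWriter.insert_all(rows) # hypothetical bulk writer
  end
end
```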
data/lib/sbmt/kafka_consumer/instrumentation/base_monitor.rb
CHANGED
data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer.rb
CHANGED
@@ -20,6 +20,7 @@ module Sbmt
 
         def trace(&block)
           return handle_consumed_one(&block) if @event_id == "consumer.consumed_one"
+          return handle_consumed_batch(&block) if @event_id == "consumer.consumed_batch"
           return handle_inbox_consumed_one(&block) if @event_id == "consumer.inbox.consumed_one"
           return handle_error(&block) if @event_id == "error.occurred"
 
@@ -43,6 +44,23 @@ module Sbmt
           end
         end
 
+        def handle_consumed_batch
+          return yield unless enabled?
+
+          consumer = @payload[:caller]
+          messages = @payload[:messages]
+
+          links = messages.filter_map do |m|
+            parent_context = ::OpenTelemetry.propagation.extract(m.headers, getter: ::OpenTelemetry::Context::Propagation.text_map_getter)
+            span_context = ::OpenTelemetry::Trace.current_span(parent_context).context
+            ::OpenTelemetry::Trace::Link.new(span_context) if span_context.valid?
+          end
+
+          tracer.in_span("consume batch", links: links, attributes: batch_attrs(consumer, messages), kind: :consumer) do
+            yield
+          end
+        end
+
         def handle_inbox_consumed_one
           return yield unless enabled?
 
@@ -92,6 +110,19 @@ module Sbmt
           attributes.compact
         end
 
+        def batch_attrs(consumer, messages)
+          message = messages.first
+          {
+            "messaging.system" => "kafka",
+            "messaging.destination" => message.topic,
+            "messaging.destination_kind" => "topic",
+            "messaging.kafka.consumer_group" => consumer.topic.consumer_group.id,
+            "messaging.batch_size" => messages.count,
+            "messaging.first_offset" => messages.first.offset,
+            "messaging.last_offset" => messages.last.offset
+          }.compact
+        end
+
         def extract_message_key(key)
           # skip encode if already valid utf8
           return key if key.nil? || (key.encoding == Encoding::UTF_8 && key.valid_encoding?)
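For orientation (not part of the diff): the link-building loop above relies on producers writing W3C trace context into Kafka message headers. A standalone sketch of that extraction using the public opentelemetry-ruby API; the `traceparent` value is illustrative:

```ruby
require "opentelemetry-sdk"

OpenTelemetry::SDK.configure # installs the default W3C tracecontext propagator

# Illustrative traceparent header, as a producer-side propagator would write it.
headers = {"traceparent" => "00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01"}

parent_context = OpenTelemetry.propagation.extract(
  headers,
  getter: OpenTelemetry::Context::Propagation.text_map_getter
)
span_context = OpenTelemetry::Trace.current_span(parent_context).context
# An invalid span context means the headers carried no usable trace context.
link = OpenTelemetry::Trace::Link.new(span_context) if span_context.valid?
```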
data/lib/sbmt/kafka_consumer/instrumentation/sentry_tracer.rb
CHANGED
@@ -9,34 +9,48 @@ module Sbmt
       class SentryTracer < ::Sbmt::KafkaConsumer::Instrumentation::Tracer
         CONSUMER_ERROR_TYPES = %w[
           consumer.base.consume_one
+          consumer.base.consumed_batch
           consumer.inbox.consume_one
         ].freeze
 
         def trace(&block)
           return handle_consumed_one(&block) if @event_id == "consumer.consumed_one"
+          return handle_consumed_batch(&block) if @event_id == "consumer.consumed_batch"
           return handle_error(&block) if @event_id == "error.occurred"
 
           yield
         end
 
         def handle_consumed_one
-          return yield unless ::Sentry.initialized?
-
-          consumer = @payload[:caller]
-          message = @payload[:message]
-          trace_id = @payload[:trace_id]
-
-          scope, transaction = start_transaction(trace_id, consumer, message)
-
-          begin
+          message = {
+            trace_id: @payload[:trace_id],
+            topic: @payload[:message].topic,
+            offset: @payload[:message].offset
+          }
+
+          with_sentry_transaction(
+            @payload[:caller],
+            message
+          ) do
             yield
-          rescue
-            finish_transaction(transaction, 500)
-            raise
           end
+        end
 
-          finish_transaction(transaction, 200)
-          scope.clear
+        def handle_consumed_batch
+          message_first = @payload[:messages].first
+          message = {
+            trace_id: @payload[:trace_id],
+            topic: message_first.topic,
+            first_offset: message_first.offset,
+            last_offset: @payload[:messages].last.offset
+          }
+
+          with_sentry_transaction(
+            @payload[:caller],
+            message
+          ) do
+            yield
+          end
         end
 
         def handle_error
@@ -64,9 +78,9 @@ module Sbmt
 
         private
 
-        def start_transaction(trace_id, consumer, message)
+        def start_transaction(consumer, message)
           scope = ::Sentry.get_current_scope
-          scope.set_tags(trace_id: trace_id, topic: message.topic, offset: message.offset)
+          scope.set_tags(message)
           scope.set_transaction_name("Sbmt/KafkaConsumer/#{consumer.class.name}")
 
           transaction = ::Sentry.start_transaction(name: scope.transaction_name, op: "kafka-consumer")
@@ -97,6 +111,22 @@ module Sbmt
           # so in that case we return raw_payload
           message.raw_payload
         end
+
+        def with_sentry_transaction(consumer, message)
+          return yield unless ::Sentry.initialized?
+
+          scope, transaction = start_transaction(consumer, message)
+
+          begin
+            yield
+          rescue
+            finish_transaction(transaction, 500)
+            raise
+          end
+
+          finish_transaction(transaction, 200)
+          scope.clear
+        end
       end
     end
   end
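For reference (not part of the diff): the hash handed to `with_sentry_transaction` becomes the Sentry scope tags via `scope.set_tags(message)`, so a consumed batch is tagged roughly as follows, with illustrative values:

```ruby
# Illustrative tag values only; trace_id comes from with_batch_instrumentation.
{
  trace_id: "3WzL9pRqk1",
  topic: "orders",
  first_offset: 100,
  last_offset: 149
}
```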
data/lib/sbmt/kafka_consumer/testing/shared_contexts/with_sbmt_karafka_consumer.rb
CHANGED
@@ -28,15 +28,14 @@ RSpec.shared_context "with sbmt karafka consumer" do
 
   def publish_to_sbmt_karafka(raw_payload, opts = {})
     message = Karafka::Messages::Message.new(raw_payload, Karafka::Messages::Metadata.new(metadata_defaults.merge(opts)))
-    consumer.messages = Karafka::Messages::Messages.new(
-      [message],
-      Karafka::Messages::BatchMetadata.new(
-        topic: test_topic.name,
-        partition: 0,
-        processed_at: Time.zone.now,
-        created_at: Time.zone.now
-      )
-    )
+    consumer.messages = consumer_messages([message])
+  end
+
+  def publish_to_sbmt_karafka_batch(raw_payloads, opts = {})
+    messages = raw_payloads.map do |p|
+      Karafka::Messages::Message.new(p, Karafka::Messages::Metadata.new(metadata_defaults.merge(opts)))
+    end
+    consumer.messages = consumer_messages(messages)
   end
 
   # @return [Hash] message default options
@@ -58,4 +57,18 @@ RSpec.shared_context "with sbmt karafka consumer" do
     instance.singleton_class.include Karafka::Processing::Strategies::Default
     instance
   end
+
+  private
+
+  def consumer_messages(messages)
+    Karafka::Messages::Messages.new(
+      messages,
+      Karafka::Messages::BatchMetadata.new(
+        topic: test_topic.name,
+        partition: 0,
+        processed_at: Time.zone.now,
+        created_at: Time.zone.now
+      )
+    )
+  end
 end
data/lib/sbmt/kafka_consumer/version.rb
CHANGED
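A usage sketch for the new batch helper (not from the gem's docs): `opts` are merged into `metadata_defaults` for every message, so one call stamps the whole batch. `ExportedRow` and the `key:` override are illustrative.

```ruby
it "consumes the whole batch at once" do
  payloads = [{"id" => 1}, {"id" => 2}]
  # key: is assumed to be a valid Karafka metadata attribute; it applies
  # to every message in the batch because opts are merged per message.
  publish_to_sbmt_karafka_batch(payloads, key: "order-1")
  expect { consume_with_sbmt_karafka }.to change(ExportedRow, :count).by(2)
end
```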
data/sbmt-kafka_consumer.gemspec
CHANGED
@@ -31,7 +31,7 @@ Gem::Specification.new do |spec|
   spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
   spec.require_paths = ["lib"]
 
-  spec.add_dependency "rails", ">= 5.2"
+  spec.add_dependency "rails", ">= 6.0"
   spec.add_dependency "zeitwerk", "~> 2.3"
   spec.add_dependency "karafka", "~> 2.2", "< 2.4" # [Breaking] Drop the concept of consumer group mapping.
   spec.add_dependency "yabeda", ">= 0.11"
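To pick up this release in an application, a Gemfile entry along these lines would work; note the dependency floor is now Rails 6.0:

```ruby
# Gemfile
gem "sbmt-kafka_consumer", "~> 2.2"
```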
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: sbmt-kafka_consumer
 version: !ruby/object:Gem::Version
-  version: 2.0.1
+  version: 2.2.0
 platform: ruby
 authors:
 - Sbermarket Ruby-Platform Team
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-
+date: 2024-06-04 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails
@@ -16,14 +16,14 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '5.2'
+        version: '6.0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '5.2'
+        version: '6.0'
 - !ruby/object:Gem::Dependency
   name: zeitwerk
   requirement: !ruby/object:Gem::Requirement
@@ -570,7 +570,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.5.11
 signing_key:
 specification_version: 4
 summary: Ruby gem for consuming Kafka messages