sbmt-kafka_consumer 2.2.0 → 2.3.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 8e14fbca0f8cb5daec037424c3d443f57c68ff8e58e6490fcd265b64d9746ceb
-  data.tar.gz: d3077ea80569c187a3d1fa2046ee92ddd6fcd256fb7ac908dedcee31ca6bf467
+  metadata.gz: 903e47e49b20391d11feb8f0f278b52ff6e765daa3a5fa27b50e7e673612d5db
+  data.tar.gz: 95c9e87e8a1dde1c85d1266166694e0b06d82c645cd5affcb4f06200bb66d4af
 SHA512:
-  metadata.gz: b39990b437807097d4142638166a2f6655bc90b8e1a7c1e2ef4fc86238fcc46482fb318753e2e186f09b95949b943225baf76238656141d4393086bb5be5fa7a
-  data.tar.gz: c6696572ef9a933faa0142cb24a51853e4a8961c4f9646081e3601d709e74df3c9f86fbca31ffa6493132f69ff75f06b715d956bbab0ad1aaecf4398a2b8830d
+  metadata.gz: 40aef0b43f7e39c7f61c32a2bd226893988970ad9d0be02692c12fdfaae06553d56d0464aff307b3a01445c0e683f90eb219768289aedea6423671a2f6e06136
+  data.tar.gz: 6fe8bc839e28521bc8795aca004e3dbc0f79bda8d18c2844c01e3ac9d12bdbccb29be894dff8b5ddeb073ae7bcf7e20cfb189b76a2c349aada1457a4f8f857ae
data/CHANGELOG.md CHANGED
@@ -13,6 +13,22 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 
 ### Fixed
 
+## [2.3.1] - 2024-06-05
+
+### Fixed
+
+- Rename from `export_batch` to `process_batch`
+
+## [2.3.0] - 2024-05-30
+
+### Added
+
+- New config option `partition_assignment_strategy`
+
+### Changed
+
+- Raise an exception when using the `partition.assignment.strategy` option within `kafka_options` for topics.
+
 ## [2.2.0] - 2024-05-13
 
 ### Changed
data/Gemfile CHANGED
@@ -3,3 +3,6 @@
 source "https://rubygems.org"
 
 gemspec
+
+# FIXME: remove this after drop support for Ruby 2.7
+gem "ffi", "< 1.17"
data/README.md CHANGED
@@ -69,6 +69,7 @@ default: &default
   pause_timeout: 1
   pause_max_timeout: 30
   pause_with_exponential_backoff: true
+  partition_assignment_strategy: cooperative-sticky
   auth:
     kind: plaintext
   kafka:
@@ -155,6 +156,7 @@
 The `servers` key is required and should be in rdkafka format: without `kafka://` prefix, for example: `srv1:port1,srv2:port2,...`.
 
 The `kafka_config` section may contain any [rdkafka option](https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md). Also, `kafka_options` may be redefined for each topic.
+Please note that the `partition.assignment.strategy` option within kafka_options is not supported for topics; instead, use the global option partition_assignment_strategy.
 
 ### `consumer_groups` config section
 
@@ -231,14 +233,14 @@ require_relative "config/environment"
 some-extra-configuration
 ```
 
-### `Export batch`
+### `Process batch`
 
-To process messages in batches, you need to add the `export_batch` method in the consumer
+To process messages in batches, you need to add the `process_batch` method in the consumer
 
 ```ruby
 # app/consumers/some_consumer.rb
 class SomeConsumer < Sbmt::KafkaConsumer::BaseConsumer
-  def export_batch(messages)
+  def process_batch(messages)
     # some code
   end
 end
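To illustrate the README note above: the assignment strategy is no longer accepted per topic and should be set once at the top level of the config. A minimal, abridged YAML sketch (group, topic, and consumer names are illustrative; the layout loosely follows the README's config example):

```yaml
default: &default
  partition_assignment_strategy: cooperative-sticky    # supported: global option
  consumer_groups:
    group_ref_id_1:
      name: cg_example
      topics:
        - name: "orders"
          consumer:
            klass: "OrdersConsumer"
          kafka_options:
            partition.assignment.strategy: "roundrobin" # not supported per topic: raises a validation error on boot
```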
@@ -6,6 +6,8 @@ default: &default
   pause_timeout: 1
   pause_max_timeout: 30
   pause_with_exponential_backoff: true
+  ## available strategies: range, roundrobin, cooperative-sticky
+  # partition_assignment_strategy: "range,roundrobin"
   auth:
     kind: plaintext
   kafka:
@@ -17,9 +17,9 @@ module Sbmt
 
       def consume
        ::Rails.application.executor.wrap do
-          if export_batch?
+          if process_batch?
            with_batch_instrumentation(messages) do
-              export_batch(messages)
+              process_batch(messages)
              mark_as_consumed!(messages.last)
            end
          else
@@ -30,11 +30,11 @@ module Sbmt
        end
      end
 
-      def export_batch?
-        if @export_batch_memoized.nil?
-          @export_batch_memoized = respond_to?(:export_batch)
+      def process_batch?
+        if @process_batch_memoized.nil?
+          @process_batch_memoized = respond_to?(:process_batch)
        end
-        @export_batch_memoized
+        @process_batch_memoized
      end
 
      private
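For context on the rename above, a minimal application-side sketch (the consumer class, payload shape, and `Order` model are illustrative, not part of the gem): because this class responds to `process_batch`, `process_batch?` memoizes `true` and `consume` passes it the whole Karafka batch instead of handling messages one by one.

```ruby
# app/consumers/orders_consumer.rb (hypothetical example)
class OrdersConsumer < Sbmt::KafkaConsumer::BaseConsumer
  def process_batch(messages)
    # `messages` is the Karafka batch; each message exposes #payload
    rows = messages.map { |m| m.payload.slice("id", "status") }
    Order.upsert_all(rows) # Order is an illustrative ActiveRecord model
  end
end
```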
@@ -25,7 +25,7 @@ class Sbmt::KafkaConsumer::Config < Anyway::Config
 
   attr_config :client_id,
     :pause_timeout, :pause_max_timeout, :pause_with_exponential_backoff,
-    :max_wait_time, :shutdown_timeout,
+    :max_wait_time, :shutdown_timeout, :partition_assignment_strategy,
     concurrency: 4, auth: {}, kafka: {}, consumer_groups: {}, probes: {}, metrics: {},
     deserializer_class: "::Sbmt::KafkaConsumer::Serialization::NullDeserializer",
     monitor_class: "::Sbmt::KafkaConsumer::Instrumentation::TracingMonitor",
@@ -45,6 +45,7 @@ class Sbmt::KafkaConsumer::Config < Anyway::Config
     pause_with_exponential_backoff: :boolean,
     max_wait_time: :integer,
     shutdown_timeout: :integer,
+    partition_assignment_strategy: :string,
     concurrency: :integer
 
   coerce_types kafka: coerce_to(Kafka)
@@ -54,7 +55,10 @@ class Sbmt::KafkaConsumer::Config < Anyway::Config
   coerce_types consumer_groups: coerce_to_array_of(ConsumerGroup)
 
   def to_kafka_options
-    kafka.to_kafka_options
+    {
+      "partition.assignment.strategy": partition_assignment_strategy
+    }.compact
+      .merge(kafka.to_kafka_options)
       .merge(auth.to_kafka_options)
   end
 
@@ -64,6 +68,9 @@ class Sbmt::KafkaConsumer::Config < Anyway::Config
     consumer_groups.each do |cg|
       raise_validation_error "consumer group #{cg.id} must have at least one topic defined" if cg.topics.blank?
       cg.topics.each do |t|
+        if t.kafka_options.key?(:"partition.assignment.strategy")
+          raise_validation_error "Using the partition.assignment.strategy option for individual topics is not supported due to consuming issues. Use the global option `partition_assignment_strategy` instead"
+        end
         raise_validation_error "topic #{cg.id}.topics.name[#{t.name}] contains invalid consumer class: no const #{t.consumer.klass} defined" unless t.consumer.klass.safe_constantize
         raise_validation_error "topic #{cg.id}.topics.name[#{t.name}] contains invalid deserializer class: no const #{t.deserializer.klass} defined" unless t.deserializer&.klass&.safe_constantize
       end
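A standalone sketch of the merge order in the `to_kafka_options` change above (values are made up; only the hash logic mirrors the diff): `.compact` drops the key entirely when `partition_assignment_strategy` is unset, so existing configs behave as before, and the `kafka_config`/auth options are merged on top.

```ruby
partition_assignment_strategy = "cooperative-sticky"          # nil when not configured
kafka_options = {"bootstrap.servers": "srv1:9092,srv2:9092"}  # stand-in for kafka.to_kafka_options
auth_options  = {"security.protocol": "plaintext"}            # stand-in for auth.to_kafka_options

{"partition.assignment.strategy": partition_assignment_strategy}
  .compact             # => {} when the strategy is nil
  .merge(kafka_options)
  .merge(auth_options)
# => {:"partition.assignment.strategy"=>"cooperative-sticky",
#     :"bootstrap.servers"=>"srv1:9092,srv2:9092",
#     :"security.protocol"=>"plaintext"}
```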
@@ -2,6 +2,6 @@
 
 module Sbmt
   module KafkaConsumer
-    VERSION = "2.2.0"
+    VERSION = "2.3.1"
   end
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: sbmt-kafka_consumer
 version: !ruby/object:Gem::Version
-  version: 2.2.0
+  version: 2.3.1
 platform: ruby
 authors:
 - Sbermarket Ruby-Platform Team
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-06-04 00:00:00.000000000 Z
+date: 2024-06-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails