sbmt-kafka_consumer 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. checksums.yaml +7 -0
  2. data/.rspec +3 -0
  3. data/.rubocop.yml +34 -0
  4. data/Appraisals +23 -0
  5. data/CHANGELOG.md +292 -0
  6. data/Gemfile +5 -0
  7. data/LICENSE +21 -0
  8. data/README.md +296 -0
  9. data/Rakefile +12 -0
  10. data/config.ru +9 -0
  11. data/dip.yml +84 -0
  12. data/docker-compose.yml +68 -0
  13. data/exe/kafka_consumer +16 -0
  14. data/lefthook-local.dip_example.yml +4 -0
  15. data/lefthook.yml +6 -0
  16. data/lib/generators/kafka_consumer/concerns/configuration.rb +30 -0
  17. data/lib/generators/kafka_consumer/consumer/USAGE +24 -0
  18. data/lib/generators/kafka_consumer/consumer/consumer_generator.rb +41 -0
  19. data/lib/generators/kafka_consumer/consumer/templates/consumer.rb.erb +9 -0
  20. data/lib/generators/kafka_consumer/consumer/templates/consumer_group.yml.erb +13 -0
  21. data/lib/generators/kafka_consumer/inbox_consumer/USAGE +22 -0
  22. data/lib/generators/kafka_consumer/inbox_consumer/inbox_consumer_generator.rb +48 -0
  23. data/lib/generators/kafka_consumer/inbox_consumer/templates/consumer_group.yml.erb +22 -0
  24. data/lib/generators/kafka_consumer/install/USAGE +9 -0
  25. data/lib/generators/kafka_consumer/install/install_generator.rb +22 -0
  26. data/lib/generators/kafka_consumer/install/templates/Kafkafile +3 -0
  27. data/lib/generators/kafka_consumer/install/templates/kafka_consumer.yml +59 -0
  28. data/lib/sbmt/kafka_consumer/app_initializer.rb +13 -0
  29. data/lib/sbmt/kafka_consumer/base_consumer.rb +104 -0
  30. data/lib/sbmt/kafka_consumer/cli.rb +55 -0
  31. data/lib/sbmt/kafka_consumer/client_configurer.rb +73 -0
  32. data/lib/sbmt/kafka_consumer/config/auth.rb +56 -0
  33. data/lib/sbmt/kafka_consumer/config/consumer.rb +16 -0
  34. data/lib/sbmt/kafka_consumer/config/consumer_group.rb +9 -0
  35. data/lib/sbmt/kafka_consumer/config/deserializer.rb +15 -0
  36. data/lib/sbmt/kafka_consumer/config/kafka.rb +32 -0
  37. data/lib/sbmt/kafka_consumer/config/metrics.rb +10 -0
  38. data/lib/sbmt/kafka_consumer/config/probes/endpoints.rb +13 -0
  39. data/lib/sbmt/kafka_consumer/config/probes/liveness_probe.rb +11 -0
  40. data/lib/sbmt/kafka_consumer/config/probes/readiness_probe.rb +10 -0
  41. data/lib/sbmt/kafka_consumer/config/probes.rb +8 -0
  42. data/lib/sbmt/kafka_consumer/config/topic.rb +14 -0
  43. data/lib/sbmt/kafka_consumer/config.rb +76 -0
  44. data/lib/sbmt/kafka_consumer/inbox_consumer.rb +129 -0
  45. data/lib/sbmt/kafka_consumer/instrumentation/base_monitor.rb +25 -0
  46. data/lib/sbmt/kafka_consumer/instrumentation/chainable_monitor.rb +31 -0
  47. data/lib/sbmt/kafka_consumer/instrumentation/listener_helper.rb +47 -0
  48. data/lib/sbmt/kafka_consumer/instrumentation/liveness_listener.rb +71 -0
  49. data/lib/sbmt/kafka_consumer/instrumentation/logger_listener.rb +44 -0
  50. data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_loader.rb +23 -0
  51. data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer.rb +106 -0
  52. data/lib/sbmt/kafka_consumer/instrumentation/readiness_listener.rb +38 -0
  53. data/lib/sbmt/kafka_consumer/instrumentation/sentry_tracer.rb +103 -0
  54. data/lib/sbmt/kafka_consumer/instrumentation/tracer.rb +18 -0
  55. data/lib/sbmt/kafka_consumer/instrumentation/tracing_monitor.rb +17 -0
  56. data/lib/sbmt/kafka_consumer/instrumentation/yabeda_metrics_listener.rb +186 -0
  57. data/lib/sbmt/kafka_consumer/probes/host.rb +75 -0
  58. data/lib/sbmt/kafka_consumer/probes/probe.rb +33 -0
  59. data/lib/sbmt/kafka_consumer/railtie.rb +31 -0
  60. data/lib/sbmt/kafka_consumer/routing/karafka_v1_consumer_mapper.rb +12 -0
  61. data/lib/sbmt/kafka_consumer/routing/karafka_v2_consumer_mapper.rb +9 -0
  62. data/lib/sbmt/kafka_consumer/serialization/base_deserializer.rb +19 -0
  63. data/lib/sbmt/kafka_consumer/serialization/json_deserializer.rb +18 -0
  64. data/lib/sbmt/kafka_consumer/serialization/null_deserializer.rb +13 -0
  65. data/lib/sbmt/kafka_consumer/serialization/protobuf_deserializer.rb +27 -0
  66. data/lib/sbmt/kafka_consumer/server.rb +35 -0
  67. data/lib/sbmt/kafka_consumer/simple_logging_consumer.rb +11 -0
  68. data/lib/sbmt/kafka_consumer/testing/shared_contexts/with_sbmt_karafka_consumer.rb +61 -0
  69. data/lib/sbmt/kafka_consumer/testing.rb +5 -0
  70. data/lib/sbmt/kafka_consumer/types.rb +15 -0
  71. data/lib/sbmt/kafka_consumer/version.rb +7 -0
  72. data/lib/sbmt/kafka_consumer/yabeda_configurer.rb +91 -0
  73. data/lib/sbmt/kafka_consumer.rb +59 -0
  74. data/rubocop/rspec.yml +29 -0
  75. data/sbmt-kafka_consumer.gemspec +70 -0
  76. metadata +571 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: fe979c0a187c1aac9f2f8d571d99801e564912db57b86c8219b569fc3faef5c1
+   data.tar.gz: af13a117d78170019ee3f0ae0930c5b65c0dd027efa39a46eb51f1a32998f47b
+ SHA512:
+   metadata.gz: 7105c49b6c2ecf06769bb310c9a4bf836aa5cbad1b2ee994860b814b71059b8217610429dada55b408063597e32893b1efb404be11902d22693b37e4233ddb18
+   data.tar.gz: 2a6195f0db6b8a0a7f9435e6b93745422a97f2238c073598155b0c2d81212891028a67db9029160735754189610a3ab4687c433044c67bf9d6d2b6802bdd1149
data/.rspec ADDED
@@ -0,0 +1,3 @@
+ --color
+ --require spec_helper
+ --require rails_helper
data/.rubocop.yml ADDED
@@ -0,0 +1,34 @@
+ inherit_mode:
+   merge:
+     - Exclude
+
+ require:
+   - rubocop-performance
+   - rubocop-rails
+   - rubocop-rspec
+   - standard
+
+ inherit_gem:
+   standard: config/base.yml
+
+ inherit_from:
+   - rubocop/rspec.yml
+
+ AllCops:
+   NewCops: enable
+   SuggestExtensions: false
+   TargetRubyVersion: 2.7
+   TargetRailsVersion: 5.2
+
+ RSpec/FilePath:
+   Enabled: false
+
+ RSpec/VerifiedDoubles:
+   Exclude:
+     - spec/**/*_spec.rb
+
+ Style/SingleLineMethods:
+   Enabled: false
+
+ Style/EmptyMethod:
+   Enabled: false
data/Appraisals ADDED
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ # See compatibility table at https://www.fastruby.io/blog/ruby/rails/versions/compatibility-table.html
+
+ versions_map = {
+   "5.2" => %w[2.7],
+   "6.0" => %w[2.7],
+   "6.1" => %w[2.7 3.0],
+   "7.0" => %w[3.1],
+   "7.1" => %w[3.2]
+ }
+
+ current_ruby_version = RUBY_VERSION.split(".").first(2).join(".")
+
+ versions_map.each do |rails_version, ruby_versions|
+   ruby_versions.each do |ruby_version|
+     next if ruby_version != current_ruby_version
+
+     appraise "rails-#{rails_version}" do
+       gem "rails", "~> #{rails_version}.0"
+     end
+   end
+ end
data/CHANGELOG.md ADDED
@@ -0,0 +1,292 @@
+ # Change Log
+
+ All notable changes to this project will be documented in this file.
+
+ The format is based on [Keep a Changelog](http://keepachangelog.com/)
+ and this project adheres to [Semantic Versioning](http://semver.org/).
+
+ ## [Unreleased] - yyyy-mm-dd
+
+ ### Added
+
+ ### Changed
+
+ ### Fixed
+
+ ## [2.0.0] - 2024-01-30
+
+ ### Changed
+
+ - Remove `sbmt-dev`
+
+ ## [1.0.0] - 2024-01-12
+
+ ### Added
+
+ - Use mainstream karafka instead of a custom fork
+
+ ## [0.23.0] - 2024-01-12
+
+ ### Added
+
+ - Ability to override `kafka_options` for a topic
+
+ ## [0.22.0] - 2024-01-09
+
+ ### Changed
+
+ - Removed the useless `outbox_producer` param from the `InboxConsumer` class
+ - Removed useless log messages from the `InboxConsumer` class
+
+ ## [0.21.0] - 2024-01-09
+
+ ### Fixed
+
+ - Initialization of proxy consumer classes
+ - Consumer class name in Sentry's transaction name
+
+ ## [0.20.0] - 2024-01-09
+
+ ### Added
+
+ - New `metrics` config options:
+   - `metrics.port` for a metrics port that is different from the probes port
+   - `metrics.path` for a metrics path
+
+ ## [0.19.2] - 2023-10-18
+
+ ### Fixed
+
+ - Stub kafka_client to prevent calls to librdkafka: fixes a SEGFAULT in parallel tests
+
+ ## [0.19.1] - 2023-10-05
+
+ ### Fixed
+
+ - Disable karafka's `config.strict_topics_namespacing`
+
+ ## [0.19.0] - 2023-09-29
+
+ ### Added
+
+ - `outbox_producer` configuration flag
+
+ ## [0.18.4] - 2023-09-26
+
+ ### Fixed
+
+ - Use `Rails.application.executor.wrap` instead of manual AR connection clearing
+
+ ## [0.18.3] - 2023-09-15
+
+ ### Fixed
+
+ - Fix a broken outbox item generator call in the `kafka_consumer:inbox_consumer` generator
+
+ ## [0.18.2] - 2023-09-14
+
+ ### Fixed
+
+ - Properly extract the OpenTelemetry context from Kafka message headers
+
+ ## [0.18.1] - 2023-09-13
+
+ ### Fixed
+
+ - Port `v0.17.5` (properly clear `ActiveRecord` connections when the `skip_on_error` option is used) to master (v0.18)
+
+ ## [0.18.0] - 2023-09-11
+
+ ### Added
+
+ - OpenTelemetry tracing
+
+ ## [0.17.5] - 2023-09-13
+
+ ### Fixed
+
+ - Properly clear `ActiveRecord` connections when the `skip_on_error` option is used
+
+ ## [0.17.4] - 2023-09-05
+
+ ### Fixed
+
+ - Latency metrics in seconds instead of ms
+
+ ## [0.17.3] - 2023-08-31
+
+ ### Fixed
+
+ - Decreased sleep time on a DB error in a consumer
+
+ ## [0.17.2] - 2023-08-16
+
+ ### Fixed
+
+ - Fix `message.metadata.key` validation when the key is an empty string
+
+ ## [0.17.1] - 2023-08-08
+
+ ### Fixed
+
+ - Check Idempotency-Key for an empty string
+
+ ## [0.17.0] - 2023-08-07
+
+ ### Added
+
+ - Ability to configure the consumer group mapper in `kafka_consumer.yml` (needed for proper migration from existing karafka v2 based consumers)
+ - Ability to define/override inbox-item attributes in InboxConsumer
+
+ ### Fixed
+
+ - Report the `kafka_consumer_inbox_consumes` metric with the tag `status = skipped` (instead of `failure`) if `skip_on_error` is enabled on InboxConsumer
+
+ ## [0.16.0] - 2023-07-27
+
+ ### Added
+
+ - Additional tags (client, group_id, partition, topic) for the `kafka_consumer_inbox_consumes` metric
+
+ ## [0.15.0] - 2023-07-21
+
+ ### Added
+
+ - `kafka_consumer:install` generator
+ - `kafka_consumer:consumer_group` generator
+ - `kafka_consumer:consumer` generator
+
+ ## [0.14.2] - 2023-07-19
+
+ ### Changed
+
+ - `.clear_all_connections!` is now called for all DB roles
+
+ ## [0.14.1] - yyyy-mm-dd
+
+ ### Added
+
+ - Add the label `api` for the group `kafka_api`
+
+ ### Changed
+
+ - README improvements
+
+ ## [0.14.0] - 2023-07-06
+
+ ### Added
+
+ - Report the message payload and headers to Sentry if consumer detailed logging is enabled
+
+ ## [0.13.1] - 2023-07-05
+
+ ### Added
+
+ - `event_key` callback added to `Sbmt::KafkaConsumer::InboxConsumer`
+
+ ## [0.13.0] - 2023-06-20
+
+ ### Changed
+
+ - Logging/instrumentation improvements
+
+ ## [0.12.0] - 2023-06-20
+
+ ### Changed
+
+ - README improvements
+ - Update sbmt-waterdrop (via sbmt-karafka) to fix a karafka-rdkafka 0.13 compatibility issue
+
+ ## [0.11.0] - 2023-06-13
+
+ ### Added
+
+ - `skip_on_error` consumer option to skip message processing (and commit offsets) if an exception was raised
+
+ ## [0.10.0] - 2023-06-07
+
+ ### Added
+
+ - `SimpleLoggingConsumer`, which just consumes/logs messages; can be used for debug purposes
+
+ ## [0.9.0] - 2023-06-06
+
+ ### Changed
+
+ - Add a custom `ConsumerMapper` to be consistent with Karafka v1 consumer-group naming conventions (e.g. karafka v1 uses an underscored client-id in the consumer group name)
+ - Reuse `with_db_retry`: release the ActiveRecord connection every time after message processing, in case of connection-pool degradation
+
+ ## [0.8.0] - 2023-06-01
+
+ ### Changed
+
+ - Update sbmt-karafka to 2.1.3
+ - Remove DB retries logic, as `ActiveRecord::Base.clear_active_connections!` is already handled by karafka v2 after processing a batch
+ - Async metrics reporting for the `statistics.emitted` event to prevent rdkafka's main thread from hanging, see https://github.com/karafka/karafka/pull/1420/files
+ - Use the Rails logger by default
+ - Use `$stdout.sync = true` in the consumer server process to avoid STDOUT buffering issues in docker/k8s
+
+ ## [0.7.1] - 2023-05-31
+
+ ### Fixed
+
+ - DB error logging in the base consumer
+
+ ## [0.7.0] - 2023-05-30
+
+ ### Added
+
+ - Add `Sbmt::KafkaConsumer::Instrumentation::LivenessListener` and `Sbmt::KafkaConsumer::Instrumentation::ReadinessListener` listeners
+ - Add the `probes` option
+ - Add an `HttpHealthCheck` server with probe endpoints
+
+ ## [0.6.1] - 2023-05-30
+
+ ### Added
+
+ - Set the default `source: "KAFKA"` option when creating an `inbox_item` in `InboxConsumer`
+
+ ## [0.6.0] - 2023-05-29
+
+ ### Added
+
+ - Add the `manual_offset_management` topic option (defaults to true)
+ - Add the consumer `group_id` to inbox-item metadata (InboxConsumer)
+
+ ## [0.5.1] - 2023-05-25
+
+ ### Fixed
+
+ - Sentry tracing when the instrumentation event is not an exception
+ - Payload deserialization when `skip_decoding_error` is enabled
+
+ ## [0.5.0] - 2023-05-23
+
+ ### Changed
+
+ - Add a default deserializer (NullDeserializer) to the config
+ - Refactor logging
+
+ ## [0.4.0] - 2023-05-19
+
+ ### Changed
+
+ - Refactor the consumer groups config
+
+ ## [0.3.0] - 2023-05-19
+
+ ### Added
+
+ - Add timeout aliases to the kafka config
+ - README actualization
+
+ ## [0.2.0] - 2023-05-16
+
+ ### Added
+
+ - Implement consumer metrics
+
+ ## [Unreleased] - 2023-05-03
+
+ ### Added
+
+ - Base config loader via AnywayConfig
+
+ ### Changed
+
+ ### Fixed
+
+ ## [Unreleased] - 2023-04-26
+
+ ### Added
+
+ - BaseConsumer
+ - InboxConsumer
+ - Instrumentation listeners: sentry, logger, yabeda
+
+ ### Changed
+
+ ### Fixed
+
+ ## [Unreleased]
+
+ ## [0.1.0] - 2023-04-19
+
+ - Initial release
data/Gemfile ADDED
@@ -0,0 +1,5 @@
+ # frozen_string_literal: true
+
+ source "https://rubygems.org"
+
+ gemspec
data/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 SberMarket Tech
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,296 @@
+ [![Gem Version](https://badge.fury.io/rb/sbmt-kafka_consumer.svg)](https://badge.fury.io/rb/sbmt-kafka_consumer)
+ [![Build Status](https://github.com/SberMarket-Tech/sbmt-kafka_consumer/actions/workflows/tests.yml/badge.svg?branch=master)](https://github.com/SberMarket-Tech/sbmt-kafka_consumer/actions?query=branch%3Amaster)
+
+ # Sbmt-KafkaConsumer
+
+ This gem is used to consume Kafka messages. It is a wrapper over the [Karafka](https://github.com/karafka/karafka) gem, and is recommended for use as a transport with the [sbmt-outbox](https://github.com/SberMarket-Tech/sbmt-outbox) gem.
+
+ ## Installation
+
+ Add this line to your application's Gemfile:
+
+ ```ruby
+ gem "sbmt-kafka_consumer"
+ ```
+
+ And then execute:
+
+ ```bash
+ bundle install
+ ```
+
+ ## Auto configuration
+
+ We recommend going through the configuration and file creation process using the following Rails generators. Run any generator with the `--help` option to learn more about its arguments.
+
+ ### Initial configuration
+
+ If you are plugging the gem into your application for the first time, you can generate the initial configuration:
+
+ ```shell
+ rails g kafka_consumer:install
+ ```
+
+ As a result, the `config/kafka_consumer.yml` file will be created.
+
+ ### Consumer class
+
+ A consumer class can be generated with the following command (a sketch of the result is shown below):
+
+ ```shell
+ rails g kafka_consumer:consumer MaybeNamespaced::Name
+ ```
+
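+ For illustration, a minimal sketch of the kind of class this generator produces, assuming the `process_message` hook used by the bundled template (`lib/generators/kafka_consumer/consumer/templates/consumer.rb.erb`):
+
+ ```ruby
+ # app/consumers/maybe_namespaced/name_consumer.rb
+ # Illustrative sketch, not the literal template output.
+ class MaybeNamespaced::NameConsumer < Sbmt::KafkaConsumer::BaseConsumer
+   def process_message(_message)
+     # your message-handling logic goes here
+   end
+ end
+ ```
+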
+ ### Inbox consumer
+
+ To generate an Inbox consumer for use with the [sbmt-outbox](https://github.com/SberMarket-Tech/sbmt-outbox) gem, run the following command:
+
+ ```shell
+ rails g kafka_consumer:inbox_consumer MaybeNamespaced::Name some-consumer-group some-topic
+ ```
+
+ ## Manual configuration
+
+ The `config/kafka_consumer.yml` file is the main configuration file for the gem.
+
+ Example config with a full set of options:
+
+ ```yaml
+ default: &default
+   client_id: "my-app-consumer"
+   concurrency: 4 # max number of threads
+   # optional Karafka options
+   max_wait_time: 1
+   shutdown_timeout: 60
+   pause_timeout: 1
+   pause_max_timeout: 30
+   pause_with_exponential_backoff: true
+   auth:
+     kind: plaintext
+   kafka:
+     servers: "kafka:9092"
+     # optional Kafka options
+     heartbeat_timeout: 5
+     session_timeout: 30
+     reconnect_timeout: 3
+     connect_timeout: 5
+     socket_timeout: 30
+     kafka_options:
+       allow.auto.create.topics: true
+   probes: # optional section
+     port: 9394
+     endpoints:
+       readiness:
+         enabled: true
+         path: "/readiness"
+       liveness:
+         enabled: true
+         path: "/liveness"
+         timeout: 15
+   metrics: # optional section
+     port: 9090
+     path: "/metrics"
+   consumer_groups:
+     group_ref_id_1:
+       name: cg_with_single_topic
+       topics:
+         - name: topic_with_inbox_items
+           consumer:
+             klass: "Sbmt::KafkaConsumer::InboxConsumer"
+             init_attrs:
+               name: "test_items"
+               inbox_item: "TestInboxItem"
+           deserializer:
+             klass: "Sbmt::KafkaConsumer::Serialization::NullDeserializer"
+           kafka_options:
+             auto.offset.reset: latest
+     group_ref_id_2:
+       name: cg_with_multiple_topics
+       topics:
+         - name: topic_with_json_data
+           consumer:
+             klass: "SomeConsumer"
+           deserializer:
+             klass: "Sbmt::KafkaConsumer::Serialization::JsonDeserializer"
+         - name: topic_with_protobuf_data
+           consumer:
+             klass: "SomeConsumer"
+           deserializer:
+             klass: "Sbmt::KafkaConsumer::Serialization::ProtobufDeserializer"
+             init_attrs:
+               message_decoder_klass: "SomeDecoder"
+               skip_decoding_error: true
+
+ development:
+   <<: *default
+
+ test:
+   <<: *default
+   deliver: false
+
+ production:
+   <<: *default
+ ```
+
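+ Under the hood, this file is read by an AnywayConfig-based loader (see `lib/sbmt/kafka_consumer/config.rb` and the changelog entry "base config loader via AnywayConfig"). A hypothetical sketch of inspecting the loaded settings, assuming standard Anyway Config accessors that mirror the YAML keys above:
+
+ ```ruby
+ # Illustrative only: the exact accessor names depend on the Config class definition.
+ config = Sbmt::KafkaConsumer::Config.new
+ config.client_id   # => "my-app-consumer"
+ config.concurrency # => 4
+ ```
+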
+ ### `auth` config section
+
+ The gem supports two variants: plaintext (default) and SASL plaintext.
+
+ SASL plaintext:
+
+ ```yaml
+ auth:
+   kind: sasl_plaintext
+   sasl_username: user
+   sasl_password: pwd
+   sasl_mechanism: SCRAM-SHA-512
+ ```
+
+ ### `kafka` config section
+
+ The `servers` key is required and should be in rdkafka format: without the `kafka://` prefix, for example: `srv1:port1,srv2:port2,...`.
+
+ The `kafka_options` section may contain any [rdkafka option](https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md). `kafka_options` may also be redefined for each topic.
+
+ ### `consumer_groups` config section
+
+ ```yaml
+ consumer_groups:
+   # group id can be used when starting a consumer process (see the CLI section below)
+   group_id:
+     name: some_group_name # required
+     topics:
+       - name: some_topic_name # required
+         active: true # optional, default true
+         consumer:
+           klass: SomeConsumerClass # required, a consumer class inherited from Sbmt::KafkaConsumer::BaseConsumer
+           init_attrs: # optional, consumer class attributes (see below)
+             key: value
+         deserializer:
+           klass: SomeDeserializerClass # optional, default NullDeserializer, a deserializer class inherited from Sbmt::KafkaConsumer::Serialization::NullDeserializer
+           init_attrs: # optional, deserializer class attributes (see below)
+             key: value
+         kafka_options: # optional, redefines the root rdkafka options for this topic
+           auto.offset.reset: latest
+ ```
+
+ #### `consumer.init_attrs` options for `BaseConsumer`
+
+ - `skip_on_error` - optional, default false; skip errors raised during message processing and commit the offset to Kafka anyway
+
+ #### `consumer.init_attrs` options for `InboxConsumer`
+
+ - `inbox_item` - required, the name of the inbox item class
+ - `event_name` - optional, default nil; used when the inbox item keeps several event types
+ - `skip_on_error` - optional, default false; skip errors raised during message processing and commit the offset to Kafka anyway
+
+ #### `deserializer.init_attrs` options
+
+ - `skip_decoding_error` - don't raise an exception when the message cannot be deserialized (see the sketch below)
+
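+ A hypothetical custom deserializer sketch, assuming deserializers follow Karafka's convention of exposing `#call(message)` (the gem's own implementations live under `lib/sbmt/kafka_consumer/serialization/`):
+
+ ```ruby
+ # Illustrative only: the class name and the `skip_decoding_error` accessor are assumptions.
+ class MyJsonDeserializer < Sbmt::KafkaConsumer::Serialization::BaseDeserializer
+   def call(message)
+     JSON.parse(message.raw_payload)
+   rescue JSON::ParserError
+     raise unless skip_decoding_error
+   end
+ end
+ ```
+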
+ ### `probes` config section
+
+ In Kubernetes, probes are mechanisms used to assess the health of your application running within a container.
+
+ ```yaml
+ probes:
+   port: 9394 # optional, default 9394
+   endpoints:
+     liveness:
+       enabled: true # optional, default true
+       path: /liveness # optional, default "/liveness"
+       timeout: 10 # optional, default 10; timeout in seconds after which the group is considered dead
+     readiness:
+       enabled: true # optional, default true
+       path: /readiness/kafka_consumer # optional, default "/readiness/kafka_consumer"
+ ```
+
+ ### `metrics` config section
+
+ We use [Yabeda](https://github.com/yabeda-rb/yabeda) to collect [all kinds of metrics](./lib/sbmt/kafka_consumer/yabeda_configurer.rb).
+
+ ```yaml
+ metrics:
+   port: 9090 # optional, default is probes.port
+   path: /metrics # optional, default "/metrics"
+ ```
+
+ ### `Kafkafile`
+
+ You can create a `Kafkafile` in the root of your app to configure additional settings for your needs.
+
+ Example:
+
+ ```ruby
+ require_relative "config/environment"
+
+ # any extra application-specific setup goes here
+ ```
+
+ ## CLI
+
+ Run the following command to start a consumer server:
+
+ ```shell
+ kafka_consumer -g some_group_id_1 -g some_group_id_2 -c 5
+ ```
+
+ Where:
+ - `-g` - `group`, a consumer group id; if not specified, all groups from the config will be processed
+ - `-c` - `concurrency`, the number of threads; default is 4
+
+ ### `concurrency` argument
+
+ See [Concurrency and Multithreading](https://karafka.io/docs/Concurrency-and-multithreading/).
+
+ Don't forget to properly calculate and set the size of the ActiveRecord connection pool:
+ - each thread will utilize one DB connection from the pool
+ - an application can have monitoring threads which also use DB connections from the pool
+
+ For example, a server process running with `concurrency: 4` needs a pool of at least 4 connections, plus headroom for any monitoring threads.
+
+ Also pay attention to the number of server processes:
+ - `number_of_processes x concurrency` for topics with high data intensity can be equal to the number of partitions of the consumed topic
+ - `number_of_processes x concurrency` for topics with low data intensity can be less than the number of partitions of the consumed topic
+
+ ## Testing
+
+ To test your consumer with RSpec, please use [this shared context](./lib/sbmt/kafka_consumer/testing/shared_contexts/with_sbmt_karafka_consumer.rb):
+
+ ```ruby
+ require "sbmt/kafka_consumer/testing"
+
+ RSpec.describe OrderCreatedConsumer do
+   include_context "with sbmt karafka consumer"
+
+   # `payload` and `deserializer` are assumed to be defined with `let` in your spec
+   it "works" do
+     publish_to_sbmt_karafka(payload, deserializer: deserializer)
+     expect { consume_with_sbmt_karafka }.to change(Order, :count).by(1)
+   end
+ end
+ ```
+
+ ## Development
+
+ 1. Prepare the environment
+ ```shell
+ dip provision
+ ```
+
+ 2. Run tests
+ ```shell
+ dip rspec
+ ```
+
+ 3. Run the linter
+ ```shell
+ dip rubocop
+ ```
+
+ 4. Run the Kafka server
+ ```shell
+ dip up
+ ```
+
+ 5. Run the consumer server
+ ```shell
+ dip kafka-consumer
+ ```
data/Rakefile ADDED
@@ -0,0 +1,12 @@
+ # frozen_string_literal: true
+
+ require "bundler/gem_tasks"
+ require "rspec/core/rake_task"
+
+ RSpec::Core::RakeTask.new(:spec)
+
+ require "rubocop/rake_task"
+
+ RuboCop::RakeTask.new
+
+ task default: %i[spec rubocop]
data/config.ru ADDED
@@ -0,0 +1,9 @@
+ # frozen_string_literal: true
+
+ require "rubygems"
+ require "bundler"
+
+ Bundler.require :default, :development
+
+ Combustion.initialize! :all
+ run Combustion::Application