karafka 2.0.0 → 2.0.3
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +0 -8
- data/CHANGELOG.md +16 -1
- data/Gemfile.lock +1 -1
- data/README.md +0 -2
- data/docker-compose.yml +1 -30
- data/lib/karafka/admin.rb +57 -0
- data/lib/karafka/cli/info.rb +1 -1
- data/lib/karafka/cli/server.rb +2 -2
- data/lib/karafka/instrumentation/logger_listener.rb +6 -1
- data/lib/karafka/instrumentation/notifications.rb +1 -0
- data/lib/karafka/processing/worker.rb +5 -1
- data/lib/karafka/railtie.rb +15 -1
- data/lib/karafka/version.rb +1 -1
- data/lib/karafka.rb +10 -2
- data.tar.gz.sig +0 -0
- metadata +3 -3
- metadata.gz.sig +0 -0
- data/bin/wait_for_kafka +0 -20
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 16983e52af6623dbbb0b30b6e65d7579fc8795e137c71f25d24f36e248f45248
+  data.tar.gz: 66087ddd0ac20272b573f81dbea84e2296919ea1b1813f1fec01b3880ad0e170
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7be4a093508e22c77fc5473cdedcafb2e2eb0338745a58da1a9272e0183ea72b20e631809d4a0f970703a13ed660965d5942593395b288f2db67a4070c3ab472
+  data.tar.gz: 64d3d95fbb84eb2bf272e5927a2ffe55c8389e15cc9fb71a12a79d583def6eefbcf13d8986af1464bad83e86a54d111cc682165946298eda5016afcafb41cc3b
checksums.yaml.gz.sig
CHANGED
Binary file
data/.github/workflows/ci.yml
CHANGED
@@ -73,10 +73,6 @@ jobs:
           ruby-version: ${{matrix.ruby}}
           bundler-cache: true
 
-      - name: Ensure all needed Kafka topics are created and wait if not
-        run: |
-          bin/wait_for_kafka
-
       - name: Run all specs
         env:
           GITHUB_COVERAGE: ${{matrix.coverage}}
@@ -120,10 +116,6 @@ jobs:
           bundle config set without development
           bundle install
 
-      - name: Ensure all needed Kafka topics are created and wait if not
-        run: |
-          bin/wait_for_kafka
-
       - name: Run integration tests
         env:
           KARAFKA_PRO_LICENSE_TOKEN: ${{ secrets.KARAFKA_PRO_LICENSE_TOKEN }}
data/CHANGELOG.md
CHANGED
@@ -1,6 +1,21 @@
 # Karafka framework changelog
 
-## 2.0.
+## 2.0.3 (2022-08-09)
+- Update boot info on server startup.
+- Update `karafka info` with more descriptive Ruby version info.
+- Fix issue where when used with Rails in development, log would be too verbose.
+- Fix issue where Zeitwerk with Rails would not load Pro components despite license being present.
+
+## 2.0.2 (2022-08-07)
+- Bypass issue with Rails reload in development by releasing the connection (https://github.com/rails/rails/issues/44183).
+
+## 2.0.1 (2022-08-06)
+- Provide `Karafka::Admin` for creation and destruction of topics and fetching cluster info.
+- Update integration specs to always use one-time disposable topics.
+- Remove no longer needed `wait_for_kafka` script.
+- Add more integration specs for cover offset management upon errors.
+
+## 2.0.0 (2022-08-05)
 
 This changelog describes changes between `1.4` and `2.0`. Please refer to appropriate release notes for changes between particular `rc` releases.
 
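Two of the 2.0.1 entries above go together: the new `Karafka::Admin` module (added in `data/lib/karafka/admin.rb`, shown further down in this diff) is what lets the integration specs create one-time disposable topics instead of relying on the pre-provisioned list that used to live in `docker-compose.yml`. A minimal sketch of that pattern, assuming a booted Karafka app; the naming scheme and topic settings below are illustrative:

```ruby
require 'securerandom'

# A uniquely named, throwaway topic per run, so nothing needs to be pre-created
topic_name = "it-#{SecureRandom.hex(6)}"

# 2 partitions with a replication factor of 1 - purely illustrative values
Karafka::Admin.create_topic(topic_name, 2, 1)

# ... run the scenario against topic_name ...

Karafka::Admin.delete_topic(topic_name)
```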
data/Gemfile.lock
CHANGED
data/README.md
CHANGED
@@ -4,8 +4,6 @@
 [![Gem Version](https://badge.fury.io/rb/karafka.svg)](http://badge.fury.io/rb/karafka)
 [![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)
 
-**Note**: All of the documentation here refers to Karafka `2.0.0` or higher. If you are looking for the documentation for Karafka `1.4`, please click [here](https://github.com/karafka/wiki/tree/1.4).
-
 ## About Karafka
 
 Karafka is a Ruby and Rails multi-threaded efficient Kafka processing framework that:
data/docker-compose.yml
CHANGED
@@ -16,36 +16,7 @@ services:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
       KAFKA_CREATE_TOPICS:
-        "
-        integrations_01_02:2:1,\
-        integrations_02_02:2:1,\
-        integrations_03_02:2:1,\
-        integrations_04_02:2:1,\
-        integrations_05_02:2:1,\
-        integrations_06_02:2:1,\
-        integrations_07_02:2:1,\
-        integrations_08_02:2:1,\
-        integrations_09_02:2:1,\
-        integrations_10_02:2:1,\
-        integrations_11_02:2:1,\
-        integrations_12_02:2:1,\
-        integrations_13_02:2:1,\
-        integrations_14_02:2:1,\
-        integrations_15_02:2:1,\
-        integrations_16_02:2:1,\
-        integrations_17_02:2:1,\
-        integrations_18_02:2:1,\
-        integrations_19_02:2:1,\
-        integrations_20_02:2:1,\
-        integrations_21_02:2:1,\
-        integrations_00_03:3:1,\
-        integrations_01_03:3:1,\
-        integrations_02_03:3:1,\
-        integrations_03_03:3:1,\
-        integrations_04_03:3:1,\
-        integrations_00_10:10:1,\
-        integrations_01_10:10:1,\
-        benchmarks_00_01:1:1,\
+        "benchmarks_00_01:1:1,\
         benchmarks_00_05:5:1,\
         benchmarks_01_05:5:1,\
         benchmarks_00_10:10:1"
data/lib/karafka/admin.rb
ADDED
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Simple admin actions that we can perform via Karafka on our Kafka cluster
+  #
+  # @note It always initializes a new admin instance as we want to ensure it is always closed
+  #   Since admin actions are not performed that often, that should be ok.
+  #
+  # @note It always uses the primary defined cluster and does not support multi-cluster work.
+  #   If you need this, just replace the cluster info for the time you use this
+  module Admin
+    class << self
+      # Creates Kafka topic with given settings
+      #
+      # @param name [String] topic name
+      # @param partitions [Integer] number of partitions we expect
+      # @param replication_factor [Integer] number of replicas
+      # @param topic_config [Hash] topic config details as described here:
+      #   https://kafka.apache.org/documentation/#topicconfigs
+      def create_topic(name, partitions, replication_factor, topic_config = {})
+        with_admin do |admin|
+          admin
+            .create_topic(name, partitions, replication_factor, topic_config)
+            .wait
+        end
+      end
+
+      # Deleted a given topic
+      #
+      # @param name [String] topic name
+      def delete_topic(name)
+        with_admin do |admin|
+          admin
+            .delete_topic(name)
+            .wait
+        end
+      end
+
+      # @return [Rdkafka::Metadata] cluster metadata info
+      def cluster_info
+        with_admin do |admin|
+          Rdkafka::Metadata.new(admin.instance_variable_get('@native_kafka'))
+        end
+      end
+
+      private
+
+      # Creates admin instance and yields it. After usage it closes the admin instance
+      def with_admin
+        admin = ::Rdkafka::Config.new(Karafka::App.config.kafka).admin
+        result = yield(admin)
+        admin.close
+        result
+      end
+    end
+  end
+end
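A short usage sketch of the module above, assuming a booted Karafka app (since `with_admin` reads `Karafka::App.config.kafka`) and the usual `Rdkafka::Metadata` layout where topics are hashes keyed by `:topic_name`; the topic name and settings are illustrative:

```ruby
# Create a topic with an explicit per-topic configuration
# ('cleanup.policy' is a standard Kafka topic config key)
Karafka::Admin.create_topic('example_payments', 5, 1, 'cleanup.policy' => 'compact')

# Inspect the cluster - cluster_info returns an Rdkafka::Metadata instance
metadata = Karafka::Admin.cluster_info
metadata.topics.each { |topic| puts topic[:topic_name] }

# Remove the topic once it is no longer needed
Karafka::Admin.delete_topic('example_payments')
```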
data/lib/karafka/cli/info.rb
CHANGED
@@ -35,7 +35,7 @@ module Karafka
 
         [
           "Karafka version: #{Karafka::VERSION}#{postfix}",
-          "Ruby version: #{
+          "Ruby version: #{RUBY_DESCRIPTION}",
           "Rdkafka version: #{::Rdkafka::VERSION}",
           "Subscription groups count: #{Karafka::App.subscription_groups.size}",
           "Workers count: #{Karafka::App.config.concurrency}",
data/lib/karafka/cli/server.rb
CHANGED
@@ -33,11 +33,11 @@ module Karafka
 
         if Karafka.pro?
           Karafka.logger.info(
-            green('Thank you for
+            green('Thank you for using Karafka Pro!')
           )
         else
           Karafka.logger.info(
-            red('
+            red('Upgrade to Karafka Pro for more features and support: https://karafka.io')
           )
         end
       end
data/lib/karafka/instrumentation/logger_listener.rb
CHANGED
@@ -96,7 +96,12 @@ module Karafka
       #
       # @param _event [Dry::Events::Event] event details including payload
       def on_app_running(_event)
-        info
+        info "Running in #{RUBY_DESCRIPTION}"
+        info "Running Karafka #{Karafka::VERSION} server"
+
+        return if Karafka.pro?
+
+        info 'See LICENSE and the LGPL-3.0 for licensing details.'
       end
 
       # Logs info that we're going to stop the Karafka server.
data/lib/karafka/processing/worker.rb
CHANGED
@@ -46,8 +46,9 @@ module Karafka
       def process
         job = @jobs_queue.pop
 
+        instrument_details = { caller: self, job: job, jobs_queue: @jobs_queue }
+
         if job
-          instrument_details = { caller: self, job: job, jobs_queue: @jobs_queue }
 
           Karafka.monitor.instrument('worker.process', instrument_details)
 
@@ -82,6 +83,9 @@ module Karafka
       ensure
         # job can be nil when the queue is being closed
         @jobs_queue.complete(job) if job
+
+        # Always publish info, that we completed all the work despite its result
+        Karafka.monitor.instrument('worker.completed', instrument_details)
       end
     end
   end
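The `worker.completed` event introduced here is what the Railtie change below subscribes to in order to release ActiveRecord connections after each job. Application code can hook into it the same way; a minimal sketch, assuming the standard monitor subscription API (the payload keys `:job`, `:caller` and `:jobs_queue` come from the worker code above, and the logging is illustrative):

```ruby
Karafka::App.monitor.subscribe('worker.completed') do |event|
  job = event[:job]

  # job may be nil when the jobs queue is being closed on shutdown
  Karafka.logger.debug("Worker completed a #{job.class} job") if job
end
```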
data/lib/karafka/railtie.rb
CHANGED
@@ -48,9 +48,13 @@ if rails
       next unless Rails.env.development?
       next unless ENV.key?('KARAFKA_CLI')
 
+      logger = ActiveSupport::Logger.new($stdout)
+      # Inherit the logger level from Rails, otherwise would always run with the debug level
+      logger.level = Rails.logger.level
+
       Rails.logger.extend(
         ActiveSupport::Logger.broadcast(
-          ActiveSupport::Logger.new($stdout)
+          logger
         )
       )
     end
@@ -77,6 +81,16 @@ if rails
 
         Rails.application.reloader.reload!
       end
+
+      ::Karafka::App.monitor.subscribe('worker.completed') do
+        # Skip in case someone is using Rails without ActiveRecord
+        next unless Object.const_defined?('ActiveRecord::Base')
+
+        # Always release the connection after processing is done. Otherwise thread may hang
+        # blocking the reload and further processing
+        # @see https://github.com/rails/rails/issues/44183
+        ActiveRecord::Base.connection_pool.release_connection
+      end
     end
 
     initializer 'karafka.require_karafka_boot_file' do |app|
data/lib/karafka/version.rb
CHANGED
data/lib/karafka.rb
CHANGED
@@ -85,8 +85,16 @@ end
 loader = Zeitwerk::Loader.for_gem
 # Do not load Rails extensions by default, this will be handled by Railtie if they are needed
 loader.ignore(Karafka.gem_root.join('lib/active_job'))
-
-
+
+begin
+  require 'karafka-license'
+rescue LoadError
+  # Do not load pro components if we cannot load the license
+  # This is a preliminary check so autoload works as expected
+  # Later on the licenser will make sure to setup all the needed components anyhow
+  loader.ignore(Karafka.gem_root.join('lib/karafka/pro'))
+end
+
 # Do not load vendors instrumentation components. Those need to be required manually if needed
 loader.ignore(Karafka.gem_root.join('lib/karafka/instrumentation/vendors'))
 loader.setup
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka
 version: !ruby/object:Gem::Version
-  version: 2.0.0
+  version: 2.0.3
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -34,7 +34,7 @@ cert_chain:
   R2P11bWoCtr70BsccVrN8jEhzwXngMyI2gVt750Y+dbTu1KgRqZKp/ECe7ZzPzXj
   pIy9vHxTANKYVyI4qj8OrFdEM5BQNu8oQpL0iQ==
   -----END CERTIFICATE-----
-date: 2022-08-
+date: 2022-08-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: karafka-core
@@ -152,7 +152,6 @@ files:
 - bin/scenario
 - bin/stress_many
 - bin/stress_one
-- bin/wait_for_kafka
 - certs/karafka-pro.pem
 - certs/mensfeld.pem
 - config/errors.yml
@@ -166,6 +165,7 @@ files:
 - lib/karafka/active_job/job_extensions.rb
 - lib/karafka/active_job/job_options_contract.rb
 - lib/karafka/active_job/routing/extensions.rb
+- lib/karafka/admin.rb
 - lib/karafka/app.rb
 - lib/karafka/base_consumer.rb
 - lib/karafka/cli.rb
metadata.gz.sig
CHANGED
Binary file
data/bin/wait_for_kafka
DELETED
@@ -1,20 +0,0 @@
-#!/bin/bash
-
-# This script allows us to wait for Kafka docker to fully be ready
-# We consider it fully ready when all our topics that need to be created are created as expected
-
-KAFKA_NAME='karafka_20_kafka'
-ZOOKEEPER='zookeeper:2181'
-LIST_CMD="kafka-topics.sh --list --zookeeper $ZOOKEEPER"
-
-# Take the number of topics that we need to create prior to running anything
-TOPICS_COUNT=`cat docker-compose.yml | grep -E -i 'integrations_|benchmarks_' | wc -l`
-
-# And wait until all of them are created
-until (((`docker exec $KAFKA_NAME $LIST_CMD | wc -l`) >= $TOPICS_COUNT));
-do
-  echo "Waiting for Kafka to create all the needed topics..."
-  sleep 1
-done
-
-echo "All the needed topics created."