karafka 2.5.3 → 2.5.4.rc1
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +10 -0
- data/config/locales/errors.yml +14 -0
- data/karafka.gemspec +13 -2
- data/lib/karafka/admin/contracts/replication.rb +149 -0
- data/lib/karafka/admin/replication.rb +462 -0
- data/lib/karafka/admin.rb +47 -2
- data/lib/karafka/instrumentation/logger_listener.rb +0 -2
- data/lib/karafka/instrumentation/vendors/appsignal/metrics_listener.rb +4 -0
- data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +31 -15
- data/lib/karafka/licenser.rb +1 -1
- data/lib/karafka/messages/messages.rb +32 -0
- data/lib/karafka/pro/cleaner/messages/messages.rb +1 -1
- data/lib/karafka/pro/processing/jobs_queue.rb +0 -2
- data/lib/karafka/pro/processing/strategies/dlq/default.rb +1 -1
- data/lib/karafka/pro/processing/strategies/vp/default.rb +1 -1
- data/lib/karafka/processing/strategies/dlq.rb +1 -1
- data/lib/karafka/routing/consumer_group.rb +19 -1
- data/lib/karafka/routing/subscription_group.rb +1 -1
- data/lib/karafka/routing/subscription_groups_builder.rb +17 -2
- data/lib/karafka/version.rb +1 -1
- data/lib/karafka.rb +0 -1
- metadata +3 -62
- data/.coditsu/ci.yml +0 -3
- data/.console_irbrc +0 -11
- data/.github/CODEOWNERS +0 -3
- data/.github/FUNDING.yml +0 -1
- data/.github/ISSUE_TEMPLATE/bug_report.md +0 -43
- data/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
- data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +0 -296
- data/.github/workflows/ci_macos_arm64.yml +0 -151
- data/.github/workflows/push.yml +0 -35
- data/.github/workflows/trigger-wiki-refresh.yml +0 -30
- data/.github/workflows/verify-action-pins.yml +0 -16
- data/.gitignore +0 -69
- data/.rspec +0 -7
- data/.ruby-gemset +0 -1
- data/.ruby-version +0 -1
- data/.yard-lint.yml +0 -174
- data/CODE_OF_CONDUCT.md +0 -46
- data/CONTRIBUTING.md +0 -32
- data/Gemfile +0 -29
- data/Gemfile.lock +0 -178
- data/Rakefile +0 -4
- data/SECURITY.md +0 -23
- data/bin/benchmarks +0 -99
- data/bin/clean_kafka +0 -43
- data/bin/create_token +0 -22
- data/bin/integrations +0 -341
- data/bin/record_rss +0 -50
- data/bin/rspecs +0 -26
- data/bin/scenario +0 -29
- data/bin/stress_many +0 -13
- data/bin/stress_one +0 -13
- data/bin/verify_kafka_warnings +0 -36
- data/bin/verify_license_integrity +0 -37
- data/bin/verify_topics_naming +0 -27
- data/bin/wait_for_kafka +0 -24
- data/docker-compose.yml +0 -25
- data/examples/payloads/avro/.gitkeep +0 -0
- data/examples/payloads/json/sample_set_01/enrollment_event.json +0 -579
- data/examples/payloads/json/sample_set_01/ingestion_event.json +0 -30
- data/examples/payloads/json/sample_set_01/transaction_event.json +0 -17
- data/examples/payloads/json/sample_set_01/user_event.json +0 -11
- data/examples/payloads/json/sample_set_02/download.json +0 -191
- data/examples/payloads/json/sample_set_03/event_type_1.json +0 -18
- data/examples/payloads/json/sample_set_03/event_type_2.json +0 -263
- data/examples/payloads/json/sample_set_03/event_type_3.json +0 -41
- data/log/.gitkeep +0 -0
- data/renovate.json +0 -21
data/Gemfile.lock
DELETED
@@ -1,178 +0,0 @@
-PATH
-  remote: .
-  specs:
-    karafka (2.5.3)
-      base64 (~> 0.2)
-      karafka-core (>= 2.5.6, < 2.6.0)
-      karafka-rdkafka (>= 0.23.1)
-      waterdrop (>= 2.8.14, < 3.0.0)
-      zeitwerk (~> 2.3)
-
-GEM
-  remote: https://rubygems.org/
-  specs:
-    activejob (8.1.1)
-      activesupport (= 8.1.1)
-      globalid (>= 0.3.6)
-    activesupport (8.1.1)
-      base64
-      bigdecimal
-      concurrent-ruby (~> 1.0, >= 1.3.1)
-      connection_pool (>= 2.2.5)
-      drb
-      i18n (>= 1.6, < 2)
-      json
-      logger (>= 1.4.2)
-      minitest (>= 5.1)
-      securerandom (>= 0.3)
-      tzinfo (~> 2.0, >= 2.0.5)
-      uri (>= 0.13.1)
-    base64 (0.3.0)
-    bigdecimal (3.3.1)
-    byebug (12.0.0)
-    concurrent-ruby (1.3.5)
-    connection_pool (2.5.4)
-    diff-lcs (1.6.2)
-    docile (1.4.1)
-    drb (2.2.3)
-    erubi (1.13.1)
-    et-orbi (1.4.0)
-      tzinfo
-    factory_bot (6.5.6)
-      activesupport (>= 6.1.0)
-    ffi (1.17.2)
-    ffi (1.17.2-aarch64-linux-gnu)
-    ffi (1.17.2-aarch64-linux-musl)
-    ffi (1.17.2-arm-linux-gnu)
-    ffi (1.17.2-arm-linux-musl)
-    ffi (1.17.2-arm64-darwin)
-    ffi (1.17.2-x86-linux-gnu)
-    ffi (1.17.2-x86-linux-musl)
-    ffi (1.17.2-x86_64-darwin)
-    ffi (1.17.2-x86_64-linux-gnu)
-    ffi (1.17.2-x86_64-linux-musl)
-    fugit (1.12.1)
-      et-orbi (~> 1.4)
-      raabro (~> 1.4)
-    globalid (1.3.0)
-      activesupport (>= 6.1)
-    i18n (1.14.7)
-      concurrent-ruby (~> 1.0)
-    json (2.15.2)
-    karafka-core (2.5.7)
-      karafka-rdkafka (>= 0.20.0)
-      logger (>= 1.6.0)
-    karafka-rdkafka (0.23.1)
-      ffi (~> 1.17.1)
-      json (> 2.0)
-      logger
-      mini_portile2 (~> 2.6)
-      rake (> 12)
-    karafka-rdkafka (0.23.1-aarch64-linux-gnu)
-      ffi (~> 1.17.1)
-      json (> 2.0)
-      logger
-      mini_portile2 (~> 2.6)
-      rake (> 12)
-    karafka-rdkafka (0.23.1-arm64-darwin)
-      ffi (~> 1.17.1)
-      json (> 2.0)
-      logger
-      mini_portile2 (~> 2.6)
-      rake (> 12)
-    karafka-rdkafka (0.23.1-x86_64-linux-gnu)
-      ffi (~> 1.17.1)
-      json (> 2.0)
-      logger
-      mini_portile2 (~> 2.6)
-      rake (> 12)
-    karafka-rdkafka (0.23.1-x86_64-linux-musl)
-      ffi (~> 1.17.1)
-      json (> 2.0)
-      logger
-      mini_portile2 (~> 2.6)
-      rake (> 12)
-    karafka-testing (2.5.4)
-      karafka (>= 2.5.0, < 2.6.0)
-      waterdrop (>= 2.8.0)
-    karafka-web (0.11.4)
-      erubi (~> 1.4)
-      karafka (>= 2.5.2, < 2.6.0)
-      karafka-core (>= 2.5.0, < 2.6.0)
-      roda (~> 3.68, >= 3.69)
-      tilt (~> 2.0)
-    logger (1.7.0)
-    mini_portile2 (2.8.9)
-    minitest (5.26.0)
-    ostruct (0.6.3)
-    raabro (1.4.0)
-    rack (3.2.3)
-    rake (13.3.1)
-    roda (3.97.0)
-      rack
-    rspec (3.13.2)
-      rspec-core (~> 3.13.0)
-      rspec-expectations (~> 3.13.0)
-      rspec-mocks (~> 3.13.0)
-    rspec-core (3.13.6)
-      rspec-support (~> 3.13.0)
-    rspec-expectations (3.13.5)
-      diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.13.0)
-    rspec-mocks (3.13.6)
-      diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.13.0)
-    rspec-support (3.13.6)
-    securerandom (0.4.1)
-    simplecov (0.22.0)
-      docile (~> 1.1)
-      simplecov-html (~> 0.11)
-      simplecov_json_formatter (~> 0.1)
-    simplecov-html (0.13.2)
-    simplecov_json_formatter (0.1.4)
-    stringio (3.1.8)
-    tilt (2.6.1)
-    tzinfo (2.0.6)
-      concurrent-ruby (~> 1.0)
-    uri (1.0.4)
-    warning (1.5.0)
-    waterdrop (2.8.14)
-      karafka-core (>= 2.4.9, < 3.0.0)
-      karafka-rdkafka (>= 0.23.1)
-      zeitwerk (~> 2.3)
-    yard (0.9.37)
-    yard-lint (1.2.3)
-      yard (~> 0.9)
-      zeitwerk (~> 2.6)
-    zeitwerk (2.7.3)
-
-PLATFORMS
-  aarch64-linux-gnu
-  aarch64-linux-musl
-  arm-linux-gnu
-  arm-linux-musl
-  arm64-darwin
-  ruby
-  x86-linux-gnu
-  x86-linux-musl
-  x86_64-darwin
-  x86_64-linux-gnu
-  x86_64-linux-musl
-
-DEPENDENCIES
-  activejob
-  byebug
-  factory_bot
-  fugit
-  karafka!
-  karafka-testing (>= 2.5.0)
-  karafka-web (>= 0.11.1)
-  ostruct
-  rspec
-  simplecov
-  stringio
-  warning
-  yard-lint
-
-BUNDLED WITH
-   2.7.1
data/Rakefile
DELETED
data/SECURITY.md
DELETED
@@ -1,23 +0,0 @@
-# Security Policy
-
-## Supported Versions
-
-Please refer to the Karafka [EOL documentation](https://karafka.io/docs/Versions-Lifecycle-and-EOL/) page for detailed information on which versions are actively supported with security updates.
-
-## Reporting a Vulnerability
-
-If you have identified a potential security vulnerability in our projects, we encourage you to report it immediately. We take all reports of security issues seriously and will work diligently to address them.
-
-To report a vulnerability, please send an email directly to contact@karafka.io.
-
-We understand the importance of addressing security vulnerabilities promptly. You can expect a reply from us within 2 working days of your report. This initial response will confirm receipt of your report.
-
-After acknowledging your report, we will:
-
-- Evaluate the reported vulnerability in the context of our project.
-- Provide you with regular updates on our progress.
-- Upon completing our assessment, we will inform you of the outcome. This includes whether the vulnerability will be accepted or declined for further action.
-
-Your report will be kept confidential and not disclosed to third parties without your consent, except as required by law.
-
-We appreciate your assistance in keeping our projects and their users safe by responsibly reporting vulnerabilities. Together, we can maintain a high standard of security for our community.
data/bin/benchmarks
DELETED
@@ -1,99 +0,0 @@
-#!/usr/bin/env ruby
-
-# Runner for running given benchmark cases
-# Some of the cases require pre-populated data and we populate this in places that need it
-# In other cases we generate this data in a background process, so the partitions data stream
-# is consistent and we don't end up consuming huge batches of a single partition.
-
-require 'open3'
-require 'pathname'
-
-$LOAD_PATH.unshift(File.dirname(__FILE__))
-$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..'))
-
-ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))
-
-BENCHMARK_TOPICS = {
-  'benchmarks_00_01' => 1,
-  'benchmarks_00_05' => 5,
-  'benchmarks_01_05' => 5,
-  'benchmarks_00_10' => 10
-}
-
-# Load all the benchmarks
-benchmarks = Dir[ROOT_PATH.join('spec/benchmarks/**/*.rb')]
-
-# If filter is provided, apply
-benchmarks.delete_if { |name| !name.include?(ARGV[0]) } if ARGV[0]
-
-raise ArgumentError, "No benchmarks with filter: #{ARGV[0]}" if benchmarks.empty?
-
-# We may skip seeding if we are running the benchmarks multiple times, then since we do not
-# commit offsets we can skip generating more data
-if ENV['SEED']
-  require 'spec/benchmarks_helper'
-
-  # We need to setup karafka here to have producer for data seeding
-  setup_karafka
-
-  # This takes some time but needs to run only once per benchmark session
-  puts 'Seeding benchmarks data...'
-
-  producer = Karafka::App.producer
-
-  # We make our data json compatible so we can also benchmark serialization
-  elements = Array.new(100_000) { { a: :b }.to_json }
-
-  topics = Karafka::Admin.cluster_info.topics.map { |details| details.fetch(:topic_name) }
-
-  BENCHMARK_TOPICS.each do |topic_name, partitions_count|
-    ::Karafka::Admin.delete_topic(topic_name) if topics.include?(topic_name)
-    ::Karafka::Admin.create_topic(topic_name, partitions_count, 1)
-  end
-
-  # We do not populate data of benchmarks_0_10 as we use it with life-stream data only
-  %w[
-    benchmarks_00_01
-    benchmarks_00_05
-  ].each do |topic_name|
-    partitions_count = topic_name.split('_').last.to_i
-
-    partitions_count.times do |partition|
-      puts "Seeding #{topic_name}:#{partition}"
-
-      elements.each_slice(10_000) do |data_slice|
-        data = data_slice.map do |data|
-          { topic: topic_name, payload: data, partition: partition }
-        end
-
-        producer.buffer_many(data)
-        producer.flush_sync
-      end
-    end
-  end
-end
-
-# Selects requested benchmarks and runs them one after another
-benchmarks.each do |benchmark_path|
-  puts "Running #{benchmark_path.gsub("#{ROOT_PATH}/spec/benchmarks/", '')}"
-
-  benchmark = "bundle exec ruby -r ./spec/benchmarks_helper.rb #{benchmark_path}"
-
-  Open3.popen3(benchmark) do |stdin, stdout, stderr, thread|
-    t1 = Thread.new do
-      while line = stdout.gets
-        puts(line)
-      end
-    rescue IOError
-    end
-
-    t2 = Thread.new do
-      while line = stderr.gets
-        puts(line)
-      end
-    rescue IOError
-    end
-
-    thread.join
-  end
-end
data/bin/clean_kafka
DELETED
@@ -1,43 +0,0 @@
-#!/usr/bin/env ruby
-
-# A script that removes most of the auto-generated Kafka topics with their data
-# Useful when having long-running Kafka instance that cannot be fully nuked after running specs
-
-# We use the same convention in other framework components (web, waterdrop), so it removes all of
-# them as well.
-
-require_relative '../spec/integrations_helper.rb'
-
-setup_karafka
-
-topics_for_removal = []
-
-Karafka::Admin.cluster_info.topics.each do |topic|
-  topic_name = topic[:topic_name]
-
-  next unless topic_name.start_with?('it-')
-
-  topics_for_removal << topic_name
-end
-
-THREADS_COUNT = 3
-QUEUE = SizedQueue.new(THREADS_COUNT)
-TOPICS_TO_REMOVAL_COUNT = topics_for_removal.size
-
-threads = Array.new(THREADS_COUNT) do
-  Thread.new do
-    while topic_name = QUEUE.pop
-      puts "Removing topic: #{topic_name} (#{topics_for_removal.count} left)"
-      Karafka::Admin.delete_topic(topic_name)
-    end
-  end
-end
-
-while topics_for_removal.size.positive?
-  topic_name = topics_for_removal.pop
-
-  QUEUE << topic_name
-end
-
-QUEUE.close
-threads.each(&:join)
data/bin/create_token
DELETED
@@ -1,22 +0,0 @@
-#!/usr/bin/env ruby
-
-require 'openssl'
-require 'base64'
-require 'json'
-require 'date'
-
-PRIVATE_KEY_LOCATION = File.join(Dir.home, '.ssh', 'karafka-pro', 'id_rsa')
-
-# Name of the entity that acquires the license
-ENTITY = ARGV[0]
-
-raise ArgumentError, 'Entity missing' if ENTITY.nil? || ENTITY.empty?
-
-pro_token_data = { entity: ENTITY }
-
-# This code uses my private key to generate a new token for Karafka Pro capabilities
-private_key = OpenSSL::PKey::RSA.new(File.read(PRIVATE_KEY_LOCATION))
-
-bin_key = private_key.private_encrypt(pro_token_data.to_json)
-
-puts Base64.encode64(bin_key)