karafka 2.5.3 → 2.5.4.rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +10 -0
  3. data/config/locales/errors.yml +14 -0
  4. data/karafka.gemspec +13 -2
  5. data/lib/karafka/admin/contracts/replication.rb +149 -0
  6. data/lib/karafka/admin/replication.rb +462 -0
  7. data/lib/karafka/admin.rb +47 -2
  8. data/lib/karafka/instrumentation/logger_listener.rb +0 -2
  9. data/lib/karafka/instrumentation/vendors/appsignal/metrics_listener.rb +4 -0
  10. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +31 -15
  11. data/lib/karafka/licenser.rb +1 -1
  12. data/lib/karafka/messages/messages.rb +32 -0
  13. data/lib/karafka/pro/cleaner/messages/messages.rb +1 -1
  14. data/lib/karafka/pro/processing/jobs_queue.rb +0 -2
  15. data/lib/karafka/pro/processing/strategies/dlq/default.rb +1 -1
  16. data/lib/karafka/pro/processing/strategies/vp/default.rb +1 -1
  17. data/lib/karafka/processing/strategies/dlq.rb +1 -1
  18. data/lib/karafka/routing/consumer_group.rb +19 -1
  19. data/lib/karafka/routing/subscription_group.rb +1 -1
  20. data/lib/karafka/routing/subscription_groups_builder.rb +17 -2
  21. data/lib/karafka/version.rb +1 -1
  22. data/lib/karafka.rb +0 -1
  23. metadata +3 -62
  24. data/.coditsu/ci.yml +0 -3
  25. data/.console_irbrc +0 -11
  26. data/.github/CODEOWNERS +0 -3
  27. data/.github/FUNDING.yml +0 -1
  28. data/.github/ISSUE_TEMPLATE/bug_report.md +0 -43
  29. data/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
  30. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +0 -296
  31. data/.github/workflows/ci_macos_arm64.yml +0 -151
  32. data/.github/workflows/push.yml +0 -35
  33. data/.github/workflows/trigger-wiki-refresh.yml +0 -30
  34. data/.github/workflows/verify-action-pins.yml +0 -16
  35. data/.gitignore +0 -69
  36. data/.rspec +0 -7
  37. data/.ruby-gemset +0 -1
  38. data/.ruby-version +0 -1
  39. data/.yard-lint.yml +0 -174
  40. data/CODE_OF_CONDUCT.md +0 -46
  41. data/CONTRIBUTING.md +0 -32
  42. data/Gemfile +0 -29
  43. data/Gemfile.lock +0 -178
  44. data/Rakefile +0 -4
  45. data/SECURITY.md +0 -23
  46. data/bin/benchmarks +0 -99
  47. data/bin/clean_kafka +0 -43
  48. data/bin/create_token +0 -22
  49. data/bin/integrations +0 -341
  50. data/bin/record_rss +0 -50
  51. data/bin/rspecs +0 -26
  52. data/bin/scenario +0 -29
  53. data/bin/stress_many +0 -13
  54. data/bin/stress_one +0 -13
  55. data/bin/verify_kafka_warnings +0 -36
  56. data/bin/verify_license_integrity +0 -37
  57. data/bin/verify_topics_naming +0 -27
  58. data/bin/wait_for_kafka +0 -24
  59. data/docker-compose.yml +0 -25
  60. data/examples/payloads/avro/.gitkeep +0 -0
  61. data/examples/payloads/json/sample_set_01/enrollment_event.json +0 -579
  62. data/examples/payloads/json/sample_set_01/ingestion_event.json +0 -30
  63. data/examples/payloads/json/sample_set_01/transaction_event.json +0 -17
  64. data/examples/payloads/json/sample_set_01/user_event.json +0 -11
  65. data/examples/payloads/json/sample_set_02/download.json +0 -191
  66. data/examples/payloads/json/sample_set_03/event_type_1.json +0 -18
  67. data/examples/payloads/json/sample_set_03/event_type_2.json +0 -263
  68. data/examples/payloads/json/sample_set_03/event_type_3.json +0 -41
  69. data/log/.gitkeep +0 -0
  70. data/renovate.json +0 -21
data/bin/integrations DELETED
@@ -1,341 +0,0 @@
- #!/usr/bin/env ruby
-
- # Runner to run integration specs in parallel
-
- # Part of the integration specs run linearly without bundler.
- # If we ran bundle exec when running this code, bundler would inject its own context
- # into them, messing things up heavily
- #
- # Types of specs:
- # - regular - can run in parallel, includes all the helpers
- # - pristine - cannot run in parallel, uses custom bundler but includes helpers
- # - poro - cannot run in parallel, uses custom bundler, does not include any helpers
- raise 'This code needs to be executed WITHOUT bundle exec' if Kernel.const_defined?(:Bundler)
-
- require 'open3'
- require 'fileutils'
- require 'pathname'
- require 'tmpdir'
- require 'etc'
-
- ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))
-
- # How many child processes with integration specs do we want to run in parallel
- # When the value is high, there's a problem with thread allocation on GitHub CI, that is why
- # we limit it. Locally we can run a lot of those, as many of them have sleeps and do not use a lot
- # of CPU. Locally we also cannot go beyond a certain limit due to how often and how many topics we
- # create in Kafka. With an overloaded system, we start getting timeouts.
- CONCURRENCY = ENV.key?('CI') ? 5 : Etc.nprocessors * 3
-
- # How many bytes do we want to keep from the stdout in the buffer for when we need to print it
- MAX_BUFFER_OUTPUT = 307_200
-
- # Abstraction around a single test scenario execution process
- class Scenario
-   # How long a scenario can run before we kill it
-   # This is a fail-safe just in case something would hang
-   MAX_RUN_TIME = 5 * 60 # 5 minutes tops
-
-   # There are rare cases where Karafka may force shutdown for some of the integration cases
-   # This includes exactly those
-   EXIT_CODES = {
-     default: [0],
-     'consumption/worker_critical_error_behaviour_spec.rb' => [0, 2].freeze,
-     'shutdown/on_hanging_jobs_and_a_shutdown_spec.rb' => [2].freeze,
-     'shutdown/on_hanging_on_shutdown_job_and_a_shutdown_spec.rb' => [2].freeze,
-     'shutdown/on_hanging_listener_and_shutdown_spec.rb' => [2].freeze,
-     'swarm/forceful_shutdown_of_hanging_spec.rb' => [2].freeze,
-     'swarm/with_blocking_at_exit_spec.rb' => [2].freeze,
-     # Segfault in the below spec can be expected because we pretty much force terminate hanging
-     # C stuff. This spec is still useful as it catches other things
-     'instrumentation/post_errors_instrumentation_error_spec.rb' => [1, 139].freeze,
-     'cli/declaratives/delete/existing_with_exit_code_spec.rb' => [2].freeze,
-     'cli/declaratives/create/new_with_exit_code_spec.rb' => [2].freeze,
-     'cli/declaratives/plan/when_changes_with_detailed_exit_code_spec.rb' => [2].freeze,
-     'cli/declaratives/align/incorrectly_spec.rb' => [1].freeze,
-     'setup/with_kip_848_protocol_incorrect_config_spec.rb' => [1].freeze
-   }.freeze
-
-   private_constant :MAX_RUN_TIME, :EXIT_CODES
-
-   attr_reader :index
-
-   # Creates a scenario instance and runs it in a background process
-   #
-   # @param path [String] path to the scenario file
-   def initialize(path, index)
-     @path = path
-     # First chunk of stdout (up to MAX_BUFFER_OUTPUT bytes)
-     @stdout_head = ''
-     # Last MAX_BUFFER_OUTPUT bytes of stdout
-     @stdout_tail = ''
-     # Assigns the index for parallel execution in the CI if requested
-     @index = index
-   end
-
-   # Starts running given scenario in a separate process
-   def start
-     @stdin, @stdout, @stderr, @wait_thr = Open3.popen3(init_and_build_cmd)
-     @started_at = current_time
-   end
-
-   # @return [String] integration spec name
-   def name
-     @path.gsub("#{ROOT_PATH}/spec/integrations/", '')
-   end
-
-   # @return [Symbol] type of spec
-   def type
-     scenario_dir = File.dirname(@path)
-
-     return :poro if scenario_dir.include?('_poro')
-     return :pristine if scenario_dir.include?('_pristine')
-
-     :regular
-   end
-
-   # @return [Boolean] any spec that is not a regular one should not run in parallel with others
-   def linear?
-     type != :regular
-   end
-
-   # @return [Boolean] did this scenario finish or is it still running
-   def finished?
-     # If the thread is running too long, kill it
-     if current_time - @started_at > MAX_RUN_TIME
-       begin
-         Process.kill('TERM', pid)
-       # It may finish right after we want to kill it, that's why we ignore this
-       rescue Errno::ESRCH
-       end
-     end
-
-     # We read it so it won't grow as we use our default logger that prints to both test.log and
-     # to stdout. Otherwise, after reaching the buffer size, it would hang
-     buffer = ''
-     @stdout.read_nonblock(MAX_BUFFER_OUTPUT, buffer, exception: false)
-     @stdout_head = buffer if @stdout_head.empty?
-     @stdout_tail << buffer
-     @stdout_tail = @stdout_tail[-MAX_BUFFER_OUTPUT..-1] || @stdout_tail
-
-     !@wait_thr.alive?
-   end
-
-   # @return [Boolean] did this scenario finish successfully or not
-   def success?
-     expected_exit_codes = EXIT_CODES[name] || EXIT_CODES[:default]
-
-     expected_exit_codes.include?(exit_code)
-   end
-
-   # @return [Integer] pid of the process of this scenario
-   def pid
-     @wait_thr.pid
-   end
-
-   # @return [Integer] exit code of the process running given scenario
-   def exit_code
-     # There may be no exit status if we killed the thread
-     @wait_thr.value&.exitstatus || 123
-   end
-
-   # @return [String] exit status of the process
-   def exit_status
-     @wait_thr.value.to_s
-   end
-
-   # Prints a status report when a scenario is finished and stdout if it failed
-   def report
-     if success?
-       print "\e[#{32}m#{'.'}\e[0m"
-     else
-       buffer = ''
-
-       @stderr.read_nonblock(MAX_BUFFER_OUTPUT, buffer, exception: false)
-
-       puts
-       puts "\e[#{31}m#{'[FAILED]'}\e[0m #{name}"
-       puts "Time taken: #{current_time - @started_at} seconds"
-       puts "Exit code: #{exit_code}"
-       puts "Exit status: #{exit_status}"
-       puts @stdout_head
-       puts '...'
-       puts @stdout_tail
-       puts buffer
-       puts
-     end
-   end
-
-   # @return [Float] number of seconds that a given spec took to run
-   def time_taken
-     @finished_at - @started_at
-   end
-
-   # Close all the files that are open, so they do not pile up
-   def close
-     @finished_at = current_time
-     @stdin.close
-     @stdout.close
-     @stderr.close
-   end
-
-   private
-
-   # Sets up a proper environment for a given spec to run and returns the run command
-   # @return [String] run command
-   def init_and_build_cmd
-     case type
-     when :poro
-       scenario_dir = File.dirname(@path)
-       # We copy the spec into a temp dir so as not to pollute the spec location with logs, etc
-       temp_dir = Dir.mktmpdir
-       file_name = File.basename(@path)
-
-       FileUtils.cp_r("#{scenario_dir}/.", temp_dir)
-
-       <<~CMD
-         cd #{temp_dir} &&
-         KARAFKA_GEM_DIR=#{ROOT_PATH} \
-         BUNDLE_AUTO_INSTALL=true \
-         PRISTINE_MODE=true \
-         bundle exec ruby #{file_name}
-       CMD
-     when :pristine
-       scenario_dir = File.dirname(@path)
-       # We copy the spec into a temp dir so as not to pollute the spec location with logs, etc
-       temp_dir = Dir.mktmpdir
-       file_name = File.basename(@path)
-
-       FileUtils.cp_r("#{scenario_dir}/.", temp_dir)
-
-       <<~CMD
-         cd #{temp_dir} &&
-         KARAFKA_GEM_DIR=#{ROOT_PATH} \
-         BUNDLE_AUTO_INSTALL=true \
-         PRISTINE_MODE=true \
-         bundle exec ruby -r #{ROOT_PATH}/spec/integrations_helper.rb #{file_name}
-       CMD
-     else
-       <<~CMD
-         KARAFKA_GEM_DIR=#{ROOT_PATH} \
-         bundle exec ruby -r ./spec/integrations_helper.rb #{@path}
-       CMD
-     end
-   end
-
-   # @return [Float] current machine time
-   def current_time
-     Process.clock_gettime(Process::CLOCK_MONOTONIC)
-   end
- end
-
- # Load all the specs
- specs = Dir[ROOT_PATH.join('spec/integrations/**/*_spec.rb')]
-
- FILTER_TYPE = ARGV[0] == '--exclude' ? 'exclude' : 'include'
-
- # Remove the exclude flag
- ARGV.shift if FILTER_TYPE == 'exclude'
-
- # If filters are provided, apply them
- # Allows providing several filters one after another and applies all of them
- ARGV.each do |filter|
-   specs.delete_if do |name|
-     case FILTER_TYPE
-     when 'include'
-       !name.include?(filter)
-     when 'exclude'
-       name.include?(filter)
-     else
-       raise 'Invalid filter type'
-     end
-   end
- end
-
- # Remove Rails 7.2 specs from Ruby < 3.1 because it requires 3.1
- # Remove Rails 8.0 specs from Ruby < 3.2 because it requires 3.2
- specs.delete_if do |spec|
-   next true if RUBY_VERSION < '3.1' && spec.include?('rails72')
-   next true if RUBY_VERSION < '3.2' && spec.include?('rails8')
-
-   false
- end
-
- # Randomize order
- seed = (ENV['SPECS_SEED'] || rand(0..10_000)).to_i
- group = (ENV['SPECS_GROUP'] || -1).to_i
- groups = (ENV['SPECS_GROUPS'] || 2).to_i
-
- puts "Random seed: #{seed}"
- puts "Group: #{group}"
- puts "Groups: #{groups}"
-
- scenarios = specs
-   .shuffle(random: Random.new(seed))
-   .map
-   .with_index { |integration, index| Scenario.new(integration, index % groups) }
-   .delete_if { |scenario| scenario.index != group && group != -1 }
-
- raise ArgumentError, "No integration specs with filters: #{ARGV.join(', ')}" if scenarios.empty?
-
- puts "Running #{scenarios.size} scenarios"
-
- regulars = scenarios.reject(&:linear?)
- linears = scenarios - regulars
-
- active_scenarios = []
- finished_scenarios = []
-
- while finished_scenarios.size < scenarios.size
-   # If we have space to run another scenario, we add it
-   if active_scenarios.size < CONCURRENCY
-     scenario = nil
-     # We can run only one linear scenario at a time due to concurrency issues within bundler
-     # Since they usually take longer than others, we try to run them as fast as possible when there
-     # is a slot
-     scenario = linears.pop unless active_scenarios.any?(&:linear?)
-     scenario ||= regulars.pop
-
-     if scenario
-       scenario.start
-       active_scenarios << scenario
-     end
-   end
-
-   active_scenarios.select(&:finished?).each do |exited|
-     scenario = active_scenarios.delete(exited)
-     scenario.report
-     scenario.close
-     finished_scenarios << scenario
-   end
-
-   sleep(0.1)
- end
-
- # Report longest scenarios
- puts
- puts "\nLongest scenarios:\n\n"
-
- finished_scenarios.sort_by(&:time_taken).reverse.first(10).each do |long_scenario|
-   puts "[#{'%6.2f' % long_scenario.time_taken}] #{long_scenario.name}"
- end
-
- failed_scenarios = finished_scenarios.reject(&:success?)
-
- if failed_scenarios.empty?
-   puts
- else
-   # Report once more on the failed jobs
-   # This will only list scenarios that failed without printing their stdout here.
-   puts
-   puts "\nFailed scenarios:\n\n"
-
-   failed_scenarios.each do |scenario|
-     puts "\e[#{31}m#{'[FAILED]'}\e[0m #{scenario.name}"
-   end
-
-   puts
-
-   # Exit with 1 if not all scenarios were successful
-   exit 1
- end
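
A usage sketch of the deleted runner, derived from its argument handling above (the filter strings are illustrative):

    # Run the whole suite; the runner must NOT be started via bundle exec
    bin/integrations

    # Keep only specs whose paths contain a filter (positional include filters)
    bin/integrations consumption shutdown

    # Exclude matching specs instead (--exclude must be the first argument)
    bin/integrations --exclude swarm

    # Reproduce a CI shard: fixed shuffle seed, run group 0 of 2
    SPECS_SEED=4242 SPECS_GROUP=0 SPECS_GROUPS=2 bin/integrations
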
data/bin/record_rss DELETED
@@ -1,50 +0,0 @@
- #!/bin/bash
-
- # This script monitors and records the Resident Set Size (RSS) of a process given its PID.
- # The RSS is logged every second to the specified output file until the process terminates.
- #
- # Usage:
- #   ./script_name.sh <PID> <OUTPUT_FILE>
- #
- # Arguments:
- #   <PID> - Process ID of the process you want to monitor.
- #   <OUTPUT_FILE> - Name of the file where RSS values will be logged.
- #
- # The script first checks if the correct number of arguments is provided.
- # It then verifies if the given PID exists. If it does, it starts recording the RSS.
- # For every iteration, the script fetches the current RSS of the process using the 'ps' command,
- # then appends the RSS value along with a timestamp to the output file.
- # This recording is done every second.
- # The loop stops if the process with the given PID terminates.
- # An informative message is printed out when recording starts and when it stops.
-
- # Check if the correct number of arguments is passed
- if [ "$#" -ne 2 ]; then
-   echo "Usage: $0 <PID> <OUTPUT_FILE>"
-   exit 1
- fi
-
- PID=$1
- OUTPUT_FILE=$2
-
- # Check if the given PID exists
- if ! kill -0 $PID 2>/dev/null; then
-   echo "Error: PID $PID does not exist."
-   exit 1
- fi
-
- # Start recording the RSS
- echo "Recording RSS for PID $PID every second to $OUTPUT_FILE..."
-
- while kill -0 $PID 2>/dev/null; do
-   RSS=$(ps -o rss= -p $PID)
-   if [ -z "$RSS" ]; then
-     echo "Error: Failed to get RSS for PID $PID."
-     exit 1
-   fi
-   TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S')
-   echo "$TIMESTAMP: $RSS KB" >> $OUTPUT_FILE
-   sleep 1
- done
-
- echo "Process $PID has terminated. Stopping recording."
data/bin/rspecs DELETED
@@ -1,26 +0,0 @@
- #!/usr/bin/env bash
-
- set -e
-
- # We remove old coverage because under heavy development and parallel spec execution, the
- # rspec results formatter tends to crash
- rm -rf ./coverage
-
- # Run only regular non-forking specs first
- SPECS_TYPE=regular bundle exec rspec \
-   --tag ~type:pro \
-   --tag ~mode:fork \
-   --exclude-pattern "**/pro/**/*_spec.rb" \
-   spec/lib/
-
- # Run forking specs; they need to run in isolation so they do not crash because of librdkafka
- SPECS_TYPE=regular bundle exec rspec \
-   --tag mode:fork \
-   --exclude-pattern "**/pro/**/*_spec.rb" \
-   spec/lib/
-
- # Run pro specs at the end
- SPECS_TYPE=pro bundle exec rspec \
-   --tag type:pro \
-   --tag ~mode:fork \
-   spec/lib/
data/bin/scenario DELETED
@@ -1,29 +0,0 @@
- #!/usr/bin/env ruby
-
- # Runner for non-parallel execution of a single scenario.
- # It prints all the info to stdout, etc., and basically replaces itself with the scenario execution.
- # It is useful when we work with a single spec and we need all the debug info
-
- raise 'This code needs to be executed WITHOUT bundle exec' if Kernel.const_defined?(:Bundler)
-
- require 'open3'
- require 'fileutils'
- require 'pathname'
- require 'tmpdir'
- require 'etc'
-
- ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))
-
- # Load all the specs
- specs = Dir[ROOT_PATH.join('spec/integrations/**/*.rb')]
-
- # If filters are provided, apply them
- # Allows providing several filters one after another and applies all of them
- ARGV.each do |filter|
-   specs.delete_if { |name| !name.include?(filter) }
- end
-
- raise ArgumentError, "No integration specs with filters: #{ARGV.join(', ')}" if specs.empty?
- raise ArgumentError, "Many specs found with filters: #{ARGV.join(', ')}" if specs.size != 1
-
- exec("bundle exec ruby -r #{ROOT_PATH}/spec/integrations_helper.rb #{specs[0]}")
data/bin/stress_many DELETED
@@ -1,13 +0,0 @@
- #!/bin/bash
-
- # Runs integration specs in an endless loop
- # This allows us to ensure (after long enough time) that the integration test suite is stable and
- # that there are no anomalies when running it for a long period of time
-
- set -e
-
- while :
- do
-   clear
-   bin/integrations $1
- done
data/bin/stress_one DELETED
@@ -1,13 +0,0 @@
- #!/bin/bash
-
- # Runs a single integration spec in an endless loop
- # This allows us to ensure (after long enough time) that the integration spec is stable and
- # that there are no anomalies when running it for a long period of time
-
- set -e
-
- while :
- do
-   clear
-   bin/scenario $1
- done
data/bin/verify_kafka_warnings DELETED
@@ -1,36 +0,0 @@
- #!/bin/bash
-
- # Checks Kafka logs for unsupported warning patterns
- # Only specified warnings are allowed, all others should trigger failure
-
- allowed_patterns=(
-   "Performing controller activation"
-   "registered with feature metadata.version"
-   "Replayed TopicRecord for"
-   "Replayed PartitionRecord for"
-   "Previous leader None and previous leader epoch"
-   "Creating new"
-   "Unloaded transaction metadata"
- )
-
- # Get all warnings
- warnings=$(docker logs --since=0 kafka | grep "] WARN ")
- exit_code=0
-
- while IFS= read -r line; do
-   allowed=0
-   for pattern in "${allowed_patterns[@]}"; do
-     if echo "$line" | grep -q "$pattern"; then
-       allowed=1
-       break
-     fi
-   done
-
-   if [ $allowed -eq 0 ]; then
-     echo "Unexpected warning found:"
-     echo "$line"
-     exit_code=1
-   fi
- done <<< "$warnings"
-
- exit $exit_code
data/bin/verify_license_integrity DELETED
@@ -1,37 +0,0 @@
- #!/usr/bin/env bash
-
- # This script verifies integrity of the Pro license
- # Run it before bundle install to ensure that what you are fetching is what you expect
- # Run it after bundle install to ensure that the local artifact was not compromised
-
- #!/usr/bin/env bash
-
- set -e
-
- if [ "$MODE" != "after" ]; then
-   # Check the remote license prior to bundle installing
-   curl \
-     --retry 5 \
-     --retry-delay 1 \
-     --fail \
-     -u $KARAFKA_PRO_USERNAME:$KARAFKA_PRO_PASSWORD \
-     https://gems.karafka.io/gems/karafka-license-$KARAFKA_PRO_VERSION.gem \
-     -o ./karafka-license.gem
- else
-   # Check the local cached one after bundle install
-   cache_path=`ruby -e 'puts "#{Gem.dir}/cache/"'`
-   cp "$cache_path/karafka-license-$KARAFKA_PRO_VERSION.gem" ./karafka-license.gem
- fi
-
- detected=`sha256sum ./karafka-license.gem | awk '{ print $1 }'`
-
- rm ./karafka-license.gem
-
- echo -n "Karafka Pro license artifact checksum verification result: "
-
- if [ "$detected" = "$KARAFKA_PRO_LICENSE_CHECKSUM" ]; then
-   echo "Success"
- else
-   echo -e "\033[0;31mFailure!\033[0m"
-   exit 1
- fi
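
A sketch of how this verifier is driven via the environment variables it reads; all variable names come from the script above, while the values are placeholders:

    # Before bundle install: download the license gem and compare its checksum
    export KARAFKA_PRO_USERNAME="user" KARAFKA_PRO_PASSWORD="pass"
    export KARAFKA_PRO_VERSION="2.5.0" KARAFKA_PRO_LICENSE_CHECKSUM="expected-sha256"
    bin/verify_license_integrity

    # After bundle install: verify the locally cached gem instead
    MODE=after bin/verify_license_integrity
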
data/bin/verify_topics_naming DELETED
@@ -1,27 +0,0 @@
- #!/usr/bin/env ruby
-
- # This script verifies that we do not create (except for a few needed exceptions) test topics
- # that do not start with the "it-" prefix, which is our standard.
- #
- # This ensures that we can clearly identify all test topics for removal in case of doing dev work
- # on a long-lived Kafka cluster without the option to fully reset it.
- #
- # It also ensures we have one convention that we can follow.
-
- require_relative '../spec/integrations_helper.rb'
-
- setup_karafka
-
- # Please note that topics starting with "__" are not included here by default. This is expected.
- invalid = Karafka::Admin
-   .cluster_info
-   .topics
-   .map { |topic| topic[:topic_name] }
-   .select { |topic| !topic.start_with?('it-') }
-   .select { |topic| topic.length <= 6 }
-
- invalid.each do |invalid_name|
-   puts "#{invalid_name} does not start with the \"it-\" prefix"
- end
-
- exit invalid.empty? ? 0 : 1
data/bin/wait_for_kafka DELETED
@@ -1,24 +0,0 @@
- #!/usr/bin/env ruby
-
- # Waits for Kafka to be ready
- # Useful in CI where Kafka needs to be fully started before we run any tests
-
- require 'karafka'
-
- Karafka::App.setup do |config|
-   config.kafka[:'bootstrap.servers'] = '127.0.0.1:9092'
- end
-
- 60.times do
-   begin
-     # Stop if we can connect to the cluster and get info
-     exit if Karafka::Admin.cluster_info
-   rescue Rdkafka::RdkafkaError
-     puts "Kafka not available, retrying..."
-     sleep(1)
-   end
- end
-
- puts 'Kafka not available!'
-
- exit 1
data/docker-compose.yml DELETED
@@ -1,25 +0,0 @@
- services:
-   kafka:
-     container_name: kafka
-     image: confluentinc/cp-kafka:8.1.0
-
-     ports:
-       - 9092:9092
-
-     environment:
-       CLUSTER_ID: kafka-docker-cluster-1
-       KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
-       KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
-       KAFKA_PROCESS_ROLES: broker,controller
-       KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
-       KAFKA_LISTENERS: PLAINTEXT://:9092,CONTROLLER://:9093
-       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
-       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://127.0.0.1:9092
-       KAFKA_BROKER_ID: 1
-       KAFKA_CONTROLLER_QUORUM_VOTERS: 1@127.0.0.1:9093
-       ALLOW_PLAINTEXT_LISTENER: 'yes'
-       KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
-       KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
-       KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
-       KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
-       KAFKA_AUTHORIZER_CLASS_NAME: org.apache.kafka.metadata.authorizer.StandardAuthorizer
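
Taken together with the deleted scripts above, a typical local bootstrap could look like this (sketch only):

    # Boot the single-node KRaft broker defined above
    docker compose up -d kafka

    # Block until the broker responds to cluster_info (gives up after ~60 attempts)
    bin/wait_for_kafka

    # Later, check broker logs for non-whitelisted warnings
    bin/verify_kafka_warnings
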