karafka 2.5.3 → 2.5.4.rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70) hide show
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +10 -0
  3. data/config/locales/errors.yml +14 -0
  4. data/karafka.gemspec +13 -2
  5. data/lib/karafka/admin/contracts/replication.rb +149 -0
  6. data/lib/karafka/admin/replication.rb +462 -0
  7. data/lib/karafka/admin.rb +47 -2
  8. data/lib/karafka/instrumentation/logger_listener.rb +0 -2
  9. data/lib/karafka/instrumentation/vendors/appsignal/metrics_listener.rb +4 -0
  10. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +31 -15
  11. data/lib/karafka/licenser.rb +1 -1
  12. data/lib/karafka/messages/messages.rb +32 -0
  13. data/lib/karafka/pro/cleaner/messages/messages.rb +1 -1
  14. data/lib/karafka/pro/processing/jobs_queue.rb +0 -2
  15. data/lib/karafka/pro/processing/strategies/dlq/default.rb +1 -1
  16. data/lib/karafka/pro/processing/strategies/vp/default.rb +1 -1
  17. data/lib/karafka/processing/strategies/dlq.rb +1 -1
  18. data/lib/karafka/routing/consumer_group.rb +19 -1
  19. data/lib/karafka/routing/subscription_group.rb +1 -1
  20. data/lib/karafka/routing/subscription_groups_builder.rb +17 -2
  21. data/lib/karafka/version.rb +1 -1
  22. data/lib/karafka.rb +0 -1
  23. metadata +3 -62
  24. data/.coditsu/ci.yml +0 -3
  25. data/.console_irbrc +0 -11
  26. data/.github/CODEOWNERS +0 -3
  27. data/.github/FUNDING.yml +0 -1
  28. data/.github/ISSUE_TEMPLATE/bug_report.md +0 -43
  29. data/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
  30. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +0 -296
  31. data/.github/workflows/ci_macos_arm64.yml +0 -151
  32. data/.github/workflows/push.yml +0 -35
  33. data/.github/workflows/trigger-wiki-refresh.yml +0 -30
  34. data/.github/workflows/verify-action-pins.yml +0 -16
  35. data/.gitignore +0 -69
  36. data/.rspec +0 -7
  37. data/.ruby-gemset +0 -1
  38. data/.ruby-version +0 -1
  39. data/.yard-lint.yml +0 -174
  40. data/CODE_OF_CONDUCT.md +0 -46
  41. data/CONTRIBUTING.md +0 -32
  42. data/Gemfile +0 -29
  43. data/Gemfile.lock +0 -178
  44. data/Rakefile +0 -4
  45. data/SECURITY.md +0 -23
  46. data/bin/benchmarks +0 -99
  47. data/bin/clean_kafka +0 -43
  48. data/bin/create_token +0 -22
  49. data/bin/integrations +0 -341
  50. data/bin/record_rss +0 -50
  51. data/bin/rspecs +0 -26
  52. data/bin/scenario +0 -29
  53. data/bin/stress_many +0 -13
  54. data/bin/stress_one +0 -13
  55. data/bin/verify_kafka_warnings +0 -36
  56. data/bin/verify_license_integrity +0 -37
  57. data/bin/verify_topics_naming +0 -27
  58. data/bin/wait_for_kafka +0 -24
  59. data/docker-compose.yml +0 -25
  60. data/examples/payloads/avro/.gitkeep +0 -0
  61. data/examples/payloads/json/sample_set_01/enrollment_event.json +0 -579
  62. data/examples/payloads/json/sample_set_01/ingestion_event.json +0 -30
  63. data/examples/payloads/json/sample_set_01/transaction_event.json +0 -17
  64. data/examples/payloads/json/sample_set_01/user_event.json +0 -11
  65. data/examples/payloads/json/sample_set_02/download.json +0 -191
  66. data/examples/payloads/json/sample_set_03/event_type_1.json +0 -18
  67. data/examples/payloads/json/sample_set_03/event_type_2.json +0 -263
  68. data/examples/payloads/json/sample_set_03/event_type_3.json +0 -41
  69. data/log/.gitkeep +0 -0
  70. data/renovate.json +0 -21
@@ -101,10 +101,8 @@ module Karafka
101
101
  end
102
102
 
103
103
  # Allows for explicit unlocking of locked queue of a group
104
- #
105
104
  # @param group_id [String] id of the group we want to unlock
106
105
  # @param lock_id [Object] unique id we want to use to identify our lock
107
- #
108
106
  def unlock_async(group_id, lock_id)
109
107
  @mutex.synchronize do
110
108
  if @locks[group_id].delete(lock_id)
@@ -81,7 +81,7 @@ module Karafka
81
81
  # @return [Array<Karafka::Messages::Message, Boolean>] message we may want to skip and
82
82
  # information on whether this message was from a marked offset or figured out via mom flow
83
83
  def find_skippable_message
84
- skippable_message = messages.find do |msg|
84
+ skippable_message = messages.raw.find do |msg|
85
85
  coordinator.marked? && msg.offset == seek_offset
86
86
  end
87
87
 
@@ -156,7 +156,7 @@ module Karafka
156
156
  return if collapsed?
157
157
 
158
158
  coordinator.virtual_offset_manager.register(
159
- messages.map(&:offset)
159
+ messages.raw.map(&:offset)
160
160
  )
161
161
  end
162
162
  end
@@ -94,7 +94,7 @@ module Karafka
94
94
  # @return [Array<Karafka::Messages::Message, Boolean>] message we may want to skip and
95
95
  # information on whether this message was from a marked offset or figured out via mom flow
96
96
  def find_skippable_message
97
- skippable_message = messages.find do |msg|
97
+ skippable_message = messages.raw.find do |msg|
98
98
  coordinator.marked? && msg.offset == seek_offset
99
99
  end
100
100
 
@@ -31,6 +31,9 @@ module Karafka
31
31
  # Initialize the subscription group so there's always a value for it, since even if not
32
32
  # defined directly, a subscription group will be created
33
33
  @current_subscription_group_details = { name: SubscriptionGroup.id }
34
+ # Track the base position for subscription groups to ensure stable positions when
35
+ # rebuilding. This is critical for static group membership in swarm mode
36
+ @subscription_groups_base_position = nil
34
37
  end
35
38
 
36
39
  # @return [Boolean] true if this consumer group should be active in our current process
@@ -42,6 +45,10 @@ module Karafka
42
45
  # @param name [String, Symbol] name of topic to which we want to subscribe
43
46
  # @return [Karafka::Routing::Topic] newly built topic instance
44
47
  def topic=(name, &)
48
+ # Clear memoized subscription groups since adding a topic requires rebuilding them
49
+ # This is critical for consumer group reopening across multiple draw calls
50
+ @subscription_groups = nil
51
+
45
52
  topic = Topic.new(name, self)
46
53
  @topics << Proxy.new(
47
54
  topic,
@@ -73,7 +80,18 @@ module Karafka
73
80
  # @return [Array<Routing::SubscriptionGroup>] all the subscription groups built based on
74
81
  # the consumer group topics
75
82
  def subscription_groups
76
- @subscription_groups ||= subscription_groups_builder.call(topics)
83
+ @subscription_groups ||= begin
84
+ result = subscription_groups_builder.call(
85
+ topics,
86
+ base_position: @subscription_groups_base_position
87
+ )
88
+
89
+ # Store the base position from the first subscription group for future rebuilds.
90
+ # This ensures stable positions for static group membership.
91
+ @subscription_groups_base_position ||= result.first&.position
92
+
93
+ result
94
+ end
77
95
  end
78
96
 
79
97
  # Hashed version of consumer group that can be used for validation purposes
@@ -14,7 +14,7 @@ module Karafka
14
14
  node: %i[swarm node]
15
15
  )
16
16
 
17
- attr_reader :id, :name, :topics, :kafka, :consumer_group
17
+ attr_reader :id, :name, :topics, :kafka, :consumer_group, :position
18
18
 
19
19
  # Lock for generating new ids safely
20
20
  ID_MUTEX = Mutex.new
@@ -31,16 +31,31 @@ module Karafka
31
31
 
32
32
  # @param topics [Karafka::Routing::Topics] all the topics based on which we want to build
33
33
  # subscription groups
34
+ # @param base_position [Integer, nil] optional starting position for subscription groups.
35
+ # When provided, positions will start from this value instead of continuing from the
36
+ # global counter. This is used when rebuilding subscription groups to maintain stable
37
+ # positions for static group membership.
34
38
  # @return [Array<SubscriptionGroup>] all subscription groups we need in separate threads
35
- def call(topics)
39
+ def call(topics, base_position: nil)
40
+ # If base_position is provided, use it for stable rebuilding (consumer group reopening).
41
+ # Otherwise continue from global counter for new subscription groups.
42
+ # We subtract 1 from base_position because position is incremented in the map block below.
43
+ # This ensures the first subscription group gets the correct base_position value.
44
+ use_base = !base_position.nil?
45
+ position = use_base ? base_position - 1 : @position
46
+
36
47
  topics
37
48
  .map { |topic| [checksum(topic), topic] }
38
49
  .group_by(&:first)
39
50
  .values
40
51
  .map { |value| value.map(&:last) }
41
52
  .flat_map { |value| expand(value) }
42
- .map { |grouped_topics| SubscriptionGroup.new(@position += 1, grouped_topics) }
53
+ .map { |grouped_topics| SubscriptionGroup.new(position += 1, grouped_topics) }
43
54
  .tap do |subscription_groups|
55
+ # Always ensure global counter is at least as high as the highest position used.
56
+ # This prevents position collisions when new consumer groups are created after
57
+ # existing ones are rebuilt with base_position.
58
+ @position = position if position > @position
44
59
  subscription_groups.each do |subscription_group|
45
60
  subscription_group.topics.each do |topic|
46
61
  topic.subscription_group = subscription_group
@@ -3,5 +3,5 @@
3
3
  # Main module namespace
4
4
  module Karafka
5
5
  # Current Karafka version
6
- VERSION = '2.5.3'
6
+ VERSION = '2.5.4.rc1'
7
7
  end
data/lib/karafka.rb CHANGED
@@ -11,7 +11,6 @@ require 'fileutils'
11
11
  require 'openssl'
12
12
  require 'optparse'
13
13
  require 'socket'
14
- require 'base64'
15
14
  require 'date'
16
15
  require 'singleton'
17
16
  require 'digest'
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: karafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.5.3
4
+ version: 2.5.4.rc1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Maciej Mensfeld
@@ -9,20 +9,6 @@ bindir: bin
9
9
  cert_chain: []
10
10
  date: 1980-01-02 00:00:00.000000000 Z
11
11
  dependencies:
12
- - !ruby/object:Gem::Dependency
13
- name: base64
14
- requirement: !ruby/object:Gem::Requirement
15
- requirements:
16
- - - "~>"
17
- - !ruby/object:Gem::Version
18
- version: '0.2'
19
- type: :runtime
20
- prerelease: false
21
- version_requirements: !ruby/object:Gem::Requirement
22
- requirements:
23
- - - "~>"
24
- - !ruby/object:Gem::Version
25
- version: '0.2'
26
12
  - !ruby/object:Gem::Dependency
27
13
  name: karafka-core
28
14
  requirement: !ruby/object:Gem::Requirement
@@ -103,60 +89,15 @@ executables:
103
89
  extensions: []
104
90
  extra_rdoc_files: []
105
91
  files:
106
- - ".coditsu/ci.yml"
107
- - ".console_irbrc"
108
- - ".github/CODEOWNERS"
109
- - ".github/FUNDING.yml"
110
- - ".github/ISSUE_TEMPLATE/bug_report.md"
111
- - ".github/ISSUE_TEMPLATE/feature_request.md"
112
- - ".github/workflows/ci_linux_ubuntu_x86_64_gnu.yml"
113
- - ".github/workflows/ci_macos_arm64.yml"
114
- - ".github/workflows/push.yml"
115
- - ".github/workflows/trigger-wiki-refresh.yml"
116
- - ".github/workflows/verify-action-pins.yml"
117
- - ".gitignore"
118
- - ".rspec"
119
- - ".ruby-gemset"
120
- - ".ruby-version"
121
- - ".yard-lint.yml"
122
92
  - CHANGELOG.md
123
- - CODE_OF_CONDUCT.md
124
- - CONTRIBUTING.md
125
- - Gemfile
126
- - Gemfile.lock
127
93
  - LICENSE
128
94
  - LICENSE-COMM
129
95
  - LICENSE-LGPL
130
96
  - README.md
131
- - Rakefile
132
- - SECURITY.md
133
- - bin/benchmarks
134
- - bin/clean_kafka
135
- - bin/create_token
136
- - bin/integrations
137
97
  - bin/karafka
138
- - bin/record_rss
139
- - bin/rspecs
140
- - bin/scenario
141
- - bin/stress_many
142
- - bin/stress_one
143
- - bin/verify_kafka_warnings
144
- - bin/verify_license_integrity
145
- - bin/verify_topics_naming
146
- - bin/wait_for_kafka
147
98
  - certs/karafka-pro.pem
148
99
  - config/locales/errors.yml
149
100
  - config/locales/pro_errors.yml
150
- - docker-compose.yml
151
- - examples/payloads/avro/.gitkeep
152
- - examples/payloads/json/sample_set_01/enrollment_event.json
153
- - examples/payloads/json/sample_set_01/ingestion_event.json
154
- - examples/payloads/json/sample_set_01/transaction_event.json
155
- - examples/payloads/json/sample_set_01/user_event.json
156
- - examples/payloads/json/sample_set_02/download.json
157
- - examples/payloads/json/sample_set_03/event_type_1.json
158
- - examples/payloads/json/sample_set_03/event_type_2.json
159
- - examples/payloads/json/sample_set_03/event_type_3.json
160
101
  - karafka.gemspec
161
102
  - lib/active_job/karafka.rb
162
103
  - lib/active_job/queue_adapters/karafka_adapter.rb
@@ -176,6 +117,8 @@ files:
176
117
  - lib/karafka/admin/configs/config.rb
177
118
  - lib/karafka/admin/configs/resource.rb
178
119
  - lib/karafka/admin/consumer_groups.rb
120
+ - lib/karafka/admin/contracts/replication.rb
121
+ - lib/karafka/admin/replication.rb
179
122
  - lib/karafka/admin/topics.rb
180
123
  - lib/karafka/app.rb
181
124
  - lib/karafka/base_consumer.rb
@@ -604,8 +547,6 @@ files:
604
547
  - lib/karafka/time_trackers/pause.rb
605
548
  - lib/karafka/time_trackers/poll.rb
606
549
  - lib/karafka/version.rb
607
- - log/.gitkeep
608
- - renovate.json
609
550
  homepage: https://karafka.io
610
551
  licenses:
611
552
  - LGPL-3.0-only
data/.coditsu/ci.yml DELETED
@@ -1,3 +0,0 @@
1
- repository_id: 'd4482d42-f6b5-44ba-a5e4-00989ac519ee'
2
- api_key: <%= ENV['CODITSU_API_KEY'] %>
3
- api_secret: <%= ENV['CODITSU_API_SECRET'] %>
data/.console_irbrc DELETED
@@ -1,11 +0,0 @@
1
- # irbrc for Karafka console
2
-
3
- IRB.conf[:AUTO_INDENT] = true
4
- IRB.conf[:SAVE_HISTORY] = 1000
5
- IRB.conf[:USE_READLINE] = true
6
- IRB.conf[:HISTORY_FILE] = ".irb-history"
7
- IRB.conf[:LOAD_MODULES] = [] unless IRB.conf.key?(:LOAD_MODULES)
8
-
9
- unless IRB.conf[:LOAD_MODULES].include?('irb/completion')
10
- IRB.conf[:LOAD_MODULES] << 'irb/completion'
11
- end
data/.github/CODEOWNERS DELETED
@@ -1,3 +0,0 @@
1
- /.github @mensfeld
2
- /.github/workflows/ @mensfeld
3
- /.github/actions/ @mensfeld
data/.github/FUNDING.yml DELETED
@@ -1 +0,0 @@
1
- custom: ['https://karafka.io/#become-pro']
@@ -1,43 +0,0 @@
1
- ---
2
- name: Bug Report
3
- about: Report an issue within the Karafka ecosystem you've discovered.
4
- ---
5
-
6
- To make this process smoother for everyone involved, please read the following information before filling out the template.
7
-
8
- Scope of the OSS Support
9
- ===========
10
-
11
- We do not provide OSS support for outdated versions of Karafka and its components.
12
-
13
- Please ensure that you are using a version that is still actively supported. We cannot assist with any no longer maintained versions unless you support us with our Pro offering (https://karafka.io/docs/Pro-Support/).
14
-
15
- We acknowledge that understanding the specifics of your application and its configuration can be essential for resolving certain issues. However, due to the extensive time and resources such analysis can require, this may fall beyond our Open Source Support scope.
16
-
17
- If Karafka or its components are critical to your infrastructure, we encourage you to consider our Pro Offering.
18
-
19
- By backing us up, you can gain direct assistance and ensure your use case receives the dedicated attention it deserves.
20
-
21
-
22
- Important Links to Read
23
- ===========
24
-
25
- Please take a moment to review the following resources before submitting your report:
26
-
27
- - Issue Reporting Guide: https://karafka.io/docs/Support/#issue-reporting-guide
28
- - Support Policy: https://karafka.io/docs/Support/
29
- - Versions, Lifecycle, and EOL: https://karafka.io/docs/Versions-Lifecycle-and-EOL/
30
-
31
-
32
- Bug Report Details
33
- ===========
34
-
35
- Please provide all the details per our Issue Reporting Guide: https://karafka.io/docs/Support/#issue-reporting-guide
36
-
37
- Failing to provide the required details may result in the issue being closed. Please include all necessary information to help us understand and resolve your issue effectively.
38
-
39
-
40
- Additional Context
41
- ===========
42
-
43
- Add any other context about the problem here.
@@ -1,20 +0,0 @@
1
- ---
2
- name: Feature Request
3
- about: Suggest new Karafka features or improvements to existing features.
4
- ---
5
-
6
- ## Is your feature request related to a problem? Please describe.
7
-
8
- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
9
-
10
- ## Describe the solution you'd like
11
-
12
- A clear and concise description of what you want to happen.
13
-
14
- ## Describe alternatives you've considered
15
-
16
- A clear and concise description of any alternative solutions or features you've considered.
17
-
18
- ## Additional context
19
-
20
- Add any other context or screenshots about the feature request here.
@@ -1,296 +0,0 @@
1
- name: CI Linux x86_64 GNU
2
-
3
- concurrency:
4
- group: ${{ github.workflow }}-${{ github.ref }}
5
- cancel-in-progress: true
6
-
7
- on:
8
- pull_request:
9
- branches: [ master ]
10
- schedule:
11
- - cron: '0 1 * * *'
12
-
13
- permissions:
14
- contents: read
15
-
16
- env:
17
- BUNDLE_RETRY: 6
18
- BUNDLE_JOBS: 4
19
-
20
- jobs:
21
- karafka-checksum:
22
- runs-on: ubuntu-latest
23
- timeout-minutes: 5
24
- strategy:
25
- fail-fast: false
26
- steps:
27
- - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
28
- with:
29
- fetch-depth: 0
30
- - name: Run Karafka license checksum verification
31
- env:
32
- KARAFKA_PRO_USERNAME: ${{ secrets.KARAFKA_PRO_USERNAME }}
33
- KARAFKA_PRO_PASSWORD: ${{ secrets.KARAFKA_PRO_PASSWORD }}
34
- KARAFKA_PRO_VERSION: ${{ secrets.KARAFKA_PRO_VERSION }}
35
- KARAFKA_PRO_LICENSE_CHECKSUM: ${{ secrets.KARAFKA_PRO_LICENSE_CHECKSUM }}
36
- run: bin/verify_license_integrity
37
-
38
- coditsu:
39
- runs-on: ubuntu-latest
40
- timeout-minutes: 5
41
- strategy:
42
- fail-fast: false
43
- steps:
44
- - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
45
- with:
46
- fetch-depth: 0
47
- - name: Download Coditsu script
48
- run: |
49
- curl -sSL https://api.coditsu.io/run/ci -o coditsu_script.sh
50
- chmod +x coditsu_script.sh
51
- - name: Verify Coditsu script checksum
52
- run: |
53
- EXPECTED_SHA256="0aecc5aa010f53fca264548a41467a2b0a1208d750ce1da3e98a217304cacbbc"
54
-
55
- ACTUAL_SHA256=$(sha256sum coditsu_script.sh | awk '{ print $1 }')
56
- if [ "$ACTUAL_SHA256" != "$EXPECTED_SHA256" ]; then
57
- echo "::error::Checksum verification failed. Expected $EXPECTED_SHA256 but got $ACTUAL_SHA256."
58
- exit 1
59
- fi
60
- - name: Run Coditsu
61
- run: ./coditsu_script.sh
62
-
63
- yard-lint:
64
- runs-on: ubuntu-latest
65
- timeout-minutes: 5
66
- strategy:
67
- fail-fast: false
68
- steps:
69
- - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
70
- with:
71
- fetch-depth: 0
72
- - name: Set up Ruby
73
- uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
74
- with:
75
- ruby-version: '3.4.7'
76
- bundler-cache: true
77
- - name: Run yard-lint
78
- run: bundle exec yard-lint lib/
79
-
80
- # We do not split RSpec specs to OSS and Pro like integrations because they do not overload
81
- # Kafka heavily, compute total coverage for specs and are fast enough
82
- specs:
83
- timeout-minutes: 15
84
- runs-on: ubuntu-latest
85
- env:
86
- BUNDLE_FORCE_RUBY_PLATFORM: ${{ matrix.force_ruby_platform }}
87
- strategy:
88
- fail-fast: false
89
- matrix:
90
- ruby:
91
- - '3.5.0-preview1'
92
- - '3.4'
93
- - '3.3'
94
- - '3.2'
95
- force_ruby_platform:
96
- - true
97
- - false
98
- include:
99
- - ruby: '3.4'
100
- coverage: 'true'
101
- steps:
102
- - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
103
- - name: Install package dependencies
104
- run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
105
-
106
- - name: Start Kafka with docker compose
107
- run: |
108
- docker compose up -d || (sleep 5 && docker compose up -d)
109
-
110
- - name: Remove Gemfile.lock for Ruby dev/preview versions
111
- if: contains(matrix.ruby, 'dev') || contains(matrix.ruby, 'preview') || contains(matrix.ruby, 'rc')
112
- run: rm -f Gemfile.lock
113
-
114
- - name: Set up Ruby
115
- uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
116
- with:
117
- ruby-version: ${{matrix.ruby}}
118
- bundler-cache: true
119
- bundler: 'latest'
120
-
121
- - name: Wait for Kafka
122
- run: |
123
- bundle exec bin/wait_for_kafka
124
-
125
- - name: Run all specs
126
- env:
127
- GITHUB_COVERAGE: ${{matrix.coverage}}
128
- run: bin/rspecs
129
-
130
- - name: Check Kafka logs for unexpected warnings
131
- run: bin/verify_kafka_warnings
132
-
133
- - name: Check test topics naming convention
134
- run: bundle exec bin/verify_topics_naming
135
-
136
- integrations_oss:
137
- timeout-minutes: 30
138
- runs-on: ubuntu-latest
139
- env:
140
- BUNDLE_FORCE_RUBY_PLATFORM: ${{ matrix.force_ruby_platform }}
141
- strategy:
142
- fail-fast: false
143
- matrix:
144
- ruby:
145
- - '3.5.0-preview1'
146
- - '3.4'
147
- - '3.3'
148
- - '3.2'
149
- force_ruby_platform:
150
- - true
151
- - false
152
- steps:
153
- - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
154
- - name: Install package dependencies
155
- run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
156
-
157
- - name: Remove libzstd-dev to check no supported compressions
158
- run: sudo apt-get -y remove libzstd-dev
159
-
160
- - name: Start Kafka with docker compose
161
- run: |
162
- docker compose up -d || (sleep 5 && docker compose up -d)
163
-
164
- - name: Set up Ruby
165
- uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
166
- with:
167
- # Do not use cache here as we run bundle install also later in some of the integration
168
- # tests and we need to be able to run it without cache
169
- #
170
- # We also want to check that librdkafka is compiling as expected on all versions of Ruby
171
- ruby-version: ${{matrix.ruby}}
172
- bundler: 'latest'
173
-
174
- - name: Install latest Bundler
175
- run: |
176
- gem install bundler --no-document
177
- gem update --system
178
- bundle config set without 'tools benchmarks docs'
179
-
180
- - name: Fix directory permissions for Bundler
181
- run: |
182
- chmod -R o-w /opt/hostedtoolcache/Ruby/3*/x64/lib/ruby/gems/3*/gems
183
- chmod +t /opt/hostedtoolcache/Ruby/3*/x64/lib/ruby/gems/3*/gems
184
-
185
- - name: Force Ruby platform for ffi gem on dev/preview Ruby
186
- if: contains(matrix.ruby, 'dev') || contains(matrix.ruby, 'preview') || contains(matrix.ruby, 'rc')
187
- run: bundle config set force_ruby_platform ffi
188
-
189
- - name: Bundle install
190
- run: |
191
- bundle config set without development
192
- bundle install --jobs 4 --retry 3
193
-
194
- - name: Wait for Kafka
195
- run: |
196
- bundle exec bin/wait_for_kafka
197
-
198
- - name: Run OSS integration tests
199
- run: bin/integrations --exclude '/pro'
200
-
201
- - name: Check Kafka logs for unexpected warnings
202
- run: bin/verify_kafka_warnings
203
-
204
- - name: Check test topics naming convention
205
- run: bundle exec bin/verify_topics_naming
206
-
207
- integrations_pro:
208
- timeout-minutes: 45
209
- runs-on: ubuntu-latest
210
- env:
211
- BUNDLE_FORCE_RUBY_PLATFORM: ${{ matrix.force_ruby_platform }}
212
- strategy:
213
- fail-fast: false
214
- matrix:
215
- ruby:
216
- - '3.5.0-preview1'
217
- - '3.4'
218
- - '3.3'
219
- - '3.2'
220
- force_ruby_platform:
221
- - true
222
- - false
223
- parallel_group:
224
- - '0'
225
- - '1'
226
- steps:
227
- - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
228
- - name: Install package dependencies
229
- run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
230
-
231
- - name: Start Kafka with docker compose
232
- run: |
233
- docker compose up -d || (sleep 5 && docker compose up -d)
234
-
235
- - name: Set up Ruby
236
- uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
237
- with:
238
- ruby-version: ${{matrix.ruby}}
239
- bundler: 'latest'
240
-
241
- - name: Install latest Bundler
242
- run: |
243
- gem install bundler --no-document
244
- gem update --system
245
- bundle config set without 'tools benchmarks docs'
246
-
247
- - name: Force Ruby platform for ffi gem on dev/preview Ruby
248
- if: contains(matrix.ruby, 'dev') || contains(matrix.ruby, 'preview') || contains(matrix.ruby, 'rc')
249
- run: bundle config set force_ruby_platform ffi
250
-
251
- - name: Bundle install
252
- run: |
253
- bundle config set without development
254
- bundle install --jobs 4 --retry 3
255
-
256
- - name: Wait for Kafka
257
- run: |
258
- bundle exec bin/wait_for_kafka
259
-
260
- - name: Run Pro integration tests
261
- env:
262
- KARAFKA_PRO_LICENSE_TOKEN: ${{ secrets.KARAFKA_PRO_LICENSE_TOKEN }}
263
- KARAFKA_PRO_USERNAME: ${{ secrets.KARAFKA_PRO_USERNAME }}
264
- KARAFKA_PRO_PASSWORD: ${{ secrets.KARAFKA_PRO_PASSWORD }}
265
- KARAFKA_PRO_VERSION: ${{ secrets.KARAFKA_PRO_VERSION }}
266
- KARAFKA_PRO_LICENSE_CHECKSUM: ${{ secrets.KARAFKA_PRO_LICENSE_CHECKSUM }}
267
- SPECS_SEED: ${{ github.run_id }}
268
- SPECS_GROUP: ${{ matrix.parallel_group }}
269
- run: |
270
- bin/integrations '/pro'
271
-
272
- - name: Check Kafka logs for unexpected warnings
273
- run: bin/verify_kafka_warnings
274
-
275
- - name: Check test topics naming convention
276
- run: bin/verify_topics_naming
277
-
278
- ci-success:
279
- name: CI Linux Ubuntu x86_64 GNU Success
280
- runs-on: ubuntu-latest
281
- if: always()
282
- needs:
283
- - karafka-checksum
284
- - coditsu
285
- - yard-lint
286
- - specs
287
- - integrations_oss
288
- - integrations_pro
289
- steps:
290
- - name: Check all jobs passed
291
- if: |
292
- contains(needs.*.result, 'failure') ||
293
- contains(needs.*.result, 'cancelled') ||
294
- contains(needs.*.result, 'skipped')
295
- run: exit 1
296
- - run: echo "All CI checks passed!"