karafka 2.4.18 → 2.5.0.beta2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/CODEOWNERS +3 -0
- data/.github/workflows/ci.yml +58 -14
- data/.github/workflows/push.yml +36 -0
- data/.github/workflows/verify-action-pins.yml +16 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +60 -0
- data/Gemfile +2 -2
- data/Gemfile.lock +69 -50
- data/LICENSE-COMM +2 -2
- data/README.md +1 -1
- data/Rakefile +4 -0
- data/bin/clean_kafka +43 -0
- data/bin/integrations +19 -6
- data/bin/rspecs +15 -3
- data/bin/verify_kafka_warnings +35 -0
- data/bin/verify_topics_naming +27 -0
- data/config/locales/errors.yml +3 -0
- data/config/locales/pro_errors.yml +13 -2
- data/docker-compose.yml +1 -1
- data/examples/payloads/json/enrollment_event.json +579 -0
- data/examples/payloads/json/ingestion_event.json +30 -0
- data/examples/payloads/json/transaction_event.json +17 -0
- data/examples/payloads/json/user_event.json +11 -0
- data/karafka.gemspec +3 -8
- data/lib/karafka/active_job/current_attributes.rb +1 -1
- data/lib/karafka/admin/acl.rb +5 -1
- data/lib/karafka/admin/configs.rb +5 -1
- data/lib/karafka/admin.rb +69 -34
- data/lib/karafka/base_consumer.rb +17 -8
- data/lib/karafka/cli/base.rb +8 -2
- data/lib/karafka/cli/topics/align.rb +7 -4
- data/lib/karafka/cli/topics/base.rb +17 -0
- data/lib/karafka/cli/topics/create.rb +9 -7
- data/lib/karafka/cli/topics/delete.rb +4 -2
- data/lib/karafka/cli/topics/help.rb +39 -0
- data/lib/karafka/cli/topics/repartition.rb +4 -2
- data/lib/karafka/cli/topics.rb +10 -3
- data/lib/karafka/cli.rb +2 -0
- data/lib/karafka/connection/client.rb +30 -9
- data/lib/karafka/connection/listener.rb +24 -12
- data/lib/karafka/connection/messages_buffer.rb +1 -1
- data/lib/karafka/connection/proxy.rb +3 -0
- data/lib/karafka/constraints.rb +3 -3
- data/lib/karafka/contracts/config.rb +3 -0
- data/lib/karafka/contracts/topic.rb +1 -1
- data/lib/karafka/errors.rb +46 -2
- data/lib/karafka/helpers/async.rb +3 -1
- data/lib/karafka/instrumentation/callbacks/rebalance.rb +5 -1
- data/lib/karafka/instrumentation/logger_listener.rb +86 -23
- data/lib/karafka/instrumentation/proctitle_listener.rb +5 -1
- data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +2 -2
- data/lib/karafka/messages/builders/batch_metadata.rb +1 -1
- data/lib/karafka/pro/cleaner.rb +8 -0
- data/lib/karafka/pro/cli/parallel_segments/base.rb +89 -0
- data/lib/karafka/pro/cli/parallel_segments/collapse.rb +164 -0
- data/lib/karafka/pro/cli/parallel_segments/distribute.rb +164 -0
- data/lib/karafka/pro/cli/parallel_segments.rb +60 -0
- data/lib/karafka/pro/connection/manager.rb +5 -8
- data/lib/karafka/pro/encryption.rb +8 -0
- data/lib/karafka/pro/instrumentation/performance_tracker.rb +1 -1
- data/lib/karafka/pro/iterator/expander.rb +5 -3
- data/lib/karafka/pro/iterator/tpl_builder.rb +23 -0
- data/lib/karafka/pro/loader.rb +10 -0
- data/lib/karafka/pro/processing/coordinator.rb +4 -1
- data/lib/karafka/pro/processing/coordinators/errors_tracker.rb +27 -3
- data/lib/karafka/pro/processing/coordinators/filters_applier.rb +11 -0
- data/lib/karafka/pro/processing/filters/base.rb +10 -2
- data/lib/karafka/pro/processing/filters/expirer.rb +5 -0
- data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +2 -2
- data/lib/karafka/pro/processing/filters/virtual_limiter.rb +5 -0
- data/lib/karafka/pro/processing/parallel_segments/filters/base.rb +73 -0
- data/lib/karafka/pro/processing/parallel_segments/filters/default.rb +85 -0
- data/lib/karafka/pro/processing/parallel_segments/filters/mom.rb +66 -0
- data/lib/karafka/pro/processing/partitioner.rb +1 -13
- data/lib/karafka/pro/processing/piping/consumer.rb +13 -13
- data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb +1 -1
- data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb +1 -1
- data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb +1 -1
- data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb +1 -1
- data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb +1 -1
- data/lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb +1 -1
- data/lib/karafka/pro/processing/strategies/default.rb +36 -8
- data/lib/karafka/pro/processing/strategies/dlq/default.rb +14 -10
- data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb +1 -1
- data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +1 -1
- data/lib/karafka/pro/processing/strategies/dlq/lrj.rb +3 -1
- data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +1 -1
- data/lib/karafka/pro/processing/strategies/ftr/default.rb +1 -1
- data/lib/karafka/pro/processing/strategies/lrj/default.rb +4 -1
- data/lib/karafka/pro/processing/strategies/lrj/ftr.rb +1 -1
- data/lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb +1 -1
- data/lib/karafka/pro/processing/strategies/lrj/mom.rb +1 -1
- data/lib/karafka/pro/processing/virtual_partitions/distributors/balanced.rb +50 -0
- data/lib/karafka/pro/processing/virtual_partitions/distributors/base.rb +29 -0
- data/lib/karafka/pro/processing/virtual_partitions/distributors/consistent.rb +27 -0
- data/lib/karafka/pro/recurring_tasks/contracts/config.rb +8 -4
- data/lib/karafka/pro/recurring_tasks/dispatcher.rb +3 -3
- data/lib/karafka/pro/recurring_tasks/setup/config.rb +7 -2
- data/lib/karafka/pro/recurring_tasks.rb +13 -0
- data/lib/karafka/pro/routing/features/dead_letter_queue/topic.rb +1 -1
- data/lib/karafka/pro/routing/features/multiplexing/config.rb +1 -0
- data/lib/karafka/pro/routing/features/multiplexing/contracts/topic.rb +17 -0
- data/lib/karafka/pro/routing/features/multiplexing/proxy.rb +5 -2
- data/lib/karafka/pro/routing/features/multiplexing/subscription_group.rb +8 -1
- data/lib/karafka/pro/routing/features/parallel_segments/builder.rb +47 -0
- data/lib/karafka/pro/routing/features/parallel_segments/config.rb +27 -0
- data/lib/karafka/pro/routing/features/parallel_segments/consumer_group.rb +83 -0
- data/lib/karafka/pro/routing/features/parallel_segments/contracts/consumer_group.rb +49 -0
- data/lib/karafka/pro/routing/features/parallel_segments/topic.rb +43 -0
- data/lib/karafka/pro/routing/features/parallel_segments.rb +24 -0
- data/lib/karafka/pro/routing/features/patterns/pattern.rb +1 -1
- data/lib/karafka/pro/routing/features/recurring_tasks/builder.rb +2 -2
- data/lib/karafka/pro/routing/features/scheduled_messages/builder.rb +10 -6
- data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +20 -2
- data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +1 -0
- data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +8 -2
- data/lib/karafka/pro/scheduled_messages/consumer.rb +19 -21
- data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +9 -6
- data/lib/karafka/pro/scheduled_messages/deserializers/headers.rb +7 -1
- data/lib/karafka/pro/scheduled_messages/max_epoch.rb +15 -6
- data/lib/karafka/pro/scheduled_messages.rb +13 -0
- data/lib/karafka/processing/coordinators_buffer.rb +1 -0
- data/lib/karafka/processing/strategies/default.rb +4 -4
- data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +1 -0
- data/lib/karafka/routing/subscription_group.rb +1 -1
- data/lib/karafka/runner.rb +7 -1
- data/lib/karafka/server.rb +19 -19
- data/lib/karafka/setup/attributes_map.rb +2 -0
- data/lib/karafka/setup/config.rb +22 -1
- data/lib/karafka/setup/defaults_injector.rb +26 -1
- data/lib/karafka/status.rb +6 -1
- data/lib/karafka/swarm/node.rb +31 -0
- data/lib/karafka/swarm/supervisor.rb +4 -0
- data/lib/karafka/templates/karafka.rb.erb +14 -1
- data/lib/karafka/version.rb +1 -1
- data/lib/karafka.rb +17 -9
- data/renovate.json +14 -2
- metadata +40 -40
- checksums.yaml.gz.sig +0 -0
- data/certs/cert.pem +0 -26
- data.tar.gz.sig +0 -0
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: bbcaf396d7f2eff35ec3b59c96ffe7dd880f1f07294aa28bb623e95ce328e3e9
+  data.tar.gz: 410b037a79abbbc82fbd4b540dcb26378dca396e408f68280325e354ae2e275b
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: dcd79f3bda653b74d95938d440ccfe93d8d2cdc214e02de31d9ad56f738b554699bd1991d83dd1c9c7090e1c3a63669f8837677fef77efc22df4804bc16f25a0
+  data.tar.gz: dbca53f433a0e13ec7582f9ee1c6da15bf38678a1019a3a36e361e7bbbae2c5e5777bf44e61330692b5f38c4fa011dd9e3950027925767908f617df02e1e8219
data/.github/CODEOWNERS
ADDED
data/.github/workflows/ci.yml
CHANGED
@@ -6,10 +6,15 @@ concurrency:
 
 on:
   pull_request:
+    branches: [ main, master ]
   push:
+    branches: [ main, master ]
   schedule:
     - cron: '0 1 * * *'
 
+permissions:
+  contents: read
+
 env:
   BUNDLE_RETRY: 6
   BUNDLE_JOBS: 4
@@ -17,15 +22,16 @@ env:
 jobs:
   diffend:
     runs-on: ubuntu-latest
+    timeout-minutes: 5
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           fetch-depth: 0
 
       - name: Set up Ruby
-        uses: ruby/setup-ruby@v1
+        uses: ruby/setup-ruby@bb0f760b6c925183520ee0bcc9c4a432a7c8c3c6 # v1.241.0
         with:
           ruby-version: 3.4
           bundler-cache: true
@@ -38,10 +44,11 @@ jobs:
 
   karafka-checksum:
     runs-on: ubuntu-latest
+    timeout-minutes: 5
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
       - name: Run Karafka license checksum verification
@@ -54,19 +61,33 @@
 
   coditsu:
     runs-on: ubuntu-latest
+    timeout-minutes: 5
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
+      - name: Download Coditsu script
+        run: |
+          curl -sSL https://api.coditsu.io/run/ci -o coditsu_script.sh
+          chmod +x coditsu_script.sh
+      - name: Verify Coditsu script checksum
+        run: |
+          EXPECTED_SHA256="0aecc5aa010f53fca264548a41467a2b0a1208d750ce1da3e98a217304cacbbc"
+
+          ACTUAL_SHA256=$(sha256sum coditsu_script.sh | awk '{ print $1 }')
+          if [ "$ACTUAL_SHA256" != "$EXPECTED_SHA256" ]; then
+            echo "::error::Checksum verification failed. Expected $EXPECTED_SHA256 but got $ACTUAL_SHA256."
+            exit 1
+          fi
       - name: Run Coditsu
-        run:
+        run: ./coditsu_script.sh
 
   # We do not split RSpec specs to OSS and Pro like integrations because they do not overload
   # Kafka heavily, compute total coverage for specs and are fast enough
   specs:
-    timeout-minutes:
+    timeout-minutes: 15
     runs-on: ubuntu-latest
     needs: diffend
     strategy:
@@ -81,7 +102,7 @@
           - ruby: '3.4'
             coverage: 'true'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Install package dependencies
         run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
 
@@ -97,7 +118,7 @@
         run: rm -f Gemfile.lock
 
       - name: Set up Ruby
-        uses: ruby/setup-ruby@v1
+        uses: ruby/setup-ruby@bb0f760b6c925183520ee0bcc9c4a432a7c8c3c6 # v1.241.0
         with:
           ruby-version: ${{matrix.ruby}}
           bundler-cache: true
@@ -112,8 +133,14 @@
           GITHUB_COVERAGE: ${{matrix.coverage}}
         run: bin/rspecs
 
+      - name: Check Kafka logs for unexpected warnings
+        run: bin/verify_kafka_warnings
+
+      - name: Check test topics naming convention
+        run: bin/verify_topics_naming
+
   integrations_oss:
-    timeout-minutes:
+    timeout-minutes: 30
     runs-on: ubuntu-latest
     needs: diffend
     strategy:
@@ -125,7 +152,7 @@
           - '3.2'
           - '3.1'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Install package dependencies
         run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
 
@@ -137,7 +164,7 @@
           docker compose up -d || (sleep 5 && docker compose up -d)
 
       - name: Set up Ruby
-        uses: ruby/setup-ruby@v1
+        uses: ruby/setup-ruby@bb0f760b6c925183520ee0bcc9c4a432a7c8c3c6 # v1.241.0
         with:
           # Do not use cache here as we run bundle install also later in some of the integration
           # tests and we need to be able to run it without cache
@@ -170,6 +197,12 @@
       - name: Run OSS integration tests
         run: bin/integrations --exclude '/pro'
 
+      - name: Check Kafka logs for unexpected warnings
+        run: bin/verify_kafka_warnings
+
+      - name: Check test topics naming convention
+        run: bin/verify_topics_naming
+
   integrations_pro:
     timeout-minutes: 45
     runs-on: ubuntu-latest
@@ -182,8 +215,11 @@
           - '3.3'
          - '3.2'
          - '3.1'
+        parallel_group:
+          - '0'
+          - '1'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Install package dependencies
         run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
 
@@ -192,7 +228,7 @@
           docker compose up -d || (sleep 5 && docker compose up -d)
 
       - name: Set up Ruby
-        uses: ruby/setup-ruby@v1
+        uses: ruby/setup-ruby@bb0f760b6c925183520ee0bcc9c4a432a7c8c3c6 # v1.241.0
         with:
           ruby-version: ${{matrix.ruby}}
           bundler: 'latest'
@@ -220,5 +256,13 @@
           KARAFKA_PRO_PASSWORD: ${{ secrets.KARAFKA_PRO_PASSWORD }}
           KARAFKA_PRO_VERSION: ${{ secrets.KARAFKA_PRO_VERSION }}
           KARAFKA_PRO_LICENSE_CHECKSUM: ${{ secrets.KARAFKA_PRO_LICENSE_CHECKSUM }}
+          SPECS_SEED: ${{ github.run_id }}
+          SPECS_GROUP: ${{ matrix.parallel_group }}
+        run: |
+          bin/integrations '/pro'
+
+      - name: Check Kafka logs for unexpected warnings
+        run: bin/verify_kafka_warnings
 
-
+      - name: Check test topics naming convention
+        run: bin/verify_topics_naming
data/.github/workflows/push.yml
ADDED
@@ -0,0 +1,36 @@
+name: Push Gem
+
+on:
+  push:
+    tags:
+      - v*
+
+permissions:
+  contents: read
+
+jobs:
+  push:
+    if: github.repository_owner == 'karafka'
+    runs-on: ubuntu-latest
+    environment: deployment
+
+    permissions:
+      contents: write
+      id-token: write
+
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          fetch-depth: 0
+
+      - name: Set up Ruby
+        uses: ruby/setup-ruby@bb0f760b6c925183520ee0bcc9c4a432a7c8c3c6 # v1.241.0
+        with:
+          bundler-cache: false
+
+      - name: Bundle install
+        run: |
+          bundle install --jobs 4 --retry 3
+
+      # Release
+      - uses: rubygems/release-gem@9e85cb11501bebc2ae661c1500176316d3987059 # v1
data/.github/workflows/verify-action-pins.yml
ADDED
@@ -0,0 +1,16 @@
+name: Verify Action Pins
+on:
+  pull_request:
+    paths:
+      - '.github/workflows/**'
+jobs:
+  verify_action_pins:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - name: Check SHA pins
+        run: |
+          if grep -E -r "uses: .*/.*@(v[0-9]+|main|master)($|[[:space:]]|$)" --include="*.yml" --include="*.yaml" .github/workflows/ | grep -v "#"; then
+            echo "::error::Actions should use SHA pins, not tags or branch names"
+            exit 1
+          fi
data/.ruby-version
CHANGED
@@ -1 +1 @@
-3.4.
+3.4.4
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,65 @@
 # Karafka Framework Changelog
 
+## 2.5.0 (Unreleased)
+- **[Breaking]** Use DLQ and Piping prefix `source_` instead of `original_` to align with naming convention of Kafka Streams and Apache Flink for future usage.
+- **[Breaking]** Rename scheduled jobs topics names in their config (Pro).
+- **[Feature]** Parallel Segments for concurrent processing of the same partition with more than partition count of processes (Pro).
+- [Enhancement] Support KIP-82 (header values of arrays).
+- [Enhancement] Enhance errors tracker with `#counts` that contains per-error class specific counters for granular flow handling.
+- [Enhancement] Provide explicit `Karafka::Admin.copy_consumer_group` API.
+- [Enhancement] Return explicit value from `Karafka::Admin.copy_consumer_group` and `Karafka::Admin.rename_consumer_group` APIs.
+- [Enhancement] Introduce balanced non-consistent VP distributor improving the utilization up to 50% (Pro).
+- [Enhancement] Make the error tracker for advanced DLQ strategies respond to `#topic` and `#partition` for context aware dispatches.
+- [Enhancement] Allow setting the workers thread priority and set it to -1 (50ms) by default.
+- [Enhancement] Enhance low-level `client.pause` event with timeout value (if provided).
+- [Enhancement] Introduce `#marking_cursor` API (defaults to `#cursor`) in the filtering API (Pro).
+- [Enhancement] Support multiple DLQ target topics via context aware strategies (Pro).
+- [Enhancement] Raise error when post-transactional committing of offset is done outside of the transaction (Pro).
+- [Enhancement] Include info level rebalance logger listener data.
+- [Enhancement] Include info level subscription start info.
+- [Enhancement] Make the generic error handling in the `LoggerListener` more descriptive by logging also the error class.
+- [Enhancement] Allow marking older offsets to support advanced rewind capabilities.
+- [Enhancement] Change optional `#seek` reset offset flag default to `true` as `false` is almost never used and seek by default should move the internal consumer offset position as well.
+- [Enhancement] Include Swarm node ID in the swarm process tags.
+- [Enhancement] Replace internal usage of MD5 with SHA256 for FIPS.
+- [Enhancement] Improve OSS vs. Pro specs execution isolation.
+- [Enhancement] Preload `librdkafka` code prior to forking in the Swarm mode to save memory.
+- [Enhancement] Extract errors tracker class reference into an internal `errors_tracker_class` config option (Pro).
+- [Enhancement] Support rdkafka native kafka polling customization for admin.
+- [Enhancement] Customize the multiplexing scale delay (Pro) per consumer group (Pro).
+- [Enhancement] Set `topic.metadata.refresh.interval.ms` for default producer in dev to 5s to align with consumer setup.
+- [Enhancement] Alias `-2` and `-1` with `latest` and `earliest` for seeking.
+- [Enhancement] Allow for usage of `latest` and `earliest` in the `Karafka::Pro::Iterator`.
+- [Enhancement] Failures during `topics migrate` (and other subcommands) don't show what topic failed, and why it's invalid.
+- [Enhancement] Apply changes to topics configuration in atomic independent requests when using Declarative Topics.
+- [Enhancement] Execute the help CLI command when no command provided (similar to Rails) to improve DX.
+- [Enhancement] Remove backtrace from the CLI error for incorrect commands (similar to Rails) to improve DX.
+- [Enhancement] Provide `karafka topics help` sub-help due to nesting of Declarative Topics actions.
+- [Refactor] Introduce a `bin/verify_kafka_warnings` script to clean Kafka from temporary test-suite topics.
+- [Refactor] Introduce a `bin/verify_topics_naming` script to ensure proper test topics naming convention.
+- [Refactor] Make sure all temporary topics have a `it-` prefix in their name.
+- [Refactor] Improve CI specs parallelization.
+- [Maintenance] Lower the `Karafka::Admin` `poll_timeout` to 50 ms to improve responsiveness of admin operations.
+- [Maintenance] Require `karafka-rdkafka` `>=` `0.19.2` due to usage of `#rd_kafka_global_init`, KIP-82 and the new producer caching engine.
+- [Maintenance] Add Deimos routing patch into integration suite not to break it in the future.
+- [Maintenance] Remove Rails `7.0` specs due to upcoming EOL.
+- [Fix] Fix Recurring Tasks and Scheduled Messages not working with Swarm (using closed producer).
+- [Fix] Fix a case where `unknown_topic_or_part` error could leak out of the consumer on consumer shutdown.
+- [Fix] Fix missing `virtual_partitions.partitioner.error` custom error logging in the `LoggerListener`.
+- [Fix] Prevent applied system filters `#timeout` from potentially interacting with user filters.
+- [Fix] Use more sane value in `Admin#seek_consumer_group` for long ago.
+- [Fix] Prevent multiplexing of 1:1 from routing.
+- [Fix] WaterDrop level aborting transaction may cause seek offset to move (Pro).
+- [Fix] Fix inconsistency in the logs where `Karafka::Server` originating logs would not have server id reference.
+- [Fix] Fix inconsistency in the logs where OS signal originating logs would not have server id reference.
+- [Fix] Post-fork WaterDrop instance looses some of the non-kafka settings.
+- [Fix] Max epoch tracking for early cleanup causes messages to be skipped until reload.
+- [Fix] optparse double parse loses ARGV.
+- [Fix] `karafka` cannot be required without Bundler.
+- [Fix] Scheduled Messages re-seek moves to `latest` on inheritance of initial offset when `0` offset is compacted.
+- [Fix] Seek to `:latest` without `topic_partition_position` (-1) will not seek at all.
+- [Change] Move to trusted-publishers and remove signing since no longer needed.
+
 ## 2.4.18 (2025-04-09)
 - [Fix] Make sure `Bundler.with_unbundled_env` is not called multiple times.
 
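The 2.5.0 entries above reference the new `Karafka::Admin.copy_consumer_group` and `Karafka::Admin.rename_consumer_group` APIs but do not show how they are called. A minimal sketch, assuming both take the source group, the target group and the topics whose committed offsets should be migrated, and return an explicit result as the changelog states (argument names and order are illustrative, not taken from this diff):

    # Hypothetical usage based only on the changelog entries above; check the
    # released Karafka::Admin documentation for the exact signature.
    require 'karafka'

    topics = %w[orders payments]

    # Copy committed offsets from one consumer group to another
    copied = Karafka::Admin.copy_consumer_group('legacy-group', 'new-group', topics)

    # Renaming is expected to move the offsets under the new group name
    renamed = Karafka::Admin.rename_consumer_group('legacy-group', 'new-group', topics)

    puts "copied: #{copied}, renamed: #{renamed}"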
data/Gemfile
CHANGED
@@ -17,8 +17,8 @@ end
 
 group :integrations do
   gem 'activejob', require: false
-  gem 'karafka-testing', '>= 2.
-  gem 'karafka-web', '>= 0.
+  gem 'karafka-testing', '>= 2.5.0', require: false
+  gem 'karafka-web', '>= 0.11.0.beta1', require: false
 end
 
 group :test do
data/Gemfile.lock
CHANGED
@@ -1,20 +1,20 @@
 PATH
   remote: .
   specs:
-    karafka (2.
+    karafka (2.5.0.beta2)
       base64 (~> 0.2)
-      karafka-core (>= 2.
-      karafka-rdkafka (>= 0.
-      waterdrop (>= 2.
+      karafka-core (>= 2.5.0, < 2.6.0)
+      karafka-rdkafka (>= 0.19.2)
+      waterdrop (>= 2.8.3, < 3.0.0)
       zeitwerk (~> 2.3)
 
 GEM
   remote: https://rubygems.org/
   specs:
-    activejob (8.0.
-      activesupport (= 8.0.
+    activejob (8.0.2)
+      activesupport (= 8.0.2)
       globalid (>= 0.3.6)
-    activesupport (8.0.
+    activesupport (8.0.2)
       base64
       benchmark (>= 0.3)
       bigdecimal
@@ -28,86 +28,105 @@ GEM
       tzinfo (~> 2.0, >= 2.0.5)
       uri (>= 0.13.1)
     base64 (0.2.0)
-    benchmark (0.
-    bigdecimal (3.1.
-    byebug (
-    concurrent-ruby (1.3.
-    connection_pool (2.
-    diff-lcs (1.
+    benchmark (0.4.0)
+    bigdecimal (3.1.9)
+    byebug (12.0.0)
+    concurrent-ruby (1.3.5)
+    connection_pool (2.5.3)
+    diff-lcs (1.6.2)
     docile (1.4.1)
-    drb (2.2.
-    erubi (1.13.
+    drb (2.2.3)
+    erubi (1.13.1)
     et-orbi (1.2.11)
       tzinfo
-    factory_bot (6.5.
-      activesupport (>=
-    ffi (1.17.
+    factory_bot (6.5.1)
+      activesupport (>= 6.1.0)
+    ffi (1.17.2)
+    ffi (1.17.2-aarch64-linux-gnu)
+    ffi (1.17.2-aarch64-linux-musl)
+    ffi (1.17.2-arm-linux-gnu)
+    ffi (1.17.2-arm-linux-musl)
+    ffi (1.17.2-arm64-darwin)
+    ffi (1.17.2-x86-linux-gnu)
+    ffi (1.17.2-x86-linux-musl)
+    ffi (1.17.2-x86_64-darwin)
+    ffi (1.17.2-x86_64-linux-gnu)
+    ffi (1.17.2-x86_64-linux-musl)
     fugit (1.11.1)
       et-orbi (~> 1, >= 1.2.11)
       raabro (~> 1.4)
     globalid (1.2.1)
       activesupport (>= 6.1)
-    i18n (1.14.
+    i18n (1.14.7)
       concurrent-ruby (~> 1.0)
-    karafka-core (2.
-      karafka-rdkafka (>= 0.
+    karafka-core (2.5.1)
+      karafka-rdkafka (>= 0.19.2, < 0.21.0)
       logger (>= 1.6.0)
-    karafka-rdkafka (0.
+    karafka-rdkafka (0.19.4)
       ffi (~> 1.15)
       mini_portile2 (~> 2.6)
       rake (> 12)
-    karafka-testing (2.
-      karafka (>= 2.
-      waterdrop (>= 2.
-    karafka-web (0.
+    karafka-testing (2.5.0)
+      karafka (>= 2.5.0.beta1, < 2.6.0)
+      waterdrop (>= 2.8.0)
+    karafka-web (0.11.0.beta3)
       erubi (~> 1.4)
-      karafka (>= 2.
-      karafka-core (>= 2.
+      karafka (>= 2.5.0.beta1, < 2.6.0)
+      karafka-core (>= 2.5.0, < 2.6.0)
       roda (~> 3.68, >= 3.69)
       tilt (~> 2.0)
-    logger (1.
-    mini_portile2 (2.8.
-    minitest (5.25.
+    logger (1.7.0)
+    mini_portile2 (2.8.9)
+    minitest (5.25.5)
     ostruct (0.6.1)
     raabro (1.4.0)
-    rack (3.1.
+    rack (3.1.15)
     rake (13.2.1)
-    roda (3.
+    roda (3.92.0)
       rack
     rspec (3.13.0)
       rspec-core (~> 3.13.0)
       rspec-expectations (~> 3.13.0)
       rspec-mocks (~> 3.13.0)
-    rspec-core (3.13.
+    rspec-core (3.13.3)
       rspec-support (~> 3.13.0)
-    rspec-expectations (3.13.
+    rspec-expectations (3.13.4)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.13.0)
-    rspec-mocks (3.13.
+    rspec-mocks (3.13.4)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.13.0)
-    rspec-support (3.13.
-    securerandom (0.
+    rspec-support (3.13.3)
+    securerandom (0.4.1)
     simplecov (0.22.0)
       docile (~> 1.1)
       simplecov-html (~> 0.11)
       simplecov_json_formatter (~> 0.1)
-    simplecov-html (0.
+    simplecov-html (0.13.1)
     simplecov_json_formatter (0.1.4)
-    stringio (3.1.
-    tilt (2.
+    stringio (3.1.7)
+    tilt (2.6.0)
     tzinfo (2.0.6)
       concurrent-ruby (~> 1.0)
-    uri (1.0.
-    waterdrop (2.8.
-      karafka-core (>= 2.4.
-      karafka-rdkafka (>= 0.
+    uri (1.0.3)
+    waterdrop (2.8.4)
+      karafka-core (>= 2.4.9, < 3.0.0)
+      karafka-rdkafka (>= 0.19.2)
       zeitwerk (~> 2.3)
-    zeitwerk (2.
+    zeitwerk (2.6.18)
 
 PLATFORMS
+  aarch64-linux-gnu
+  aarch64-linux-musl
+  arm-linux-gnu
+  arm-linux-musl
+  arm64-darwin
   ruby
-
+  x86-linux-gnu
+  x86-linux-musl
+  x86_64-darwin
+  x86_64-linux-gnu
+  x86_64-linux-musl
 
 DEPENDENCIES
   activejob
@@ -115,12 +134,12 @@ DEPENDENCIES
   factory_bot
   fugit
   karafka!
-  karafka-testing (>= 2.
-  karafka-web (>= 0.
+  karafka-testing (>= 2.5.0)
+  karafka-web (>= 0.11.0.beta1)
   ostruct
   rspec
   simplecov
   stringio
 
 BUNDLED WITH
-   2.
+   2.6.9
data/LICENSE-COMM
CHANGED
@@ -6,7 +6,7 @@ IMPORTANT: THIS SOFTWARE END-USER LICENSE AGREEMENT ("EULA") IS A LEGAL AGREEMEN
 
 ------------------------------------------------------------------------------
 
-In order to use the Software under this Agreement, you must receive a "Source URL" to a license package at the time of purchase, in accordance with the scope of use and other terms specified for each type of Software and as set forth in this Section 1 of this Agreement.
+In order to use the Software under this Agreement, you must either: (a) receive a "Source URL" to a license package at the time of purchase, or (b) for Enterprise customers only, be provided with a fully offline license, in accordance with the scope of use and other terms specified for each type of Software and as set forth in this Section 1 of this Agreement.
 
 1. License Grant
 
@@ -22,7 +22,7 @@ In order to use the Software under this Agreement, you must receive a "Source UR
 
 3. Restricted Uses.
 
-3.1 You shall not (and shall not allow any third party to): (a) decompile, disassemble, or otherwise reverse engineer the Software or attempt to reconstruct or discover any source code, underlying ideas, algorithms, file formats or programming interfaces of the Software by any means whatsoever (except and only to the extent that applicable law prohibits or restricts reverse engineering restrictions); (b) distribute, sell, sublicense, rent, lease or use the Software for time sharing, hosting, service provider or like purposes, except as expressly permitted under this Agreement; (c) redistribute the Software or Modifications other than by including the Software or a portion thereof within your own product, which must have substantially different functionality than the Software or Modifications and must not allow any third party to use the Software or Modifications, or any portions thereof, for software development or application development purposes; (d) redistribute the Software as part of a product, "appliance" or "virtual server"; (e) redistribute the Software on any server which is not directly under your control; (f) remove any product identification, proprietary, copyright or other notices contained in the Software; (g) modify any part of the Software, create a derivative work of any part of the Software (except as permitted in Section 4), or incorporate the Software, except to the extent expressly authorized in writing by Maciej Mensfeld; (h) publicly disseminate performance information or analysis (including, without limitation, benchmarks) from any source relating to the Software; (i) utilize any equipment, device, software, or other means designed to circumvent or remove any form of Source URL or copy protection used by Maciej Mensfeld in connection with the Software, or use the Software together with any authorization code, Source URL, serial number, or other copy protection device not supplied by Maciej Mensfeld; (j) use the Software to develop a product which is competitive with any Maciej Mensfeld product offerings; or (k) use unauthorized Source URLS or keycode(s) or distribute or publish Source URLs or keycode(s), except as may be expressly permitted by Maciej Mensfeld in writing. If your unique Source URL is ever published, Maciej Mensfeld reserves the right to terminate your access without notice.
+3.1 You shall not (and shall not allow any third party to): (a) decompile, disassemble, or otherwise reverse engineer the Software or attempt to reconstruct or discover any source code, underlying ideas, algorithms, file formats or programming interfaces of the Software by any means whatsoever (except and only to the extent that applicable law prohibits or restricts reverse engineering restrictions); (b) distribute, sell, sublicense, rent, lease or use the Software for time sharing, hosting, service provider or like purposes, except as expressly permitted under this Agreement; (c) redistribute the Software or Modifications other than by including the Software or a portion thereof within your own product, which must have substantially different functionality than the Software or Modifications and must not allow any third party to use the Software or Modifications, or any portions thereof, for software development or application development purposes; (d) redistribute the Software as part of a product, "appliance" or "virtual server"; (e) redistribute the Software on any server which is not directly under your control; (f) remove any product identification, proprietary, copyright or other notices contained in the Software; (g) modify any part of the Software, create a derivative work of any part of the Software (except as permitted in Section 4), or incorporate the Software, except to the extent expressly authorized in writing by Maciej Mensfeld; (h) publicly disseminate performance information or analysis (including, without limitation, benchmarks) from any source relating to the Software; (i) utilize any equipment, device, software, or other means designed to circumvent or remove any form of Source URL or copy protection used by Maciej Mensfeld in connection with the Software, or use the Software together with any authorization code, Source URL, serial number, or other copy protection device not supplied by Maciej Mensfeld; (j) use the Software to develop a product which is competitive with any Maciej Mensfeld product offerings; or (k) use unauthorized Source URLS or keycode(s) or distribute or publish Source URLs or keycode(s), except as may be expressly permitted by Maciej Mensfeld in writing. If your unique Source URL or the offline license is ever published, Maciej Mensfeld reserves the right to terminate your access without notice.
 
 3.2 UNDER NO CIRCUMSTANCES MAY YOU USE THE SOFTWARE AS PART OF A PRODUCT OR SERVICE THAT PROVIDES SIMILAR FUNCTIONALITY TO THE SOFTWARE ITSELF.
 
data/README.md
CHANGED
@@ -84,7 +84,7 @@ bundle exec karafka server
 
 I also sell Karafka Pro subscriptions. It includes a commercial-friendly license, priority support, architecture consultations, enhanced Web UI and high throughput data processing-related features (virtual partitions, long-running jobs, and more).
 
-
+Part of the income is [distributed back](https://github.com/orgs/karafka/sponsoring) to other OSS projects that Karafka uses under the hood.
 
 Help me provide high-quality open-source software. Please see the Karafka [homepage](https://karafka.io/#become-pro) for more details.
 
data/Rakefile
ADDED
data/bin/clean_kafka
ADDED
@@ -0,0 +1,43 @@
+#!/usr/bin/env ruby
+
+# A script that removes most of the auto-generated Kafka topics with their data
+# Useful when having long-running Kafka instance that cannot be fully nuked after running specs
+
+# We use the same convention in other framework components (web, waterdrop), so it removes all of
+# them as well.
+
+require_relative '../spec/integrations_helper.rb'
+
+setup_karafka
+
+topics_for_removal = []
+
+Karafka::Admin.cluster_info.topics.each do |topic|
+  topic_name = topic[:topic_name]
+
+  next unless topic_name.start_with?('it-')
+
+  topics_for_removal << topic_name
+end
+
+THREADS_COUNT = 3
+QUEUE = SizedQueue.new(THREADS_COUNT)
+TOPICS_TO_REMOVAL_COUNT = topics_for_removal.size
+
+threads = Array.new(THREADS_COUNT) do
+  Thread.new do
+    while topic_name = QUEUE.pop
+      puts "Removing topic: #{topic_name} (#{topics_for_removal.count} left)"
+      Karafka::Admin.delete_topic(topic_name)
+    end
+  end
+end
+
+while topics_for_removal.size.positive?
+  topic_name = topics_for_removal.pop
+
+  QUEUE << topic_name
+end
+
+QUEUE.close
+threads.each(&:join)
data/bin/integrations
CHANGED
@@ -48,20 +48,25 @@ class Scenario
     'instrumentation/post_errors_instrumentation_error_spec.rb' => [1].freeze,
     'cli/declaratives/delete/existing_with_exit_code_spec.rb' => [2].freeze,
     'cli/declaratives/create/new_with_exit_code_spec.rb' => [2].freeze,
-    'cli/declaratives/plan/when_changes_with_detailed_exit_code_spec.rb' => [2].freeze
+    'cli/declaratives/plan/when_changes_with_detailed_exit_code_spec.rb' => [2].freeze,
+    'cli/declaratives/align/incorrectly_spec.rb' => [1].freeze
   }.freeze
 
   private_constant :MAX_RUN_TIME, :EXIT_CODES
 
+  attr_reader :index
+
   # Creates scenario instance and runs in the background process
   #
   # @param path [String] path to the scenarios file
-  def initialize(path)
+  def initialize(path, index)
     @path = path
     # First 1024 characters from stdout
     @stdout_head = ''
     # Last 1024 characters from stdout
     @stdout_tail = ''
+    # Assigns the index for parallel execution in the CI if requested
+    @index = index
   end
 
   # Starts running given scenario in a separate process
@@ -252,16 +257,24 @@ specs.delete_if do |spec|
   false
 end
 
-raise ArgumentError, "No integration specs with filters: #{ARGV.join(', ')}" if specs.empty?
-
 # Randomize order
-seed = (ENV['
+seed = (ENV['SPECS_SEED'] || rand(0..10_000)).to_i
+group = (ENV['SPECS_GROUP'] || -1).to_i
+groups = (ENV['SPECS_GROUPS'] || 2).to_i
 
 puts "Random seed: #{seed}"
+puts "Group: #{group}"
+puts "Groups: #{groups}"
 
 scenarios = specs
   .shuffle(random: Random.new(seed))
-  .map
+  .map
+  .with_index { |integration, index| Scenario.new(integration, index % groups) }
+  .delete_if { |scenario| scenario.index != group && group != -1 }
+
+raise ArgumentError, "No integration specs with filters: #{ARGV.join(', ')}" if scenarios.empty?
+
+puts "Running #{scenarios.size} scenarios"
 
 regulars = scenarios.reject(&:linear?)
 linears = scenarios - regulars