karafka 2.4.18 → 2.5.0.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (129)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/CODEOWNERS +3 -0
  4. data/.github/workflows/ci.yml +58 -14
  5. data/.github/workflows/verify-action-pins.yml +16 -0
  6. data/.ruby-version +1 -1
  7. data/CHANGELOG.md +53 -0
  8. data/Gemfile +3 -3
  9. data/Gemfile.lock +55 -58
  10. data/LICENSE-COMM +2 -2
  11. data/bin/clean_kafka +43 -0
  12. data/bin/integrations +17 -5
  13. data/bin/rspecs +15 -3
  14. data/bin/verify_kafka_warnings +35 -0
  15. data/bin/verify_topics_naming +27 -0
  16. data/config/locales/errors.yml +3 -0
  17. data/config/locales/pro_errors.yml +13 -2
  18. data/docker-compose.yml +1 -1
  19. data/examples/payloads/json/enrollment_event.json +579 -0
  20. data/examples/payloads/json/ingestion_event.json +30 -0
  21. data/examples/payloads/json/transaction_event.json +17 -0
  22. data/examples/payloads/json/user_event.json +11 -0
  23. data/karafka.gemspec +3 -3
  24. data/lib/karafka/active_job/current_attributes.rb +1 -1
  25. data/lib/karafka/admin/acl.rb +5 -1
  26. data/lib/karafka/admin.rb +51 -19
  27. data/lib/karafka/base_consumer.rb +17 -8
  28. data/lib/karafka/cli/base.rb +8 -2
  29. data/lib/karafka/connection/client.rb +20 -7
  30. data/lib/karafka/connection/listener.rb +24 -12
  31. data/lib/karafka/connection/messages_buffer.rb +1 -1
  32. data/lib/karafka/connection/proxy.rb +3 -0
  33. data/lib/karafka/contracts/config.rb +3 -0
  34. data/lib/karafka/contracts/topic.rb +1 -1
  35. data/lib/karafka/errors.rb +11 -0
  36. data/lib/karafka/helpers/async.rb +3 -1
  37. data/lib/karafka/instrumentation/callbacks/rebalance.rb +5 -1
  38. data/lib/karafka/instrumentation/logger_listener.rb +86 -23
  39. data/lib/karafka/instrumentation/proctitle_listener.rb +5 -1
  40. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +2 -2
  41. data/lib/karafka/messages/builders/batch_metadata.rb +1 -1
  42. data/lib/karafka/pro/cleaner.rb +8 -0
  43. data/lib/karafka/pro/cli/parallel_segments/base.rb +89 -0
  44. data/lib/karafka/pro/cli/parallel_segments/collapse.rb +164 -0
  45. data/lib/karafka/pro/cli/parallel_segments/distribute.rb +164 -0
  46. data/lib/karafka/pro/cli/parallel_segments.rb +60 -0
  47. data/lib/karafka/pro/connection/manager.rb +5 -8
  48. data/lib/karafka/pro/encryption.rb +8 -0
  49. data/lib/karafka/pro/instrumentation/performance_tracker.rb +1 -1
  50. data/lib/karafka/pro/iterator/expander.rb +5 -3
  51. data/lib/karafka/pro/iterator/tpl_builder.rb +23 -0
  52. data/lib/karafka/pro/loader.rb +10 -0
  53. data/lib/karafka/pro/processing/coordinator.rb +4 -1
  54. data/lib/karafka/pro/processing/coordinators/errors_tracker.rb +27 -3
  55. data/lib/karafka/pro/processing/coordinators/filters_applier.rb +11 -0
  56. data/lib/karafka/pro/processing/filters/base.rb +10 -2
  57. data/lib/karafka/pro/processing/filters/expirer.rb +5 -0
  58. data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +2 -2
  59. data/lib/karafka/pro/processing/filters/virtual_limiter.rb +5 -0
  60. data/lib/karafka/pro/processing/parallel_segments/filters/base.rb +73 -0
  61. data/lib/karafka/pro/processing/parallel_segments/filters/default.rb +85 -0
  62. data/lib/karafka/pro/processing/parallel_segments/filters/mom.rb +66 -0
  63. data/lib/karafka/pro/processing/partitioner.rb +1 -13
  64. data/lib/karafka/pro/processing/piping/consumer.rb +13 -13
  65. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb +1 -1
  66. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb +1 -1
  67. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb +1 -1
  68. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb +1 -1
  69. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb +1 -1
  70. data/lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb +1 -1
  71. data/lib/karafka/pro/processing/strategies/default.rb +36 -8
  72. data/lib/karafka/pro/processing/strategies/dlq/default.rb +14 -10
  73. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb +1 -1
  74. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +1 -1
  75. data/lib/karafka/pro/processing/strategies/dlq/lrj.rb +3 -1
  76. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +1 -1
  77. data/lib/karafka/pro/processing/strategies/ftr/default.rb +1 -1
  78. data/lib/karafka/pro/processing/strategies/lrj/default.rb +4 -1
  79. data/lib/karafka/pro/processing/strategies/lrj/ftr.rb +1 -1
  80. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb +1 -1
  81. data/lib/karafka/pro/processing/strategies/lrj/mom.rb +1 -1
  82. data/lib/karafka/pro/processing/virtual_partitions/distributors/balanced.rb +50 -0
  83. data/lib/karafka/pro/processing/virtual_partitions/distributors/base.rb +29 -0
  84. data/lib/karafka/pro/processing/virtual_partitions/distributors/consistent.rb +27 -0
  85. data/lib/karafka/pro/recurring_tasks/contracts/config.rb +8 -4
  86. data/lib/karafka/pro/recurring_tasks/dispatcher.rb +3 -3
  87. data/lib/karafka/pro/recurring_tasks/setup/config.rb +7 -2
  88. data/lib/karafka/pro/recurring_tasks.rb +13 -0
  89. data/lib/karafka/pro/routing/features/dead_letter_queue/topic.rb +1 -1
  90. data/lib/karafka/pro/routing/features/multiplexing/config.rb +1 -0
  91. data/lib/karafka/pro/routing/features/multiplexing/contracts/topic.rb +17 -0
  92. data/lib/karafka/pro/routing/features/multiplexing/proxy.rb +5 -2
  93. data/lib/karafka/pro/routing/features/multiplexing/subscription_group.rb +8 -1
  94. data/lib/karafka/pro/routing/features/parallel_segments/builder.rb +47 -0
  95. data/lib/karafka/pro/routing/features/parallel_segments/config.rb +27 -0
  96. data/lib/karafka/pro/routing/features/parallel_segments/consumer_group.rb +83 -0
  97. data/lib/karafka/pro/routing/features/parallel_segments/contracts/consumer_group.rb +49 -0
  98. data/lib/karafka/pro/routing/features/parallel_segments/topic.rb +43 -0
  99. data/lib/karafka/pro/routing/features/parallel_segments.rb +24 -0
  100. data/lib/karafka/pro/routing/features/patterns/pattern.rb +1 -1
  101. data/lib/karafka/pro/routing/features/recurring_tasks/builder.rb +2 -2
  102. data/lib/karafka/pro/routing/features/scheduled_messages/builder.rb +10 -6
  103. data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +20 -2
  104. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +1 -0
  105. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +8 -2
  106. data/lib/karafka/pro/scheduled_messages/consumer.rb +14 -15
  107. data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +9 -6
  108. data/lib/karafka/pro/scheduled_messages/deserializers/headers.rb +7 -1
  109. data/lib/karafka/pro/scheduled_messages/max_epoch.rb +15 -6
  110. data/lib/karafka/pro/scheduled_messages.rb +13 -0
  111. data/lib/karafka/processing/coordinators_buffer.rb +1 -0
  112. data/lib/karafka/processing/strategies/default.rb +4 -4
  113. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +1 -0
  114. data/lib/karafka/routing/subscription_group.rb +1 -1
  115. data/lib/karafka/runner.rb +7 -1
  116. data/lib/karafka/server.rb +5 -0
  117. data/lib/karafka/setup/attributes_map.rb +2 -0
  118. data/lib/karafka/setup/config.rb +22 -1
  119. data/lib/karafka/setup/defaults_injector.rb +26 -1
  120. data/lib/karafka/status.rb +6 -1
  121. data/lib/karafka/swarm/node.rb +31 -0
  122. data/lib/karafka/swarm/supervisor.rb +4 -0
  123. data/lib/karafka/templates/karafka.rb.erb +14 -1
  124. data/lib/karafka/version.rb +1 -1
  125. data/lib/karafka.rb +17 -9
  126. data/renovate.json +14 -2
  127. data.tar.gz.sig +0 -0
  128. metadata +36 -11
  129. metadata.gz.sig +0 -0
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 66dcb47a4936205b3d71838001db5c64e6fab4da332f54fcb8e64521a081c871
-  data.tar.gz: 5f113ec8bc1459a5994acf8935c802b893695fd69002937b4a91b5697423cb81
+  metadata.gz: 4bcac2dd9a093cd85ab76342d9c227d552e9927e681e79f55963a9d13e64f79d
+  data.tar.gz: 8b5e32c1c1099b8e654599c6df25c33816c3a4a315ffe7e0de06c49a2b4e720e
 SHA512:
-  metadata.gz: 1abbfec5c703e755f012d9171934e7486ab0fbfd1295c0fd2b81216156783156415aaa2d547215aa9ec8a62f54db53b269181ed6638644c66529456f897a708a
-  data.tar.gz: 677b50678298d384ea4862fc703327a9e351d1f2bacd82f027f4f43fd60cceaac81a93cec133c005e7f90f917162301ff5725bfe5f0ba6ed1754ce2fa0503c66
+  metadata.gz: d2ffa6709d42103eea487e44b6756d9b449bd762b162e8ce35283b2513b26c2fde8ef559b900c7932d1fe4e3e8c771d9874d1ed54a572e3e94f6df067eb354d8
+  data.tar.gz: 9bfafb2b2c8ec8975e55fe1bddeaaedfb4d7fd1cedae36abdfed45819e8136eb614922d4f52c0b011d97602ad728dc7a860a3025e9ab1694720b0f35af00429d
checksums.yaml.gz.sig CHANGED
Binary file
data/.github/CODEOWNERS ADDED
@@ -0,0 +1,3 @@
+/.github @mensfeld
+/.github/workflows/ @mensfeld
+/.github/actions/ @mensfeld
data/.github/workflows/ci.yml CHANGED
@@ -6,10 +6,15 @@ concurrency:
 
 on:
   pull_request:
+    branches: [ main, master ]
   push:
+    branches: [ main, master ]
   schedule:
     - cron: '0 1 * * *'
 
+permissions:
+  contents: read
+
 env:
   BUNDLE_RETRY: 6
   BUNDLE_JOBS: 4
@@ -17,15 +22,16 @@ env:
 jobs:
   diffend:
     runs-on: ubuntu-latest
+    timeout-minutes: 5
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
 
      - name: Set up Ruby
-       uses: ruby/setup-ruby@v1
+       uses: ruby/setup-ruby@e34163cd15f4bb403dcd72d98e295997e6a55798 # v1.238.0
        with:
          ruby-version: 3.4
          bundler-cache: true
@@ -38,10 +44,11 @@ jobs:
 
   karafka-checksum:
     runs-on: ubuntu-latest
+    timeout-minutes: 5
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
      - name: Run Karafka license checksum verification
@@ -54,19 +61,33 @@ jobs:
 
   coditsu:
     runs-on: ubuntu-latest
+    timeout-minutes: 5
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
+      - name: Download Coditsu script
+        run: |
+          curl -sSL https://api.coditsu.io/run/ci -o coditsu_script.sh
+          chmod +x coditsu_script.sh
+      - name: Verify Coditsu script checksum
+        run: |
+          EXPECTED_SHA256="0aecc5aa010f53fca264548a41467a2b0a1208d750ce1da3e98a217304cacbbc"
+
+          ACTUAL_SHA256=$(sha256sum coditsu_script.sh | awk '{ print $1 }')
+          if [ "$ACTUAL_SHA256" != "$EXPECTED_SHA256" ]; then
+            echo "::error::Checksum verification failed. Expected $EXPECTED_SHA256 but got $ACTUAL_SHA256."
+            exit 1
+          fi
      - name: Run Coditsu
-       run: \curl -sSL https://api.coditsu.io/run/ci | bash
+       run: ./coditsu_script.sh
 
  # We do not split RSpec specs to OSS and Pro like integrations because they do not overload
  # Kafka heavily, compute total coverage for specs and are fast enough
   specs:
-    timeout-minutes: 10
+    timeout-minutes: 15
     runs-on: ubuntu-latest
     needs: diffend
     strategy:
@@ -81,7 +102,7 @@ jobs:
          - ruby: '3.4'
            coverage: 'true'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - name: Install package dependencies
        run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
 
@@ -97,7 +118,7 @@ jobs:
        run: rm -f Gemfile.lock
 
      - name: Set up Ruby
-       uses: ruby/setup-ruby@v1
+       uses: ruby/setup-ruby@e34163cd15f4bb403dcd72d98e295997e6a55798 # v1.238.0
        with:
          ruby-version: ${{matrix.ruby}}
          bundler-cache: true
@@ -112,8 +133,14 @@ jobs:
          GITHUB_COVERAGE: ${{matrix.coverage}}
        run: bin/rspecs
 
+      - name: Check Kafka logs for unexpected warnings
+        run: bin/verify_kafka_warnings
+
+      - name: Check test topics naming convention
+        run: bin/verify_topics_naming
+
   integrations_oss:
-    timeout-minutes: 20
+    timeout-minutes: 30
     runs-on: ubuntu-latest
     needs: diffend
     strategy:
@@ -125,7 +152,7 @@ jobs:
          - '3.2'
          - '3.1'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - name: Install package dependencies
        run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
 
@@ -137,7 +164,7 @@ jobs:
          docker compose up -d || (sleep 5 && docker compose up -d)
 
      - name: Set up Ruby
-       uses: ruby/setup-ruby@v1
+       uses: ruby/setup-ruby@e34163cd15f4bb403dcd72d98e295997e6a55798 # v1.238.0
        with:
          # Do not use cache here as we run bundle install also later in some of the integration
          # tests and we need to be able to run it without cache
@@ -170,6 +197,12 @@ jobs:
      - name: Run OSS integration tests
        run: bin/integrations --exclude '/pro'
 
+      - name: Check Kafka logs for unexpected warnings
+        run: bin/verify_kafka_warnings
+
+      - name: Check test topics naming convention
+        run: bin/verify_topics_naming
+
   integrations_pro:
     timeout-minutes: 45
     runs-on: ubuntu-latest
@@ -182,8 +215,11 @@ jobs:
          - '3.3'
          - '3.2'
          - '3.1'
+        parallel_group:
+          - '0'
+          - '1'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - name: Install package dependencies
        run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
 
@@ -192,7 +228,7 @@ jobs:
          docker compose up -d || (sleep 5 && docker compose up -d)
 
      - name: Set up Ruby
-       uses: ruby/setup-ruby@v1
+       uses: ruby/setup-ruby@e34163cd15f4bb403dcd72d98e295997e6a55798 # v1.238.0
        with:
          ruby-version: ${{matrix.ruby}}
          bundler: 'latest'
@@ -220,5 +256,13 @@ jobs:
          KARAFKA_PRO_PASSWORD: ${{ secrets.KARAFKA_PRO_PASSWORD }}
          KARAFKA_PRO_VERSION: ${{ secrets.KARAFKA_PRO_VERSION }}
          KARAFKA_PRO_LICENSE_CHECKSUM: ${{ secrets.KARAFKA_PRO_LICENSE_CHECKSUM }}
+          SPECS_SEED: ${{ github.run_id }}
+          SPECS_GROUP: ${{ matrix.parallel_group }}
+        run: |
+          bin/integrations '/pro'
+
+      - name: Check Kafka logs for unexpected warnings
+        run: bin/verify_kafka_warnings
 
-        run: bin/integrations '/pro'
+      - name: Check test topics naming convention
+        run: bin/verify_topics_naming
data/.github/workflows/verify-action-pins.yml ADDED
@@ -0,0 +1,16 @@
+name: Verify Action Pins
+on:
+  pull_request:
+    paths:
+      - '.github/workflows/**'
+jobs:
+  verify_action_pins:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - name: Check SHA pins
+        run: |
+          if grep -E -r "uses: .*/.*@(v[0-9]+|main|master)($|[[:space:]]|$)" --include="*.yml" --include="*.yaml" .github/workflows/ | grep -v "#"; then
+            echo "::error::Actions should use SHA pins, not tags or branch names"
+            exit 1
+          fi
data/.ruby-version CHANGED
@@ -1 +1 @@
-3.4.1
+3.4.4
data/CHANGELOG.md CHANGED
@@ -1,5 +1,58 @@
 # Karafka Framework Changelog
 
+## 2.4.19 (Unreleased)
+- **[Breaking]** Use DLQ and Piping prefix `source_` instead of `original_` to align with naming convention of Kafka Streams and Apache Flink for future usage.
+- **[Breaking]** Rename scheduled jobs topics names in their config (Pro).
+- **[Feature]** Parallel Segments for concurrent processing of the same partition with more than partition count of processes (Pro).
+- [Enhancement] Support KIP-82 (header values of arrays).
+- [Enhancement] Enhance errors tracker with `#counts` that contains per-error class specific counters for granular flow handling.
+- [Enhancement] Provide explicit `Karafka::Admin.copy_consumer_group` API.
+- [Enhancement] Return explicit value from `Karafka::Admin.copy_consumer_group` and `Karafka::Admin.rename_consumer_group` APIs.
+- [Enhancement] Introduce balanced non-consistent VP distributor improving the utilization up to 50% (Pro).
+- [Enhancement] Make the error tracker for advanced DLQ strategies respond to `#topic` and `#partition` for context aware dispatches.
+- [Enhancement] Allow setting the workers thread priority and set it to -1 (50ms) by default.
+- [Enhancement] Enhance low-level `client.pause` event with timeout value (if provided).
+- [Enhancement] Introduce `#marking_cursor` API (defaults to `#cursor`) in the filtering API (Pro).
+- [Enhancement] Support multiple DLQ target topics via context aware strategies (Pro).
+- [Enhancement] Raise error when post-transactional committing of offset is done outside of the transaction (Pro).
+- [Enhancement] Include info level rebalance logger listener data.
+- [Enhancement] Include info level subscription start info.
+- [Enhancement] Make the generic error handling in the `LoggerListener` more descriptive by logging also the error class.
+- [Enhancement] Allow marking older offsets to support advanced rewind capabilities.
+- [Enhancement] Change optional `#seek` reset offset flag default to `true` as `false` is almost never used and seek by default should move the internal consumer offset position as well.
+- [Enhancement] Include Swarm node ID in the swarm process tags.
+- [Enhancement] Replace internal usage of MD5 with SHA256 for FIPS.
+- [Enhancement] Improve OSS vs. Pro specs execution isolation.
+- [Enhancement] Preload `librdkafka` code prior to forking in the Swarm mode to save memory.
+- [Enhancement] Extract errors tracker class reference into an internal `errors_tracker_class` config option (Pro).
+- [Enhancement] Support rdkafka native kafka polling customization for admin.
+- [Enhancement] Customize the multiplexing scale delay per consumer group (Pro).
+- [Enhancement] Set `topic.metadata.refresh.interval.ms` for default producer in dev to 5s to align with consumer setup.
+- [Enhancement] Alias `-2` and `-1` with `latest` and `earliest` for seeking.
+- [Enhancement] Allow for usage of `latest` and `earliest` in the `Karafka::Pro::Iterator`.
+- [Refactor] Introduce a `bin/verify_kafka_warnings` script to check Kafka logs for unexpected warnings.
+- [Refactor] Introduce a `bin/verify_topics_naming` script to ensure proper test topics naming convention.
+- [Refactor] Make sure all temporary topics have an `it-` prefix in their name.
+- [Refactor] Improve CI specs parallelization.
+- [Maintenance] Lower the `Karafka::Admin` `poll_timeout` to 50 ms to improve responsiveness of admin operations.
+- [Maintenance] Require `karafka-rdkafka` `>=` `0.19.2` due to usage of `#rd_kafka_global_init`, KIP-82 and the new producer caching engine.
+- [Maintenance] Add Deimos routing patch into integration suite not to break it in the future.
+- [Maintenance] Remove Rails `7.0` specs due to upcoming EOL.
+- [Fix] Fix Recurring Tasks and Scheduled Messages not working with Swarm (using closed producer).
+- [Fix] Fix a case where `unknown_topic_or_part` error could leak out of the consumer on consumer shutdown.
+- [Fix] Fix missing `virtual_partitions.partitioner.error` custom error logging in the `LoggerListener`.
+- [Fix] Prevent applied system filters `#timeout` from potentially interacting with user filters.
+- [Fix] Use more sane value in `Admin#seek_consumer_group` for long ago.
+- [Fix] Prevent multiplexing of 1:1 from routing.
+- [Fix] WaterDrop level aborting transaction may cause seek offset to move (Pro).
+- [Fix] Fix inconsistency in the logs where `Karafka::Server` originating logs would not have server id reference.
+- [Fix] Fix inconsistency in the logs where OS signal originating logs would not have server id reference.
+- [Fix] Post-fork WaterDrop instance loses some of the non-kafka settings.
+- [Fix] Max epoch tracking for early cleanup causes messages to be skipped until reload.
+- [Fix] optparse double parse loses ARGV.
+- [Fix] `karafka` cannot be required without Bundler.
+- [Fix] Scheduled Messages re-seek moves to `latest` on inheritance of initial offset when `0` offset is compacted.
+
 ## 2.4.18 (2025-04-09)
 - [Fix] Make sure `Bundler.with_unbundled_env` is not called multiple times.
 
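Of the entries above, the new `Karafka::Admin.copy_consumer_group` API may benefit from a quick illustration. The sketch below is hypothetical: only the method name and the fact that it returns an explicit value come from the changelog; the argument names and shapes are assumptions made purely for illustration.

    # Hypothetical usage sketch, not the documented signature.
    # Only the method name and its explicit return value are stated in the changelog.
    copied = Karafka::Admin.copy_consumer_group(
      'orders_consumers',     # assumed: source consumer group id
      'orders_consumers_v2',  # assumed: target consumer group id
      %w[it-orders it-users]  # assumed: topics whose committed offsets to copy
    )

    # The explicit return value can drive flow control in migration scripts
    puts(copied ? 'offsets copied' : 'nothing to copy')
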
data/Gemfile CHANGED
@@ -16,9 +16,9 @@ group :integrations, :test do
 end
 
 group :integrations do
-  gem 'activejob', require: false
-  gem 'karafka-testing', '>= 2.4.6', require: false
-  gem 'karafka-web', '>= 0.10.0.rc2', require: false
+  # gem 'activejob', require: false
+  # gem 'karafka-testing', '>= 2.4.6', require: false
+  # gem 'karafka-web', '>= 0.10.4', require: false
 end
 
 group :test do
data/Gemfile.lock CHANGED
@@ -1,20 +1,17 @@
 PATH
   remote: .
   specs:
-    karafka (2.4.18)
+    karafka (2.5.0.beta1)
       base64 (~> 0.2)
-      karafka-core (>= 2.4.4, < 2.5.0)
-      karafka-rdkafka (>= 0.17.2)
-      waterdrop (>= 2.7.3, < 3.0.0)
+      karafka-core (>= 2.5.0, < 2.6.0)
+      karafka-rdkafka (>= 0.19.2)
+      waterdrop (>= 2.8.3, < 3.0.0)
       zeitwerk (~> 2.3)
 
 GEM
   remote: https://rubygems.org/
   specs:
-    activejob (8.0.1)
-      activesupport (= 8.0.1)
-      globalid (>= 0.3.6)
-    activesupport (8.0.1)
+    activesupport (8.0.2)
       base64
       benchmark (>= 0.3)
       bigdecimal
@@ -28,99 +25,99 @@ GEM
       tzinfo (~> 2.0, >= 2.0.5)
       uri (>= 0.13.1)
     base64 (0.2.0)
-    benchmark (0.3.0)
-    bigdecimal (3.1.8)
-    byebug (11.1.3)
-    concurrent-ruby (1.3.4)
-    connection_pool (2.4.1)
-    diff-lcs (1.5.1)
+    benchmark (0.4.0)
+    bigdecimal (3.1.9)
+    byebug (12.0.0)
+    concurrent-ruby (1.3.5)
+    connection_pool (2.5.3)
+    diff-lcs (1.6.2)
     docile (1.4.1)
-    drb (2.2.1)
-    erubi (1.13.0)
+    drb (2.2.3)
     et-orbi (1.2.11)
       tzinfo
-    factory_bot (6.5.0)
-      activesupport (>= 5.0.0)
-    ffi (1.17.0)
+    factory_bot (6.5.1)
+      activesupport (>= 6.1.0)
+    ffi (1.17.2)
+    ffi (1.17.2-aarch64-linux-gnu)
+    ffi (1.17.2-aarch64-linux-musl)
+    ffi (1.17.2-arm-linux-gnu)
+    ffi (1.17.2-arm-linux-musl)
+    ffi (1.17.2-arm64-darwin)
+    ffi (1.17.2-x86-linux-gnu)
+    ffi (1.17.2-x86-linux-musl)
+    ffi (1.17.2-x86_64-darwin)
+    ffi (1.17.2-x86_64-linux-gnu)
+    ffi (1.17.2-x86_64-linux-musl)
     fugit (1.11.1)
       et-orbi (~> 1, >= 1.2.11)
       raabro (~> 1.4)
-    globalid (1.2.1)
-      activesupport (>= 6.1)
-    i18n (1.14.6)
+    i18n (1.14.7)
       concurrent-ruby (~> 1.0)
-    karafka-core (2.4.8)
-      karafka-rdkafka (>= 0.17.6, < 0.19.0)
+    karafka-core (2.5.0)
+      karafka-rdkafka (>= 0.19.2, < 0.21.0)
       logger (>= 1.6.0)
-    karafka-rdkafka (0.18.1)
+    karafka-rdkafka (0.19.2)
       ffi (~> 1.15)
       mini_portile2 (~> 2.6)
       rake (> 12)
-    karafka-testing (2.4.6)
-      karafka (>= 2.4.0, < 2.5.0)
-      waterdrop (>= 2.7.0)
-    karafka-web (0.10.4)
-      erubi (~> 1.4)
-      karafka (>= 2.4.10, < 2.5.0)
-      karafka-core (>= 2.4.0, < 2.5.0)
-      roda (~> 3.68, >= 3.69)
-      tilt (~> 2.0)
-    logger (1.6.3)
-    mini_portile2 (2.8.8)
-    minitest (5.25.4)
+    logger (1.7.0)
+    mini_portile2 (2.8.9)
+    minitest (5.25.5)
     ostruct (0.6.1)
     raabro (1.4.0)
-    rack (3.1.8)
     rake (13.2.1)
-    roda (3.84.0)
-      rack
     rspec (3.13.0)
       rspec-core (~> 3.13.0)
       rspec-expectations (~> 3.13.0)
       rspec-mocks (~> 3.13.0)
-    rspec-core (3.13.0)
+    rspec-core (3.13.3)
       rspec-support (~> 3.13.0)
-    rspec-expectations (3.13.1)
+    rspec-expectations (3.13.4)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.13.0)
-    rspec-mocks (3.13.1)
+    rspec-mocks (3.13.4)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.13.0)
-    rspec-support (3.13.1)
-    securerandom (0.3.2)
+    rspec-support (3.13.3)
+    securerandom (0.4.1)
     simplecov (0.22.0)
       docile (~> 1.1)
       simplecov-html (~> 0.11)
       simplecov_json_formatter (~> 0.1)
-    simplecov-html (0.12.3)
+    simplecov-html (0.13.1)
     simplecov_json_formatter (0.1.4)
-    stringio (3.1.2)
-    tilt (2.4.0)
+    stringio (3.1.7)
     tzinfo (2.0.6)
       concurrent-ruby (~> 1.0)
-    uri (1.0.2)
-    waterdrop (2.8.1)
-      karafka-core (>= 2.4.3, < 3.0.0)
-      karafka-rdkafka (>= 0.17.5)
+    uri (1.0.3)
+    waterdrop (2.8.3)
+      karafka-core (>= 2.4.9, < 3.0.0)
+      karafka-rdkafka (>= 0.19.1)
       zeitwerk (~> 2.3)
-    zeitwerk (2.7.1)
+    zeitwerk (2.7.3)
 
 PLATFORMS
+  aarch64-linux-gnu
+  aarch64-linux-musl
+  arm-linux-gnu
+  arm-linux-musl
+  arm64-darwin
   ruby
-  x86_64-linux
+  x86-linux-gnu
+  x86-linux-musl
+  x86_64-darwin
+  x86_64-linux-gnu
+  x86_64-linux-musl
 
 DEPENDENCIES
-  activejob
   byebug
   factory_bot
   fugit
   karafka!
-  karafka-testing (>= 2.4.6)
-  karafka-web (>= 0.10.0.rc2)
   ostruct
   rspec
   simplecov
   stringio
 
 BUNDLED WITH
-   2.4.22
+   2.6.9
data/LICENSE-COMM CHANGED
@@ -6,7 +6,7 @@ IMPORTANT: THIS SOFTWARE END-USER LICENSE AGREEMENT ("EULA") IS A LEGAL AGREEMEN
 
 ------------------------------------------------------------------------------
 
-In order to use the Software under this Agreement, you must receive a "Source URL" to a license package at the time of purchase, in accordance with the scope of use and other terms specified for each type of Software and as set forth in this Section 1 of this Agreement.
+In order to use the Software under this Agreement, you must either: (a) receive a "Source URL" to a license package at the time of purchase, or (b) for Enterprise customers only, be provided with a fully offline license, in accordance with the scope of use and other terms specified for each type of Software and as set forth in this Section 1 of this Agreement.
 
 1. License Grant
 
@@ -22,7 +22,7 @@ In order to use the Software under this Agreement, you must receive a "Source UR
 
 3. Restricted Uses.
 
-3.1 You shall not (and shall not allow any third party to): (a) decompile, disassemble, or otherwise reverse engineer the Software or attempt to reconstruct or discover any source code, underlying ideas, algorithms, file formats or programming interfaces of the Software by any means whatsoever (except and only to the extent that applicable law prohibits or restricts reverse engineering restrictions); (b) distribute, sell, sublicense, rent, lease or use the Software for time sharing, hosting, service provider or like purposes, except as expressly permitted under this Agreement; (c) redistribute the Software or Modifications other than by including the Software or a portion thereof within your own product, which must have substantially different functionality than the Software or Modifications and must not allow any third party to use the Software or Modifications, or any portions thereof, for software development or application development purposes; (d) redistribute the Software as part of a product, "appliance" or "virtual server"; (e) redistribute the Software on any server which is not directly under your control; (f) remove any product identification, proprietary, copyright or other notices contained in the Software; (g) modify any part of the Software, create a derivative work of any part of the Software (except as permitted in Section 4), or incorporate the Software, except to the extent expressly authorized in writing by Maciej Mensfeld; (h) publicly disseminate performance information or analysis (including, without limitation, benchmarks) from any source relating to the Software; (i) utilize any equipment, device, software, or other means designed to circumvent or remove any form of Source URL or copy protection used by Maciej Mensfeld in connection with the Software, or use the Software together with any authorization code, Source URL, serial number, or other copy protection device not supplied by Maciej Mensfeld; (j) use the Software to develop a product which is competitive with any Maciej Mensfeld product offerings; or (k) use unauthorized Source URLS or keycode(s) or distribute or publish Source URLs or keycode(s), except as may be expressly permitted by Maciej Mensfeld in writing. If your unique Source URL is ever published, Maciej Mensfeld reserves the right to terminate your access without notice.
+3.1 You shall not (and shall not allow any third party to): (a) decompile, disassemble, or otherwise reverse engineer the Software or attempt to reconstruct or discover any source code, underlying ideas, algorithms, file formats or programming interfaces of the Software by any means whatsoever (except and only to the extent that applicable law prohibits or restricts reverse engineering restrictions); (b) distribute, sell, sublicense, rent, lease or use the Software for time sharing, hosting, service provider or like purposes, except as expressly permitted under this Agreement; (c) redistribute the Software or Modifications other than by including the Software or a portion thereof within your own product, which must have substantially different functionality than the Software or Modifications and must not allow any third party to use the Software or Modifications, or any portions thereof, for software development or application development purposes; (d) redistribute the Software as part of a product, "appliance" or "virtual server"; (e) redistribute the Software on any server which is not directly under your control; (f) remove any product identification, proprietary, copyright or other notices contained in the Software; (g) modify any part of the Software, create a derivative work of any part of the Software (except as permitted in Section 4), or incorporate the Software, except to the extent expressly authorized in writing by Maciej Mensfeld; (h) publicly disseminate performance information or analysis (including, without limitation, benchmarks) from any source relating to the Software; (i) utilize any equipment, device, software, or other means designed to circumvent or remove any form of Source URL or copy protection used by Maciej Mensfeld in connection with the Software, or use the Software together with any authorization code, Source URL, serial number, or other copy protection device not supplied by Maciej Mensfeld; (j) use the Software to develop a product which is competitive with any Maciej Mensfeld product offerings; or (k) use unauthorized Source URLS or keycode(s) or distribute or publish Source URLs or keycode(s), except as may be expressly permitted by Maciej Mensfeld in writing. If your unique Source URL or the offline license is ever published, Maciej Mensfeld reserves the right to terminate your access without notice.
 
 3.2 UNDER NO CIRCUMSTANCES MAY YOU USE THE SOFTWARE AS PART OF A PRODUCT OR SERVICE THAT PROVIDES SIMILAR FUNCTIONALITY TO THE SOFTWARE ITSELF.
 
data/bin/clean_kafka ADDED
@@ -0,0 +1,43 @@
+#!/usr/bin/env ruby
+
+# A script that removes most of the auto-generated Kafka topics with their data
+# Useful when having long-running Kafka instance that cannot be fully nuked after running specs
+
+# We use the same convention in other framework components (web, waterdrop), so it removes all of
+# them as well.
+
+require_relative '../spec/integrations_helper.rb'
+
+setup_karafka
+
+topics_for_removal = []
+
+Karafka::Admin.cluster_info.topics.each do |topic|
+  topic_name = topic[:topic_name]
+
+  next unless topic_name.start_with?('it-')
+
+  topics_for_removal << topic_name
+end
+
+THREADS_COUNT = 3
+QUEUE = SizedQueue.new(THREADS_COUNT)
+TOPICS_TO_REMOVAL_COUNT = topics_for_removal.size
+
+threads = Array.new(THREADS_COUNT) do
+  Thread.new do
+    while topic_name = QUEUE.pop
+      puts "Removing topic: #{topic_name} (#{topics_for_removal.count} left)"
+      Karafka::Admin.delete_topic(topic_name)
+    end
+  end
+end
+
+while topics_for_removal.size.positive?
+  topic_name = topics_for_removal.pop
+
+  QUEUE << topic_name
+end
+
+QUEUE.close
+threads.each(&:join)
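The script above is a small producer-consumer setup: `SizedQueue.new(THREADS_COUNT)` applies backpressure so only a few topic names are in flight at once, and `QUEUE.close` makes pending `#pop` calls return `nil`, which ends each worker's `while` loop. A minimal standalone sketch of the same skeleton, with illustrative names only:

    # Bounded queue feeding a fixed pool of worker threads; closing the
    # queue lets the workers drain remaining items and then exit cleanly.
    queue = SizedQueue.new(3)

    workers = Array.new(3) do
      Thread.new do
        # #pop returns nil once the queue is closed and empty
        while (item = queue.pop)
          puts "processing #{item}"
        end
      end
    end

    10.times { |i| queue << "task-#{i}" }
    queue.close
    workers.each(&:join)
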
data/bin/integrations CHANGED
@@ -53,15 +53,19 @@ class Scenario
 
   private_constant :MAX_RUN_TIME, :EXIT_CODES
 
+  attr_reader :index
+
   # Creates scenario instance and runs in the background process
   #
   # @param path [String] path to the scenarios file
-  def initialize(path)
+  def initialize(path, index)
     @path = path
     # First 1024 characters from stdout
     @stdout_head = ''
     # Last 1024 characters from stdout
     @stdout_tail = ''
+    # Assigns the index for parallel execution in the CI if requested
+    @index = index
   end
 
   # Starts running given scenario in a separate process
@@ -252,16 +256,24 @@ specs.delete_if do |spec|
   false
 end
 
-raise ArgumentError, "No integration specs with filters: #{ARGV.join(', ')}" if specs.empty?
-
 # Randomize order
-seed = (ENV['SEED'] || rand(0..10_000)).to_i
+seed = (ENV['SPECS_SEED'] || rand(0..10_000)).to_i
+group = (ENV['SPECS_GROUP'] || -1).to_i
+groups = (ENV['SPECS_GROUPS'] || 2).to_i
 
 puts "Random seed: #{seed}"
+puts "Group: #{group}"
+puts "Groups: #{groups}"
 
 scenarios = specs
   .shuffle(random: Random.new(seed))
-  .map { |integration_test| Scenario.new(integration_test) }
+  .map
+  .with_index { |integration, index| Scenario.new(integration, index % groups) }
+  .delete_if { |scenario| scenario.index != group && group != -1 }
+
+raise ArgumentError, "No integration specs with filters: #{ARGV.join(', ')}" if scenarios.empty?
+
+puts "Running #{scenarios.size} scenarios"
 
 regulars = scenarios.reject(&:linear?)
 linears = scenarios - regulars
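The grouping change above partitions work deterministically across CI jobs: every job shuffles with the same `SPECS_SEED`, so the order is identical everywhere, and each job keeps only the scenarios whose `index % groups` equals its `SPECS_GROUP` (`-1` meaning run everything). A standalone sketch of the scheme with illustrative values:

    # Deterministic split of a shuffled work list across parallel CI jobs.
    # A shared seed yields the same order in every job; each job then keeps
    # only its own slice. group == -1 disables the filtering entirely.
    specs  = %w[spec_a spec_b spec_c spec_d spec_e]
    seed   = (ENV['SPECS_SEED'] || 42).to_i
    group  = (ENV['SPECS_GROUP'] || -1).to_i
    groups = (ENV['SPECS_GROUPS'] || 2).to_i

    mine = specs
      .shuffle(random: Random.new(seed))
      .each_with_index
      .select { |_spec, index| group == -1 || index % groups == group }
      .map(&:first)

    puts "Running #{mine.size} of #{specs.size}: #{mine.join(', ')}"
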
data/bin/rspecs CHANGED
@@ -3,8 +3,20 @@
 set -e
 
 # Run only regular non-forking specs first
-SPECS_TYPE=regular bundle exec rspec --tag ~type:pro --tag ~mode:fork
+SPECS_TYPE=regular bundle exec rspec \
+  --tag ~type:pro \
+  --tag ~mode:fork \
+  --exclude-pattern "**/pro/**/*_spec.rb" \
+  spec/lib/
+
 # Run forking specs, they need to run in isolation not to crash because of librdkafka
-SPECS_TYPE=regular bundle exec rspec --tag mode:fork
+SPECS_TYPE=regular bundle exec rspec \
+  --tag mode:fork \
+  --exclude-pattern "**/pro/**/*_spec.rb" \
+  spec/lib/
+
 # Run pro specs at the end
-SPECS_TYPE=pro bundle exec rspec --tag type:pro --tag ~mode:fork
+SPECS_TYPE=pro bundle exec rspec \
+  --tag type:pro \
+  --tag ~mode:fork \
+  spec/lib/
data/bin/verify_kafka_warnings ADDED
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+# Checks Kafka logs for unsupported warning patterns
+# Only specified warnings are allowed, all others should trigger failure
+
+allowed_patterns=(
+  "Performing controller activation"
+  "registered with feature metadata.version"
+  "Replayed TopicRecord for"
+  "Replayed PartitionRecord for"
+  "Previous leader None and previous leader epoch"
+  "Creating new"
+)
+
+# Get all warnings
+warnings=$(docker logs --since=0 kafka | grep WARN)
+exit_code=0
+
+while IFS= read -r line; do
+  allowed=0
+  for pattern in "${allowed_patterns[@]}"; do
+    if echo "$line" | grep -q "$pattern"; then
+      allowed=1
+      break
+    fi
+  done
+
+  if [ $allowed -eq 0 ]; then
+    echo "Unexpected warning found:"
+    echo "$line"
+    exit_code=1
+  fi
+done <<< "$warnings"
+
+exit $exit_code