karafka 2.5.2 → 2.5.3
This diff shows the content changes between publicly released versions of the package, as they appear in the supported public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +18 -0
- data/.yard-lint.yml +174 -0
- data/CHANGELOG.md +6 -0
- data/Gemfile +1 -0
- data/Gemfile.lock +24 -19
- data/examples/payloads/json/sample_set_03/event_type_1.json +1 -1
- data/examples/payloads/json/sample_set_03/event_type_2.json +1 -1
- data/examples/payloads/json/sample_set_03/event_type_3.json +1 -1
- data/karafka.gemspec +2 -2
- data/lib/active_job/queue_adapters/karafka_adapter.rb +2 -2
- data/lib/karafka/active_job/consumer.rb +2 -2
- data/lib/karafka/active_job/current_attributes.rb +2 -2
- data/lib/karafka/active_job/deserializer.rb +1 -1
- data/lib/karafka/active_job/dispatcher.rb +2 -2
- data/lib/karafka/admin/configs/resource.rb +7 -1
- data/lib/karafka/admin/consumer_groups.rb +6 -8
- data/lib/karafka/admin/topics.rb +5 -4
- data/lib/karafka/admin.rb +10 -10
- data/lib/karafka/app.rb +3 -3
- data/lib/karafka/base_consumer.rb +1 -1
- data/lib/karafka/cli/base.rb +1 -1
- data/lib/karafka/cli/console.rb +1 -1
- data/lib/karafka/cli/contracts/server.rb +1 -1
- data/lib/karafka/cli/help.rb +1 -1
- data/lib/karafka/cli/install.rb +2 -1
- data/lib/karafka/cli/server.rb +1 -1
- data/lib/karafka/cli/swarm.rb +1 -1
- data/lib/karafka/connection/client.rb +19 -18
- data/lib/karafka/connection/manager.rb +1 -0
- data/lib/karafka/connection/proxy.rb +1 -1
- data/lib/karafka/connection/rebalance_manager.rb +1 -1
- data/lib/karafka/connection/status.rb +1 -0
- data/lib/karafka/constraints.rb +1 -1
- data/lib/karafka/contracts/base.rb +1 -1
- data/lib/karafka/deserializers/payload.rb +1 -1
- data/lib/karafka/helpers/async.rb +1 -1
- data/lib/karafka/helpers/config_importer.rb +3 -3
- data/lib/karafka/helpers/multi_delegator.rb +3 -0
- data/lib/karafka/instrumentation/assignments_tracker.rb +2 -1
- data/lib/karafka/instrumentation/callbacks/error.rb +2 -2
- data/lib/karafka/instrumentation/callbacks/statistics.rb +3 -3
- data/lib/karafka/instrumentation/logger.rb +6 -6
- data/lib/karafka/instrumentation/monitor.rb +2 -2
- data/lib/karafka/instrumentation/vendors/appsignal/base.rb +1 -1
- data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +1 -1
- data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +2 -2
- data/lib/karafka/instrumentation/vendors/kubernetes/base_listener.rb +1 -1
- data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +3 -15
- data/lib/karafka/messages/builders/batch_metadata.rb +1 -1
- data/lib/karafka/pro/active_job/consumer.rb +2 -2
- data/lib/karafka/pro/active_job/dispatcher.rb +3 -3
- data/lib/karafka/pro/cleaner.rb +3 -3
- data/lib/karafka/pro/cli/contracts/server.rb +1 -1
- data/lib/karafka/pro/cli/parallel_segments/base.rb +4 -3
- data/lib/karafka/pro/cli/parallel_segments/collapse.rb +1 -1
- data/lib/karafka/pro/cli/parallel_segments/distribute.rb +1 -1
- data/lib/karafka/pro/cli/parallel_segments.rb +1 -1
- data/lib/karafka/pro/connection/manager.rb +1 -2
- data/lib/karafka/pro/connection/multiplexing/listener.rb +1 -0
- data/lib/karafka/pro/contracts/base.rb +1 -1
- data/lib/karafka/pro/encryption/cipher.rb +3 -2
- data/lib/karafka/pro/encryption/contracts/config.rb +1 -1
- data/lib/karafka/pro/encryption/messages/parser.rb +1 -1
- data/lib/karafka/pro/encryption/setup/config.rb +1 -1
- data/lib/karafka/pro/iterator/tpl_builder.rb +1 -1
- data/lib/karafka/pro/iterator.rb +1 -1
- data/lib/karafka/pro/loader.rb +1 -1
- data/lib/karafka/pro/processing/coordinator.rb +1 -1
- data/lib/karafka/pro/processing/filters/base.rb +1 -0
- data/lib/karafka/pro/processing/filters/delayer.rb +1 -1
- data/lib/karafka/pro/processing/filters/expirer.rb +1 -1
- data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +1 -1
- data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +1 -1
- data/lib/karafka/pro/processing/jobs/eofed_non_blocking.rb +1 -1
- data/lib/karafka/pro/processing/jobs/periodic.rb +1 -1
- data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +1 -1
- data/lib/karafka/pro/processing/jobs_builder.rb +1 -1
- data/lib/karafka/pro/processing/offset_metadata/fetcher.rb +1 -0
- data/lib/karafka/pro/processing/partitioner.rb +1 -1
- data/lib/karafka/pro/processing/strategies/base.rb +1 -1
- data/lib/karafka/pro/processing/strategies/default.rb +2 -2
- data/lib/karafka/pro/processing/strategy_selector.rb +1 -0
- data/lib/karafka/pro/processing/virtual_partitions/distributors/balanced.rb +4 -2
- data/lib/karafka/pro/processing/virtual_partitions/distributors/consistent.rb +4 -2
- data/lib/karafka/pro/recurring_tasks/consumer.rb +3 -2
- data/lib/karafka/pro/recurring_tasks/contracts/config.rb +2 -2
- data/lib/karafka/pro/recurring_tasks/contracts/task.rb +1 -1
- data/lib/karafka/pro/recurring_tasks/deserializer.rb +1 -1
- data/lib/karafka/pro/recurring_tasks/dispatcher.rb +1 -1
- data/lib/karafka/pro/recurring_tasks/executor.rb +2 -1
- data/lib/karafka/pro/recurring_tasks/schedule.rb +5 -2
- data/lib/karafka/pro/recurring_tasks/serializer.rb +6 -5
- data/lib/karafka/pro/recurring_tasks/setup/config.rb +2 -2
- data/lib/karafka/pro/recurring_tasks/task.rb +1 -1
- data/lib/karafka/pro/routing/features/dead_letter_queue/topic.rb +3 -0
- data/lib/karafka/pro/routing/features/multiplexing/subscription_groups_builder.rb +1 -1
- data/lib/karafka/pro/routing/features/multiplexing.rb +5 -5
- data/lib/karafka/pro/routing/features/offset_metadata.rb +4 -4
- data/lib/karafka/pro/routing/features/parallel_segments/builder.rb +1 -1
- data/lib/karafka/pro/routing/features/patterns/patterns.rb +1 -1
- data/lib/karafka/pro/routing/features/periodic_job/topic.rb +1 -1
- data/lib/karafka/pro/routing/features/recurring_tasks/builder.rb +1 -1
- data/lib/karafka/pro/routing/features/swarm.rb +1 -1
- data/lib/karafka/pro/routing/features/throttling/topic.rb +3 -1
- data/lib/karafka/pro/scheduled_messages/consumer.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/contracts/config.rb +2 -2
- data/lib/karafka/pro/scheduled_messages/contracts/message.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +3 -2
- data/lib/karafka/pro/scheduled_messages/day.rb +1 -0
- data/lib/karafka/pro/scheduled_messages/deserializers/headers.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/deserializers/payload.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/max_epoch.rb +1 -0
- data/lib/karafka/pro/scheduled_messages/proxy.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/serializer.rb +3 -3
- data/lib/karafka/pro/scheduled_messages/setup/config.rb +2 -2
- data/lib/karafka/pro/scheduled_messages/state.rb +1 -0
- data/lib/karafka/pro/scheduled_messages/tracker.rb +1 -0
- data/lib/karafka/process.rb +4 -4
- data/lib/karafka/processing/executor.rb +1 -1
- data/lib/karafka/processing/inline_insights/tracker.rb +1 -0
- data/lib/karafka/processing/jobs_queue.rb +1 -1
- data/lib/karafka/processing/result.rb +1 -0
- data/lib/karafka/processing/strategy_selector.rb +1 -0
- data/lib/karafka/routing/activity_manager.rb +1 -0
- data/lib/karafka/routing/builder.rb +3 -1
- data/lib/karafka/routing/contracts/consumer_group.rb +3 -2
- data/lib/karafka/routing/contracts/topic.rb +5 -2
- data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +1 -1
- data/lib/karafka/routing/features/declaratives/topic.rb +5 -2
- data/lib/karafka/routing/features/deserializers/topic.rb +3 -3
- data/lib/karafka/routing/features/inline_insights.rb +5 -5
- data/lib/karafka/routing/router.rb +1 -1
- data/lib/karafka/routing/subscription_group.rb +1 -1
- data/lib/karafka/routing/subscription_groups_builder.rb +1 -0
- data/lib/karafka/routing/topic.rb +3 -3
- data/lib/karafka/server.rb +1 -1
- data/lib/karafka/setup/attributes_map.rb +4 -2
- data/lib/karafka/setup/config.rb +21 -10
- data/lib/karafka/setup/config_proxy.rb +209 -0
- data/lib/karafka/setup/contracts/config.rb +1 -1
- data/lib/karafka/swarm/liveness_listener.rb +1 -0
- data/lib/karafka/swarm/manager.rb +7 -6
- data/lib/karafka/swarm/node.rb +1 -1
- data/lib/karafka/swarm/supervisor.rb +1 -0
- data/lib/karafka/time_trackers/base.rb +1 -1
- data/lib/karafka/version.rb +1 -1
- data/lib/karafka.rb +2 -2
- metadata +7 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d7b2a6d732bc69e537d14e306a4edc922b6375aa0154dada74527b06efea34fa
+  data.tar.gz: 2e6c1f533f706d70699db822d6d34eac5b6df0c0275bc12366238cc7316a3793
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c760fb9e76f6e3af5421cfc0d821d03c3c845c629ea4ab1b2bcfa89c50431320b5cada72482eb48dfc9c27718631e1920ca5133ccd3884daddbc84cc6a7288e7
+  data.tar.gz: ce5466e71edc225b9e11283be5f7e1f877f6222a766dd63f69a235a0739a812f5007da341f5010a71eb2322efebb400fa441a7188f41ac6c244acade0b37c4e4
data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml
CHANGED

@@ -60,6 +60,23 @@ jobs:
       - name: Run Coditsu
         run: ./coditsu_script.sh

+  yard-lint:
+    runs-on: ubuntu-latest
+    timeout-minutes: 5
+    strategy:
+      fail-fast: false
+    steps:
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        with:
+          fetch-depth: 0
+      - name: Set up Ruby
+        uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
+        with:
+          ruby-version: '3.4.7'
+          bundler-cache: true
+      - name: Run yard-lint
+        run: bundle exec yard-lint lib/
+
   # We do not split RSpec specs to OSS and Pro like integrations because they do not overload
   # Kafka heavily, compute total coverage for specs and are fast enough
   specs:

@@ -265,6 +282,7 @@ jobs:
       needs:
         - karafka-checksum
         - coditsu
+        - yard-lint
         - specs
         - integrations_oss
         - integrations_pro
data/.yard-lint.yml
ADDED
@@ -0,0 +1,174 @@

# YARD-Lint Configuration
# See https://github.com/mensfeld/yard-lint for documentation

# Global settings for all validators
AllValidators:
  # YARD command-line options (applied to all validators by default)
  YardOptions:
    - --private
    - --protected

  # Global file exclusion patterns
  Exclude:
    - '\.git'
    - 'vendor/**/*'
    - 'node_modules/**/*'
    - 'spec/**/*'
    - 'test/**/*'

  # Exit code behavior (error, warning, convention, never)
  FailOnSeverity: error

  # Minimum documentation coverage percentage (0-100)
  # Fails if coverage is below this threshold
  MinCoverage: 99.0

  # Diff mode settings
  DiffMode:
    # Default base ref for --diff (auto-detects main/master if not specified)
    DefaultBaseRef: ~

# Documentation validators
Documentation/UndocumentedObjects:
  Description: 'Checks for classes, modules, and methods without documentation.'
  Enabled: true
  Severity: error
  ExcludedMethods:
    - 'initialize/0' # Exclude parameter-less initialize
    - '/^_/' # Exclude private methods (by convention)

Documentation/UndocumentedMethodArguments:
  Description: 'Checks for method parameters without @param tags.'
  Enabled: true
  Severity: error

Documentation/UndocumentedBooleanMethods:
  Description: 'Checks that question mark methods document their boolean return.'
  Enabled: true
  Severity: error

Documentation/UndocumentedOptions:
  Description: 'Detects methods with options hash parameters but no @option tags.'
  Enabled: true
  Severity: error

Documentation/MarkdownSyntax:
  Description: 'Detects common markdown syntax errors in documentation.'
  Enabled: true
  Severity: error

# Tags validators
Tags/Order:
  Description: 'Enforces consistent ordering of YARD tags.'
  Enabled: true
  Severity: error
  EnforcedOrder:
    - param
    - option
    - return
    - raise
    - example

Tags/InvalidTypes:
  Description: 'Validates type definitions in @param, @return, @option tags.'
  Enabled: true
  Severity: error
  ValidatedTags:
    - param
    - option
    - return

Tags/TypeSyntax:
  Description: 'Validates YARD type syntax using YARD parser.'
  Enabled: true
  Severity: error
  ValidatedTags:
    - param
    - option
    - return
    - yieldreturn

Tags/MeaninglessTag:
  Description: 'Detects @param/@option tags on classes, modules, or constants.'
  Enabled: true
  Severity: error
  CheckedTags:
    - param
    - option
  InvalidObjectTypes:
    - class
    - module
    - constant

Tags/CollectionType:
  Description: 'Validates Hash collection syntax consistency.'
  Enabled: true
  Severity: error
  EnforcedStyle: long # 'long' for Hash{K => V} (YARD standard), 'short' for {K => V}
  ValidatedTags:
    - param
    - option
    - return
    - yieldreturn

Tags/TagTypePosition:
  Description: 'Validates type annotation position in tags.'
  Enabled: true
  Severity: error
  CheckedTags:
    - param
    - option
  # EnforcedStyle: 'type_after_name' (YARD standard: @param name [Type])
  # or 'type_first' (@param [Type] name)
  EnforcedStyle: type_after_name

Tags/ApiTags:
  Description: 'Enforces @api tags on public objects.'
  Enabled: false # Opt-in validator
  Severity: error
  AllowedApis:
    - public
    - private
    - internal

Tags/OptionTags:
  Description: 'Requires @option tags for methods with options parameters.'
  Enabled: true
  Severity: error

# Warnings validators - catches YARD parser errors
Warnings/UnknownTag:
  Description: 'Detects unknown YARD tags.'
  Enabled: true
  Severity: error

Warnings/UnknownDirective:
  Description: 'Detects unknown YARD directives.'
  Enabled: true
  Severity: error

Warnings/InvalidTagFormat:
  Description: 'Detects malformed tag syntax.'
  Enabled: true
  Severity: error

Warnings/InvalidDirectiveFormat:
  Description: 'Detects malformed directive syntax.'
  Enabled: true
  Severity: error

Warnings/DuplicatedParameterName:
  Description: 'Detects duplicate @param tags.'
  Enabled: true
  Severity: error

Warnings/UnknownParameterName:
  Description: 'Detects @param tags for non-existent parameters.'
  Enabled: true
  Severity: error

# Semantic validators
Semantic/AbstractMethods:
  Description: 'Ensures @abstract methods do not have real implementations.'
  Enabled: true
  Severity: error
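Note that `MinCoverage: 99.0` makes documentation coverage a hard CI gate. Based on the workflow step above, the same checks can presumably be reproduced locally with `bundle exec yard-lint lib/`, which picks up this configuration file from the repository root.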
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,11 @@
 # Karafka Framework Changelog

+## 2.5.3 (2025-11-14)
+- [Enhancement] Dynamically support `librdkafka` fatal errors with correct reported details.
+- [Enhancement] Add `producer` block API to setup for simplified WaterDrop producer configuration without manual producer instance creation, using a transparent ConfigProxy during setup to avoid polluting the permanent config API.
+- [Change] Require `waterdrop` `>=` `2.8.14` to support new features.
+- [Change] Require `karafka-rdkafka` `>=` `0.23.1` to support new rebalance protocol.
+
 ## 2.5.2 (2025-10-31)
 - **[EOL]** Remove Rails 7.1 support according to EOL while not blocking Rails 7.1 usage.
 - [Enhancement] Retry on the KIP-848 `stale_member_epoch` error.
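The `producer` block entry is the headline change of this release. A minimal sketch of how such a setup could read, assuming the block yields a WaterDrop-style config object; the option names used inside the block (`max_wait_timeout`, the `kafka` override) are illustrative assumptions based on WaterDrop's documented settings, not on the 2.5.3 sources:

```ruby
# Sketch only - inferred from the changelog wording above
class KarafkaApp < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' }

    # New in 2.5.3: tweak the default WaterDrop producer inline instead of
    # building a WaterDrop::Producer manually and assigning it to
    # config.producer
    config.producer do |producer_config|
      producer_config.max_wait_timeout = 60_000
      producer_config.kafka[:'request.required.acks'] = 'all'
    end
  end
end
```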
data/Gemfile
CHANGED
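The Gemfile body is not rendered in this extract (the file list reports +1 -0). Given the new `yard-lint` entries in the Gemfile.lock diff below, the added line is presumably a single development dependency along these lines (the `require: false` flag is an assumption):

```ruby
gem 'yard-lint', require: false
```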
data/Gemfile.lock
CHANGED
@@ -1,11 +1,11 @@
 PATH
   remote: .
   specs:
-    karafka (2.5.2)
+    karafka (2.5.3)
       base64 (~> 0.2)
       karafka-core (>= 2.5.6, < 2.6.0)
-      karafka-rdkafka (>= 0.
-      waterdrop (>= 2.8.
+      karafka-rdkafka (>= 0.23.1)
+      waterdrop (>= 2.8.14, < 3.0.0)
       zeitwerk (~> 2.3)

 GEM

@@ -62,32 +62,32 @@ GEM
     karafka-core (2.5.7)
       karafka-rdkafka (>= 0.20.0)
       logger (>= 1.6.0)
-    karafka-rdkafka (0.
-      ffi (~> 1.
+    karafka-rdkafka (0.23.1)
+      ffi (~> 1.17.1)
       json (> 2.0)
       logger
       mini_portile2 (~> 2.6)
       rake (> 12)
-    karafka-rdkafka (0.
-      ffi (~> 1.
+    karafka-rdkafka (0.23.1-aarch64-linux-gnu)
+      ffi (~> 1.17.1)
       json (> 2.0)
       logger
       mini_portile2 (~> 2.6)
       rake (> 12)
-    karafka-rdkafka (0.
-      ffi (~> 1.
+    karafka-rdkafka (0.23.1-arm64-darwin)
+      ffi (~> 1.17.1)
       json (> 2.0)
       logger
       mini_portile2 (~> 2.6)
       rake (> 12)
-    karafka-rdkafka (0.
-      ffi (~> 1.
+    karafka-rdkafka (0.23.1-x86_64-linux-gnu)
+      ffi (~> 1.17.1)
       json (> 2.0)
       logger
       mini_portile2 (~> 2.6)
       rake (> 12)
-    karafka-rdkafka (0.
-      ffi (~> 1.
+    karafka-rdkafka (0.23.1-x86_64-linux-musl)
+      ffi (~> 1.17.1)
       json (> 2.0)
       logger
       mini_portile2 (~> 2.6)

@@ -95,9 +95,9 @@ GEM
     karafka-testing (2.5.4)
       karafka (>= 2.5.0, < 2.6.0)
       waterdrop (>= 2.8.0)
-    karafka-web (0.11.
+    karafka-web (0.11.4)
       erubi (~> 1.4)
-      karafka (>= 2.5.
+      karafka (>= 2.5.2, < 2.6.0)
       karafka-core (>= 2.5.0, < 2.6.0)
       roda (~> 3.68, >= 3.69)
       tilt (~> 2.0)

@@ -107,7 +107,7 @@ GEM
     ostruct (0.6.3)
     raabro (1.4.0)
     rack (3.2.3)
-    rake (13.3.
+    rake (13.3.1)
     roda (3.97.0)
       rack
     rspec (3.13.2)

@@ -130,16 +130,20 @@ GEM
       simplecov_json_formatter (~> 0.1)
     simplecov-html (0.13.2)
     simplecov_json_formatter (0.1.4)
-    stringio (3.1.
+    stringio (3.1.8)
     tilt (2.6.1)
     tzinfo (2.0.6)
       concurrent-ruby (~> 1.0)
     uri (1.0.4)
     warning (1.5.0)
-    waterdrop (2.8.
+    waterdrop (2.8.14)
       karafka-core (>= 2.4.9, < 3.0.0)
-      karafka-rdkafka (>= 0.
+      karafka-rdkafka (>= 0.23.1)
       zeitwerk (~> 2.3)
+    yard (0.9.37)
+    yard-lint (1.2.3)
+      yard (~> 0.9)
+      zeitwerk (~> 2.6)
     zeitwerk (2.7.3)

 PLATFORMS

@@ -168,6 +172,7 @@ DEPENDENCIES
   simplecov
   stringio
   warning
+  yard-lint

 BUNDLED WITH
   2.7.1
data/karafka.gemspec
CHANGED
@@ -23,8 +23,8 @@ Gem::Specification.new do |spec|

   spec.add_dependency 'base64', '~> 0.2'
   spec.add_dependency 'karafka-core', '>= 2.5.6', '< 2.6.0'
-  spec.add_dependency 'karafka-rdkafka', '>= 0.
-  spec.add_dependency 'waterdrop', '>= 2.8.
+  spec.add_dependency 'karafka-rdkafka', '>= 0.23.1'
+  spec.add_dependency 'waterdrop', '>= 2.8.14', '< 3.0.0'
   spec.add_dependency 'zeitwerk', '~> 2.3'

   spec.required_ruby_version = '>= 3.2.0'
data/lib/active_job/queue_adapters/karafka_adapter.rb
CHANGED

@@ -21,7 +21,7 @@ module ActiveJob
     # - No Rails: Inherit from Object (standalone ActiveJob usage)
     #
     # @see https://github.com/sidekiq/sidekiq/issues/6746 Similar issue in Sidekiq
-    base = if defined?(Rails
+    base = if defined?(Rails::VERSION)
       (Rails::VERSION::MAJOR == 7 && Rails::VERSION::MINOR < 2 ? Object : AbstractAdapter)
     else
       # Fallback when Rails is not loaded

@@ -31,7 +31,7 @@ module ActiveJob
     # Karafka adapter for enqueuing jobs
     # This is here for ease of integration with ActiveJob.
     class KarafkaAdapter < base
-      include Karafka::Helpers::ConfigImporter.new(
+      include ::Karafka::Helpers::ConfigImporter.new(
         dispatcher: %i[internal active_job dispatcher]
       )
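The switch from a bare `Rails` check to `Rails::VERSION` matters because any gem can define an empty `Rails` module purely as a namespace, which would make the bare check truthy without the framework being present. A self-contained illustration of the difference:

```ruby
# Simulate a gem that defines Rails purely as a namespace
module Rails; end

defined?(Rails)          # => "constant" (truthy) - would wrongly take the Rails branch
defined?(Rails::VERSION) # => nil - correctly reports that Rails itself is not loaded
```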
data/lib/karafka/active_job/consumer.rb
CHANGED

@@ -5,7 +5,7 @@ module Karafka
   module ActiveJob
     # This is the consumer for ActiveJob that eats the messages enqueued with it one after another.
     # It marks the offset after each message, so we make sure, none of the jobs is executed twice
-    class Consumer <
+    class Consumer < Karafka::BaseConsumer
       include Helpers::ConfigImporter.new(
         deserializer: %i[internal active_job deserializer]
       )

@@ -14,7 +14,7 @@ module Karafka
       # @note ActiveJob does not support batches, so we just run one message after another
       def consume
         messages.each do |message|
-          break if Karafka::App.stopping?
+          break if ::Karafka::App.stopping?

           consume_job(message)
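For context on the `ConfigImporter` include used above: each `name: path` pair maps a reader method onto a path in the framework configuration tree. A rough sketch of what the generated reader amounts to (the generation mechanics are not shown in this diff; only the name-to-path mapping is):

```ruby
# Roughly equivalent hand-written reader for
# deserializer: %i[internal active_job deserializer]
def deserializer
  Karafka::App.config.internal.active_job.deserializer
end
```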
data/lib/karafka/active_job/current_attributes.rb
CHANGED

@@ -23,8 +23,8 @@ module Karafka
       .each { |expandable| expandable.class_attribute :_cattr_klasses, default: {} }

     # Do not double inject in case of running persist multiple times
-    Dispatcher.prepend(Persistence) unless Dispatcher
-    Consumer.prepend(Loading) unless Consumer
+    Dispatcher.prepend(Persistence) unless Dispatcher <= Persistence
+    Consumer.prepend(Loading) unless Consumer <= Loading

     klasses.map(&:to_s).each do |stringified_klass|
       # Prevent registering same klass multiple times
|
|
|
43
43
|
#
|
|
44
44
|
# @param job [ActiveJob::Base, #serialize] job to serialize. The job must respond to
|
|
45
45
|
# #serialize which returns a Hash of job attributes. When CurrentAttributes are used,
|
|
46
|
-
# this may be a JobWrapper instance instead of the original ActiveJob::Base.
|
|
46
|
+
# this may be a JobWrapper instance instead of the original ::ActiveJob::Base.
|
|
47
47
|
# @return [String] serialized job payload
|
|
48
48
|
def serialize(job)
|
|
49
49
|
::ActiveSupport::JSON.encode(job.serialize)
|
|
data/lib/karafka/active_job/dispatcher.rb
CHANGED

@@ -19,7 +19,7 @@ module Karafka

     # @param job [ActiveJob::Base] job
     def dispatch(job)
-
+      Karafka.producer.public_send(
         fetch_option(job, :dispatch_method, DEFAULTS),
         topic: job.queue_name,
         payload: serialize_job(job)

@@ -43,7 +43,7 @@ module Karafka
       end

       dispatches.each do |type, messages|
-
+        Karafka.producer.public_send(
           type,
           messages
         )
data/lib/karafka/admin/configs/resource.rb
CHANGED

@@ -40,10 +40,16 @@ module Karafka
       OPERATIONS_TYPES_MAP.each do |op_name, op_value|
         # Adds an outgoing operation to a given resource of a given type
         # Useful since we alter in batches and not one at a time
+        #
+        # For example, when op_name is :set and op_value is 0:
+        #   def set(name, value)
+        #     @operations[0] << Config.new(name: name, value: value.to_s)
+        #   end
+        default_value = op_name == :delete ? ' = nil' : ''
         class_eval <<~RUBY, __FILE__, __LINE__ + 1
           # @param name [String] name of the config to alter
           # @param value [String] value of the config
-          def #{op_name}(name, value
+          def #{op_name}(name, value#{default_value})
             @operations[#{op_value}] << Config.new(name: name, value: value.to_s)
           end
         RUBY
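With the `default_value` tweak, the generated `delete` method no longer requires an explicit value. A standalone sketch of the generation pattern (the class structure, map values, and `Config` struct are simplified stand-ins; only the generated method shape comes from the diff):

```ruby
class ResourceSketch
  # Stand-in for the real rdkafka operation type map
  OPERATIONS_TYPES_MAP = { set: 0, delete: 1 }.freeze

  Config = Struct.new(:name, :value, keyword_init: true)

  def initialize
    @operations = Hash.new { |hash, key| hash[key] = [] }
  end

  OPERATIONS_TYPES_MAP.each do |op_name, op_value|
    # :delete gets an optional value argument, :set keeps it required
    default_value = op_name == :delete ? ' = nil' : ''

    class_eval <<~RUBY, __FILE__, __LINE__ + 1
      def #{op_name}(name, value#{default_value})
        @operations[#{op_value}] << Config.new(name: name, value: value.to_s)
      end
    RUBY
  end
end

resource = ResourceSketch.new
resource.set('retention.ms', 86_400_000)
resource.delete('cleanup.policy') # no value needed after this change
```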
data/lib/karafka/admin/consumer_groups.rb
CHANGED

@@ -80,15 +80,13 @@ module Karafka
       case casted_position
       # Earliest is not always 0. When compacting/deleting it can be much later, that's why
       # we fetch the oldest possible offset
-      when 'earliest'
+      # false is treated the same as 'earliest'
+      when 'earliest', false
         LONG_TIME_AGO
       # Latest will always be the high-watermark offset and we can get it just by getting
       # a future position
       when 'latest'
         Time.now + DAY_IN_SECONDS
-      # Same as `'earliest'`
-      when false
-        LONG_TIME_AGO
       # Regular offset case
       else
         position

@@ -307,14 +305,14 @@ module Karafka
       # Reads lags and offsets for given topics in the context of consumer groups defined in the
       # routing
       #
-      # @param consumer_groups_with_topics [Hash
+      # @param consumer_groups_with_topics [Hash{String => Array<String>}] hash with consumer
       #   groups names with array of topics to query per consumer group inside
       # @param active_topics_only [Boolean] if set to false, when we use routing topics, will
       #   select also topics that are marked as inactive in routing
       #
-      # @return [Hash
-      #   the consumer groups and values are hashes with topics and inside
-      #   and offsets
+      # @return [Hash{String => Hash{Integer => Hash{Integer => Object}}}] hash where the top
+      #   level keys are the consumer groups and values are hashes with topics and inside
+      #   partitions with lags and offsets
       #
       # @note For topics that do not exist, topic details will be set to an empty hash
       #
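A usage sketch matching the corrected return documentation; the group and topic names are invented and the exact keys of the innermost hash are an assumption:

```ruby
lags = Karafka::Admin.read_lags_with_offsets(
  { 'example_app_group' => ['events'] }
)

# Documented shape: consumer group => topic => partition => lag/offset details,
# e.g. { 'example_app_group' => { 'events' => { 0 => { lag: 0, offset: 42 } } } }
```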
data/lib/karafka/admin/topics.rb
CHANGED
@@ -36,7 +36,7 @@ module Karafka

       # Build the requested range - since first element is on the start offset we need to
       # subtract one from requested count to end up with expected number of elements
-      requested_range = (start_offset..(start_offset +
+      requested_range = (start_offset..(start_offset + count - 1))
       # Establish theoretical available range. Note, that this does not handle cases related
       # to log retention or compaction
       available_range = (low_offset..(high_offset - 1))

@@ -75,7 +75,7 @@ module Karafka
       # Use topic from routes if we can match it or create a dummy one
       # Dummy one is used in case we cannot match the topic with routes. This can happen
       # when admin API is used to read topics that are not part of the routing
-      topic =
+      topic = Karafka::Routing::Router.find_or_initialize_by_name(name)

       messages.map! do |message|
         Messages::Builders::Message.call(

@@ -143,7 +143,8 @@ module Karafka
     # partitions
     #
     # @param name_or_hash [String, Symbol, Hash] topic name or hash with topics and partitions
-    # @param partition [Integer, nil] partition number
+    # @param partition [Integer, nil] partition number
+    #   (required when first param is topic name)
     #
     # @return [Array<Integer, Integer>, Hash] when querying single partition returns array with
     #   low and high watermark offsets, when querying multiple returns nested hash

@@ -217,7 +218,7 @@ module Karafka
     # @return [Integer] expected offset
     def resolve_offset(consumer, name, partition, offset)
       if offset.is_a?(Time)
-        tpl =
+        tpl = Rdkafka::Consumer::TopicPartitionList.new
         tpl.add_topic_and_partitions_with_offsets(
           name, partition => offset
         )
data/lib/karafka/admin.rb
CHANGED
@@ -124,13 +124,13 @@ module Karafka

     # Reads lags and offsets for given topics in the context of consumer groups defined in the
     # routing
-    # @param consumer_groups_with_topics [Hash
-    #   names with array of topics to query per consumer group inside
+    # @param consumer_groups_with_topics [Hash{String => Array<String>}] hash with consumer
+    #   groups names with array of topics to query per consumer group inside
     # @param active_topics_only [Boolean] if set to false, when we use routing topics, will
     #   select also topics that are marked as inactive in routing
-    # @return [Hash
-    #   the consumer groups and values are hashes with topics and inside
-    #   and offsets
+    # @return [Hash{String => Hash{Integer => Hash{Integer => Object}}}] hash where the top
+    #   level keys are the consumer groups and values are hashes with topics and inside
+    #   partitions with lags and offsets
     # @see ConsumerGroups.read_lags_with_offsets
     def read_lags_with_offsets(consumer_groups_with_topics = {}, active_topics_only: true)
       ConsumerGroups.read_lags_with_offsets(

@@ -158,7 +158,7 @@ module Karafka
       bind_oauth(bind_id, consumer)

       consumer.start
-      proxy =
+      proxy = Karafka::Connection::Proxy.new(consumer)
       yield(proxy)
     ensure
       # Always unsubscribe consumer just to be sure, that no metadata requests are running

@@ -188,7 +188,7 @@ module Karafka
       bind_oauth(bind_id, admin)

       admin.start
-      proxy =
+      proxy = Karafka::Connection::Proxy.new(admin)
       yield(proxy)
     ensure
       admin&.close

@@ -211,7 +211,7 @@ module Karafka
     # @param instance [Rdkafka::Consumer, Rdkafka::Admin] rdkafka instance to be used to set
     #   appropriate oauth token when needed
     def bind_oauth(id, instance)
-
+      Karafka::Core::Instrumentation.oauthbearer_token_refresh_callbacks.add(
         id,
         Instrumentation::Callbacks::OauthbearerTokenRefresh.new(
           instance

@@ -224,7 +224,7 @@ module Karafka
     # @param id [String, Symbol] unique (for the lifetime of instance) id that we use for
     #   callback referencing
     def unbind_oauth(id)
-
+      Karafka::Core::Instrumentation.oauthbearer_token_refresh_callbacks.delete(id)
     end

     # There are some cases where rdkafka admin operations finish successfully but without the

@@ -269,7 +269,7 @@ module Karafka
         # consumer group or do something similar
         .merge!(settings)
         .then { |config| Karafka::Setup::AttributesMap.public_send(type, config) }
-        .then { |config|
+        .then { |config| Rdkafka::Config.new(config) }
     end
   end
 end
data/lib/karafka/app.rb
CHANGED
@@ -52,7 +52,7 @@ module Karafka

     # Returns current assignments of this process. Both topics and partitions
     #
-    # @return [Hash
+    # @return [Hash{Karafka::Routing::Topic => Array<Integer>}]
     def assignments
       Instrumentation::AssignmentsTracker.instance.current
     end

@@ -102,8 +102,8 @@ module Karafka
     #
     # @param contexts [String] librdkafka low level debug contexts for granular debugging
     def debug!(contexts = 'all')
-      logger.level =
-      producer.config.logger.level =
+      logger.level = Logger::DEBUG
+      producer.config.logger.level = Logger::DEBUG

       config.kafka[:debug] = contexts
       producer.config.kafka[:debug] = contexts
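A usage sketch for the `debug!` helper shown above, assuming it is invoked on `Karafka::App` as the rest of app.rb suggests; the context list is composed of standard librdkafka debug contexts:

```ruby
# Raises both the framework and producer loggers to DEBUG and forwards the
# debug contexts to librdkafka
Karafka::App.debug!('broker,topic,fetch')
```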
data/lib/karafka/base_consumer.rb
CHANGED

@@ -5,7 +5,7 @@ module Karafka
   # Base consumer from which all Karafka consumers should inherit
   class BaseConsumer
     # Allow for consumer instance tagging for instrumentation
-    include
+    include Karafka::Core::Taggable
     include Helpers::ConfigImporter.new(
       monitor: %i[monitor]
     )
data/lib/karafka/cli/base.rb
CHANGED
@@ -72,7 +72,7 @@ module Karafka

         # All other commands except help and install do require an existing boot file if it was
         # declared
-        raise
+        raise Karafka::Errors::MissingBootFileError, Karafka.boot_file
       end

       # Allows to set options for Thor cli
data/lib/karafka/cli/console.rb
CHANGED