karafka 2.5.2 → 2.5.4.rc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +16 -0
- data/config/locales/errors.yml +14 -0
- data/karafka.gemspec +15 -4
- data/lib/active_job/queue_adapters/karafka_adapter.rb +2 -2
- data/lib/karafka/active_job/consumer.rb +2 -2
- data/lib/karafka/active_job/current_attributes.rb +2 -2
- data/lib/karafka/active_job/deserializer.rb +1 -1
- data/lib/karafka/active_job/dispatcher.rb +2 -2
- data/lib/karafka/admin/configs/resource.rb +7 -1
- data/lib/karafka/admin/consumer_groups.rb +6 -8
- data/lib/karafka/admin/contracts/replication.rb +149 -0
- data/lib/karafka/admin/replication.rb +462 -0
- data/lib/karafka/admin/topics.rb +5 -4
- data/lib/karafka/admin.rb +57 -12
- data/lib/karafka/app.rb +3 -3
- data/lib/karafka/base_consumer.rb +1 -1
- data/lib/karafka/cli/base.rb +1 -1
- data/lib/karafka/cli/console.rb +1 -1
- data/lib/karafka/cli/contracts/server.rb +1 -1
- data/lib/karafka/cli/help.rb +1 -1
- data/lib/karafka/cli/install.rb +2 -1
- data/lib/karafka/cli/server.rb +1 -1
- data/lib/karafka/cli/swarm.rb +1 -1
- data/lib/karafka/connection/client.rb +19 -18
- data/lib/karafka/connection/manager.rb +1 -0
- data/lib/karafka/connection/proxy.rb +1 -1
- data/lib/karafka/connection/rebalance_manager.rb +1 -1
- data/lib/karafka/connection/status.rb +1 -0
- data/lib/karafka/constraints.rb +1 -1
- data/lib/karafka/contracts/base.rb +1 -1
- data/lib/karafka/deserializers/payload.rb +1 -1
- data/lib/karafka/helpers/async.rb +1 -1
- data/lib/karafka/helpers/config_importer.rb +3 -3
- data/lib/karafka/helpers/multi_delegator.rb +3 -0
- data/lib/karafka/instrumentation/assignments_tracker.rb +2 -1
- data/lib/karafka/instrumentation/callbacks/error.rb +2 -2
- data/lib/karafka/instrumentation/callbacks/statistics.rb +3 -3
- data/lib/karafka/instrumentation/logger.rb +6 -6
- data/lib/karafka/instrumentation/logger_listener.rb +0 -2
- data/lib/karafka/instrumentation/monitor.rb +2 -2
- data/lib/karafka/instrumentation/vendors/appsignal/base.rb +1 -1
- data/lib/karafka/instrumentation/vendors/appsignal/metrics_listener.rb +4 -0
- data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +32 -16
- data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +2 -2
- data/lib/karafka/instrumentation/vendors/kubernetes/base_listener.rb +1 -1
- data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +3 -15
- data/lib/karafka/licenser.rb +1 -1
- data/lib/karafka/messages/builders/batch_metadata.rb +1 -1
- data/lib/karafka/messages/messages.rb +32 -0
- data/lib/karafka/pro/active_job/consumer.rb +2 -2
- data/lib/karafka/pro/active_job/dispatcher.rb +3 -3
- data/lib/karafka/pro/cleaner/messages/messages.rb +1 -1
- data/lib/karafka/pro/cleaner.rb +3 -3
- data/lib/karafka/pro/cli/contracts/server.rb +1 -1
- data/lib/karafka/pro/cli/parallel_segments/base.rb +4 -3
- data/lib/karafka/pro/cli/parallel_segments/collapse.rb +1 -1
- data/lib/karafka/pro/cli/parallel_segments/distribute.rb +1 -1
- data/lib/karafka/pro/cli/parallel_segments.rb +1 -1
- data/lib/karafka/pro/connection/manager.rb +1 -2
- data/lib/karafka/pro/connection/multiplexing/listener.rb +1 -0
- data/lib/karafka/pro/contracts/base.rb +1 -1
- data/lib/karafka/pro/encryption/cipher.rb +3 -2
- data/lib/karafka/pro/encryption/contracts/config.rb +1 -1
- data/lib/karafka/pro/encryption/messages/parser.rb +1 -1
- data/lib/karafka/pro/encryption/setup/config.rb +1 -1
- data/lib/karafka/pro/iterator/tpl_builder.rb +1 -1
- data/lib/karafka/pro/iterator.rb +1 -1
- data/lib/karafka/pro/loader.rb +1 -1
- data/lib/karafka/pro/processing/coordinator.rb +1 -1
- data/lib/karafka/pro/processing/filters/base.rb +1 -0
- data/lib/karafka/pro/processing/filters/delayer.rb +1 -1
- data/lib/karafka/pro/processing/filters/expirer.rb +1 -1
- data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +1 -1
- data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +1 -1
- data/lib/karafka/pro/processing/jobs/eofed_non_blocking.rb +1 -1
- data/lib/karafka/pro/processing/jobs/periodic.rb +1 -1
- data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +1 -1
- data/lib/karafka/pro/processing/jobs_builder.rb +1 -1
- data/lib/karafka/pro/processing/jobs_queue.rb +0 -2
- data/lib/karafka/pro/processing/offset_metadata/fetcher.rb +1 -0
- data/lib/karafka/pro/processing/partitioner.rb +1 -1
- data/lib/karafka/pro/processing/strategies/base.rb +1 -1
- data/lib/karafka/pro/processing/strategies/default.rb +2 -2
- data/lib/karafka/pro/processing/strategies/dlq/default.rb +1 -1
- data/lib/karafka/pro/processing/strategies/vp/default.rb +1 -1
- data/lib/karafka/pro/processing/strategy_selector.rb +1 -0
- data/lib/karafka/pro/processing/virtual_partitions/distributors/balanced.rb +4 -2
- data/lib/karafka/pro/processing/virtual_partitions/distributors/consistent.rb +4 -2
- data/lib/karafka/pro/recurring_tasks/consumer.rb +3 -2
- data/lib/karafka/pro/recurring_tasks/contracts/config.rb +2 -2
- data/lib/karafka/pro/recurring_tasks/contracts/task.rb +1 -1
- data/lib/karafka/pro/recurring_tasks/deserializer.rb +1 -1
- data/lib/karafka/pro/recurring_tasks/dispatcher.rb +1 -1
- data/lib/karafka/pro/recurring_tasks/executor.rb +2 -1
- data/lib/karafka/pro/recurring_tasks/schedule.rb +5 -2
- data/lib/karafka/pro/recurring_tasks/serializer.rb +6 -5
- data/lib/karafka/pro/recurring_tasks/setup/config.rb +2 -2
- data/lib/karafka/pro/recurring_tasks/task.rb +1 -1
- data/lib/karafka/pro/routing/features/dead_letter_queue/topic.rb +3 -0
- data/lib/karafka/pro/routing/features/multiplexing/subscription_groups_builder.rb +1 -1
- data/lib/karafka/pro/routing/features/multiplexing.rb +5 -5
- data/lib/karafka/pro/routing/features/offset_metadata.rb +4 -4
- data/lib/karafka/pro/routing/features/parallel_segments/builder.rb +1 -1
- data/lib/karafka/pro/routing/features/patterns/patterns.rb +1 -1
- data/lib/karafka/pro/routing/features/periodic_job/topic.rb +1 -1
- data/lib/karafka/pro/routing/features/recurring_tasks/builder.rb +1 -1
- data/lib/karafka/pro/routing/features/swarm.rb +1 -1
- data/lib/karafka/pro/routing/features/throttling/topic.rb +3 -1
- data/lib/karafka/pro/scheduled_messages/consumer.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/contracts/config.rb +2 -2
- data/lib/karafka/pro/scheduled_messages/contracts/message.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +3 -2
- data/lib/karafka/pro/scheduled_messages/day.rb +1 -0
- data/lib/karafka/pro/scheduled_messages/deserializers/headers.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/deserializers/payload.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/max_epoch.rb +1 -0
- data/lib/karafka/pro/scheduled_messages/proxy.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/serializer.rb +3 -3
- data/lib/karafka/pro/scheduled_messages/setup/config.rb +2 -2
- data/lib/karafka/pro/scheduled_messages/state.rb +1 -0
- data/lib/karafka/pro/scheduled_messages/tracker.rb +1 -0
- data/lib/karafka/process.rb +4 -4
- data/lib/karafka/processing/executor.rb +1 -1
- data/lib/karafka/processing/inline_insights/tracker.rb +1 -0
- data/lib/karafka/processing/jobs_queue.rb +1 -1
- data/lib/karafka/processing/result.rb +1 -0
- data/lib/karafka/processing/strategies/dlq.rb +1 -1
- data/lib/karafka/processing/strategy_selector.rb +1 -0
- data/lib/karafka/routing/activity_manager.rb +1 -0
- data/lib/karafka/routing/builder.rb +3 -1
- data/lib/karafka/routing/consumer_group.rb +19 -1
- data/lib/karafka/routing/contracts/consumer_group.rb +3 -2
- data/lib/karafka/routing/contracts/topic.rb +5 -2
- data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +1 -1
- data/lib/karafka/routing/features/declaratives/topic.rb +5 -2
- data/lib/karafka/routing/features/deserializers/topic.rb +3 -3
- data/lib/karafka/routing/features/inline_insights.rb +5 -5
- data/lib/karafka/routing/router.rb +1 -1
- data/lib/karafka/routing/subscription_group.rb +2 -2
- data/lib/karafka/routing/subscription_groups_builder.rb +18 -2
- data/lib/karafka/routing/topic.rb +3 -3
- data/lib/karafka/server.rb +1 -1
- data/lib/karafka/setup/attributes_map.rb +4 -2
- data/lib/karafka/setup/config.rb +21 -10
- data/lib/karafka/setup/config_proxy.rb +209 -0
- data/lib/karafka/setup/contracts/config.rb +1 -1
- data/lib/karafka/swarm/liveness_listener.rb +1 -0
- data/lib/karafka/swarm/manager.rb +7 -6
- data/lib/karafka/swarm/node.rb +1 -1
- data/lib/karafka/swarm/supervisor.rb +1 -0
- data/lib/karafka/time_trackers/base.rb +1 -1
- data/lib/karafka/version.rb +1 -1
- data/lib/karafka.rb +2 -3
- metadata +8 -65
- data/.coditsu/ci.yml +0 -3
- data/.console_irbrc +0 -11
- data/.github/CODEOWNERS +0 -3
- data/.github/FUNDING.yml +0 -1
- data/.github/ISSUE_TEMPLATE/bug_report.md +0 -43
- data/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
- data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +0 -278
- data/.github/workflows/ci_macos_arm64.yml +0 -151
- data/.github/workflows/push.yml +0 -35
- data/.github/workflows/trigger-wiki-refresh.yml +0 -30
- data/.github/workflows/verify-action-pins.yml +0 -16
- data/.gitignore +0 -69
- data/.rspec +0 -7
- data/.ruby-gemset +0 -1
- data/.ruby-version +0 -1
- data/CODE_OF_CONDUCT.md +0 -46
- data/CONTRIBUTING.md +0 -32
- data/Gemfile +0 -28
- data/Gemfile.lock +0 -173
- data/Rakefile +0 -4
- data/SECURITY.md +0 -23
- data/bin/benchmarks +0 -99
- data/bin/clean_kafka +0 -43
- data/bin/create_token +0 -22
- data/bin/integrations +0 -341
- data/bin/record_rss +0 -50
- data/bin/rspecs +0 -26
- data/bin/scenario +0 -29
- data/bin/stress_many +0 -13
- data/bin/stress_one +0 -13
- data/bin/verify_kafka_warnings +0 -36
- data/bin/verify_license_integrity +0 -37
- data/bin/verify_topics_naming +0 -27
- data/bin/wait_for_kafka +0 -24
- data/docker-compose.yml +0 -25
- data/examples/payloads/avro/.gitkeep +0 -0
- data/examples/payloads/json/sample_set_01/enrollment_event.json +0 -579
- data/examples/payloads/json/sample_set_01/ingestion_event.json +0 -30
- data/examples/payloads/json/sample_set_01/transaction_event.json +0 -17
- data/examples/payloads/json/sample_set_01/user_event.json +0 -11
- data/examples/payloads/json/sample_set_02/download.json +0 -191
- data/examples/payloads/json/sample_set_03/event_type_1.json +0 -18
- data/examples/payloads/json/sample_set_03/event_type_2.json +0 -263
- data/examples/payloads/json/sample_set_03/event_type_3.json +0 -41
- data/log/.gitkeep +0 -0
- data/renovate.json +0 -21
@@ -10,7 +10,7 @@ module Karafka
       module Setup
         # Config for recurring tasks
         class Config
-          extend
+          extend Karafka::Core::Configurable

           setting(:consumer_class, default: Consumer)
           setting(:deserializer, default: Deserializer.new)
@@ -27,7 +27,7 @@ module Karafka
           #   a separate instance in case of heavy usage of the transactional producer, etc.
           setting(
             :producer,
-            constructor: -> {
+            constructor: -> { Karafka.producer },
             lazy: true
           )

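The `lazy: true` constructor above defers producer resolution until first access, so the setting picks up whatever `Karafka.producer` the application has configured by then. A minimal sketch of the same mechanism, assuming karafka is loaded (`ExampleConfig` is illustrative, not part of the gem):

    require 'karafka'

    # Illustrative config class showing lazy constructor behavior
    class ExampleConfig
      extend Karafka::Core::Configurable

      # Not resolved at definition time; the lambda runs on first read
      setting(:producer, constructor: -> { Karafka.producer }, lazy: true)
    end

    ExampleConfig.config.producer # constructor executes here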
@@ -31,7 +31,7 @@ module Karafka
         # @param block [Proc] code to execute.
         def initialize(id:, cron:, previous_time: 0, enabled: true, &block)
           @id = id
-          @cron =
+          @cron = Fugit::Cron.do_parse(cron)
           @previous_time = previous_time
           @start_time = Time.now
           @executable = block
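`Fugit::Cron.do_parse` raises on malformed expressions instead of returning `nil` (which plain `parse` does), so an invalid schedule fails at definition time rather than silently never running. A quick sketch:

    require 'fugit'

    cron = Fugit::Cron.do_parse('*/5 * * * *')
    cron.next_time                # => next occurrence as an EtOrbi::EoTime
    Fugit::Cron.parse('bogus')    # => nil
    Fugit::Cron.do_parse('bogus') # raises ArgumentError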
@@ -22,6 +22,9 @@ module Karafka
         # @param strategy [#call, nil] Strategy we want to use or nil if a default strategy
         #   (same as in OSS) should be applied
         # @param args [Hash] Pro DLQ arguments
+        # @option args [String, nil] :topic name of the dead letter queue topic
+        # @option args [Integer] :max_retries maximum number of retries before dispatch to DLQ
+        # @option args [Boolean] :independent whether DLQ runs independently
         def dead_letter_queue(strategy: nil, **args)
           return @dead_letter_queue if @dead_letter_queue

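The newly documented options correspond to the Pro DLQ routing DSL. A sketch of a declaration using them, assuming a configured `KarafkaApp` (the consumer class and topic names are illustrative):

    KarafkaApp.routes.draw do
      topic :orders do
        consumer OrdersConsumer # illustrative consumer class
        dead_letter_queue(
          topic: 'orders_dlq', # where failing messages are dispatched
          max_retries: 3,      # retries before dispatching to the DLQ
          independent: true    # track the retry counter independently
        )
      end
    end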
@@ -19,7 +19,7 @@ module Karafka
           factor = topics_array.first.subscription_group_details.fetch(:multiplexing_max, 1)

           Array.new(factor) do |i|
-
+            Karafka::Routing::Topics.new(
               i.zero? ? topics_array : topics_array.map(&:dup)
             )
           end
@@ -17,7 +17,7 @@ module Karafka
         # @param _config [Karafka::Core::Configurable::Node] app config node
         def pre_setup(_config)
           # Make sure we use proper unique validator for topics definitions
-
+          Karafka::Routing::Contracts::ConsumerGroup.singleton_class.prepend(
             Patches::Contracts::ConsumerGroup
           )
         end
@@ -26,11 +26,11 @@ module Karafka
         #
         # @param _config [Karafka::Core::Configurable::Node] app config
         def post_setup(_config)
-
+          Karafka::App.monitor.subscribe('app.running') do
             # Do not install the manager and listener to control multiplexing unless there is
             # multiplexing enabled and it is dynamic.
             # We only need to control multiplexing when it is in a dynamic state
-            next unless
+            next unless Karafka::App
                         .subscription_groups
                         .values
                         .flat_map(&:itself)
@@ -38,8 +38,8 @@ module Karafka

             # Subscribe for events and possibility to manage via the Pro connection manager
             # that supports multiplexing
-
-
+            Karafka.monitor.subscribe(
+              Karafka::Pro::Connection::Multiplexing::Listener.new
             )
           end
         end
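Listeners like the multiplexing one above are plain objects whose `on_*` method names mirror instrumentation event names. A minimal sketch using the public `error.occurred` event (the listener class is illustrative):

    class ErrorsListener
      # Called for every 'error.occurred' event published on the monitor
      def on_error_occurred(event)
        puts "#{event[:type]}: #{event[:error].message}"
      end
    end

    Karafka.monitor.subscribe(ErrorsListener.new)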
@@ -14,13 +14,13 @@ module Karafka
         #
         # @param _config [Karafka::Core::Configurable::Node] app config
         def post_setup(_config)
-
+          Karafka::App.monitor.subscribe('app.running') do
             # Initialize the tracker prior to becoming multi-threaded
-
+            Karafka::Processing::InlineInsights::Tracker.instance

             # Subscribe to the statistics reports and collect them
-
-
+            Karafka.monitor.subscribe(
+              Karafka::Pro::Processing::OffsetMetadata::Listener.new
             )
           end
         end
@@ -23,7 +23,7 @@ module Karafka
           # We build a temp consumer group and a target to check if it has parallel segments
           # enabled and if so, we do not add it to the routing but instead we build the
           # appropriate number of parallel segment groups
-          temp_consumer_group =
+          temp_consumer_group = Karafka::Routing::ConsumerGroup.new(group_id.to_s)
           temp_target = Karafka::Routing::Proxy.new(temp_consumer_group, &block).target
           config = temp_target.parallel_segments

@@ -9,7 +9,7 @@ module Karafka
       module Features
         class Patterns < Base
           # Representation of groups of topics
-          class Patterns <
+          class Patterns < Karafka::Routing::Topics
             # Finds first pattern matching given topic name
             #
             # @param topic_name [String] topic name that may match a pattern
@@ -108,7 +108,7 @@ module Karafka
           require 'fugit'
         rescue LoadError
           raise(
-
+            Karafka::Errors::DependencyConstraintsError,
             <<~ERROR_MSG
               Failed to require fugit gem.
               Add it to your Gemfile, as it is required for the recurring tasks to work.
@@ -14,7 +14,7 @@ module Karafka
       # Binds our routing validation contract prior to warmup in the supervisor, so we can
       # run it when all the context should be there (config + full routing)
       #
-      # @param config [Karafka::Core::Configurable::Node]
+      # @param config [Karafka::Core::Configurable::Node]
       def post_setup(config)
         config.monitor.subscribe('app.before_warmup') do
           Contracts::Routing.new.validate!(
@@ -46,7 +46,9 @@ module Karafka

         # Just an alias for nice API
         #
-        # @param args [
+        # @param args [Hash] Anything `#throttling` accepts
+        # @option args [Integer] :limit max messages to process in a time interval
+        # @option args [Integer] :interval time interval for processing in milliseconds
         def throttle(**args)
           throttling(**args)
         end
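Since `#throttle` simply forwards to `#throttling`, both spellings configure the same limiter. A sketch using the documented options, assuming a configured `KarafkaApp` (topic and consumer are illustrative):

    KarafkaApp.routes.draw do
      topic :visits do
        consumer VisitsConsumer
        # process at most 100 messages per 60 seconds on this topic
        throttle(limit: 100, interval: 60_000)
      end
    end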
@@ -7,7 +7,7 @@ module Karafka
   module Pro
     module ScheduledMessages
       # Consumer that coordinates scheduling of messages when the time comes
-      class Consumer <
+      class Consumer < Karafka::BaseConsumer
         include Helpers::ConfigImporter.new(
           dispatcher_class: %i[scheduled_messages dispatcher_class]
         )
@@ -9,7 +9,7 @@ module Karafka
       # Recurring Tasks related contracts
       module Contracts
         # Makes sure, all the expected config is defined as it should be
-        class Config <
+        class Config < Karafka::Contracts::Base
           configure do |config|
             config.error_messages = YAML.safe_load_file(
               File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
@@ -17,7 +17,7 @@ module Karafka
           end

           nested(:scheduled_messages) do
-            required(:consumer_class) { |val| val <
+            required(:consumer_class) { |val| val < Karafka::BaseConsumer }

             # Do not allow to run more often than every second
             required(:interval) { |val| val.is_a?(Integer) && val >= 1_000 }
@@ -11,7 +11,7 @@ module Karafka
         #
         # Our envelope always needs to comply with this format, otherwise we won't have enough
         # details to be able to dispatch the message
-        class Message <
+        class Message < Karafka::Contracts::Base
           configure do |config|
             config.error_messages = YAML.safe_load_file(
               File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
@@ -14,6 +14,7 @@ module Karafka
       #   up to 1.5 second, thus it is acceptable. Please ping me if you encounter performance
       #   issues with this naive implementation so it can be improved.
       class DailyBuffer
+        # Initializes the daily buffer with empty accumulator
         def initialize
           @accu = {}
         end
@@ -69,8 +70,8 @@ module Karafka
           selected.each { |_, message| yield(message) }
         end

-        # Removes given key from the
-        # @param key [String]
+        # Removes the schedule entry identified by the given key from the daily buffer
+        # @param key [String]
         def delete(key)
           @accu.delete(key)
         end
@@ -21,7 +21,7 @@ module Karafka
       module Proxy
         # General WaterDrop message contract. Before we envelop a message, we need to be certain
         # it is correct, hence we use this contract.
-        MSG_CONTRACT =
+        MSG_CONTRACT = WaterDrop::Contracts::Message.new(
           # Payload size is a subject to the target producer dispatch validation, so we set it
           # to 100MB basically to ignore it here.
           max_payload_size: 104_857_600
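Contract instances built on karafka-core respond to `#call` and return a result object, so the envelope check can be exercised directly. A sketch, assuming WaterDrop is available:

    require 'waterdrop'

    contract = WaterDrop::Contracts::Message.new(max_payload_size: 104_857_600)
    result = contract.call(topic: 'events', payload: '{}')
    result.success? # => true when the message shape is valid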
@@ -9,7 +9,7 @@ module Karafka
       # Serializers used to build payloads (if applicable) for dispatch
       # @note We only deal with states payload. Other payloads are not ours but end users.
       class Serializer
-        include
+        include Karafka::Core::Helpers::Time

         # @param tracker [Tracker] tracker based on which we build the state
         # @return [String] compressed payload with the state details
@@ -32,9 +32,9 @@ module Karafka
           hash.to_json
         end

-        # Compresses the provided data
+        # Compresses the provided data using Zlib deflate algorithm
         #
-        # @param data [String]
+        # @param data [String]
         # @return [String] compressed data
         def compress(data)
           Zlib::Deflate.deflate(data)
|
|
|
10
10
|
module Setup
|
|
11
11
|
# Config for recurring tasks
|
|
12
12
|
class Config
|
|
13
|
-
extend
|
|
13
|
+
extend Karafka::Core::Configurable
|
|
14
14
|
|
|
15
15
|
setting(:consumer_class, default: Consumer)
|
|
16
16
|
setting(:group_id, default: 'karafka_scheduled_messages')
|
|
@@ -27,7 +27,7 @@ module Karafka
           # Producer to use. By default uses default Karafka producer.
           setting(
             :producer,
-            constructor: -> {
+            constructor: -> { Karafka.producer },
             lazy: true
           )

data/lib/karafka/process.rb CHANGED
@@ -5,7 +5,7 @@ module Karafka
   # @note There might be only one process - this class is a singleton
   class Process
     # Allow for process tagging for instrumentation
-    extend
+    extend Karafka::Core::Taggable

     # Signal types that we handle
     HANDLED_SIGNALS = %i[
@@ -79,12 +79,12 @@ module Karafka
     private

     # Traps a single signal and performs callbacks (if any) or just ignores this signal
-    # @param [Symbol] signal type that we want to catch
+    # @param signal [Symbol] signal type that we want to catch
     # @note Since we do a lot of threading and queuing, we don't want to handle signals from the
     #   trap context s some things may not work there as expected, that is why we spawn a separate
     #   thread to handle the signals process
     def trap_signal(signal)
-      previous_handler =
+      previous_handler = Signal.trap(signal) do
         Thread.new do
           notice_signal(signal)

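`Signal.trap` returns the previously installed handler, which is what makes the chaining above work. A standalone sketch of the pattern:

    # The previous handler may be a Proc or a String such as "DEFAULT",
    # hence the respond_to?(:call) guard before chaining into it
    previous_handler = Signal.trap(:TERM) do
      Thread.new { puts 'running our handler' }

      previous_handler.call if previous_handler.respond_to?(:call)
    end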
@@ -96,7 +96,7 @@ module Karafka
     end

     # Informs monitoring about trapped signal
-    # @param [Symbol] signal type that we received
+    # @param signal [Symbol] signal type that we received
     def notice_signal(signal)
       Karafka.monitor.instrument('process.notice_signal', caller: self, signal: signal)
     end
@@ -184,7 +184,7 @@ module Karafka
       # We assign producer only when not available already. It may already be available if
       # user redefined the `#producer` method for example. This can be useful for example when
       # having a multi-cluster setup and using a totally custom producer
-      consumer.producer ||=
+      consumer.producer ||= Karafka::App.producer
       # Since we have some message-less flows (idle, etc), we initialize consumer with empty
       # messages set. In production we have persistent consumers, so this is not a performance
       # overhead as this will happen only once per consumer lifetime
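Because of the `||=`, a consumer that already returns its own producer keeps it. A hedged sketch of a consumer pinned to a second cluster (broker address and class names are illustrative):

    class MultiClusterConsumer < Karafka::BaseConsumer
      # Returning a non-nil producer here means the executor's
      # `consumer.producer ||= ...` assignment leaves it untouched
      def producer
        @producer ||= WaterDrop::Producer.new do |config|
          config.kafka = { 'bootstrap.servers': 'secondary-cluster:9092' }
        end
      end

      def consume
        # messages come from the primary cluster, dispatches go to the secondary
      end
    end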
@@ -99,7 +99,7 @@ module Karafka
     # Marks a given job from a given group as completed. When there are no more jobs from a given
     # group to be executed, we won't wait.
     #
-    # @param [Jobs::Base] job that was completed
+    # @param job [Jobs::Base] job that was completed
     def complete(job)
       @mutex.synchronize do
         # We finish one job and if there is another, we pick it up
@@ -94,7 +94,7 @@ module Karafka
       # @return [Array<Karafka::Messages::Message, Boolean>] message we may want to skip and
       #   information if this message was from marked offset or figured out via mom flow
       def find_skippable_message
-        skippable_message = messages.find do |msg|
+        skippable_message = messages.raw.find do |msg|
          coordinator.marked? && msg.offset == seek_offset
        end

@@ -23,6 +23,7 @@ module Karafka

       private_constant :EMPTY_DEFAULTS

+      # Initializes the routing builder with empty routes
       def initialize
         @mutex = Mutex.new
         @draws = []
@@ -138,8 +139,9 @@ module Karafka
       #   subscription group customization
       # @param subscription_group_name [String, Symbol] subscription group id. When not provided,
       #   a random uuid will be used
-      # @param args [
+      # @param args [Hash] any extra arguments accepted by the subscription group builder
       # @param block [Proc] further topics definitions
+      # @option args [String] :kafka optional kafka scope settings
       def subscription_group(
         subscription_group_name = SubscriptionGroup.id,
         **args,
@@ -31,6 +31,9 @@ module Karafka
       # Initialize the subscription group so there's always a value for it, since even if not
       #   defined directly, a subscription group will be created
       @current_subscription_group_details = { name: SubscriptionGroup.id }
+      # Track the base position for subscription groups to ensure stable positions when
+      # rebuilding. This is critical for static group membership in swarm mode
+      @subscription_groups_base_position = nil
     end

     # @return [Boolean] true if this consumer group should be active in our current process
@@ -42,6 +45,10 @@ module Karafka
     # @param name [String, Symbol] name of topic to which we want to subscribe
     # @return [Karafka::Routing::Topic] newly built topic instance
     def topic=(name, &)
+      # Clear memoized subscription groups since adding a topic requires rebuilding them
+      # This is critical for consumer group reopening across multiple draw calls
+      @subscription_groups = nil
+
       topic = Topic.new(name, self)
       @topics << Proxy.new(
         topic,
@@ -73,7 +80,18 @@ module Karafka
     # @return [Array<Routing::SubscriptionGroup>] all the subscription groups build based on
     #   the consumer group topics
     def subscription_groups
-      @subscription_groups ||=
+      @subscription_groups ||= begin
+        result = subscription_groups_builder.call(
+          topics,
+          base_position: @subscription_groups_base_position
+        )
+
+        # Store the base position from the first subscription group for future rebuilds.
+        # This ensures stable positions for static group membership.
+        @subscription_groups_base_position ||= result.first&.position
+
+        result
+      end
     end

     # Hashed version of consumer group that can be used for validation purposes
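Together with the cache invalidation in `#topic=` above, this memoization supports reopening the same consumer group across separate `draw` calls while keeping subscription group positions stable, which matters for static group membership. A sketch (consumer classes illustrative):

    KarafkaApp.routes.draw do
      consumer_group :main do
        topic(:orders) { consumer OrdersConsumer }
      end
    end

    # Reopening :main invalidates the memoized subscription groups; the
    # stored base position keeps their positions stable on rebuild
    KarafkaApp.routes.draw do
      consumer_group :main do
        topic(:payments) { consumer PaymentsConsumer }
      end
    end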
@@ -44,14 +44,15 @@ module Karafka

         virtual do |data, errors|
           next unless errors.empty?
-          next unless
+          next unless Karafka::App.config.strict_topics_namespacing

           names = data.fetch(:topics).map { |topic| topic[:name] }
           names_hash = names.each_with_object({}) { |n, h| h[n] = true }
           error_occured = false
+          namespace_chars = ['.', '_'].freeze
           names.each do |n|
             # Skip topic names that are not namespaced
-            next unless n.chars.find { |c|
+            next unless n.chars.find { |c| namespace_chars.include?(c) }

             if n.chars.include?('.')
               # Check underscore styled topic
@@ -66,10 +66,13 @@ module Karafka

         virtual do |data, errors|
           next unless errors.empty?
-          next unless
+          next unless Karafka::App.config.strict_topics_namespacing

           value = data.fetch(:name)
-
+          namespace_chars = ['.', '_'].freeze
+          namespacing_chars_count = value.chars.find_all do |c|
+            namespace_chars.include?(c)
+          end.uniq.size

           next if namespacing_chars_count <= 1

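Both validations only run when `strict_topics_namespacing` is enabled and reject names that mix dot and underscore delimiters. A sketch of names and of opting out (a minimal setup, other settings omitted):

    # 'users.events'      -> valid, dot-namespaced
    # 'users_events'      -> valid, underscore-namespaced
    # 'users.events_dead' -> invalid, mixes both delimiter styles

    class KarafkaApp < Karafka::App
      setup do |config|
        # disable the check when mixed-delimiter names are required
        config.strict_topics_namespacing = false
      end
    end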
@@ -39,7 +39,7 @@ module Karafka
           next unless dead_letter_queue[:active]

           topic = dead_letter_queue[:topic]
-          topic_regexp =
+          topic_regexp = Karafka::Contracts::TOPIC_REGEXP

           # When topic is set to false, it means we just want to skip dispatch on DLQ
           next if topic == false
@@ -16,9 +16,12 @@ module Karafka
         end

         # @param active [Boolean] is the topic structure management feature active
-        # @param partitions [Integer]
-        # @param replication_factor [Integer]
+        # @param partitions [Integer] number of partitions for the topic
+        # @param replication_factor [Integer] replication factor for the topic
         # @param details [Hash] extra configuration for the topic
+        # @option details [String] :retention.ms retention time in milliseconds
+        # @option details [String] :compression.type compression type
+        #   (none, gzip, snappy, lz4, zstd)
         # @return [Config] defined structure
         def config(active: true, partitions: 1, replication_factor: 1, **details)
           @declaratives ||= Config.new(
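These options feed declarative topics management. A sketch of a declaration using the documented keys, assuming a configured `KarafkaApp` (values and consumer class illustrative):

    KarafkaApp.routes.draw do
      topic :events do
        consumer EventsConsumer
        config(
          partitions: 6,
          replication_factor: 2,
          'retention.ms': '604800000',   # 7 days
          'compression.type': 'gzip'
        )
      end
    end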
@@ -22,9 +22,9 @@ module Karafka
         # @param key [Object] deserializer for the message key
         # @param headers [Object] deserializer for the message headers
         def deserializers(
-          payload:
-          key:
-          headers:
+          payload: Karafka::Deserializers::Payload.new,
+          key: Karafka::Deserializers::Key.new,
+          headers: Karafka::Deserializers::Headers.new
         )
           @deserializers ||= Config.new(
             active: true,
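With every argument defaulted, overriding a single deserializer leaves the others intact. A sketch with an illustrative payload deserializer (the registry call is hypothetical):

    # Anything responding to #call can act as a deserializer
    class AvroPayloadDeserializer
      def call(message)
        AvroRegistry.decode(message.raw_payload) # hypothetical helper
      end
    end

    KarafkaApp.routes.draw do
      topic :events do
        consumer EventsConsumer
        # key and headers keep their Karafka defaults
        deserializers(payload: AvroPayloadDeserializer.new)
      end
    end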
@@ -12,11 +12,11 @@ module Karafka
       #
       # @param _config [Karafka::Core::Configurable::Node] app config
       def post_setup(_config)
-
+        Karafka::App.monitor.subscribe('app.running') do
           # Do not activate tracking of statistics if none of our active topics uses it
           # This prevents us from tracking metrics when user just runs a subset of topics
           # in a given process and none of those actually utilizes this feature
-          next unless
+          next unless Karafka::App
                       .subscription_groups
                       .values
                       .flat_map(&:itself)
@@ -25,11 +25,11 @@ module Karafka
                       .any?(&:inline_insights?)

           # Initialize the tracker prior to becoming multi-threaded
-
+          Karafka::Processing::InlineInsights::Tracker.instance

           # Subscribe to the statistics reports and collect them
-
-
+          Karafka.monitor.subscribe(
+            Karafka::Processing::InlineInsights::Listener.new
           )
         end
       end
@@ -8,7 +8,7 @@ module Karafka
     # structure so all the routes are being stored in a single level array
     module Router
       # Finds first reference of a given topic based on provided lookup attribute
-      # @param lookup [Hash
+      # @param lookup [Hash{Symbol => String}] hash with attribute - value key pairs
       # @return [Karafka::Routing::Topic, nil] proper route details or nil if not found
       def find_by(lookup)
         App.consumer_groups.each do |consumer_group|
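`find_by` returns the first routed topic whose attributes match the lookup hash. A sketch, assuming routes have already been drawn:

    topic = Karafka::Routing::Router.find_by(name: 'orders')
    topic&.consumer # => consumer class routed for 'orders', or nil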
@@ -14,7 +14,7 @@ module Karafka
       node: %i[swarm node]
     )

-    attr_reader :id, :name, :topics, :kafka, :consumer_group
+    attr_reader :id, :name, :topics, :kafka, :consumer_group, :position

     # Lock for generating new ids safely
     ID_MUTEX = Mutex.new
@@ -30,7 +30,7 @@ module Karafka
       @group_counter ||= 0
       @group_counter += 1

-
+      Digest::SHA256.hexdigest(
         @group_counter.to_s
       )[0..11]
     end
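`Digest::SHA256.hexdigest` plus the `[0..11]` slice yields a deterministic 12-character id per counter value:

    require 'digest'

    Digest::SHA256.hexdigest('1')[0..11] # => "6b86b273ff34"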