karafka 2.5.1 → 2.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +21 -29
- data/.github/workflows/ci_macos_arm64.yml +1 -1
- data/.github/workflows/push.yml +2 -2
- data/.github/workflows/trigger-wiki-refresh.yml +1 -1
- data/.ruby-version +1 -1
- data/.yard-lint.yml +174 -0
- data/CHANGELOG.md +20 -4
- data/Gemfile +1 -2
- data/Gemfile.lock +45 -41
- data/bin/integrations +2 -1
- data/bin/rspecs +4 -0
- data/config/locales/errors.yml +6 -4
- data/config/locales/pro_errors.yml +5 -4
- data/docker-compose.yml +1 -1
- data/examples/payloads/json/sample_set_02/download.json +191 -0
- data/examples/payloads/json/sample_set_03/event_type_1.json +18 -0
- data/examples/payloads/json/sample_set_03/event_type_2.json +263 -0
- data/examples/payloads/json/sample_set_03/event_type_3.json +41 -0
- data/karafka.gemspec +3 -3
- data/lib/active_job/queue_adapters/karafka_adapter.rb +3 -3
- data/lib/karafka/active_job/consumer.rb +7 -3
- data/lib/karafka/active_job/current_attributes/job_wrapper.rb +45 -0
- data/lib/karafka/active_job/current_attributes/loading.rb +1 -1
- data/lib/karafka/active_job/current_attributes/persistence.rb +19 -7
- data/lib/karafka/active_job/current_attributes.rb +3 -2
- data/lib/karafka/active_job/deserializer.rb +61 -0
- data/lib/karafka/active_job/dispatcher.rb +34 -14
- data/lib/karafka/active_job/job_options_contract.rb +2 -4
- data/lib/karafka/admin/acl.rb +8 -4
- data/lib/karafka/admin/configs/config.rb +6 -4
- data/lib/karafka/admin/configs/resource.rb +7 -1
- data/lib/karafka/admin/consumer_groups.rb +80 -12
- data/lib/karafka/admin/topics.rb +43 -9
- data/lib/karafka/admin.rb +23 -14
- data/lib/karafka/app.rb +3 -3
- data/lib/karafka/base_consumer.rb +6 -6
- data/lib/karafka/cli/base.rb +2 -2
- data/lib/karafka/cli/console.rb +1 -1
- data/lib/karafka/cli/contracts/server.rb +3 -5
- data/lib/karafka/cli/help.rb +1 -1
- data/lib/karafka/cli/install.rb +3 -2
- data/lib/karafka/cli/server.rb +1 -1
- data/lib/karafka/cli/swarm.rb +1 -1
- data/lib/karafka/cli/topics/align.rb +1 -1
- data/lib/karafka/cli/topics/repartition.rb +2 -2
- data/lib/karafka/connection/client.rb +30 -19
- data/lib/karafka/connection/listeners_batch.rb +2 -3
- data/lib/karafka/connection/manager.rb +1 -0
- data/lib/karafka/connection/proxy.rb +12 -8
- data/lib/karafka/connection/rebalance_manager.rb +1 -1
- data/lib/karafka/connection/status.rb +1 -0
- data/lib/karafka/constraints.rb +1 -1
- data/lib/karafka/contracts/base.rb +1 -1
- data/lib/karafka/deserializers/payload.rb +1 -1
- data/lib/karafka/env.rb +1 -2
- data/lib/karafka/helpers/async.rb +1 -1
- data/lib/karafka/helpers/config_importer.rb +3 -3
- data/lib/karafka/helpers/interval_runner.rb +4 -1
- data/lib/karafka/helpers/multi_delegator.rb +3 -0
- data/lib/karafka/instrumentation/assignments_tracker.rb +19 -1
- data/lib/karafka/instrumentation/callbacks/error.rb +2 -2
- data/lib/karafka/instrumentation/callbacks/statistics.rb +3 -3
- data/lib/karafka/instrumentation/logger.rb +6 -6
- data/lib/karafka/instrumentation/monitor.rb +3 -3
- data/lib/karafka/instrumentation/notifications.rb +1 -0
- data/lib/karafka/instrumentation/vendors/appsignal/base.rb +3 -4
- data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +3 -4
- data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +10 -11
- data/lib/karafka/instrumentation/vendors/kubernetes/base_listener.rb +1 -1
- data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +5 -18
- data/lib/karafka/messages/builders/batch_metadata.rb +2 -2
- data/lib/karafka/messages/builders/message.rb +1 -1
- data/lib/karafka/messages/messages.rb +2 -3
- data/lib/karafka/patches/rdkafka/bindings.rb +6 -6
- data/lib/karafka/patches/rdkafka/opaque.rb +1 -1
- data/lib/karafka/pro/active_job/consumer.rb +2 -2
- data/lib/karafka/pro/active_job/dispatcher.rb +10 -6
- data/lib/karafka/pro/active_job/job_options_contract.rb +2 -4
- data/lib/karafka/pro/cleaner/messages/messages.rb +2 -3
- data/lib/karafka/pro/cleaner.rb +3 -3
- data/lib/karafka/pro/cli/contracts/server.rb +3 -5
- data/lib/karafka/pro/cli/parallel_segments/base.rb +5 -5
- data/lib/karafka/pro/cli/parallel_segments/collapse.rb +3 -3
- data/lib/karafka/pro/cli/parallel_segments/distribute.rb +3 -3
- data/lib/karafka/pro/cli/parallel_segments.rb +1 -1
- data/lib/karafka/pro/connection/manager.rb +3 -4
- data/lib/karafka/pro/connection/multiplexing/listener.rb +1 -0
- data/lib/karafka/pro/contracts/base.rb +1 -1
- data/lib/karafka/pro/encryption/cipher.rb +3 -2
- data/lib/karafka/pro/encryption/contracts/config.rb +5 -7
- data/lib/karafka/pro/encryption/messages/parser.rb +4 -4
- data/lib/karafka/pro/encryption/setup/config.rb +1 -1
- data/lib/karafka/pro/instrumentation/performance_tracker.rb +3 -3
- data/lib/karafka/pro/iterator/expander.rb +1 -1
- data/lib/karafka/pro/iterator/tpl_builder.rb +2 -2
- data/lib/karafka/pro/iterator.rb +3 -3
- data/lib/karafka/pro/loader.rb +1 -1
- data/lib/karafka/pro/processing/coordinator.rb +1 -1
- data/lib/karafka/pro/processing/coordinators/errors_tracker.rb +2 -3
- data/lib/karafka/pro/processing/coordinators/filters_applier.rb +3 -3
- data/lib/karafka/pro/processing/filters/base.rb +1 -0
- data/lib/karafka/pro/processing/filters/delayer.rb +1 -1
- data/lib/karafka/pro/processing/filters/expirer.rb +1 -1
- data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +1 -1
- data/lib/karafka/pro/processing/filters/throttler.rb +1 -1
- data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +1 -1
- data/lib/karafka/pro/processing/jobs/eofed_non_blocking.rb +1 -1
- data/lib/karafka/pro/processing/jobs/periodic.rb +1 -1
- data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +1 -1
- data/lib/karafka/pro/processing/jobs_builder.rb +1 -1
- data/lib/karafka/pro/processing/offset_metadata/fetcher.rb +1 -0
- data/lib/karafka/pro/processing/partitioner.rb +1 -1
- data/lib/karafka/pro/processing/schedulers/default.rb +2 -4
- data/lib/karafka/pro/processing/strategies/base.rb +1 -1
- data/lib/karafka/pro/processing/strategies/default.rb +2 -2
- data/lib/karafka/pro/processing/strategies/lrj/default.rb +2 -4
- data/lib/karafka/pro/processing/strategies/vp/default.rb +2 -4
- data/lib/karafka/pro/processing/strategy_selector.rb +1 -0
- data/lib/karafka/pro/processing/subscription_groups_coordinator.rb +2 -3
- data/lib/karafka/pro/processing/virtual_partitions/distributors/balanced.rb +4 -2
- data/lib/karafka/pro/processing/virtual_partitions/distributors/consistent.rb +4 -2
- data/lib/karafka/pro/recurring_tasks/consumer.rb +3 -2
- data/lib/karafka/pro/recurring_tasks/contracts/config.rb +4 -6
- data/lib/karafka/pro/recurring_tasks/contracts/task.rb +3 -5
- data/lib/karafka/pro/recurring_tasks/deserializer.rb +1 -1
- data/lib/karafka/pro/recurring_tasks/dispatcher.rb +7 -6
- data/lib/karafka/pro/recurring_tasks/executor.rb +2 -1
- data/lib/karafka/pro/recurring_tasks/schedule.rb +9 -8
- data/lib/karafka/pro/recurring_tasks/serializer.rb +6 -5
- data/lib/karafka/pro/recurring_tasks/setup/config.rb +2 -2
- data/lib/karafka/pro/recurring_tasks/task.rb +1 -1
- data/lib/karafka/pro/recurring_tasks.rb +8 -5
- data/lib/karafka/pro/routing/features/adaptive_iterator/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/dead_letter_queue/topic.rb +3 -0
- data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/delaying/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/direct_assignments/contracts/consumer_group.rb +4 -8
- data/lib/karafka/pro/routing/features/direct_assignments/contracts/topic.rb +5 -7
- data/lib/karafka/pro/routing/features/direct_assignments/subscription_group.rb +7 -6
- data/lib/karafka/pro/routing/features/direct_assignments/topic.rb +2 -2
- data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/expiring/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/filtering/topic.rb +2 -3
- data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/multiplexing/contracts/topic.rb +3 -5
- data/lib/karafka/pro/routing/features/multiplexing/subscription_groups_builder.rb +1 -1
- data/lib/karafka/pro/routing/features/multiplexing.rb +5 -5
- data/lib/karafka/pro/routing/features/non_blocking_job/topic.rb +3 -3
- data/lib/karafka/pro/routing/features/offset_metadata/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/offset_metadata.rb +4 -4
- data/lib/karafka/pro/routing/features/parallel_segments/builder.rb +1 -1
- data/lib/karafka/pro/routing/features/parallel_segments/contracts/consumer_group.rb +2 -4
- data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +3 -5
- data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +2 -4
- data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/patterns/patterns.rb +1 -1
- data/lib/karafka/pro/routing/features/pausing/config.rb +26 -0
- data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +17 -11
- data/lib/karafka/pro/routing/features/pausing/topic.rb +69 -8
- data/lib/karafka/pro/routing/features/periodic_job/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/periodic_job/topic.rb +1 -1
- data/lib/karafka/pro/routing/features/recurring_tasks/builder.rb +1 -1
- data/lib/karafka/pro/routing/features/recurring_tasks/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/scheduled_messages/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/swarm/contracts/routing.rb +2 -4
- data/lib/karafka/pro/routing/features/swarm/contracts/topic.rb +6 -8
- data/lib/karafka/pro/routing/features/swarm.rb +1 -1
- data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +2 -4
- data/lib/karafka/pro/routing/features/throttling/topic.rb +3 -1
- data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +2 -4
- data/lib/karafka/pro/scheduled_messages/consumer.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/contracts/config.rb +4 -6
- data/lib/karafka/pro/scheduled_messages/contracts/message.rb +3 -5
- data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +3 -2
- data/lib/karafka/pro/scheduled_messages/day.rb +1 -0
- data/lib/karafka/pro/scheduled_messages/deserializers/headers.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/deserializers/payload.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/max_epoch.rb +1 -0
- data/lib/karafka/pro/scheduled_messages/proxy.rb +1 -1
- data/lib/karafka/pro/scheduled_messages/serializer.rb +3 -3
- data/lib/karafka/pro/scheduled_messages/setup/config.rb +2 -2
- data/lib/karafka/pro/scheduled_messages/state.rb +1 -0
- data/lib/karafka/pro/scheduled_messages/tracker.rb +1 -0
- data/lib/karafka/pro/scheduled_messages.rb +4 -6
- data/lib/karafka/pro/swarm/liveness_listener.rb +2 -2
- data/lib/karafka/process.rb +4 -4
- data/lib/karafka/processing/coordinator.rb +2 -4
- data/lib/karafka/processing/coordinators_buffer.rb +2 -3
- data/lib/karafka/processing/executor.rb +3 -4
- data/lib/karafka/processing/inline_insights/tracker.rb +1 -0
- data/lib/karafka/processing/jobs/base.rb +2 -3
- data/lib/karafka/processing/jobs_queue.rb +1 -1
- data/lib/karafka/processing/result.rb +1 -0
- data/lib/karafka/processing/strategy_selector.rb +1 -0
- data/lib/karafka/processing/workers_batch.rb +2 -3
- data/lib/karafka/railtie.rb +1 -0
- data/lib/karafka/routing/activity_manager.rb +3 -2
- data/lib/karafka/routing/builder.rb +8 -8
- data/lib/karafka/routing/consumer_group.rb +4 -6
- data/lib/karafka/routing/contracts/consumer_group.rb +6 -7
- data/lib/karafka/routing/contracts/routing.rb +2 -4
- data/lib/karafka/routing/contracts/topic.rb +7 -6
- data/lib/karafka/routing/features/active_job/contracts/topic.rb +2 -4
- data/lib/karafka/routing/features/active_job/topic.rb +6 -0
- data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +3 -5
- data/lib/karafka/routing/features/declaratives/contracts/topic.rb +3 -5
- data/lib/karafka/routing/features/declaratives/topic.rb +5 -2
- data/lib/karafka/routing/features/deserializers/contracts/topic.rb +2 -4
- data/lib/karafka/routing/features/deserializers/topic.rb +3 -3
- data/lib/karafka/routing/features/eofed/contracts/topic.rb +2 -4
- data/lib/karafka/routing/features/inline_insights/contracts/topic.rb +2 -4
- data/lib/karafka/routing/features/inline_insights.rb +5 -5
- data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +2 -4
- data/lib/karafka/routing/router.rb +1 -1
- data/lib/karafka/routing/subscription_group.rb +1 -1
- data/lib/karafka/routing/subscription_groups_builder.rb +1 -0
- data/lib/karafka/routing/topic.rb +3 -3
- data/lib/karafka/routing/topics.rb +4 -9
- data/lib/karafka/server.rb +2 -2
- data/lib/karafka/setup/attributes_map.rb +4 -2
- data/lib/karafka/setup/config.rb +85 -17
- data/lib/karafka/setup/config_proxy.rb +209 -0
- data/lib/karafka/setup/contracts/config.rb +13 -11
- data/lib/karafka/setup/defaults_injector.rb +3 -2
- data/lib/karafka/setup/dsl.rb +2 -3
- data/lib/karafka/swarm/liveness_listener.rb +3 -3
- data/lib/karafka/swarm/manager.rb +7 -6
- data/lib/karafka/swarm/node.rb +1 -1
- data/lib/karafka/swarm/supervisor.rb +2 -1
- data/lib/karafka/time_trackers/base.rb +1 -1
- data/lib/karafka/version.rb +1 -1
- data/lib/karafka.rb +4 -4
- metadata +14 -6
- data/.diffend.yml +0 -3
data/lib/karafka/pro/processing/coordinator.rb

@@ -8,7 +8,7 @@ module Karafka
     module Processing
       # Pro coordinator that provides extra orchestration methods useful for parallel processing
       # within the same partition
-      class Coordinator < ::Karafka::Processing::Coordinator
+      class Coordinator < Karafka::Processing::Coordinator
         extend Forwardable
         include Helpers::ConfigImporter.new(
           errors_tracker_class: %i[internal processing errors_tracker_class]
data/lib/karafka/pro/processing/coordinators/errors_tracker.rb

@@ -79,9 +79,8 @@ module Karafka
         end

         # Iterates over errors
-        # @param block [Proc]
-        def each(&block)
-          @errors.each(&block)
+        def each(&)
+          @errors.each(&)
         end

         # @return [Array<StandardError>] array with all the errors that occurred
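Several hunks in this release swap named block parameters for Ruby's anonymous block forwarding (`&`, available since Ruby 3.1). A minimal sketch, not karafka code, showing the two forms behave identically:

```ruby
# Anonymous block forwarding: `&` in the signature accepts a block and a bare
# `&` at the call site forwards it, without binding it to a name.
class NamedTracker
  def initialize(errors)
    @errors = errors
  end

  def each(&block)
    @errors.each(&block)
  end
end

class AnonymousTracker
  def initialize(errors)
    @errors = errors
  end

  # Same behavior, no named parameter to document
  def each(&)
    @errors.each(&)
  end
end

AnonymousTracker.new([StandardError.new('boom')]).each { |e| puts e.message }
```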
data/lib/karafka/pro/processing/coordinators/filters_applier.rb

@@ -68,7 +68,7 @@ module Karafka
         # @return [Integer] minimum timeout we need to pause. This is the minimum for all the
         #   filters to satisfy all of them.
         def timeout
-          applied.map(&:timeout).compact.min || 0
+          applied.filter_map(&:timeout).min || 0
         end

         # The first message we do need to get next time we poll. We use the minimum not to jump
@@ -78,7 +78,7 @@ module Karafka
         def cursor
           return nil unless active?

-          applied.map(&:cursor).compact.min_by(&:offset)
+          applied.filter_map(&:cursor).min_by(&:offset)
         end

         # @return [Boolean] did any of the filters requested offset storage during filter
@@ -106,7 +106,7 @@ module Karafka
         def marking_cursor
           return nil unless active?

-          applied.map(&:marking_cursor).compact.min_by(&:offset)
+          applied.filter_map(&:marking_cursor).min_by(&:offset)
         end

         private
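The three `filters_applier.rb` changes replace a two-pass `map` + `compact` with the single-pass `Array#filter_map` (Ruby 2.7+). A standalone check with a stand-in `Filter` struct:

```ruby
# filter_map maps and drops nil results in one pass; Filter is illustrative.
Filter = Struct.new(:timeout)

applied = [Filter.new(500), Filter.new(nil), Filter.new(200)]

# Two-pass style with an intermediate array:
p applied.map(&:timeout).compact.min || 0           # => 200

# Single-pass equivalent:
p applied.filter_map(&:timeout).min || 0            # => 200

# With no timeouts present, min returns nil and the fallback applies:
p [Filter.new(nil)].filter_map(&:timeout).min || 0  # => 0
```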
data/lib/karafka/pro/processing/filters/delayer.rb

@@ -25,7 +25,7 @@ module Karafka

           # Time on message is in seconds with ms precision, so we need to convert the ttl that
           # is in ms to this format
-          border = ::Time.now.utc - @delay / 1_000.to_f
+          border = Time.now.utc - (@delay / 1_000.0)

           messages.delete_if do |message|
             too_young = message.timestamp > border
data/lib/karafka/pro/processing/filters/expirer.rb

@@ -26,7 +26,7 @@ module Karafka

           # Time on message is in seconds with ms precision, so we need to convert the ttl that
           # is in ms to this format
-          border = ::Time.now.utc - @ttl / 1_000.to_f
+          border = Time.now.utc - (@ttl / 1_000.to_f)

           messages.delete_if do |message|
             too_old = message.timestamp < border
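Both filters compute a time border by converting a milliseconds setting into seconds; dividing by a float (or via `to_f`) is what preserves sub-second precision. A quick check with an illustrative delay value:

```ruby
delay_ms = 1_500 # illustrative 1.5s delay setting

# Integer division truncates and would lose the fraction:
p delay_ms / 1_000    # => 1
# Float division keeps millisecond precision:
p delay_ms / 1_000.0  # => 1.5

border = Time.now.utc - (delay_ms / 1_000.0)

# A message produced 1s ago is still "too young" for a 1.5s delay:
message_timestamp = Time.now.utc - 1
p message_timestamp > border # => true
```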
data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb

@@ -17,7 +17,7 @@ module Karafka
           #
           # @note It needs to be working with a proper consumer that will handle the partition
           #   management. This layer of the framework knows nothing about Kafka messages consumption.
-          class ConsumeNonBlocking < ::Karafka::Processing::Jobs::Consume
+          class ConsumeNonBlocking < Karafka::Processing::Jobs::Consume
             self.action = :consume

             # Makes this job non-blocking from the start
data/lib/karafka/pro/processing/jobs/eofed_non_blocking.rb

@@ -11,7 +11,7 @@ module Karafka
           # We use this version for LRJ topics for cases where saturated resources would not allow
           # to run this job for extended period of time. Under such scenarios, if we would not use
           # a non-blocking one, we would reach max.poll.interval.ms.
-          class EofedNonBlocking < ::Karafka::Processing::Jobs::Eofed
+          class EofedNonBlocking < Karafka::Processing::Jobs::Eofed
             self.action = :eofed

             # @param args [Array] any arguments accepted by `::Karafka::Processing::Jobs::Eofed`
data/lib/karafka/pro/processing/jobs/periodic.rb

@@ -9,7 +9,7 @@ module Karafka
         module Jobs
           # Job that represents a "ticking" work. Work that we run periodically for the Periodics
           # enabled topics.
-          class Periodic < ::Karafka::Processing::Jobs::Base
+          class Periodic < Karafka::Processing::Jobs::Base
             self.action = :tick

             # @param executor [Karafka::Pro::Processing::Executor] pro executor that is suppose to
data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb

@@ -15,7 +15,7 @@ module Karafka
           # It can be useful when having long lasting jobs that would exceed `max.poll.interval`
           # in scenarios where there are more jobs than threads, without this being async we
           # would potentially stop polling
-          class RevokedNonBlocking < ::Karafka::Processing::Jobs::Revoked
+          class RevokedNonBlocking < Karafka::Processing::Jobs::Revoked
             self.action = :revoked

             # Makes this job non-blocking from the start
data/lib/karafka/pro/processing/jobs_builder.rb

@@ -7,7 +7,7 @@ module Karafka
   module Pro
     module Processing
       # Pro jobs builder that supports lrj
-      class JobsBuilder < ::Karafka::Processing::JobsBuilder
+      class JobsBuilder < Karafka::Processing::JobsBuilder
         # @param executor [Karafka::Pro::Processing::Executor]
         def idle(executor)
           Karafka::Processing::Jobs::Idle.new(executor)
data/lib/karafka/pro/processing/partitioner.rb

@@ -7,7 +7,7 @@ module Karafka
   module Pro
     module Processing
       # Pro partitioner that can distribute work based on the virtual partitioner settings
-      class Partitioner < ::Karafka::Processing::Partitioner
+      class Partitioner < Karafka::Processing::Partitioner
         # @param topic [String] topic name
         # @param messages [Array<Karafka::Messages::Message>] karafka messages
         # @param coordinator [Karafka::Pro::Processing::Coordinator] processing coordinator that
data/lib/karafka/pro/processing/schedulers/default.rb

@@ -29,10 +29,8 @@ module Karafka
         def on_schedule_consumption(jobs_array)
           perf_tracker = Instrumentation::PerformanceTracker.instance

-          ordered = []
-
-          jobs_array.each do |job|
-            ordered << [
+          ordered = jobs_array.map do |job|
+            [
               job,
               processing_cost(perf_tracker, job)
             ]
data/lib/karafka/pro/processing/strategies/default.rb

@@ -14,7 +14,7 @@ module Karafka
         # Nothing. Just standard, automatic flow
         module Default
           include Base
-          include ::Karafka::Processing::Strategies::Default
+          include Karafka::Processing::Strategies::Default

           # Apply strategy for a non-feature based flow
           FEATURES = %i[].freeze
@@ -263,7 +263,7 @@ module Karafka
           # the post-user code execution marking with transactional producer to result in a
           # boolean state of marking for further framework flow. This is a normalization to make it
           # behave the same way as it would behave with a non-transactional one
-          rescue ::Rdkafka::RdkafkaError, Errors::AssignmentLostError
+          rescue Rdkafka::RdkafkaError, Errors::AssignmentLostError
             false
           ensure
             @_transaction_internal = false
data/lib/karafka/pro/processing/strategies/lrj/default.rb

@@ -78,10 +78,8 @@ module Karafka
             # Allows for LRJ to synchronize its work. It may be needed because LRJ can run
             # lifecycle events like revocation while the LRJ work is running and there may be a
             # need for a critical section.
-            #
-            # @param block [Proc]
-            def synchronize(&block)
-              coordinator.shared_mutex.synchronize(&block)
+            def synchronize(&)
+              coordinator.shared_mutex.synchronize(&)
             end
           end
         end
data/lib/karafka/pro/processing/strategies/vp/default.rb

@@ -134,10 +134,8 @@ module Karafka
           # the end users. With LRJ it is needed and provided in the `LRJ::Default` strategy,
           # because lifecycle events on revocation can run in parallel to the LRJ job as it is
           # non-blocking.
-          #
-          # @param block [Proc]
-          def synchronize(&block)
-            coordinator.shared_mutex.synchronize(&block)
+          def synchronize(&)
+            coordinator.shared_mutex.synchronize(&)
           end

           private
data/lib/karafka/pro/processing/subscription_groups_coordinator.rb

@@ -16,12 +16,11 @@ module Karafka
        #   want to pause
        # @param lock_id [Object] key we want to use if we want to set multiple locks on the same
        #   subscription group
-        # @param kwargs [Hash]
-        def pause(subscription_group, lock_id = nil, **kwargs)
+        def pause(subscription_group, lock_id = nil, **)
          jobs_queue.lock_async(
            subscription_group.id,
            lock_id,
-            **kwargs
+            **
          )
        end
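The `pause` rewrite relies on Ruby 3.2's anonymous keyword rest forwarding: `**` in a signature captures keyword arguments that a bare `**` at a call site forwards verbatim. A self-contained sketch with hypothetical names:

```ruby
# FakeJobsQueue and FakeCoordinator are illustrative; only the forwarding
# mechanics match the diff above.
class FakeJobsQueue
  def lock_async(group_id, lock_id, timeout: 0)
    puts "locking #{group_id}/#{lock_id} with timeout #{timeout}ms"
  end
end

class FakeCoordinator
  def initialize(jobs_queue)
    @jobs_queue = jobs_queue
  end

  # `**` captures any keyword arguments without naming them (Ruby 3.2+)
  def pause(group_id, lock_id = nil, **)
    @jobs_queue.lock_async(group_id, lock_id, **)
  end
end

FakeCoordinator.new(FakeJobsQueue.new).pause('sg-1', 'lock-a', timeout: 5_000)
```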
data/lib/karafka/pro/processing/virtual_partitions/distributors/balanced.rb

@@ -11,8 +11,10 @@ module Karafka
           # Balanced distributor that groups messages by partition key
           # and processes larger groups first while maintaining message order within groups
           class Balanced < Base
-            # @param messages [Array<Karafka::Messages::Message>]
-            # @return [Hash<Integer, Array<Karafka::Messages::Message>>] hash with group ids as
+            # Distributes messages to virtual partitions ensuring balanced load across workers
+            # by grouping messages by partition key and assigning larger groups first
+            # @param messages [Array<Karafka::Messages::Message>]
+            # @return [Hash{Integer => Array<Karafka::Messages::Message>}] hash with group ids as
             #   keys and message groups as values
             def call(messages)
               # Group messages by partition key
data/lib/karafka/pro/processing/virtual_partitions/distributors/consistent.rb

@@ -11,8 +11,10 @@ module Karafka
           # Consistent distributor that ensures messages with the same partition key
           # are always processed in the same virtual partition
           class Consistent < Base
-            # @param messages [Array<Karafka::Messages::Message>]
-            # @return [Hash<Integer, Array<Karafka::Messages::Message>>] hash with group ids as
+            # Distributes messages ensuring consistent routing where messages with the same
+            # partition key always go to the same virtual partition
+            # @param messages [Array<Karafka::Messages::Message>]
+            # @return [Hash{Integer => Array<Karafka::Messages::Message>}] hash with group ids as
             #   keys and message groups as values
             def call(messages)
               messages
data/lib/karafka/pro/recurring_tasks/consumer.rb

@@ -11,13 +11,14 @@ module Karafka
       #   - we only run schedules that are of same or newer version
       #   - we always mark as consumed in such a way, that the first message received after
       #     assignment (if any) is a state
-      class Consumer < ::Karafka::BaseConsumer
+      class Consumer < Karafka::BaseConsumer
         # @param args [Array] all arguments accepted by the consumer
         def initialize(*args)
           super
           @executor = Executor.new
         end

+        # Consumes messages and manages recurring tasks execution
         def consume
           # There is nothing we can do if we operate on a newer schedule. In such cases we should
           # just wait and re-raise error hoping someone will notice or that this will be
@@ -54,7 +55,7 @@ module Karafka
             #   that collectively have a different outcome
             @executor.call
           else
-            raise ::Karafka::Errors::UnsupportedCaseError, type
+            raise Karafka::Errors::UnsupportedCaseError, type
           end
         end

data/lib/karafka/pro/recurring_tasks/contracts/config.rb

@@ -9,17 +9,15 @@ module Karafka
       # Recurring Tasks related contracts
       module Contracts
         # Makes sure, all the expected config is defined as it should be
-        class Config < ::Karafka::Contracts::Base
+        class Config < Karafka::Contracts::Base
           configure do |config|
-            config.error_messages = YAML.safe_load(
-              File.read(
-                File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
-              )
+            config.error_messages = YAML.safe_load_file(
+              File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
             ).fetch('en').fetch('validations').fetch('setup').fetch('config')
           end

           nested(:recurring_tasks) do
-            required(:consumer_class) { |val| val < ::Karafka::BaseConsumer }
+            required(:consumer_class) { |val| val < Karafka::BaseConsumer }
             required(:deserializer) { |val| !val.nil? }
             required(:logging) { |val| [true, false].include?(val) }
             # Do not allow to run more often than every 5 seconds
data/lib/karafka/pro/recurring_tasks/contracts/task.rb

@@ -9,12 +9,10 @@ module Karafka
       # Recurring Tasks related contracts
       module Contracts
         # Ensures that task details are as expected
-        class Task < ::Karafka::Contracts::Base
+        class Task < Karafka::Contracts::Base
           configure do |config|
-            config.error_messages = YAML.safe_load(
-              File.read(
-                File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
-              )
+            config.error_messages = YAML.safe_load_file(
+              File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
             ).fetch('en').fetch('validations').fetch('recurring_tasks')
           end

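All of the contract hunks collapse `YAML.safe_load(File.read(path))` into `YAML.safe_load_file(path)`, available since Psych 3.3 (bundled with Ruby 3.0+). A self-contained check against a throwaway file:

```ruby
require 'yaml'
require 'tempfile'

Tempfile.create(['pro_errors', '.yml']) do |f|
  f.write(<<~YAML)
    en:
      validations:
        recurring_tasks:
          cron_format: must be a valid crontab style string
  YAML
  f.flush

  # One call replaces the safe_load + File.read composition:
  messages = YAML.safe_load_file(f.path)
                 .fetch('en').fetch('validations').fetch('recurring_tasks')
  puts messages.fetch('cron_format')
end
```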
data/lib/karafka/pro/recurring_tasks/dispatcher.rb

@@ -8,6 +8,10 @@ module Karafka
     module RecurringTasks
       # Dispatches appropriate recurring tasks related messages to expected topics
       class Dispatcher
+        extend Helpers::ConfigImporter.new(
+          topics: %i[recurring_tasks topics]
+        )
+
         class << self
           # Snapshots to Kafka current schedule state
           def schedule
@@ -44,13 +48,10 @@ module Karafka
           private

           # @return [::WaterDrop::Producer] web ui producer
+          # @note We do not fetch it via the ConfigImporter not to cache it so we can re-use it
+          #   if needed
           def producer
-            ::Karafka::App.config.recurring_tasks.producer
-          end
-
-          # @return [String] consumers commands topic
-          def topics
-            ::Karafka::App.config.recurring_tasks.topics
+            Karafka::App.config.recurring_tasks.producer
           end

           # @return [Serializer]
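`Helpers::ConfigImporter` is only visible here through its call sites: it takes a mapping of method names to config key paths and defines readers for them. A minimal sketch of such an importer under that assumption (not karafka's actual implementation; `AppConfig` is a stand-in for the nested config tree):

```ruby
require 'ostruct'

# Stand-in for a nested app config:
AppConfig = OpenStruct.new(
  recurring_tasks: OpenStruct.new(topics: { schedules: 'karafka_recurring_tasks_schedules' })
)

# A Module subclass whose instances define one reader per mapping entry,
# resolving a path of keys, e.g. %i[recurring_tasks topics].
class ConfigImporter < Module
  def initialize(**mappings)
    super()
    mappings.each do |name, path|
      define_method(name) do
        path.reduce(AppConfig) { |node, key| node.public_send(key) }
      end
    end
  end
end

class Dispatcher
  extend ConfigImporter.new(topics: %i[recurring_tasks topics])
end

puts Dispatcher.topics.inspect
```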
data/lib/karafka/pro/recurring_tasks/executor.rb

@@ -17,6 +17,7 @@ module Karafka
           trigger
         ].freeze

+        # Initializes the executor in replaying mode
        def initialize
          @replaying = true
          @incompatible = false
@@ -131,7 +132,7 @@ module Karafka

        # @return [Karafka::Pro::RecurringTasks::Schedule] current in-memory schedule
        def schedule
-          ::Karafka::Pro::RecurringTasks.schedule
+          Karafka::Pro::RecurringTasks.schedule
        end

        # Dispatches the current schedule state to Kafka
data/lib/karafka/pro/recurring_tasks/schedule.rb

@@ -13,7 +13,7 @@ module Karafka
      # @return [String]
      attr_reader :version

-      # @return [Hash<String, Task>]
+      # @return [Hash{String => Task}]
      attr_reader :tasks

      # @param version [String] schedule version. In case of usage of versioning it is used to
@@ -32,9 +32,8 @@ module Karafka
      end

      # Iterates over tasks yielding them one after another
-      # @param block [Proc]
-      def each(&block)
-        @tasks.each_value(&block)
+      def each(&)
+        @tasks.each_value(&)
      end

      # @param id [String] id of a particular recurring task
@@ -44,10 +43,12 @@ module Karafka
      end

      # Allows us to have a nice DSL for defining schedules
-      # @param args [Hash]
-      # @param block [Proc]
-      def schedule(**args, &block)
-        self << Task.new(**args, &block)
+      # @param args [Hash] attributes accepted by the task initializer
+      # @option args [String] :id unique task identifier
+      # @option args [String] :cron cron expression for task scheduling
+      # @option args [Proc] :previous_time optional lambda returning previous execution time
+      def schedule(**args, &)
+        self << Task.new(**args, &)
      end
    end
  end
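This `schedule` method is the DSL that runs inside `define`'s `instance_exec` block (shown later in the `recurring_tasks.rb` hunks). A usage sketch based on the `@example` that appears in this diff; `CleanupJob` is hypothetical:

```ruby
Karafka::Pro::RecurringTasks.define('1.0.1') do
  # Executed every Monday at 09:00
  schedule(id: 'mailer', cron: '0 9 * * 1') do
    MailingJob.perform_async
  end

  # Executed every 30 minutes
  schedule(id: 'cleanup', cron: '*/30 * * * *') do
    CleanupJob.perform_async
  end
end
```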
data/lib/karafka/pro/recurring_tasks/serializer.rb

@@ -11,7 +11,8 @@ module Karafka
      # Current recurring tasks related schema structure
      SCHEMA_VERSION = '1.0'

-      # @param schedule [Karafka::Pro::RecurringTasks::Schedule]
+      # Serializes and compresses the schedule with all its tasks and their execution state
+      # @param schedule [Karafka::Pro::RecurringTasks::Schedule]
      # @return [String] serialized and compressed current schedule data with its tasks and their
      #   current state.
      def schedule(schedule)
@@ -46,7 +47,7 @@ module Karafka
      def command(command_name, task_id)
        data = {
          schema_version: SCHEMA_VERSION,
-          schedule_version: ::Karafka::Pro::RecurringTasks.schedule.version,
+          schedule_version: Karafka::Pro::RecurringTasks.schedule.version,
          dispatched_at: Time.now.to_f,
          type: 'command',
          command: {
@@ -69,7 +70,7 @@ module Karafka

        data = {
          schema_version: SCHEMA_VERSION,
-          schedule_version: ::Karafka::Pro::RecurringTasks.schedule.version,
+          schedule_version: Karafka::Pro::RecurringTasks.schedule.version,
          dispatched_at: Time.now.to_f,
          type: 'log',
          task: {
@@ -92,9 +93,9 @@ module Karafka
        hash.to_json
      end

-      # Compresses the provided data
+      # Compresses the provided data using Zlib deflate algorithm
      #
-      # @param data [String]
+      # @param data [String]
      # @return [String] compressed data
      def compress(data)
        Zlib::Deflate.deflate(data)
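The serializer's payloads round-trip through JSON and `Zlib::Deflate`; the consuming side would `inflate` to recover them. A standalone round trip:

```ruby
require 'json'
require 'zlib'

payload = { schema_version: '1.0', type: 'command' }.to_json

compressed = Zlib::Deflate.deflate(payload)
restored   = Zlib::Inflate.inflate(compressed)

puts restored == payload # => true
puts "#{payload.bytesize}B -> #{compressed.bytesize}B"
```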
data/lib/karafka/pro/recurring_tasks/setup/config.rb

@@ -10,7 +10,7 @@ module Karafka
    module Setup
      # Config for recurring tasks
      class Config
-        extend ::Karafka::Core::Configurable
+        extend Karafka::Core::Configurable

        setting(:consumer_class, default: Consumer)
        setting(:deserializer, default: Deserializer.new)
@@ -27,7 +27,7 @@ module Karafka
      #   a separate instance in case of heavy usage of the transactional producer, etc.
      setting(
        :producer,
-        constructor: -> { ::Karafka.producer },
+        constructor: -> { Karafka.producer },
        lazy: true
      )
data/lib/karafka/pro/recurring_tasks/task.rb

@@ -31,7 +31,7 @@ module Karafka
      # @param block [Proc] code to execute.
      def initialize(id:, cron:, previous_time: 0, enabled: true, &block)
        @id = id
-        @cron = ::Fugit::Cron.do_parse(cron)
+        @cron = Fugit::Cron.do_parse(cron)
        @previous_time = previous_time
        @start_time = Time.now
        @executable = block
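`Fugit::Cron.do_parse` comes from the `fugit` gem: unlike `parse`, which returns `nil` for an invalid expression, `do_parse` raises an `ArgumentError`, so a bad cron string fails fast at definition time:

```ruby
require 'fugit'

cron = Fugit::Cron.do_parse('0 9 * * 1') # every Monday at 09:00
puts cron.next_time.to_s                 # next matching occurrence

puts Fugit::Cron.parse('not a cron').inspect # => nil

begin
  Fugit::Cron.do_parse('not a cron')
rescue ArgumentError => e
  puts e.message
end
```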
data/lib/karafka/pro/recurring_tasks.rb

@@ -7,16 +7,19 @@ module Karafka
  module Pro
    # Recurring tasks functionality
    module RecurringTasks
+      extend Helpers::ConfigImporter.new(
+        recurring_tasks_logging: %i[recurring_tasks logging]
+      )
+
      class << self
        # @return [Schedule, nil] current defined schedule or nil if not defined
        def schedule
-          @schedule || define('0.0.0') {}
+          @schedule || define('0.0.0') { nil }
        end

        # Simplified API for schedules definitions and validates the tasks data
        #
        # @param version [String]
-        # @param block [Proc]
        #
        # @example
        #   Karafka::Pro::RecurringTasks.define('1.0.1') do
@@ -24,9 +27,9 @@ module Karafka
        #       MailingJob.perform_async
        #     end
        #   end
-        def define(version = '1.0.0', &block)
+        def define(version = '1.0.0', &)
          @schedule = Schedule.new(version: version)
-          @schedule.instance_exec(&block)
+          @schedule.instance_exec(&)

          @schedule.each do |task|
            Contracts::Task.new.validate!(
@@ -75,7 +78,7 @@ module Karafka
          RecurringTasks.schedule

          # User can disable logging of executions, in which case we don't track them
-          return unless ::Karafka::App.config.recurring_tasks.logging
+          return unless recurring_tasks_logging

          Karafka.monitor.subscribe(Listener.new)
        end
data/lib/karafka/pro/routing/features/adaptive_iterator/contracts/topic.rb

@@ -13,10 +13,8 @@ module Karafka
            # Contract to validate configuration of the adaptive iterator feature
            class Topic < Karafka::Contracts::Base
              configure do |config|
-                config.error_messages = YAML.safe_load(
-                  File.read(
-                    File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
-                  )
+                config.error_messages = YAML.safe_load_file(
+                  File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
                ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
              end

data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb

@@ -13,10 +13,8 @@ module Karafka
            # Extended rules for dead letter queue settings
            class Topic < Karafka::Contracts::Base
              configure do |config|
-                config.error_messages = YAML.safe_load(
-                  File.read(
-                    File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
-                  )
+                config.error_messages = YAML.safe_load_file(
+                  File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
                ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
              end

data/lib/karafka/pro/routing/features/dead_letter_queue/topic.rb

@@ -22,6 +22,9 @@ module Karafka
          # @param strategy [#call, nil] Strategy we want to use or nil if a default strategy
          #   (same as in OSS) should be applied
          # @param args [Hash] Pro DLQ arguments
+          # @option args [String, nil] :topic name of the dead letter queue topic
+          # @option args [Integer] :max_retries maximum number of retries before dispatch to DLQ
+          # @option args [Boolean] :independent whether DLQ runs independently
          def dead_letter_queue(strategy: nil, **args)
            return @dead_letter_queue if @dead_letter_queue

data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb

@@ -13,10 +13,8 @@ module Karafka
            # Contract to validate configuration of the expiring feature
            class Topic < Karafka::Contracts::Base
              configure do |config|
-                config.error_messages = YAML.safe_load(
-                  File.read(
-                    File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
-                  )
+                config.error_messages = YAML.safe_load_file(
+                  File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
                ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
              end
