karafka 2.0.15 → 2.0.16

Files changed (105)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +1 -1
  4. data/.rspec +2 -0
  5. data/CHANGELOG.md +78 -0
  6. data/Gemfile.lock +14 -14
  7. data/LICENSE +1 -1
  8. data/README.md +2 -1
  9. data/bin/integrations +3 -2
  10. data/bin/rspecs +4 -0
  11. data/config/errors.yml +10 -4
  12. data/lib/active_job/karafka.rb +0 -6
  13. data/lib/karafka/active_job/consumer.rb +1 -0
  14. data/lib/karafka/admin.rb +2 -2
  15. data/lib/karafka/base_consumer.rb +31 -21
  16. data/lib/karafka/connection/listener.rb +6 -4
  17. data/lib/karafka/contracts/consumer_group.rb +0 -14
  18. data/lib/karafka/contracts/{consumer_group_topic.rb → topic.rb} +2 -3
  19. data/lib/karafka/errors.rb +6 -4
  20. data/lib/karafka/instrumentation/logger_listener.rb +25 -11
  21. data/lib/karafka/instrumentation/notifications.rb +2 -0
  22. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -1
  23. data/lib/karafka/instrumentation/vendors/datadog/listener.rb +37 -32
  24. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +153 -0
  25. data/lib/karafka/pro/active_job/consumer.rb +3 -1
  26. data/lib/karafka/pro/active_job/dispatcher.rb +3 -1
  27. data/lib/karafka/pro/active_job/job_options_contract.rb +3 -1
  28. data/lib/karafka/pro/base_consumer.rb +3 -85
  29. data/lib/karafka/pro/loader.rb +31 -24
  30. data/lib/karafka/pro/performance_tracker.rb +3 -1
  31. data/lib/karafka/pro/processing/coordinator.rb +16 -1
  32. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +3 -1
  33. data/lib/karafka/pro/processing/jobs_builder.rb +3 -1
  34. data/lib/karafka/pro/processing/partitioner.rb +3 -1
  35. data/lib/karafka/pro/processing/scheduler.rb +3 -1
  36. data/lib/karafka/pro/processing/strategies/aj_dlq_lrj_mom.rb +40 -0
  37. data/lib/karafka/pro/processing/strategies/aj_dlq_mom.rb +62 -0
  38. data/lib/karafka/pro/processing/strategies/aj_lrj_mom.rb +35 -0
  39. data/lib/karafka/pro/processing/strategies/aj_lrj_mom_vp.rb +69 -0
  40. data/lib/karafka/pro/processing/strategies/aj_mom.rb +33 -0
  41. data/lib/karafka/pro/processing/strategies/aj_mom_vp.rb +58 -0
  42. data/lib/karafka/pro/processing/strategies/base.rb +26 -0
  43. data/lib/karafka/pro/processing/strategies/default.rb +69 -0
  44. data/lib/karafka/pro/processing/strategies/dlq.rb +88 -0
  45. data/lib/karafka/pro/processing/strategies/dlq_lrj.rb +64 -0
  46. data/lib/karafka/pro/processing/strategies/dlq_lrj_mom.rb +60 -0
  47. data/lib/karafka/pro/processing/strategies/dlq_mom.rb +58 -0
  48. data/lib/karafka/pro/processing/strategies/lrj.rb +76 -0
  49. data/lib/karafka/pro/processing/strategies/lrj_mom.rb +68 -0
  50. data/lib/karafka/pro/processing/strategies/lrj_vp.rb +33 -0
  51. data/lib/karafka/pro/processing/strategies/mom.rb +43 -0
  52. data/lib/karafka/pro/processing/strategies/vp.rb +32 -0
  53. data/lib/karafka/pro/processing/strategy_selector.rb +58 -0
  54. data/lib/karafka/pro/{contracts → routing/features}/base.rb +8 -5
  55. data/lib/karafka/pro/routing/features/dead_letter_queue/contract.rb +49 -0
  56. data/lib/karafka/pro/routing/{builder_extensions.rb → features/dead_letter_queue.rb} +9 -12
  57. data/lib/karafka/pro/routing/features/long_running_job/config.rb +28 -0
  58. data/lib/karafka/pro/routing/features/long_running_job/contract.rb +37 -0
  59. data/lib/karafka/pro/routing/features/long_running_job/topic.rb +42 -0
  60. data/lib/karafka/pro/routing/features/long_running_job.rb +28 -0
  61. data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +30 -0
  62. data/lib/karafka/pro/routing/features/virtual_partitions/contract.rb +69 -0
  63. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +56 -0
  64. data/lib/karafka/pro/routing/features/virtual_partitions.rb +27 -0
  65. data/lib/karafka/processing/coordinator.rb +1 -1
  66. data/lib/karafka/processing/executor.rb +6 -0
  67. data/lib/karafka/processing/strategies/aj_dlq_mom.rb +44 -0
  68. data/lib/karafka/processing/strategies/aj_mom.rb +21 -0
  69. data/lib/karafka/processing/strategies/base.rb +37 -0
  70. data/lib/karafka/processing/strategies/default.rb +52 -0
  71. data/lib/karafka/processing/strategies/dlq.rb +77 -0
  72. data/lib/karafka/processing/strategies/dlq_mom.rb +42 -0
  73. data/lib/karafka/processing/strategies/mom.rb +29 -0
  74. data/lib/karafka/processing/strategy_selector.rb +30 -0
  75. data/lib/karafka/railtie.rb +9 -8
  76. data/lib/karafka/routing/builder.rb +6 -0
  77. data/lib/karafka/routing/features/active_job/builder.rb +33 -0
  78. data/lib/karafka/routing/features/active_job/config.rb +15 -0
  79. data/lib/karafka/routing/features/active_job/contract.rb +41 -0
  80. data/lib/karafka/routing/features/active_job/topic.rb +33 -0
  81. data/lib/karafka/routing/features/active_job.rb +13 -0
  82. data/lib/karafka/routing/features/base/expander.rb +53 -0
  83. data/lib/karafka/routing/features/base.rb +34 -0
  84. data/lib/karafka/routing/features/dead_letter_queue/config.rb +19 -0
  85. data/lib/karafka/routing/features/dead_letter_queue/contract.rb +40 -0
  86. data/lib/karafka/routing/features/dead_letter_queue/topic.rb +40 -0
  87. data/lib/karafka/routing/features/dead_letter_queue.rb +16 -0
  88. data/lib/karafka/routing/features/manual_offset_management/config.rb +15 -0
  89. data/lib/karafka/routing/features/manual_offset_management/contract.rb +24 -0
  90. data/lib/karafka/routing/features/manual_offset_management/topic.rb +35 -0
  91. data/lib/karafka/routing/features/manual_offset_management.rb +18 -0
  92. data/lib/karafka/routing/topic.rb +2 -10
  93. data/lib/karafka/server.rb +4 -2
  94. data/lib/karafka/setup/attributes_map.rb +5 -0
  95. data/lib/karafka/setup/config.rb +4 -4
  96. data/lib/karafka/time_trackers/pause.rb +21 -12
  97. data/lib/karafka/version.rb +1 -1
  98. data/lib/karafka.rb +7 -11
  99. data.tar.gz.sig +0 -0
  100. metadata +57 -9
  101. metadata.gz.sig +0 -0
  102. data/lib/karafka/active_job/routing/extensions.rb +0 -33
  103. data/lib/karafka/pro/contracts/consumer_group.rb +0 -34
  104. data/lib/karafka/pro/contracts/consumer_group_topic.rb +0 -69
  105. data/lib/karafka/pro/routing/topic_extensions.rb +0 -74
data/lib/karafka/pro/routing/features/virtual_partitions/config.rb
```diff
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component under a commercial license.
+# This Karafka component is NOT licensed under LGPL.
+#
+# All of the commercial components are present in the lib/karafka/pro directory of this
+# repository and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    module Routing
+      module Features
+        class VirtualPartitions < Base
+          # Config for virtual partitions
+          Config = Struct.new(
+            :active,
+            :partitioner,
+            :max_partitions,
+            keyword_init: true
+          ) { alias_method :active?, :active }
+        end
+      end
+    end
+  end
+end
```
data/lib/karafka/pro/routing/features/virtual_partitions/contract.rb
```diff
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component under a commercial license.
+# This Karafka component is NOT licensed under LGPL.
+#
+# All of the commercial components are present in the lib/karafka/pro directory of this
+# repository and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    module Routing
+      module Features
+        class VirtualPartitions < Base
+          # Rules around virtual partitions
+          class Contract < Contracts::Base
+            configure do |config|
+              config.error_messages = YAML.safe_load(
+                File.read(
+                  File.join(Karafka.gem_root, 'config', 'errors.yml')
+                )
+              ).fetch('en').fetch('validations').fetch('pro_topic')
+            end
+
+            nested(:virtual_partitions) do
+              required(:active) { |val| [true, false].include?(val) }
+              required(:partitioner) { |val| val.nil? || val.respond_to?(:call) }
+              required(:max_partitions) { |val| val.is_a?(Integer) && val >= 1 }
+            end
+
+            # When virtual partitions are defined, partitioner needs to respond to `#call` and it
+            # cannot be nil
+            virtual do |data, errors|
+              next unless errors.empty?
+
+              virtual_partitions = data[:virtual_partitions]
+
+              next unless virtual_partitions[:active]
+              next if virtual_partitions[:partitioner].respond_to?(:call)
+
+              [[%i[virtual_partitions partitioner], :respond_to_call]]
+            end
+
+            # Make sure that manual offset management is not used together with Virtual Partitions
+            # This would not make any sense as there would be edge cases related to skipping
+            # messages even if there were errors.
+            virtual do |data, errors|
+              next unless errors.empty?
+
+              virtual_partitions = data[:virtual_partitions]
+              manual_offset_management = data[:manual_offset_management]
+              active_job = data[:active_job]
+
+              next unless virtual_partitions[:active]
+              next unless manual_offset_management[:active]
+              next if active_job[:active]
+
+              [[%i[manual_offset_management], :not_with_virtual_partitions]]
+            end
+          end
+        end
+      end
+    end
+  end
+end
```
data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb
```diff
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component under a commercial license.
+# This Karafka component is NOT licensed under LGPL.
+#
+# All of the commercial components are present in the lib/karafka/pro directory of this
+# repository and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    module Routing
+      module Features
+        class VirtualPartitions < Base
+          # Topic extensions to be able to manage virtual partitions feature
+          module Topic
+            # @param max_partitions [Integer] max number of virtual partitions that can come out of
+            #   the single distribution flow. When set to more than the Karafka threading, will
+            #   create more work than workers. When less, can ensure we have spare resources to
+            #   process other things in parallel.
+            # @param partitioner [nil, #call] nil or callable partitioner
+            # @return [VirtualPartitions] method that allows to set the virtual partitions details
+            #   during the routing configuration and then allows to retrieve it
+            def virtual_partitions(
+              max_partitions: Karafka::App.config.concurrency,
+              partitioner: nil
+            )
+              @virtual_partitions ||= Config.new(
+                active: !partitioner.nil?,
+                max_partitions: max_partitions,
+                partitioner: partitioner
+              )
+            end
+
+            # @return [Boolean] are virtual partitions enabled for given topic
+            def virtual_partitions?
+              virtual_partitions.active?
+            end
+
+            # @return [Hash] topic with all its native configuration options plus manual offset
+            #   management namespace settings
+            def to_h
+              super.merge(
+                virtual_partitions: virtual_partitions.to_h
+              ).freeze
+            end
+          end
+        end
+      end
+    end
+  end
+end
```
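For context, a routing sketch of how this `virtual_partitions` setter is meant to be used. The consumer class and the header used for partitioning are illustrative assumptions, not part of the diff; the keyword arguments mirror the method signature above.

```ruby
# Illustrative routing sketch for the DSL added above. Passing a partitioner both
# activates the feature (active: !partitioner.nil?) and defines how messages from a
# single Kafka partition are split into virtual partitions for parallel processing.
class KarafkaApp < Karafka::App
  routes.draw do
    topic :orders_states do
      consumer OrdersStatesConsumer

      virtual_partitions(
        # Group messages by an application-level key so related messages stay together
        # (the 'order_id' header is a made-up example)
        partitioner: ->(message) { message.headers['order_id'] },
        # Cap parallelism below the global concurrency to leave workers for other topics
        max_partitions: 5
      )
    end
  end
end
```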
data/lib/karafka/pro/routing/features/virtual_partitions.rb
```diff
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component under a commercial license.
+# This Karafka component is NOT licensed under LGPL.
+#
+# All of the commercial components are present in the lib/karafka/pro directory of this
+# repository and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    module Routing
+      module Features
+        # Virtual Partitions feature config and DSL namespace.
+        #
+        # Virtual Partitions allow you to parallelize the processing of data from a single
+        # partition. This can drastically increase throughput when IO operations are involved.
+        class VirtualPartitions < Base
+        end
+      end
+    end
+  end
+end
```
data/lib/karafka/processing/coordinator.rb
```diff
@@ -63,7 +63,7 @@ module Karafka
 
           # This should never happen. If it does, something is heavily out of sync. Please reach
           # out to us if you encounter this
-          raise Karafka::Errors::InvalidCoordinatorState, 'Was zero before decrementation'
+          raise Karafka::Errors::InvalidCoordinatorStateError, 'Was zero before decrementation'
        end
      end
 
```
data/lib/karafka/processing/executor.rb
```diff
@@ -114,10 +114,16 @@ module Karafka
      # @return [Object] cached consumer instance
      def consumer
        @consumer ||= begin
+          strategy = ::Karafka::App.config.internal.processing.strategy_selector.find(@topic)
+
          consumer = @topic.consumer_class.new
+          # We use singleton class as the same consumer class may be used to process different
+          # topics with different settings
+          consumer.singleton_class.include(strategy)
          consumer.topic = @topic
          consumer.client = @client
          consumer.producer = ::Karafka::App.producer
+
          consumer
        end
      end
```
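The `singleton_class.include(strategy)` call above is what lets one consumer class serve several topics with different feature sets. A standalone plain-Ruby illustration of that mechanism (throwaway names, not Karafka code):

```ruby
# Including a module into an instance's singleton class affects only that instance,
# so the shared consumer class itself is never modified.
module MomStrategy
  def handle_after_consume
    :manual_offsets
  end
end

module DefaultStrategy
  def handle_after_consume
    :auto_offsets
  end
end

class ExampleConsumer; end

a = ExampleConsumer.new
b = ExampleConsumer.new

# Each instance receives only the strategy matching "its" topic
a.singleton_class.include(MomStrategy)
b.singleton_class.include(DefaultStrategy)

a.handle_after_consume # => :manual_offsets
b.handle_after_consume # => :auto_offsets
```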
data/lib/karafka/processing/strategies/aj_dlq_mom.rb
```diff
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Processing
+    module Strategies
+      # ActiveJob strategy to cooperate with the DLQ.
+      #
+      # While AJ is uses MOM by default because it delegates the offset management to the AJ
+      # consumer. With DLQ however there is an extra case for skipping broken jobs with offset
+      # marking due to ordered processing.
+      module AjDlqMom
+        include DlqMom
+
+        # Apply strategy when only when using AJ with MOM and DLQ
+        FEATURES = %i[
+          active_job
+          dead_letter_queue
+          manual_offset_management
+        ].freeze
+
+        # How should we post-finalize consumption.
+        def handle_after_consume
+          return if revoked?
+
+          if coordinator.success?
+            # Do NOT commit offsets, they are comitted after each job in the AJ consumer.
+            coordinator.pause_tracker.reset
+          elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
+            pause(coordinator.seek_offset)
+          else
+            coordinator.pause_tracker.reset
+            skippable_message = find_skippable_message
+            dispatch_to_dlq(skippable_message)
+            # We can commit the offset here because we know that we skip it "forever" and
+            # since AJ consumer commits the offset after each job, we also know that the
+            # previous job was successful
+            mark_as_consumed(skippable_message)
+            pause(coordinator.seek_offset)
+          end
+        end
+      end
+    end
+  end
+end
```
data/lib/karafka/processing/strategies/aj_mom.rb
```diff
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Processing
+    module Strategies
+      # ActiveJob enabled
+      # Manual offset management enabled
+      #
+      # This is the default AJ strategy since AJ cannot be used without MOM
+      module AjMom
+        include Mom
+
+        # Apply strategy when only when using AJ with MOM
+        FEATURES = %i[
+          active_job
+          manual_offset_management
+        ].freeze
+      end
+    end
+  end
+end
```
data/lib/karafka/processing/strategies/base.rb
```diff
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Processing
+    # Our processing patterns differ depending on various features configurations
+    # In this namespace we collect strategies for particular feature combinations to simplify the
+    # design. Based on features combinations we can then select handling strategy for a given case.
+    #
+    # @note The lack of common code here is intentional. It would get complex if there would be
+    #   any type of composition, so each strategy is expected to be self-sufficient
+    module Strategies
+      # Base strategy that should be included in each strategy, just to ensure the API
+      module Base
+        # What should happen before jobs are enqueued
+        # @note This runs from the listener thread, not recommended to put anything slow here
+        def handle_before_enqueue
+          raise NotImplementedError, 'Implement in a subclass'
+        end
+
+        # What should happen before we kick in the processing
+        def handle_before_consume
+          raise NotImplementedError, 'Implement in a subclass'
+        end
+
+        # Post-consumption handling
+        def handle_after_consume
+          raise NotImplementedError, 'Implement in a subclass'
+        end
+
+        # Revocation handling
+        def handle_revoked
+          raise NotImplementedError, 'Implement in a subclass'
+        end
+      end
+    end
+  end
+end
```
data/lib/karafka/processing/strategies/default.rb
```diff
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Processing
+    module Strategies
+      # No features enabled.
+      # No manual offset management
+      # No long running jobs
+      # Nothing. Just standard, automatic flow
+      module Default
+        include Base
+
+        # Apply strategy for a non-feature based flow
+        FEATURES = %i[].freeze
+
+        # No actions needed for the standard flow here
+        def handle_before_enqueue
+          nil
+        end
+
+        # Increment number of attempts
+        def handle_before_consume
+          coordinator.pause_tracker.increment
+        end
+
+        # Standard flow marks work as consumed and moves on if everything went ok.
+        # If there was a processing error, we will pause and continue from the next message
+        # (next that is +1 from the last one that was successfully marked as consumed)
+        def handle_after_consume
+          return if revoked?
+
+          if coordinator.success?
+            coordinator.pause_tracker.reset
+
+            mark_as_consumed(messages.last)
+          else
+            pause(coordinator.seek_offset)
+          end
+        end
+
+        # We need to always un-pause the processing in case we have lost a given partition.
+        # Otherwise the underlying librdkafka would not know we may want to continue processing and
+        # the pause could in theory last forever
+        def handle_revoked
+          resume
+
+          coordinator.revoke
+        end
+      end
+    end
+  end
+end
```
data/lib/karafka/processing/strategies/dlq.rb
```diff
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Processing
+    module Strategies
+      # When using dead letter queue, processing won't stop after defined number of retries
+      # upon encountering non-critical errors but the messages that error will be moved to a
+      # separate topic with their payload and metadata, so they can be handled differently.
+      module Dlq
+        include Default
+
+        # Apply strategy when only dead letter queue is turned on
+        FEATURES = %i[
+          dead_letter_queue
+        ].freeze
+
+        # When manual offset management is on, we do not mark anything as consumed automatically
+        # and we rely on the user to figure things out
+        def handle_after_consume
+          return if revoked?
+
+          if coordinator.success?
+            coordinator.pause_tracker.reset
+
+            mark_as_consumed(messages.last)
+          elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
+            pause(coordinator.seek_offset)
+          # If we've reached number of retries that we could, we need to skip the first message
+          # that was not marked as consumed, pause and continue, while also moving this message
+          # to the dead topic
+          else
+            # We reset the pause to indicate we will now consider it as "ok".
+            coordinator.pause_tracker.reset
+
+            skippable_message = find_skippable_message
+
+            # Send skippable message to the dql topic
+            dispatch_to_dlq(skippable_message)
+
+            # We mark the broken message as consumed and move on
+            mark_as_consumed(skippable_message)
+
+            return if revoked?
+
+            # We pause to backoff once just in case.
+            pause(coordinator.seek_offset)
+          end
+        end
+
+        # Finds the message we want to skip
+        # @private
+        def find_skippable_message
+          skippable_message = messages.find { |message| message.offset == coordinator.seek_offset }
+          skippable_message || raise(Errors::SkipMessageNotFoundError, topic.name)
+        end
+
+        # Moves the broken message into a separate queue defined via the settings
+        # @private
+        # @param skippable_message [Karafka::Messages::Message] message we are skipping that also
+        #   should go to the dlq topic
+        def dispatch_to_dlq(skippable_message)
+          producer.produce_async(
+            topic: topic.dead_letter_queue.topic,
+            payload: skippable_message.raw_payload
+          )
+
+          # Notify about dispatch on the events bus
+          Karafka.monitor.instrument(
+            'dead_letter_queue.dispatched',
+            caller: self,
+            message: skippable_message
+          )
+        end
+      end
+    end
+  end
+end
```
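The strategy reads `topic.dead_letter_queue.topic` and `topic.dead_letter_queue.max_retries`, which come from the new `dead_letter_queue` routing feature (files 84–87 above, whose contents are not shown in this diff). A hedged sketch of enabling it, assuming the routing setter accepts `topic:` and `max_retries:` keywords matching those readers:

```ruby
# Hedged routing sketch; the keyword names follow the readers used by the strategy
# above, and the consumer/topic names are made-up examples.
class KarafkaApp < Karafka::App
  routes.draw do
    topic :orders_states do
      consumer OrdersStatesConsumer

      # After max_retries failed attempts on the same message, the blocking message is
      # produced to the 'dead_messages' topic, marked as consumed, and processing moves on.
      dead_letter_queue(
        topic: 'dead_messages',
        max_retries: 2
      )
    end
  end
end
```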
data/lib/karafka/processing/strategies/dlq_mom.rb
```diff
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Processing
+    module Strategies
+      # Same as pure dead letter queue but we do not marked failed message as consumed
+      module DlqMom
+        include Dlq
+
+        # Apply strategy when dlq is on with manual offset management
+        FEATURES = %i[
+          dead_letter_queue
+          manual_offset_management
+        ].freeze
+
+        # When manual offset management is on, we do not mark anything as consumed automatically
+        # and we rely on the user to figure things out
+        def handle_after_consume
+          return if revoked?
+
+          if coordinator.success?
+            coordinator.pause_tracker.reset
+          elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
+            pause(coordinator.seek_offset)
+          # If we've reached number of retries that we could, we need to skip the first message
+          # that was not marked as consumed, pause and continue, while also moving this message
+          # to the dead topic
+          else
+            # We reset the pause to indicate we will now consider it as "ok".
+            coordinator.pause_tracker.reset
+
+            skippable_message = find_skippable_message
+            dispatch_to_dlq(skippable_message)
+
+            # We pause to backoff once just in case.
+            pause(coordinator.seek_offset)
+          end
+        end
+      end
+    end
+  end
+end
```
data/lib/karafka/processing/strategies/mom.rb
```diff
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Processing
+    module Strategies
+      # When using manual offset management, we do not mark as consumed after successful processing
+      module Mom
+        include Default
+
+        # Apply strategy when only manual offset management is turned on
+        FEATURES = %i[
+          manual_offset_management
+        ].freeze
+
+        # When manual offset management is on, we do not mark anything as consumed automatically
+        # and we rely on the user to figure things out
+        def handle_after_consume
+          return if revoked?
+
+          if coordinator.success?
+            coordinator.pause_tracker.reset
+          else
+            pause(coordinator.seek_offset)
+          end
+        end
+      end
+    end
+  end
+end
```
data/lib/karafka/processing/strategy_selector.rb
```diff
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Processing
+    # Selector of appropriate processing strategy matching topic combinations
+    class StrategySelector
+      def initialize
+        # We load them once for performance reasons not to do too many lookups
+        @available_strategies = Strategies
+                                .constants
+                                .delete_if { |k| k == :Base }
+                                .map { |k| Strategies.const_get(k) }
+      end
+
+      # @param topic [Karafka::Routing::Topic] topic with settings based on which we find strategy
+      # @return [Module] module with proper strategy
+      def find(topic)
+        feature_set = [
+          topic.active_job? ? :active_job : nil,
+          topic.manual_offset_management? ? :manual_offset_management : nil,
+          topic.dead_letter_queue? ? :dead_letter_queue : nil
+        ].compact
+
+        @available_strategies.find do |strategy|
+          strategy::FEATURES.sort == feature_set.sort
+        end || raise(Errors::StrategyNotFoundError, topic.name)
+      end
+    end
+  end
+end
```
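The selector simply matches the topic's enabled feature flags against each strategy's `FEATURES` constant. A small standalone sketch of that matching, with the three flags standing in for the topic predicates used above:

```ruby
# Standalone sketch of the feature matching performed by StrategySelector#find.
# The booleans mirror topic.active_job?, topic.manual_offset_management? and
# topic.dead_letter_queue? for a hypothetical topic.
active_job               = false
manual_offset_management = true
dead_letter_queue        = true

feature_set = [
  active_job ? :active_job : nil,
  manual_offset_management ? :manual_offset_management : nil,
  dead_letter_queue ? :dead_letter_queue : nil
].compact.sort
# => [:dead_letter_queue, :manual_offset_management]
#
# That array equals Karafka::Processing::Strategies::DlqMom::FEATURES.sort, so the
# selector returns DlqMom and the executor includes it into the consumer's singleton
# class. No matching strategy at all raises Errors::StrategyNotFoundError.
```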
data/lib/karafka/railtie.rb
```diff
@@ -81,15 +81,16 @@ if rails
 
           Rails.application.reloader.reload!
         end
+      end
 
-        ::Karafka::App.monitor.subscribe('worker.completed') do
-          # Skip in case someone is using Rails without ActiveRecord
-          next unless Object.const_defined?('ActiveRecord::Base')
-
-          # Always release the connection after processing is done. Otherwise thread may hang
-          # blocking the reload and further processing
-          # @see https://github.com/rails/rails/issues/44183
-          ActiveRecord::Base.connection_pool.release_connection
+      initializer 'karafka.release_active_record_connections' do
+        ActiveSupport.on_load(:active_record) do
+          ::Karafka::App.monitor.subscribe('worker.completed') do
+            # Always release the connection after processing is done. Otherwise thread may hang
+            # blocking the reload and further processing
+            # @see https://github.com/rails/rails/issues/44183
+            ActiveRecord::Base.clear_active_connections!
+          end
        end
      end
 
```
data/lib/karafka/routing/builder.rb
```diff
@@ -33,7 +33,13 @@ module Karafka
      instance_eval(&block)
 
      each do |consumer_group|
+        # Validate consumer group settings
        Contracts::ConsumerGroup.new.validate!(consumer_group.to_h)
+
+        # and then its topics settings
+        consumer_group.topics.each do |topic|
+          Contracts::Topic.new.validate!(topic.to_h)
+        end
      end
    end
 
```
data/lib/karafka/routing/features/active_job/builder.rb
```diff
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Routing
+    module Features
+      class ActiveJob < Base
+        # Routing extensions for ActiveJob
+        module Builder
+          # This method simplifies routes definition for ActiveJob topics / queues by
+          # auto-injecting the consumer class
+          #
+          # @param name [String, Symbol] name of the topic where ActiveJobs jobs should go
+          # @param block [Proc] block that we can use for some extra configuration
+          def active_job_topic(name, &block)
+            topic(name) do
+              consumer App.config.internal.active_job.consumer_class
+              active_job true
+
+              # This is handled by our custom ActiveJob consumer
+              # Without this, default behaviour would cause messages to skip upon shutdown as the
+              # offset would be committed for the last message
+              manual_offset_management true
+
+              next unless block
+
+              instance_eval(&block)
+            end
+          end
+        end
+      end
+    end
+  end
+end
```
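A usage sketch of the builder extension above. The queue name and job class are illustrative, and it assumes the application's ActiveJob queue adapter is set to Karafka's adapter:

```ruby
# Illustrative use of active_job_topic; :default and ExampleJob are made-up names.
# The helper wires in the ActiveJob consumer and enables manual offset management,
# so a bare declaration is usually enough.
class KarafkaApp < Karafka::App
  routes.draw do
    active_job_topic :default
  end
end

# With the ActiveJob queue adapter pointed at Karafka (assumed app-level config),
# jobs are dispatched the usual ActiveJob way and consumed by the Karafka server:
class ExampleJob < ActiveJob::Base
  queue_as :default

  def perform(user_id)
    # work happens inside the Karafka consumer process
  end
end

ExampleJob.perform_later(42)
```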
data/lib/karafka/routing/features/active_job/config.rb
```diff
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Routing
+    module Features
+      class ActiveJob < Base
+        # Config for ActiveJob usage
+        Config = Struct.new(
+          :active,
+          keyword_init: true
+        ) { alias_method :active?, :active }
+      end
+    end
+  end
+end
```