karafka 2.0.37 → 2.0.39

Files changed (116)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +1 -1
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +34 -0
  6. data/Gemfile.lock +7 -7
  7. data/README.md +1 -1
  8. data/bin/integrations +1 -1
  9. data/config/locales/errors.yml +0 -7
  10. data/config/locales/pro_errors.yml +18 -0
  11. data/lib/karafka/active_job/consumer.rb +22 -7
  12. data/lib/karafka/admin.rb +46 -14
  13. data/lib/karafka/base_consumer.rb +35 -55
  14. data/lib/karafka/connection/listener.rb +15 -10
  15. data/lib/karafka/errors.rb +0 -3
  16. data/lib/karafka/instrumentation/logger_listener.rb +44 -3
  17. data/lib/karafka/instrumentation/notifications.rb +7 -0
  18. data/lib/karafka/pro/active_job/consumer.rb +10 -5
  19. data/lib/karafka/pro/processing/coordinator.rb +13 -4
  20. data/lib/karafka/pro/processing/filters/base.rb +61 -0
  21. data/lib/karafka/pro/processing/filters/delayer.rb +70 -0
  22. data/lib/karafka/pro/processing/filters/expirer.rb +51 -0
  23. data/lib/karafka/pro/processing/filters/throttler.rb +84 -0
  24. data/lib/karafka/pro/processing/filters_applier.rb +100 -0
  25. data/lib/karafka/pro/processing/jobs_builder.rb +7 -3
  26. data/lib/karafka/pro/processing/scheduler.rb +24 -7
  27. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb +68 -0
  28. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb +74 -0
  29. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom.rb +72 -0
  30. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom_vp.rb +76 -0
  31. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb +62 -0
  32. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb +68 -0
  33. data/lib/karafka/pro/processing/strategies/aj/dlq_mom.rb +64 -0
  34. data/lib/karafka/pro/processing/strategies/aj/dlq_mom_vp.rb +69 -0
  35. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom.rb +38 -0
  36. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb +64 -0
  37. data/lib/karafka/pro/processing/strategies/aj/ftr_mom.rb +38 -0
  38. data/lib/karafka/pro/processing/strategies/aj/ftr_mom_vp.rb +58 -0
  39. data/lib/karafka/pro/processing/strategies/{dlq_lrj_vp.rb → aj/lrj_mom.rb} +14 -13
  40. data/lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb +77 -0
  41. data/lib/karafka/pro/processing/strategies/aj/mom.rb +36 -0
  42. data/lib/karafka/pro/processing/strategies/aj/mom_vp.rb +52 -0
  43. data/lib/karafka/pro/processing/strategies/dlq/default.rb +131 -0
  44. data/lib/karafka/pro/processing/strategies/dlq/ftr.rb +61 -0
  45. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb +75 -0
  46. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +74 -0
  47. data/lib/karafka/pro/processing/strategies/{mom.rb → dlq/ftr_lrj_vp.rb} +16 -19
  48. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb +73 -0
  49. data/lib/karafka/pro/processing/strategies/dlq/ftr_vp.rb +39 -0
  50. data/lib/karafka/pro/processing/strategies/dlq/lrj.rb +63 -0
  51. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +66 -0
  52. data/lib/karafka/pro/processing/strategies/dlq/lrj_vp.rb +38 -0
  53. data/lib/karafka/pro/processing/strategies/dlq/mom.rb +67 -0
  54. data/lib/karafka/pro/processing/strategies/dlq/vp.rb +39 -0
  55. data/lib/karafka/pro/processing/strategies/ftr/default.rb +104 -0
  56. data/lib/karafka/pro/processing/strategies/ftr/vp.rb +40 -0
  57. data/lib/karafka/pro/processing/strategies/lrj/default.rb +85 -0
  58. data/lib/karafka/pro/processing/strategies/lrj/ftr.rb +69 -0
  59. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb +67 -0
  60. data/lib/karafka/pro/processing/strategies/{vp.rb → lrj/ftr_vp.rb} +15 -13
  61. data/lib/karafka/pro/processing/strategies/lrj/mom.rb +78 -0
  62. data/lib/karafka/pro/processing/strategies/{aj_lrj_mom.rb → lrj/vp.rb} +13 -12
  63. data/lib/karafka/pro/processing/strategies/mom/default.rb +46 -0
  64. data/lib/karafka/pro/processing/strategies/mom/ftr.rb +53 -0
  65. data/lib/karafka/pro/processing/strategies/vp/default.rb +53 -0
  66. data/lib/karafka/pro/processing/{strategies/lrj_vp.rb → strategies.rb} +1 -13
  67. data/lib/karafka/pro/processing/strategy_selector.rb +44 -18
  68. data/lib/karafka/pro/{processing/strategies/aj_mom.rb → routing/features/delaying/config.rb} +7 -13
  69. data/lib/karafka/pro/routing/features/delaying/contract.rb +38 -0
  70. data/lib/karafka/pro/routing/features/delaying/topic.rb +59 -0
  71. data/lib/karafka/pro/routing/features/delaying.rb +29 -0
  72. data/lib/karafka/pro/routing/features/expiring/config.rb +27 -0
  73. data/lib/karafka/pro/routing/features/expiring/contract.rb +38 -0
  74. data/lib/karafka/pro/routing/features/expiring/topic.rb +59 -0
  75. data/lib/karafka/pro/routing/features/expiring.rb +27 -0
  76. data/lib/karafka/pro/routing/features/filtering/config.rb +40 -0
  77. data/lib/karafka/pro/routing/features/filtering/contract.rb +41 -0
  78. data/lib/karafka/pro/routing/features/filtering/topic.rb +51 -0
  79. data/lib/karafka/pro/routing/features/filtering.rb +27 -0
  80. data/lib/karafka/pro/routing/features/long_running_job/contract.rb +1 -1
  81. data/lib/karafka/pro/routing/features/throttling/config.rb +32 -0
  82. data/lib/karafka/pro/routing/features/throttling/contract.rb +41 -0
  83. data/lib/karafka/pro/routing/features/throttling/topic.rb +69 -0
  84. data/lib/karafka/pro/routing/features/throttling.rb +30 -0
  85. data/lib/karafka/processing/coordinator.rb +60 -30
  86. data/lib/karafka/processing/coordinators_buffer.rb +5 -1
  87. data/lib/karafka/processing/executor.rb +23 -16
  88. data/lib/karafka/processing/executors_buffer.rb +10 -26
  89. data/lib/karafka/processing/jobs/consume.rb +2 -4
  90. data/lib/karafka/processing/jobs/idle.rb +24 -0
  91. data/lib/karafka/processing/jobs_builder.rb +2 -3
  92. data/lib/karafka/processing/result.rb +5 -0
  93. data/lib/karafka/processing/strategies/aj_dlq_mom.rb +1 -1
  94. data/lib/karafka/processing/strategies/base.rb +5 -0
  95. data/lib/karafka/processing/strategies/default.rb +50 -0
  96. data/lib/karafka/processing/strategies/dlq.rb +13 -4
  97. data/lib/karafka/processing/strategies/dlq_mom.rb +8 -3
  98. data/lib/karafka/processing/strategy_selector.rb +27 -10
  99. data/lib/karafka/version.rb +1 -1
  100. data/renovate.json +6 -0
  101. data.tar.gz.sig +0 -0
  102. metadata +66 -22
  103. metadata.gz.sig +0 -0
  104. data/lib/karafka/pro/processing/strategies/aj_dlq_lrj_mom.rb +0 -42
  105. data/lib/karafka/pro/processing/strategies/aj_dlq_lrj_mom_vp.rb +0 -70
  106. data/lib/karafka/pro/processing/strategies/aj_dlq_mom.rb +0 -62
  107. data/lib/karafka/pro/processing/strategies/aj_dlq_mom_vp.rb +0 -68
  108. data/lib/karafka/pro/processing/strategies/aj_lrj_mom_vp.rb +0 -75
  109. data/lib/karafka/pro/processing/strategies/aj_mom_vp.rb +0 -62
  110. data/lib/karafka/pro/processing/strategies/dlq.rb +0 -120
  111. data/lib/karafka/pro/processing/strategies/dlq_lrj.rb +0 -65
  112. data/lib/karafka/pro/processing/strategies/dlq_lrj_mom.rb +0 -62
  113. data/lib/karafka/pro/processing/strategies/dlq_mom.rb +0 -62
  114. data/lib/karafka/pro/processing/strategies/dlq_vp.rb +0 -37
  115. data/lib/karafka/pro/processing/strategies/lrj.rb +0 -83
  116. data/lib/karafka/pro/processing/strategies/lrj_mom.rb +0 -73
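Most of the new Pro files listed above implement four new per-topic routing features: delaying, expiring, filtering, and throttling. As a rough, unverified sketch of how such options could be declared in the routing DSL; the method names (delay_by, expire_in, throttle) and the OrdersConsumer class are assumptions inferred from the feature and config file names, not taken from this diff:

# Hypothetical routing sketch for the new Pro features (delaying, expiring,
# throttling). DSL method names are assumed, not confirmed by this diff.
class KarafkaApp < Karafka::App
  routes.draw do
    topic :orders do
      consumer OrdersConsumer                # illustrative consumer class

      delay_by(60_000)                       # delaying: only process messages at least 60s old
      expire_in(3_600_000)                   # expiring: drop messages older than 1 hour
      throttle(limit: 100, interval: 60_000) # throttling: at most 100 messages per minute
    end
  end
end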
data/lib/karafka/processing/executors_buffer.rb CHANGED
@@ -20,14 +20,13 @@ module Karafka
  # @param topic [String] topic name
  # @param partition [Integer] partition number
  # @param parallel_key [String] parallel group key
+ # @param coordinator [Karafka::Processing::Coordinator]
  # @return [Executor] consumer executor
- def find_or_create(topic, partition, parallel_key)
- ktopic = find_topic(topic)
-
- @buffer[ktopic][partition][parallel_key] ||= Executor.new(
+ def find_or_create(topic, partition, parallel_key, coordinator)
+ @buffer[topic][partition][parallel_key] ||= Executor.new(
  @subscription_group.id,
  @client,
- ktopic
+ coordinator
  )
  end

@@ -37,9 +36,7 @@ module Karafka
  # @param topic [String] topic name
  # @param partition [Integer] partition number
  def revoke(topic, partition)
- ktopic = find_topic(topic)
-
- @buffer[ktopic][partition].clear
+ @buffer[topic][partition].clear
  end

  # Finds all the executors available for a given topic partition
@@ -48,9 +45,7 @@ module Karafka
  # @param partition [Integer] partition number
  # @return [Array<Executor>] executors in use for this topic + partition
  def find_all(topic, partition)
- ktopic = find_topic(topic)
-
- @buffer[ktopic][partition].values
+ @buffer[topic][partition].values
  end

  # Iterates over all available executors and yields them together with topic and partition
@@ -59,11 +54,10 @@ module Karafka
  # @yieldparam [Integer] partition number
  # @yieldparam [Executor] given executor
  def each
- @buffer.each do |ktopic, partitions|
- partitions.each do |partition, executors|
- executors.each do |_parallel_key, executor|
- # We skip the parallel key here as it does not serve any value when iterating
- yield(ktopic, partition, executor)
+ @buffer.each do |_, partitions|
+ partitions.each do |_, executors|
+ executors.each do |_, executor|
+ yield(executor)
  end
  end
  end
@@ -73,16 +67,6 @@ module Karafka
  def clear
  @buffer.clear
  end
-
- private
-
- # Finds topic based on its name
- #
- # @param topic [String] topic we're looking for
- # @return [Karafka::Routing::Topic] topic we're interested in
- def find_topic(topic)
- @subscription_group.topics.find(topic) || raise(Errors::TopicNotFoundError, topic)
- end
  end
  end
  end
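The ExecutorsBuffer now receives the routing topic and the coordinator from its caller instead of resolving the topic by name (find_topic is gone), and each yields bare executors. A minimal caller-side sketch based only on the hunks above; the local variables are assumed to be in scope on the listener side:

# The caller already holds the routing topic and the coordinator, so it can
# hand them to the buffer directly:
executor = executors_buffer.find_or_create(topic, partition, parallel_key, coordinator)

# Iteration now yields only the executor, without topic and partition:
executors_buffer.each do |executor|
  # schedule shutdown or idle work per active executor
end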
data/lib/karafka/processing/jobs/consume.rb CHANGED
@@ -12,19 +12,17 @@ module Karafka
  # @param executor [Karafka::Processing::Executor] executor that is suppose to run a given
  # job
  # @param messages [Karafka::Messages::Messages] karafka messages batch
- # @param coordinator [Karafka::Processing::Coordinator] processing coordinator
  # @return [Consume]
- def initialize(executor, messages, coordinator)
+ def initialize(executor, messages)
  @executor = executor
  @messages = messages
- @coordinator = coordinator
  super()
  end

  # Runs all the preparation code on the executor that needs to happen before the job is
  # enqueued.
  def before_enqueue
- executor.before_enqueue(@messages, @coordinator)
+ executor.before_enqueue(@messages)
  end

  # Runs the before consumption preparations on the executor
data/lib/karafka/processing/jobs/idle.rb ADDED
@@ -0,0 +1,24 @@
+ # frozen_string_literal: true
+
+ module Karafka
+ module Processing
+ module Jobs
+ # Type of job that we may use to run some extra handling that happens without the user
+ # related lifecycle event like consumption, revocation, etc.
+ class Idle < Base
+ # @param executor [Karafka::Processing::Executor] executor that is suppose to run a given
+ # job on an active consumer
+ # @return [Shutdown]
+ def initialize(executor)
+ @executor = executor
+ super()
+ end
+
+ # Run the idle work via the executor
+ def call
+ executor.idle
+ end
+ end
+ end
+ end
+ end
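The new Idle job simply wraps executor.idle so that extra handling not tied to a user lifecycle event (per the class comment above) can flow through the same job pipeline as consumption, revocation, and shutdown. A minimal sketch, constructing the job by hand purely for illustration; in the framework it is built and scheduled internally:

job = Karafka::Processing::Jobs::Idle.new(executor)
job.call # delegates to executor.idle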
data/lib/karafka/processing/jobs_builder.rb CHANGED
@@ -7,10 +7,9 @@ module Karafka
  class JobsBuilder
  # @param executor [Karafka::Processing::Executor]
  # @param messages [Karafka::Messages::Messages] messages batch to be consumed
- # @param coordinator [Karafka::Processing::Coordinator]
  # @return [Karafka::Processing::Jobs::Consume] consumption job
- def consume(executor, messages, coordinator)
- Jobs::Consume.new(executor, messages, coordinator)
+ def consume(executor, messages)
+ Jobs::Consume.new(executor, messages)
  end

  # @param executor [Karafka::Processing::Executor]
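With the coordinator now attached to the executor at creation time (see the ExecutorsBuffer change above), consumption jobs no longer carry it. A small sketch of the simplified builder call, grounded in the hunks above and assuming jobs_builder, executor, and messages are in scope:

job = jobs_builder.consume(executor, messages)
job.before_enqueue # now calls executor.before_enqueue(messages) only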
data/lib/karafka/processing/result.rb CHANGED
@@ -32,6 +32,11 @@ module Karafka
  @success = false
  @cause = cause
  end
+
+ # @return [Boolean] true if processing failed
+ def failure?
+ !@success
+ end
  end
  end
  end
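A small convenience: the processing result gains a failure? predicate, so callers no longer have to negate the success flag themselves. A hedged usage sketch, assuming success? remains the existing positive counterpart and that the caller method shown is illustrative:

# Instead of `unless result.success?`:
retry_processing if result.failure? # retry_processing is an illustrative caller method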
data/lib/karafka/processing/strategies/aj_dlq_mom.rb CHANGED
@@ -29,7 +29,7 @@ module Karafka
  retry_after_pause
  else
  coordinator.pause_tracker.reset
- skippable_message = find_skippable_message
+ skippable_message, = find_skippable_message
  dispatch_to_dlq(skippable_message)
  # We can commit the offset here because we know that we skip it "forever" and
  # since AJ consumer commits the offset after each job, we also know that the
data/lib/karafka/processing/strategies/base.rb CHANGED
@@ -32,6 +32,11 @@ module Karafka
  raise NotImplementedError, 'Implement in a subclass'
  end

+ # Idle run handling
+ def handle_idle
+ raise NotImplementedError, 'Implement in a subclass'
+ end
+
  # Revocation handling
  def handle_revoked
  raise NotImplementedError, 'Implement in a subclass'
data/lib/karafka/processing/strategies/default.rb CHANGED
@@ -13,6 +13,51 @@ module Karafka
  # Apply strategy for a non-feature based flow
  FEATURES = %i[].freeze

+ # Marks message as consumed in an async way.
+ #
+ # @param message [Messages::Message] last successfully processed message.
+ # @return [Boolean] true if we were able to mark the offset, false otherwise.
+ # False indicates that we were not able and that we have lost the partition.
+ #
+ # @note We keep track of this offset in case we would mark as consumed and got error when
+ # processing another message. In case like this we do not pause on the message we've
+ # already processed but rather at the next one. This applies to both sync and async
+ # versions of this method.
+ def mark_as_consumed(message)
+ # Ignore earlier offsets than the one we already committed
+ return true if coordinator.seek_offset > message.offset
+
+ unless client.mark_as_consumed(message)
+ coordinator.revoke
+
+ return false
+ end
+
+ coordinator.seek_offset = message.offset + 1
+
+ true
+ end
+
+ # Marks message as consumed in a sync way.
+ #
+ # @param message [Messages::Message] last successfully processed message.
+ # @return [Boolean] true if we were able to mark the offset, false otherwise.
+ # False indicates that we were not able and that we have lost the partition.
+ def mark_as_consumed!(message)
+ # Ignore earlier offsets than the one we already committed
+ return true if coordinator.seek_offset > message.offset
+
+ unless client.mark_as_consumed!(message)
+ coordinator.revoke
+
+ return false
+ end
+
+ coordinator.seek_offset = message.offset + 1
+
+ true
+ end
+
  # No actions needed for the standard flow here
  def handle_before_enqueue
  nil
@@ -63,6 +108,11 @@ module Karafka
  end
  end

+ # Code that should run on idle runs without messages available
+ def handle_idle
+ nil
+ end
+
  # We need to always un-pause the processing in case we have lost a given partition.
  # Otherwise the underlying librdkafka would not know we may want to continue processing and
  # the pause could in theory last forever
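mark_as_consumed and mark_as_consumed! now return a boolean and revoke the coordinator when the offset store rejects the mark, which typically means the partition has been lost. A consumer-side sketch of honoring that return value; persist is an illustrative application method, not part of Karafka:

class EventsConsumer < Karafka::BaseConsumer
  def consume
    messages.each do |message|
      persist(message.payload)
      # Stop processing the rest of the batch if the partition was lost
      return unless mark_as_consumed(message)
    end
  end
end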
data/lib/karafka/processing/strategies/dlq.rb CHANGED
@@ -34,7 +34,7 @@ module Karafka
  # We reset the pause to indicate we will now consider it as "ok".
  coordinator.pause_tracker.reset

- skippable_message = find_skippable_message
+ skippable_message, = find_skippable_message

  # Send skippable message to the dql topic
  dispatch_to_dlq(skippable_message)
@@ -49,11 +49,20 @@ module Karafka
  end
  end

- # Finds the message we want to skip
+ # Finds the message may want to skip (all, starting from first)
  # @private
+ # @return [Array<Karafka::Messages::Message, Boolean>] message we may want to skip and
+ # information if this message was from marked offset or figured out via mom flow
  def find_skippable_message
- skippable_message = messages.find { |message| message.offset == coordinator.seek_offset }
- skippable_message || raise(Errors::SkipMessageNotFoundError, topic.name)
+ skippable_message = messages.find do |msg|
+ coordinator.marked? && msg.offset == coordinator.seek_offset
+ end
+
+ # If we don't have the message matching the last comitted offset, it means that
+ # user operates with manual offsets and we're beyond the batch in which things
+ # broke for the first time. Then we skip the first (as no markings) and we
+ # move on one by one.
+ skippable_message ? [skippable_message, true] : [messages.first, false]
  end

  # Moves the broken message into a separate queue defined via the settings
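find_skippable_message now returns a pair, so DLQ strategies can tell whether the skip candidate came from a marked offset or was inferred for the manual offset management flow. A short sketch of the two cases, based only on the code above:

skippable_message, marked = find_skippable_message
# marked == true  -> message at coordinator.seek_offset (offsets were marked)
# marked == false -> messages.first (manual offset management, nothing marked yet);
#                    the DLQ flow then advances one message at a time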
data/lib/karafka/processing/strategies/dlq_mom.rb CHANGED
@@ -29,11 +29,16 @@ module Karafka
  # We reset the pause to indicate we will now consider it as "ok".
  coordinator.pause_tracker.reset

- skippable_message = find_skippable_message
+ skippable_message, marked = find_skippable_message
+
  dispatch_to_dlq(skippable_message)

- # We pause to backoff once just in case.
- pause(coordinator.seek_offset, nil, false)
+ # Backoff and move forward
+ if marked
+ pause(coordinator.seek_offset, nil, false)
+ else
+ pause(skippable_message.offset + 1, nil, false)
+ end
  end
  end
  end
data/lib/karafka/processing/strategy_selector.rb CHANGED
@@ -4,27 +4,44 @@ module Karafka
  module Processing
  # Selector of appropriate processing strategy matching topic combinations
  class StrategySelector
+ attr_reader :strategies
+
+ # Features we support in the OSS offering.
+ SUPPORTED_FEATURES = %i[
+ active_job
+ manual_offset_management
+ dead_letter_queue
+ ].freeze
+
  def initialize
  # We load them once for performance reasons not to do too many lookups
- @available_strategies = Strategies
- .constants
- .delete_if { |k| k == :Base }
- .map { |k| Strategies.const_get(k) }
+ @strategies = find_all
  end

  # @param topic [Karafka::Routing::Topic] topic with settings based on which we find strategy
  # @return [Module] module with proper strategy
  def find(topic)
- feature_set = [
- topic.active_job? ? :active_job : nil,
- topic.manual_offset_management? ? :manual_offset_management : nil,
- topic.dead_letter_queue? ? :dead_letter_queue : nil
- ].compact
+ feature_set = SUPPORTED_FEATURES.map do |feature_name|
+ topic.public_send("#{feature_name}?") ? feature_name : nil
+ end
+
+ feature_set.compact!

- @available_strategies.find do |strategy|
+ @strategies.find do |strategy|
  strategy::FEATURES.sort == feature_set.sort
  end || raise(Errors::StrategyNotFoundError, topic.name)
  end
+
+ private
+
+ # @return [Array<Module>] available strategies
+ def find_all
+ Strategies
+ .constants
+ .delete_if { |k| k == :Base }
+ .map { |k| Strategies.const_get(k) }
+ .uniq
+ end
  end
  end
  end
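The selector now derives the feature set generically from SUPPORTED_FEATURES and matches it against each strategy's FEATURES. A worked sketch of that matching, using only the public API shown above (the exact feature set for a given topic depends on its routing configuration):

selector = Karafka::Processing::StrategySelector.new

# For a topic with ActiveJob and DLQ enabled, the computed feature set might be
# [:active_job, :manual_offset_management, :dead_letter_queue], and the returned
# module is the strategy whose FEATURES sort to the same array.
strategy = selector.find(topic) # topic is a configured Karafka::Routing::Topic
strategy::FEATURES              # e.g. %i[active_job manual_offset_management dead_letter_queue]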
data/lib/karafka/version.rb CHANGED
@@ -3,5 +3,5 @@
  # Main module namespace
  module Karafka
  # Current Karafka version
- VERSION = '2.0.37'
+ VERSION = '2.0.39'
  end
data/renovate.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+ "extends": [
+ "config:base"
+ ]
+ }
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka
  version: !ruby/object:Gem::Version
- version: 2.0.37
+ version: 2.0.39
  platform: ruby
  authors:
  - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
  Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
  MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
  -----END CERTIFICATE-----
- date: 2023-03-20 00:00:00.000000000 Z
+ date: 2023-04-11 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: karafka-core
@@ -228,40 +228,82 @@ files:
  - lib/karafka/pro/performance_tracker.rb
  - lib/karafka/pro/processing/collapser.rb
  - lib/karafka/pro/processing/coordinator.rb
+ - lib/karafka/pro/processing/filters/base.rb
+ - lib/karafka/pro/processing/filters/delayer.rb
+ - lib/karafka/pro/processing/filters/expirer.rb
+ - lib/karafka/pro/processing/filters/throttler.rb
+ - lib/karafka/pro/processing/filters_applier.rb
  - lib/karafka/pro/processing/jobs/consume_non_blocking.rb
  - lib/karafka/pro/processing/jobs/revoked_non_blocking.rb
  - lib/karafka/pro/processing/jobs_builder.rb
  - lib/karafka/pro/processing/partitioner.rb
  - lib/karafka/pro/processing/scheduler.rb
- - lib/karafka/pro/processing/strategies/aj_dlq_lrj_mom.rb
- - lib/karafka/pro/processing/strategies/aj_dlq_lrj_mom_vp.rb
- - lib/karafka/pro/processing/strategies/aj_dlq_mom.rb
- - lib/karafka/pro/processing/strategies/aj_dlq_mom_vp.rb
- - lib/karafka/pro/processing/strategies/aj_lrj_mom.rb
- - lib/karafka/pro/processing/strategies/aj_lrj_mom_vp.rb
- - lib/karafka/pro/processing/strategies/aj_mom.rb
- - lib/karafka/pro/processing/strategies/aj_mom_vp.rb
+ - lib/karafka/pro/processing/strategies.rb
+ - lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb
+ - lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb
+ - lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom.rb
+ - lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom_vp.rb
+ - lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb
+ - lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb
+ - lib/karafka/pro/processing/strategies/aj/dlq_mom.rb
+ - lib/karafka/pro/processing/strategies/aj/dlq_mom_vp.rb
+ - lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom.rb
+ - lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb
+ - lib/karafka/pro/processing/strategies/aj/ftr_mom.rb
+ - lib/karafka/pro/processing/strategies/aj/ftr_mom_vp.rb
+ - lib/karafka/pro/processing/strategies/aj/lrj_mom.rb
+ - lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb
+ - lib/karafka/pro/processing/strategies/aj/mom.rb
+ - lib/karafka/pro/processing/strategies/aj/mom_vp.rb
  - lib/karafka/pro/processing/strategies/base.rb
  - lib/karafka/pro/processing/strategies/default.rb
- - lib/karafka/pro/processing/strategies/dlq.rb
- - lib/karafka/pro/processing/strategies/dlq_lrj.rb
- - lib/karafka/pro/processing/strategies/dlq_lrj_mom.rb
- - lib/karafka/pro/processing/strategies/dlq_lrj_vp.rb
- - lib/karafka/pro/processing/strategies/dlq_mom.rb
- - lib/karafka/pro/processing/strategies/dlq_vp.rb
- - lib/karafka/pro/processing/strategies/lrj.rb
- - lib/karafka/pro/processing/strategies/lrj_mom.rb
- - lib/karafka/pro/processing/strategies/lrj_vp.rb
- - lib/karafka/pro/processing/strategies/mom.rb
- - lib/karafka/pro/processing/strategies/vp.rb
+ - lib/karafka/pro/processing/strategies/dlq/default.rb
+ - lib/karafka/pro/processing/strategies/dlq/ftr.rb
+ - lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb
+ - lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb
+ - lib/karafka/pro/processing/strategies/dlq/ftr_lrj_vp.rb
+ - lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb
+ - lib/karafka/pro/processing/strategies/dlq/ftr_vp.rb
+ - lib/karafka/pro/processing/strategies/dlq/lrj.rb
+ - lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb
+ - lib/karafka/pro/processing/strategies/dlq/lrj_vp.rb
+ - lib/karafka/pro/processing/strategies/dlq/mom.rb
+ - lib/karafka/pro/processing/strategies/dlq/vp.rb
+ - lib/karafka/pro/processing/strategies/ftr/default.rb
+ - lib/karafka/pro/processing/strategies/ftr/vp.rb
+ - lib/karafka/pro/processing/strategies/lrj/default.rb
+ - lib/karafka/pro/processing/strategies/lrj/ftr.rb
+ - lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb
+ - lib/karafka/pro/processing/strategies/lrj/ftr_vp.rb
+ - lib/karafka/pro/processing/strategies/lrj/mom.rb
+ - lib/karafka/pro/processing/strategies/lrj/vp.rb
+ - lib/karafka/pro/processing/strategies/mom/default.rb
+ - lib/karafka/pro/processing/strategies/mom/ftr.rb
+ - lib/karafka/pro/processing/strategies/vp/default.rb
  - lib/karafka/pro/processing/strategy_selector.rb
  - lib/karafka/pro/routing/features/base.rb
  - lib/karafka/pro/routing/features/dead_letter_queue.rb
  - lib/karafka/pro/routing/features/dead_letter_queue/contract.rb
+ - lib/karafka/pro/routing/features/delaying.rb
+ - lib/karafka/pro/routing/features/delaying/config.rb
+ - lib/karafka/pro/routing/features/delaying/contract.rb
+ - lib/karafka/pro/routing/features/delaying/topic.rb
+ - lib/karafka/pro/routing/features/expiring.rb
+ - lib/karafka/pro/routing/features/expiring/config.rb
+ - lib/karafka/pro/routing/features/expiring/contract.rb
+ - lib/karafka/pro/routing/features/expiring/topic.rb
+ - lib/karafka/pro/routing/features/filtering.rb
+ - lib/karafka/pro/routing/features/filtering/config.rb
+ - lib/karafka/pro/routing/features/filtering/contract.rb
+ - lib/karafka/pro/routing/features/filtering/topic.rb
  - lib/karafka/pro/routing/features/long_running_job.rb
  - lib/karafka/pro/routing/features/long_running_job/config.rb
  - lib/karafka/pro/routing/features/long_running_job/contract.rb
  - lib/karafka/pro/routing/features/long_running_job/topic.rb
+ - lib/karafka/pro/routing/features/throttling.rb
+ - lib/karafka/pro/routing/features/throttling/config.rb
+ - lib/karafka/pro/routing/features/throttling/contract.rb
+ - lib/karafka/pro/routing/features/throttling/topic.rb
  - lib/karafka/pro/routing/features/virtual_partitions.rb
  - lib/karafka/pro/routing/features/virtual_partitions/config.rb
  - lib/karafka/pro/routing/features/virtual_partitions/contract.rb
@@ -273,6 +315,7 @@ files:
  - lib/karafka/processing/executors_buffer.rb
  - lib/karafka/processing/jobs/base.rb
  - lib/karafka/processing/jobs/consume.rb
+ - lib/karafka/processing/jobs/idle.rb
  - lib/karafka/processing/jobs/revoked.rb
  - lib/karafka/processing/jobs/shutdown.rb
  - lib/karafka/processing/jobs_builder.rb
@@ -335,6 +378,7 @@ files:
  - lib/karafka/time_trackers/poll.rb
  - lib/karafka/version.rb
  - log/.gitkeep
+ - renovate.json
  homepage: https://karafka.io
  licenses:
  - LGPL-3.0
@@ -362,7 +406,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.4.6
+ rubygems_version: 3.4.10
  signing_key:
  specification_version: 4
  summary: Karafka is Ruby and Rails efficient Kafka processing framework.
metadata.gz.sig CHANGED
Binary file
data/lib/karafka/pro/processing/strategies/aj_dlq_lrj_mom.rb DELETED
@@ -1,42 +0,0 @@
- # frozen_string_literal: true
-
- # This Karafka component is a Pro component under a commercial license.
- # This Karafka component is NOT licensed under LGPL.
- #
- # All of the commercial components are present in the lib/karafka/pro directory of this
- # repository and their usage requires commercial license agreement.
- #
- # Karafka has also commercial-friendly license, commercial support and commercial components.
- #
- # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
- # your code to Maciej Mensfeld.
-
- module Karafka
- module Pro
- module Processing
- module Strategies
- # ActiveJob enabled
- # DLQ enabled
- # Long-Running Job enabled
- # Manual offset management enabled
- #
- # This case is a bit of special. Please see the `AjDlqMom` for explanation on how the
- # offset management works in this case.
- module AjDlqLrjMom
- # We can use the same code as for VP because non VP behaves like:
- # - with one virtual partition
- # - with "never ending" collapse
- include AjDlqLrjMomVp
-
- # Features for this strategy
- FEATURES = %i[
- active_job
- long_running_job
- manual_offset_management
- dead_letter_queue
- ].freeze
- end
- end
- end
- end
- end
data/lib/karafka/pro/processing/strategies/aj_dlq_lrj_mom_vp.rb DELETED
@@ -1,70 +0,0 @@
- # frozen_string_literal: true
-
- # This Karafka component is a Pro component under a commercial license.
- # This Karafka component is NOT licensed under LGPL.
- #
- # All of the commercial components are present in the lib/karafka/pro directory of this
- # repository and their usage requires commercial license agreement.
- #
- # Karafka has also commercial-friendly license, commercial support and commercial components.
- #
- # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
- # your code to Maciej Mensfeld.
-
- module Karafka
- module Pro
- module Processing
- module Strategies
- # ActiveJob enabled
- # DLQ enabled
- # Long-Running Job enabled
- # Manual offset management enabled
- # Virtual Partitions enabled
- #
- # This case is a bit of special. Please see the `AjDlqMom` for explanation on how the
- # offset management works in this case.
- module AjDlqLrjMomVp
- include AjDlqMomVp
- include AjLrjMom
-
- # Features for this strategy
- FEATURES = %i[
- active_job
- long_running_job
- manual_offset_management
- dead_letter_queue
- virtual_partitions
- ].freeze
-
- # This strategy is pretty much as non VP one because of the collapse
- def handle_after_consume
- coordinator.on_finished do |last_group_message|
- if coordinator.success?
- coordinator.pause_tracker.reset
-
- return if revoked?
- return if Karafka::App.stopping?
-
- # Since we have VP here we do not commit intermediate offsets and need to commit
- # them here. We do commit in collapsed mode but this is generalized.
- mark_as_consumed(last_group_message)
-
- seek(coordinator.seek_offset) unless revoked?
-
- resume
- elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
- retry_after_pause
- else
- coordinator.pause_tracker.reset
- skippable_message = find_skippable_message
- dispatch_to_dlq(skippable_message) if dispatch_to_dlq?
- mark_as_consumed(skippable_message)
- pause(coordinator.seek_offset, nil, false)
- end
- end
- end
- end
- end
- end
- end
- end