karafka 2.0.15 → 2.0.16

Sign up to get free protection for your applications and to get access to all the features.
Files changed (105) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +1 -1
  4. data/.rspec +2 -0
  5. data/CHANGELOG.md +78 -0
  6. data/Gemfile.lock +14 -14
  7. data/LICENSE +1 -1
  8. data/README.md +2 -1
  9. data/bin/integrations +3 -2
  10. data/bin/rspecs +4 -0
  11. data/config/errors.yml +10 -4
  12. data/lib/active_job/karafka.rb +0 -6
  13. data/lib/karafka/active_job/consumer.rb +1 -0
  14. data/lib/karafka/admin.rb +2 -2
  15. data/lib/karafka/base_consumer.rb +31 -21
  16. data/lib/karafka/connection/listener.rb +6 -4
  17. data/lib/karafka/contracts/consumer_group.rb +0 -14
  18. data/lib/karafka/contracts/{consumer_group_topic.rb → topic.rb} +2 -3
  19. data/lib/karafka/errors.rb +6 -4
  20. data/lib/karafka/instrumentation/logger_listener.rb +25 -11
  21. data/lib/karafka/instrumentation/notifications.rb +2 -0
  22. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -1
  23. data/lib/karafka/instrumentation/vendors/datadog/listener.rb +37 -32
  24. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +153 -0
  25. data/lib/karafka/pro/active_job/consumer.rb +3 -1
  26. data/lib/karafka/pro/active_job/dispatcher.rb +3 -1
  27. data/lib/karafka/pro/active_job/job_options_contract.rb +3 -1
  28. data/lib/karafka/pro/base_consumer.rb +3 -85
  29. data/lib/karafka/pro/loader.rb +31 -24
  30. data/lib/karafka/pro/performance_tracker.rb +3 -1
  31. data/lib/karafka/pro/processing/coordinator.rb +16 -1
  32. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +3 -1
  33. data/lib/karafka/pro/processing/jobs_builder.rb +3 -1
  34. data/lib/karafka/pro/processing/partitioner.rb +3 -1
  35. data/lib/karafka/pro/processing/scheduler.rb +3 -1
  36. data/lib/karafka/pro/processing/strategies/aj_dlq_lrj_mom.rb +40 -0
  37. data/lib/karafka/pro/processing/strategies/aj_dlq_mom.rb +62 -0
  38. data/lib/karafka/pro/processing/strategies/aj_lrj_mom.rb +35 -0
  39. data/lib/karafka/pro/processing/strategies/aj_lrj_mom_vp.rb +69 -0
  40. data/lib/karafka/pro/processing/strategies/aj_mom.rb +33 -0
  41. data/lib/karafka/pro/processing/strategies/aj_mom_vp.rb +58 -0
  42. data/lib/karafka/pro/processing/strategies/base.rb +26 -0
  43. data/lib/karafka/pro/processing/strategies/default.rb +69 -0
  44. data/lib/karafka/pro/processing/strategies/dlq.rb +88 -0
  45. data/lib/karafka/pro/processing/strategies/dlq_lrj.rb +64 -0
  46. data/lib/karafka/pro/processing/strategies/dlq_lrj_mom.rb +60 -0
  47. data/lib/karafka/pro/processing/strategies/dlq_mom.rb +58 -0
  48. data/lib/karafka/pro/processing/strategies/lrj.rb +76 -0
  49. data/lib/karafka/pro/processing/strategies/lrj_mom.rb +68 -0
  50. data/lib/karafka/pro/processing/strategies/lrj_vp.rb +33 -0
  51. data/lib/karafka/pro/processing/strategies/mom.rb +43 -0
  52. data/lib/karafka/pro/processing/strategies/vp.rb +32 -0
  53. data/lib/karafka/pro/processing/strategy_selector.rb +58 -0
  54. data/lib/karafka/pro/{contracts → routing/features}/base.rb +8 -5
  55. data/lib/karafka/pro/routing/features/dead_letter_queue/contract.rb +49 -0
  56. data/lib/karafka/pro/routing/{builder_extensions.rb → features/dead_letter_queue.rb} +9 -12
  57. data/lib/karafka/pro/routing/features/long_running_job/config.rb +28 -0
  58. data/lib/karafka/pro/routing/features/long_running_job/contract.rb +37 -0
  59. data/lib/karafka/pro/routing/features/long_running_job/topic.rb +42 -0
  60. data/lib/karafka/pro/routing/features/long_running_job.rb +28 -0
  61. data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +30 -0
  62. data/lib/karafka/pro/routing/features/virtual_partitions/contract.rb +69 -0
  63. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +56 -0
  64. data/lib/karafka/pro/routing/features/virtual_partitions.rb +27 -0
  65. data/lib/karafka/processing/coordinator.rb +1 -1
  66. data/lib/karafka/processing/executor.rb +6 -0
  67. data/lib/karafka/processing/strategies/aj_dlq_mom.rb +44 -0
  68. data/lib/karafka/processing/strategies/aj_mom.rb +21 -0
  69. data/lib/karafka/processing/strategies/base.rb +37 -0
  70. data/lib/karafka/processing/strategies/default.rb +52 -0
  71. data/lib/karafka/processing/strategies/dlq.rb +77 -0
  72. data/lib/karafka/processing/strategies/dlq_mom.rb +42 -0
  73. data/lib/karafka/processing/strategies/mom.rb +29 -0
  74. data/lib/karafka/processing/strategy_selector.rb +30 -0
  75. data/lib/karafka/railtie.rb +9 -8
  76. data/lib/karafka/routing/builder.rb +6 -0
  77. data/lib/karafka/routing/features/active_job/builder.rb +33 -0
  78. data/lib/karafka/routing/features/active_job/config.rb +15 -0
  79. data/lib/karafka/routing/features/active_job/contract.rb +41 -0
  80. data/lib/karafka/routing/features/active_job/topic.rb +33 -0
  81. data/lib/karafka/routing/features/active_job.rb +13 -0
  82. data/lib/karafka/routing/features/base/expander.rb +53 -0
  83. data/lib/karafka/routing/features/base.rb +34 -0
  84. data/lib/karafka/routing/features/dead_letter_queue/config.rb +19 -0
  85. data/lib/karafka/routing/features/dead_letter_queue/contract.rb +40 -0
  86. data/lib/karafka/routing/features/dead_letter_queue/topic.rb +40 -0
  87. data/lib/karafka/routing/features/dead_letter_queue.rb +16 -0
  88. data/lib/karafka/routing/features/manual_offset_management/config.rb +15 -0
  89. data/lib/karafka/routing/features/manual_offset_management/contract.rb +24 -0
  90. data/lib/karafka/routing/features/manual_offset_management/topic.rb +35 -0
  91. data/lib/karafka/routing/features/manual_offset_management.rb +18 -0
  92. data/lib/karafka/routing/topic.rb +2 -10
  93. data/lib/karafka/server.rb +4 -2
  94. data/lib/karafka/setup/attributes_map.rb +5 -0
  95. data/lib/karafka/setup/config.rb +4 -4
  96. data/lib/karafka/time_trackers/pause.rb +21 -12
  97. data/lib/karafka/version.rb +1 -1
  98. data/lib/karafka.rb +7 -11
  99. data.tar.gz.sig +0 -0
  100. metadata +57 -9
  101. metadata.gz.sig +0 -0
  102. data/lib/karafka/active_job/routing/extensions.rb +0 -33
  103. data/lib/karafka/pro/contracts/consumer_group.rb +0 -34
  104. data/lib/karafka/pro/contracts/consumer_group_topic.rb +0 -69
  105. data/lib/karafka/pro/routing/topic_extensions.rb +0 -74
@@ -0,0 +1,41 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ class ActiveJob < Base
7
+ # Rules around using ActiveJob routing - basically you need to have ActiveJob available
8
+ # in order to be able to use active job routing
9
+ class Contract < Contracts::Base
10
+ configure do |config|
11
+ config.error_messages = YAML.safe_load(
12
+ File.read(
13
+ File.join(Karafka.gem_root, 'config', 'errors.yml')
14
+ )
15
+ ).fetch('en').fetch('validations').fetch('topic')
16
+ end
17
+
18
+ virtual do |data, errors|
19
+ next unless errors.empty?
20
+ next unless data[:active_job][:active]
21
+ # One should not define ActiveJob jobs without ActiveJob being available for usage
22
+ next if Object.const_defined?('ActiveJob::Base')
23
+
24
+ [[%i[consumer], :active_job_missing]]
25
+ end
26
+
27
+ # ActiveJob needs to always run with manual offset management
28
+ # Automatic offset management cannot work with ActiveJob. Otherwise we could mark as
29
+ # consumed jobs that did not run because of shutdown.
30
+ virtual do |data, errors|
31
+ next unless errors.empty?
32
+ next unless data[:active_job][:active]
33
+ next if data[:manual_offset_management][:active]
34
+
35
+ [[%i[manual_offset_management], :must_be_enabled]]
36
+ end
37
+ end
38
+ end
39
+ end
40
+ end
41
+ end
@@ -0,0 +1,33 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ class ActiveJob < Base
7
+ # Topic extensions to be able to check if given topic is ActiveJob topic
8
+ module Topic
9
+ # @param active [Boolean] should this topic be considered one working with ActiveJob
10
+ #
11
+ # @note Since this feature supports only one setting (active), we can use the old API
12
+ # where the boolean would be an argument
13
+ def active_job(active = false)
14
+ @active_job ||= Config.new(active: active)
15
+ end
16
+
17
+ # @return [Boolean] is this an ActiveJob topic
18
+ def active_job?
19
+ active_job.active?
20
+ end
21
+
22
+ # @return [Hash] topic with all its native configuration options plus active job
23
+ # namespace settings
24
+ def to_h
25
+ super.merge(
26
+ active_job: active_job.to_h
27
+ ).freeze
28
+ end
29
+ end
30
+ end
31
+ end
32
+ end
33
+ end
@@ -0,0 +1,13 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ # Active-Job related components
7
+ # @note We can load it always, despite someone not using ActiveJob as it just adds a method
8
+ # to the routing, without actually breaking anything.
9
+ class ActiveJob < Base
10
+ end
11
+ end
12
+ end
13
+ end
@@ -0,0 +1,53 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ class Base
7
+ # Routing builder expander that injects feature related drawing operations into it
8
+ class Expander < Module
9
+ # @param scope [Module] feature scope in which the contract and other components should be defined
10
+ # @return [Expander] builder expander instance
11
+ def initialize(scope)
12
+ super()
13
+ @scope = scope
14
+ end
15
+
16
+ # Builds anonymous module that alters how `#draw` behaves allowing the feature contracts
17
+ # to run.
18
+ # @param mod [::Karafka::Routing::Builder] builder we will prepend to
19
+ def prepended(mod)
20
+ super
21
+
22
+ mod.prepend(prepended_module)
23
+ end
24
+
25
+ private
26
+
27
+ # @return [Module] builds an anonymous module with `#draw` that will alter the builder
28
+ # `#draw` allowing to run feature context aware code.
29
+ def prepended_module
30
+ scope = @scope
31
+
32
+ Module.new do
33
+ # Runs validations related to this feature on a topic
34
+ #
35
+ # @param block [Proc] routing defining block
36
+ define_method :draw do |&block|
37
+ result = super(&block)
38
+
39
+ each do |consumer_group|
40
+ consumer_group.topics.each do |topic|
41
+ scope::Contract.new.validate!(topic.to_h)
42
+ end
43
+ end
44
+
45
+ result
46
+ end
47
+ end
48
+ end
49
+ end
50
+ end
51
+ end
52
+ end
53
+ end
@@ -0,0 +1,34 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ # Namespace for all the topic related features we support
6
+ #
7
+ # @note Not all the Karafka features need to be defined here as only those that have routing
8
+ # or other extensions need to be here. That is why we keep (for now) features under the
9
+ # routing namespace.
10
+ module Features
11
+ # Base for all the features
12
+ class Base
13
+ class << self
14
+ # Extends topic and builder with given feature API
15
+ def activate
16
+ Topic.prepend(self::Topic) if const_defined?('Topic')
17
+ Proxy.prepend(self::Builder) if const_defined?('Builder')
18
+ Builder.prepend(self::Builder) if const_defined?('Builder')
19
+ Builder.prepend(Base::Expander.new(self)) if const_defined?('Contract')
20
+ end
21
+
22
+ # Loads all the features and activates them
23
+ def load_all
24
+ ObjectSpace
25
+ .each_object(Class)
26
+ .select { |klass| klass < self }
27
+ .sort_by(&:to_s)
28
+ .each(&:activate)
29
+ end
30
+ end
31
+ end
32
+ end
33
+ end
34
+ end
@@ -0,0 +1,19 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ class DeadLetterQueue < Base
7
+ # Config for dead letter queue feature
8
+ Config = Struct.new(
9
+ :active,
10
+ # We add skip variants but in regular we support only `:one`
11
+ :max_retries,
12
+ # To what topic the skipped messages should be moved
13
+ :topic,
14
+ keyword_init: true
15
+ ) { alias_method :active?, :active }
16
+ end
17
+ end
18
+ end
19
+ end
@@ -0,0 +1,40 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ class DeadLetterQueue < Base
7
+ # Rules around dead letter queue settings
8
+ class Contract < Contracts::Base
9
+ configure do |config|
10
+ config.error_messages = YAML.safe_load(
11
+ File.read(
12
+ File.join(Karafka.gem_root, 'config', 'errors.yml')
13
+ )
14
+ ).fetch('en').fetch('validations').fetch('topic')
15
+ end
16
+
17
+ nested :dead_letter_queue do
18
+ required(:active) { |val| [true, false].include?(val) }
19
+ required(:max_retries) { |val| val.is_a?(Integer) && val >= 0 }
20
+ end
21
+
22
+ # Validate topic name only if dlq is active
23
+ virtual do |data, errors|
24
+ next unless errors.empty?
25
+
26
+ dead_letter_queue = data[:dead_letter_queue]
27
+
28
+ next unless dead_letter_queue[:active]
29
+
30
+ topic = dead_letter_queue[:topic]
31
+
32
+ next if topic.is_a?(String) && Contracts::TOPIC_REGEXP.match?(topic)
33
+
34
+ [[%i[dead_letter_queue topic], :format]]
35
+ end
36
+ end
37
+ end
38
+ end
39
+ end
40
+ end
@@ -0,0 +1,40 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ class DeadLetterQueue < Base
7
+ # DLQ topic extensions
8
+ module Topic
9
+ # After how many retries should we move data to the DLQ
10
+ DEFAULT_MAX_RETRIES = 3
11
+
12
+ private_constant :DEFAULT_MAX_RETRIES
13
+
14
+ # @param max_retries [Integer] after how many retries should we move data to dlq
15
+ # @param topic [String] where the messages should be moved if failing
16
+ # @return [Config] defined config
17
+ def dead_letter_queue(max_retries: DEFAULT_MAX_RETRIES, topic: nil)
18
+ @dead_letter_queue ||= Config.new(
19
+ active: !topic.nil?,
20
+ max_retries: max_retries,
21
+ topic: topic
22
+ )
23
+ end
24
+
25
+ # @return [Boolean] is the dlq active or not
26
+ def dead_letter_queue?
27
+ dead_letter_queue.active?
28
+ end
29
+
30
+ # @return [Hash] topic with all its native configuration options plus dlq settings
31
+ def to_h
32
+ super.merge(
33
+ dead_letter_queue: dead_letter_queue.to_h
34
+ ).freeze
35
+ end
36
+ end
37
+ end
38
+ end
39
+ end
40
+ end
@@ -0,0 +1,16 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ # This feature allows processing to continue when encountering errors.
7
+ # After certain number of retries, given messages will be moved to alternative topic,
8
+ # unclogging processing.
9
+ #
10
+ # @note This feature has an expanded version in the Pro mode. We do not use a new feature
11
+ # injection in Pro (topic settings)
12
+ class DeadLetterQueue < Base
13
+ end
14
+ end
15
+ end
16
+ end
@@ -0,0 +1,15 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ class ManualOffsetManagement < Base
7
+ # Config for manual offset management feature
8
+ Config = Struct.new(
9
+ :active,
10
+ keyword_init: true
11
+ ) { alias_method :active?, :active }
12
+ end
13
+ end
14
+ end
15
+ end
@@ -0,0 +1,24 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ class ManualOffsetManagement < Base
7
+ # Rules around manual offset management settings
8
+ class Contract < Contracts::Base
9
+ configure do |config|
10
+ config.error_messages = YAML.safe_load(
11
+ File.read(
12
+ File.join(Karafka.gem_root, 'config', 'errors.yml')
13
+ )
14
+ ).fetch('en').fetch('validations').fetch('topic')
15
+ end
16
+
17
+ nested :manual_offset_management do
18
+ required(:active) { |val| [true, false].include?(val) }
19
+ end
20
+ end
21
+ end
22
+ end
23
+ end
24
+ end
@@ -0,0 +1,35 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ class ManualOffsetManagement < Base
7
+ # Topic extensions to be able to manage manual offset management settings
8
+ module Topic
9
+ # @param active [Boolean] should we stop managing the offset in Karafka and make the user
10
+ # responsible for marking messages as consumed.
11
+ # @return [Config] defined config
12
+ #
13
+ # @note Since this feature supports only one setting (active), we can use the old API
14
+ # where the boolean would be an argument
15
+ def manual_offset_management(active = false)
16
+ @manual_offset_management ||= Config.new(active: active)
17
+ end
18
+
19
+ # @return [Boolean] is manual offset management enabled for a given topic
20
+ def manual_offset_management?
21
+ manual_offset_management.active?
22
+ end
23
+
24
+ # @return [Hash] topic with all its native configuration options plus manual offset
25
+ # management namespace settings
26
+ def to_h
27
+ super.merge(
28
+ manual_offset_management: manual_offset_management.to_h
29
+ ).freeze
30
+ end
31
+ end
32
+ end
33
+ end
34
+ end
35
+ end
@@ -0,0 +1,18 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Features
6
+ # All the things needed to be able to manage manual offset management from the routing
7
+ # perspective.
8
+ #
9
+ # Manual offset management allows users to completely disable automatic management of the
10
+ # offset. This can be used for implementing long-living window operations and other things
11
+ # where we do not want to commit the offset with each batch.
12
+ #
13
+ # Not all the Karafka and Karafka Pro features may be compatible with this feature being on.
14
+ class ManualOffsetManagement < Base
15
+ end
16
+ end
17
+ end
18
+ end
@@ -6,7 +6,7 @@ module Karafka
6
6
  # It belongs to a consumer group as from 0.6 all the topics can work in the same consumer group
7
7
  # It is a part of Karafka's DSL.
8
8
  class Topic
9
- attr_reader :id, :name, :consumer_group, :tags
9
+ attr_reader :id, :name, :consumer_group
10
10
  attr_writer :consumer
11
11
  attr_accessor :subscription_group
12
12
 
@@ -14,7 +14,6 @@ module Karafka
14
14
  INHERITABLE_ATTRIBUTES = %i[
15
15
  kafka
16
16
  deserializer
17
- manual_offset_management
18
17
  max_messages
19
18
  max_wait_time
20
19
  initial_offset
@@ -32,7 +31,6 @@ module Karafka
32
31
  # Karafka 0.6 we can handle multiple Kafka instances with the same process and we can
33
32
  # have same topic name across multiple consumer groups
34
33
  @id = "#{consumer_group.id}_#{@name}"
35
- @tags = []
36
34
  end
37
35
 
38
36
  INHERITABLE_ATTRIBUTES.each do |attribute|
@@ -77,11 +75,6 @@ module Karafka
77
75
  consumer
78
76
  end
79
77
 
80
- # @return [Boolean] true if this topic offset is handled by the end user
81
- def manual_offset_management?
82
- manual_offset_management
83
- end
84
-
85
78
  # @return [Hash] hash with all the topic attributes
86
79
  # @note This is being used when we validate the consumer_group and its topics
87
80
  def to_h
@@ -94,8 +87,7 @@ module Karafka
94
87
  name: name,
95
88
  consumer: consumer,
96
89
  consumer_group_id: consumer_group.id,
97
- subscription_group: subscription_group,
98
- tags: tags
90
+ subscription_group: subscription_group
99
91
  ).freeze
100
92
  end
101
93
  end
@@ -74,7 +74,7 @@ module Karafka
74
74
  # please start a separate thread to do so.
75
75
  def stop
76
76
  # Initialize the stopping process only if Karafka was running
77
- return unless Karafka::App.running?
77
+ return if Karafka::App.stopping? || Karafka::App.stopped?
78
78
 
79
79
  Karafka::App.stop!
80
80
 
@@ -120,7 +120,9 @@ module Karafka
120
120
  # exit! is not within the instrumentation as it would not trigger due to exit
121
121
  Kernel.exit!(FORCEFUL_EXIT_CODE)
122
122
  ensure
123
- Karafka::App.stopped!
123
+ # We need to check if it wasn't an early exit to make sure that only the stop invocation
124
+ # can change the status after everything is closed
125
+ Karafka::App.stopped! if timeout
124
126
  end
125
127
 
126
128
  private
@@ -128,6 +128,7 @@ module Karafka
128
128
  ssl.key.pem
129
129
  ssl.keystore.location
130
130
  ssl.keystore.password
131
+ ssl.providers
131
132
  ssl.sigalgs.list
132
133
  ssl_ca
133
134
  ssl_certificate
@@ -259,6 +260,7 @@ module Karafka
259
260
  ssl.key.pem
260
261
  ssl.keystore.location
261
262
  ssl.keystore.password
263
+ ssl.providers
262
264
  ssl.sigalgs.list
263
265
  ssl_ca
264
266
  ssl_certificate
@@ -327,6 +329,9 @@ module Karafka
327
329
  end
328
330
  end
329
331
 
332
+ # This can be removed when librdkafka 0.13 is released
333
+ attributes[:producer].delete_if { |val| val == 'allow.auto.create.topics' }
334
+
330
335
  attributes.transform_values!(&:sort)
331
336
  attributes.each_value { |vals| vals.map!(&:to_sym) }
332
337
  attributes
@@ -68,8 +68,6 @@ module Karafka
68
68
  # option [String] should we start with the earliest possible offset or latest
69
69
  # This will set the `auto.offset.reset` value unless present in the kafka scope
70
70
  setting :initial_offset, default: 'earliest'
71
- # option [Boolean] should we leave offset management to the user
72
- setting :manual_offset_management, default: false
73
71
  # options max_messages [Integer] how many messages do we want to fetch from Kafka in one go
74
72
  setting :max_messages, default: 100
75
73
  # option [Integer] number of milliseconds we can wait while fetching data
@@ -80,9 +78,9 @@ module Karafka
80
78
  setting :shutdown_timeout, default: 60_000
81
79
  # option [Integer] number of threads in which we want to do parallel processing
82
80
  setting :concurrency, default: 5
83
- # option [Integer] how long should we wait upon processing error
81
+ # option [Integer] how long should we wait upon processing error (milliseconds)
84
82
  setting :pause_timeout, default: 1_000
85
- # option [Integer] what is the max timeout in case of an exponential backoff
83
+ # option [Integer] what is the max timeout in case of an exponential backoff (milliseconds)
86
84
  setting :pause_max_timeout, default: 30_000
87
85
  # option [Boolean] should we use exponential backoff
88
86
  setting :pause_with_exponential_backoff, default: true
@@ -120,6 +118,8 @@ module Karafka
120
118
  setting :coordinator_class, default: Processing::Coordinator
121
119
  # option partitioner_class [Class] partitioner we use against a batch of data
122
120
  setting :partitioner_class, default: Processing::Partitioner
121
+ # option strategy_selector [Object] processing strategy selector to be used
122
+ setting :strategy_selector, default: Processing::StrategySelector.new
123
123
  end
124
124
 
125
125
  # Karafka components for ActiveJob
@@ -10,7 +10,7 @@ module Karafka
10
10
  # code here, as this is not a frequently used tracker. It is active only once per batch in
11
11
  # case of long-running-jobs and upon errors.
12
12
  class Pause < Base
13
- attr_reader :count
13
+ attr_reader :attempt
14
14
 
15
15
  # @param timeout [Integer] how long should we wait when anything went wrong (in ms)
16
16
  # @param max_timeout [Integer, nil] if exponential is on, what is the max value we can reach
@@ -19,26 +19,29 @@ module Karafka
19
19
  # timeout value
20
20
  # @return [Karafka::TimeTrackers::Pause]
21
21
  # @example
22
- # pause = Karafka::TimeTrackers::Pause.new(timeout: 1000)
22
+ # options = { timeout: 1000, max_timeout: 1000, exponential_backoff: false }
23
+ # pause = Karafka::TimeTrackers::Pause.new(**options)
23
24
  # pause.expired? #=> true
24
25
  # pause.paused? #=> false
25
26
  # pause.pause
27
+ # pause.increment
26
28
  # sleep(1.1)
27
29
  # pause.paused? #=> true
28
30
  # pause.expired? #=> true
29
- # pause.count #=> 1
31
+ # pause.attempt #=> 1
30
32
  # pause.pause
31
- # pause.count #=> 1
33
+ # pause.increment
34
+ # pause.attempt #=> 2
32
35
  # pause.paused? #=> true
33
36
  # pause.expired? #=> false
34
37
  # pause.resume
35
- # pause.count #=> 2
38
+ # pause.attempt #=> 2
36
39
  # pause.paused? #=> false
37
40
  # pause.reset
38
- # pause.count #=> 0
41
+ # pause.attempt #=> 0
39
42
  def initialize(timeout:, max_timeout:, exponential_backoff:)
40
43
  @started_at = nil
41
- @count = 0
44
+ @attempt = 0
42
45
  @timeout = timeout
43
46
  @max_timeout = max_timeout
44
47
  @exponential_backoff = exponential_backoff
@@ -47,7 +50,7 @@ module Karafka
47
50
  end
48
51
 
49
52
  # Pauses the processing from now till the end of the interval (backoff or non-backoff)
50
- # and records the count.
53
+ # and records the attempt.
51
54
  # @param timeout [Integer] timeout value in milliseconds that overwrites the default timeout
52
55
  # @note Providing this value can be useful when we explicitly want to pause for a certain
53
56
  # period of time, outside of any regular pausing logic
@@ -55,7 +58,13 @@ module Karafka
55
58
  @mutex.synchronize do
56
59
  @started_at = now
57
60
  @ends_at = @started_at + timeout
58
- @count += 1
61
+ end
62
+ end
63
+
64
+ # Increments the number of attempts by 1
65
+ def increment
66
+ @mutex.synchronize do
67
+ @attempt += 1
59
68
  end
60
69
  end
61
70
 
@@ -88,10 +97,10 @@ module Karafka
88
97
  end
89
98
  end
90
99
 
91
- # Resets the pause counter.
100
+ # Resets the pause attempt count.
92
101
  def reset
93
102
  @mutex.synchronize do
94
- @count = 0
103
+ @attempt = 0
95
104
  end
96
105
  end
97
106
 
@@ -100,7 +109,7 @@ module Karafka
100
109
  # Computes the exponential backoff
101
110
  # @return [Integer] backoff in milliseconds
102
111
  def backoff_interval
103
- backoff_factor = @exponential_backoff ? 2**@count : 1
112
+ backoff_factor = @exponential_backoff ? 2**@attempt : 1
104
113
 
105
114
  timeout = backoff_factor * @timeout
106
115
 
@@ -3,5 +3,5 @@
3
3
  # Main module namespace
4
4
  module Karafka
5
5
  # Current Karafka version
6
- VERSION = '2.0.15'
6
+ VERSION = '2.0.16'
7
7
  end
data/lib/karafka.rb CHANGED
@@ -86,17 +86,13 @@ end
86
86
  loader = Zeitwerk::Loader.for_gem
87
87
  # Do not load Rails extensions by default, this will be handled by Railtie if they are needed
88
88
  loader.ignore(Karafka.gem_root.join('lib/active_job'))
89
-
90
- begin
91
- require 'karafka-license'
92
- rescue LoadError
93
- # Do not load pro components if we cannot load the license
94
- # This is a preliminary check so autoload works as expected
95
- # Later on the licenser will make sure to setup all the needed components anyhow
96
- loader.ignore(Karafka.gem_root.join('lib/karafka/pro'))
97
- end
98
-
89
+ # Do not load pro components as they will be loaded if needed and allowed
90
+ loader.ignore(Karafka.core_root.join('pro/'))
99
91
  # Do not load vendors instrumentation components. Those need to be required manually if needed
100
- loader.ignore(Karafka.gem_root.join('lib/karafka/instrumentation/vendors'))
92
+ loader.ignore(Karafka.core_root.join('instrumentation/vendors'))
101
93
  loader.setup
102
94
  loader.eager_load
95
+
96
+ # This will load features but since Pro are not loaded automatically, they will not be visible
97
+ # nor included here
98
+ ::Karafka::Routing::Features::Base.load_all
data.tar.gz.sig CHANGED
Binary file