karafka 2.1.13 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/CHANGELOG.md +35 -0
  4. data/Gemfile.lock +1 -1
  5. data/config/locales/errors.yml +4 -0
  6. data/config/locales/pro_errors.yml +17 -0
  7. data/lib/karafka/admin.rb +21 -33
  8. data/lib/karafka/connection/client.rb +1 -1
  9. data/lib/karafka/contracts/config.rb +24 -0
  10. data/lib/karafka/pro/contracts/base.rb +23 -0
  11. data/lib/karafka/pro/contracts/server_cli_options.rb +111 -0
  12. data/lib/karafka/pro/loader.rb +4 -1
  13. data/lib/karafka/pro/routing/features/active_job/builder.rb +45 -0
  14. data/lib/karafka/pro/routing/features/active_job.rb +26 -0
  15. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +53 -0
  16. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +41 -0
  17. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +41 -0
  18. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +44 -0
  19. data/lib/karafka/pro/routing/features/long_running_job/{contract.rb → contracts/topic.rb} +14 -11
  20. data/lib/karafka/pro/routing/features/{filtering/contract.rb → patterns/builder.rb} +13 -16
  21. data/lib/karafka/pro/routing/features/patterns/config.rb +54 -0
  22. data/lib/karafka/pro/routing/features/patterns/consumer_group.rb +68 -0
  23. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +62 -0
  24. data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +46 -0
  25. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +41 -0
  26. data/lib/karafka/pro/routing/features/patterns/detector.rb +68 -0
  27. data/lib/karafka/pro/routing/features/patterns/pattern.rb +81 -0
  28. data/lib/karafka/pro/routing/features/{delaying/contract.rb → patterns/patterns.rb} +11 -14
  29. data/lib/karafka/pro/routing/features/patterns/topic.rb +50 -0
  30. data/lib/karafka/pro/routing/features/patterns/topics.rb +53 -0
  31. data/lib/karafka/pro/routing/features/patterns.rb +33 -0
  32. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +51 -0
  33. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +44 -0
  34. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +55 -0
  35. data/lib/karafka/routing/consumer_group.rb +1 -1
  36. data/lib/karafka/routing/features/active_job/contracts/topic.rb +44 -0
  37. data/lib/karafka/routing/features/active_job/proxy.rb +14 -0
  38. data/lib/karafka/routing/features/base/expander.rb +8 -2
  39. data/lib/karafka/routing/features/base.rb +4 -2
  40. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +46 -0
  41. data/lib/karafka/routing/features/declaratives/contracts/topic.rb +33 -0
  42. data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +27 -0
  43. data/lib/karafka/routing/router.rb +0 -11
  44. data/lib/karafka/routing/subscription_group.rb +9 -0
  45. data/lib/karafka/routing/topic.rb +5 -0
  46. data/lib/karafka/server.rb +9 -4
  47. data/lib/karafka/setup/config.rb +45 -0
  48. data/lib/karafka/version.rb +1 -1
  49. data.tar.gz.sig +0 -0
  50. metadata +32 -15
  51. metadata.gz.sig +0 -0
  52. data/lib/karafka/pro/routing/features/dead_letter_queue/contract.rb +0 -50
  53. data/lib/karafka/pro/routing/features/expiring/contract.rb +0 -38
  54. data/lib/karafka/pro/routing/features/pausing/contract.rb +0 -48
  55. data/lib/karafka/pro/routing/features/throttling/contract.rb +0 -41
  56. data/lib/karafka/pro/routing/features/virtual_partitions/contract.rb +0 -52
  57. data/lib/karafka/routing/features/active_job/contract.rb +0 -41
  58. data/lib/karafka/routing/features/dead_letter_queue/contract.rb +0 -42
  59. data/lib/karafka/routing/features/declaratives/contract.rb +0 -30
  60. data/lib/karafka/routing/features/manual_offset_management/contract.rb +0 -24
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 262c2bbfa4fb09c4038ce559c71baa6c40b861497dd7ef3b1915ba6b8aa47652
- data.tar.gz: a34d5fad3bdbd1a58a0938f33c8fd28a5df72cb89293d7835c41cfce09c9e736
+ metadata.gz: 6ca6426dc8527aac122a1d9ebcaccf33a4eff1608133b210e68e66d3a5f5c2c7
+ data.tar.gz: 85233cc04e591e5a96d53b3b9d3fe203b4b56bbde24c564ffa3663daeb3673c6
  SHA512:
- metadata.gz: 2fd4672fef274f5913b543d9b3a91aef8a8aba59a23e457a61f455cf6095ea8176c6f8e799f65602c7ef0bb15add27a960df70cd99913fe6b5340df9cfff8c31
- data.tar.gz: 4b890531f2a783c72573bcc2e915cc70b85e09c5e43de76b979fc34ea732f51a2c8ec4f64e91c95293337973103c7eeee89e424d27fa35dc708f935364fa9db3
+ metadata.gz: e89b72adccdb6a622d571ab9c580191e725cdb98d88f5d86f59d9e4a9900eff74fa438b3838cfe104f7259c0c4cee0dc60f7fb6d16cf14b59ff9229170bae504
+ data.tar.gz: a8ba4b92e1002c6d1112837e1b96706208ba0d55fa39feaadaa4f04f4c616095eb406d72bffa50bd346fb1daff7ca45951efd2237a71c25a7539be5a4397a781
checksums.yaml.gz.sig CHANGED
Binary file
data/CHANGELOG.md CHANGED
@@ -1,5 +1,37 @@
  # Karafka framework changelog
 
+ ## 2.2.0 (2023-09-01)
+ - **[Feature]** Introduce dynamic topic subscriptions based on patterns [Pro].
+ - [Enhancement] Allow for `Karafka::Admin` setup reconfiguration via the `config.admin` scope.
+ - [Enhancement] Make sure that the consumer group used by `Karafka::Admin` obeys the `ConsumerMapper` setup.
+ - [Fix] Fix a case where a subscription group would not accept a symbol name.
+
+ ### Upgrade notes
+
+ As always, please make sure you have upgraded to the most recent version of `2.1` before upgrading to `2.2`.
+
+ If you are not using Kafka ACLs, there is no action you need to take.
+
+ If you are using Kafka ACLs and you have set up permissions for the `karafka_admin` group, please note that this name has now changed and is subject to [Consumer Name Mapping](https://karafka.io/docs/Consumer-mappers/).
+
+ That means you must ensure that the new consumer group, which by default equals `CLIENT_ID_karafka_admin`, has appropriate permissions. Please note that the Web UI also uses this group.
+
+ `Karafka::Admin` now has its own set of configuration options available, and you can find more details about them [here](https://karafka.io/docs/Topics-management-and-administration/#configuration).
+
+ If you want to maintain the `2.1` behavior, that is, the `karafka_admin` admin group, we recommend handling this case inside your consumer mapper. Assuming you use the default one, the code will look as follows:
+
+ ```ruby
+ class MyMapper
+   def call(raw_consumer_group_name)
+     # If the group is the admin one, use it as it was in 2.1
+     return 'karafka_admin' if raw_consumer_group_name == 'karafka_admin'
+
+     # Otherwise use the default karafka strategy for the rest
+     "#{Karafka::App.config.client_id}_#{raw_consumer_group_name}"
+   end
+ end
+ ```
+
  ## 2.1.13 (2023-08-28)
  - **[Feature]** Introduce Cleaning API for much better memory management for iterative data processing [Pro].
  - [Enhancement] Automatically free message resources after processing for ActiveJob jobs [Pro]
@@ -15,7 +47,10 @@
 
  ## 2.1.10 (2023-08-21)
  - [Enhancement] Introduce `connection.client.rebalance_callback` event for instrumentation of rebalances.
+ - [Enhancement] Introduce new `runner.before_call` monitor event.
  - [Refactor] Introduce a low-level commands proxy to handle deviations between how we want to run certain commands and how rdkafka-ruby runs them by design.
+ - [Change] No longer validate excluded topics routing presence when patterns are in use, as it does not match pattern subscriptions where you can exclude topics that may only be subscribed to in the future.
+ - [Fix] Do not report negative lag stored in the DD listener.
  - [Fix] Do not report lags in the DD listener for cases where the assignment is not workable.
  - [Fix] Do not report negative lags in the DD listener.
  - [Fix] Extremely fast shutdown after boot in specs can cause the process not to stop.
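
For reference, the new `config.admin` scope described in the upgrade notes above can be adjusted during application setup. The sketch below is illustrative only: the option names (`kafka`, `group_id`, `max_wait_time`, `max_attempts`) come from the configuration contract later in this diff, while the concrete values are assumptions, not documented defaults.

```ruby
# Illustrative setup sketch for the new `config.admin` scope (values are assumptions).
# The admin kafka hash is merged on top of the root kafka settings, and the group id
# is passed through the configured ConsumerMapper before being used.
Karafka::App.setup do |config|
  config.client_id = 'my_app'
  config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' }

  # Admin-only overrides introduced in 2.2.0
  config.admin.group_id = 'karafka_admin'
  config.admin.max_attempts = 120
  config.admin.kafka = { 'allow.auto.create.topics': false }
end
```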
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
    remote: .
    specs:
-     karafka (2.1.13)
+     karafka (2.2.0)
        karafka-core (>= 2.1.1, < 2.2.0)
        thor (>= 0.20)
        waterdrop (>= 2.6.6, < 3.0.0)
data/config/locales/errors.yml CHANGED
@@ -35,6 +35,10 @@ en:
      key_must_be_a_symbol: All keys under the kafka settings scope need to be symbols
      max_timeout_vs_pause_max_timeout: pause_timeout must be less or equal to pause_max_timeout
      shutdown_timeout_vs_max_wait_time: shutdown_timeout must be more than max_wait_time
+     admin.kafka_format: needs to be a hash
+     admin.group_id_format: 'needs to be a string with a Kafka accepted format'
+     admin.max_wait_time_format: 'needs to be an integer bigger than 0'
+     admin.max_attempts_format: 'needs to be an integer bigger than 0'
 
    server_cli_options:
      missing: needs to be present
data/config/locales/pro_errors.yml CHANGED
@@ -28,6 +28,20 @@ en:
      pause_with_exponential_backoff_format: needs to be either true or false
      pause_timeout_max_timeout_vs_pause_max_timeout: pause_timeout must be less or equal to pause_max_timeout
 
+     patterns.active_format: 'needs to be boolean'
+     patterns.type_format: 'needs to be :matcher, :discovered or :regular'
+
+   consumer_group:
+     patterns_format: must be an array with hashes
+     patterns_missing: needs to be present
+     patterns_regexps_not_unique: 'must be unique within consumer group'
+
+   pattern:
+     regexp_format: must be a regular expression
+     name_format: 'needs to be a string with a Kafka accepted format'
+     regexp_string_format: 'needs to be a string and start with ^'
+     missing: needs to be present
+
    config:
      encryption.active_format: 'needs to be either true or false'
      encryption.public_key_invalid: 'is not a valid public RSA key'
@@ -37,3 +51,6 @@ en:
      encryption.version_format: must be a non-empty string
      encryption.public_key_format: 'is not a valid public RSA key'
      encryption.private_keys_invalid: 'contains an invalid private RSA key string'
+
+     patterns.ttl_format: needs to be an integer bigger than 0
+     patterns.ttl_missing: needs to be present
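
The new `pattern`-scoped messages above back the Pro patterns routing feature introduced in this release. As a rough illustration of what they validate (the consumer class and regexp below are placeholders, and the `pattern` routing helper is the Pro DSL rather than anything defined in this locale file):

```ruby
# Hypothetical routing sketch for the Pro patterns feature; EventsConsumer and the
# regexp are placeholders. Per the messages above, the regexp must be a Regexp and
# its string form must start with ^.
class KarafkaApp < Karafka::App
  routes.draw do
    # Matches existing topics and ones created later at runtime
    pattern(/^events\./) do
      consumer EventsConsumer
    end
  end
end
```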
data/lib/karafka/admin.rb CHANGED
@@ -7,32 +7,9 @@ module Karafka
    # Since admin actions are not performed that often, that should be ok.
    #
    # @note It always uses the primary defined cluster and does not support multi-cluster work.
-   #   If you need this, just replace the cluster info for the time you use this
+   #   Cluster on which operations are performed can be changed via `admin.kafka` config, however
+   #   there is no multi-cluster runtime support.
    module Admin
-     # We wait only for this amount of time before raising error as we intercept this error and
-     #   retry after checking that the operation was finished or failed using external factor.
-     MAX_WAIT_TIMEOUT = 1
-
-     # How many times should be try. 1 x 60 => 60 seconds wait in total
-     MAX_ATTEMPTS = 60
-
-     # Defaults for config
-     CONFIG_DEFAULTS = {
-       'group.id': 'karafka_admin',
-       # We want to know when there is no more data not to end up with an endless loop
-       'enable.partition.eof': true,
-       'statistics.interval.ms': 0,
-       # Fetch at most 5 MBs when using admin
-       'fetch.message.max.bytes': 5 * 1_048_576,
-       # Do not commit offset automatically, this prevents offset tracking for operations involving
-       # a consumer instance
-       'enable.auto.commit': false,
-       # Make sure that topic metadata lookups do not create topics accidentally
-       'allow.auto.create.topics': false
-     }.freeze
-
-     private_constant :CONFIG_DEFAULTS, :MAX_WAIT_TIMEOUT, :MAX_ATTEMPTS
-
      class << self
        # Allows us to read messages from the topic
        #
@@ -123,7 +100,7 @@ module Karafka
        handler = admin.create_topic(name, partitions, replication_factor, topic_config)
 
        with_re_wait(
-         -> { handler.wait(max_wait_timeout: MAX_WAIT_TIMEOUT) },
+         -> { handler.wait(max_wait_timeout: app_config.admin.max_wait_time) },
          -> { topics_names.include?(name) }
        )
      end
@@ -137,7 +114,7 @@ module Karafka
        handler = admin.delete_topic(name)
 
        with_re_wait(
-         -> { handler.wait(max_wait_timeout: MAX_WAIT_TIMEOUT) },
+         -> { handler.wait(max_wait_timeout: app_config.admin.max_wait_time) },
          -> { !topics_names.include?(name) }
        )
      end
@@ -152,7 +129,7 @@ module Karafka
        handler = admin.create_partitions(name, partitions)
 
        with_re_wait(
-         -> { handler.wait(max_wait_timeout: MAX_WAIT_TIMEOUT) },
+         -> { handler.wait(max_wait_timeout: app_config.admin.max_wait_time) },
          -> { topic(name).fetch(:partition_count) >= partitions }
        )
      end
@@ -242,7 +219,7 @@ module Karafka
      rescue Rdkafka::AbstractHandle::WaitTimeoutError
        return if breaker.call
 
-       retry if attempt <= MAX_ATTEMPTS
+       retry if attempt <= app_config.admin.max_attempts
 
        raise
      end
@@ -251,12 +228,18 @@ module Karafka
      # @param settings [Hash] extra settings for config (if needed)
      # @return [::Rdkafka::Config] rdkafka config
      def config(type, settings)
-       config_hash = Karafka::Setup::AttributesMap.public_send(
-         type,
-         Karafka::App.config.kafka.dup.merge(CONFIG_DEFAULTS).merge!(settings)
+       group_id = app_config.consumer_mapper.call(
+         app_config.admin.group_id
        )
 
-       ::Rdkafka::Config.new(config_hash)
+       app_config
+         .kafka
+         .then(&:dup)
+         .merge(app_config.admin.kafka)
+         .merge!(settings)
+         .tap { |config| config[:'group.id'] = group_id }
+         .then { |config| Karafka::Setup::AttributesMap.public_send(type, config) }
+         .then { |config| ::Rdkafka::Config.new(config) }
      end
 
      # Resolves the offset if offset is in a time format. Otherwise returns the offset without
@@ -281,6 +264,11 @@ module Karafka
          offset
        end
      end
+
+     # @return [Karafka::Core::Configurable::Node] root node config
+     def app_config
+       ::Karafka::App.config
+     end
    end
  end
end
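
A usage note on the change above: the public admin API is unchanged, only the wait timeout, retry count, and consumer group now come from the `config.admin` scope. A short sketch with placeholder topic names and sizes:

```ruby
# These calls behave as before, but waiting and retrying now honor
# config.admin.max_wait_time and config.admin.max_attempts instead of the removed
# MAX_WAIT_TIMEOUT / MAX_ATTEMPTS constants. Topic name and sizing are placeholders.
Karafka::Admin.create_topic('example_topic', 6, 2)
Karafka::Admin.create_partitions('example_topic', 12)
Karafka::Admin.delete_topic('example_topic')
```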
data/lib/karafka/connection/client.rb CHANGED
@@ -510,7 +510,7 @@ module Karafka
 
      # Subscription needs to happen after we assigned the rebalance callbacks just in case of
      # a race condition
-     consumer.subscribe(*@subscription_group.topics.map(&:name))
+     consumer.subscribe(*@subscription_group.subscriptions)
      consumer
    end
 
data/lib/karafka/contracts/config.rb CHANGED
@@ -34,6 +34,14 @@ module Karafka
      required(:max_wait_time) { |val| val.is_a?(Integer) && val.positive? }
      required(:kafka) { |val| val.is_a?(Hash) && !val.empty? }
 
+     nested(:admin) do
+       # Can be empty because inherits values from the root kafka
+       required(:kafka) { |val| val.is_a?(Hash) }
+       required(:group_id) { |val| val.is_a?(String) && Contracts::TOPIC_REGEXP.match?(val) }
+       required(:max_wait_time) { |val| val.is_a?(Integer) && val.positive? }
+       required(:max_attempts) { |val| val.is_a?(Integer) && val.positive? }
+     end
+
      # We validate internals just to be sure, that they are present and working
      nested(:internal) do
        required(:status) { |val| !val.nil? }
@@ -74,6 +82,7 @@ module Karafka
        end
      end
 
+     # Ensure all root kafka keys are symbols
      virtual do |data, errors|
        next unless errors.empty?
 
@@ -88,6 +97,21 @@ module Karafka
        detected_errors
      end
 
+     # Ensure all admin kafka keys are symbols
+     virtual do |data, errors|
+       next unless errors.empty?
+
+       detected_errors = []
+
+       data.fetch(:admin).fetch(:kafka).each_key do |key|
+         next if key.is_a?(Symbol)
+
+         detected_errors << [[:admin, :kafka, key], :key_must_be_a_symbol]
+       end
+
+       detected_errors
+     end
+
      virtual do |data, errors|
        next unless errors.empty?
 
data/lib/karafka/pro/contracts/base.rb ADDED
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     # Pro contracts that aim to replace or complement the general framework contracts
+     module Contracts
+       # Base for all the Pro contracts
+       class Base < ::Karafka::Contracts::Base
+       end
+     end
+   end
+ end
data/lib/karafka/pro/contracts/server_cli_options.rb ADDED
@@ -0,0 +1,111 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module Contracts
+       # Contract for validating correctness of the server cli command options.
+       # It differs slightly from the OSS one because it is aware of the routing patterns
+       class ServerCliOptions < ::Karafka::Contracts::ServerCliOptions
+         configure do |config|
+           config.error_messages = YAML.safe_load(
+             File.read(
+               File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
+             )
+           ).fetch('en').fetch('validations').fetch('server_cli_options')
+         end
+
+         %i[
+           include
+           exclude
+         ].each do |action|
+           optional(:"#{action}_consumer_groups") { |cg| cg.is_a?(Array) }
+           optional(:"#{action}_subscription_groups") { |sg| sg.is_a?(Array) }
+           optional(:"#{action}_topics") { |topics| topics.is_a?(Array) }
+
+           virtual do |data, errors|
+             next unless errors.empty?
+
+             value = data.fetch(:"#{action}_consumer_groups")
+
+             # If there were no consumer_groups declared in the server cli, it means that we will
+             # run all of them and no need to validate them here at all
+             next if value.empty?
+             next if (value - Karafka::App.consumer_groups.map(&:name)).empty?
+
+             # Found unknown consumer groups
+             [[[:"#{action}_consumer_groups"], :consumer_groups_inclusion]]
+           end
+
+           virtual do |data, errors|
+             next unless errors.empty?
+
+             value = data.fetch(:"#{action}_subscription_groups")
+
+             # If there were no subscription_groups declared in the server cli, it means that we
+             # will run all of them and no need to validate them here at all
+             next if value.empty?
+
+             subscription_groups = Karafka::App
+                                   .consumer_groups
+                                   .map(&:subscription_groups)
+                                   .flatten
+                                   .map(&:name)
+
+             next if (value - subscription_groups).empty?
+
+             # Found unknown subscription groups
+             [[[:"#{action}_subscription_groups"], :subscription_groups_inclusion]]
+           end
+
+           virtual do |data, errors|
+             next unless errors.empty?
+
+             value = data.fetch(:"#{action}_topics")
+
+             # If there were no topics declared in the server cli, it means that we will
+             # run all of them and no need to validate them here at all
+             next if value.empty?
+
+             topics = Karafka::App
+                      .consumer_groups
+                      .map(&:subscription_groups)
+                      .flatten
+                      .map(&:topics)
+                      .map { |gtopics| gtopics.map(&:name) }
+                      .flatten
+
+             next if (value - topics).empty?
+
+             # If there are any patterns defined, we cannot report on topics inclusions because
+             # topics may be added during boot or runtime. We go with simple assumption:
+             # if there are patterns defined, we do not check the inclusions at all
+             next unless Karafka::App.consumer_groups.map(&:patterns).flatten.empty?
+
+             # Found unknown topics
+             [[[:"#{action}_topics"], :topics_inclusion]]
+           end
+         end
+
+         # Makes sure we have anything to subscribe to when we start the server
+         virtual do |_, errors|
+           next unless errors.empty?
+
+           next unless Karafka::App.subscription_groups.empty?
+
+           [[%i[include_topics], :topics_missing]]
+         end
+       end
+     end
+   end
+ end
data/lib/karafka/pro/loader.rb CHANGED
@@ -66,7 +66,8 @@ module Karafka
 
      private
 
-     # @return [Array<Module>] extra non-routing related pro features
+     # @return [Array<Module>] extra non-routing related pro features and routing components
+     #   that need to have some special configuration stuff injected into config, etc.
      def features
        [
          Encryption,
@@ -79,6 +80,8 @@ module Karafka
      def reconfigure(config)
        icfg = config.internal
 
+       icfg.cli.contract = Contracts::ServerCliOptions.new
+
        icfg.processing.coordinator_class = Processing::Coordinator
        icfg.processing.partitioner_class = Processing::Partitioner
        icfg.processing.scheduler = Processing::Scheduler.new
data/lib/karafka/pro/routing/features/active_job/builder.rb ADDED
@@ -0,0 +1,45 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module Routing
+       module Features
+         class ActiveJob < Base
+           # Pro ActiveJob builder expansions
+           module Builder
+             # This method simplifies routes definition for ActiveJob patterns / queues by
+             # auto-injecting the consumer class and other things needed
+             #
+             # @param regexp_or_name [String, Symbol, Regexp] pattern name or regexp to use
+             #   auto-generated regexp names
+             # @param regexp [Regexp, nil] activejob regexp pattern or nil when regexp is provided
+             #   as the first argument
+             # @param block [Proc] block that we can use for some extra configuration
+             def active_job_pattern(regexp_or_name, regexp = nil, &block)
+               pattern(regexp_or_name, regexp) do
+                 consumer App.config.internal.active_job.consumer_class
+                 active_job true
+                 manual_offset_management true
+
+                 next unless block
+
+                 instance_eval(&block)
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
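
A hypothetical usage sketch for the `active_job_pattern` helper defined above (the regexp and the extra setting are placeholders): it delegates to `pattern`, wires in the ActiveJob consumer class, and enables `active_job` and `manual_offset_management` before evaluating the optional block.

```ruby
# Hypothetical routing sketch using the Pro active_job_pattern helper shown above.
# The regexp is a placeholder for whichever ActiveJob queue topics should be matched.
class KarafkaApp < Karafka::App
  routes.draw do
    active_job_pattern(/^jobs_/) do
      # Optional extra per-topic configuration, evaluated via instance_eval
      max_messages 50
    end
  end
end
```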
data/lib/karafka/pro/routing/features/active_job.rb ADDED
@@ -0,0 +1,26 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module Routing
+       module Features
+         # Small extension to make ActiveJob work with pattern matching.
+         # Since our `#active_job_topic` is just a topic wrapper, we can introduce a similar
+         # `#active_job_pattern` to align with pattern building
+         class ActiveJob < Base
+         end
+       end
+     end
+   end
+ end
data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb ADDED
@@ -0,0 +1,53 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module Routing
+       module Features
+         class DeadLetterQueue < Base
+           # Namespace for DLQ contracts
+           module Contracts
+             # Extended rules for dead letter queue settings
+             class Topic < Karafka::Contracts::Base
+               configure do |config|
+                 config.error_messages = YAML.safe_load(
+                   File.read(
+                     File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
+                   )
+                 ).fetch('en').fetch('validations').fetch('topic')
+               end
+
+               # Make sure that when we use virtual partitions with DLQ, at least one retry is set.
+               # We cannot use VP with DLQ without retries because, in order to provide ordering
+               # warranties on errors with VP, we need to collapse the VPs concurrency and retry
+               # without any indeterministic work
+               virtual do |data, errors|
+                 next unless errors.empty?
+
+                 dead_letter_queue = data[:dead_letter_queue]
+                 virtual_partitions = data[:virtual_partitions]
+
+                 next unless dead_letter_queue[:active]
+                 next unless virtual_partitions[:active]
+                 next if dead_letter_queue[:max_retries].positive?
+
+                 [[%i[dead_letter_queue], :with_virtual_partitions]]
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
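
The virtual check above encodes one rule: when virtual partitions and the dead letter queue are both active on a topic, `max_retries` must be positive so processing can collapse and retry in order before DLQ dispatch. A routing sketch that satisfies it (consumer, topic names, and partitioner are placeholders):

```ruby
# Sketch of a topic definition that passes the contract above: virtual partitions
# combined with a DLQ requires at least one retry. All names are placeholders.
class KarafkaApp < Karafka::App
  routes.draw do
    topic :orders_states do
      consumer OrdersStatesConsumer

      virtual_partitions(
        partitioner: ->(message) { message.key }
      )

      # VP + DLQ requires a positive max_retries per the contract
      dead_letter_queue(
        topic: 'orders_states_dlq',
        max_retries: 3
      )
    end
  end
end
```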
data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb ADDED
@@ -0,0 +1,41 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module Routing
+       module Features
+         class Delaying < Base
+           # Namespace for delaying feature contracts
+           module Contracts
+             # Contract to validate configuration of the delaying feature
+             class Topic < Karafka::Contracts::Base
+               configure do |config|
+                 config.error_messages = YAML.safe_load(
+                   File.read(
+                     File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
+                   )
+                 ).fetch('en').fetch('validations').fetch('topic')
+               end
+
+               nested(:delaying) do
+                 required(:active) { |val| [true, false].include?(val) }
+                 required(:delay) { |val| val.nil? || (val.is_a?(Integer) && val.positive?) }
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb ADDED
@@ -0,0 +1,41 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module Routing
+       module Features
+         class Expiring < Base
+           # Namespace for expiring messages contracts
+           module Contracts
+             # Contract to validate configuration of the expiring feature
+             class Topic < Karafka::Contracts::Base
+               configure do |config|
+                 config.error_messages = YAML.safe_load(
+                   File.read(
+                     File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
+                   )
+                 ).fetch('en').fetch('validations').fetch('topic')
+               end
+
+               nested(:expiring) do
+                 required(:active) { |val| [true, false].include?(val) }
+                 required(:ttl) { |val| val.nil? || (val.is_a?(Integer) && val.positive?) }
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
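
For context, the delaying and expiring settings validated by these two contracts are normally set through the topic-level routing API. The `delay_by` and `expire_in` helpers below are assumptions based on the Pro routing DSL and are not part of this diff; per the contracts above, both settings must be positive integers (milliseconds).

```ruby
# Hedged routing sketch for the delaying and expiring settings validated above.
# delay_by / expire_in are assumed Pro DSL helpers (not shown in this diff);
# consumer and topic names are placeholders.
class KarafkaApp < Karafka::App
  routes.draw do
    topic :notifications do
      consumer NotificationsConsumer
      delay_by(60_000)   # do not process messages younger than one minute
      expire_in(300_000) # skip messages older than five minutes
    end
  end
end
```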