karafka 2.0.21 → 2.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +6 -0
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +21 -0
  6. data/Gemfile.lock +7 -8
  7. data/README.md +1 -1
  8. data/bin/integrations +2 -2
  9. data/bin/rspecs +2 -0
  10. data/config/errors.yml +5 -2
  11. data/karafka.gemspec +2 -3
  12. data/lib/karafka/admin.rb +84 -4
  13. data/lib/karafka/app.rb +12 -2
  14. data/lib/karafka/base_consumer.rb +8 -11
  15. data/lib/karafka/cli/info.rb +2 -1
  16. data/lib/karafka/cli/server.rb +7 -6
  17. data/lib/karafka/connection/client.rb +4 -4
  18. data/lib/karafka/contracts/server_cli_options.rb +60 -3
  19. data/lib/karafka/contracts/topic.rb +1 -1
  20. data/lib/karafka/licenser.rb +53 -50
  21. data/lib/karafka/pro/loader.rb +1 -3
  22. data/lib/karafka/pro/processing/strategies/aj_dlq_mom.rb +2 -2
  23. data/lib/karafka/pro/processing/strategies/aj_lrj_mom_vp.rb +2 -2
  24. data/lib/karafka/pro/processing/strategies/aj_mom_vp.rb +1 -1
  25. data/lib/karafka/pro/processing/strategies/default.rb +31 -1
  26. data/lib/karafka/pro/processing/strategies/dlq.rb +4 -2
  27. data/lib/karafka/pro/processing/strategies/dlq_lrj.rb +3 -1
  28. data/lib/karafka/pro/processing/strategies/dlq_lrj_mom.rb +1 -1
  29. data/lib/karafka/pro/processing/strategies/dlq_mom.rb +2 -2
  30. data/lib/karafka/pro/processing/strategies/lrj.rb +4 -2
  31. data/lib/karafka/pro/processing/strategies/lrj_mom.rb +2 -2
  32. data/lib/karafka/pro/processing/strategies/mom.rb +1 -1
  33. data/lib/karafka/processing/coordinator.rb +15 -0
  34. data/lib/karafka/processing/jobs_queue.rb +1 -1
  35. data/lib/karafka/processing/strategies/aj_dlq_mom.rb +2 -2
  36. data/lib/karafka/processing/strategies/base.rb +5 -0
  37. data/lib/karafka/processing/strategies/default.rb +26 -1
  38. data/lib/karafka/processing/strategies/dlq.rb +4 -2
  39. data/lib/karafka/processing/strategies/dlq_mom.rb +2 -2
  40. data/lib/karafka/processing/strategies/mom.rb +1 -1
  41. data/lib/karafka/railtie.rb +3 -0
  42. data/lib/karafka/routing/builder.rb +1 -1
  43. data/lib/karafka/routing/consumer_group.rb +6 -3
  44. data/lib/karafka/routing/subscription_group.rb +11 -2
  45. data/lib/karafka/routing/topic.rb +8 -0
  46. data/lib/karafka/routing/topics.rb +8 -0
  47. data/lib/karafka/server.rb +11 -10
  48. data/lib/karafka/setup/config.rb +15 -11
  49. data/lib/karafka/version.rb +1 -1
  50. data/lib/karafka.rb +9 -0
  51. data.tar.gz.sig +0 -0
  52. metadata +7 -22
  53. metadata.gz.sig +0 -0
  54. data/lib/karafka/instrumentation.rb +0 -21
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 9de6cb2d19b24258eb60548b383e664dc035f1f48ffc7077ca5f238b3a85aa3e
- data.tar.gz: 706d4e81a74b1aad9d1f9b13a67d168a7ef3c45088b89ec710d29abb3a767f11
+ metadata.gz: 84d8130c528081b283889f9f1ebe89b8829b800a2c5d1f4ca99f6d6ce9b4c9df
+ data.tar.gz: 3cc30f65586226bcb6d8ed4fdac912a72c1a4a7eb5691862b378629d9de1347f
  SHA512:
- metadata.gz: a22be6ff489f9f7cce39e0e4d6d374a70680124842afeeb962e33eaa8c6cd3db8cbcd6f3251543653c98777d025b6d9b0385f4580d92fd4b807e835a68296a43
- data.tar.gz: 466d9723427771594701071e5a27a3a1a53b6b4efce9ffa6b241f5261c9a887136b2a8a4a90b35e216b2af01539336bde5377f4edc1ab99837087dd751b830a8
+ metadata.gz: 8e1f3fc0a3c73035fdb38093eb0594c999d0a715752f41ed95079b1d8f2e89a5a0fa0abdf054272891e89ffa4dd8d010dd2225540d4480067d8bc0359bf2b7b8
+ data.tar.gz: 7c1f3f958ef52682e46efb11e8a3ddcbe641ba85c62956fefb84ec6eacadbe9d28ee509bdfd61fcd73ea8f52b1ab867de4909990503024cc5c603788931264ca
checksums.yaml.gz.sig CHANGED
Binary file
data/.github/workflows/ci.yml CHANGED
@@ -101,6 +101,9 @@ jobs:
  - name: Install package dependencies
  run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"

+ - name: Remove libzstd-dev to check no supported compressions
+ run: sudo apt-get -y remove libzstd-dev
+
  - name: Start Kafka with docker-compose
  run: |
  docker-compose up -d
@@ -124,5 +127,8 @@ jobs:
  - name: Run integration tests
  env:
  KARAFKA_PRO_LICENSE_TOKEN: ${{ secrets.KARAFKA_PRO_LICENSE_TOKEN }}
+ KARAFKA_PRO_USERNAME: ${{ secrets.KARAFKA_PRO_USERNAME }}
+ KARAFKA_PRO_PASSWORD: ${{ secrets.KARAFKA_PRO_PASSWORD }}
+ KARAFKA_PRO_VERSION: ${{ secrets.KARAFKA_PRO_VERSION }}
  GITHUB_COVERAGE: ${{matrix.coverage}}
  run: bin/integrations
data/.ruby-version CHANGED
@@ -1 +1 @@
- 3.1.2
+ 3.1.3
data/CHANGELOG.md CHANGED
@@ -1,5 +1,26 @@
  # Karafka framework changelog

+ ## 2.0.23 (2022-12-07)
+ - [Maintenance] Align with `waterdrop` and `karafka-core`
+ - [Improvement] Provide `Admin#read_topic` API to get topic data without subscribing.
+ - [Improvement] Upon an end user `#pause`, do not commit the offset in automatic offset management mode. This prevents a scenario where a pause is needed, but a rebalance occurs during it and a newly assigned process starts not from the pause location but from an automatic offset that may differ. Using `#mark_as_consumed` is still allowed.
+ - [Fix] Fix a scenario where a manual `#pause` would be overwritten by a resume initiated by the strategy.
+ - [Fix] Fix a scenario where a manual `#pause` in LRJ would cause an infinite pause.
+
+ ## 2.0.22 (2022-12-02)
+ - [Improvement] Load Pro components upon Karafka require so they can be altered prior to setup.
+ - [Improvement] Do not run LRJ jobs that were added to the jobs queue but were revoked in the meantime.
+ - [Improvement] Allow running particular named subscription groups, similar to consumer groups.
+ - [Improvement] Allow running particular topics, similar to consumer groups.
+ - [Improvement] Raise a configuration error when trying to run Karafka with options that lead to no subscriptions.
+ - [Fix] Fix `karafka info` subscription groups count reporting, as it was misleading.
+ - [Fix] Allow defining subscription groups with symbols, similar to consumer groups and topics, to align the API.
+ - [Fix] Do not allow an explicit `nil` as a `subscription_group` block argument.
+ - [Fix] Fix instability in subscription group static member ids when using the `--consumer_groups` CLI flag.
+ - [Fix] Fix a case in routing where an anonymous subscription group could not be used inside a consumer group.
+ - [Fix] Fix a case where a shutdown prior to the listeners build would crash the server initialization.
+ - [Fix] Fix duplicated logs in the development environment for Rails when the logger is set to `$stdout`.

  ## 2.0.21 (2022-11-25)
  - [Improvement] Make revocation jobs for LRJ topics non-blocking to prevent blocking polling when someone uses non-revocation-aware LRJ jobs and revocation happens.
data/Gemfile.lock CHANGED
@@ -1,11 +1,10 @@
  PATH
  remote: .
  specs:
- karafka (2.0.21)
- karafka-core (>= 2.0.4, < 3.0.0)
- rdkafka (>= 0.12)
+ karafka (2.0.23)
+ karafka-core (>= 2.0.6, < 3.0.0)
  thor (>= 0.20)
- waterdrop (>= 2.4.1, < 3.0.0)
+ waterdrop (>= 2.4.3, < 3.0.0)
  zeitwerk (~> 2.3)

  GEM
@@ -30,8 +29,9 @@ GEM
  activesupport (>= 5.0)
  i18n (1.12.0)
  concurrent-ruby (~> 1.0)
- karafka-core (2.0.4)
+ karafka-core (2.0.6)
  concurrent-ruby (>= 1.1)
+ rdkafka (>= 0.12)
  mini_portile2 (2.8.0)
  minitest (5.16.3)
  rake (13.0.6)
@@ -61,9 +61,8 @@ GEM
  thor (1.2.1)
  tzinfo (2.0.5)
  concurrent-ruby (~> 1.0)
- waterdrop (2.4.2)
- karafka-core (>= 2.0.2, < 3.0.0)
- rdkafka (>= 0.10)
+ waterdrop (2.4.3)
+ karafka-core (>= 2.0.6, < 3.0.0)
  zeitwerk (~> 2.3)
  zeitwerk (2.6.6)
data/README.md CHANGED
@@ -13,7 +13,7 @@ Karafka is a Ruby and Rails multi-threaded efficient Kafka processing framework
  - Supports parallel processing in [multiple threads](https://karafka.io/docs/Concurrency-and-multithreading) (also for a [single topic partition](https://karafka.io/docs/Pro-Virtual-Partitions) work)
  - [Automatically integrates](https://karafka.io/docs/Integrating-with-Ruby-on-Rails-and-other-frameworks#integrating-with-ruby-on-rails) with Ruby on Rails
  - Has [ActiveJob backend](https://karafka.io/docs/Active-Job) support (including [ordered jobs](https://karafka.io/docs/Pro-Enhanced-Active-Job#ordered-jobs))
- - Has a seamless [Dead Letter Queue](karafka.io/docs/Dead-Letter-Queue/) functionality built-in
+ - Has a seamless [Dead Letter Queue](https://karafka.io/docs/Dead-Letter-Queue/) functionality built-in
  - Supports in-development [code reloading](https://karafka.io/docs/Auto-reload-of-code-changes-in-development)
  - Is powered by [librdkafka](https://github.com/edenhill/librdkafka) (the Apache Kafka C/C++ client library)
  - Has an out-of the box [StatsD/DataDog monitoring](https://karafka.io/docs/Monitoring-and-logging) with a dashboard template.
data/bin/integrations CHANGED
@@ -74,8 +74,8 @@ class Scenario
  def type
  scenario_dir = File.dirname(@path)

- return :poro if scenario_dir.end_with?('_poro')
- return :pristine if scenario_dir.end_with?('_pristine')
+ return :poro if scenario_dir.include?('_poro')
+ return :pristine if scenario_dir.include?('_pristine')

  :regular
  end
data/bin/rspecs CHANGED
@@ -1,4 +1,6 @@
  #!/usr/bin/env bash

+ set -e
+
  SPECS_TYPE=regular bundle exec rspec --tag ~type:pro
  SPECS_TYPE=pro bundle exec rspec --tag type:pro
data/config/errors.yml CHANGED
@@ -25,7 +25,10 @@ en:

  server_cli_options:
  missing: needs to be present
- consumer_groups_inclusion: Unknown consumer group
+ consumer_groups_inclusion: Unknown consumer group name
+ subscription_groups_inclusion: Unknown subscription group name
+ topics_inclusion: Unknown topic name
+ topics_missing: No topics to subscribe to

  topic:
  missing: needs to be present
@@ -34,7 +37,7 @@ en:
  consumer_format: needs to be present
  id_format: 'needs to be a string with a Kafka accepted format'
  initial_offset_format: needs to be either earliest or latest
- subscription_group_format: must be nil or a non-empty string
+ subscription_group_format: must be a non-empty string
  manual_offset_management.active_format: needs to be either true or false
  consumer_active_job_missing: ActiveJob needs to be available
  manual_offset_management_must_be_enabled: cannot be disabled for ActiveJob topics
data/karafka.gemspec CHANGED
@@ -21,10 +21,9 @@ Gem::Specification.new do |spec|
  without having to focus on things that are not your business domain.
  DESC

- spec.add_dependency 'karafka-core', '>= 2.0.4', '< 3.0.0'
- spec.add_dependency 'rdkafka', '>= 0.12'
+ spec.add_dependency 'karafka-core', '>= 2.0.6', '< 3.0.0'
  spec.add_dependency 'thor', '>= 0.20'
- spec.add_dependency 'waterdrop', '>= 2.4.1', '< 3.0.0'
+ spec.add_dependency 'waterdrop', '>= 2.4.3', '< 3.0.0'
  spec.add_dependency 'zeitwerk', '~> 2.3'

  spec.required_ruby_version = '>= 2.7.0'
data/lib/karafka/admin.rb CHANGED
@@ -9,7 +9,70 @@ module Karafka
  # @note It always uses the primary defined cluster and does not support multi-cluster work.
  # If you need this, just replace the cluster info for the time you use this
  module Admin
+ # A fake admin topic representation that we use for messages fetched using this API
+ # We cannot use the topics directly because we may want to request data from topics that we
+ # do not have in the routing
+ Topic = Struct.new(:name, :deserializer)
+
+ # Defaults for config
+ CONFIG_DEFAULTS = {
+ 'group.id': 'karafka_admin',
+ # We want to know when there is no more data not to end up with an endless loop
+ 'enable.partition.eof': true,
+ 'statistics.interval.ms': 0
+ }.freeze
+
+ private_constant :Topic, :CONFIG_DEFAULTS
+
  class << self
+ # Allows us to read messages from the topic
+ #
+ # @param name [String, Symbol] topic name
+ # @param partition [Integer] partition
+ # @param count [Integer] how many messages we want to get at most
+ # @param offset [Integer] offset from which we should start. If -1 is provided (default) we
+ # will start from the latest offset
+ #
+ # @return [Array<Karafka::Messages::Message>] array with messages
+ def read_topic(name, partition, count, offset = -1)
+ messages = []
+ tpl = Rdkafka::Consumer::TopicPartitionList.new
+
+ with_consumer do |consumer|
+ if offset.negative?
+ offsets = consumer.query_watermark_offsets(name, partition)
+ offset = offsets.last - count
+ end
+
+ offset = offset.negative? ? 0 : offset
+
+ tpl.add_topic_and_partitions_with_offsets(name, partition => offset)
+ consumer.assign(tpl)
+
+ # We should poll as long as we don't have all the messages that we need or as long as
+ # we do not read all the messages from the topic
+ loop do
+ break if messages.size >= count
+
+ message = consumer.poll(200)
+ messages << message if message
+ rescue Rdkafka::RdkafkaError => e
+ # End of partition
+ break if e.code == :partition_eof
+
+ raise e
+ end
+ end
+
+ messages.map do |message|
+ Messages::Builders::Message.call(
+ message,
+ Topic.new(name, Karafka::App.config.deserializer),
+ Time.now
+ )
+ end
+ end
+
  # Creates Kafka topic with given settings
  #
  # @param name [String] topic name
@@ -52,15 +115,32 @@ module Karafka

  # Creates admin instance and yields it. After usage it closes the admin instance
  def with_admin
- # Admin needs a producer config
- config = Karafka::Setup::AttributesMap.producer(Karafka::App.config.kafka.dup)
-
- admin = ::Rdkafka::Config.new(config).admin
+ admin = config(:producer).admin
  result = yield(admin)
  result
  ensure
  admin&.close
  end
+
+ # Creates consumer instance and yields it. After usage it closes the consumer instance
+ def with_consumer
+ consumer = config(:consumer).consumer
+ result = yield(consumer)
+ result
+ ensure
+ consumer&.close
+ end
+
+ # @param type [Symbol] type of config we want
+ # @return [::Rdkafka::Config] rdkafka config
+ def config(type)
+ config_hash = Karafka::Setup::AttributesMap.public_send(
+ type,
+ Karafka::App.config.kafka.dup.merge(CONFIG_DEFAULTS)
+ )
+
+ ::Rdkafka::Config.new(config_hash)
+ end
  end
  end
  end
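Note: a minimal usage sketch of the `Admin#read_topic` API added above (topic name, partition, and offsets are illustrative):

    # Fetch at most the last 10 messages from partition 0 of the 'events'
    # topic, without defining a route or subscribing to it
    messages = Karafka::Admin.read_topic('events', 0, 10)

    messages.each { |message| puts "#{message.offset}: #{message.payload}" }

    # An explicit start offset may be passed as the fourth argument
    Karafka::Admin.read_topic('events', 0, 10, 100)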
data/lib/karafka/app.rb CHANGED
@@ -16,9 +16,19 @@ module Karafka

  # @return [Hash] active subscription groups grouped based on consumer group in a hash
  def subscription_groups
+ # We first build all the subscription groups, so they all get the same position, despite
+ # later narrowing that. It allows us to maintain same position number for static members
+ # even when we want to run subset of consumer groups or subscription groups
+ #
+ # We then narrow this to active consumer groups from which we select active subscription
+ # groups.
  consumer_groups
- .active
- .map { |consumer_group| [consumer_group, consumer_group.subscription_groups] }
+ .map { |cg| [cg, cg.subscription_groups] }
+ .select { |cg, _| cg.active? }
+ .select { |_, sgs| sgs.delete_if { |sg| !sg.active? } }
+ .delete_if { |_, sgs| sgs.empty? }
+ .each { |_, sgs| sgs.each { |sg| sg.topics.delete_if { |top| !top.active? } } }
+ .each { |_, sgs| sgs.delete_if { |sg| sg.topics.empty? } }
  .to_h
  end
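Note: per the doc comment above, the method returns a hash keyed by consumer group, so callers iterate pairs; a small sketch (the printed names depend on the application routing):

    Karafka::App.subscription_groups.each do |consumer_group, subscription_groups|
      puts "#{consumer_group.name}: #{subscription_groups.map(&:name).join(', ')}"
    end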
data/lib/karafka/base_consumer.rb CHANGED
@@ -62,14 +62,8 @@ module Karafka
  # that may not yet kick in when error occurs. That way we pause always on the last processed
  # message.
  def on_consume
- Karafka.monitor.instrument('consumer.consumed', caller: self) do
- consume
- end
-
- coordinator.consumption(self).success!
+ handle_consume
  rescue StandardError => e
- coordinator.consumption(self).failure!(e)
-
  Karafka.monitor.instrument(
  'error.occurred',
  error: e,
@@ -77,9 +71,6 @@ module Karafka
  seek_offset: coordinator.seek_offset,
  type: 'consumer.consume.error'
  )
- ensure
- # We need to decrease number of jobs that this coordinator coordinates as it has finished
- coordinator.decrement
  end

  # @private
@@ -199,7 +190,10 @@ module Karafka
  # @param offset [Integer] offset from which we want to restart the processing
  # @param timeout [Integer, nil] how long in milliseconds do we want to pause or nil to use the
  # default exponential pausing strategy defined for retries
- def pause(offset, timeout = nil)
+ # @param manual_pause [Boolean] Flag to differentiate between user pause and system/strategy
+ # based pause. While they both pause in exactly the same way, the strategy application
+ # may need to differentiate between them.
+ def pause(offset, timeout = nil, manual_pause = true)
  timeout ? coordinator.pause_tracker.pause(timeout) : coordinator.pause_tracker.pause

  client.pause(
@@ -207,6 +201,9 @@ module Karafka
  messages.metadata.partition,
  offset
  )
+
+ # Indicate, that user took a manual action of pausing
+ coordinator.manual_pause if manual_pause
  end

  # Resumes processing of the current topic partition
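Note: combined with the 2.0.23 changelog entries, a manual pause issued from a consumer now survives strategy-initiated resumes and does not commit offsets under automatic offset management; a sketch (consumer class and timeout are illustrative):

    class OrdersConsumer < Karafka::BaseConsumer
      def consume
        messages.each { |message| puts message.payload }

        # Pause this topic partition for 5 seconds and resume from the
        # offset right after the last processed message
        pause(messages.last.offset + 1, 5_000)
      end
    end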
data/lib/karafka/cli/info.rb CHANGED
@@ -37,7 +37,8 @@ module Karafka
  "Karafka version: #{Karafka::VERSION}#{postfix}",
  "Ruby version: #{RUBY_DESCRIPTION}",
  "Rdkafka version: #{::Rdkafka::VERSION}",
- "Subscription groups count: #{Karafka::App.subscription_groups.size}",
+ "Consumer groups count: #{Karafka::App.consumer_groups.size}",
+ "Subscription groups count: #{Karafka::App.subscription_groups.values.flatten.size}",
  "Workers count: #{Karafka::App.config.concurrency}",
  "Application client id: #{config.client_id}",
  "Boot file: #{Karafka.boot_file}",
data/lib/karafka/cli/server.rb CHANGED
@@ -9,18 +9,19 @@ module Karafka

  desc 'Start the Karafka server (short-cut alias: "s")'
  option aliases: 's'
- option :consumer_groups, type: :array, default: nil, aliases: :g
+ option :consumer_groups, type: :array, default: [], aliases: :g
+ option :subscription_groups, type: :array, default: []
+ option :topics, type: :array, default: []

  # Start the Karafka server
  def call
  # Print our banner and info in the dev mode
  print_marketing_info if Karafka::App.env.development?

- Contracts::ServerCliOptions.new.validate!(cli.options)
-
- # We assign active topics on a server level, as only server is expected to listen on
- # part of the topics
- Karafka::Server.consumer_groups = cli.options[:consumer_groups]
+ active_routing_config = Karafka::App.config.internal.routing.active
+ active_routing_config.consumer_groups = cli.options[:consumer_groups]
+ active_routing_config.subscription_groups = cli.options[:subscription_groups]
+ active_routing_config.topics = cli.options[:topics]

  Karafka::Server.run
  end
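For reference, hypothetical invocations of the flags defined above (group and topic names are illustrative):

    bundle exec karafka server --consumer_groups group_a
    bundle exec karafka server --subscription_groups urgent_sg
    bundle exec karafka server --topics orders payments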
data/lib/karafka/connection/client.rb CHANGED
@@ -308,8 +308,8 @@ module Karafka
  @closed = true

  # Remove callbacks runners that were registered
- ::Karafka::Instrumentation.statistics_callbacks.delete(@subscription_group.id)
- ::Karafka::Instrumentation.error_callbacks.delete(@subscription_group.id)
+ ::Karafka::Core::Instrumentation.statistics_callbacks.delete(@subscription_group.id)
+ ::Karafka::Core::Instrumentation.error_callbacks.delete(@subscription_group.id)

  @kafka.close
  @buffer.clear
@@ -397,7 +397,7 @@ module Karafka
  @name = consumer.name

  # Register statistics runner for this particular type of callbacks
- ::Karafka::Instrumentation.statistics_callbacks.add(
+ ::Karafka::Core::Instrumentation.statistics_callbacks.add(
  @subscription_group.id,
  Instrumentation::Callbacks::Statistics.new(
  @subscription_group.id,
@@ -408,7 +408,7 @@ module Karafka
  )

  # Register error tracking callback
- ::Karafka::Instrumentation.error_callbacks.add(
+ ::Karafka::Core::Instrumentation.error_callbacks.add(
  @subscription_group.id,
  Instrumentation::Callbacks::Error.new(
  @subscription_group.id,
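Note: since the callback registries now live in karafka-core, custom registrations should target the new namespace; a sketch under that assumption (the id and handler are illustrative, and the handler receives the emitted statistics payload):

    ::Karafka::Core::Instrumentation.statistics_callbacks.add(
      'my-custom-listener',
      ->(statistics) { puts statistics }
    )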
data/lib/karafka/contracts/server_cli_options.rb CHANGED
@@ -12,7 +12,9 @@ module Karafka
  ).fetch('en').fetch('validations').fetch('server_cli_options')
  end

- optional(:consumer_groups) { |cg| cg.is_a?(Array) && !cg.empty? }
+ optional(:consumer_groups) { |cg| cg.is_a?(Array) }
+ optional(:subscription_groups) { |sg| sg.is_a?(Array) }
+ optional(:topics) { |topics| topics.is_a?(Array) }

  virtual do |data, errors|
  next unless errors.empty?
@@ -22,11 +24,66 @@

  # If there were no consumer_groups declared in the server cli, it means that we will
  # run all of them and no need to validate them here at all
- next if value.nil?
- next if (value - Karafka::App.config.internal.routing.builder.map(&:name)).empty?
+ next if value.empty?
+ next if (value - Karafka::App.consumer_groups.map(&:name)).empty?

+ # Found unknown consumer groups
  [[%i[consumer_groups], :consumer_groups_inclusion]]
  end
+
+ virtual do |data, errors|
+ next unless errors.empty?
+ next unless data.key?(:subscription_groups)
+
+ value = data.fetch(:subscription_groups)
+
+ # If there were no subscription_groups declared in the server cli, it means that we will
+ # run all of them and no need to validate them here at all
+ next if value.empty?
+
+ subscription_groups = Karafka::App
+ .consumer_groups
+ .map(&:subscription_groups)
+ .flatten
+ .map(&:name)
+
+ next if (value - subscription_groups).empty?
+
+ # Found unknown subscription groups
+ [[%i[subscription_groups], :subscription_groups_inclusion]]
+ end
+
+ virtual do |data, errors|
+ next unless errors.empty?
+ next unless data.key?(:topics)
+
+ value = data.fetch(:topics)
+
+ # If there were no topics declared in the server cli, it means that we will
+ # run all of them and no need to validate them here at all
+ next if value.empty?
+
+ topics = Karafka::App
+ .consumer_groups
+ .map(&:subscription_groups)
+ .flatten
+ .map(&:topics)
+ .map { |gtopics| gtopics.map(&:name) }
+ .flatten
+
+ next if (value - topics).empty?
+
+ # Found unknown topics
+ [[%i[topics], :topics_inclusion]]
+ end
+
+ # Makes sure we have anything to subscribe to when we start the server
+ virtual do |_, errors|
+ next unless errors.empty?
+ next unless Karafka::App.subscription_groups.empty?
+
+ [[%i[topics], :topics_missing]]
+ end
  end
  end
  end
data/lib/karafka/contracts/topic.rb CHANGED
@@ -20,7 +20,7 @@ module Karafka
  required(:initial_offset) { |val| %w[earliest latest].include?(val) }
  required(:max_wait_time) { |val| val.is_a?(Integer) && val >= 10 }
  required(:name) { |val| val.is_a?(String) && Contracts::TOPIC_REGEXP.match?(val) }
- required(:subscription_group) { |val| val.nil? || (val.is_a?(String) && !val.empty?) }
+ required(:subscription_group) { |val| val.is_a?(String) && !val.empty? }

  virtual do |data, errors|
  next unless errors.empty?
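Note: with the tightened contract above, a subscription group name must be a non-empty string by the time validation runs; in the routing DSL it may be given as a string or symbol, but not as an explicit nil. A sketch (group, topic, and consumer names are illustrative):

    Karafka::App.routes.draw do
      subscription_group :maintenance do
        topic :cleanups do
          consumer CleanupsConsumer
        end
      end
    end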
data/lib/karafka/licenser.rb CHANGED
@@ -8,68 +8,71 @@ module Karafka

  private_constant :PUBLIC_KEY_LOCATION

- # Tries to prepare license and verifies it
- #
- # @param license_config [Karafka::Core::Configurable::Node] config related to the licensing
- def prepare_and_verify(license_config)
- prepare(license_config)
- verify(license_config)
- end
-
- private
+ class << self
+ # Tries to load the license and yields if successful
+ def detect
+ # If required, do not require again
+ require('karafka-license') unless const_defined?('::Karafka::License')

- # @param license_config [Karafka::Core::Configurable::Node] config related to the licensing
- def prepare(license_config)
- # If there is token, no action needed
- # We support a case where someone would put the token in instead of using one from the
- # license. That's in case there are limitations to using external package sources, etc
- return if license_config.token
+ yield

- begin
- license_config.token || require('karafka-license')
+ true
  rescue LoadError
- return
+ false
  end

- license_config.token = Karafka::License.token
- end
-
- # Check license and setup license details (if needed)
- # @param license_config [Karafka::Core::Configurable::Node] config related to the licensing
- def verify(license_config)
- # If no license, it will just run LGPL components without anything extra
- return unless license_config.token
+ # Tries to prepare license and verifies it
+ #
+ # @param license_config [Karafka::Core::Configurable::Node] config related to the licensing
+ def prepare_and_verify(license_config)
+ # If license is not loaded, nothing to do
+ return unless const_defined?('::Karafka::License')

- public_key = OpenSSL::PKey::RSA.new(File.read(PUBLIC_KEY_LOCATION))
+ prepare(license_config)
+ verify(license_config)
+ end

- # We gsub and strip in case someone copy-pasted it as a multi line string
- formatted_token = license_config.token.strip.delete("\n").delete(' ')
- decoded_token = Base64.decode64(formatted_token)
+ private

- begin
- data = public_key.public_decrypt(decoded_token)
- rescue OpenSSL::OpenSSLError
- data = nil
+ # @param license_config [Karafka::Core::Configurable::Node] config related to the licensing
+ def prepare(license_config)
+ license_config.token = Karafka::License.token
  end

- details = data ? JSON.parse(data) : raise_invalid_license_token(license_config)
+ # Check license and setup license details (if needed)
+ # @param license_config [Karafka::Core::Configurable::Node] config related to the licensing
+ def verify(license_config)
+ public_key = OpenSSL::PKey::RSA.new(File.read(PUBLIC_KEY_LOCATION))

- license_config.entity = details.fetch('entity')
- end
+ # We gsub and strip in case someone copy-pasted it as a multi line string
+ formatted_token = license_config.token.strip.delete("\n").delete(' ')
+ decoded_token = Base64.decode64(formatted_token)

- # Raises an error with info, that used token is invalid
- # @param license_config [Karafka::Core::Configurable::Node]
- def raise_invalid_license_token(license_config)
- # We set it to false so `Karafka.pro?` method behaves as expected
- license_config.token = false
-
- raise(
- Errors::InvalidLicenseTokenError,
- <<~MSG.tr("\n", ' ')
- License key you provided is invalid.
- Please reach us at contact@karafka.io or visit https://karafka.io to obtain a valid one.
- MSG
- )
+ begin
+ data = public_key.public_decrypt(decoded_token)
+ rescue OpenSSL::OpenSSLError
+ data = nil
+ end
+
+ details = data ? JSON.parse(data) : raise_invalid_license_token(license_config)
+
+ license_config.entity = details.fetch('entity')
+ end
+
+ # Raises an error with info, that used token is invalid
+ # @param license_config [Karafka::Core::Configurable::Node]
+ def raise_invalid_license_token(license_config)
+ # We set it to false so `Karafka.pro?` method behaves as expected
+ license_config.token = false
+
+ raise(
+ Errors::InvalidLicenseTokenError,
+ <<~MSG.tr("\n", ' ')
+ License key you provided is invalid.
+ Please reach us at contact@karafka.io or visit https://karafka.io to obtain a valid one.
+ MSG
+ )
+ end
  end
  end
  end
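Note: a sketch of the new detection flow introduced above; per the method body, the block runs only when the karafka-license gem can be required, and `detect` returns true on success and false on LoadError:

    Karafka::Licenser.detect do
      # Executed only when ::Karafka::License is available
    end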
data/lib/karafka/pro/loader.rb CHANGED
@@ -45,8 +45,6 @@ module Karafka
  # @param config [Karafka::Core::Configurable::Node] app config that we can alter with pro
  # components
  def setup(config)
- require_all
-
  reconfigure(config)

  load_topic_features
@@ -55,7 +53,7 @@ module Karafka
  private

  # Sets proper config options to use pro components
- # @param config [WaterDrop::Configurable::Node] root config node
+ # @param config [::Karafka::Core::Configurable::Node] root config node
  def reconfigure(config)
  icfg = config.internal