karafka 2.0.0.alpha1 → 2.0.0.alpha4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +3 -1
- data/CHANGELOG.md +14 -1
- data/CONTRIBUTING.md +6 -6
- data/Gemfile.lock +24 -24
- data/LICENSE +3 -0
- data/bin/integrations +44 -8
- data/bin/karafka +4 -0
- data/bin/stress +1 -1
- data/config/errors.yml +1 -0
- data/docker-compose.yml +1 -0
- data/karafka.gemspec +1 -1
- data/lib/active_job/karafka.rb +16 -13
- data/lib/active_job/queue_adapters/karafka_adapter.rb +3 -6
- data/lib/karafka/active_job/consumer.rb +24 -0
- data/lib/karafka/active_job/dispatcher.rb +38 -0
- data/lib/karafka/active_job/job_extensions.rb +34 -0
- data/lib/karafka/active_job/job_options_contract.rb +15 -0
- data/lib/karafka/active_job/routing_extensions.rb +18 -0
- data/lib/karafka/app.rb +1 -0
- data/lib/karafka/cli/info.rb +3 -3
- data/lib/karafka/cli/install.rb +1 -0
- data/lib/karafka/cli/server.rb +2 -16
- data/lib/karafka/contracts/base.rb +23 -0
- data/lib/karafka/contracts/config.rb +21 -3
- data/lib/karafka/contracts/consumer_group.rb +1 -3
- data/lib/karafka/contracts/consumer_group_topic.rb +2 -3
- data/lib/karafka/contracts/server_cli_options.rb +1 -3
- data/lib/karafka/errors.rb +4 -0
- data/lib/karafka/instrumentation/monitor.rb +1 -0
- data/lib/karafka/instrumentation/stdout_listener.rb +3 -0
- data/lib/karafka/licenser.rb +20 -9
- data/lib/karafka/messages/batch_metadata.rb +2 -0
- data/lib/karafka/messages/builders/batch_metadata.rb +23 -1
- data/lib/karafka/pro/active_job/dispatcher.rb +58 -0
- data/lib/karafka/pro/active_job/job_options_contract.rb +27 -0
- data/lib/karafka/pro/loader.rb +29 -0
- data/lib/karafka/pro.rb +13 -0
- data/lib/karafka/processing/worker.rb +1 -1
- data/lib/karafka/railtie.rb +55 -19
- data/lib/karafka/routing/builder.rb +1 -11
- data/lib/karafka/routing/subscription_group.rb +5 -5
- data/lib/karafka/routing/subscription_groups_builder.rb +1 -0
- data/lib/karafka/routing/topic.rb +1 -0
- data/lib/karafka/setup/config.rb +25 -20
- data/lib/karafka/status.rb +1 -0
- data/lib/karafka/templates/karafka.rb.erb +1 -1
- data/lib/karafka/version.rb +1 -1
- data/lib/karafka.rb +7 -2
- data.tar.gz.sig +0 -0
- metadata +14 -7
- metadata.gz.sig +0 -0
- data/.github/FUNDING.yml +0 -3
- data/lib/active_job/consumer.rb +0 -22
- data/lib/active_job/routing_extensions.rb +0 -15
data/lib/karafka/contracts/config.rb
CHANGED

@@ -8,9 +8,7 @@ module Karafka
     # `Karafka::Setup::Config` model, but we don't validate them here as they are
     # validated per each route (topic + consumer_group) because they can be overwritten,
     # so we validate all of that once all the routes are defined and ready.
-    class Config <
-      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
-
+    class Config < Base
       params do
         # License validity happens in the licenser. Here we do only the simple consistency checks
         required(:license).schema do
@@ -27,6 +25,26 @@ module Karafka
         required(:pause_max_timeout) { int? & gt?(0) }
         required(:pause_with_exponential_backoff).filled(:bool?)
         required(:shutdown_timeout) { int? & gt?(0) }
+        required(:kafka).filled(:hash)
+
+        # We validate internals just to be sure, that they are present and working
+        required(:internal).schema do
+          required(:routing_builder)
+          required(:status)
+          required(:process)
+          required(:subscription_groups_builder)
+        end
+      end
+
+      # rdkafka requires all the keys to be strings, so we ensure that
+      rule(:kafka) do
+        next unless value.is_a?(Hash)
+
+        value.each_key do |key|
+          next if key.is_a?(Symbol)
+
+          key(:"kafka.#{key}").failure(:kafka_key_must_be_a_symbol)
+        end
       end

       rule(:pause_timeout, :pause_max_timeout) do
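The new `rule(:kafka)` rejects string keys in the `kafka` scope, so configurations have to use symbol keys. A minimal sketch of a setup that passes the updated contract (broker address and client id mirror the values used in the template shipped with this release):

```ruby
Karafka::App.setup do |config|
  config.client_id = 'example_app'
  # Quoted-symbol keys satisfy the contract; plain string keys such as
  # 'bootstrap.servers' => '...' would now fail with kafka_key_must_be_a_symbol
  config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' }
end
```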
data/lib/karafka/contracts/consumer_group.rb
CHANGED

@@ -3,9 +3,7 @@
 module Karafka
   module Contracts
     # Contract for single full route (consumer group + topics) validation.
-    class ConsumerGroup <
-      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
-
+    class ConsumerGroup < Base
       # Internal contract for sub-validating topics schema
       TOPIC_CONTRACT = ConsumerGroupTopic.new.freeze

data/lib/karafka/contracts/consumer_group_topic.rb
CHANGED

@@ -3,15 +3,14 @@
 module Karafka
   module Contracts
     # Consumer group topic validation rules.
-    class ConsumerGroupTopic <
-      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
-
+    class ConsumerGroupTopic < Base
       params do
         required(:consumer).filled
         required(:deserializer).filled
         required(:id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
         required(:kafka).filled
         required(:max_messages) { int? & gteq?(1) }
+        required(:initial_offset).filled(included_in?: %w[earliest latest])
         required(:max_wait_time).filled { int? & gteq?(10) }
         required(:manual_offset_management).filled(:bool?)
         required(:name).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
data/lib/karafka/contracts/server_cli_options.rb
CHANGED

@@ -3,9 +3,7 @@
 module Karafka
   module Contracts
     # Contract for validating correctness of the server cli command options.
-    class ServerCliOptions <
-      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
-
+    class ServerCliOptions < Base
       params do
         optional(:consumer_groups).value(:array, :filled?)
       end
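All four contracts now inherit from a shared `Karafka::Contracts::Base` (added in this release as data/lib/karafka/contracts/base.rb but not shown in the hunks above), which absorbs the repeated `load_paths` boilerplate and provides the `validate!` call used later by the routing builder and the setup code. A rough, hypothetical reconstruction of such a base class, pieced together from the removed lines above and the raise-on-failure logic removed from data/lib/karafka/setup/config.rb; the shipped implementation may differ:

```ruby
module Karafka
  module Contracts
    # Hypothetical sketch -- the gem ships its own version of this class
    class Base < Dry::Validation::Contract
      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')

      # Validates the given data and raises when it does not match the contract
      # @param data [Hash] data to validate
      # @raise [Karafka::Errors::InvalidConfigurationError] on validation failure
      def validate!(data)
        result = call(data)
        return true if result.success?

        raise Errors::InvalidConfigurationError, result.errors.to_h
      end
    end
  end
end
```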
data/lib/karafka/errors.rb
CHANGED
@@ -43,5 +43,9 @@ module Karafka

     # Raised when the license token is not valid
     InvalidLicenseTokenError = Class.new(BaseError)
+
+    # Used to instrument this error into the error notifications
+    # We do not raise it so we won't crash deployed systems
+    ExpiredLicenseTokenError = Class.new(BaseError)
   end
 end
data/lib/karafka/instrumentation/stdout_listener.rb
CHANGED

@@ -88,6 +88,9 @@ module Karafka
       when 'connection.listener.fetch_loop.error'
         error "Listener fetch loop error: #{error}"
         error details
+      when 'licenser.expired'
+        error error
+        error details
       when 'runner.call.error'
         fatal "Runner crashed due to an error: #{error}"
         fatal details
data/lib/karafka/licenser.rb
CHANGED
@@ -26,7 +26,7 @@ module Karafka
         data = nil
       end

-      details = data ? JSON.parse(data) : raise_invalid_license_token
+      details = data ? JSON.parse(data) : raise_invalid_license_token(license_config)

       license_config.entity = details.fetch('entity')
       license_config.expires_on = Date.parse(details.fetch('expires_on'))
@@ -39,7 +39,11 @@ module Karafka
     private

     # Raises an error with info, that used token is invalid
-
+    # @param license_config [Dry::Configurable::Config]
+    def raise_invalid_license_token(license_config)
+      # We set it to false so `Karafka.pro?` method behaves as expected
+      license_config.token = false
+
       raise(
         Errors::InvalidLicenseTokenError,
         <<~MSG.tr("\n", ' ')
@@ -50,15 +54,22 @@
     end

     # We do not raise an error here as we don't want to cause any problems to someone that runs
-    # Karafka on production. Error is enough.
+    # Karafka on production. Error message is enough.
+    #
     # @param expires_on [Date] when the license expires
     def notify_if_license_expired(expires_on)
-
-
-
-
-
-
+      message = <<~MSG.tr("\n", ' ')
+        Your license expired on #{expires_on}.
+        Please reach us at contact@karafka.io or visit https://karafka.io to obtain a valid one.
+      MSG
+
+      Karafka.logger.error(message)
+
+      Karafka.monitor.instrument(
+        'error.occurred',
+        caller: self,
+        error: Errors::ExpiredLicenseTokenError.new(message),
+        type: 'licenser.expired'
       )
     end
   end
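Because the expired-license path only logs and instruments instead of raising, the event can be picked up through the regular `error.occurred` notification channel. A small sketch of hooking into it; the reporting call is a placeholder for whatever error tracker an application already uses:

```ruby
Karafka.monitor.subscribe('error.occurred') do |event|
  # The licenser publishes Errors::ExpiredLicenseTokenError with this type instead of raising
  next unless event[:type] == 'licenser.expired'

  # Forward to your own alerting; `ErrorTracker` is a hypothetical stand-in
  ErrorTracker.notify(event[:error])
end
```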
data/lib/karafka/messages/builders/batch_metadata.rb
CHANGED

@@ -12,7 +12,12 @@ module Karafka
           # @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
           # @param scheduled_at [Time] moment when the batch was scheduled for processing
           # @return [Karafka::Messages::BatchMetadata] batch metadata object
+          # @note Regarding the time lags: we can use the current time here, as batch metadata is
+          #   created in the worker. So whenever this is being built, it means that the processing
+          #   of this batch has already started.
           def call(kafka_batch, topic, scheduled_at)
+            now = Time.now
+
             Karafka::Messages::BatchMetadata.new(
               size: kafka_batch.count,
               first_offset: kafka_batch.first.offset,
@@ -20,9 +25,26 @@ module Karafka
               deserializer: topic.deserializer,
               partition: kafka_batch[0].partition,
               topic: topic.name,
-              scheduled_at: scheduled_at
+              scheduled_at: scheduled_at,
+              # This lag describes how long did it take for a message to be consumed from the
+              # moment it was created
+              consumption_lag: time_distance_in_ms(now, kafka_batch.last.timestamp),
+              # This lag describes how long did a batch have to wait before it was picked up by
+              # one of the workers
+              processing_lag: time_distance_in_ms(now, scheduled_at)
             ).freeze
           end
+
+          private
+
+          # Computes time distance in between two times in ms
+          #
+          # @param time1 [Time]
+          # @param time2 [Time]
+          # @return [Integer] distance in between two times in ms
+          def time_distance_in_ms(time1, time2)
+            ((time1 - time2) * 1_000).round
+          end
         end
       end
     end
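Both new lag values end up on the frozen batch metadata object (its attribute list grows accordingly in data/lib/karafka/messages/batch_metadata.rb), so consumers can report how far behind they run. A hedged usage sketch, assuming the metadata is reachable via `messages.metadata` inside a consumer and using an illustrative consumer class name:

```ruby
class EventsConsumer < Karafka::BaseConsumer
  def consume
    meta = messages.metadata

    # Both values are integers in milliseconds, per time_distance_in_ms above:
    # consumption_lag - message creation until now, processing_lag - time the batch
    # waited for a free worker
    Karafka.logger.info(
      "topic=#{meta.topic} consumption_lag=#{meta.consumption_lag}ms " \
      "processing_lag=#{meta.processing_lag}ms"
    )

    messages.each { |message| message.payload } # regular processing goes here
  end
end
```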
data/lib/karafka/pro/active_job/dispatcher.rb
ADDED

@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component.
+# All of the commercial components are present in the lib/karafka/pro directory of this repository
+# and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    # Karafka Pro ActiveJob components
+    module ActiveJob
+      # Pro dispatcher that sends the ActiveJob job to a proper topic based on the queue name
+      # and that allows to inject additional options into the producer, effectively allowing for a
+      # much better and more granular control over the dispatch and consumption process.
+      class Dispatcher < ::Karafka::ActiveJob::Dispatcher
+        # Defaults for dispatching
+        # The can be updated by using `#karafka_options` on the job
+        DEFAULTS = {
+          dispatch_method: :produce_async,
+          # We don't create a dummy proc based partitioner as we would have to evaluate it with
+          # each job.
+          partitioner: nil
+        }.freeze
+
+        private_constant :DEFAULTS
+
+        # @param job [ActiveJob::Base] job
+        def call(job)
+          ::Karafka.producer.public_send(
+            fetch_option(job, :dispatch_method, DEFAULTS),
+            dispatch_details(job).merge!(
+              topic: job.queue_name,
+              payload: ::ActiveSupport::JSON.encode(job.serialize)
+            )
+          )
+        end
+
+        private
+
+        # @param job [ActiveJob::Base] job instance
+        # @return [Hash] hash with dispatch details to which we merge topic and payload
+        def dispatch_details(job)
+          partitioner = fetch_option(job, :partitioner, DEFAULTS)
+
+          return {} unless partitioner
+
+          {
+            partition_key: partitioner.call(job)
+          }
+        end
+      end
+    end
+  end
+end
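Together with the `karafka_options` extension referenced in the `DEFAULTS` comment and the `JobOptionsContract` that follows, this allows per-job tuning of the dispatch. A hedged sketch of how a Pro-licensed application might use it, assuming `karafka_options` accepts a hash of these keys; job and queue names are illustrative:

```ruby
class NotificationsJob < ActiveJob::Base
  queue_as :notifications

  # dispatch_method picks the WaterDrop producer method (produce_async by default);
  # the partitioner only needs to respond to #call, matching Types.Interface(:call)
  karafka_options(
    dispatch_method: :produce_sync,
    partitioner: ->(job) { job.arguments.first.to_s }
  )

  def perform(user_id)
    # regular ActiveJob work
  end
end
```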
data/lib/karafka/pro/active_job/job_options_contract.rb
ADDED

@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component.
+# All of the commercial components are present in the lib/karafka/pro directory of this repository
+# and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    module ActiveJob
+      # Contract for validating the options that can be altered with `#karafka_options` per job
+      # class that works with Pro features.
+      class JobOptionsContract < ::Karafka::ActiveJob::JobOptionsContract
+        # Dry types
+        Types = include Dry.Types()
+
+        params do
+          optional(:partitioner).value(Types.Interface(:call))
+        end
+      end
+    end
+  end
+end
data/lib/karafka/pro/loader.rb
ADDED

@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component.
+# All of the commercial components are present in the lib/karafka/pro directory of this repository
+# and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+module Karafka
+  module Pro
+    # Loader requires and loads all the pro components only when they are needed
+    class Loader
+      class << self
+        # Loads all the pro components and configures them wherever it is expected
+        # @param config [Dry::Configurable::Config] whole app config that we can alter with pro
+        #   components
+        def setup(config)
+          require_relative 'active_job/dispatcher'
+          require_relative 'active_job/job_options_contract'
+
+          config.internal.active_job.dispatcher = ActiveJob::Dispatcher.new
+          config.internal.active_job.job_options_contract = ActiveJob::JobOptionsContract.new
+        end
+      end
+    end
+  end
+end
data/lib/karafka/pro.rb
ADDED
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component.
+# All of the commercial components are present in the lib/karafka/pro directory of this repository
+# and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+module Karafka
+  # Namespace for pro components, licensed under the commercial license agreement.
+  module Pro
+  end
+end
data/lib/karafka/processing/worker.rb
CHANGED

@@ -41,7 +41,7 @@ module Karafka
       # We signal critical exceptions, notify and do not allow worker to fail
       # rubocop:disable Lint/RescueException
       rescue Exception => e
-
+        # rubocop:enable Lint/RescueException
         Karafka.monitor.instrument(
           'error.occurred',
           caller: self,
data/lib/karafka/railtie.rb
CHANGED
@@ -19,6 +19,7 @@ end
 if rails
   # Load Karafka
   require 'karafka'
+
   # Load ActiveJob adapter
   require 'active_job/karafka'

@@ -30,34 +31,69 @@ if rails
     class Railtie < Rails::Railtie
       railtie_name :karafka

-      initializer 'karafka.
-
-
+      initializer 'karafka.active_job_integration' do
+        ActiveSupport.on_load(:active_job) do
+          # Extend ActiveJob with some Karafka specific ActiveJob magic
+          extend ::Karafka::ActiveJob::JobExtensions
+        end
+      end

+      # This lines will make Karafka print to stdout like puma or unicorn when we run karafka
+      # server + will support code reloading with each fetched loop. We do it only for karafka
+      # based commands as Rails processes and console will have it enabled already
+      initializer 'karafka.configure_rails_logger' do
         # Make Karafka use Rails logger
         ::Karafka::App.config.logger = Rails.logger

-
-
-
-
-
-        )
+        next unless Rails.env.development?
+        next unless ENV.key?('KARAFKA_CLI')
+
+        Rails.logger.extend(
+          ActiveSupport::Logger.broadcast(
+            ActiveSupport::Logger.new($stdout)
           )
+        )
+      end

-
-
-
-
-        next unless Rails.application.reloaders.any?(&:updated?)
+      initializer 'karafka.configure_rails_auto_load_paths' do |app|
+        # Consumers should autoload by default in the Rails app so they are visible
+        app.config.autoload_paths += %w[app/consumers]
+      end

-
-
+      initializer 'karafka.configure_rails_code_reloader' do
+        # There are components that won't work with older Rails version, so we check it and
+        # provide a failover
+        rails6plus = Rails.gem_version >= Gem::Version.new('6.0.0')
+
+        next unless Rails.env.development?
+        next unless ENV.key?('KARAFKA_CLI')
+        next unless rails6plus
+
+        # We can have many listeners, but it does not matter in which we will reload the code
+        # as long as all the consumers will be re-created as Rails reload is thread-safe
+        ::Karafka::App.monitor.subscribe('connection.listener.fetch_loop') do
+          # Reload code each time there is a change in the code
+          next unless Rails.application.reloaders.any?(&:updated?)
+
+          Rails.application.reloader.reload!
         end
+      end

-
-
-
+      initializer 'karafka.require_karafka_boot_file' do |app|
+        rails6plus = Rails.gem_version >= Gem::Version.new('6.0.0')
+
+        karafka_boot_file = Rails.root.join(Karafka.boot_file.to_s).to_s
+
+        if rails6plus
+          app.reloader.to_prepare do
+            # Load Karafka boot file, so it can be used in Rails server context
+            require karafka_boot_file
+          end
+        else
+          # Load Karafka main setup for older Rails versions
+          app.config.after_initialize do
+            require karafka_boot_file
+          end
         end
       end
     end
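Since `app/consumers` is now added to the Rails autoload paths, consumer classes placed there are picked up without explicit requires. A minimal example of such a class; the class name and the logging inside are purely illustrative:

```ruby
# app/consumers/orders_consumer.rb -- autoloaded thanks to the railtie initializer above
class OrdersConsumer < Karafka::BaseConsumer
  def consume
    messages.each do |message|
      Rails.logger.info("Received order event: #{message.payload}")
    end
  end
end
```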
data/lib/karafka/routing/builder.rb
CHANGED

@@ -10,11 +10,6 @@ module Karafka
     #   end
     # end
     class Builder < Concurrent::Array
-      # Consumer group consistency checking contract
-      CONTRACT = Karafka::Contracts::ConsumerGroup.new.freeze
-
-      private_constant :CONTRACT
-
       def initialize
         @draws = Concurrent::Array.new
         super
@@ -38,12 +33,7 @@ module Karafka
        instance_eval(&block)

        each do |consumer_group|
-
-          validation_result = CONTRACT.call(hashed_group)
-
-          next if validation_result.success?
-
-          raise Errors::InvalidConfigurationError, validation_result.errors.to_h
+          Contracts::ConsumerGroup.new.validate!(consumer_group.to_h)
        end
      end

data/lib/karafka/routing/subscription_group.rb
CHANGED

@@ -20,7 +20,7 @@ module Karafka

       # @return [String] consumer group id
       def consumer_group_id
-        kafka['group.id']
+        kafka[:'group.id']
       end

       # @return [Integer] max messages fetched in a single go
@@ -39,12 +39,12 @@ module Karafka
       def kafka
         kafka = @topics.first.kafka.dup

-        kafka['client.id'] ||= Karafka::App.config.client_id
-        kafka['group.id'] ||= @topics.first.consumer_group.id
-        kafka['auto.offset.reset'] ||=
+        kafka[:'client.id'] ||= Karafka::App.config.client_id
+        kafka[:'group.id'] ||= @topics.first.consumer_group.id
+        kafka[:'auto.offset.reset'] ||= @topics.first.initial_offset
         # Karafka manages the offsets based on the processing state, thus we do not rely on the
         # rdkafka offset auto-storing
-        kafka['enable.auto.offset.store'] = 'false'
+        kafka[:'enable.auto.offset.store'] = 'false'
         kafka.freeze
         kafka
       end
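The `initial_offset` value that feeds `auto.offset.reset` here comes from the new global setting (shown in data/lib/karafka/setup/config.rb below), while an explicit value in the `kafka` scope still wins because of the `||=` fallback. A short sketch of both options:

```ruby
Karafka::App.setup do |config|
  # Start from the newest messages unless overridden in the kafka scope
  config.initial_offset = 'latest'

  # An explicit kafka-scope value would take precedence over initial_offset:
  # config.kafka = { 'bootstrap.servers': '127.0.0.1:9092', 'auto.offset.reset': 'earliest' }
  config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' }
end
```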
data/lib/karafka/setup/config.rb
CHANGED
@@ -14,15 +14,12 @@ module Karafka
     class Config
       extend Dry::Configurable

-      # Contract for checking the config provided by the user
-      CONTRACT = Karafka::Contracts::Config.new.freeze
-
       # Defaults for kafka settings, that will be overwritten only if not present already
       KAFKA_DEFAULTS = {
-        'client.id'
+        'client.id': 'karafka'
       }.freeze

-      private_constant :
+      private_constant :KAFKA_DEFAULTS

       # Available settings

@@ -57,6 +54,9 @@ module Karafka
       setting :consumer_persistence, default: true
       # Default deserializer for converting incoming data into ruby objects
       setting :deserializer, default: Karafka::Serialization::Json::Deserializer.new
+      # option [String] should we start with the earliest possible offset or latest
+      # This will set the `auto.offset.reset` value unless present in the kafka scope
+      setting :initial_offset, default: 'earliest'
       # option [Boolean] should we leave offset management to the user
       setting :manual_offset_management, default: false
       # options max_messages [Integer] how many messages do we want to fetch from Kafka in one go
@@ -84,8 +84,6 @@ module Karafka
       setting :kafka, default: {}

       # Namespace for internal settings that should not be modified
-      # It's a temporary step to "declassify" several things internally before we move to a
-      # non global state
       setting :internal do
         # option routing_builder [Karafka::Routing::Builder] builder instance
         setting :routing_builder, default: Routing::Builder.new
@@ -98,6 +96,17 @@ module Karafka
         # option subscription_groups_builder [Routing::SubscriptionGroupsBuilder] subscription
         # group builder
         setting :subscription_groups_builder, default: Routing::SubscriptionGroupsBuilder.new
+
+        # Karafka components for ActiveJob
+        setting :active_job do
+          # option dispatcher [Karafka::ActiveJob::Dispatcher] default dispatcher for ActiveJob
+          setting :dispatcher, default: ActiveJob::Dispatcher.new
+          # option job_options_contract [Karafka::Contracts::JobOptionsContract] contract for
+          # ensuring, that extra job options defined are valid
+          setting :job_options_contract, default: ActiveJob::JobOptionsContract.new
+          # option consumer [Class] consumer class that should be used to consume ActiveJob data
+          setting :consumer, default: ActiveJob::Consumer
+        end
       end

       class << self
@@ -106,12 +115,14 @@ module Karafka
         def setup(&block)
           configure(&block)
           merge_kafka_defaults!(config)
-          validate!
+          Contracts::Config.new.validate!(config.to_h)

           # Check the license presence (if needed) and
           Licenser.new.verify(config.license)

           configure_components
+
+          Karafka::App.initialized!
         end

         private
@@ -128,18 +139,6 @@ module Karafka
           end
         end

-        # Validate config based on the config contract
-        # @return [Boolean] true if configuration is valid
-        # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
-        #   doesn't match with the config contract
-        def validate!
-          validation_result = CONTRACT.call(config.to_h)
-
-          return true if validation_result.success?
-
-          raise Errors::InvalidConfigurationError, validation_result.errors.to_h
-        end
-
         # Sets up all the components that are based on the user configuration
         # @note At the moment it is only WaterDrop
         def configure_components
@@ -149,6 +148,12 @@ module Karafka
           producer_config.kafka = config.kafka.dup
           producer_config.logger = config.logger
         end
+
+        return unless Karafka.pro?
+
+        # Runs the pro loader that includes all the pro components
+        require 'karafka/pro/loader'
+        Pro::Loader.setup(config)
       end
     end
   end
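The reworked setup flow validates the config, verifies the license, wires the components, conditionally loads the Pro code and finally flips the app to `initialized!`. A hedged sketch of a Pro-enabled setup; the environment variable name is a placeholder:

```ruby
Karafka::App.setup do |config|
  config.client_id = 'example_app'
  config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' }

  # With a valid token the licenser fills in entity/expires_on, `Karafka.pro?` becomes true
  # and Pro::Loader.setup swaps in the Pro ActiveJob dispatcher and job options contract
  config.license.token = ENV['KARAFKA_LICENSE_TOKEN']
end
```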
data/lib/karafka/templates/karafka.rb.erb
CHANGED

@@ -27,7 +27,7 @@ APP_LOADER.eager_load

 class KarafkaApp < Karafka::App
   setup do |config|
-    config.kafka = { 'bootstrap.servers'
+    config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' }
     config.client_id = 'example_app'
 <% if rails? -%>
     # Recreate consumers with each batch. This will allow Rails code reload to work in the
data/lib/karafka/version.rb
CHANGED