karafka 1.2.8 → 1.4.0

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.
Files changed (113)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.diffend.yml +3 -0
  7. data/.github/FUNDING.yml +3 -0
  8. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  9. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  10. data/.github/workflows/ci.yml +52 -0
  11. data/.gitignore +1 -0
  12. data/.ruby-version +1 -1
  13. data/CHANGELOG.md +134 -14
  14. data/CODE_OF_CONDUCT.md +1 -1
  15. data/CONTRIBUTING.md +1 -1
  16. data/Gemfile +4 -5
  17. data/Gemfile.lock +92 -81
  18. data/README.md +9 -12
  19. data/bin/karafka +1 -1
  20. data/certs/mensfeld.pem +25 -0
  21. data/config/errors.yml +38 -5
  22. data/docker-compose.yml +17 -0
  23. data/karafka.gemspec +18 -17
  24. data/lib/karafka.rb +10 -16
  25. data/lib/karafka/app.rb +14 -6
  26. data/lib/karafka/attributes_map.rb +5 -10
  27. data/lib/karafka/base_consumer.rb +19 -30
  28. data/lib/karafka/base_responder.rb +45 -27
  29. data/lib/karafka/cli.rb +2 -2
  30. data/lib/karafka/cli/console.rb +11 -9
  31. data/lib/karafka/cli/flow.rb +9 -7
  32. data/lib/karafka/cli/info.rb +4 -2
  33. data/lib/karafka/cli/install.rb +30 -6
  34. data/lib/karafka/cli/server.rb +11 -6
  35. data/lib/karafka/code_reloader.rb +67 -0
  36. data/lib/karafka/connection/api_adapter.rb +22 -9
  37. data/lib/karafka/connection/batch_delegator.rb +55 -0
  38. data/lib/karafka/connection/builder.rb +5 -3
  39. data/lib/karafka/connection/client.rb +31 -31
  40. data/lib/karafka/connection/listener.rb +26 -15
  41. data/lib/karafka/connection/message_delegator.rb +36 -0
  42. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  43. data/lib/karafka/consumers/callbacks.rb +32 -15
  44. data/lib/karafka/consumers/includer.rb +31 -18
  45. data/lib/karafka/consumers/responders.rb +2 -2
  46. data/lib/karafka/contracts.rb +10 -0
  47. data/lib/karafka/contracts/config.rb +21 -0
  48. data/lib/karafka/contracts/consumer_group.rb +206 -0
  49. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  50. data/lib/karafka/contracts/responder_usage.rb +54 -0
  51. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  52. data/lib/karafka/errors.rb +17 -16
  53. data/lib/karafka/fetcher.rb +28 -30
  54. data/lib/karafka/helpers/class_matcher.rb +12 -2
  55. data/lib/karafka/helpers/config_retriever.rb +1 -1
  56. data/lib/karafka/helpers/inflector.rb +26 -0
  57. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  58. data/lib/karafka/instrumentation/logger.rb +9 -6
  59. data/lib/karafka/instrumentation/monitor.rb +15 -9
  60. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  61. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  62. data/lib/karafka/params/batch_metadata.rb +26 -0
  63. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  64. data/lib/karafka/params/builders/params.rb +38 -0
  65. data/lib/karafka/params/builders/params_batch.rb +25 -0
  66. data/lib/karafka/params/metadata.rb +20 -0
  67. data/lib/karafka/params/params.rb +54 -0
  68. data/lib/karafka/params/params_batch.rb +35 -21
  69. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  70. data/lib/karafka/persistence/client.rb +15 -11
  71. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
  72. data/lib/karafka/persistence/topics.rb +48 -0
  73. data/lib/karafka/process.rb +0 -2
  74. data/lib/karafka/responders/builder.rb +1 -1
  75. data/lib/karafka/responders/topic.rb +6 -8
  76. data/lib/karafka/routing/builder.rb +36 -8
  77. data/lib/karafka/routing/consumer_group.rb +1 -1
  78. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  79. data/lib/karafka/routing/proxy.rb +10 -1
  80. data/lib/karafka/routing/topic.rb +5 -3
  81. data/lib/karafka/routing/topic_mapper.rb +16 -18
  82. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  83. data/lib/karafka/serialization/json/serializer.rb +31 -0
  84. data/lib/karafka/server.rb +29 -28
  85. data/lib/karafka/setup/config.rb +67 -37
  86. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  87. data/lib/karafka/setup/dsl.rb +0 -1
  88. data/lib/karafka/status.rb +7 -3
  89. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  90. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  91. data/lib/karafka/templates/karafka.rb.erb +92 -0
  92. data/lib/karafka/version.rb +1 -1
  93. metadata +94 -72
  94. metadata.gz.sig +0 -0
  95. data/.travis.yml +0 -21
  96. data/lib/karafka/callbacks.rb +0 -30
  97. data/lib/karafka/callbacks/config.rb +0 -22
  98. data/lib/karafka/callbacks/dsl.rb +0 -16
  99. data/lib/karafka/connection/delegator.rb +0 -46
  100. data/lib/karafka/instrumentation/listener.rb +0 -112
  101. data/lib/karafka/loader.rb +0 -28
  102. data/lib/karafka/params/dsl.rb +0 -156
  103. data/lib/karafka/parsers/json.rb +0 -38
  104. data/lib/karafka/patches/dry_configurable.rb +0 -35
  105. data/lib/karafka/persistence/topic.rb +0 -29
  106. data/lib/karafka/schemas/config.rb +0 -24
  107. data/lib/karafka/schemas/consumer_group.rb +0 -78
  108. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  109. data/lib/karafka/schemas/responder_usage.rb +0 -39
  110. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  111. data/lib/karafka/setup/configurators/base.rb +0 -29
  112. data/lib/karafka/setup/configurators/params.rb +0 -25
  113. data/lib/karafka/templates/karafka.rb.example +0 -54
data/lib/karafka/schemas/config.rb (deleted)
@@ -1,24 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Namespace for all the validation schemas that we use to check input
-   module Schemas
-     # Regexp for validating format of groups and topics
-     TOPIC_REGEXP = /\A(\w|\-|\.)+\z/
-
-     # Schema with validation rules for Karafka configuration details
-     # @note There are many more configuration options inside of the
-     #   Karafka::Setup::Config model, but we don't validate them here as they are
-     #   validated per each route (topic + consumer_group) because they can be overwritten,
-     #   so we validate all of that once all the routes are defined and ready
-     Config = Dry::Validation.Schema do
-       required(:client_id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:shutdown_timeout) { none? | (int? & gteq?(0)) }
-       required(:consumer_mapper)
-       required(:topic_mapper)
-       required(:params_base_class).filled
-
-       optional(:backend).filled
-     end
-   end
- end
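Karafka 1.4 replaces these Dry::Validation.Schema definitions with contract classes under lib/karafka/contracts (see contracts/config.rb, +21 lines, in the file list above). The new contract itself is not shown in this diff; the snippet below is only a rough sketch of the dry-validation 1.x contract style the gem migrates to, with assumed field rules — it is not the shipped Karafka::Contracts::Config.

require 'dry/validation'

# Rough sketch of the contract style used in 1.4 (not the shipped file)
class ConfigContractSketch < Dry::Validation::Contract
  params do
    required(:client_id).filled(:string, format?: /\A(\w|\-|\.)+\z/)
    required(:shutdown_timeout).filled(:integer, gteq?: 0)
    required(:consumer_mapper).filled
    required(:topic_mapper).filled
    optional(:backend).filled
  end
end

# Contracts are instantiated and called with the hash to validate
result = ConfigContractSketch.new.call(client_id: 'example_app', shutdown_timeout: 60)
result.success?    # => false, the mapper keys are missing
result.errors.to_h # => hash with errors for the missing keys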
data/lib/karafka/schemas/consumer_group.rb (deleted)
@@ -1,78 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Schemas
-     # Schema for single full route (consumer group + topics) validation.
-     ConsumerGroup = Dry::Validation.Schema do
-       # Valid uri schemas of Kafka broker url
-       # The ||= is due to the behavior of require_all that resolves dependencies
-       # but someetimes loads things twice
-       URI_SCHEMES ||= %w[kafka kafka+ssl plaintext ssl].freeze
-
-       # Available sasl scram mechanism of authentication (plus nil)
-       SASL_SCRAM_MECHANISMS ||= %w[sha256 sha512].freeze
-
-       configure do
-         config.messages_file = File.join(
-           Karafka.gem_root, 'config', 'errors.yml'
-         )
-
-         # Uri validator to check if uri is in a Karafka acceptable format
-         # @param uri [String] uri we want to validate
-         # @return [Boolean] true if it is a valid uri, otherwise false
-         def broker_schema?(uri)
-           uri = URI.parse(uri)
-           URI_SCHEMES.include?(uri.scheme) && uri.port
-         rescue URI::InvalidURIError
-           false
-         end
-       end
-
-       required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:seed_brokers).filled { each(:broker_schema?) }
-       required(:session_timeout).filled { int? | float? }
-       required(:pause_timeout) { none? | ((int? | float?) & gteq?(0)) }
-       required(:offset_commit_interval) { int? | float? }
-       required(:offset_commit_threshold).filled(:int?)
-       required(:offset_retention_time) { none?.not > int? }
-       required(:heartbeat_interval).filled { (int? | float?) & gteq?(0) }
-       required(:fetcher_max_queue_size).filled(:int?, gt?: 0)
-       required(:connect_timeout).filled { (int? | float?) & gt?(0) }
-       required(:socket_timeout).filled { (int? | float?) & gt?(0) }
-       required(:min_bytes).filled(:int?, gt?: 0)
-       required(:max_bytes).filled(:int?, gt?: 0)
-       required(:max_wait_time).filled { (int? | float?) & gteq?(0) }
-       required(:batch_fetching).filled(:bool?)
-       required(:topics).filled { each { schema(ConsumerGroupTopic) } }
-
-       # Max wait time cannot exceed socket_timeout - wouldn't make sense
-       rule(
-         max_wait_time_limit: %i[max_wait_time socket_timeout]
-       ) do |max_wait_time, socket_timeout|
-         socket_timeout.int? > max_wait_time.lteq?(value(:socket_timeout))
-       end
-
-       %i[
-         ssl_ca_cert
-         ssl_ca_cert_file_path
-         ssl_client_cert
-         ssl_client_cert_key
-         sasl_gssapi_principal
-         sasl_gssapi_keytab
-         sasl_plain_authzid
-         sasl_plain_username
-         sasl_plain_password
-         sasl_scram_username
-         sasl_scram_password
-       ].each do |encryption_attribute|
-         optional(encryption_attribute).maybe(:str?)
-       end
-
-       optional(:ssl_ca_certs_from_system).maybe(:bool?)
-
-       # It's not with other encryptions as it has some more rules
-       optional(:sasl_scram_mechanism)
-         .maybe(:str?, included_in?: Karafka::Schemas::SASL_SCRAM_MECHANISMS)
-     end
-   end
- end
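In the contracts that replace this schema (contracts/consumer_group.rb, +206 lines in the file list), custom predicates defined inside a configure block, like broker_schema? above, are expressed as explicit rules in dry-validation 1.x. The sketch below only illustrates that pattern; the class name, messages and the reduced field list are assumptions, not the shipped contract.

require 'dry/validation'
require 'uri'

# Sketch of a rule-based replacement for the broker_schema? predicate
class ConsumerGroupContractSketch < Dry::Validation::Contract
  URI_SCHEMES = %w[kafka kafka+ssl plaintext ssl].freeze

  params do
    required(:id).filled(:string)
    required(:seed_brokers).value(:array, min_size?: 1)
  end

  # Validates every broker uri instead of relying on a custom predicate
  rule(:seed_brokers).each do
    begin
      uri = URI.parse(value)
      key.failure('is not a valid broker uri') unless URI_SCHEMES.include?(uri.scheme) && uri.port
    rescue URI::InvalidURIError
      key.failure('is not a valid broker uri')
    end
  end
end

# ConsumerGroupContractSketch.new.call(id: 'group', seed_brokers: ['kafka://127.0.0.1:9092'])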
data/lib/karafka/schemas/consumer_group_topic.rb (deleted)
@@ -1,18 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Schemas
-     # Consumer group topic validation rules
-     ConsumerGroupTopic = Dry::Validation.Schema do
-       required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:backend).filled(included_in?: %i[inline sidekiq])
-       required(:consumer).filled
-       required(:parser).filled
-       required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
-       required(:start_from_beginning).filled(:bool?)
-       required(:batch_consuming).filled(:bool?)
-       required(:persistent).filled(:bool?)
-     end
-   end
- end
data/lib/karafka/schemas/responder_usage.rb (deleted)
@@ -1,39 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Schemas
-     # Validator to check responder topic usage
-     ResponderUsageTopic = Dry::Validation.Schema do
-       required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:required).filled(:bool?)
-       required(:multiple_usage).filled(:bool?)
-       required(:usage_count).filled(:int?, gteq?: 0)
-       required(:registered).filled(eql?: true)
-       required(:async).filled(:bool?)
-
-       rule(
-         required_usage: %i[required usage_count]
-       ) do |required, usage_count|
-         required.true? > usage_count.gteq?(1)
-       end
-
-       rule(
-         multiple_usage_permission: %i[multiple_usage usage_count]
-       ) do |multiple_usage, usage_count|
-         usage_count.gt?(1) > multiple_usage.true?
-       end
-
-       rule(
-         multiple_usage_block: %i[multiple_usage usage_count]
-       ) do |multiple_usage, usage_count|
-         multiple_usage.false? > usage_count.lteq?(1)
-       end
-     end
-
-     # Validator to check that everything in a responder flow matches responder rules
-     ResponderUsage = Dry::Validation.Schema do
-       required(:used_topics) { filled? > each { schema(ResponderUsageTopic) } }
-       required(:registered_topics) { filled? > each { schema(ResponderUsageTopic) } }
-     end
-   end
- end
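The implication-style rules above (for example required.true? > usage_count.gteq?(1)) have no direct equivalent in dry-validation 1.x; in the replacement contracts (contracts/responder_usage.rb, +54 lines) they become plain multi-key rule blocks. Below is a minimal sketch of that pattern, assuming simplified field names and messages, not the shipped contract.

require 'dry/validation'

# Sketch of cross-field rules in the dry-validation 1.x contract style
class ResponderUsageTopicSketch < Dry::Validation::Contract
  params do
    required(:required).filled(:bool)
    required(:multiple_usage).filled(:bool)
    required(:usage_count).filled(:integer, gteq?: 0)
  end

  # "a required topic must be used at least once"
  rule(:required, :usage_count) do
    key(:usage_count).failure('must be used at least once') if values[:required] && values[:usage_count] < 1
  end

  # "using a topic more than once requires multiple_usage"
  rule(:multiple_usage, :usage_count) do
    key(:multiple_usage).failure('must be enabled for repeated usage') if values[:usage_count] > 1 && !values[:multiple_usage]
  end
end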
data/lib/karafka/schemas/server_cli_options.rb (deleted)
@@ -1,43 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Schemas
-     # Schema for validating correctness of the server cli command options
-     # We validate some basics + the list of consumer_groups on which we want to use, to make
-     # sure that all of them are defined, plus that a pidfile does not exist
-     ServerCliOptions = Dry::Validation.Schema do
-       configure do
-         option :consumer_groups
-
-         def self.messages
-           super.merge(
-             en: {
-               errors: {
-                 consumer_groups_inclusion: 'Unknown consumer group.',
-                 pid_existence: 'Pidfile already exists.'
-               }
-             }
-           )
-         end
-       end
-
-       optional(:pid).filled(:str?)
-       optional(:daemon).filled(:bool?)
-       optional(:consumer_groups).filled(:array?)
-
-       validate(consumer_groups_inclusion: :consumer_groups) do |consumer_groups|
-         # If there were no consumer_groups declared in the server cli, it means that we will
-         # run all of them and no need to validate them here at all
-         if consumer_groups.nil?
-           true
-         else
-           (consumer_groups - Karafka::Routing::Builder.instance.map(&:name)).empty?
-         end
-       end
-
-       validate(pid_existence: :pid) do |pid|
-         pid ? !File.exist?(pid) : true
-       end
-     end
-   end
- end
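The configure block above injects external state (option :consumer_groups) and uses custom validate blocks; in dry-validation 1.x contracts the same idea is expressed with injected options and rules. The replacement (contracts/server_cli_options.rb, +31 lines) is not shown here, so the following is only an illustrative sketch with an assumed known_consumer_groups dependency name.

require 'dry/validation'

# Sketch of option injection + rules replacing configure/validate blocks
class ServerCliOptionsSketch < Dry::Validation::Contract
  # Injected when the contract is instantiated (hypothetical dependency name)
  option :known_consumer_groups

  params do
    optional(:pid).filled(:string)
    optional(:daemon).filled(:bool)
    optional(:consumer_groups).maybe(:array)
  end

  rule(:consumer_groups) do
    groups = values[:consumer_groups]
    # No groups given means "run them all" - nothing to validate
    next if groups.nil?

    key.failure('Unknown consumer group.') unless (groups - known_consumer_groups).empty?
  end

  rule(:pid) do
    pid = values[:pid]
    key.failure('Pidfile already exists.') if pid && File.exist?(pid)
  end
end

# ServerCliOptionsSketch.new(known_consumer_groups: %w[app_group]).call(consumer_groups: %w[other])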
data/lib/karafka/setup/configurators/base.rb (deleted)
@@ -1,29 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Setup
-     # Configurators module is used to enclose all the external dependencies configurations
-     # upon which Karafka depents
-     class Configurators
-       # Karafka has some components that it relies on (like Sidekiq)
-       # We need to configure all of them only when the framework was set up.
-       # Any class that descends from this one will be automatically invoked upon setup (after it)
-       # @note This should be used only for internal Karafka dependencies configuration
-       #   End users configuration should go to the after_init block
-       # @example Configure an Example class
-       #   class ExampleConfigurator < Base
-       #     def setup
-       #       ExampleClass.logger = Karafka.logger
-       #       ExampleClass.redis = config.redis
-       #     end
-       #   end
-       class Base
-         # @param _config [Karafka::Config] config instance
-         # This method needs to be implemented in a subclass
-         def self.setup(_config)
-           raise NotImplementedError
-         end
-       end
-     end
-   end
- end
data/lib/karafka/setup/configurators/params.rb (deleted)
@@ -1,25 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Setup
-     class Configurators
-       # Karafka::Params::Params are dynamically built based on user defined parent class
-       # so we cannot just require it, we need to initialize it after user is done with
-       # the framework configuration. This is a configurator that does exactly that.
-       class Params < Base
-         # Builds up Karafka::Params::Params class with user defined parent class
-         # @param config [Karafka::Setup::Config] Config we can user to setup things
-         def self.setup(config)
-           return if defined? Karafka::Params::Params
-
-           Karafka::Params.const_set(
-             'Params',
-             Class
-               .new(config.params_base_class)
-               .tap { |klass| klass.include(Karafka::Params::Dsl) }
-           )
-         end
-       end
-     end
-   end
- end
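This configurator built Karafka::Params::Params dynamically from the configurable params_base_class. That mechanism is gone in 1.4: params are a regular class (params/params.rb in the file list) and payload handling is configured per topic through deserializers (serialization/json/deserializer.rb). The sketch below shows that routing-level pattern; the custom deserializer class, the consumer class and the raw_payload accessor are assumptions based on the 1.4 JSON deserializer, not code from this diff.

require 'json'

# Hypothetical custom deserializer - anything responding to #call works
class SymbolizedJsonDeserializer
  def call(params)
    # Assumes params exposes the raw Kafka message value as #raw_payload
    JSON.parse(params.raw_payload, symbolize_names: true)
  end
end

class KarafkaApp < Karafka::App
  consumer_groups.draw do
    topic :events do
      consumer EventsConsumer # hypothetical consumer class
      deserializer SymbolizedJsonDeserializer.new
    end
  end
end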
data/lib/karafka/templates/karafka.rb.example (deleted)
@@ -1,54 +0,0 @@
- # frozen_string_literal: true
-
- # Non Ruby on Rails setup
- ENV['RACK_ENV'] ||= 'development'
- ENV['KARAFKA_ENV'] ||= ENV['RACK_ENV']
- Bundler.require(:default, ENV['KARAFKA_ENV'])
- Karafka::Loader.load(Karafka::App.root)
-
- # Ruby on Rails setup
- # Remove whole non-Rails setup that is above and uncomment the 4 lines below
- # ENV['RAILS_ENV'] ||= 'development'
- # ENV['KARAFKA_ENV'] = ENV['RAILS_ENV']
- # require ::File.expand_path('../config/environment', __FILE__)
- # Rails.application.eager_load!
-
- class KarafkaApp < Karafka::App
-   setup do |config|
-     config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
-     config.client_id = 'example_app'
-     config.backend = :inline
-     config.batch_fetching = true
-     # Uncomment this for Rails app integration
-     # config.logger = Rails.logger
-   end
-
-   after_init do |config|
-     # Put here all the things you want to do after the Karafka framework
-     # initialization
-   end
-
-   # Comment out this part if you are not using instrumentation and/or you are not
-   # interested in logging events for certain environments. Since instrumentation
-   # notifications add extra boilerplate, if you want to achieve max performance,
-   # listen to only what you really need for given environment.
-   Karafka.monitor.subscribe(Karafka::Instrumentation::Listener)
-
-   consumer_groups.draw do
-     # topic :example do
-     #   consumer ExampleConsumer
-     # end
-
-     # consumer_group :bigger_group do
-     #   topic :test do
-     #     consumer TestConsumer
-     #   end
-     #
-     #   topic :test2 do
-     #     consumer Test2Consumer
-     #   end
-     # end
-   end
- end
-
- KarafkaApp.boot!
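The .example template above is replaced by karafka.rb.erb (+92 lines, listed earlier), whose full content is not part of this excerpt. As a rough orientation only, a minimal 1.4-style boot file differs in two visible ways: Karafka::Loader is gone (application loading is left to Bundler/Rails or your own loader), and the class-based Karafka::Instrumentation::Listener is replaced by listener instances such as Karafka::Instrumentation::StdoutListener (added in the file list). The sketch below reflects those two points and is not the shipped template.

# Minimal 1.4-style boot file sketch (not the bundled karafka.rb.erb)
ENV['KARAFKA_ENV'] ||= 'development'
Bundler.require(:default, ENV['KARAFKA_ENV'])

class KarafkaApp < Karafka::App
  setup do |config|
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
    config.client_id = 'example_app'
  end

  # Listener classes were replaced by subscribable instances
  Karafka.monitor.subscribe(Karafka::Instrumentation::StdoutListener.new)

  consumer_groups.draw do
    # topic :example do
    #   consumer ExampleConsumer
    # end
  end
end

KarafkaApp.boot!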