karafka 1.4.9 → 2.0.0.alpha1

Sign up to get free protection for your applications and to get access to all the features.
Files changed (127)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +3 -0
  4. data/.github/workflows/ci.yml +78 -26
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +46 -0
  7. data/Gemfile +6 -0
  8. data/Gemfile.lock +39 -49
  9. data/LICENSE +14 -0
  10. data/LICENSE-COMM +89 -0
  11. data/LICENSE-LGPL +165 -0
  12. data/README.md +16 -48
  13. data/bin/benchmarks +85 -0
  14. data/bin/create_token +28 -0
  15. data/bin/integrations +160 -0
  16. data/bin/stress +13 -0
  17. data/certs/karafka-pro.pem +11 -0
  18. data/config/errors.yml +4 -38
  19. data/docker-compose.yml +11 -3
  20. data/karafka.gemspec +17 -17
  21. data/lib/active_job/consumer.rb +22 -0
  22. data/lib/active_job/karafka.rb +18 -0
  23. data/lib/active_job/queue_adapters/karafka_adapter.rb +29 -0
  24. data/lib/active_job/routing_extensions.rb +15 -0
  25. data/lib/karafka/app.rb +13 -20
  26. data/lib/karafka/base_consumer.rb +103 -34
  27. data/lib/karafka/cli/base.rb +4 -4
  28. data/lib/karafka/cli/info.rb +43 -8
  29. data/lib/karafka/cli/install.rb +3 -8
  30. data/lib/karafka/cli/server.rb +17 -30
  31. data/lib/karafka/cli.rb +4 -11
  32. data/lib/karafka/connection/client.rb +279 -93
  33. data/lib/karafka/connection/listener.rb +137 -38
  34. data/lib/karafka/connection/messages_buffer.rb +57 -0
  35. data/lib/karafka/connection/pauses_manager.rb +46 -0
  36. data/lib/karafka/connection/rebalance_manager.rb +62 -0
  37. data/lib/karafka/contracts/config.rb +25 -7
  38. data/lib/karafka/contracts/consumer_group.rb +0 -173
  39. data/lib/karafka/contracts/consumer_group_topic.rb +17 -7
  40. data/lib/karafka/contracts/server_cli_options.rb +1 -9
  41. data/lib/karafka/contracts.rb +1 -1
  42. data/lib/karafka/env.rb +46 -0
  43. data/lib/karafka/errors.rb +14 -18
  44. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  45. data/lib/karafka/instrumentation/callbacks/error.rb +40 -0
  46. data/lib/karafka/instrumentation/callbacks/statistics.rb +42 -0
  47. data/lib/karafka/instrumentation/monitor.rb +14 -21
  48. data/lib/karafka/instrumentation/stdout_listener.rb +64 -91
  49. data/lib/karafka/instrumentation.rb +21 -0
  50. data/lib/karafka/licenser.rb +65 -0
  51. data/lib/karafka/{params → messages}/batch_metadata.rb +7 -13
  52. data/lib/karafka/messages/builders/batch_metadata.rb +30 -0
  53. data/lib/karafka/messages/builders/message.rb +38 -0
  54. data/lib/karafka/messages/builders/messages.rb +40 -0
  55. data/lib/karafka/{params/params.rb → messages/message.rb} +7 -12
  56. data/lib/karafka/messages/messages.rb +64 -0
  57. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  58. data/lib/karafka/messages/seek.rb +9 -0
  59. data/lib/karafka/patches/rdkafka/consumer.rb +22 -0
  60. data/lib/karafka/processing/executor.rb +96 -0
  61. data/lib/karafka/processing/executors_buffer.rb +49 -0
  62. data/lib/karafka/processing/jobs/base.rb +18 -0
  63. data/lib/karafka/processing/jobs/consume.rb +28 -0
  64. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  65. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  66. data/lib/karafka/processing/jobs_queue.rb +121 -0
  67. data/lib/karafka/processing/worker.rb +57 -0
  68. data/lib/karafka/processing/workers_batch.rb +22 -0
  69. data/lib/karafka/railtie.rb +65 -0
  70. data/lib/karafka/routing/builder.rb +15 -14
  71. data/lib/karafka/routing/consumer_group.rb +10 -18
  72. data/lib/karafka/routing/consumer_mapper.rb +1 -2
  73. data/lib/karafka/routing/router.rb +1 -1
  74. data/lib/karafka/routing/subscription_group.rb +53 -0
  75. data/lib/karafka/routing/subscription_groups_builder.rb +51 -0
  76. data/lib/karafka/routing/topic.rb +47 -25
  77. data/lib/karafka/runner.rb +59 -0
  78. data/lib/karafka/serialization/json/deserializer.rb +6 -15
  79. data/lib/karafka/server.rb +62 -25
  80. data/lib/karafka/setup/config.rb +86 -159
  81. data/lib/karafka/status.rb +13 -3
  82. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  83. data/lib/karafka/templates/karafka.rb.erb +14 -50
  84. data/lib/karafka/time_trackers/base.rb +19 -0
  85. data/lib/karafka/time_trackers/pause.rb +84 -0
  86. data/lib/karafka/time_trackers/poll.rb +65 -0
  87. data/lib/karafka/version.rb +1 -1
  88. data/lib/karafka.rb +30 -13
  89. data.tar.gz.sig +0 -0
  90. metadata +78 -108
  91. metadata.gz.sig +0 -0
  92. data/MIT-LICENCE +0 -18
  93. data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
  94. data/lib/karafka/attributes_map.rb +0 -63
  95. data/lib/karafka/backends/inline.rb +0 -16
  96. data/lib/karafka/base_responder.rb +0 -226
  97. data/lib/karafka/cli/flow.rb +0 -48
  98. data/lib/karafka/cli/missingno.rb +0 -19
  99. data/lib/karafka/code_reloader.rb +0 -67
  100. data/lib/karafka/connection/api_adapter.rb +0 -158
  101. data/lib/karafka/connection/batch_delegator.rb +0 -55
  102. data/lib/karafka/connection/builder.rb +0 -23
  103. data/lib/karafka/connection/message_delegator.rb +0 -36
  104. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  105. data/lib/karafka/consumers/callbacks.rb +0 -71
  106. data/lib/karafka/consumers/includer.rb +0 -64
  107. data/lib/karafka/consumers/responders.rb +0 -24
  108. data/lib/karafka/consumers/single_params.rb +0 -15
  109. data/lib/karafka/contracts/responder_usage.rb +0 -54
  110. data/lib/karafka/fetcher.rb +0 -42
  111. data/lib/karafka/helpers/class_matcher.rb +0 -88
  112. data/lib/karafka/helpers/config_retriever.rb +0 -46
  113. data/lib/karafka/helpers/inflector.rb +0 -26
  114. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  115. data/lib/karafka/params/builders/params.rb +0 -38
  116. data/lib/karafka/params/builders/params_batch.rb +0 -25
  117. data/lib/karafka/params/params_batch.rb +0 -60
  118. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  119. data/lib/karafka/persistence/client.rb +0 -29
  120. data/lib/karafka/persistence/consumers.rb +0 -45
  121. data/lib/karafka/persistence/topics.rb +0 -48
  122. data/lib/karafka/responders/builder.rb +0 -36
  123. data/lib/karafka/responders/topic.rb +0 -55
  124. data/lib/karafka/routing/topic_mapper.rb +0 -53
  125. data/lib/karafka/serialization/json/serializer.rb +0 -31
  126. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  127. data/lib/karafka/templates/application_responder.rb.erb +0 -11
@@ -1,45 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- # Module used to provide a persistent cache layer for Karafka components that need to be
5
- # shared inside of a same thread
6
- module Persistence
7
- # Module used to provide a persistent cache across batch requests for a given
8
- # topic and partition to store some additional details when the persistent mode
9
- # for a given topic is turned on
10
- class Consumers
11
- # Thread.current scope under which we store consumers data
12
- PERSISTENCE_SCOPE = :consumers
13
-
14
- private_constant :PERSISTENCE_SCOPE
15
-
16
- class << self
17
- # @return [Hash] current thread's persistence scope hash with all the consumers
18
- def current
19
- Thread.current[PERSISTENCE_SCOPE] ||= Concurrent::Hash.new do |hash, key|
20
- hash[key] = Concurrent::Hash.new
21
- end
22
- end
23
-
24
- # Used to build (if block given) and/or fetch a current consumer instance that will be
25
- # used to process messages from a given topic and partition
26
- # @param topic [Karafka::Routing::Topic] topic instance for which we might cache
27
- # @param partition [Integer] number of partition for which we want to cache
28
- # @return [Karafka::BaseConsumer] base consumer descendant
29
- def fetch(topic, partition)
30
- current[topic][partition] ||= topic.consumer.new(topic)
31
- end
32
-
33
- # Removes all persisted instances of consumers from the consumer cache
34
- # @note This is used to reload consumers instances when code reloading in development mode
35
- # is present. This should not be used in production.
36
- def clear
37
- Thread
38
- .list
39
- .select { |thread| thread[PERSISTENCE_SCOPE] }
40
- .each { |thread| thread[PERSISTENCE_SCOPE].clear }
41
- end
42
- end
43
- end
44
- end
45
- end
@@ -1,48 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- module Persistence
5
- # Local cache for routing topics
6
- # We use it in order not to build string instances and remap incoming topic upon each
7
- # message / message batches received
8
- class Topics
9
- # Thread.current scope under which we store topics data
10
- PERSISTENCE_SCOPE = :topics
11
-
12
- private_constant :PERSISTENCE_SCOPE
13
-
14
- class << self
15
- # @return [Concurrent::Hash] hash with all the topics from given groups
16
- def current
17
- Thread.current[PERSISTENCE_SCOPE] ||= Concurrent::Hash.new do |hash, key|
18
- hash[key] = Concurrent::Hash.new
19
- end
20
- end
21
-
22
- # @param group_id [String] group id for which we fetch a topic representation
23
- # @param raw_topic_name [String] raw topic name (before remapping) for which we fetch a
24
- # topic representation
25
- # @return [Karafka::Routing::Topics] remapped topic representation that can be used further
26
- # on when working with given parameters
27
- def fetch(group_id, raw_topic_name)
28
- current[group_id][raw_topic_name] ||= begin
29
- # We map from incoming topic name, as it might be namespaced, etc.
30
- # @see topic_mapper internal docs
31
- mapped_topic_name = Karafka::App.config.topic_mapper.incoming(raw_topic_name)
32
- Routing::Router.find("#{group_id}_#{mapped_topic_name}")
33
- end
34
- end
35
-
36
- # Clears the whole topics cache for all the threads
37
- # This is used for in-development code reloading as we need to get rid of all the
38
- # preloaded and cached instances of objects to make it work
39
- def clear
40
- Thread
41
- .list
42
- .select { |thread| thread[PERSISTENCE_SCOPE] }
43
- .each { |thread| thread[PERSISTENCE_SCOPE].clear }
44
- end
45
- end
46
- end
47
- end
48
- end
@@ -1,36 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- # Responders namespace encapsulates all the internal responder implementation parts
5
- module Responders
6
- # Responders builder is used for finding (based on the consumer class name) a responder
7
- # that match the consumer. We use it when user does not provide a responder inside routing,
8
- # but he still names responder with the same convention (and namespaces) as consumer
9
- #
10
- # @example Matching responder exists
11
- # Karafka::Responder::Builder(NewEventsConsumer).build #=> NewEventsResponder
12
- # @example Matching responder does not exist
13
- # Karafka::Responder::Builder(NewBuildsConsumer).build #=> nil
14
- class Builder
15
- # @param consumer_class [Karafka::BaseConsumer, nil] descendant of
16
- # Karafka::BaseConsumer
17
- # @example Tries to find a responder that matches a given consumer. If nothing found,
18
- # will return nil (nil is accepted, because it means that a given consumer don't
19
- # pipe stuff further on)
20
- def initialize(consumer_class)
21
- @consumer_class = consumer_class
22
- end
23
-
24
- # Tries to figure out a responder based on a consumer class name
25
- # @return [Class] Responder class (not an instance)
26
- # @return [nil] or nil if there's no matching responding class
27
- def build
28
- Helpers::ClassMatcher.new(
29
- @consumer_class,
30
- from: 'Consumer',
31
- to: 'Responder'
32
- ).match
33
- end
34
- end
35
- end
36
- end
@@ -1,55 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- module Responders
5
- # Topic describes a single topic on which we want to respond with responding requirements
6
- # @example Define topic (required by default)
7
- # Karafka::Responders::Topic.new(:topic_name, {}) #=> #<Karafka::Responders::Topic...
8
- # @example Define optional topic
9
- # Karafka::Responders::Topic.new(:topic_name, required: false)
10
- class Topic
11
- # Name of the topic on which we want to respond
12
- attr_reader :name
13
-
14
- # @param name [Symbol, String] name of a topic on which we want to respond
15
- # @param options [Hash] non-default options for this topic
16
- # @return [Karafka::Responders::Topic] topic description object
17
- def initialize(name, options)
18
- @name = name.to_s
19
- @options = options
20
- end
21
-
22
- # @return [Boolean] is this a required topic (if not, it is optional)
23
- def required?
24
- @options.key?(:required) ? @options[:required] : true
25
- end
26
-
27
- # @return [Boolean] was usage of this topic registered or not
28
- def registered?
29
- @options[:registered] == true
30
- end
31
-
32
- # @return [Class] Class to use to serialize messages for this topic
33
- def serializer
34
- @options[:serializer]
35
- end
36
-
37
- # @return [Boolean] do we want to use async producer. Defaults to false as the sync producer
38
- # is safer and introduces less problems
39
- def async?
40
- @options.key?(:async) ? @options[:async] : false
41
- end
42
-
43
- # @return [Hash] hash with this topic attributes and options
44
- def to_h
45
- {
46
- name: name,
47
- required: required?,
48
- registered: registered?,
49
- serializer: serializer,
50
- async: async?
51
- }
52
- end
53
- end
54
- end
55
- end
@@ -1,53 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- module Routing
5
- # Default topic mapper that does not remap things
6
- # Mapper can be used for Kafka providers that require namespaced topic names. Instead of being
7
- # provider dependent, we can then define mapper and use internally "pure" topic names in
8
- # routes and responders
9
- #
10
- # @example Mapper for mapping prefixed topics
11
- # class MyMapper
12
- # PREFIX = "my_user_name."
13
- #
14
- # def incoming(topic)
15
- # topic.to_s.gsub(PREFIX, '')
16
- # end
17
- #
18
- # def outgoing(topic)
19
- # "#{PREFIX}#{topic}"
20
- # end
21
- # end
22
- #
23
- # @example Mapper for replacing "." with "_" in topic names
24
- # class MyMapper
25
- # PREFIX = "my_user_name."
26
- #
27
- # def incoming(topic)
28
- # topic.to_s.gsub('.', '_')
29
- # end
30
- #
31
- # def outgoing(topic)
32
- # topic.to_s.gsub('_', '.')
33
- # end
34
- # end
35
- class TopicMapper
36
- # @param topic [String, Symbol] topic
37
- # @return [String, Symbol] same topic as on input
38
- # @example
39
- # incoming('topic_name') #=> 'topic_name'
40
- def incoming(topic)
41
- topic
42
- end
43
-
44
- # @param topic [String, Symbol] topic
45
- # @return [String, Symbol] same topic as on input
46
- # @example
47
- # outgoing('topic_name') #=> 'topic_name'
48
- def outgoing(topic)
49
- topic
50
- end
51
- end
52
- end
53
- end
@@ -1,31 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- # Module for all supported by default serialization and deserialization ways
5
- module Serialization
6
- module Json
7
- # Default Karafka Json serializer for serializing data
8
- class Serializer
9
- # @param content [Object] any object that we want to convert to a json string
10
- # @return [String] Valid JSON string containing serialized data
11
- # @raise [Karafka::Errors::SerializationError] raised when we don't have a way to
12
- # serialize provided data to json
13
- # @note When string is passed to this method, we assume that it is already a json
14
- # string and we don't serialize it again. This allows us to serialize data before
15
- # it is being forwarded to this serializer if we want to have a custom (not that simple)
16
- # json serialization
17
- #
18
- # @example From an ActiveRecord object
19
- # Serializer.call(Repository.first) #=> "{\"repository\":{\"id\":\"04b504e0\"}}"
20
- # @example From a string (no changes)
21
- # Serializer.call("{\"a\":1}") #=> "{\"a\":1}"
22
- def call(content)
23
- return content if content.is_a?(String)
24
- return content.to_json if content.respond_to?(:to_json)
25
-
26
- raise Karafka::Errors::SerializationError, content
27
- end
28
- end
29
- end
30
- end
31
- end
@@ -1,36 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- module Setup
5
- # Configurators are used to post setup some of the components of Karafka after the core
6
- # framework is initialized
7
- module Configurators
8
- # Class responsible for setting up WaterDrop configuration
9
- class WaterDrop
10
- # Sets up a WaterDrop settings
11
- # @param config [Karafka::Setup::Config] Config we can user to setup things
12
- # @note This will also inject Karafka monitor as a default monitor into WaterDrop,
13
- # so we have the same monitor within whole Karafka framework (same with logger)
14
- def call(config)
15
- ::WaterDrop.setup do |water_config|
16
- water_config.deliver = true
17
-
18
- config.to_h.reject { |k, _v| k == :kafka }.each do |k, v|
19
- key_assignment = :"#{k}="
20
- next unless water_config.respond_to?(key_assignment)
21
-
22
- water_config.public_send(key_assignment, v)
23
- end
24
-
25
- config.kafka.to_h.each do |k, v|
26
- key_assignment = :"#{k}="
27
- next unless water_config.kafka.respond_to?(key_assignment)
28
-
29
- water_config.kafka.public_send(key_assignment, v)
30
- end
31
- end
32
- end
33
- end
34
- end
35
- end
36
- end
@@ -1,11 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- # Application responder from which all Karafka responders should inherit
4
- # You can rename it if it would conflict with your current code base (in case you're integrating
5
- # Karafka with other frameworks)
6
- class ApplicationResponder < Karafka::BaseResponder
7
- # This method needs to be implemented in each of the responders
8
- # def respond(data)
9
- # respond_to :topic, data.to_json
10
- # end
11
- end