karafka 1.4.0 → 2.0.10

Sign up to get free protection for your applications and to get access to all the features.
Files changed (172) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +89 -18
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +365 -1
  6. data/CONTRIBUTING.md +10 -19
  7. data/Gemfile +6 -0
  8. data/Gemfile.lock +56 -112
  9. data/LICENSE +17 -0
  10. data/LICENSE-COMM +89 -0
  11. data/LICENSE-LGPL +165 -0
  12. data/README.md +61 -68
  13. data/bin/benchmarks +85 -0
  14. data/bin/create_token +22 -0
  15. data/bin/integrations +272 -0
  16. data/bin/karafka +10 -0
  17. data/bin/scenario +29 -0
  18. data/bin/stress_many +13 -0
  19. data/bin/stress_one +13 -0
  20. data/certs/cert_chain.pem +26 -0
  21. data/certs/karafka-pro.pem +11 -0
  22. data/config/errors.yml +59 -38
  23. data/docker-compose.yml +10 -3
  24. data/karafka.gemspec +18 -21
  25. data/lib/active_job/karafka.rb +21 -0
  26. data/lib/active_job/queue_adapters/karafka_adapter.rb +26 -0
  27. data/lib/karafka/active_job/consumer.rb +26 -0
  28. data/lib/karafka/active_job/dispatcher.rb +38 -0
  29. data/lib/karafka/active_job/job_extensions.rb +34 -0
  30. data/lib/karafka/active_job/job_options_contract.rb +21 -0
  31. data/lib/karafka/active_job/routing/extensions.rb +33 -0
  32. data/lib/karafka/admin.rb +63 -0
  33. data/lib/karafka/app.rb +15 -20
  34. data/lib/karafka/base_consumer.rb +197 -31
  35. data/lib/karafka/cli/info.rb +44 -10
  36. data/lib/karafka/cli/install.rb +22 -12
  37. data/lib/karafka/cli/server.rb +17 -42
  38. data/lib/karafka/cli.rb +4 -3
  39. data/lib/karafka/connection/client.rb +379 -89
  40. data/lib/karafka/connection/listener.rb +250 -38
  41. data/lib/karafka/connection/listeners_batch.rb +24 -0
  42. data/lib/karafka/connection/messages_buffer.rb +84 -0
  43. data/lib/karafka/connection/pauses_manager.rb +46 -0
  44. data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
  45. data/lib/karafka/connection/rebalance_manager.rb +78 -0
  46. data/lib/karafka/contracts/base.rb +17 -0
  47. data/lib/karafka/contracts/config.rb +88 -11
  48. data/lib/karafka/contracts/consumer_group.rb +21 -184
  49. data/lib/karafka/contracts/consumer_group_topic.rb +35 -11
  50. data/lib/karafka/contracts/server_cli_options.rb +19 -18
  51. data/lib/karafka/contracts.rb +1 -1
  52. data/lib/karafka/env.rb +46 -0
  53. data/lib/karafka/errors.rb +21 -21
  54. data/lib/karafka/helpers/async.rb +33 -0
  55. data/lib/karafka/helpers/colorize.rb +20 -0
  56. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  57. data/lib/karafka/instrumentation/callbacks/error.rb +40 -0
  58. data/lib/karafka/instrumentation/callbacks/statistics.rb +41 -0
  59. data/lib/karafka/instrumentation/logger.rb +6 -10
  60. data/lib/karafka/instrumentation/logger_listener.rb +174 -0
  61. data/lib/karafka/instrumentation/monitor.rb +13 -61
  62. data/lib/karafka/instrumentation/notifications.rb +53 -0
  63. data/lib/karafka/instrumentation/proctitle_listener.rb +3 -3
  64. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
  65. data/lib/karafka/instrumentation/vendors/datadog/listener.rb +232 -0
  66. data/lib/karafka/instrumentation.rb +21 -0
  67. data/lib/karafka/licenser.rb +75 -0
  68. data/lib/karafka/messages/batch_metadata.rb +45 -0
  69. data/lib/karafka/messages/builders/batch_metadata.rb +39 -0
  70. data/lib/karafka/messages/builders/message.rb +39 -0
  71. data/lib/karafka/messages/builders/messages.rb +34 -0
  72. data/lib/karafka/{params/params.rb → messages/message.rb} +7 -12
  73. data/lib/karafka/messages/messages.rb +64 -0
  74. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  75. data/lib/karafka/messages/seek.rb +9 -0
  76. data/lib/karafka/patches/rdkafka/consumer.rb +22 -0
  77. data/lib/karafka/pro/active_job/consumer.rb +46 -0
  78. data/lib/karafka/pro/active_job/dispatcher.rb +61 -0
  79. data/lib/karafka/pro/active_job/job_options_contract.rb +32 -0
  80. data/lib/karafka/pro/base_consumer.rb +107 -0
  81. data/lib/karafka/pro/contracts/base.rb +21 -0
  82. data/lib/karafka/pro/contracts/consumer_group.rb +34 -0
  83. data/lib/karafka/pro/contracts/consumer_group_topic.rb +69 -0
  84. data/lib/karafka/pro/loader.rb +76 -0
  85. data/lib/karafka/pro/performance_tracker.rb +80 -0
  86. data/lib/karafka/pro/processing/coordinator.rb +85 -0
  87. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +38 -0
  88. data/lib/karafka/pro/processing/jobs_builder.rb +32 -0
  89. data/lib/karafka/pro/processing/partitioner.rb +58 -0
  90. data/lib/karafka/pro/processing/scheduler.rb +56 -0
  91. data/lib/karafka/pro/routing/builder_extensions.rb +30 -0
  92. data/lib/karafka/pro/routing/topic_extensions.rb +74 -0
  93. data/lib/karafka/pro.rb +13 -0
  94. data/lib/karafka/process.rb +1 -0
  95. data/lib/karafka/processing/coordinator.rb +103 -0
  96. data/lib/karafka/processing/coordinators_buffer.rb +54 -0
  97. data/lib/karafka/processing/executor.rb +126 -0
  98. data/lib/karafka/processing/executors_buffer.rb +88 -0
  99. data/lib/karafka/processing/jobs/base.rb +55 -0
  100. data/lib/karafka/processing/jobs/consume.rb +47 -0
  101. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  102. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  103. data/lib/karafka/processing/jobs_builder.rb +29 -0
  104. data/lib/karafka/processing/jobs_queue.rb +144 -0
  105. data/lib/karafka/processing/partitioner.rb +22 -0
  106. data/lib/karafka/processing/result.rb +37 -0
  107. data/lib/karafka/processing/scheduler.rb +22 -0
  108. data/lib/karafka/processing/worker.rb +91 -0
  109. data/lib/karafka/processing/workers_batch.rb +27 -0
  110. data/lib/karafka/railtie.rb +127 -0
  111. data/lib/karafka/routing/builder.rb +26 -23
  112. data/lib/karafka/routing/consumer_group.rb +37 -17
  113. data/lib/karafka/routing/consumer_mapper.rb +1 -2
  114. data/lib/karafka/routing/proxy.rb +9 -16
  115. data/lib/karafka/routing/router.rb +1 -1
  116. data/lib/karafka/routing/subscription_group.rb +53 -0
  117. data/lib/karafka/routing/subscription_groups_builder.rb +54 -0
  118. data/lib/karafka/routing/topic.rb +65 -24
  119. data/lib/karafka/routing/topics.rb +38 -0
  120. data/lib/karafka/runner.rb +51 -0
  121. data/lib/karafka/serialization/json/deserializer.rb +6 -15
  122. data/lib/karafka/server.rb +67 -26
  123. data/lib/karafka/setup/config.rb +153 -175
  124. data/lib/karafka/status.rb +14 -5
  125. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  126. data/lib/karafka/templates/karafka.rb.erb +17 -55
  127. data/lib/karafka/time_trackers/base.rb +19 -0
  128. data/lib/karafka/time_trackers/pause.rb +92 -0
  129. data/lib/karafka/time_trackers/poll.rb +65 -0
  130. data/lib/karafka/version.rb +1 -1
  131. data/lib/karafka.rb +46 -16
  132. data.tar.gz.sig +0 -0
  133. metadata +145 -171
  134. metadata.gz.sig +0 -0
  135. data/.github/FUNDING.yml +0 -3
  136. data/MIT-LICENCE +0 -18
  137. data/certs/mensfeld.pem +0 -25
  138. data/lib/karafka/attributes_map.rb +0 -62
  139. data/lib/karafka/backends/inline.rb +0 -16
  140. data/lib/karafka/base_responder.rb +0 -226
  141. data/lib/karafka/cli/flow.rb +0 -48
  142. data/lib/karafka/code_reloader.rb +0 -67
  143. data/lib/karafka/connection/api_adapter.rb +0 -161
  144. data/lib/karafka/connection/batch_delegator.rb +0 -55
  145. data/lib/karafka/connection/builder.rb +0 -18
  146. data/lib/karafka/connection/message_delegator.rb +0 -36
  147. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  148. data/lib/karafka/consumers/callbacks.rb +0 -71
  149. data/lib/karafka/consumers/includer.rb +0 -64
  150. data/lib/karafka/consumers/responders.rb +0 -24
  151. data/lib/karafka/consumers/single_params.rb +0 -15
  152. data/lib/karafka/contracts/responder_usage.rb +0 -54
  153. data/lib/karafka/fetcher.rb +0 -42
  154. data/lib/karafka/helpers/class_matcher.rb +0 -88
  155. data/lib/karafka/helpers/config_retriever.rb +0 -46
  156. data/lib/karafka/helpers/inflector.rb +0 -26
  157. data/lib/karafka/instrumentation/stdout_listener.rb +0 -140
  158. data/lib/karafka/params/batch_metadata.rb +0 -26
  159. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  160. data/lib/karafka/params/builders/params.rb +0 -38
  161. data/lib/karafka/params/builders/params_batch.rb +0 -25
  162. data/lib/karafka/params/params_batch.rb +0 -60
  163. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  164. data/lib/karafka/persistence/client.rb +0 -29
  165. data/lib/karafka/persistence/consumers.rb +0 -45
  166. data/lib/karafka/persistence/topics.rb +0 -48
  167. data/lib/karafka/responders/builder.rb +0 -36
  168. data/lib/karafka/responders/topic.rb +0 -55
  169. data/lib/karafka/routing/topic_mapper.rb +0 -53
  170. data/lib/karafka/serialization/json/serializer.rb +0 -31
  171. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  172. data/lib/karafka/templates/application_responder.rb.erb +0 -11
data/config/errors.yml CHANGED
@@ -1,39 +1,60 @@
1
1
  en:
2
- dry_validation:
3
- errors:
4
- invalid_broker_schema: >
5
- has an invalid format
6
- Expected schema, host and port number
7
- Example: kafka://127.0.0.1:9092 or kafka+ssl://127.0.0.1:9092
8
- invalid_certificate: >
9
- is not a valid certificate
10
- invalid_certificate_from_path: >
11
- is not a valid certificate
12
- invalid_private_key: >
13
- is not a valid private key
14
- max_timeout_size_for_exponential: >
15
- pause_timeout cannot be more than pause_max_timeout
16
- max_wait_time_limit:
17
- max_wait_time cannot be more than socket_timeout
18
- topics_names_not_unique: >
19
- all topic names within a single consumer group must be unique
20
- ssl_client_cert_with_ssl_client_cert_key: >
21
- Both ssl_client_cert and ssl_client_cert_key need to be provided
22
- ssl_client_cert_key_with_ssl_client_cert: >
23
- Both ssl_client_cert_key and ssl_client_cert need to be provided
24
- ssl_client_cert_chain_with_ssl_client_cert: >
25
- Both ssl_client_cert_chain and ssl_client_cert need to be provided
26
- ssl_client_cert_chain_with_ssl_client_cert_key: >
27
- Both ssl_client_cert_chain and ssl_client_cert_key need to be provided
28
- ssl_client_cert_key_password_with_ssl_client_cert_key: >
29
- Both ssl_client_cert_key_password and ssl_client_cert_key need to be provided
30
- does_not_respond_to_token: >
31
- needs to respond to a #token method
32
- required_usage_count: >
33
- Given topic must be used at least once
34
- pid_already_exists: >
35
- Pidfile already exists
36
- consumer_groups_inclusion: >
37
- Unknown consumer group
38
- does_not_exist:
39
- Given file does not exist or cannot be read
2
+ validations:
3
+ config:
4
+ missing: needs to be present
5
+ client_id_format: 'needs to be a string with a Kafka accepted format'
6
+ license.entity_format: needs to be a string
7
+ license.token_format: needs to be either false or a string
8
+ license.expires_on_format: needs to be a valid date
9
+ concurrency_format: needs to be an integer bigger than 0
10
+ consumer_mapper_format: needs to be present
11
+ consumer_persistence_format: needs to be either true or false
12
+ pause_timeout_format: needs to be an integer bigger than 0
13
+ pause_max_timeout_format: needs to be an integer bigger than 0
14
+ pause_with_exponential_backoff_format: needs to be either true or false
15
+ shutdown_timeout_format: needs to be an integer bigger than 0
16
+ max_wait_time_format: needs to be an integer bigger than 0
17
+ kafka_format: needs to be a filled hash
18
+ internal.status_format: needs to be present
19
+ internal.process_format: needs to be present
20
+ internal.routing.builder_format: needs to be present
21
+ internal.routing.subscription_groups_builder_format: needs to be present
22
+ key_must_be_a_symbol: All keys under the kafka settings scope need to be symbols
23
+ max_timeout_vs_pause_max_timeout: pause_timeout must be less or equal to pause_max_timeout
24
+ shutdown_timeout_vs_max_wait_time: shutdown_timeout must be more than max_wait_time
25
+
26
+ server_cli_options:
27
+ missing: needs to be present
28
+ consumer_groups_inclusion: Unknown consumer group
29
+
30
+ consumer_group_topic:
31
+ missing: needs to be present
32
+ name_format: 'needs to be a string with a Kafka accepted format'
33
+ deserializer_format: needs to be present
34
+ manual_offset_management_format: needs to be either true or false
35
+ consumer_format: needs to be present
36
+ id_format: 'needs to be a string with a Kafka accepted format'
37
+ initial_offset_format: needs to be either earliest or latest
38
+ subscription_group_format: must be nil or a non-empty string
39
+
40
+ consumer_group:
41
+ missing: needs to be present
42
+ topics_names_not_unique: all topic names within a single consumer group must be unique
43
+ id_format: 'needs to be a string with a Kafka accepted format'
44
+ topics_format: needs to be a non-empty array
45
+
46
+ job_options:
47
+ missing: needs to be present
48
+ dispatch_method_format: needs to be either :produce_async or :produce_sync
49
+ partitioner_format: 'needs to respond to #call'
50
+ partition_key_type_format: 'needs to be either :key or :partition_key'
51
+
52
+ test:
53
+ missing: needs to be present
54
+ id_format: needs to be a String
55
+
56
+ pro_consumer_group_topic:
57
+ consumer_format: needs to inherit from Karafka::Pro::BaseConsumer and not Karafka::Consumer
58
+ virtual_partitions.partitioner_respond_to_call: needs to be defined and needs to respond to `#call`
59
+ virtual_partitions.max_partitions_format: needs to be equl or more than 1
60
+ manual_offset_management_not_with_virtual_partitions: cannot be used together with Virtual Partitions
data/docker-compose.yml CHANGED
@@ -1,17 +1,24 @@
1
1
  version: '2'
2
2
  services:
3
3
  zookeeper:
4
+ container_name: karafka_20_zookeeper
4
5
  image: wurstmeister/zookeeper
5
6
  ports:
6
- - "2181:2181"
7
+ - '2181:2181'
7
8
  kafka:
8
- image: wurstmeister/kafka:1.0.1
9
+ container_name: karafka_20_kafka
10
+ image: wurstmeister/kafka
9
11
  ports:
10
- - "9092:9092"
12
+ - '9092:9092'
11
13
  environment:
12
14
  KAFKA_ADVERTISED_HOST_NAME: localhost
13
15
  KAFKA_ADVERTISED_PORT: 9092
14
16
  KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
15
17
  KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
18
+ KAFKA_CREATE_TOPICS:
19
+ "benchmarks_00_01:1:1,\
20
+ benchmarks_00_05:5:1,\
21
+ benchmarks_01_05:5:1,\
22
+ benchmarks_00_10:10:1"
16
23
  volumes:
17
24
  - /var/run/docker.sock:/var/run/docker.sock
data/karafka.gemspec CHANGED
@@ -5,39 +5,36 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
5
5
 
6
6
  require 'karafka/version'
7
7
 
8
- # rubocop:disable Metrics/BlockLength
9
8
  Gem::Specification.new do |spec|
10
9
  spec.name = 'karafka'
11
10
  spec.version = ::Karafka::VERSION
12
11
  spec.platform = Gem::Platform::RUBY
13
- spec.authors = ['Maciej Mensfeld', 'Pavlo Vavruk', 'Adam Gwozdowski']
14
- spec.email = %w[maciej@mensfeld.pl pavlo.vavruk@gmail.com adam99g@gmail.com]
15
- spec.homepage = 'https://github.com/karafka/karafka'
16
- spec.summary = 'Ruby based framework for working with Apache Kafka'
12
+ spec.authors = ['Maciej Mensfeld']
13
+ spec.email = %w[contact@karafka.io]
14
+ spec.homepage = 'https://karafka.io'
15
+ spec.summary = 'Efficient Kafka processing framework for Ruby and Rails'
17
16
  spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
18
- spec.license = 'MIT'
19
-
20
- spec.add_dependency 'dry-configurable', '~> 0.8'
21
- spec.add_dependency 'dry-inflector', '~> 0.1'
22
- spec.add_dependency 'dry-monitor', '~> 0.3'
23
- spec.add_dependency 'dry-validation', '~> 1.2'
24
- spec.add_dependency 'envlogic', '~> 1.1'
25
- spec.add_dependency 'irb', '~> 1.0'
26
- spec.add_dependency 'rake', '>= 11.3'
27
- spec.add_dependency 'ruby-kafka', '>= 1.0.0'
17
+ spec.licenses = ['LGPL-3.0', 'Commercial']
18
+
19
+ spec.add_dependency 'karafka-core', '>= 2.0.2', '< 3.0.0'
20
+ spec.add_dependency 'rdkafka', '>= 0.12'
28
21
  spec.add_dependency 'thor', '>= 0.20'
29
- spec.add_dependency 'waterdrop', '~> 1.4.0'
30
- spec.add_dependency 'zeitwerk', '~> 2.1'
22
+ spec.add_dependency 'waterdrop', '>= 2.4.1', '< 3.0.0'
23
+ spec.add_dependency 'zeitwerk', '~> 2.3'
31
24
 
32
- spec.required_ruby_version = '>= 2.5.0'
25
+ spec.required_ruby_version = '>= 2.7.0'
33
26
 
34
27
  if $PROGRAM_NAME.end_with?('gem')
35
28
  spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
36
29
  end
37
30
 
38
- spec.cert_chain = %w[certs/mensfeld.pem]
31
+ spec.cert_chain = %w[certs/cert_chain.pem]
39
32
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
40
- spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
33
+ spec.executables = %w[karafka]
41
34
  spec.require_paths = %w[lib]
35
+
36
+ spec.metadata = {
37
+ 'source_code_uri' => 'https://github.com/karafka/karafka',
38
+ 'rubygems_mfa_required' => 'true'
39
+ }
42
40
  end
43
- # rubocop:enable Metrics/BlockLength
@@ -0,0 +1,21 @@
1
+ # frozen_string_literal: true
2
+
3
+ begin
4
+ require 'active_job'
5
+ require_relative 'queue_adapters/karafka_adapter'
6
+
7
+ module ActiveJob
8
+ # Namespace for usage simplification outside of Rails where Railtie will not kick in.
9
+ # That way a require 'active_job/karafka' should be enough to use it
10
+ module Karafka
11
+ end
12
+ end
13
+
14
+ # We extend routing builder by adding a simple wrapper for easier jobs topics defining
15
+ # This needs to be extended here as it is going to be used in karafka routes, hence doing that in
16
+ # the railtie initializer would be too late
17
+ ::Karafka::Routing::Builder.include ::Karafka::ActiveJob::Routing::Extensions
18
+ ::Karafka::Routing::Proxy.include ::Karafka::ActiveJob::Routing::Extensions
19
+ rescue LoadError
20
+ # We extend ActiveJob stuff in the railtie
21
+ end
@@ -0,0 +1,26 @@
1
+ # frozen_string_literal: true
2
+
3
+ # ActiveJob components to allow for jobs consumption with Karafka
4
+ module ActiveJob
5
+ # ActiveJob queue adapters
6
+ module QueueAdapters
7
+ # Karafka adapter for enqueuing jobs
8
+ # This is here for ease of integration with ActiveJob.
9
+ class KarafkaAdapter
10
+ # Enqueues the job using the configured dispatcher
11
+ #
12
+ # @param job [Object] job that should be enqueued
13
+ def enqueue(job)
14
+ ::Karafka::App.config.internal.active_job.dispatcher.call(job)
15
+ end
16
+
17
+ # Raises info, that Karafka backend does not support scheduling jobs
18
+ #
19
+ # @param _job [Object] job we cannot enqueue
20
+ # @param _timestamp [Time] time when job should run
21
+ def enqueue_at(_job, _timestamp)
22
+ raise NotImplementedError, 'This queueing backend does not support scheduling jobs.'
23
+ end
24
+ end
25
+ end
26
+ end
@@ -0,0 +1,26 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module ActiveJob
5
+ # This is the consumer for ActiveJob that eats the messages enqueued with it one after another.
6
+ # It marks the offset after each message, so we make sure, none of the jobs is executed twice
7
+ class Consumer < ::Karafka::BaseConsumer
8
+ # Executes the ActiveJob logic
9
+ # @note ActiveJob does not support batches, so we just run one message after another
10
+ def consume
11
+ messages.each do |message|
12
+ break if Karafka::App.stopping?
13
+
14
+ ::ActiveJob::Base.execute(
15
+ # We technically speaking could set this as deserializer and reference it from the
16
+ # message instead of using the `#raw_payload`. This is not done on purpose to simplify
17
+ # the ActiveJob setup here
18
+ ::ActiveSupport::JSON.decode(message.raw_payload)
19
+ )
20
+
21
+ mark_as_consumed(message)
22
+ end
23
+ end
24
+ end
25
+ end
26
+ end
@@ -0,0 +1,38 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module ActiveJob
5
+ # Dispatcher that sends the ActiveJob job to a proper topic based on the queue name
6
+ class Dispatcher
7
+ # Defaults for dispatching
8
+ # They can be updated by using `#karafka_options` on the job
9
+ DEFAULTS = {
10
+ dispatch_method: :produce_async
11
+ }.freeze
12
+
13
+ private_constant :DEFAULTS
14
+
15
+ # @param job [ActiveJob::Base] job
16
+ def call(job)
17
+ ::Karafka.producer.public_send(
18
+ fetch_option(job, :dispatch_method, DEFAULTS),
19
+ topic: job.queue_name,
20
+ payload: ::ActiveSupport::JSON.encode(job.serialize)
21
+ )
22
+ end
23
+
24
+ private
25
+
26
+ # @param job [ActiveJob::Base] job
27
+ # @param key [Symbol] key we want to fetch
28
+ # @param defaults [Hash]
29
+ # @return [Object] options we are interested in
30
+ def fetch_option(job, key, defaults)
31
+ job
32
+ .class
33
+ .karafka_options
34
+ .fetch(key, defaults.fetch(key))
35
+ end
36
+ end
37
+ end
38
+ end
@@ -0,0 +1,34 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module ActiveJob
5
+ # Allows for setting karafka specific options in ActiveJob jobs
6
+ module JobExtensions
7
+ class << self
8
+ # Defines all the needed accessors and sets defaults
9
+ # @param klass [ActiveJob::Base] active job base
10
+ def extended(klass)
11
+ klass.class_attribute :_karafka_options
12
+ klass._karafka_options = {}
13
+ end
14
+ end
15
+
16
+ # @param new_options [Hash] additional options that allow for jobs Karafka related options
17
+ # customization
18
+ # @return [Hash] karafka options
19
+ def karafka_options(new_options = {})
20
+ return _karafka_options if new_options.empty?
21
+
22
+ # Make sure, that karafka options that someone wants to use are valid before assigning
23
+ # them
24
+ App.config.internal.active_job.job_options_contract.validate!(new_options)
25
+
26
+ new_options.each do |name, value|
27
+ _karafka_options[name] = value
28
+ end
29
+
30
+ _karafka_options
31
+ end
32
+ end
33
+ end
34
+ end
@@ -0,0 +1,21 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module ActiveJob
5
+ # Contract for validating the options that can be altered with `#karafka_options` per job class
6
+ # @note We keep this in the `Karafka::ActiveJob` namespace instead of `Karafka::Contracts` as
7
+ # we want to keep ActiveJob related Karafka components outside of the core Karafka code and
8
+ # all in the same place
9
+ class JobOptionsContract < Contracts::Base
10
+ configure do |config|
11
+ config.error_messages = YAML.safe_load(
12
+ File.read(
13
+ File.join(Karafka.gem_root, 'config', 'errors.yml')
14
+ )
15
+ ).fetch('en').fetch('validations').fetch('job_options')
16
+ end
17
+
18
+ optional(:dispatch_method) { |val| %i[produce_async produce_sync].include?(val) }
19
+ end
20
+ end
21
+ end
@@ -0,0 +1,33 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ # ActiveJob related Karafka stuff
5
+ module ActiveJob
6
+ # Karafka routing ActiveJob related components
7
+ module Routing
8
+ # Routing extensions for ActiveJob
9
+ module Extensions
10
+ # This method simplifies routes definition for ActiveJob topics / queues by auto-injecting
11
+ # the consumer class
12
+ # @param name [String, Symbol] name of the topic where ActiveJobs jobs should go
13
+ # @param block [Proc] block that we can use for some extra configuration
14
+ def active_job_topic(name, &block)
15
+ topic(name) do
16
+ consumer App.config.internal.active_job.consumer_class
17
+
18
+ next unless block
19
+
20
+ instance_eval(&block)
21
+
22
+ target.tags << :active_job
23
+
24
+ # This is handled by our custom ActiveJob consumer
25
+ # Without this, default behaviour would cause messages to skip upon shutdown as the
26
+ # offset would be committed for the last message
27
+ manual_offset_management true
28
+ end
29
+ end
30
+ end
31
+ end
32
+ end
33
+ end
@@ -0,0 +1,63 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ # Simple admin actions that we can perform via Karafka on our Kafka cluster
5
+ #
6
+ # @note It always initializes a new admin instance as we want to ensure it is always closed
7
+ # Since admin actions are not performed that often, that should be ok.
8
+ #
9
+ # @note It always uses the primary defined cluster and does not support multi-cluster work.
10
+ # If you need this, just replace the cluster info for the time you use this
11
+ module Admin
12
+ class << self
13
+ # Creates Kafka topic with given settings
14
+ #
15
+ # @param name [String] topic name
16
+ # @param partitions [Integer] number of partitions we expect
17
+ # @param replication_factor [Integer] number of replicas
18
+ # @param topic_config [Hash] topic config details as described here:
19
+ # https://kafka.apache.org/documentation/#topicconfigs
20
+ def create_topic(name, partitions, replication_factor, topic_config = {})
21
+ with_admin do |admin|
22
+ admin.create_topic(name, partitions, replication_factor, topic_config)
23
+
24
+ sleep(0.1) until topics_names.include?(name)
25
+ end
26
+ end
27
+
28
+ # Deletes a given topic
29
+ #
30
+ # @param name [String] topic name
31
+ def delete_topic(name)
32
+ with_admin do |admin|
33
+ admin.delete_topic(name)
34
+
35
+ sleep(0.1) while topics_names.include?(name)
36
+ end
37
+ end
38
+
39
+ # @return [Rdkafka::Metadata] cluster metadata info
40
+ def cluster_info
41
+ with_admin do |admin|
42
+ Rdkafka::Metadata.new(admin.instance_variable_get('@native_kafka'))
43
+ end
44
+ end
45
+
46
+ private
47
+
48
+ # @return [Array<String>] topics names
49
+ def topics_names
50
+ cluster_info.topics.map { |topic| topic.fetch(:topic_name) }
51
+ end
52
+
53
+ # Creates admin instance and yields it. After usage it closes the admin instance
54
+ def with_admin
55
+ admin = ::Rdkafka::Config.new(Karafka::App.config.kafka).admin
56
+ result = yield(admin)
57
+ result
58
+ ensure
59
+ admin&.close
60
+ end
61
+ end
62
+ end
63
+ end
data/lib/karafka/app.rb CHANGED
@@ -6,31 +6,24 @@ module Karafka
6
6
  extend Setup::Dsl
7
7
 
8
8
  class << self
9
- # Sets up all the internal components and bootstrap whole app
10
- # We need to know details about consumers in order to setup components,
11
- # that's why we don't setup them after std setup is done
12
- # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
13
- # doesn't match with the config contract
14
- def boot!
15
- initialize!
16
- Setup::Config.validate!
17
- Setup::Config.setup_components
18
- initialized!
19
- end
20
-
21
- # @return [Karafka::Routing::Builder] consumers builder instance
9
+ # @return [Karafka::Routing::Builder] consumers builder instance alias
22
10
  def consumer_groups
23
- config.internal.routing_builder
11
+ config
12
+ .internal
13
+ .routing
14
+ .builder
24
15
  end
25
16
 
26
- # Triggers reload of all cached Karafka app components, so we can use in-process
27
- # in-development hot code reloading without Karafka process restart
28
- def reload
29
- Karafka::Persistence::Consumers.clear
30
- Karafka::Persistence::Topics.clear
31
- config.internal.routing_builder.reload
17
+ # @return [Array<Karafka::Routing::SubscriptionGroup>] active subscription groups
18
+ def subscription_groups
19
+ consumer_groups
20
+ .active
21
+ .flat_map(&:subscription_groups)
32
22
  end
33
23
 
24
+ # Just a nicer name for the consumer groups
25
+ alias routes consumer_groups
26
+
34
27
  Status.instance_methods(false).each do |delegated|
35
28
  define_method(delegated) do
36
29
  App.config.internal.status.send(delegated)
@@ -42,7 +35,9 @@ module Karafka
42
35
  root
43
36
  env
44
37
  logger
38
+ producer
45
39
  monitor
40
+ pro?
46
41
  ].each do |delegated|
47
42
  define_method(delegated) do
48
43
  Karafka.send(delegated)