karafka 1.1.0 → 1.3.0

Files changed (114)
  1. checksums.yaml +5 -5
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.github/FUNDING.yml +3 -0
  7. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  8. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  9. data/.gitignore +1 -0
  10. data/.ruby-version +1 -1
  11. data/.travis.yml +35 -16
  12. data/CHANGELOG.md +151 -2
  13. data/CONTRIBUTING.md +6 -7
  14. data/Gemfile +3 -3
  15. data/Gemfile.lock +96 -70
  16. data/README.md +29 -23
  17. data/bin/karafka +1 -1
  18. data/certs/mensfeld.pem +25 -0
  19. data/config/errors.yml +38 -5
  20. data/karafka.gemspec +19 -10
  21. data/lib/karafka.rb +15 -12
  22. data/lib/karafka/app.rb +19 -18
  23. data/lib/karafka/attributes_map.rb +15 -14
  24. data/lib/karafka/backends/inline.rb +1 -2
  25. data/lib/karafka/base_consumer.rb +57 -0
  26. data/lib/karafka/base_responder.rb +72 -31
  27. data/lib/karafka/cli.rb +1 -1
  28. data/lib/karafka/cli/console.rb +11 -9
  29. data/lib/karafka/cli/flow.rb +0 -1
  30. data/lib/karafka/cli/info.rb +3 -1
  31. data/lib/karafka/cli/install.rb +29 -8
  32. data/lib/karafka/cli/server.rb +11 -7
  33. data/lib/karafka/code_reloader.rb +67 -0
  34. data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +67 -24
  35. data/lib/karafka/connection/batch_delegator.rb +51 -0
  36. data/lib/karafka/connection/builder.rb +16 -0
  37. data/lib/karafka/connection/client.rb +117 -0
  38. data/lib/karafka/connection/listener.rb +37 -17
  39. data/lib/karafka/connection/message_delegator.rb +36 -0
  40. data/lib/karafka/consumers/callbacks.rb +71 -0
  41. data/lib/karafka/consumers/includer.rb +63 -0
  42. data/lib/karafka/consumers/metadata.rb +10 -0
  43. data/lib/karafka/consumers/responders.rb +24 -0
  44. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  45. data/lib/karafka/contracts.rb +10 -0
  46. data/lib/karafka/contracts/config.rb +21 -0
  47. data/lib/karafka/contracts/consumer_group.rb +206 -0
  48. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  49. data/lib/karafka/contracts/responder_usage.rb +54 -0
  50. data/lib/karafka/contracts/server_cli_options.rb +29 -0
  51. data/lib/karafka/errors.rb +23 -15
  52. data/lib/karafka/fetcher.rb +6 -12
  53. data/lib/karafka/helpers/class_matcher.rb +19 -9
  54. data/lib/karafka/helpers/config_retriever.rb +3 -3
  55. data/lib/karafka/helpers/inflector.rb +26 -0
  56. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  57. data/lib/karafka/instrumentation/logger.rb +57 -0
  58. data/lib/karafka/instrumentation/monitor.rb +70 -0
  59. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  60. data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
  61. data/lib/karafka/params/builders/metadata.rb +33 -0
  62. data/lib/karafka/params/builders/params.rb +36 -0
  63. data/lib/karafka/params/builders/params_batch.rb +25 -0
  64. data/lib/karafka/params/metadata.rb +35 -0
  65. data/lib/karafka/params/params.rb +35 -95
  66. data/lib/karafka/params/params_batch.rb +38 -18
  67. data/lib/karafka/patches/ruby_kafka.rb +25 -12
  68. data/lib/karafka/persistence/client.rb +29 -0
  69. data/lib/karafka/persistence/consumers.rb +45 -0
  70. data/lib/karafka/persistence/topics.rb +48 -0
  71. data/lib/karafka/process.rb +5 -8
  72. data/lib/karafka/responders/builder.rb +15 -14
  73. data/lib/karafka/responders/topic.rb +6 -8
  74. data/lib/karafka/routing/builder.rb +37 -9
  75. data/lib/karafka/routing/consumer_group.rb +1 -1
  76. data/lib/karafka/routing/consumer_mapper.rb +10 -9
  77. data/lib/karafka/routing/proxy.rb +10 -1
  78. data/lib/karafka/routing/router.rb +1 -1
  79. data/lib/karafka/routing/topic.rb +8 -12
  80. data/lib/karafka/routing/topic_mapper.rb +16 -18
  81. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  82. data/lib/karafka/serialization/json/serializer.rb +31 -0
  83. data/lib/karafka/server.rb +45 -24
  84. data/lib/karafka/setup/config.rb +95 -37
  85. data/lib/karafka/setup/configurators/water_drop.rb +12 -5
  86. data/lib/karafka/setup/dsl.rb +21 -0
  87. data/lib/karafka/status.rb +7 -3
  88. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.erb} +2 -2
  89. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  90. data/lib/karafka/templates/karafka.rb.erb +92 -0
  91. data/lib/karafka/version.rb +1 -1
  92. metadata +126 -57
  93. metadata.gz.sig +0 -0
  94. data/.github/ISSUE_TEMPLATE.md +0 -2
  95. data/lib/karafka/base_controller.rb +0 -60
  96. data/lib/karafka/connection/consumer.rb +0 -121
  97. data/lib/karafka/connection/processor.rb +0 -61
  98. data/lib/karafka/controllers/callbacks.rb +0 -54
  99. data/lib/karafka/controllers/includer.rb +0 -51
  100. data/lib/karafka/controllers/responders.rb +0 -19
  101. data/lib/karafka/loader.rb +0 -29
  102. data/lib/karafka/logger.rb +0 -53
  103. data/lib/karafka/monitor.rb +0 -98
  104. data/lib/karafka/parsers/json.rb +0 -38
  105. data/lib/karafka/patches/dry_configurable.rb +0 -31
  106. data/lib/karafka/persistence/consumer.rb +0 -25
  107. data/lib/karafka/persistence/controller.rb +0 -38
  108. data/lib/karafka/schemas/config.rb +0 -21
  109. data/lib/karafka/schemas/consumer_group.rb +0 -65
  110. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  111. data/lib/karafka/schemas/responder_usage.rb +0 -39
  112. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  113. data/lib/karafka/setup/configurators/base.rb +0 -35
  114. data/lib/karafka/templates/karafka.rb.example +0 -41
data/README.md CHANGED
@@ -2,41 +2,56 @@
 
 [![Build Status](https://travis-ci.org/karafka/karafka.svg?branch=master)](https://travis-ci.org/karafka/karafka)
 
+## New release in progress!
+
+**Note**: Documentation presented here refers to Karafka `1.3.0`.
+
+If you are looking for the documentation for Karafka `1.2.*`, it can be found [here](https://github.com/karafka/wiki/tree/1.2).
+
+## About Karafka
+
 Framework used to simplify Apache Kafka based Ruby applications development.
 
-It allows programmers to use approach similar to standard HTTP conventions (```params``` and ```params_batch```) when working with asynchronous Kafka messages.
+Karafka allows you to capture everything that happens in your systems in large scale, providing you with a seamless and stable core for consuming and processing this data, without having to focus on things that are not your business domain.
 
 Karafka not only handles incoming messages but also provides tools for building complex data-flow applications that receive and send messages.
 
 ## How does it work
 
-Karafka provides a higher-level abstraction that allows you to focus on your business logic development, instead of focusing on implementing lower level abstraction layers. It provides developers with a set of tools that are dedicated for building multi-topic applications similarly to how Rails applications are being built.
+Karafka provides a higher-level abstraction that allows you to focus on your business logic development, instead of focusing on implementing lower level abstraction layers. It provides developers with a set of tools that are dedicated for building multi-topic applications similar to how Rails applications are being built.
 
 ### Some things you might wonder about:
 
-- You can integrate Karafka with any Ruby based application.
+- You can integrate Karafka with **any** Ruby-based application.
 - Karafka does **not** require Sidekiq or any other third party software (apart from Kafka itself).
-- Karafka works with Ruby on Rails but it is a standalone framework that can work without it.
-- Karafka has a minimal set of dependencies, so adding it won't be a huge burden for your already existing applications.
+- Karafka works with Ruby on Rails but it is a **standalone** framework that can work without it.
+- Karafka has a **minimal** set of dependencies, so adding it won't be a huge burden for your already existing applications.
+- Karafka processes can be executed for a **given subset** of consumer groups and/or topics, so you can fine tune it depending on your business logic.
 
 Karafka based applications can be easily deployed to any type of infrastructure, including those based on:
 
 * Heroku
 * Capistrano
 * Docker
+* Terraform
 
 ## Support
 
-Karafka has a [Wiki pages](https://github.com/karafka/karafka/wiki) for almost everything and a pretty decent [FAQ](https://github.com/karafka/karafka/wiki/FAQ). It covers the whole installation, setup and deployment along with other useful details on how to run Karafka.
+Karafka has a [Wiki pages](https://github.com/karafka/karafka/wiki) for almost everything and a pretty decent [FAQ](https://github.com/karafka/karafka/wiki/FAQ). It covers the whole installation, setup, and deployment along with other useful details on how to run Karafka.
 
 If you have any questions about using Karafka, feel free to join our [Gitter](https://gitter.im/karafka/karafka) chat channel.
 
 ## Getting started
 
+If you're completely new to the subject, you can start with our "Kafka on Rails" articles series, that will get you up and running with the terminology and basic ideas behind using Kafka:
+
+- [Kafka on Rails: Using Kafka with Ruby on Rails – Part 1 – Kafka basics and its advantages](https://mensfeld.pl/2017/11/kafka-on-rails-using-kafka-with-ruby-on-rails-part-1-kafka-basics-and-its-advantages/)
+- [Kafka on Rails: Using Kafka with Ruby on Rails – Part 2 – Getting started with Ruby and Kafka](https://mensfeld.pl/2018/01/kafka-on-rails-using-kafka-with-ruby-on-rails-part-2-getting-started-with-ruby-and-kafka/)
+
 If you want to get started with Kafka and Karafka as fast as possible, then the best idea is to just clone our example repository:
 
 ```bash
-git clone https://github.com/karafka/karafka-example-app ./example_app
+git clone https://github.com/karafka/example-app ./example_app
 ```
 
 then, just bundle install all the dependencies:

@@ -46,7 +61,7 @@ cd ./example_app
 bundle install
 ```
 
-and follow the instructions from the [example app Wiki](https://github.com/karafka/karafka-example-app/blob/master/README.md).
+and follow the instructions from the [example app Wiki](https://github.com/karafka/example-app/blob/master/README.md).
 
 **Note**: you need to ensure, that you have Kafka up and running and you need to configure Kafka seed_brokers in the ```karafka.rb``` file.
 
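The note above points at ```karafka.rb```; for orientation, a minimal 1.3-style boot file could look like the sketch below (class, topic, and consumer names are illustrative, not part of this diff):

```ruby
# frozen_string_literal: true

# Minimal karafka.rb sketch (illustrative names)
class App < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    # The seed_brokers the note above asks you to configure
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
  end
end

App.consumer_groups.draw do
  topic :example do
    consumer ExampleConsumer # hypothetical Karafka::BaseConsumer subclass
  end
end

App.boot!
```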
@@ -62,26 +77,17 @@ Karafka framework and Karafka team are __not__ related to Kafka streaming servic
 * [Karafka Travis CI](https://travis-ci.org/karafka/karafka)
 * [Karafka Coditsu](https://app.coditsu.io/karafka/repositories/karafka)
 
-## Note on Patches/Pull Requests
-
-Fork the project.
-Make your feature addition or bug fix.
-Add tests for it. This is important so we don't break it in a future versions unintentionally.
-Commit, do not mess with Rakefile, version, or history. (if you want to have your own version, that is fine but bump version in a commit by itself I can ignore when I pull). Send me a pull request. Bonus points for topic branches.
-
-[![coditsu](https://coditsu.io/assets/quality_bar.svg)](https://app.coditsu.io/karafka/repositories/karafka)
+## Note on contributions
 
-Each pull request must pass our quality requirements. To check if everything is as it should be, we use [Coditsu](https://coditsu.io) that combines multiple linters and code analyzers for both code and documentation.
+First, thank you for considering contributing to Karafka! It's people like you that make the open source community such a great community!
 
-Unfortunately, it does not yet support independent forks, however you should be fine by looking at what we require.
+Each pull request must pass all the RSpec specs and meet our quality requirements.
 
-Please run:
+To check if everything is as it should be, we use [Coditsu](https://coditsu.io) that combines multiple linters and code analyzers for both code and documentation. Once you're done with your changes, submit a pull request.
 
-```bash
-bundle exec rspec
-```
+Coditsu will automatically check your work against our quality standards. You can find your commit check results on the [builds page](https://app.coditsu.io/karafka/commit_builds) of Karafka organization.
 
-to check if everything is in order. After that you can submit a pull request.
+[![coditsu](https://coditsu.io/assets/quality_bar.svg)](https://app.coditsu.io/karafka/commit_builds)
 
 ## Contributors
 
data/bin/karafka CHANGED
@@ -10,7 +10,7 @@ else
   # However when it is unavailable, we still want to be able to run help command
   # and install command as they don't require configured app itself to run
   raise(
-    Karafka::Errors::MissingBootFile,
+    Karafka::Errors::MissingBootFileError,
     Karafka.boot_file
   ) unless %w[-h install].include?(ARGV[0])
 end
data/certs/mensfeld.pem ADDED
@@ -0,0 +1,25 @@
+-----BEGIN CERTIFICATE-----
+MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
+ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMTkwNzMwMTQ1NDU0WhcNMjAwNzI5MTQ1
+NDU0WjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
+CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQC9fCwtaHZG2SyyNXiH8r0QbJQx/xxl
+dkvwWz9QGJO+O8rEx20FB1Ab+MVkfOscwIv5jWpmk1U9whzDPl1uFtIbgu+sk+Zb
+uQlZyK/DPN6c+/BbBL+RryTBRyvkPLoCVwm7uxc/JZ1n4AI6eF4cCZ2ieZ9QgQbU
+MQs2QPqs9hT50Ez/40GnOdadVfiDDGz+NME2C4ms0BriXwZ1tcRTfJIHe2xjIbbb
+y5qRGfsLKcgMzvLQR24olixyX1MR0s4+Wveq3QL/gBhL4veUcv+UABJA8IJR0kyB
+seHHutusiwZ1v3SjjjW1xLLrc2ARV0mgCb0WaK2T4iA3oFTGLh6Ydz8LNl31KQFv
+94nRd8IhmJxrhQ6dQ/WT9IXoa5S9lfT5lPJeINemH4/6QPABzf9W2IZlCdI9wCdB
+TBaw57MKneGAYZiKjw6OALSy2ltQUCl3RqFl3VP7n8uFy1U987Q5VIIQ3O1UUsQD
+Oe/h+r7GUU4RSPKgPlrwvW9bD/UQ+zF51v8CAwEAAaN3MHUwCQYDVR0TBAIwADAL
+BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFJNIBHdfEUD7TqHqIer2YhWaWhwcMB0GA1Ud
+EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
+c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBAKA4eqko6BTNhlysip6rfBkVTGri
+ZXsL+kRb2hLvsQJS/kLyM21oMlu+LN0aPj3qEFR8mE/YeDD8rLAfruBRTltPNbR7
+xA5eE1gkxY5LfExUtK3b2wPqfmo7mZgfcsMwfYg/tUXw1WpBCnrhAJodpGH6SXmp
+A40qFUZst0vjiOoO+aTblIHPmMJXoZ3K42dTlNKlEiDKUWMRKSgpjjYGEYalFNWI
+hHfCz2r8L2t+dYdMZg1JGbEkq4ADGsAA8ioZIpJd7V4hI17u5TCdi7X5wh/0gN0E
+CgP+nLox3D+l2q0QuQEkayr+auFYkzTCkF+BmEk1D0Ru4mcf3F4CJvEmW4Pzbjqt
+i1tsCWPtJ4E/UUKnKaWKqGbjrjHJ0MuShYzHkodox5IOiCXIQg+1+YSzfXUV6WEK
+KJG/fhg1JV5vVDdVy6x+tv5SQ5ctU0feCsVfESi3rE3zRd+nvzE9HcZ5aXeL1UtJ
+nT5Xrioegu2w1jPyVEgyZgTZC5rvD0nNS5sFNQ==
+-----END CERTIFICATE-----
data/config/errors.yml CHANGED
@@ -1,6 +1,39 @@
 en:
-  errors:
-    broker_schema?: >
-      has an invalid format.
-      Expected schema, host and port number.
-      Example: kafka://127.0.0.1:9092 or kafka+ssl://127.0.0.1:9092
+  dry_validation:
+    errors:
+      invalid_broker_schema: >
+        has an invalid format
+        Expected schema, host and port number
+        Example: kafka://127.0.0.1:9092 or kafka+ssl://127.0.0.1:9092
+      invalid_certificate: >
+        is not a valid certificate
+      invalid_certificate_from_path: >
+        is not a valid certificate
+      invalid_private_key: >
+        is not a valid private key
+      max_timeout_size_for_exponential: >
+        pause_timeout cannot be more than pause_max_timeout
+      max_wait_time_limit:
+        max_wait_time cannot be more than socket_timeout
+      topics_names_not_unique: >
+        all topic names within a single consumer group must be unique
+      ssl_client_cert_with_ssl_client_cert_key: >
+        Both ssl_client_cert and ssl_client_cert_key need to be provided
+      ssl_client_cert_key_with_ssl_client_cert: >
+        Both ssl_client_cert_key and ssl_client_cert need to be provided
+      ssl_client_cert_chain_with_ssl_client_cert: >
+        Both ssl_client_cert_chain and ssl_client_cert need to be provided
+      ssl_client_cert_chain_with_ssl_client_cert_key: >
+        Both ssl_client_cert_chain and ssl_client_cert_key need to be provided
+      ssl_client_cert_key_password_with_ssl_client_cert_key: >
+        Both ssl_client_cert_key_password and ssl_client_cert_key need to be provided
+      does_not_respond_to_token: >
+        needs to respond to a #token method
+      required_usage_count: >
+        Given topic must be used at least once
+      pid_already_exists: >
+        Pidfile already exists
+      consumer_groups_inclusion: >
+        Unknown consumer group
+      does_not_exist:
+        Given file does not exist or cannot be read
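These keys follow dry-validation 1.x conventions (resolved under `en.dry_validation.errors.*`), which is what the new `Karafka::Contracts` classes rely on in place of the removed `Karafka::Schemas`. A rough sketch of the mechanism (the contract below is illustrative, not Karafka's actual contract code):

```ruby
require 'dry/validation'

# Illustrative contract showing how the en.dry_validation.errors.* keys above
# resolve into failure messages in dry-validation 1.x
class BrokerContract < Dry::Validation::Contract
  config.messages.load_paths << 'config/errors.yml'

  params do
    required(:seed_brokers).array(:str?)
  end

  rule(:seed_brokers) do
    values[:seed_brokers].each do |broker|
      # :invalid_broker_schema is looked up in the YAML above
      key.failure(:invalid_broker_schema) unless broker.start_with?('kafka://', 'kafka+ssl://')
    end
  end
end

BrokerContract.new.call(seed_brokers: ['http://127.0.0.1:9092'])
#=> failure on :seed_brokers carrying the "has an invalid format ..." message
```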
data/karafka.gemspec CHANGED
@@ -1,10 +1,11 @@
 # frozen_string_literal: true
 
-lib = File.expand_path('../lib', __FILE__)
+lib = File.expand_path('lib', __dir__)
 $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
 
 require 'karafka/version'
 
+# rubocop:disable Metrics/BlockLength
 Gem::Specification.new do |spec|
   spec.name = 'karafka'
   spec.version = ::Karafka::VERSION
@@ -16,20 +17,28 @@ Gem::Specification.new do |spec|
   spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
   spec.license = 'MIT'
 
-  spec.add_dependency 'activesupport', '>= 5.0'
-  spec.add_dependency 'dry-configurable', '~> 0.7'
-  spec.add_dependency 'dry-validation', '~> 0.11'
-  spec.add_dependency 'envlogic', '~> 1.0'
+  spec.add_dependency 'dry-configurable', '~> 0.8'
+  spec.add_dependency 'dry-inflector', '~> 0.1'
+  spec.add_dependency 'dry-monitor', '~> 0.3'
+  spec.add_dependency 'dry-validation', '~> 1.2'
+  spec.add_dependency 'envlogic', '~> 1.1'
+  spec.add_dependency 'irb', '~> 1.0'
   spec.add_dependency 'multi_json', '>= 1.12'
   spec.add_dependency 'rake', '>= 11.3'
-  spec.add_dependency 'require_all', '>= 1.4'
-  spec.add_dependency 'ruby-kafka', '>= 0.5'
-  spec.add_dependency 'thor', '~> 0.19'
-  spec.add_dependency 'waterdrop', '>= 1.0.1'
+  spec.add_dependency 'ruby-kafka', '>= 0.7.8'
+  spec.add_dependency 'thor', '~> 0.20'
+  spec.add_dependency 'waterdrop', '~> 1.3.0'
+  spec.add_dependency 'zeitwerk', '~> 2.1'
 
-  spec.required_ruby_version = '>= 2.3.0'
+  spec.required_ruby_version = '>= 2.4.0'
 
+  if $PROGRAM_NAME.end_with?('gem')
+    spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
+  end
+
+  spec.cert_chain = %w[certs/mensfeld.pem]
   spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
   spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
   spec.require_paths = %w[lib]
 end
+# rubocop:enable Metrics/BlockLength
data/lib/karafka.rb CHANGED
@@ -6,16 +6,16 @@
   kafka
   envlogic
   thor
+  forwardable
   fileutils
   multi_json
-  require_all
   dry-configurable
   dry-validation
-  active_support/callbacks
-  active_support/core_ext/hash/indifferent_access
-  active_support/descendants_tracker
-  active_support/inflector
-  karafka/loader
+  dry/events/publisher
+  dry/inflector
+  dry/monitor/notifications
+  dry/core/constants
+  zeitwerk
 ].each(&method(:require))
 
 # Karafka library
@@ -28,14 +28,14 @@ module Karafka
       @logger ||= App.config.logger
     end
 
-    # @return [::Karafka::Monitor] monitor that we want to use. Will use dummy monitor by default
+    # @return [::Karafka::Monitor] monitor that we want to use
    def monitor
       @monitor ||= App.config.monitor
     end
 
     # @return [String] root path of this gem
     def gem_root
-      Pathname.new(File.expand_path('../..', __FILE__))
+      Pathname.new(File.expand_path('..', __dir__))
     end
 
     # @return [String] Karafka app root path (user application path)
@@ -45,13 +45,13 @@ module Karafka
 
     # @return [String] path to Karafka gem root core
     def core_root
-      Pathname.new(File.expand_path('../karafka', __FILE__))
+      Pathname.new(File.expand_path('karafka', __dir__))
     end
 
     # @return [String] path to a default file that contains booting procedure etc
     # @note By default it is a file called 'karafka.rb' but it can be specified as you wish if you
     #   have Karafka that is merged into a Sinatra/Rails app and karafka.rb is taken.
-    #   It will be used for console/controllers/etc
+    #   It will be used for console/consumers/etc
     # @example Standard only-Karafka case
     #   Karafka.boot_file #=> '/home/app_path/karafka.rb'
     # @example Non standard case
@@ -63,6 +63,9 @@ module Karafka
   end
 end
 
-Karafka::Loader.load!(Karafka.core_root)
+Zeitwerk::Loader
+  .for_gem
+  .tap(&:setup)
+  .tap(&:eager_load)
+
 Kafka::Consumer.prepend(Karafka::Patches::RubyKafka)
-Dry::Configurable::Config.prepend(Karafka::Patches::DryConfigurable)
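Zeitwerk replaces the removed `Karafka::Loader`: instead of recursively requiring files under `Karafka.core_root`, the gem now leans on Zeitwerk's path-to-constant convention. A short sketch of what the chained call above amounts to (example paths are illustrative):

```ruby
# Zeitwerk maps file paths to constants by convention, e.g.:
#   lib/karafka/base_consumer.rb          => Karafka::BaseConsumer
#   lib/karafka/connection/api_adapter.rb => Karafka::Connection::ApiAdapter
loader = Zeitwerk::Loader.for_gem # roots itself at lib/, next to lib/karafka.rb
loader.setup                      # registers autoloads for the gem's constants
loader.eager_load                 # loads everything upfront, as the old loader did
```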
data/lib/karafka/app.rb CHANGED
@@ -3,37 +3,37 @@
 module Karafka
   # App class
   class App
-    class << self
-      # Sets up the whole configuration
-      # @param [Block] block configuration block
-      def setup(&block)
-        Setup::Config.setup(&block)
-        initialize!
-      end
+    extend Setup::Dsl
 
+    class << self
       # Sets up all the internal components and bootstrap whole app
       # We need to know details about consumers in order to setup components,
       # that's why we don't setup them after std setup is done
-      # @raise [Karafka::Errors::InvalidConfiguration] raised when configuration
-      #   doesn't match with ConfigurationSchema
+      # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
+      #   doesn't match with the config contract
       def boot!
+        initialize!
         Setup::Config.validate!
         Setup::Config.setup_components
-      end
-
-      # @return [Karafka::Config] config instance
-      def config
-        Setup::Config.config
+        initialized!
       end
 
       # @return [Karafka::Routing::Builder] consumers builder instance
       def consumer_groups
-        Routing::Builder.instance
+        config.internal.routing_builder
+      end
+
+      # Triggers reload of all cached Karafka app components, so we can use in-process
+      # in-development hot code reloading without Karafka process restart
+      def reload
+        Karafka::Persistence::Consumers.clear
+        Karafka::Persistence::Topics.clear
+        config.internal.routing_builder.reload
       end
 
       Status.instance_methods(false).each do |delegated|
         define_method(delegated) do
-          Status.instance.public_send(delegated)
+          App.config.internal.status.send(delegated)
         end
       end
 
@@ -41,10 +41,11 @@ module Karafka
       %i[
         root
         env
-        logger monitor
+        logger
+        monitor
       ].each do |delegated|
         define_method(delegated) do
-          Karafka.public_send(delegated)
+          Karafka.send(delegated)
         end
       end
     end
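The new `App.reload` underpins 1.3's development-mode code reloading (see `data/lib/karafka/code_reloader.rb` in the file list): clearing the persistence caches forces consumers to be rebuilt from freshly reloaded classes. A hedged sketch of triggering it by hand; the hook placement is illustrative and the generated `karafka.rb` template wires this up through `Karafka::CodeReloader` instead:

```ruby
# Sketch: rebuild consumers from freshly reloaded classes before each fetch
# loop iteration while in development
Karafka.monitor.subscribe('connection.listener.fetch_loop') do |_event|
  Karafka::App.reload if Karafka.env.development?
end
```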
data/lib/karafka/attributes_map.rb CHANGED
@@ -11,18 +11,18 @@ module Karafka
   module AttributesMap
     class << self
       # What settings should go where in ruby-kafka
+      # @return [Hash] hash with proper sections on what to proxy where in Ruby-Kafka
       # @note All other settings will be passed to Kafka.new method invocation.
       #   All elements in this hash are just edge cases
-      # @return [Hash] hash with proper sections on what to proxy where in Ruby-Kafka
-      def config_adapter
+      def api_adapter
         {
           consumer: %i[
             session_timeout offset_commit_interval offset_commit_threshold
-            offset_retention_time heartbeat_interval
+            offset_retention_time heartbeat_interval fetcher_max_queue_size
           ],
-          subscription: %i[start_from_beginning max_bytes_per_partition],
-          consuming: %i[min_bytes max_wait_time],
-          pausing: %i[pause_timeout],
+          subscribe: %i[start_from_beginning max_bytes_per_partition],
+          consumption: %i[min_bytes max_bytes max_wait_time],
+          pause: %i[pause_timeout pause_max_timeout pause_exponential_backoff],
           # All the options that are under kafka config namespace, but are not used
           # directly with kafka api, but from the Karafka user perspective, they are
           # still related to kafka. They should not be proxied anywhere
@@ -32,13 +32,12 @@ module Karafka
 
       # @return [Array<Symbol>] properties that can be set on a per topic level
       def topic
-        (config_adapter[:subscription] + %i[
+        (api_adapter[:subscribe] + %i[
           backend
           name
-          parser
+          deserializer
           responder
           batch_consuming
-          persistent
         ]).uniq
       end
 
@@ -48,17 +47,19 @@
       # Thanks to this solution, if any new setting is available for ruby-kafka, we just need
       # to add it to our configuration class and it will be handled automatically.
       def consumer_group
-        # @note We don't ignore the config_adapter[:ignored] values as they should be ignored
+        # @note We don't ignore the api_adapter[:ignored] values as they should be ignored
         #   only when proxying details go ruby-kafka. We use ignored fields internally in karafka
-        ignored_settings = config_adapter[:subscription]
-        defined_settings = config_adapter.values.flatten
+        ignored_settings = api_adapter[:subscribe]
+        defined_settings = api_adapter.values.flatten
         karafka_settings = %i[batch_fetching]
-        # This is a drity and bad hack of dry-configurable to get keys before setting values
+        # This is a dirty and bad hack of dry-configurable to get keys before setting values
         dynamically_proxied = Karafka::Setup::Config
           ._settings
+          .settings
           .find { |s| s.name == :kafka }
           .value
-          .instance_variable_get('@klass').settings
+          .names
+          .to_a
 
         (defined_settings + dynamically_proxied).uniq + karafka_settings - ignored_settings
       end
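For applications the rename is transparent: kafka-namespaced settings keep flowing through the adapter to the matching ruby-kafka calls, while the per-topic `deserializer` replaces the dropped `parser` attribute. An illustrative configuration (values and names are examples only):

```ruby
Karafka::App.setup do |config|
  config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
  # New in the :consumer section above - proxied to ruby-kafka's Kafka#consumer call
  config.kafka.fetcher_max_queue_size = 10
end

Karafka::App.consumer_groups.draw do
  topic :example do
    consumer ExampleConsumer # hypothetical consumer class
    # `deserializer` replaces the removed per-topic `parser` attribute
    deserializer Karafka::Serialization::Json::Deserializer.new
  end
end
```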
data/lib/karafka/backends/inline.rb CHANGED
@@ -9,8 +9,7 @@ module Karafka
 
       # Executes consume code immediately (without enqueuing)
       def process
-        Karafka.monitor.notice(self.class, params_batch)
-        consume
+        Karafka.monitor.instrument('backends.inline.process', caller: self) { consume }
       end
     end
   end
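Swapping the old `Monitor#notice` for dry-monitor style instrumentation makes processing observable through named events. A small sketch (the event name comes from the diff above; the `:time` payload key follows dry-monitor conventions):

```ruby
# Sketch: log how long inline processing of each batch takes
Karafka.monitor.subscribe('backends.inline.process') do |event|
  # event[:caller] is the consumer instance passed via `caller: self` above
  Karafka.logger.info "#{event[:caller].class} consumed a batch in #{event[:time]}ms"
end
```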