karafka 1.4.13 → 2.0.0.alpha1

Files changed (126)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +3 -0
  4. data/.github/workflows/ci.yml +74 -24
  5. data/CHANGELOG.md +38 -7
  6. data/Gemfile +6 -0
  7. data/Gemfile.lock +25 -26
  8. data/LICENSE +14 -0
  9. data/LICENSE-COMM +89 -0
  10. data/LICENSE-LGPL +165 -0
  11. data/README.md +16 -48
  12. data/bin/benchmarks +85 -0
  13. data/bin/create_token +28 -0
  14. data/bin/integrations +160 -0
  15. data/bin/stress +13 -0
  16. data/certs/karafka-pro.pem +11 -0
  17. data/config/errors.yml +4 -38
  18. data/docker-compose.yml +11 -3
  19. data/karafka.gemspec +9 -13
  20. data/lib/active_job/consumer.rb +22 -0
  21. data/lib/active_job/karafka.rb +18 -0
  22. data/lib/active_job/queue_adapters/karafka_adapter.rb +29 -0
  23. data/lib/active_job/routing_extensions.rb +15 -0
  24. data/lib/karafka/app.rb +13 -20
  25. data/lib/karafka/base_consumer.rb +103 -34
  26. data/lib/karafka/cli/base.rb +4 -4
  27. data/lib/karafka/cli/info.rb +43 -8
  28. data/lib/karafka/cli/install.rb +3 -8
  29. data/lib/karafka/cli/server.rb +17 -30
  30. data/lib/karafka/cli.rb +4 -11
  31. data/lib/karafka/connection/client.rb +279 -93
  32. data/lib/karafka/connection/listener.rb +137 -38
  33. data/lib/karafka/connection/messages_buffer.rb +57 -0
  34. data/lib/karafka/connection/pauses_manager.rb +46 -0
  35. data/lib/karafka/connection/rebalance_manager.rb +62 -0
  36. data/lib/karafka/contracts/config.rb +25 -7
  37. data/lib/karafka/contracts/consumer_group.rb +0 -173
  38. data/lib/karafka/contracts/consumer_group_topic.rb +17 -7
  39. data/lib/karafka/contracts/server_cli_options.rb +1 -9
  40. data/lib/karafka/contracts.rb +1 -1
  41. data/lib/karafka/env.rb +46 -0
  42. data/lib/karafka/errors.rb +14 -18
  43. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  44. data/lib/karafka/instrumentation/callbacks/error.rb +40 -0
  45. data/lib/karafka/instrumentation/callbacks/statistics.rb +42 -0
  46. data/lib/karafka/instrumentation/monitor.rb +14 -21
  47. data/lib/karafka/instrumentation/stdout_listener.rb +64 -91
  48. data/lib/karafka/instrumentation.rb +21 -0
  49. data/lib/karafka/licenser.rb +65 -0
  50. data/lib/karafka/{params → messages}/batch_metadata.rb +7 -13
  51. data/lib/karafka/messages/builders/batch_metadata.rb +30 -0
  52. data/lib/karafka/messages/builders/message.rb +38 -0
  53. data/lib/karafka/messages/builders/messages.rb +40 -0
  54. data/lib/karafka/{params/params.rb → messages/message.rb} +7 -12
  55. data/lib/karafka/messages/messages.rb +64 -0
  56. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  57. data/lib/karafka/messages/seek.rb +9 -0
  58. data/lib/karafka/patches/rdkafka/consumer.rb +22 -0
  59. data/lib/karafka/processing/executor.rb +96 -0
  60. data/lib/karafka/processing/executors_buffer.rb +49 -0
  61. data/lib/karafka/processing/jobs/base.rb +18 -0
  62. data/lib/karafka/processing/jobs/consume.rb +28 -0
  63. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  64. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  65. data/lib/karafka/processing/jobs_queue.rb +121 -0
  66. data/lib/karafka/processing/worker.rb +57 -0
  67. data/lib/karafka/processing/workers_batch.rb +22 -0
  68. data/lib/karafka/railtie.rb +65 -0
  69. data/lib/karafka/routing/builder.rb +15 -14
  70. data/lib/karafka/routing/consumer_group.rb +10 -18
  71. data/lib/karafka/routing/consumer_mapper.rb +1 -2
  72. data/lib/karafka/routing/router.rb +1 -1
  73. data/lib/karafka/routing/subscription_group.rb +53 -0
  74. data/lib/karafka/routing/subscription_groups_builder.rb +51 -0
  75. data/lib/karafka/routing/topic.rb +47 -25
  76. data/lib/karafka/runner.rb +59 -0
  77. data/lib/karafka/serialization/json/deserializer.rb +6 -15
  78. data/lib/karafka/server.rb +62 -25
  79. data/lib/karafka/setup/config.rb +86 -159
  80. data/lib/karafka/status.rb +13 -3
  81. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  82. data/lib/karafka/templates/karafka.rb.erb +14 -50
  83. data/lib/karafka/time_trackers/base.rb +19 -0
  84. data/lib/karafka/time_trackers/pause.rb +84 -0
  85. data/lib/karafka/time_trackers/poll.rb +65 -0
  86. data/lib/karafka/version.rb +1 -1
  87. data/lib/karafka.rb +30 -13
  88. data.tar.gz.sig +4 -3
  89. metadata +71 -89
  90. metadata.gz.sig +0 -0
  91. data/MIT-LICENCE +0 -18
  92. data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
  93. data/lib/karafka/attributes_map.rb +0 -63
  94. data/lib/karafka/backends/inline.rb +0 -16
  95. data/lib/karafka/base_responder.rb +0 -226
  96. data/lib/karafka/cli/flow.rb +0 -48
  97. data/lib/karafka/cli/missingno.rb +0 -19
  98. data/lib/karafka/code_reloader.rb +0 -67
  99. data/lib/karafka/connection/api_adapter.rb +0 -158
  100. data/lib/karafka/connection/batch_delegator.rb +0 -55
  101. data/lib/karafka/connection/builder.rb +0 -23
  102. data/lib/karafka/connection/message_delegator.rb +0 -36
  103. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  104. data/lib/karafka/consumers/callbacks.rb +0 -71
  105. data/lib/karafka/consumers/includer.rb +0 -64
  106. data/lib/karafka/consumers/responders.rb +0 -24
  107. data/lib/karafka/consumers/single_params.rb +0 -15
  108. data/lib/karafka/contracts/responder_usage.rb +0 -54
  109. data/lib/karafka/fetcher.rb +0 -42
  110. data/lib/karafka/helpers/class_matcher.rb +0 -88
  111. data/lib/karafka/helpers/config_retriever.rb +0 -46
  112. data/lib/karafka/helpers/inflector.rb +0 -26
  113. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  114. data/lib/karafka/params/builders/params.rb +0 -38
  115. data/lib/karafka/params/builders/params_batch.rb +0 -25
  116. data/lib/karafka/params/params_batch.rb +0 -60
  117. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  118. data/lib/karafka/persistence/client.rb +0 -29
  119. data/lib/karafka/persistence/consumers.rb +0 -45
  120. data/lib/karafka/persistence/topics.rb +0 -48
  121. data/lib/karafka/responders/builder.rb +0 -36
  122. data/lib/karafka/responders/topic.rb +0 -55
  123. data/lib/karafka/routing/topic_mapper.rb +0 -53
  124. data/lib/karafka/serialization/json/serializer.rb +0 -31
  125. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  126. data/lib/karafka/templates/application_responder.rb.erb +0 -11
data/README.md CHANGED
@@ -4,15 +4,15 @@
  [![Gem Version](https://badge.fury.io/rb/karafka.svg)](http://badge.fury.io/rb/karafka)
  [![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)

- **Note**: We're finishing the new Karafka `2.0` but for now, please use `1.4`. All the documentation presented here refers to `1.4`
+ **Note**: All of the documentation here refers to Karafka `2.0`. If you are looking for the documentation to Karafka `1.4` please click here (TBA).

  ## About Karafka

- Framework used to simplify Apache Kafka based Ruby applications development.
+ Karafka is a framework used to simplify Apache Kafka based Ruby and Ruby on Rails applications development.

  ```ruby
- # Define what topics you want to consume with which consumers
- Karafka::App.consumer_groups.draw do
+ # Define what topics you want to consume with which consumers in karafka.rb
+ Karafka::App.routes.draw do
    topic 'system_events' do
      consumer EventsConsumer
    end
@@ -23,39 +23,14 @@ class EventsConsumer < ApplicationConsumer
    # Example that utilizes ActiveRecord#insert_all and Karafka batch processing
    def consume
      # Store all of the incoming Kafka events locally in an efficient way
-     Event.insert_all params_batch.payloads
+     Event.insert_all messages.payloads
    end
  end
  ```

- Karafka allows you to capture everything that happens in your systems in large scale, providing you with a seamless and stable core for consuming and processing this data, without having to focus on things that are not your business domain.
+ Karafka allows you to capture everything that happens in your systems in large scale, providing you with a seamless and stable core for consuming, processing and producing data, without having to focus on things that are not your business domain.

- Karafka not only handles incoming messages but also provides tools for building complex data-flow applications that receive and send messages.
-
- ## How does it work
-
- Karafka provides a higher-level abstraction that allows you to focus on your business logic development, instead of focusing on implementing lower level abstraction layers. It provides developers with a set of tools that are dedicated for building multi-topic applications similar to how Rails applications are being built.
-
- ### Some things you might wonder about:
-
- - You can integrate Karafka with **any** Ruby-based application.
- - Karafka does **not** require Sidekiq or any other third party software (apart from Kafka itself).
- - Karafka works with Ruby on Rails but it is a **standalone** framework that can work without it.
- - Karafka has a **minimal** set of dependencies, so adding it won't be a huge burden for your already existing applications.
- - Karafka processes can be executed for a **given subset** of consumer groups and/or topics, so you can fine tune it depending on your business logic.
-
- Karafka based applications can be easily deployed to any type of infrastructure, including those based on:
-
- * Heroku
- * Capistrano
- * Docker
- * Terraform
-
- ## Support
-
- Karafka has [Wiki pages](https://github.com/karafka/karafka/wiki) for almost everything and a pretty decent [FAQ](https://github.com/karafka/karafka/wiki/FAQ). It covers the whole installation, setup, and deployment along with other useful details on how to run Karafka.
-
- If you have any questions about using Karafka, feel free to join our [Gitter](https://gitter.im/karafka/karafka) chat channel.
+ Karafka **uses** threads to handle many messages at the same time in the same process. It does not require Rails but will integrate tightly with any Ruby on Rails applications to make event processing dead simple.

  ## Getting started

@@ -64,30 +39,23 @@ If you're completely new to the subject, you can start with our "Kafka on Rails"
  - [Kafka on Rails: Using Kafka with Ruby on Rails – Part 1 – Kafka basics and its advantages](https://mensfeld.pl/2017/11/kafka-on-rails-using-kafka-with-ruby-on-rails-part-1-kafka-basics-and-its-advantages/)
  - [Kafka on Rails: Using Kafka with Ruby on Rails – Part 2 – Getting started with Ruby and Kafka](https://mensfeld.pl/2018/01/kafka-on-rails-using-kafka-with-ruby-on-rails-part-2-getting-started-with-ruby-and-kafka/)

- If you want to get started with Kafka and Karafka as fast as possible, then the best idea is to just clone our example repository:
+ If you want to get started with Kafka and Karafka as fast as possible, then the best idea is to visit our [Getting started](https://github.com/karafka/karafka/wiki/Getting-started) guides and the [example apps repository](https://github.com/karafka/example-apps).

- ```bash
- git clone https://github.com/karafka/example-app ./example_app
- ```
+ We also maintain many [integration specs](https://github.com/karafka/karafka/tree/master/spec/integrations) illustrating various use-cases and features of the framework.

- then, just bundle install all the dependencies:
+ ## Want to Upgrade? LGPL is not for you? Want to help?

- ```bash
- cd ./example_app
- bundle install
- ```
+ I also sell Karafka Pro subscription. It includes commercial-friendly license, priority support, architecture consultations and high throughput data processing-related features (under development).

- and follow the instructions from the [example app Wiki](https://github.com/karafka/example-app/blob/master/README.md).
+ **20%** of the income will be distributed back to other OSS projects that Karafka uses under the hood.

- **Note**: you need to ensure, that you have Kafka up and running and you need to configure Kafka seed_brokers in the ```karafka.rb``` file.
+ Help me provide high-quality open-source software. Please see the Karafka [homepage](https://karafka.io) for more details.

- If you need more details and know how on how to start Karafka with a clean installation, read the [Getting started page](https://github.com/karafka/karafka/wiki/Getting-started) section of our Wiki.
+ ## Support

- ## References
+ Karafka has [Wiki pages](https://github.com/karafka/karafka/wiki) for almost everything and a pretty decent [FAQ](https://github.com/karafka/karafka/wiki/FAQ). It covers the whole installation, setup and deployment along with other useful details on how to run Karafka.

- * [Karafka framework](https://github.com/karafka/karafka)
- * [Karafka GitHub Actions](https://github.com/karafka/karafka/actions)
- * [Karafka Coditsu](https://app.coditsu.io/karafka/repositories/karafka)
+ If you have any questions about using Karafka, feel free to join our [Slack](https://slack.karafka.io) channel.

  ## Note on contributions

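The README example above switches from the `1.4` `params_batch` API to the new `messages` API. For orientation, a per-message variant of the same consumer could look like this (a sketch against the 2.0 API shown above; the logging body is illustrative):

```ruby
class EventsConsumer < ApplicationConsumer
  def consume
    # messages wraps the polled batch; each message carries its payload plus Kafka metadata
    messages.each do |message|
      puts "#{message.topic}/#{message.partition} -> #{message.payload}"
    end
  end
end
```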
data/bin/benchmarks ADDED
@@ -0,0 +1,85 @@
+ #!/usr/bin/env ruby
+
+ # Runner for running given benchmark cases
+ # Some of the cases require pre-populated data and we populate this in places that need it
+ # In other cases we generate this data in a background process, so the partitions data stream
+ # is consistent and we don't end up consuming huge batches of a single partition.
+
+ require 'open3'
+ require 'pathname'
+
+ $LOAD_PATH.unshift(File.dirname(__FILE__))
+ $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..'))
+
+ ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))
+
+ # Load all the benchmarks
+ benchmarks = Dir[ROOT_PATH.join('spec/benchmarks/**/*.rb')]
+
+ # If filter is provided, apply
+ benchmarks.delete_if { |name| !name.include?(ARGV[0]) } if ARGV[0]
+
+ raise ArgumentError, "No benchmarks with filter: #{ARGV[0]}" if benchmarks.empty?
+
+ # We may skip seeding if we are running the benchmarks multiple times, then since we do not
+ # commit offsets we can skip generating more data
+ if ENV['SEED']
+   require 'spec/benchmarks_helper'
+
+   # We need to setup karafka here to have producer for data seeding
+   setup_karafka
+
+   # This takes some time but needs to run only once per benchmark session
+   puts 'Seeding benchmarks data...'
+
+   producer = Karafka::App.producer
+
+   # We make our data json compatible so we can also benchmark serialization
+   elements = Array.new(100_000) { { a: :b }.to_json }
+
+   # We do not populate data of benchmarks_0_10 as we use it with life-stream data only
+   %w[
+     benchmarks_0_01
+     benchmarks_0_05
+   ].each do |topic_name|
+     partitions_count = topic_name.split('_').last.to_i
+
+     partitions_count.times do |partition|
+       puts "Seeding #{topic_name}:#{partition}"
+
+       elements.each_slice(10_000) do |data_slice|
+         data = data_slice.map do |data|
+           { topic: topic_name, payload: data, partition: partition }
+         end
+
+         producer.buffer_many(data)
+         producer.flush_sync
+       end
+     end
+   end
+ end
+
+ # Selects requested benchmarks and runs them one after another
+ benchmarks.each do |benchmark_path|
+   puts "Running #{benchmark_path.gsub("#{ROOT_PATH}/spec/benchmarks/", '')}"
+
+   benchmark = "bundle exec ruby -r ./spec/benchmarks_helper.rb #{benchmark_path}"
+
+   Open3.popen3(benchmark) do |stdin, stdout, stderr, thread|
+     t1 = Thread.new do
+       while line = stdout.gets
+         puts(line)
+       end
+     rescue IOError
+     end
+
+     t2 = Thread.new do
+       while line = stderr.gets
+         puts(line)
+       end
+     rescue IOError
+     end
+
+     thread.join
+   end
+ end
data/bin/create_token ADDED
@@ -0,0 +1,28 @@
+ #!/usr/bin/env ruby
+
+ require 'openssl'
+ require 'base64'
+ require 'json'
+ require 'date'
+
+ PRIVATE_KEY_LOCATION = File.join(Dir.home, '.ssh', 'karafka-pro', 'id_rsa')
+
+ # Name of the entity that acquires the license
+ ENTITY = ARGV[0]
+ # Date till which license is valid
+ EXPIRES_ON = Date.parse(ARGV[1])
+
+ raise ArgumentError, 'Entity missing' if ENTITY.nil? || ENTITY.empty?
+ raise ArgumentError, 'Expires on needs to be in the future' if EXPIRES_ON <= Date.today
+
+ pro_token_data = {
+   entity: ENTITY,
+   expires_on: EXPIRES_ON
+ }
+
+ # This code uses my private key to generate a new token for Karafka Pro capabilities
+ private_key = OpenSSL::PKey::RSA.new(File.read(PRIVATE_KEY_LOCATION))
+
+ bin_key = private_key.private_encrypt(pro_token_data.to_json)
+
+ puts Base64.encode64(bin_key)
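Verification is the mirror image of this script: decode the Base64 token, then decrypt it with the public key shipped in certs/karafka-pro.pem (added later in this changeset). A minimal sketch of that check, not the gem's actual licenser implementation; the key path and ARGV handling are illustrative:

```ruby
require 'openssl'
require 'base64'
require 'json'

# Public counterpart of the signing key used by bin/create_token (path illustrative)
public_key = OpenSSL::PKey::RSA.new(File.read('certs/karafka-pro.pem'))

# Reverse of bin/create_token: Base64-decode the token, then decrypt with the public key
token_data = JSON.parse(
  public_key.public_decrypt(Base64.decode64(ARGV[0]))
)

puts "Entity: #{token_data.fetch('entity')}"
puts "Expires on: #{token_data.fetch('expires_on')}"
```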
data/bin/integrations ADDED
@@ -0,0 +1,160 @@
+ #!/usr/bin/env ruby
+
+ # Runner to run integration specs in parallel
+
+ require 'open3'
+ require 'fileutils'
+ require 'pathname'
+
+ ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))
+
+ # Raised from the parent process if any of the integration tests fails
+ IntegrationTestError = Class.new(StandardError)
+
+ # How many child processes with integration specs do we want to run in parallel
+ # When the value is high, there's a problem with thread allocation on Github
+ CONCURRENCY = 5
+
+ # Abstraction around a single test scenario execution process
+ class Scenario
+   # How long a scenario can run before we kill it
+   # This is a fail-safe just in case something would hang
+   MAX_RUN_TIME = 60 * 5
+
+   # There are rare cases where Karafka may force shutdown for some of the integration cases
+   # This includes exactly those
+   EXIT_CODES = {
+     default: [0],
+     'consumption/worker_critical_error_behaviour.rb' => [0, 2].freeze,
+     'shutdown/on_hanging_jobs_and_a_shutdown.rb' => [2].freeze,
+     'shutdown/on_hanging_on_shutdown_job_and_a_shutdown.rb' => [2].freeze,
+     'shutdown/on_hanging_poll_and_shutdown.rb' => [2].freeze
+   }.freeze
+
+   private_constant :MAX_RUN_TIME, :EXIT_CODES
+
+   # Creates scenario instance and runs in the background process
+   #
+   # @param path [String] path to the scenarios file
+   def initialize(path)
+     @path = path
+     @stdin, @stdout, @stderr, @wait_thr = Open3.popen3(
+       "bundle exec ruby -r ./spec/integrations_helper.rb #{path}"
+     )
+     @started_at = current_time
+     # Last 1024 characters from stdout
+     @stdout_tail = ''
+   end
+
+   # @return [String] integration spec name
+   def name
+     @path.gsub("#{ROOT_PATH}/spec/integrations/", '')
+   end
+
+   # @return [Boolean] did this scenario finished or is it still running
+   def finished?
+     # If the thread is running too long, kill it
+     if current_time - @started_at > MAX_RUN_TIME
+       @wait_thr.kill
+       Process.kill('TERM', pid)
+     end
+
+     # We read it so it won't grow as we use our default logger that prints to both test.log and
+     # to stdout. Otherwise after reaching the buffer size, it would hang
+     buffer = ''
+     @stdout.read_nonblock(10_240, buffer, exception: false)
+     @stdout_tail << buffer
+     @stdout_tail = @stdout_tail[-10_024..-1] || @stdout_tail
+
+     !@wait_thr.alive?
+   end
+
+   # @return [Integer] pid of the process of this scenario
+   def pid
+     @wait_thr.pid
+   end
+
+   # @return [Integer] exit code of the process running given scenario
+   def exit_code
+     # There may be no exit status if we killed the thread
+     @wait_thr.value&.exitstatus || 123
+   end
+
+   # @return [Boolean] did this scenario finish successfully or not
+   def success?
+     expected_exit_codes = EXIT_CODES[name] || EXIT_CODES[:default]
+
+     expected_exit_codes.include?(exit_code)
+   end
+
+   # Prints a status report when scenario is finished and stdout if it failed
+   def report
+     result = success? ? "\e[#{32}m#{'OK'}\e[0m" : "\e[#{31}m#{'FAILED'}\e[0m"
+
+     puts "#{result} #{name}"
+
+     unless success?
+       puts "Exit code: #{exit_code}"
+       puts @stdout_tail
+       puts @stderr.read
+     end
+   end
+
+   private
+
+   # @return [Float] current machine time
+   def current_time
+     Process.clock_gettime(Process::CLOCK_MONOTONIC)
+   end
+ end
+
+ # Simple array to keep track of active integration processes thread running with info on which
+ # test scenario is running
+ active_scenarios = []
+
+ # Finished runners
+ finished_scenarios = []
+
+ # Waits for any of the processes to be finished and tracks exit codes
+ #
+ # @param active_scenarios [Array] active runners
+ # @param finished_scenarios [Hash] finished forks exit codes
+ def wait_and_track(active_scenarios, finished_scenarios)
+   exited = active_scenarios.find(&:finished?)
+
+   if exited
+     scenario = active_scenarios.delete(exited)
+
+     scenario.report
+
+     finished_scenarios << scenario
+   else
+     Thread.pass
+   end
+ end
+
+ # Load all the specs
+ specs = Dir[ROOT_PATH.join('spec/integrations/**/*.rb')]
+
+ # If filter is provided, apply
+ specs.delete_if { |name| !name.include?(ARGV[0]) } if ARGV[0]
+
+ raise ArgumentError, "No integration specs with filter: #{ARGV[0]}" if specs.empty?
+
+ # Randomize order
+ seed = (ENV['SEED'] || rand(0..10_000)).to_i
+
+ puts "Random seed: #{seed}"
+
+ specs.shuffle(random: Random.new(seed)).each do |integration_test|
+   scenario = Scenario.new(integration_test)
+
+   active_scenarios << scenario
+
+   wait_and_track(active_scenarios, finished_scenarios) until active_scenarios.size < CONCURRENCY
+ end
+
+ wait_and_track(active_scenarios, finished_scenarios) while !active_scenarios.empty?
+
+ # Fail all if any of the tests does not have expected exit code
+ raise IntegrationTestError unless finished_scenarios.all?(&:success?)
data/bin/stress ADDED
@@ -0,0 +1,13 @@
+ #!/bin/bash
+
+ # Runs integration specs in an endless loop
+ # This allows us to ensure (after long enough time) that the integrations test suit is stable and
+ # that there are no anomalies when running it for a long period of time
+
+ set -e
+
+ while :
+ do
+   reset
+   bundle exec bin/integrations $1
+ done
data/certs/karafka-pro.pem ADDED
@@ -0,0 +1,11 @@
+ -----BEGIN RSA PUBLIC KEY-----
+ MIIBigKCAYEApcd6ybskiNs9WUvBGVUE8GdWDehjZ9TyjSj/fDl/UcMYqY0R5YX9
+ tnYxEwZZRMdVltKWxr88Qmshh1IQz6CpJVbcfYjt/158pSGPm+AUua6tkLqIvZDM
+ ocFOMafmroI+BMuL+Zu5QH7HC2tkT16jclGYfMQkJjXVUQTk2UZr+94+8RlUz/CH
+ Y6hPA7xPgIyPfyPCxz1VWzAwXwT++NCJQPBr5MqT84LNSEzUSlR9pFNShf3UCUT+
+ 8LWOvjFSNGmMMSsbo2T7/+dz9/FM02YG00EO0x04qteggwcaEYLFrigDN6/fM0ih
+ BXZILnMUqC/qrfW2YFg4ZqKZJuxaALqqkPxrkBDYqoqcAloqn36jBSke6tc/2I/J
+ 2Afq3r53UoAbUH7h5I/L8YeaiA4MYjAuq724lHlrOmIr4D6yjYC0a1LGlPjLk869
+ 2nsVXNgomhVb071E6amR+rJJnfvkdZgCmEBFnqnBV5A1u4qgNsa2rVcD+gJRvb2T
+ aQtjlQWKPx5xAgMBAAE=
+ -----END RSA PUBLIC KEY-----
data/config/errors.yml CHANGED
@@ -1,41 +1,7 @@
  en:
    dry_validation:
      errors:
-       invalid_broker_schema: >
-         has an invalid format
-         Expected schema, host and port number
-         Example: kafka://127.0.0.1:9092 or kafka+ssl://127.0.0.1:9092
-       invalid_certificate: >
-         is not a valid certificate
-       invalid_certificate_from_path: >
-         is not a valid certificate
-       invalid_private_key: >
-         is not a valid private key
-       max_timeout_size_for_exponential: >
-         pause_timeout cannot be more than pause_max_timeout
-       max_wait_time_limit:
-         max_wait_time cannot be more than socket_timeout
-       topics_names_not_unique: >
-         all topic names within a single consumer group must be unique
-       ssl_client_cert_with_ssl_client_cert_key: >
-         Both ssl_client_cert and ssl_client_cert_key need to be provided
-       ssl_client_cert_key_with_ssl_client_cert: >
-         Both ssl_client_cert_key and ssl_client_cert need to be provided
-       ssl_client_cert_chain_with_ssl_client_cert: >
-         Both ssl_client_cert_chain and ssl_client_cert need to be provided
-       ssl_client_cert_chain_with_ssl_client_cert_key: >
-         Both ssl_client_cert_chain and ssl_client_cert_key need to be provided
-       ssl_client_cert_key_password_with_ssl_client_cert_key: >
-         Both ssl_client_cert_key_password and ssl_client_cert_key need to be provided
-       does_not_respond_to_token: >
-         needs to respond to a #token method
-       required_usage_count: >
-         Given topic must be used at least once
-       pid_already_exists: >
-         Pidfile already exists
-       consumer_groups_inclusion: >
-         Unknown consumer group
-       does_not_exist:
-         Given file does not exist or cannot be read
-       does_not_respond_to_call: >
-         needs to respond to a #call method
+       max_timeout_vs_pause_max_timeout: pause_timeout must be less or equal to pause_max_timeout
+       topics_names_not_unique: all topic names within a single consumer group must be unique
+       required_usage_count: Given topic must be used at least once
+       consumer_groups_inclusion: Unknown consumer group
data/docker-compose.yml CHANGED
@@ -3,15 +3,23 @@ services:
    zookeeper:
      image: wurstmeister/zookeeper
      ports:
-       - "2181:2181"
+       - '2181:2181'
    kafka:
-     image: wurstmeister/kafka:1.0.1
+     image: wurstmeister/kafka
      ports:
-       - "9092:9092"
+       - '9092:9092'
      environment:
        KAFKA_ADVERTISED_HOST_NAME: localhost
        KAFKA_ADVERTISED_PORT: 9092
        KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
        KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
+       KAFKA_CREATE_TOPICS:
+         "integrations_0_03:3:1,\
+         integrations_1_03:3:1,\
+         integrations_0_10:10:1,\
+         integrations_1_10:10:1,\
+         benchmarks_0_01:1:1,\
+         benchmarks_0_05:5:1,\
+         benchmarks_0_10:10:1"
      volumes:
        - /var/run/docker.sock:/var/run/docker.sock
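Each KAFKA_CREATE_TOPICS entry follows the wurstmeister image's `name:partitions:replicas` format. Note that the topic names themselves also encode the partition count in their suffix, a convention bin/benchmarks above relies on when seeding:

```ruby
# 'benchmarks_0_05:5:1' declares topic 'benchmarks_0_05' with 5 partitions, replication factor 1.
# bin/benchmarks recovers the partition count straight from the topic name suffix:
'benchmarks_0_05'.split('_').last.to_i # => 5
```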
data/karafka.gemspec CHANGED
@@ -5,29 +5,26 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

  require 'karafka/version'

- # rubocop:disable Metrics/BlockLength
  Gem::Specification.new do |spec|
    spec.name = 'karafka'
    spec.version = ::Karafka::VERSION
    spec.platform = Gem::Platform::RUBY
-   spec.authors = ['Maciej Mensfeld', 'Pavlo Vavruk', 'Adam Gwozdowski']
-   spec.email = %w[maciej@mensfeld.pl pavlo.vavruk@gmail.com adam99g@gmail.com]
+   spec.authors = ['Maciej Mensfeld']
+   spec.email = %w[maciej@mensfeld.pl]
    spec.homepage = 'https://karafka.io'
    spec.summary = 'Ruby based framework for working with Apache Kafka'
    spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
-   spec.license = 'MIT'
+   spec.license = 'LGPL-3.0'

    spec.add_dependency 'dry-configurable', '~> 0.13'
-   spec.add_dependency 'dry-inflector', '~> 0.2'
    spec.add_dependency 'dry-monitor', '~> 0.5'
    spec.add_dependency 'dry-validation', '~> 1.7'
-   spec.add_dependency 'envlogic', '~> 1.1'
-   spec.add_dependency 'ruby-kafka', '>= 1.3.0'
-   spec.add_dependency 'thor', '>= 1.1'
-   spec.add_dependency 'waterdrop', '~> 1.4'
-   spec.add_dependency 'zeitwerk', '~> 2.4'
+   spec.add_dependency 'rdkafka', '>= 0.10'
+   spec.add_dependency 'thor', '>= 0.20'
+   spec.add_dependency 'waterdrop', '>= 2.1.0', '< 3.0.0'
+   spec.add_dependency 'zeitwerk', '~> 2.3'

-   spec.required_ruby_version = '>= 2.7'
+   spec.required_ruby_version = '>= 2.6.0'

    if $PROGRAM_NAME.end_with?('gem')
      spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
@@ -35,7 +32,7 @@ Gem::Specification.new do |spec|

    spec.cert_chain = %w[certs/mensfeld.pem]
    spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
-   spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+   spec.executables = %w[karafka]
    spec.require_paths = %w[lib]

    spec.metadata = {
@@ -43,4 +40,3 @@ Gem::Specification.new do |spec|
      'rubygems_mfa_required' => 'true'
    }
  end
- # rubocop:enable Metrics/BlockLength
data/lib/active_job/consumer.rb ADDED
@@ -0,0 +1,22 @@
+ # frozen_string_literal: true
+
+ module ActiveJob
+   # This is the consumer for ActiveJob that eats the messages enqueued with it one after another.
+   # It marks the offset after each message, so we make sure, none of the jobs is executed twice
+   class Consumer < Karafka::BaseConsumer
+     # Executes the ActiveJob logic
+     # @note ActiveJob does not support batches, so we just run one message after another
+     def consume
+       messages.each do |message|
+         ActiveJob::Base.execute(
+           # We technically speaking could set this as deserializer and reference it from the
+           # message instead of using the `#raw_payload`. This is not done on purpose to simplify
+           # the ActiveJob setup here
+           ActiveSupport::JSON.decode(message.raw_payload)
+         )
+
+         mark_as_consumed(message)
+       end
+     end
+   end
+ end
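The hash handed to `ActiveJob::Base.execute` above is ActiveJob's standard serialized-job format, produced by `job.serialize` in the adapter below. Decoded, it looks roughly like this (all values illustrative):

```ruby
# Approximate shape of a serialized ActiveJob payload (values made up)
{
  'job_class'  => 'WelcomeEmailJob',
  'job_id'     => 'f8e0f2a6-6a1c-4a6e-9d3c-8c2b5a4f0d11',
  'queue_name' => 'default',
  'arguments'  => [42],
  'executions' => 0,
  'locale'     => 'en'
}
```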
data/lib/active_job/karafka.rb ADDED
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ require 'active_job'
+ require 'active_job/queue_adapters'
+ require 'active_job/consumer'
+ require 'active_job/routing_extensions'
+ require 'active_job/queue_adapters/karafka_adapter'
+
+ module ActiveJob
+   # Namespace for usage simplification outside of Rails where Railtie will not kick in.
+   # That way a require 'active_job/karafka' should be enough to use it
+   module Karafka
+   end
+ end
+
+ # We extend routing builder by adding a simple wrapper for easier jobs topics defining
+ ::Karafka::Routing::Builder.include ActiveJob::RoutingExtensions
+ ::Karafka::Routing::Proxy.include ActiveJob::RoutingExtensions
data/lib/active_job/queue_adapters/karafka_adapter.rb ADDED
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ # ActiveJob components to allow for jobs consumption with Karafka
+ module ActiveJob
+   # ActiveJob queue adapters
+   module QueueAdapters
+     # Karafka adapter for enqueuing jobs
+     class KarafkaAdapter
+       # Enqueues the job by sending all the payload to a dedicated topic in Kafka that will be
+       # later on consumed by a special ActiveJob consumer
+       #
+       # @param job [Object] job that should be enqueued
+       def enqueue(job)
+         ::Karafka.producer.produce_async(
+           topic: job.queue_name,
+           payload: ActiveSupport::JSON.encode(job.serialize)
+         )
+       end
+
+       # Raises info, that Karafka backend does not support scheduling jobs
+       #
+       # @param _job [Object] job we cannot enqueue
+       # @param _timestamp [Time] time when job should run
+       def enqueue_at(_job, _timestamp)
+         raise NotImplementedError, 'This queueing backend does not support scheduling jobs.'
+       end
+     end
+   end
+ end
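Because the adapter lives under `ActiveJob::QueueAdapters`, Rails can resolve it by name. A sketch of wiring it up in an app; the job class and queue name are illustrative:

```ruby
# config/application.rb (sketch)
# config.active_job.queue_adapter = :karafka

# Jobs enqueued through this adapter land on the Kafka topic matching their queue name
class WelcomeEmailJob < ApplicationJob
  queue_as :default

  def perform(user_id)
    puts "Welcoming user #{user_id}"
  end
end

# Produces a message to the 'default' topic via Karafka.producer
WelcomeEmailJob.perform_later(42)
```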
data/lib/active_job/routing_extensions.rb ADDED
@@ -0,0 +1,15 @@
+ # frozen_string_literal: true
+
+ module ActiveJob
+   # Routing extensions for ActiveJob
+   module RoutingExtensions
+     # This method simplifies routes definition for ActiveJob topics / queues by auto-injecting
+     # the consumer class
+     # @param name [String, Symbol] name of the topic where ActiveJobs jobs should go
+     def active_job_topic(name)
+       topic(name) do
+         consumer ActiveJob::Consumer
+       end
+     end
+   end
+ end
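With the extension included into the routing builder and proxy (see active_job/karafka.rb above), routing a jobs queue becomes a one-liner. A sketch, with an illustrative topic name:

```ruby
Karafka::App.routes.draw do
  # Shorthand provided by ActiveJob::RoutingExtensions...
  active_job_topic :default

  # ...equivalent to the explicit form:
  # topic :default do
  #   consumer ActiveJob::Consumer
  # end
end
```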