karafka 1.4.15 → 2.0.0.alpha1
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/FUNDING.yml +3 -0
- data/.github/workflows/ci.yml +74 -24
- data/.ruby-version +1 -1
- data/CHANGELOG.md +38 -39
- data/Gemfile +6 -0
- data/Gemfile.lock +50 -52
- data/LICENSE +14 -0
- data/LICENSE-COMM +89 -0
- data/LICENSE-LGPL +165 -0
- data/README.md +59 -14
- data/bin/benchmarks +85 -0
- data/bin/create_token +28 -0
- data/bin/integrations +160 -0
- data/bin/stress +13 -0
- data/certs/karafka-pro.pem +11 -0
- data/certs/mensfeld.pem +23 -24
- data/config/errors.yml +4 -38
- data/docker-compose.yml +11 -3
- data/karafka.gemspec +10 -20
- data/lib/active_job/consumer.rb +22 -0
- data/lib/active_job/karafka.rb +18 -0
- data/lib/active_job/queue_adapters/karafka_adapter.rb +29 -0
- data/lib/active_job/routing_extensions.rb +15 -0
- data/lib/karafka/app.rb +13 -20
- data/lib/karafka/base_consumer.rb +103 -34
- data/lib/karafka/cli/base.rb +4 -4
- data/lib/karafka/cli/info.rb +43 -8
- data/lib/karafka/cli/install.rb +3 -8
- data/lib/karafka/cli/server.rb +17 -30
- data/lib/karafka/cli.rb +4 -11
- data/lib/karafka/connection/client.rb +279 -93
- data/lib/karafka/connection/listener.rb +137 -38
- data/lib/karafka/connection/messages_buffer.rb +57 -0
- data/lib/karafka/connection/pauses_manager.rb +46 -0
- data/lib/karafka/connection/rebalance_manager.rb +62 -0
- data/lib/karafka/contracts/config.rb +25 -7
- data/lib/karafka/contracts/consumer_group.rb +0 -173
- data/lib/karafka/contracts/consumer_group_topic.rb +17 -7
- data/lib/karafka/contracts/server_cli_options.rb +1 -9
- data/lib/karafka/contracts.rb +1 -1
- data/lib/karafka/env.rb +46 -0
- data/lib/karafka/errors.rb +14 -18
- data/lib/karafka/helpers/multi_delegator.rb +2 -2
- data/lib/karafka/instrumentation/callbacks/error.rb +40 -0
- data/lib/karafka/instrumentation/callbacks/statistics.rb +42 -0
- data/lib/karafka/instrumentation/monitor.rb +14 -21
- data/lib/karafka/instrumentation/stdout_listener.rb +64 -91
- data/lib/karafka/instrumentation.rb +21 -0
- data/lib/karafka/licenser.rb +65 -0
- data/lib/karafka/{params → messages}/batch_metadata.rb +7 -13
- data/lib/karafka/messages/builders/batch_metadata.rb +30 -0
- data/lib/karafka/messages/builders/message.rb +38 -0
- data/lib/karafka/messages/builders/messages.rb +40 -0
- data/lib/karafka/{params/params.rb → messages/message.rb} +7 -12
- data/lib/karafka/messages/messages.rb +64 -0
- data/lib/karafka/{params → messages}/metadata.rb +4 -6
- data/lib/karafka/messages/seek.rb +9 -0
- data/lib/karafka/patches/rdkafka/consumer.rb +22 -0
- data/lib/karafka/processing/executor.rb +96 -0
- data/lib/karafka/processing/executors_buffer.rb +49 -0
- data/lib/karafka/processing/jobs/base.rb +18 -0
- data/lib/karafka/processing/jobs/consume.rb +28 -0
- data/lib/karafka/processing/jobs/revoked.rb +22 -0
- data/lib/karafka/processing/jobs/shutdown.rb +23 -0
- data/lib/karafka/processing/jobs_queue.rb +121 -0
- data/lib/karafka/processing/worker.rb +57 -0
- data/lib/karafka/processing/workers_batch.rb +22 -0
- data/lib/karafka/railtie.rb +65 -0
- data/lib/karafka/routing/builder.rb +15 -14
- data/lib/karafka/routing/consumer_group.rb +10 -18
- data/lib/karafka/routing/consumer_mapper.rb +1 -2
- data/lib/karafka/routing/router.rb +1 -1
- data/lib/karafka/routing/subscription_group.rb +53 -0
- data/lib/karafka/routing/subscription_groups_builder.rb +51 -0
- data/lib/karafka/routing/topic.rb +47 -25
- data/lib/karafka/runner.rb +59 -0
- data/lib/karafka/serialization/json/deserializer.rb +6 -15
- data/lib/karafka/server.rb +62 -25
- data/lib/karafka/setup/config.rb +86 -159
- data/lib/karafka/status.rb +13 -3
- data/lib/karafka/templates/example_consumer.rb.erb +16 -0
- data/lib/karafka/templates/karafka.rb.erb +14 -50
- data/lib/karafka/time_trackers/base.rb +19 -0
- data/lib/karafka/time_trackers/pause.rb +84 -0
- data/lib/karafka/time_trackers/poll.rb +65 -0
- data/lib/karafka/version.rb +1 -1
- data/lib/karafka.rb +30 -44
- data.tar.gz.sig +0 -0
- metadata +96 -132
- metadata.gz.sig +0 -0
- data/MIT-LICENCE +0 -18
- data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
- data/lib/karafka/attributes_map.rb +0 -63
- data/lib/karafka/backends/inline.rb +0 -16
- data/lib/karafka/base_responder.rb +0 -226
- data/lib/karafka/cli/flow.rb +0 -48
- data/lib/karafka/cli/missingno.rb +0 -19
- data/lib/karafka/code_reloader.rb +0 -67
- data/lib/karafka/connection/api_adapter.rb +0 -158
- data/lib/karafka/connection/batch_delegator.rb +0 -55
- data/lib/karafka/connection/builder.rb +0 -23
- data/lib/karafka/connection/message_delegator.rb +0 -36
- data/lib/karafka/consumers/batch_metadata.rb +0 -10
- data/lib/karafka/consumers/callbacks.rb +0 -71
- data/lib/karafka/consumers/includer.rb +0 -64
- data/lib/karafka/consumers/responders.rb +0 -24
- data/lib/karafka/consumers/single_params.rb +0 -15
- data/lib/karafka/contracts/responder_usage.rb +0 -54
- data/lib/karafka/fetcher.rb +0 -42
- data/lib/karafka/helpers/class_matcher.rb +0 -88
- data/lib/karafka/helpers/config_retriever.rb +0 -46
- data/lib/karafka/helpers/inflector.rb +0 -26
- data/lib/karafka/params/builders/batch_metadata.rb +0 -30
- data/lib/karafka/params/builders/params.rb +0 -38
- data/lib/karafka/params/builders/params_batch.rb +0 -25
- data/lib/karafka/params/params_batch.rb +0 -60
- data/lib/karafka/patches/ruby_kafka.rb +0 -47
- data/lib/karafka/persistence/client.rb +0 -29
- data/lib/karafka/persistence/consumers.rb +0 -45
- data/lib/karafka/persistence/topics.rb +0 -48
- data/lib/karafka/responders/builder.rb +0 -36
- data/lib/karafka/responders/topic.rb +0 -55
- data/lib/karafka/routing/topic_mapper.rb +0 -53
- data/lib/karafka/serialization/json/serializer.rb +0 -31
- data/lib/karafka/setup/configurators/water_drop.rb +0 -36
- data/lib/karafka/templates/application_responder.rb.erb +0 -11
data/LICENSE-LGPL
ADDED
@@ -0,0 +1,165 @@
+GNU LESSER GENERAL PUBLIC LICENSE
+Version 3, 29 June 2007
+
+Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+
+This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+0. Additional Definitions.
+
+As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+"The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+1. Exception to Section 3 of the GNU GPL.
+
+You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+2. Conveying Modified Versions.
+
+If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+a) under this License, provided that you make a good faith effort to
+ensure that, in the event an Application does not supply the
+function or data, the facility still operates, and performs
+whatever part of its purpose remains meaningful, or
+
+b) under the GNU GPL, with none of the additional permissions of
+this License applicable to that copy.
+
+3. Object Code Incorporating Material from Library Header Files.
+
+The object code form of an Application may incorporate material from
+a header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+a) Give prominent notice with each copy of the object code that the
+Library is used in it and that the Library and its use are
+covered by this License.
+
+b) Accompany the object code with a copy of the GNU GPL and this license
+document.
+
+4. Combined Works.
+
+You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+a) Give prominent notice with each copy of the Combined Work that
+the Library is used in it and that the Library and its use are
+covered by this License.
+
+b) Accompany the Combined Work with a copy of the GNU GPL and this license
+document.
+
+c) For a Combined Work that displays copyright notices during
+execution, include the copyright notice for the Library among
+these notices, as well as a reference directing the user to the
+copies of the GNU GPL and this license document.
+
+d) Do one of the following:
+
+0) Convey the Minimal Corresponding Source under the terms of this
+License, and the Corresponding Application Code in a form
+suitable for, and under terms that permit, the user to
+recombine or relink the Application with a modified version of
+the Linked Version to produce a modified Combined Work, in the
+manner specified by section 6 of the GNU GPL for conveying
+Corresponding Source.
+
+1) Use a suitable shared library mechanism for linking with the
+Library. A suitable mechanism is one that (a) uses at run time
+a copy of the Library already present on the user's computer
+system, and (b) will operate properly with a modified version
+of the Library that is interface-compatible with the Linked
+Version.
+
+e) Provide Installation Information, but only if you would otherwise
+be required to provide such information under section 6 of the
+GNU GPL, and only to the extent that such information is
+necessary to install and execute a modified version of the
+Combined Work produced by recombining or relinking the
+Application with a modified version of the Linked Version. (If
+you use option 4d0, the Installation Information must accompany
+the Minimal Corresponding Source and Corresponding Application
+Code. If you use option 4d1, you must provide the Installation
+Information in the manner specified by section 6 of the GNU GPL
+for conveying Corresponding Source.)
+
+5. Combined Libraries.
+
+You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+a) Accompany the combined library with a copy of the same work based
+on the Library, uncombined with any other library facilities,
+conveyed under the terms of this License.
+
+b) Give prominent notice with the combined library that part of it
+is a work based on the Library, and explaining where to find the
+accompanying uncombined form of the same work.
+
+6. Revised Versions of the GNU Lesser General Public License.
+
+The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
data/README.md
CHANGED
@@ -1,21 +1,66 @@
-
+![karafka logo](https://raw.githubusercontent.com/karafka/misc/master/logo/karafka_logotype_transparent2.png)
 
-
+[![Build Status](https://github.com/karafka/karafka/actions/workflows/ci.yml/badge.svg)](https://github.com/karafka/karafka/actions/workflows/ci.yml)
+[![Gem Version](https://badge.fury.io/rb/karafka.svg)](http://badge.fury.io/rb/karafka)
+[![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)
 
-Karafka `
+**Note**: All of the documentation here refers to Karafka `2.0`. If you are looking for the documentation to Karafka `1.4` please click here (TBA).
 
-
-- Skipping messages
-- Hanging during processing
-- Unexpectedly stopping message processing
-- Failure to deliver messages to Kafka
-- Resetting the consumer group and starting from the beginning
+## About Karafka
 
-
+Karafka is a framework used to simplify Apache Kafka based Ruby and Ruby on Rails applications development.
 
-
+```ruby
+# Define what topics you want to consume with which consumers in karafka.rb
+Karafka::App.routes.draw do
+  topic 'system_events' do
+    consumer EventsConsumer
+  end
+end
 
-
-
+# And create your consumers, within which your messages will be processed
+class EventsConsumer < ApplicationConsumer
+  # Example that utilizes ActiveRecord#insert_all and Karafka batch processing
+  def consume
+    # Store all of the incoming Kafka events locally in an efficient way
+    Event.insert_all messages.payloads
+  end
+end
+```
 
-
+Karafka allows you to capture everything that happens in your systems in large scale, providing you with a seamless and stable core for consuming, processing and producing data, without having to focus on things that are not your business domain.
+
+Karafka **uses** threads to handle many messages at the same time in the same process. It does not require Rails but will integrate tightly with any Ruby on Rails applications to make event processing dead simple.
+
+## Getting started
+
+If you're completely new to the subject, you can start with our "Kafka on Rails" articles series, that will get you up and running with the terminology and basic ideas behind using Kafka:
+
+- [Kafka on Rails: Using Kafka with Ruby on Rails – Part 1 – Kafka basics and its advantages](https://mensfeld.pl/2017/11/kafka-on-rails-using-kafka-with-ruby-on-rails-part-1-kafka-basics-and-its-advantages/)
+- [Kafka on Rails: Using Kafka with Ruby on Rails – Part 2 – Getting started with Ruby and Kafka](https://mensfeld.pl/2018/01/kafka-on-rails-using-kafka-with-ruby-on-rails-part-2-getting-started-with-ruby-and-kafka/)
+
+If you want to get started with Kafka and Karafka as fast as possible, then the best idea is to visit our [Getting started](https://github.com/karafka/karafka/wiki/Getting-started) guides and the [example apps repository](https://github.com/karafka/example-apps).
+
+We also maintain many [integration specs](https://github.com/karafka/karafka/tree/master/spec/integrations) illustrating various use-cases and features of the framework.
+
+## Want to Upgrade? LGPL is not for you? Want to help?
+
+I also sell Karafka Pro subscription. It includes commercial-friendly license, priority support, architecture consultations and high throughput data processing-related features (under development).
+
+**20%** of the income will be distributed back to other OSS projects that Karafka uses under the hood.
+
+Help me provide high-quality open-source software. Please see the Karafka [homepage](https://karafka.io) for more details.
+
+## Support
+
+Karafka has [Wiki pages](https://github.com/karafka/karafka/wiki) for almost everything and a pretty decent [FAQ](https://github.com/karafka/karafka/wiki/FAQ). It covers the whole installation, setup and deployment along with other useful details on how to run Karafka.
+
+If you have any questions about using Karafka, feel free to join our [Slack](https://slack.karafka.io) channel.
+
+## Note on contributions
+
+First, thank you for considering contributing to the Karafka ecosystem! It's people like you that make the open source community such a great community!
+
+Each pull request must pass all the RSpec specs, integration tests and meet our quality requirements.
+
+Fork it, update and wait for the Github Actions results.
data/bin/benchmarks
ADDED
@@ -0,0 +1,85 @@
+#!/usr/bin/env ruby
+
+# Runner for running given benchmark cases
+# Some of the cases require pre-populated data and we populate this in places that need it
+# In other cases we generate this data in a background process, so the partitions data stream
+# is consistent and we don't end up consuming huge batches of a single partition.
+
+require 'open3'
+require 'pathname'
+
+$LOAD_PATH.unshift(File.dirname(__FILE__))
+$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..'))
+
+ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))
+
+# Load all the benchmarks
+benchmarks = Dir[ROOT_PATH.join('spec/benchmarks/**/*.rb')]
+
+# If filter is provided, apply
+benchmarks.delete_if { |name| !name.include?(ARGV[0]) } if ARGV[0]
+
+raise ArgumentError, "No benchmarks with filter: #{ARGV[0]}" if benchmarks.empty?
+
+# We may skip seeding if we are running the benchmarks multiple times, then since we do not
+# commit offsets we can skip generating more data
+if ENV['SEED']
+  require 'spec/benchmarks_helper'
+
+  # We need to setup karafka here to have producer for data seeding
+  setup_karafka
+
+  # This takes some time but needs to run only once per benchmark session
+  puts 'Seeding benchmarks data...'
+
+  producer = Karafka::App.producer
+
+  # We make our data json compatible so we can also benchmark serialization
+  elements = Array.new(100_000) { { a: :b }.to_json }
+
+  # We do not populate data of benchmarks_0_10 as we use it with life-stream data only
+  %w[
+    benchmarks_0_01
+    benchmarks_0_05
+  ].each do |topic_name|
+    partitions_count = topic_name.split('_').last.to_i
+
+    partitions_count.times do |partition|
+      puts "Seeding #{topic_name}:#{partition}"
+
+      elements.each_slice(10_000) do |data_slice|
+        data = data_slice.map do |data|
+          { topic: topic_name, payload: data, partition: partition }
+        end
+
+        producer.buffer_many(data)
+        producer.flush_sync
+      end
+    end
+  end
+end
+
+# Selects requested benchmarks and runs them one after another
+benchmarks.each do |benchmark_path|
+  puts "Running #{benchmark_path.gsub("#{ROOT_PATH}/spec/benchmarks/", '')}"
+
+  benchmark = "bundle exec ruby -r ./spec/benchmarks_helper.rb #{benchmark_path}"
+
+  Open3.popen3(benchmark) do |stdin, stdout, stderr, thread|
+    t1 = Thread.new do
+      while line = stdout.gets
+        puts(line)
+      end
+    rescue IOError
+    end
+
+    t2 = Thread.new do
+      while line = stderr.gets
+        puts(line)
+      end
+    rescue IOError
+    end
+
+    thread.join
+  end
+end
data/bin/create_token
ADDED
@@ -0,0 +1,28 @@
+#!/usr/bin/env ruby
+
+require 'openssl'
+require 'base64'
+require 'json'
+require 'date'
+
+PRIVATE_KEY_LOCATION = File.join(Dir.home, '.ssh', 'karafka-pro', 'id_rsa')
+
+# Name of the entity that acquires the license
+ENTITY = ARGV[0]
+# Date till which license is valid
+EXPIRES_ON = Date.parse(ARGV[1])
+
+raise ArgumentError, 'Entity missing' if ENTITY.nil? || ENTITY.empty?
+raise ArgumentError, 'Expires on needs to be in the future' if EXPIRES_ON <= Date.today
+
+pro_token_data = {
+  entity: ENTITY,
+  expires_on: EXPIRES_ON
+}
+
+# This code uses my private key to generate a new token for Karafka Pro capabilities
+private_key = OpenSSL::PKey::RSA.new(File.read(PRIVATE_KEY_LOCATION))
+
+bin_key = private_key.private_encrypt(pro_token_data.to_json)
+
+puts Base64.encode64(bin_key)
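The `private_encrypt` call above pairs with the RSA public key added in this release (`data/certs/karafka-pro.pem`), which the new `lib/karafka/licenser.rb` presumably uses to validate tokens. Below is a rough sketch of the decode side, using only the stdlib pieces already required by this script; the token file path is hypothetical and the actual Licenser implementation may differ.

```ruby
require 'openssl'
require 'base64'
require 'json'

# Public key that ships with the gem (data/certs/karafka-pro.pem)
public_key = OpenSSL::PKey::RSA.new(File.read('certs/karafka-pro.pem'))

# Token as produced by bin/create_token (hypothetical storage location)
encoded_token = File.read('karafka_pro_token.txt')

# Reverse the Base64.encode64 + RSA#private_encrypt steps from the script above
license = JSON.parse(public_key.public_decrypt(Base64.decode64(encoded_token)))

license.fetch('entity')     # => name of the license holder
license.fetch('expires_on') # => date string until which the token is valid
```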
data/bin/integrations
ADDED
@@ -0,0 +1,160 @@
+#!/usr/bin/env ruby
+
+# Runner to run integration specs in parallel
+
+require 'open3'
+require 'fileutils'
+require 'pathname'
+
+ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))
+
+# Raised from the parent process if any of the integration tests fails
+IntegrationTestError = Class.new(StandardError)
+
+# How many child processes with integration specs do we want to run in parallel
+# When the value is high, there's a problem with thread allocation on Github
+CONCURRENCY = 5
+
+# Abstraction around a single test scenario execution process
+class Scenario
+  # How long a scenario can run before we kill it
+  # This is a fail-safe just in case something would hang
+  MAX_RUN_TIME = 60 * 5
+
+  # There are rare cases where Karafka may force shutdown for some of the integration cases
+  # This includes exactly those
+  EXIT_CODES = {
+    default: [0],
+    'consumption/worker_critical_error_behaviour.rb' => [0, 2].freeze,
+    'shutdown/on_hanging_jobs_and_a_shutdown.rb' => [2].freeze,
+    'shutdown/on_hanging_on_shutdown_job_and_a_shutdown.rb' => [2].freeze,
+    'shutdown/on_hanging_poll_and_shutdown.rb' => [2].freeze
+  }.freeze
+
+  private_constant :MAX_RUN_TIME, :EXIT_CODES
+
+  # Creates scenario instance and runs in the background process
+  #
+  # @param path [String] path to the scenarios file
+  def initialize(path)
+    @path = path
+    @stdin, @stdout, @stderr, @wait_thr = Open3.popen3(
+      "bundle exec ruby -r ./spec/integrations_helper.rb #{path}"
+    )
+    @started_at = current_time
+    # Last 1024 characters from stdout
+    @stdout_tail = ''
+  end
+
+  # @return [String] integration spec name
+  def name
+    @path.gsub("#{ROOT_PATH}/spec/integrations/", '')
+  end
+
+  # @return [Boolean] did this scenario finished or is it still running
+  def finished?
+    # If the thread is running too long, kill it
+    if current_time - @started_at > MAX_RUN_TIME
+      @wait_thr.kill
+      Process.kill('TERM', pid)
+    end
+
+    # We read it so it won't grow as we use our default logger that prints to both test.log and
+    # to stdout. Otherwise after reaching the buffer size, it would hang
+    buffer = ''
+    @stdout.read_nonblock(10_240, buffer, exception: false)
+    @stdout_tail << buffer
+    @stdout_tail = @stdout_tail[-10_024..-1] || @stdout_tail
+
+    !@wait_thr.alive?
+  end
+
+  # @return [Integer] pid of the process of this scenario
+  def pid
+    @wait_thr.pid
+  end
+
+  # @return [Integer] exit code of the process running given scenario
+  def exit_code
+    # There may be no exit status if we killed the thread
+    @wait_thr.value&.exitstatus || 123
+  end
+
+  # @return [Boolean] did this scenario finish successfully or not
+  def success?
+    expected_exit_codes = EXIT_CODES[name] || EXIT_CODES[:default]
+
+    expected_exit_codes.include?(exit_code)
+  end
+
+  # Prints a status report when scenario is finished and stdout if it failed
+  def report
+    result = success? ? "\e[#{32}m#{'OK'}\e[0m" : "\e[#{31}m#{'FAILED'}\e[0m"
+
+    puts "#{result} #{name}"
+
+    unless success?
+      puts "Exit code: #{exit_code}"
+      puts @stdout_tail
+      puts @stderr.read
+    end
+  end
+
+  private
+
+  # @return [Float] current machine time
+  def current_time
+    Process.clock_gettime(Process::CLOCK_MONOTONIC)
+  end
+end
+
+# Simple array to keep track of active integration processes thread running with info on which
+# test scenario is running
+active_scenarios = []
+
+# Finished runners
+finished_scenarios = []
+
+# Waits for any of the processes to be finished and tracks exit codes
+#
+# @param active_scenarios [Array] active runners
+# @param finished_scenarios [Hash] finished forks exit codes
+def wait_and_track(active_scenarios, finished_scenarios)
+  exited = active_scenarios.find(&:finished?)
+
+  if exited
+    scenario = active_scenarios.delete(exited)
+
+    scenario.report
+
+    finished_scenarios << scenario
+  else
+    Thread.pass
+  end
+end
+
+# Load all the specs
+specs = Dir[ROOT_PATH.join('spec/integrations/**/*.rb')]
+
+# If filter is provided, apply
+specs.delete_if { |name| !name.include?(ARGV[0]) } if ARGV[0]
+
+raise ArgumentError, "No integration specs with filter: #{ARGV[0]}" if specs.empty?
+
+# Randomize order
+seed = (ENV['SEED'] || rand(0..10_000)).to_i
+
+puts "Random seed: #{seed}"
+
+specs.shuffle(random: Random.new(seed)).each do |integration_test|
+  scenario = Scenario.new(integration_test)
+
+  active_scenarios << scenario
+
+  wait_and_track(active_scenarios, finished_scenarios) until active_scenarios.size < CONCURRENCY
+end
+
+wait_and_track(active_scenarios, finished_scenarios) while !active_scenarios.empty?
+
+# Fail all if any of the tests does not have expected exit code
+raise IntegrationTestError unless finished_scenarios.all?(&:success?)
data/bin/stress
ADDED
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+# Runs integration specs in an endless loop
+# This allows us to ensure (after long enough time) that the integrations test suit is stable and
+# that there are no anomalies when running it for a long period of time
+
+set -e
+
+while :
+do
+  reset
+  bundle exec bin/integrations $1
+done
data/certs/karafka-pro.pem
ADDED
@@ -0,0 +1,11 @@
+-----BEGIN RSA PUBLIC KEY-----
+MIIBigKCAYEApcd6ybskiNs9WUvBGVUE8GdWDehjZ9TyjSj/fDl/UcMYqY0R5YX9
+tnYxEwZZRMdVltKWxr88Qmshh1IQz6CpJVbcfYjt/158pSGPm+AUua6tkLqIvZDM
+ocFOMafmroI+BMuL+Zu5QH7HC2tkT16jclGYfMQkJjXVUQTk2UZr+94+8RlUz/CH
+Y6hPA7xPgIyPfyPCxz1VWzAwXwT++NCJQPBr5MqT84LNSEzUSlR9pFNShf3UCUT+
+8LWOvjFSNGmMMSsbo2T7/+dz9/FM02YG00EO0x04qteggwcaEYLFrigDN6/fM0ih
+BXZILnMUqC/qrfW2YFg4ZqKZJuxaALqqkPxrkBDYqoqcAloqn36jBSke6tc/2I/J
+2Afq3r53UoAbUH7h5I/L8YeaiA4MYjAuq724lHlrOmIr4D6yjYC0a1LGlPjLk869
+2nsVXNgomhVb071E6amR+rJJnfvkdZgCmEBFnqnBV5A1u4qgNsa2rVcD+gJRvb2T
+aQtjlQWKPx5xAgMBAAE=
+-----END RSA PUBLIC KEY-----
data/certs/mensfeld.pem
CHANGED
@@ -1,26 +1,25 @@
 -----BEGIN CERTIFICATE-----
-[23 lines of the previous certificate truncated in this view]
-MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
+MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
+ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMjEwODExMTQxNTEzWhcNMjIwODExMTQx
+NTEzWjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
+CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDV2jKH4Ti87GM6nyT6D+ESzTI0MZDj
+ak2/TEwnxvijMJyCCPKT/qIkbW4/f0VHM4rhPr1nW73sb5SZBVFCLlJcOSKOBdUY
+TMY+SIXN2EtUaZuhAOe8LxtxjHTgRHvHcqUQMBENXTISNzCo32LnUxweu66ia4Pd
+1mNRhzOqNv9YiBZvtBf7IMQ+sYdOCjboq2dlsWmJiwiDpY9lQBTnWORnT3mQxU5x
+vPSwnLB854cHdCS8fQo4DjeJBRZHhEbcE5sqhEMB3RZA3EtFVEXOxlNxVTS3tncI
+qyNXiWDaxcipaens4ObSY1C2HTV7OWb7OMqSCIybeYTSfkaSdqmcl4S6zxXkjH1J
+tnjayAVzD+QVXGijsPLE2PFnJAh9iDET2cMsjabO1f6l1OQNyAtqpcyQcgfnyW0z
+g7tGxTYD+6wJHffM9d9txOUw6djkF6bDxyqB8lo4Z3IObCx18AZjI9XPS9QG7w6q
+LCWuMG2lkCcRgASqaVk9fEf9yMc2xxz5o3kCAwEAAaN3MHUwCQYDVR0TBAIwADAL
+BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFBqUFCKCOe5IuueUVqOB991jyCLLMB0GA1Ud
+EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
+c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBADD0/UuTTFgW+CGk2U0RDw2RBOca
+W2LTF/G7AOzuzD0Tc4voc7WXyrgKwJREv8rgBimLnNlgmFJLmtUCh2U/MgxvcilH
+yshYcbseNvjkrtYnLRlWZR4SSB6Zei5AlyGVQLPkvdsBpNegcG6w075YEwzX/38a
+8V9B/Yri2OGELBz8ykl7BsXUgNoUPA/4pHF6YRLz+VirOaUIQ4JfY7xGj6fSOWWz
+/rQ/d77r6o1mfJYM/3BRVg73a3b7DmRnE5qjwmSaSQ7u802pJnLesmArch0xGCT/
+fMmRli1Qb+6qOTl9mzD6UDMAyFR4t6MStLm0mIEqM0nBO5nUdUWbC7l9qXEf8XBE
+2DP28p3EqSuS+lKbAWKcqv7t0iRhhmaod+Yn9mcrLN1sa3q3KSQ9BCyxezCD4Mk2
+R2P11bWoCtr70BsccVrN8jEhzwXngMyI2gVt750Y+dbTu1KgRqZKp/ECe7ZzPzXj
+pIy9vHxTANKYVyI4qj8OrFdEM5BQNu8oQpL0iQ==
 -----END CERTIFICATE-----
data/config/errors.yml
CHANGED
@@ -1,41 +1,7 @@
 en:
   dry_validation:
     errors:
-
-
-
-
-      invalid_certificate: >
-        is not a valid certificate
-      invalid_certificate_from_path: >
-        is not a valid certificate
-      invalid_private_key: >
-        is not a valid private key
-      max_timeout_size_for_exponential: >
-        pause_timeout cannot be more than pause_max_timeout
-      max_wait_time_limit:
-        max_wait_time cannot be more than socket_timeout
-      topics_names_not_unique: >
-        all topic names within a single consumer group must be unique
-      ssl_client_cert_with_ssl_client_cert_key: >
-        Both ssl_client_cert and ssl_client_cert_key need to be provided
-      ssl_client_cert_key_with_ssl_client_cert: >
-        Both ssl_client_cert_key and ssl_client_cert need to be provided
-      ssl_client_cert_chain_with_ssl_client_cert: >
-        Both ssl_client_cert_chain and ssl_client_cert need to be provided
-      ssl_client_cert_chain_with_ssl_client_cert_key: >
-        Both ssl_client_cert_chain and ssl_client_cert_key need to be provided
-      ssl_client_cert_key_password_with_ssl_client_cert_key: >
-        Both ssl_client_cert_key_password and ssl_client_cert_key need to be provided
-      does_not_respond_to_token: >
-        needs to respond to a #token method
-      required_usage_count: >
-        Given topic must be used at least once
-      pid_already_exists: >
-        Pidfile already exists
-      consumer_groups_inclusion: >
-        Unknown consumer group
-      does_not_exist:
-        Given file does not exist or cannot be read
-      does_not_respond_to_call: >
-        needs to respond to a #call method
+      max_timeout_vs_pause_max_timeout: pause_timeout must be less or equal to pause_max_timeout
+      topics_names_not_unique: all topic names within a single consumer group must be unique
+      required_usage_count: Given topic must be used at least once
+      consumer_groups_inclusion: Unknown consumer group
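The dropped `ssl_client_cert*` and socket messages appear to track the switch from ruby-kafka to librdkafka-managed settings, leaving only framework-level validations with dedicated messages. As an illustration of the first remaining message, here is a hypothetical setup the config contract would reject, assuming `pause_timeout` and `pause_max_timeout` stay top-level settings expressed in milliseconds in this alpha:

```ruby
Karafka::App.setup do |config|
  # pause_timeout greater than pause_max_timeout should fail the config contract
  # with the max_timeout_vs_pause_max_timeout message above
  config.pause_timeout = 30_000    # assumed milliseconds
  config.pause_max_timeout = 5_000 # smaller than pause_timeout -> validation error
end
```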
data/docker-compose.yml
CHANGED
@@ -3,15 +3,23 @@ services:
   zookeeper:
     image: wurstmeister/zookeeper
     ports:
-      -
+      - '2181:2181'
   kafka:
-    image: wurstmeister/kafka
+    image: wurstmeister/kafka
     ports:
-      -
+      - '9092:9092'
     environment:
       KAFKA_ADVERTISED_HOST_NAME: localhost
       KAFKA_ADVERTISED_PORT: 9092
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
+      KAFKA_CREATE_TOPICS:
+        "integrations_0_03:3:1,\
+        integrations_1_03:3:1,\
+        integrations_0_10:10:1,\
+        integrations_1_10:10:1,\
+        benchmarks_0_01:1:1,\
+        benchmarks_0_05:5:1,\
+        benchmarks_0_10:10:1"
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
|