karafka 0.5.0.2 → 0.5.0.3
- checksums.yaml +4 -4
- data/.ruby-version +1 -1
- data/.travis.yml +1 -0
- data/CHANGELOG.md +29 -11
- data/Gemfile.lock +47 -20
- data/README.md +86 -16
- data/karafka.gemspec +14 -13
- data/lib/karafka.rb +2 -1
- data/lib/karafka/app.rb +3 -0
- data/lib/karafka/base_responder.rb +23 -1
- data/lib/karafka/connection/consumer.rb +6 -2
- data/lib/karafka/connection/topic_consumer.rb +11 -0
- data/lib/karafka/errors.rb +12 -3
- data/lib/karafka/process.rb +1 -1
- data/lib/karafka/routing/mapper.rb +53 -0
- data/lib/karafka/routing/route.rb +1 -1
- data/lib/karafka/server.rb +10 -0
- data/lib/karafka/setup/config.rb +18 -0
- data/lib/karafka/setup/config_schema.rb +44 -0
- data/lib/karafka/version.rb +1 -1
- metadata +28 -14
- data/lib/karafka/capistrano.rb +0 -2
- data/lib/karafka/capistrano/karafka.cap +0 -85
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6d336fa4ee7c6d7a9b38ee1ea08e70a9f6208312
+  data.tar.gz: 219b4f431dc21cbe403f2f8aa50b81eae850c93c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 288e9e7054f1716c9d2600fb572b4fd288c6d33a29df89b644d584fb91d410da40ace8628889d7d936bd0526b40f0ffcf75729a41826c1bd0b6ddd1e5262e5d1
+  data.tar.gz: 00214b9cf81f378b42d9a3c78151f1a8f95cfe5a8110c7566ae2280edf41bbcd9cc527e586d7d3978195418c1b8d6e48821a7ac21855c1578ca193e78c484241
data/.ruby-version CHANGED

@@ -1 +1 @@
-2.4.
+2.4.1
data/.travis.yml CHANGED
data/CHANGELOG.md CHANGED

@@ -1,5 +1,23 @@
 # Karafka framework changelog
 
+## 0.5.0.3
+- #132 - When Kafka is gone, should reconnect after a time period
+- #136 - new ruby-kafka version + other gem bumps
+- ruby-kafka update
+- #135 - NonMatchingRouteError - better error description in the code
+- #140 - Move Capistrano Karafka to a different specific gem
+- #110 - Add call method on a responder class to alias instance build and call
+- #76 - Configs validator
+- #138 - Possibility to have no worker class defined if inline_mode is being used
+- #145 - Topic Mapper
+- Ruby update to 2.4.1
+- Gem bump x2
+- #158 - Update docs section on heroku usage
+- #150 - Add support for start_from_beginning on a per topic basis
+- #148 - Lower Karafka Sidekiq dependency
+- Allow karafka root to be specified from ENV
+- Handle SIGTERM as a shutdown command for kafka server to support Heroku deployment
+
 ## 0.5.0.2
 - Gems update x3
 - Default Ruby set to 2.3.3

@@ -16,7 +34,7 @@
 - #130 - start_from_beginning flag on routes and default
 - #128 - Monitor caller_label not working with super on inheritance
 - Renamed *inline* to *inline_mode* to stay consistent with flags that change the way karafka works (#125)
-- Dry-configurable
+- Dry-configurable bump to 0.5 with fixed proc value evaluation on retrieve patch (internal change)
 
 ## 0.5.0.1
 - Fixed inconsistency in responders non-required topic definition. Now only required: false available

@@ -50,7 +68,7 @@
 - Using App name as a Kafka client_id
 - Automatic Capistrano integration
 - Responders support for handling better responses pipelining and better responses flow description and design (see README for more details)
-- Gem
+- Gem bump
 - Readme updates
 - karafka flow CLI command for printing the application flow
 - Some internal refactorings

@@ -59,7 +77,7 @@
 - #87 - Reconsume mode with crone for better Rails/Rack integration
 - Moved Karafka server related stuff into separate Karafka::Server class
 - Renamed Karafka::Runner into Karafka::Fetcher
-- Gem
+- Gem bump
 - Added chroot option to Zookeeper options
 - Moved BROKERS_PATH into config from constant
 - Added Karafka consume CLI action for a short running single consumption round

@@ -72,7 +90,7 @@
 - #63 - Graceful shutdown with current offset state during data processing
 - #65 - Example of NewRelic monitor is outdated
 - #71 - Setup should be executed after user code is loaded
-- Gem
+- Gem bump x3
 - Rubocop remarks
 - worker_timeout config option has been removed. It now needs to be defined manually by the framework user because WorkerGlass::Timeout can be disabled and we cannot use Karafka settings on a class level to initialize user code stuff
 - Moved setup logic under setup/Setup namespace

@@ -82,12 +100,12 @@
 - #81 - Switch config management to dry configurable
 - Version fix
 - Dropped support for Ruby 2.1.*
-- Ruby
+- Ruby bump to 2.3.1
 
 ## 0.4.0
 - Added WaterDrop gem with default configuration
 - Refactoring of config logic to simplify adding new dependencies that need to be configured based on #setup data
-- Gem
+- Gem bump
 - Readme updates
 - Renamed cluster to actor_cluster for method names
 - Replaced SidekiqGlass with generic WorkerGlass lib

@@ -128,21 +146,21 @@
 
 ## 0.3.0
 - Switched from custom ParserError for each parser to general catching of Karafka::Errors::ParseError and its descendants
-- Gem
+- Gem bump
 - Fixed #32 - now when using custom workers that does not inherit from Karafka::BaseWorker perform method is not required. Using custom workers means that the logic that would normally lie under #perform, needs to be executed directly from the worker.
 - Fixed #31 - Technically didn't fix because this is how Sidekiq is meant to work, but provided possibility to assign custom interchangers that allow to bypass JSON encoding issues by converting data that goes to Redis to a required format (and parsing it back when it is fetched)
 - Added full parameters lazy load - content is no longer loaded during #perform_async if params are not used in before_enqueue
 - No more namespaces for Redis by default (use separate DBs)
 
 ## 0.1.21
-- Sidekiq 4.0.1
-- Gem
+- Sidekiq 4.0.1 bump
+- Gem bump
 - Added direct celluloid requirement to Karafka (removed from Sidekiq)
 
 ## 0.1.19
 - Internal call - schedule naming change
 - Enqueue to perform_async naming in controller to follow Sidekiqs naming convention
-- Gem
+- Gem bump
 
 ## 0.1.18
 - Changed Redis configuration options into a single hash that is directly passed to Redis setup for Sidekiq

@@ -174,7 +192,7 @@
 
 ## 0.1.13
 - Ability to assign custom workers and use them bypassing Karafka::BaseWorker (or its descendants)
-- Gem
+- Gem bump
 
 ## 0.1.12
 - All internal errors went to Karafka::Errors namespace
data/Gemfile.lock CHANGED

@@ -1,22 +1,23 @@
 PATH
   remote: .
   specs:
-    karafka (0.5.0.
+    karafka (0.5.0.3)
       activesupport (~> 5.0)
       celluloid (~> 0.17)
-      dry-configurable (~> 0.
+      dry-configurable (~> 0.7)
+      dry-validation (~> 0.10.6)
       envlogic (~> 1.0)
       rake (~> 11.3)
-      ruby-kafka (= 0.3.
-      sidekiq (
+      ruby-kafka (= 0.3.17)
+      sidekiq (>= 4.2)
       thor (~> 0.19)
-      waterdrop (~> 0.3.2.
+      waterdrop (~> 0.3.2.4)
       worker-glass (~> 0.2)
 
 GEM
   remote: https://rubygems.org/
   specs:
-    activesupport (5.0.
+    activesupport (5.0.2)
       concurrent-ruby (~> 1.0, >= 1.0.2)
       i18n (~> 0.7)
       minitest (~> 5.1)

@@ -54,14 +55,39 @@ GEM
     coercible (1.0.0)
       descendants_tracker (~> 0.0.1)
     colorize (0.8.1)
-    concurrent-ruby (1.0.
+    concurrent-ruby (1.0.5)
     connection_pool (2.2.1)
     descendants_tracker (0.0.4)
       thread_safe (~> 0.3, >= 0.3.1)
     diff-lcs (1.2.5)
     docile (1.1.5)
-    dry-configurable (0.
+    dry-configurable (0.7.0)
       concurrent-ruby (~> 1.0)
+    dry-container (0.6.0)
+      concurrent-ruby (~> 1.0)
+      dry-configurable (~> 0.1, >= 0.1.3)
+    dry-core (0.3.1)
+      concurrent-ruby (~> 1.0)
+    dry-equalizer (0.2.0)
+    dry-logic (0.4.1)
+      dry-container (~> 0.2, >= 0.2.6)
+      dry-core (~> 0.2)
+      dry-equalizer (~> 0.2)
+    dry-types (0.10.3)
+      concurrent-ruby (~> 1.0)
+      dry-configurable (~> 0.1)
+      dry-container (~> 0.3)
+      dry-core (~> 0.2, >= 0.2.1)
+      dry-equalizer (~> 0.2)
+      dry-logic (~> 0.4, >= 0.4.0)
+      inflecto (~> 0.0.0, >= 0.0.2)
+    dry-validation (0.10.7)
+      concurrent-ruby (~> 1.0)
+      dry-configurable (~> 0.1, >= 0.1.3)
+      dry-core (~> 0.2, >= 0.2.1)
+      dry-equalizer (~> 0.2)
+      dry-logic (~> 0.4, >= 0.4.0)
+      dry-types (~> 0.9, >= 0.9.0)
     envlogic (1.0.3)
       activesupport
     equalizer (0.0.11)

@@ -84,15 +110,16 @@ GEM
       rake (>= 10, < 12)
       rubocop (>= 0.36.0)
       sysexits (~> 1.1)
-    hitimes (1.2.
+    hitimes (1.2.5)
     i18n (0.7.0)
     ice_nine (0.11.2)
+    inflecto (0.0.2)
     json (2.0.2)
     launchy (2.4.3)
       addressable (~> 2.3)
     method_source (0.8.2)
     minitest (5.10.1)
-    null-logger (0.1.
+    null-logger (0.1.4)
     parser (2.3.1.2)
       ast (~> 2.2)
     path_expander (1.0.0)

@@ -115,8 +142,8 @@ GEM
       coderay (~> 1.1.0)
       method_source (~> 0.8.1)
       slop (~> 3.4)
-    rack (2.0.
-    rack-protection (
+    rack (2.0.3)
+    rack-protection (2.0.0)
       rack
     rainbow (2.1.0)
     rake (11.3.0)

@@ -146,7 +173,7 @@ GEM
       unicode-display_width (~> 1.0, >= 1.0.1)
     rubocop-rspec (1.5.3)
       rubocop (>= 0.42.0)
-    ruby-kafka (0.3.
+    ruby-kafka (0.3.17)
     ruby-progressbar (1.8.1)
     ruby_parser (3.8.2)
       sexp_processor (~> 4.1)

@@ -166,11 +193,11 @@ GEM
     shoulda-context (1.2.1)
     shoulda-matchers (2.8.0)
       activesupport (>= 3.0.0)
-    sidekiq (
+    sidekiq (5.0.3)
       concurrent-ruby (~> 1.0)
       connection_pool (~> 2.2, >= 2.2.0)
       rack-protection (>= 1.5.0)
-      redis (~> 3.
+      redis (~> 3.3, >= 3.3.3)
     simplecov (0.12.0)
       docile (~> 1.1.0)
       json (>= 1.8, < 3)

@@ -184,7 +211,7 @@ GEM
     timecop (0.8.1)
     timers (4.1.2)
       hitimes
-    tzinfo (1.2.
+    tzinfo (1.2.3)
       thread_safe (~> 0.1)
     unicode-display_width (1.1.0)
     virtus (1.0.5)

@@ -192,14 +219,14 @@ GEM
       coercible (~> 1.0)
       descendants_tracker (~> 0.0, >= 0.0.3)
       equalizer (~> 0.0, >= 0.0.9)
-    waterdrop (0.3.2.
+    waterdrop (0.3.2.4)
       bundler
       connection_pool
-      dry-configurable (~> 0.
+      dry-configurable (~> 0.6)
       null-logger
       rake
       ruby-kafka
-    worker-glass (0.2.
+    worker-glass (0.2.3)
       activesupport
       null-logger
     yard (0.9.5)

@@ -214,4 +241,4 @@ DEPENDENCIES
   timecop
 
 BUNDLED WITH
-   1.
+   1.14.6
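To get the dependency graph above into an application, the release can be pinned in a Gemfile; Bundler then resolves the same transitive set (dry-validation, ruby-kafka 0.3.17, sidekiq >= 4.2) recorded in this lockfile:

```ruby
# Gemfile sketch for consuming this release
source 'https://rubygems.org'

gem 'karafka', '0.5.0.3'
```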
data/README.md CHANGED

@@ -22,6 +22,7 @@ Karafka not only handles incoming messages but also provides tools for building
 - [Configurators](#configurators)
 - [Environment variables settings](#environment-variables-settings)
 - [Kafka brokers auto-discovery](#kafka-brokers-auto-discovery)
+- [Topic mappers](#topic-mappers)
 - [Usage](#usage)
 - [Karafka CLI](#karafka-cli)
 - [Routing](#routing)

@@ -53,6 +54,7 @@ Karafka not only handles incoming messages but also provides tools for building
 - [Deployment](#deployment)
 - [Capistrano](#capistrano)
 - [Docker](#docker)
+- [Heroku](#heroku)
 - [Sidekiq Web UI](#sidekiq-web-ui)
 - [Concurrency](#concurrency)
 - [Integrating with other frameworks](#integrating-with-other-frameworks)

@@ -112,13 +114,14 @@ Karafka has following configuration options:
 | Option | Required | Value type | Description |
 |-------------------------------|----------|-------------------|------------------------------------------------------------------------------------------------------------|
 | name | true | String | Application name |
-|
+| topic_mapper | false | Class/Module | Mapper for hiding Kafka provider specific topic prefixes/postfixes, so internaly we use "pure" topics |
+| redis | false | Hash | Hash with Redis configuration options. It is required if inline_mode is off. |
 | inline_mode | false | Boolean | Do we want to perform logic without enqueuing it with Sidekiq (directly and asap) |
 | batch_mode | false | Boolean | Should the incoming messages be consumed in batches, or one at a time |
 | start_from_beginning | false | Boolean | Consume messages starting at the beginning or consume new messages that are produced at first run |
 | monitor | false | Object | Monitor instance (defaults to Karafka::Monitor) |
 | logger | false | Object | Logger instance (defaults to Karafka::Logger) |
-| kafka.hosts |
+| kafka.hosts | true | Array<String> | Kafka server hosts. If 1 provided, Karafka will discover cluster structure automatically |
 | kafka.session_timeout | false | Integer | The number of seconds after which, if a consumer hasn't contacted the Kafka cluster, it will be kicked out |
 | kafka.offset_commit_interval | false | Integer | The interval between offset commits in seconds |
 | kafka.offset_commit_threshold | false | Integer | The number of messages that can be processed before their offsets are committed |

@@ -173,11 +176,56 @@ There are several env settings you can use:
 |-------------------|-----------------|-------------------------------------------------------------------------------|
 | KARAFKA_ENV | development | In what mode this application should boot (production/development/test/etc) |
 | KARAFKA_BOOT_FILE | app_root/app.rb | Path to a file that contains Karafka app configuration and booting procedures |
+| KARAFKA_ROOT_DIR | Gemfile location| Path to Karafka's root directory |
 
 ### Kafka brokers auto-discovery
 
 Karafka supports Kafka brokers auto-discovery during startup and on failures. You need to provide at least one Kafka broker, from which the entire Kafka cluster will be discovered. Karafka will refresh list of available brokers if something goes wrong. This allows it to be aware of changes that happen in the infrastructure (adding and removing nodes).
 
+### Topic mappers
+
+Some Kafka cloud providers require topics to be namespaced with user name. This approach is understandable, but at the same time, makes your applications less provider agnostic. To target that issue, you can create your own topic mapper that will sanitize incoming/outgoing topic names, so your logic won't be binded to those specific versions of topic names.
+
+Mapper needs to implement two following methods:
+
+- ```#incoming``` - accepts an incoming "namespace dirty" version ot topic. Needs to return sanitized topic.
+- ```#outgoing``` - accepts outgoing sanitized topic version. Needs to return namespaced one.
+
+Given each of the topics needs to have "karafka." prefix, your mapper could look like that:
+
+```ruby
+class KarafkaTopicMapper
+  def initialize(prefix)
+    @prefix = prefix
+  end
+
+  def incoming(topic)
+    topic.to_s.gsub("#{@prefix}.", '')
+  end
+
+  def outgoing(topic)
+    "#{@prefix}.#{topic}"
+  end
+end
+
+mapper = KarafkaTopicMapper.new('karafka')
+mapper.incoming('karafka.my_super_topic') #=> 'my_super_topic'
+mapper.outgoing('my_other_topic') #=> 'karafka.my_other_topic'
+```
+
+To use custom mapper, just assign it during application configuration:
+
+```ruby
+class App < Karafka::App
+  setup do |config|
+    # Other settings
+    config.topic_mapper = MyCustomMapper.new('username')
+  end
+end
+```
+
+Topic mapper automatically integrates with both messages consumer and responders.
+
 ## Usage
 
 ### Karafka CLI

@@ -717,7 +765,7 @@ Here's a simple example of monitor that is used to handle errors logging into Ai
 class AppMonitor < Karafka::Monitor
   def notice_error(caller_class, e)
     super
-    Airbrake.
+    Airbrake.notify(e)
   end
 end
 ```

@@ -794,19 +842,7 @@ Since the only thing that is long-running is Karafka server, it should't be hard
 
 ### Capistrano
 
-
-In your **Capfile** file:
-
-```ruby
-require 'karafka/capistrano'
-```
-
-Take a look at the [load:defaults task](https://github.com/karafka/karafka/blob/master/lib/karafka/capistrano/karafka.cap) (top of file) for options you can set. For example, to specify a different pidfile than default:
-
-```ruby
-set :karafka_pid, ->{ File.join(shared_path, 'tmp', 'pids', 'karafka0') }
-```
+For details about integration with Capistrano, please go to [capistrano-karafka](https://github.com/karafka/capistrano-karafka) gem page.
 
 ### Docker

@@ -817,6 +853,37 @@ ENV KARAFKA_ENV production
 CMD bundle exec karafka server
 ```
 
+### Heroku
+
+Karafka may be deployed on [Heroku](https://www.heroku.com/), and works with
+[Heroku Kafka](https://www.heroku.com/kafka) and [Heroku Redis](https://www.heroku.com/redis).
+
+Set `KARAFKA_ENV`:
+```bash
+heroku config:set KARAFKA_ENV=production
+```
+
+Configure Karafka to use the Kafka and Redis configuration provided by Heroku:
+```ruby
+# app_root/app.rb
+class App < Karafka::App
+  setup do |config|
+    config.kafka.hosts = ENV['KAFKA_URL'].split(',') # Convert CSV list of broker urls to an array
+    config.kafka.ssl.ca_cert = ENV['KAFKA_TRUSTED_CERT'] if ENV['KAFKA_TRUSTED_CERT']
+    config.kafka.ssl.client_cert = ENV['KAFKA_CLIENT_CERT'] if ENV['KAFKA_CLIENT_CERT']
+    config.kafka.ssl.client_cert_key = ENV['KAFKA_CLIENT_CERT_KEY'] if ENV['KAFKA_CLIENT_CERT_KEY']
+    config.redis = { url: ENV['REDIS_URL'] }
+    # ...other configuration options...
+  end
+end
+```
+
+Create your Procfile:
+```text
+karafka_server: bundle exec karafka server
+karafka_worker: bundle exec karafka worker
+```
+
 ## Sidekiq Web UI
 
 Karafka comes with a Sidekiq Web UI application that can display the current state of a Sidekiq installation. If you installed Karafka based on the install instructions, you will have a **config.ru** file that allows you to run standalone Puma instance with a Sidekiq Web UI.

@@ -863,6 +930,8 @@ ENV['RACK_ENV'] ||= 'development'
 ENV['KARAFKA_ENV'] = ENV['RACK_ENV']
 
 Bundler.require(:default, ENV['KARAFKA_ENV'])
+
+Karafka::Loader.new.load(Karafka::App.root)
 ```
 
 with

@@ -894,6 +963,7 @@ After that make sure that whole your application is loaded before setting up and
 ### Libraries and components
 
 * [Karafka framework](https://github.com/karafka/karafka)
+* [Capistrano Karafka](https://github.com/karafka/capistrano-karafka)
 * [Waterdrop](https://github.com/karafka/waterdrop)
 * [Worker Glass](https://github.com/karafka/worker-glass)
 * [Envlogic](https://github.com/karafka/envlogic)
data/karafka.gemspec CHANGED

@@ -4,28 +4,29 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
 require 'karafka/version'
 
 Gem::Specification.new do |spec|
-  spec.name
-  spec.version
-  spec.platform
-  spec.authors
-  spec.email
-  spec.homepage
-  spec.summary
-  spec.description
-  spec.license
+  spec.name        = 'karafka'
+  spec.version     = ::Karafka::VERSION
+  spec.platform    = Gem::Platform::RUBY
+  spec.authors     = ['Maciej Mensfeld', 'Pavlo Vavruk', 'Adam Gwozdowski']
+  spec.email       = %w( maciej@coditsu.io pavlo.vavruk@gmail.com adam99g@gmail.com )
+  spec.homepage    = 'https://github.com/karafka/karafka'
+  spec.summary     = %q{ Ruby based framework for working with Apache Kafka }
+  spec.description = %q{ Framework used to simplify Apache Kafka based Ruby applications development }
+  spec.license     = 'MIT'
 
   spec.add_development_dependency 'bundler', '~> 1.2'
 
-  spec.add_dependency 'ruby-kafka', '= 0.3.
-  spec.add_dependency 'sidekiq', '
+  spec.add_dependency 'ruby-kafka', '= 0.3.17'
+  spec.add_dependency 'sidekiq', '>= 4.2'
   spec.add_dependency 'worker-glass', '~> 0.2'
   spec.add_dependency 'celluloid', '~> 0.17'
   spec.add_dependency 'envlogic', '~> 1.0'
-  spec.add_dependency 'waterdrop', '~> 0.3.2.
+  spec.add_dependency 'waterdrop', '~> 0.3.2.4'
   spec.add_dependency 'rake', '~> 11.3'
   spec.add_dependency 'thor', '~> 0.19'
   spec.add_dependency 'activesupport', '~> 5.0'
-  spec.add_dependency 'dry-
+  spec.add_dependency 'dry-validation', '~> 0.10.6'
+  spec.add_dependency 'dry-configurable', '~> 0.7'
   spec.required_ruby_version = '>= 2.3.0'
 
   spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
data/lib/karafka.rb CHANGED

@@ -16,6 +16,7 @@
   thor
   fileutils
   dry-configurable
+  dry-validation
   active_support/callbacks
   active_support/core_ext/class/subclasses
   active_support/core_ext/hash/indifferent_access

@@ -48,7 +49,7 @@ module Karafka
 
     # @return [String] Karafka app root path (user application path)
     def root
-      Pathname.new(File.dirname(ENV['BUNDLE_GEMFILE']))
+      Pathname.new(ENV['KARAFKA_ROOT_DIR'] || File.dirname(ENV['BUNDLE_GEMFILE']))
     end
 
     # @return [String] path to Karafka gem root core
data/lib/karafka/app.rb CHANGED

@@ -12,7 +12,10 @@ module Karafka
     # Sets up all the internal components and bootstrap whole app
     # We need to know details about routes in order to setup components,
     # that's why we don't setup them after std setup is done
+    # @raise [Karafka::Errors::InvalidConfiguration] raised when configuration
+    #   doesn't match with ConfigurationSchema
     def boot!
+      Setup::Config.validate!
       Setup::Config.setup_components
     end
 
data/lib/karafka/base_responder.rb CHANGED

@@ -71,6 +71,16 @@ module Karafka
         topic_obj = Responders::Topic.new(topic_name, options)
         self.topics[topic_obj.name] = topic_obj
       end
+
+      # A simple alias for easier standalone responder usage.
+      # Instead of building it with new.call it allows (in case of usin JSON parser)
+      # to just run it directly from the class level
+      # @param data Anything that we want to respond with
+      # @example Send user data with a responder (uses default Karafka::Parsers::Json parser)
+      #   UsersCreatedResponder.call(@created_user)
+      def call(*data)
+        new.call(*data)
+      end
     end
 
     # Creates a responder object

@@ -87,6 +97,10 @@ module Karafka
     # @note We know that validators should be executed also before sending data to topics, however
     #   the implementation gets way more complicated then, that's why we check after everything
     #   was sent using responder
+    # @example Send user data with a responder (uses default Karafka::Parsers::Json parser)
+    #   UsersCreatedResponder.new.call(@created_user)
+    # @example Send user data with a responder using non default Parser
+    #   UsersCreatedResponder.new(MyParser).call(@created_user)
     def call(*data)
       respond(*data)
       validate!

@@ -133,8 +147,16 @@ module Karafka
     #   what we send is legit and it will go to a proper topics
     def deliver!
       messages_buffer.each do |topic, data_elements|
+        # We map this topic name, so it will match namespaced/etc topic in Kafka
+        # @note By default will not change topic (if default mapper used)
+        mapped_topic = Karafka::App.config.topic_mapper.outgoing(topic)
+
         data_elements.each do |(data, options)|
-          ::WaterDrop::Message.new(
+          ::WaterDrop::Message.new(
+            mapped_topic,
+            data,
+            options
+          ).send!
         end
       end
     end
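The class-level `call` added above (#110) is a thin alias for `new.call`. A hedged usage sketch; the responder body and topic name are hypothetical, following the responder DSL this file already defines:

```ruby
# Hypothetical responder built on the DSL from base_responder.rb
class UsersCreatedResponder < Karafka::BaseResponder
  topic :users_created

  def respond(user)
    respond_to :users_created, user
  end
end

user = { id: 1, name: 'example' }
UsersCreatedResponder.new.call(user) # explicit instance build (pre-0.5.0.3 style)
UsersCreatedResponder.call(user)     # new class-level alias, forwards to new.call
```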
data/lib/karafka/connection/consumer.rb CHANGED

@@ -10,10 +10,14 @@ module Karafka
     # If we would't catch it, it would propagate up until killing the Celluloid actor
     # @param message [Kafka::FetchedMessage] message that was fetched by kafka
     def consume(message)
-
+      # We map from incoming topic name, as it might be namespaced, etc.
+      # @see topic_mapper internal docs
+      mapped_topic = Karafka::App.config.topic_mapper.incoming(message.topic)
+
+      controller = Karafka::Routing::Router.new(mapped_topic).build
       # We wrap it around with our internal message format, so we don't pass around
       # a raw Kafka message
-      controller.params = Message.new(
+      controller.params = Message.new(mapped_topic, message.value)
 
       Karafka.monitor.notice(self.class, controller.to_h)
 
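Routing now goes through the configured topic mapper before the controller is built. With the default `Karafka::Routing::Mapper` (added later in this diff) both directions are identity functions, so behavior is unchanged unless a custom mapper is configured:

```ruby
# The default mapper passes topics through untouched:
Karafka::Routing::Mapper.incoming('users_created') #=> 'users_created'
Karafka::Routing::Mapper.outgoing('users_created') #=> 'users_created'
```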
data/lib/karafka/connection/topic_consumer.rb CHANGED

@@ -3,6 +3,10 @@ module Karafka
     # Class used as a wrapper around Ruby-Kafka to simplify additional
     # features that we provide/might provide in future
     class TopicConsumer
+      # How long should we wait before trying to reconnect to Kafka cluster
+      # that went down (in seconds)
+      RECONNECT_TIMEOUT = 5
+
       # Creates a queue consumer that will pull the data from Kafka
       # @param [Karafka::Routing::Route] route details that will be used to build up a
       #   queue consumer instance

@@ -63,6 +67,13 @@ module Karafka
           start_from_beginning: @route.start_from_beginning
         )
       end
+    rescue Kafka::ConnectionError
+      # If we would not wait it would totally spam log file with failed
+      # attempts if Kafka is down
+      sleep(RECONNECT_TIMEOUT)
+      # We don't log and just reraise - this will be logged
+      # down the road
+      raise
     end
 
     # @return [Kafka] returns a Kafka
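The rescue added above implements a deliberately simple back-off (#132): sleep for `RECONNECT_TIMEOUT`, then re-raise so the supervisor restarts the fetch loop and the failure is still logged upstream. The same pattern in isolation, with an illustrative wrapper method that is not part of Karafka's API:

```ruby
require 'kafka' # ruby-kafka, which defines Kafka::ConnectionError

RECONNECT_TIMEOUT = 5 # seconds, mirroring the constant above

# Illustrative wrapper: pause before propagating a connection failure,
# so a crashed-and-restarted loop doesn't spam the logs.
def with_reconnect_backoff
  yield
rescue Kafka::ConnectionError
  sleep(RECONNECT_TIMEOUT)
  raise
end
```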
data/lib/karafka/errors.rb CHANGED

@@ -10,9 +10,15 @@ module Karafka
     class ParserError < BaseError; end
 
     # Raised when router receives topic name which does not correspond with any routes
-    #
-    #
-    #
+    # This can only happen in a case when:
+    #   - you've received a message and it was scheduled to Sidekiq background worker
+    #   - you've changed the routing, so router can no longer associate your topic to
+    #     any controller
+    #   - or in a case when you do a lot of metaprogramming and you change routing/etc on runtime
+    #
+    # In case this happens, you will have to create a temporary route that will allow
+    # you to "eat" everything from the Sidekiq queue.
+    # @see https://github.com/karafka/karafka/issues/135
     class NonMatchingRouteError < BaseError; end
 
     # Raised when we have few controllers(inherited from Karafka::BaseController)

@@ -46,5 +52,8 @@ module Karafka
 
     # Raised when we didn't use a topic that was defined as non-optional (required)
     class UnusedResponderRequiredTopic < BaseError; end
+
+    # Raised when configuration doesn't match with validation schema
+    class InvalidConfiguration < BaseError; end
   end
 end
data/lib/karafka/process.rb CHANGED

data/lib/karafka/routing/mapper.rb ADDED

@@ -0,0 +1,53 @@
+module Karafka
+  module Routing
+    # Default routes mapper that does not remap things
+    # Mapper can be used for Kafka providers that require namespaced topic names. Instead of being
+    # provider dependent, we can then define mapper and use internally "pure" topic names in
+    # routes and responders
+    #
+    # @example Mapper for mapping prefixed topics
+    #   module MyMapper
+    #     PREFIX = "my_user_name."
+    #
+    #     def incoming(topic)
+    #       topic.to_s.gsub(PREFIX, '')
+    #     end
+    #
+    #     def outgoing(topic)
+    #       "#{PREFIX}#{topic}"
+    #     end
+    #   end
+    #
+    # @example Mapper for replacing "." with "_" in topic names
+    #   module MyMapper
+    #     PREFIX = "my_user_name."
+    #
+    #     def incoming(topic)
+    #       topic.to_s.gsub('.', '_')
+    #     end
+    #
+    #     def outgoing(topic)
+    #       topic.to_s.gsub('_', '.')
+    #     end
+    #   end
+    module Mapper
+      class << self
+        # @param topic [String, Symbol] topic
+        # @return [String, Symbol] same topic as on input
+        # @example
+        #   incoming('topic_name') #=> 'topic_name'
+        def incoming(topic)
+          topic
+        end
+
+        # @param topic [String, Symbol] topic
+        # @return [String, Symbol] same topic as on input
+        # @example
+        #   outgoing('topic_name') #=> 'topic_name'
+        def outgoing(topic)
+          topic
+        end
+      end
+    end
+  end
+end
data/lib/karafka/routing/route.rb CHANGED

@@ -56,7 +56,7 @@ module Karafka
     #   background job
     # @note If not provided - will be built based on the provided controller
     def worker
-      @worker ||= Karafka::Workers::Builder.new(controller).build
+      @worker ||= inline_mode ? nil : Karafka::Workers::Builder.new(controller).build
     end
 
     # @return [Class, nil] Class (not an instance) of a responder that should respond from
data/lib/karafka/server.rb CHANGED

@@ -11,6 +11,7 @@ module Karafka
       @consumers = Concurrent::Array.new
       bind_on_sigint
       bind_on_sigquit
+      bind_on_sigterm
       start_supervised
     end
 
@@ -39,6 +40,15 @@ module Karafka
         end
       end
 
+      # What should happen when we decide to quit with sigterm
+      def bind_on_sigterm
+        process.on_sigterm do
+          Karafka::App.stop!
+          consumers.map(&:stop)
+          exit
+        end
+      end
+
       # Starts Karafka with a supervision
       # @note We don't need to sleep because Karafka::Fetcher is locking and waiting to
       #   finish loop (and it won't happen until we explicitily want to stop)
data/lib/karafka/setup/config.rb CHANGED

@@ -29,6 +29,12 @@ module Karafka
       setting :batch_mode, false
       # whether to consume messages starting at the beginning or to just consume new messages
       setting :start_from_beginning, true
+      # Mapper used to remap names of topics, so we can have a clean internal topic namings despite
+      # using any Kafka provider that uses namespacing, etc
+      # It needs to implement two methods:
+      #   - #incoming - for remapping from the incoming message to our internal format
+      #   - #outgoing - for remapping from internal topic name into outgoing message
+      setting :topic_mapper, Routing::Mapper
 
       # Connection pool options are used for producer (Waterdrop)
       # They are configured automatically based on Sidekiq concurrency and number of routes

@@ -95,6 +101,18 @@ module Karafka
           klass.new(config).setup
         end
       end
+
+      # Validate config based on ConfigurationSchema
+      # @return [Boolean] true if configuration is valid
+      # @raise [Karafka::Errors::InvalidConfiguration] raised when configuration
+      #   doesn't match with ConfigurationSchema
+      def validate!
+        validation_result = Karafka::Setup::ConfigSchema.call(config.to_h)
+
+        return true if validation_result.success?
+
+        raise Errors::InvalidConfiguration, validation_result.errors
+      end
     end
   end
 end

data/lib/karafka/setup/config_schema.rb ADDED

@@ -0,0 +1,44 @@
+module Karafka
+  module Setup
+    # Schema with validation rules for all configuration
+    ConfigSchema = Dry::Validation.Schema do
+      required(:name).filled(:str?)
+      required(:topic_mapper).filled
+      optional(:inline_mode).filled(:bool?)
+
+      required(:redis).maybe do
+        schema do
+          required(:url).filled(:str?)
+        end
+      end
+
+      # If inline_mode is true, redis should be filled
+      rule(redis_presence: [:redis, :inline_mode]) do |redis, inline_mode|
+        inline_mode.false?.then(redis.filled?)
+      end
+
+      optional(:batch_mode).filled(:bool?)
+      optional(:start_from_beginning).filled(:bool?)
+
+      optional(:connection_pool).schema do
+        required(:size).filled
+        optional(:timeout).filled(:int?)
+      end
+
+      required(:kafka).schema do
+        required(:hosts).filled(:array?)
+
+        required(:session_timeout).filled(:int?)
+        required(:offset_commit_interval).filled(:int?)
+        required(:offset_commit_threshold).filled(:int?)
+        required(:heartbeat_interval).filled(:int?)
+
+        optional(:ssl).schema do
+          required(:ca_cert).maybe(:str?)
+          required(:client_cert).maybe(:str?)
+          required(:client_cert_key).maybe(:str?)
+        end
+      end
+    end
+  end
+end
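`ConfigSchema` uses the dry-validation 0.10 API pinned in the gemspec: calling the schema returns a result object whose `#success?` and `#errors` drive `validate!` above. A small standalone example of those mechanics (schema name and keys are illustrative):

```ruby
require 'dry-validation' # ~> 0.10.6, per the gemspec constraint

ExampleSchema = Dry::Validation.Schema do
  required(:name).filled(:str?)
  required(:port).filled(:int?)
end

result = ExampleSchema.call(name: 'app', port: 'nope')
result.success? #=> false
result.errors   #=> { port: ["must be an integer"] }
```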
data/lib/karafka/version.rb CHANGED
metadata CHANGED

@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka
 version: !ruby/object:Gem::Version
-  version: 0.5.0.
+  version: 0.5.0.3
 platform: ruby
 authors:
 - Maciej Mensfeld

@@ -10,7 +10,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-
+date: 2017-06-28 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler

@@ -32,26 +32,26 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 0.3.
+        version: 0.3.17
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
    - - '='
       - !ruby/object:Gem::Version
-        version: 0.3.
+        version: 0.3.17
 - !ruby/object:Gem::Dependency
   name: sidekiq
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
       - !ruby/object:Gem::Version
         version: '4.2'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
       - !ruby/object:Gem::Version
         version: '4.2'
 - !ruby/object:Gem::Dependency

@@ -102,14 +102,14 @@ dependencies:
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: 0.3.2.
+        version: 0.3.2.4
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.3.2.
+        version: 0.3.2.4
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement

@@ -152,23 +152,37 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '5.0'
+- !ruby/object:Gem::Dependency
+  name: dry-validation
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.10.6
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.10.6
 - !ruby/object:Gem::Dependency
   name: dry-configurable
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.7'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.7'
 description: " Framework used to simplify Apache Kafka based Ruby applications development "
 email:
-- maciej@
+- maciej@coditsu.io
 - pavlo.vavruk@gmail.com
 - adam99g@gmail.com
 executables:

@@ -194,8 +208,6 @@ files:
 - lib/karafka/base_controller.rb
 - lib/karafka/base_responder.rb
 - lib/karafka/base_worker.rb
-- lib/karafka/capistrano.rb
-- lib/karafka/capistrano/karafka.cap
 - lib/karafka/cli.rb
 - lib/karafka/cli/base.rb
 - lib/karafka/cli/console.rb

@@ -225,10 +237,12 @@ files:
 - lib/karafka/responders/topic.rb
 - lib/karafka/responders/usage_validator.rb
 - lib/karafka/routing/builder.rb
+- lib/karafka/routing/mapper.rb
 - lib/karafka/routing/route.rb
 - lib/karafka/routing/router.rb
 - lib/karafka/server.rb
 - lib/karafka/setup/config.rb
+- lib/karafka/setup/config_schema.rb
 - lib/karafka/setup/configurators/base.rb
 - lib/karafka/setup/configurators/celluloid.rb
 - lib/karafka/setup/configurators/sidekiq.rb

@@ -264,7 +278,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.6.
+rubygems_version: 2.6.11
 signing_key:
 specification_version: 4
 summary: Ruby based framework for working with Apache Kafka
data/lib/karafka/capistrano.rb DELETED

data/lib/karafka/capistrano/karafka.cap DELETED

@@ -1,85 +0,0 @@
-# @note Inspired by Puma capistrano handlers
-# @see https://github.com/seuros/capistrano-puma/blob/master/lib/capistrano/tasks/puma.rake
-namespace :load do
-  task :defaults do
-    set :karafka_role, :app
-    set :karafka_default_hooks, -> { true }
-    set :karafka_env, -> { fetch(:karafka_env, fetch(:environment)) }
-    set :karafka_pid, -> { File.join(shared_path, 'tmp', 'pids', 'karafka.pid') }
-  end
-end
-
-namespace :deploy do
-  before :starting, :check_karafka_hooks do
-    invoke 'karafka:add_default_hooks' if fetch(:karafka_default_hooks)
-  end
-end
-
-namespace :karafka do
-  desc 'Stop Karafka'
-  task :stop do
-    on roles(fetch(:karafka_role)) do |host|
-      within shared_path do
-        # If there's no pidfile it means that Karafka is not running
-        next unless test "cat #{fetch(:karafka_pid)}"
-
-        # Send a kill signal to a given process
-        execute "kill -INT `cat #{fetch(:karafka_pid)}`"
-
-        # And wait until it finishes. We wait because we don't want to start next process until
-        # the previous one is stopped. That way we won't have problems with Kafka registering and
-        # deregistering processes from topics (although nothing bad would happen. It would just
-        # take more time to rebalance)
-        while true
-          break unless test "cat #{fetch(:karafka_pid)}"
-          info 'Waiting for Karafka to stop'
-          sleep 5
-        end
-      end
-    end
-  end
-
-  desc 'Start Karafka'
-  task :start do
-    on roles(fetch(:karafka_role)) do |host|
-      within current_path do
-        # We use all 3 because when combined with Sinatra/Rails it will use their parts as well
-        # so we want to set proper env for any of them
-        with(
-          KARAFKA_ENV: fetch(:karafka_env),
-          RAILS_ENV: fetch(:rails_env),
-          RACK_ENV: fetch(:rack_env)
-        )do
-          execute :bundle, "exec karafka server -d -p #{fetch(:karafka_pid)}"
-        end
-      end
-    end
-  end
-
-  desc 'Restart Karafka'
-  task :restart do
-    invoke 'karafka:stop'
-    invoke 'karafka:start'
-  end
-
-  desc 'Status Karafka'
-  task :status do
-    on roles(fetch(:karafka_role)) do |host|
-      if test "cat #{fetch(:karafka_pid)}"
-        pid = capture "cat #{fetch(:karafka_pid)}"
-
-        if test "ps -p #{pid} > /dev/null"
-          info "Karafka is started: #{pid}"
-        else
-          error "Karafka is not started but pidfile exists"
-        end
-      else
-        info "Karafka is not started"
-      end
-    end
-  end
-
-  task :add_default_hooks do
-    after 'deploy:finished', 'karafka:restart'
-  end
-end