karafka 1.2.13

Files changed (88)
  1. checksums.yaml +7 -0
  2. data/.coditsu.yml +3 -0
  3. data/.console_irbrc +13 -0
  4. data/.gitignore +68 -0
  5. data/.rspec +1 -0
  6. data/.ruby-gemset +1 -0
  7. data/.ruby-version +1 -0
  8. data/.travis.yml +49 -0
  9. data/CHANGELOG.md +464 -0
  10. data/CODE_OF_CONDUCT.md +46 -0
  11. data/CONTRIBUTING.md +41 -0
  12. data/Gemfile +15 -0
  13. data/Gemfile.lock +126 -0
  14. data/MIT-LICENCE +18 -0
  15. data/README.md +102 -0
  16. data/bin/karafka +19 -0
  17. data/config/errors.yml +6 -0
  18. data/karafka.gemspec +42 -0
  19. data/lib/karafka.rb +79 -0
  20. data/lib/karafka/app.rb +45 -0
  21. data/lib/karafka/attributes_map.rb +69 -0
  22. data/lib/karafka/backends/inline.rb +16 -0
  23. data/lib/karafka/base_consumer.rb +68 -0
  24. data/lib/karafka/base_responder.rb +208 -0
  25. data/lib/karafka/callbacks.rb +30 -0
  26. data/lib/karafka/callbacks/config.rb +22 -0
  27. data/lib/karafka/callbacks/dsl.rb +16 -0
  28. data/lib/karafka/cli.rb +54 -0
  29. data/lib/karafka/cli/base.rb +78 -0
  30. data/lib/karafka/cli/console.rb +29 -0
  31. data/lib/karafka/cli/flow.rb +46 -0
  32. data/lib/karafka/cli/info.rb +29 -0
  33. data/lib/karafka/cli/install.rb +42 -0
  34. data/lib/karafka/cli/server.rb +66 -0
  35. data/lib/karafka/connection/api_adapter.rb +148 -0
  36. data/lib/karafka/connection/builder.rb +16 -0
  37. data/lib/karafka/connection/client.rb +107 -0
  38. data/lib/karafka/connection/delegator.rb +46 -0
  39. data/lib/karafka/connection/listener.rb +60 -0
  40. data/lib/karafka/consumers/callbacks.rb +54 -0
  41. data/lib/karafka/consumers/includer.rb +51 -0
  42. data/lib/karafka/consumers/responders.rb +24 -0
  43. data/lib/karafka/consumers/single_params.rb +15 -0
  44. data/lib/karafka/errors.rb +50 -0
  45. data/lib/karafka/fetcher.rb +44 -0
  46. data/lib/karafka/helpers/class_matcher.rb +84 -0
  47. data/lib/karafka/helpers/config_retriever.rb +46 -0
  48. data/lib/karafka/helpers/multi_delegator.rb +33 -0
  49. data/lib/karafka/instrumentation/listener.rb +112 -0
  50. data/lib/karafka/instrumentation/logger.rb +55 -0
  51. data/lib/karafka/instrumentation/monitor.rb +64 -0
  52. data/lib/karafka/loader.rb +28 -0
  53. data/lib/karafka/params/dsl.rb +158 -0
  54. data/lib/karafka/params/params_batch.rb +46 -0
  55. data/lib/karafka/parsers/json.rb +38 -0
  56. data/lib/karafka/patches/dry_configurable.rb +33 -0
  57. data/lib/karafka/patches/ruby_kafka.rb +34 -0
  58. data/lib/karafka/persistence/client.rb +25 -0
  59. data/lib/karafka/persistence/consumer.rb +38 -0
  60. data/lib/karafka/persistence/topic.rb +29 -0
  61. data/lib/karafka/process.rb +62 -0
  62. data/lib/karafka/responders/builder.rb +36 -0
  63. data/lib/karafka/responders/topic.rb +57 -0
  64. data/lib/karafka/routing/builder.rb +61 -0
  65. data/lib/karafka/routing/consumer_group.rb +61 -0
  66. data/lib/karafka/routing/consumer_mapper.rb +34 -0
  67. data/lib/karafka/routing/proxy.rb +37 -0
  68. data/lib/karafka/routing/router.rb +29 -0
  69. data/lib/karafka/routing/topic.rb +60 -0
  70. data/lib/karafka/routing/topic_mapper.rb +55 -0
  71. data/lib/karafka/schemas/config.rb +24 -0
  72. data/lib/karafka/schemas/consumer_group.rb +79 -0
  73. data/lib/karafka/schemas/consumer_group_topic.rb +18 -0
  74. data/lib/karafka/schemas/responder_usage.rb +39 -0
  75. data/lib/karafka/schemas/server_cli_options.rb +43 -0
  76. data/lib/karafka/server.rb +85 -0
  77. data/lib/karafka/setup/config.rb +195 -0
  78. data/lib/karafka/setup/configurators/base.rb +29 -0
  79. data/lib/karafka/setup/configurators/params.rb +25 -0
  80. data/lib/karafka/setup/configurators/water_drop.rb +32 -0
  81. data/lib/karafka/setup/dsl.rb +22 -0
  82. data/lib/karafka/status.rb +25 -0
  83. data/lib/karafka/templates/application_consumer.rb.example +6 -0
  84. data/lib/karafka/templates/application_responder.rb.example +11 -0
  85. data/lib/karafka/templates/karafka.rb.example +54 -0
  86. data/lib/karafka/version.rb +7 -0
  87. data/log/.gitkeep +0 -0
  88. metadata +303 -0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: d14a343dffbcc91c326964cc833bb82e9daedfdc4913eb6478c69e6caa5a995c
+   data.tar.gz: 14c3b5a9275dae6a5fb70790b4caf579cd2e1d9a6ac5d56307d950090012ec3d
+ SHA512:
+   metadata.gz: 890c1b249d3457984c2c392dc1bed2413f36704d1268247bd67b2fa70cd3489370786a71e6be6804aa9dc1e02136bc92e97a338c917577929f2a8e0aabaf07bc
+   data.tar.gz: cf9a0043a4b82797bf0f6d36a8ba18c97702e22dd9fea25d5c1df13ca3b30b4255244d828e6cd3fba68807d2717209ae626d2a99a25f497a7e15b04d80168d6f
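The checksums above describe the two archives packed inside the released .gem file (metadata.gz and data.tar.gz). A minimal verification sketch, assuming the gem has first been fetched locally (for example with `gem fetch karafka --version 1.2.13`) so that karafka-1.2.13.gem sits in the working directory:

```ruby
# Sketch: compare the SHA256 checksums published above with the archives
# inside a locally downloaded karafka-1.2.13.gem (a plain tar archive).
require 'digest'
require 'rubygems/package'

EXPECTED_SHA256 = {
  'metadata.gz' => 'd14a343dffbcc91c326964cc833bb82e9daedfdc4913eb6478c69e6caa5a995c',
  'data.tar.gz' => '14c3b5a9275dae6a5fb70790b4caf579cd2e1d9a6ac5d56307d950090012ec3d'
}.freeze

File.open('karafka-1.2.13.gem', 'rb') do |io|
  Gem::Package::TarReader.new(io) do |tar|
    tar.each do |entry|
      expected = EXPECTED_SHA256[entry.full_name]
      next unless expected

      actual = Digest::SHA256.hexdigest(entry.read)
      puts "#{entry.full_name}: #{actual == expected ? 'OK' : 'MISMATCH'}"
    end
  end
end
```

The SHA512 values can be checked the same way with Digest::SHA512.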

data/.coditsu.yml ADDED
@@ -0,0 +1,3 @@
+ api_key: <%= ENV['CODITSU_API_KEY'] %>
+ api_secret: <%= ENV['CODITSU_API_SECRET'] %>
+ repository_id: <%= ENV['CODITSU_REPOSITORY_ID'] %>
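The Coditsu config above is an ERB template resolved from environment variables. An illustrative sketch (not part of the gem) of how such a file renders to plain YAML once the CODITSU_* variables are set:

```ruby
# Sketch: render the ERB-templated .coditsu.yml and parse the result.
# The env values below are placeholders; real ones come from CI secrets.
require 'erb'
require 'yaml'

ENV['CODITSU_API_KEY']       ||= 'example-key'
ENV['CODITSU_API_SECRET']    ||= 'example-secret'
ENV['CODITSU_REPOSITORY_ID'] ||= 'example-repo-id'

rendered = ERB.new(File.read('.coditsu.yml')).result
config   = YAML.safe_load(rendered)

puts config.fetch('repository_id') # => "example-repo-id"
```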

data/.console_irbrc ADDED
@@ -0,0 +1,13 @@
+ # irbrc for Karafka console
+ require 'karafka'
+ require Karafka.boot_file
+
+ IRB.conf[:AUTO_INDENT] = true
+ IRB.conf[:SAVE_HISTORY] = 1000
+ IRB.conf[:USE_READLINE] = true
+ IRB.conf[:HISTORY_FILE] = "#{Karafka::App.root}/.irb-history"
+ IRB.conf[:LOAD_MODULES] = [] unless IRB.conf.key?(:LOAD_MODULES)
+
+ unless IRB.conf[:LOAD_MODULES].include?('irb/completion')
+   IRB.conf[:LOAD_MODULES] << 'irb/completion'
+ end

data/.gitignore ADDED
@@ -0,0 +1,68 @@
+ # bundler state
+ /.bundle
+ /vendor/bundle/
+ /vendor/ruby/
+ /ruby/
+ app.god
+
+ # minimal Rails specific artifacts
+ db/*.sqlite3
+ /log/development.log
+ /log/production.log
+ /log/test.log
+ /tmp/*
+ *.gem
+ *.~
+
+ # various artifacts
+ **.war
+ *.rbc
+ *.sassc
+ .byebug_history
+ .redcar/
+ .capistrano/
+ .sass-cache
+ /config/god/sidekiq.rb
+ /config/puma.rb
+ /coverage.data
+ /coverage/
+ /doc/api/
+ /doc/app/
+ /doc/yard
+ /doc/features.html
+ /doc/specs.html
+ /spec/tmp/*
+ /cache
+ /capybara*
+ /capybara-*.html
+ /gems
+ /specifications
+ rerun.txt
+ pickle-email-*.html
+
+ # If you find yourself ignoring temporary files generated by your text editor
+ # or operating system, you probably want to add a global ignore instead:
+ # git config --global core.excludesfile ~/.gitignore_global
+ #
+ # Here are some files you may want to ignore globally:
+
+ # scm revert files
+ **.orig
+
+ # Mac finder artifacts
+ .DS_Store
+
+ # Netbeans project directory
+ /nbproject
+
+ # RubyMine project files
+ .idea
+
+ # Textmate project files
+ /*.tmproj
+
+ # vim artifacts
+ **.swp
+
+ # documentation
+ .yardoc

data/.rspec ADDED
@@ -0,0 +1 @@
+ --require spec_helper

data/.ruby-gemset ADDED
@@ -0,0 +1 @@
+ karafka

data/.ruby-version ADDED
@@ -0,0 +1 @@
+ 2.6.1

data/.travis.yml ADDED
@@ -0,0 +1,49 @@
+ services:
+   - docker
+
+ dist: trusty
+ sudo: false
+ cache: bundler
+
+ git:
+   depth: false
+
+ test: &test
+   stage: Test
+   language: ruby
+   before_install:
+     - gem install bundler
+     - gem update --system
+   script: bundle exec rspec
+
+ jobs:
+   include:
+     - <<: *test
+       rvm: 2.6.1
+     - <<: *test
+       rvm: 2.5.3
+     - <<: *test
+       rvm: 2.4.5
+     - <<: *test
+       rvm: 2.3.8
+
+     - stage: coditsu
+       language: ruby
+       rvm: 2.6.1
+       before_install:
+         - gem update --system
+         - gem install bundler
+       before_script:
+         - docker create -v /sources --name sources alpine:3.4 /bin/true
+         - docker cp ./ sources:/sources
+       script: >
+         docker run
+         -e CODITSU_API_KEY
+         -e CODITSU_API_SECRET
+         -e CODITSU_REPOSITORY_ID
+         --volumes-from sources
+         coditsu/build-runner:latest
+
+ stages:
+   - coditsu
+   - test

data/CHANGELOG.md ADDED
@@ -0,0 +1,464 @@
+ # Karafka framework changelog
+
+ ## 1.2.13
+ - Add support for parameter sasl_over_ssl in ruby-kafka
+
+ ## 1.2.12
+ - #29 - Consumer class names must have the word "Consumer" in it in order to work (Sidekiq backend)
+
+ ## 1.2.11
+ - [#470](https://github.com/karafka/karafka/issues/470) Karafka not working with dry-configurable 0.8
+
+ ## 1.2.10
+ - [#453](https://github.com/karafka/karafka/pull/453) require `Forwardable` module
+
+ ## 1.2.9
+ - Critical exceptions now will cause consumer to stop instead of retrying without a break
+ - #412 - Fix dry-inflector dependency lock in gemspec
+ - #414 - Backport to 1.2 the delayed retry upon failure
+ - #437 - Raw message is no longer added to params after ParserError raised
+
+ ## 1.2.8
+ - #408 - Responder Topic Lookup Bug on Heroku
+
+ ## 1.2.7
+ - Unlock Ruby-kafka version with a warning
+
+ ## 1.2.6
+ - Lock WaterDrop to 1.2.3
+ - Lock Ruby-Kafka to 0.6.x (support for 0.7 will be added in Karafka 1.3)
+
+ ## 1.2.5
+ - #354 - Expose consumer heartbeat
+ - #373 - Async producer not working properly with responders
+
+ ## 1.2.4
+ - #332 - Fetcher for max queue size
+
+ ## 1.2.3
+ - #313 - support PLAINTEXT and SSL for scheme
+ - #320 - Pausing indefinetely with nil pause timeout doesn't work
+ - #318 - Partition pausing doesn't work with custom topic mappers
+ - Rename ConfigAdapter to ApiAdapter to better reflect what it does
+ - #317 - Manual offset committing doesn't work with custom topic mappers
+
+ ## 1.2.2
+ - #312 - Broken for ActiveSupport 5.2.0
+
+ ## 1.2.1
+ - #304 - Unification of error instrumentation event details
+ - #306 - Using file logger from within a trap context upon shutdown is impossible
+
+ ## 1.2.0
+ - Spec improvements
+ - #260 - Specs missing randomization
+ - #251 - Shutdown upon non responding (unreachable) cluster is not possible
+ - #258 - Investigate lowering requirements on activesupport
+ - #246 - Alias consumer#mark_as_consumed on controller
+ - #259 - Allow forcing key/partition key on responders
+ - #267 - Styling inconsistency
+ - #242 - Support setting the max bytes to fetch per request
+ - #247 - Support SCRAM once released
+ - #271 - Provide an after_init option to pass a configuration block
+ - #262 - Error in the monitor code for NewRelic
+ - #241 - Performance metrics
+ - #274 - Rename controllers to consumers
+ - #184 - Seek to
+ - #284 - Dynamic Params parent class
+ - #275 - ssl_ca_certs_from_system
+ - #296 - Instrument forceful exit with an error
+ - Replaced some of the activesupport parts with dry-inflector
+ - Lower ActiveSupport dependency
+ - Remove configurators in favor of the after_init block configurator
+ - Ruby 2.5.0 support
+ - Renamed Karafka::Connection::Processor to Karafka::Connection::Delegator to match incoming naming conventions
+ - Renamed Karafka::Connection::Consumer to Karafka::Connection::Client due to #274
+ - Removed HashWithIndifferentAccess in favor of a regular hash
+ - JSON parsing defaults now to string keys
+ - Lower memory usage due to less params data internal details
+ - Support multiple ```after_init``` blocks in favor of a single one
+ - Renamed ```received_at``` to ```receive_time``` to follow ruby-kafka and WaterDrop conventions
+ - Adjust internal setup to easier map Ruby-Kafka config changes
+ - System callbacks reorganization
+ - Added ```before_fetch_loop``` configuration block for early client usage (```#seek```, etc)
+ - Renamed ```after_fetched``` to ```after_fetch``` to normalize the naming convention
+ - Instrumentation on a connection delegator level
+ - Added ```params_batch#last``` method to retrieve last element after unparsing
+ - All params keys are now strings
+
+ ## 1.1.2
+ - #256 - Default kafka.seed_brokers configuration is created in invalid format
+
+ ## 1.1.1
+ - #253 - Allow providing a global per app parser in config settings
+
+ ## 1.1.0
+ - Gem bump
+ - Switch from Celluloid to native Thread management
+ - Improved shutdown process
+ - Introduced optional fetch callbacks and moved current the ```after_received``` there as well
+ - Karafka will raise Errors::InvalidPauseTimeout exception when trying to pause but timeout set to 0
+ - Allow float for timeouts and other time based second settings
+ - Renamed MessagesProcessor to Processor and MessagesConsumer to Consumer - we don't process and don't consumer anything else so it was pointless to keep this "namespace"
+ - #232 - Remove unused ActiveSupport require
+ - #214 - Expose consumer on a controller layer
+ - #193 - Process shutdown callbacks
+ - Fixed accessibility of ```#params_batch``` from the outside of the controller
+ - connection_pool config options are no longer required
+ - celluloid config options are no longer required
+ - ```#perform``` is now renamed to ```#consume``` with warning level on using the old one (deprecated)
+ - #235 - Rename perform to consume
+ - Upgrade to ruby-kafka 0.5
+ - Due to redesign of Waterdrop concurrency setting is no longer needed
+ - #236 - Manual offset management
+ - WaterDrop 1.0.0 support with async
+ - Renamed ```batch_consuming``` option to ```batch_fetching``` as it is not a consumption (with processing) but a process of fetching messages from Kafka. The messages is considered consumed, when it is processed.
+ - Renamed ```batch_processing``` to ```batch_consuming``` to resemble Kafka concept of consuming messages.
+ - Renamed ```after_received``` to ```after_fetched``` to normalize the naming conventions.
+ - Responders support the per topic ```async``` option.
+
+ ## 1.0.1
+ - #210 - LoadError: cannot load such file -- [...]/karafka.rb
+ - Ruby 2.4.2 as a default (+travis integration)
+ - JRuby upgrade
+ - Expanded persistence layer (moved to a namespace for easier future development)
+ - #213 - Misleading error when non-existing dependency is required
+ - #212 - Make params react to #topic, #partition, #offset
+ - #215 - Consumer group route dynamic options are ignored
+ - #217 - check RUBY_ENGINE constant if RUBY_VERSION is missing (#217)
+ - #218 - add configuration setting to control Celluloid's shutdown timeout
+ - Renamed Karafka::Routing::Mapper to Karafka::Routing::TopicMapper to match naming conventions
+ - #219 - Allow explicit consumer group names, without prefixes
+ - Fix to early removed pid upon shutdown of demonized process
+ - max_wait_time updated to match https://github.com/zendesk/ruby-kafka/issues/433
+ - #230 - Better uri validation for seed brokers (incompatibility as the kafka:// or kafka+ssl:// is required)
+ - Small internal docs fixes
+ - Dry::Validation::MissingMessageError: message for broker_schema? was not found
+ - #238 - warning: already initialized constant Karafka::Schemas::URI_SCHEMES
+
+ ## 1.0.0
+
+ ### Closed issues:
+
+ - #103 - Env for logger is loaded 2 early (on gem load not on app init)
+ - #142 - Possibility to better control Kafka consumers (consumer groups management)
+ - #150 - Add support for start_from_beginning on a per topic basis
+ - #154 - Support for min_bytes and max_wait_time on messages consuming
+ - #160 - Reorganize settings to better resemble ruby-kafka requirements
+ - #164 - If we decide to have configuration per topic, topic uniqueness should be removed
+ - #165 - Router validator
+ - #166 - Params and route reorganization (new API)
+ - #167 - Remove Sidekiq UI from Karafka
+ - #168 - Introduce unique IDs of routes
+ - #171 - Add kafka message metadata to params
+ - #176 - Transform Karafka::Connection::Consumer into a module
+ - #177 - Monitor not reacting when kafka killed with -9
+ - #175 - Allow single consumer to subscribe to multiple topics
+ - #178 - Remove parsing failover when cannot unparse data
+ - #174 - Extended config validation
+ - ~~#180 - Switch from JSON parser to yajl-ruby~~
+ - #181 - When responder is defined and not used due to ```respond_with``` not being triggered in the perform, it won't raise an exception.
+ - #188 - Rename name in config to client id
+ - #186 - Support ruby-kafka ```ssl_ca_cert_file_path``` config
+ - #189 - karafka console does not preserve history on exit
+ - #191 - Karafka 0.6.0rc1 does not work with jruby / now it does :-)
+ - Switch to multi json so everyone can use their favourite JSON parser
+ - Added jruby support in general and in Travis
+ - #196 - Topic mapper does not map topics when subscribing thanks to @webandtech
+ - #96 - Karafka server - possiblity to run it only for a certain topics
+ - ~~karafka worker cli option is removed (please use sidekiq directly)~~ - restored, bad idea
+ - (optional) pausing upon processing failures ```pause_timeout```
+ - Karafka console main process no longer intercepts irb errors
+ - Wiki updates
+ - #204 - Long running controllers
+ - Better internal API to handle multiple usage cases using ```Karafka::Controllers::Includer```
+ - #207 - Rename before_enqueued to after_received
+ - #147 - Deattach Karafka from Sidekiq by extracting Sidekiq backend
+
+ ### New features and improvements
+
+ - batch processing thanks to ```#batch_consuming``` flag and ```#params_batch``` on controllers
+ - ```#topic``` method on an controller instance to make a clear distinction in between params and route details
+ - Changed routing model (still compatible with 0.5) to allow better resources management
+ - Lower memory requirements due to object creation limitation (2-3 times less objects on each new message)
+ - Introduced the ```#batch_consuming``` config flag (config for #126) that can be set per each consumer_group
+ - Added support for partition, offset and partition key in the params hash
+ - ```name``` option in config renamed to ```client_id```
+ - Long running controllers with ```persistent``` flag on a topic config level, to make controller instances persistent between messages batches (single controller instance per topic per partition no per messages batch) - turned on by default
+
+ ### Incompatibilities
+
+ - Default boot file is renamed from app.rb to karafka.rb
+ - Removed worker glass as dependency (now and independent gem)
+ - ```kafka.hosts``` option renamed to ```kafka.seed_brokers``` - you don't need to provide all the hosts to work with Kafka
+ - ```start_from_beginning``` moved into kafka scope (```kafka.start_from_beginning```)
+ - Router no longer checks for route uniqueness - now you can define same routes for multiple kafkas and do a lot of crazy stuff, so it's your responsibility to check uniqueness
+ - Change in the way we identify topics in between Karafka and Sidekiq workers. If you upgrade, please make sure, all the jobs scheduled in Sidekiq are finished before the upgrade.
+ - ```batch_mode``` renamed to ```batch_fetching```
+ - Renamed content to value to better resemble ruby-kafka internal messages naming convention
+ - When having a responder with ```required``` topics and not using ```#respond_with``` at all, it will raise an exception
+ - Renamed ```inline_mode``` to ```inline_processing``` to resemble other settings conventions
+ - Renamed ```inline_processing``` to ```backend``` to reach 1.0 future compatibility
+ - Single controller **needs** to be used for a single topic consumption
+ - Renamed ```before_enqueue``` to ```after_received``` to better resemble internal logic, since for inline backend, there is no enqueue.
+ - Due to the level on which topic and controller are related (class level), the dynamic worker selection is no longer available.
+ - Renamed params #retrieve to params #retrieve! to better reflect what it does
+
+ ### Other changes
+ - PolishGeeksDevTools removed (in favour of Coditsu)
+ - Waaaaaay better code quality thanks to switching from dev tools to Coditsu
+ - Gem bump
+ - Cleaner internal API
+ - SRP
+ - Better settings proxying and management between ruby-kafka and karafka
+ - All internal validations are now powered by dry-validation
+ - Better naming conventions to reflect Kafka reality
+ - Removed Karafka::Connection::Message in favour of direct message details extraction from Kafka::FetchedMessage
+
+ ## 0.5.0.3
+ - #132 - When Kafka is gone, should reconnect after a time period
+ - #136 - new ruby-kafka version + other gem bumps
+ - ruby-kafka update
+ - #135 - NonMatchingRouteError - better error description in the code
+ - #140 - Move Capistrano Karafka to a different specific gem
+ - #110 - Add call method on a responder class to alias instance build and call
+ - #76 - Configs validator
+ - #138 - Possibility to have no worker class defined if inline_mode is being used
+ - #145 - Topic Mapper
+ - Ruby update to 2.4.1
+ - Gem bump x2
+ - #158 - Update docs section on heroku usage
+ - #150 - Add support for start_from_beginning on a per topic basis
+ - #148 - Lower Karafka Sidekiq dependency
+ - Allow karafka root to be specified from ENV
+ - Handle SIGTERM as a shutdown command for kafka server to support Heroku deployment
+
+ ## 0.5.0.2
+ - Gems update x3
+ - Default Ruby set to 2.3.3
+ - ~~Default Ruby set to 2.4.0~~
+ - Readme updates to match bug fixes and resolved issues
+ - #95 - Allow options into responder
+ - #98 - Use parser when responding on a topic
+ - #114 - Option to configure waterdrop connection pool timeout and concurrency
+ - #118 - Added dot in topic validation format
+ - #119 - add support for authentication using SSL
+ - #121 - JSON as a default for standalone responders usage
+ - #122 - Allow on capistrano role customization
+ - #125 - Add support to batch incoming messages
+ - #130 - start_from_beginning flag on routes and default
+ - #128 - Monitor caller_label not working with super on inheritance
+ - Renamed *inline* to *inline_mode* to stay consistent with flags that change the way karafka works (#125)
+ - Dry-configurable bump to 0.5 with fixed proc value evaluation on retrieve patch (internal change)
+
+ ## 0.5.0.1
+ - Fixed inconsistency in responders non-required topic definition. Now only required: false available
+ - #101 - Responders fail when multiple_usage true and required false
+ - fix error on startup from waterdrop #102
+ - Waterdrop 0.3.2.1 with kafka.hosts instead of kafka_hosts
+ - #105 - Karafka::Monitor#caller_label not working with inherited monitors
+ - #99 - Standalone mode (without Sidekiq)
+ - #97 - Buffer responders single topics before send (prevalidation)
+ - Better control over consumer thanks to additional config options
+ - #111 - Dynamic worker assignment based on the income params
+ - Long shutdown time fix
+
+ ## 0.5.0
+ - Removed Zookeeper totally as dependency
+ - Better group and partition rebalancing
+ - Automatic thread management (no need for tunning) - each topic is a separate actor/thread
+ - Moved from Poseidon into Ruby-Kafka
+ - No more max_concurrency setting
+ - After you define your App class and routes (and everything else) you need to add execute App.boot!
+ - Manual consuming is no longer available (no more karafka consume command)
+ - Karafka topics CLI is no longer available. No Zookeeper - no global topic discovery
+ - Dropped ZK as dependency
+ - karafka info command no longer prints details about Zookeeper
+ - Better shutdown
+ - No more autodiscovery via Zookeeper - instead, the whole cluster will be discovered directly from Kafka
+ - No more support for Kafka 0.8
+ - Support for Kafka 0.9
+ - No more need for ActorCluster, since now we have a single thread (and Kafka connection) per topic
+ - Ruby 2.2.* support dropped
+ - Using App name as a Kafka client_id
+ - Automatic Capistrano integration
+ - Responders support for handling better responses pipelining and better responses flow description and design (see README for more details)
+ - Gem bump
+ - Readme updates
+ - karafka flow CLI command for printing the application flow
+ - Some internal refactorings
+
+ ## 0.4.2
+ - #87 - Reconsume mode with crone for better Rails/Rack integration
+ - Moved Karafka server related stuff into separate Karafka::Server class
+ - Renamed Karafka::Runner into Karafka::Fetcher
+ - Gem bump
+ - Added chroot option to Zookeeper options
+ - Moved BROKERS_PATH into config from constant
+ - Added Karafka consume CLI action for a short running single consumption round
+ - Small fixes to close broken connections
+ - Readme updates
+
+ ## 0.4.1
+ - Explicit throw(:abort) required to halt before_enqueue (like in Rails 5)
+ - #61 - Autodiscover Kafka brokers based on Zookeeper data
+ - #63 - Graceful shutdown with current offset state during data processing
+ - #65 - Example of NewRelic monitor is outdated
+ - #71 - Setup should be executed after user code is loaded
+ - Gem bump x3
+ - Rubocop remarks
+ - worker_timeout config option has been removed. It now needs to be defined manually by the framework user because WorkerGlass::Timeout can be disabled and we cannot use Karafka settings on a class level to initialize user code stuff
+ - Moved setup logic under setup/Setup namespace
+ - Better defaults handling
+ - #75 - Kafka and Zookeeper options as a hash
+ - #82 - Karafka autodiscovery fails upon caching of configs
+ - #81 - Switch config management to dry configurable
+ - Version fix
+ - Dropped support for Ruby 2.1.*
+ - Ruby bump to 2.3.1
+
+ ## 0.4.0
+ - Added WaterDrop gem with default configuration
+ - Refactoring of config logic to simplify adding new dependencies that need to be configured based on #setup data
+ - Gem bump
+ - Readme updates
+ - Renamed cluster to actor_cluster for method names
+ - Replaced SidekiqGlass with generic WorkerGlass lib
+ - Application bootstrap in app.rb no longer required
+ - Karafka.boot needs to be executed after all the application files are loaded (template updated)
+ - Small loader refactor (no API changes)
+ - Ruby 2.3.0 support (default)
+ - No more rake tasks
+ - Karafka CLI instead of rake tasks
+ - Worker cli command allows passing additional options directly to Sidekiq
+ - Renamed concurrency to max_concurrency - it describes better what happens - Karafka will use this number of threads only when required
+ - Added wait_timeout that allows us to tune how long should we wait on a single socket connection (single topic) for new messages before going to next one (this applies to each thread separately)
+ - Rubocop remarks
+ - Removed Sinatra and Puma dependencies
+ - Karafka Cli internal reorganization
+ - Karafka Cli routes task
+ - #37 - warn log for failed parsing of a message
+ - #43 - wrong constant name
+ - #44 - Method name conflict
+ - #48 - Cannot load such file -- celluloid/current
+ - #46 - Loading application
+ - #45 - Set up monitor in config
+ - #47 - rake karafka:run uses app.rb only
+ - #53 - README update with Sinatra/Rails integration description
+ - #41 - New Routing engine
+ - #54 - Move Karafka::Workers::BaseWorker to Karafka::BaseWorker
+ - #55 - ApplicationController and ApplicationWorker
+
+ ## 0.3.2
+ - Karafka::Params::Params lazy load merge keys with string/symbol names priorities fix
+
+ ## 0.3.1
+ - Renamed Karafka::Monitor to Karafka::Process to represent a Karafka process wrapper
+ - Added Karafka::Monitoring that allows to add custom logging and monitoring with external libraries and systems
+ - Moved logging functionality into Karafka::Monitoring default monitoring
+ - Added possibility to provide own monitoring as long as in responds to #notice and #notice_error
+ - Standarized logging format for all logs
+
+ ## 0.3.0
+ - Switched from custom ParserError for each parser to general catching of Karafka::Errors::ParseError and its descendants
+ - Gem bump
+ - Fixed #32 - now when using custom workers that does not inherit from Karafka::BaseWorker perform method is not required. Using custom workers means that the logic that would normally lie under #perform, needs to be executed directly from the worker.
+ - Fixed #31 - Technically didn't fix because this is how Sidekiq is meant to work, but provided possibility to assign custom interchangers that allow to bypass JSON encoding issues by converting data that goes to Redis to a required format (and parsing it back when it is fetched)
+ - Added full parameters lazy load - content is no longer loaded during #perform_async if params are not used in before_enqueue
+ - No more namespaces for Redis by default (use separate DBs)
+
+ ## 0.1.21
+ - Sidekiq 4.0.1 bump
+ - Gem bump
+ - Added direct celluloid requirement to Karafka (removed from Sidekiq)
+
+ ## 0.1.19
+ - Internal call - schedule naming change
+ - Enqueue to perform_async naming in controller to follow Sidekiqs naming convention
+ - Gem bump
+
+ ## 0.1.18
+ - Changed Redis configuration options into a single hash that is directly passed to Redis setup for Sidekiq
+ - Added config.ru to provide a Sidekiq web UI (see README for more details)
+
+ ## 0.1.17
+ - Changed Karafka::Connection::Cluster tp Karafka::Connection::ActorCluster to distinguish between a single thread actor cluster for multiple topic connection and a future feature that will allow process clusterization.
+ - Add an ability to use user-defined parsers for a messages
+ - Lazy load params for before callbacks
+ - Automatic loading/initializng all workers classes during startup (so Sidekiq won't fail with unknown workers exception)
+ - Params are now private to controller
+ - Added bootstrap method to app.rb
+
+ ## 0.1.16
+ - Cluster level error catching for all exceptions so actor is not killer
+ - Cluster level error logging
+ - Listener refactoring (QueueConsumer extracted)
+ - Karafka::Connection::QueueConsumer to wrap around fetching logic - technically we could replace Kafka with any other messaging engine as long as we preserve the same API
+ - Added debug env for debugging purpose in applications
+
+ ## 0.1.15
+ - Fixed max_wait_ms vs socket_timeout_ms issue
+ - Fixed closing queue connection after Poseidon::Errors::ProtocolError failure
+ - Fixed wrong logging file selection based on env
+ - Extracted Karafka::Connection::QueueConsumer object to wrap around queue connection
+
+ ## 0.1.14
+ - Rake tasks for listing all the topics on Kafka server (rake kafka:topics)
+
+ ## 0.1.13
+ - Ability to assign custom workers and use them bypassing Karafka::BaseWorker (or its descendants)
+ - Gem bump
+
+ ## 0.1.12
+ - All internal errors went to Karafka::Errors namespace
+
+ ## 0.1.11
+ - Rescuing all the "before Sidekiq" processing so errors won't affect other incoming messages
+ - Fixed dying actors after connection error
+ - Added a new app status - "initializing"
+ - Karafka::Status model cleanup
+
+ ## 0.1.10
+ - Added possibility to specify redis namespace in configuration (failover to app name)
+ - Renamed redis_host to redis_url in configuration
+
+ ## 0.1.9
+ - Added worker logger
+
+ ## 0.1.8
+ - Droped local env suppot in favour of [Envlogic](https://github.com/karafka/envlogic) - no changes in API
+
+ ## 0.1.7
+ - Karafka option for Redis hosts (not localhost only)
+
+ ## 0.1.6
+ - Added better concurency by clusterization of listeners
+ - Added graceful shutdown
+ - Added concurency that allows to handle bigger applications with celluloid
+ - Karafka controllers no longer require group to be defined (created based on the topic and app name)
+ - Karafka controllers no longer require topic to be defined (created based on the controller name)
+ - Readme updates
+
+ ## 0.1.5
+ - Celluloid support for listeners
+ - Multi target logging (STDOUT and file)
+
+ ## 0.1.4
+ - Renamed events to messages to follow Apache Kafka naming convention
+
+ ## 0.1.3
+ - Karafka::App.logger moved to Karafka.logger
+ - README updates (Usage section was added)
+
+ ## 0.1.2
+ - Logging to log/environment.log
+ - Karafka::Runner
+
+ ## 0.1.1
+ - README updates
+ - Raketasks updates
+ - Rake installation task
+ - Changelog file added
+
+ ## 0.1.0
+ - Initial framework code
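
The headline of this release, per the 1.2.13 entry above, is support for ruby-kafka's sasl_over_ssl parameter. A minimal, illustrative sketch of how it might be enabled in a Karafka 1.2 setup block; the class name, broker address and credentials are placeholders, not taken from this diff:

```ruby
# Sketch only: enabling sasl_over_ssl in a Karafka 1.2.x application.
require 'karafka'

class KarafkaApp < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
    # SASL/PLAIN credentials (placeholders)
    config.kafka.sasl_plain_username = ENV['KAFKA_SASL_USERNAME']
    config.kafka.sasl_plain_password = ENV['KAFKA_SASL_PASSWORD']
    # New in 1.2.13: forwarded to ruby-kafka to control whether SASL
    # authentication happens over an SSL-encrypted connection.
    config.kafka.sasl_over_ssl = true
  end
end
```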