karafka 1.2.11

Sign up to get free protection for your applications and to get access to all the features.
Files changed (88) hide show
  1. checksums.yaml +7 -0
  2. data/.coditsu.yml +3 -0
  3. data/.console_irbrc +13 -0
  4. data/.gitignore +68 -0
  5. data/.rspec +1 -0
  6. data/.ruby-gemset +1 -0
  7. data/.ruby-version +1 -0
  8. data/.travis.yml +49 -0
  9. data/CHANGELOG.md +458 -0
  10. data/CODE_OF_CONDUCT.md +46 -0
  11. data/CONTRIBUTING.md +41 -0
  12. data/Gemfile +15 -0
  13. data/Gemfile.lock +126 -0
  14. data/MIT-LICENCE +18 -0
  15. data/README.md +102 -0
  16. data/bin/karafka +19 -0
  17. data/config/errors.yml +6 -0
  18. data/karafka.gemspec +42 -0
  19. data/lib/karafka.rb +79 -0
  20. data/lib/karafka/app.rb +45 -0
  21. data/lib/karafka/attributes_map.rb +69 -0
  22. data/lib/karafka/backends/inline.rb +16 -0
  23. data/lib/karafka/base_consumer.rb +68 -0
  24. data/lib/karafka/base_responder.rb +208 -0
  25. data/lib/karafka/callbacks.rb +30 -0
  26. data/lib/karafka/callbacks/config.rb +22 -0
  27. data/lib/karafka/callbacks/dsl.rb +16 -0
  28. data/lib/karafka/cli.rb +54 -0
  29. data/lib/karafka/cli/base.rb +78 -0
  30. data/lib/karafka/cli/console.rb +29 -0
  31. data/lib/karafka/cli/flow.rb +46 -0
  32. data/lib/karafka/cli/info.rb +29 -0
  33. data/lib/karafka/cli/install.rb +42 -0
  34. data/lib/karafka/cli/server.rb +66 -0
  35. data/lib/karafka/connection/api_adapter.rb +148 -0
  36. data/lib/karafka/connection/builder.rb +16 -0
  37. data/lib/karafka/connection/client.rb +107 -0
  38. data/lib/karafka/connection/delegator.rb +46 -0
  39. data/lib/karafka/connection/listener.rb +60 -0
  40. data/lib/karafka/consumers/callbacks.rb +54 -0
  41. data/lib/karafka/consumers/includer.rb +51 -0
  42. data/lib/karafka/consumers/responders.rb +24 -0
  43. data/lib/karafka/consumers/single_params.rb +15 -0
  44. data/lib/karafka/errors.rb +50 -0
  45. data/lib/karafka/fetcher.rb +44 -0
  46. data/lib/karafka/helpers/class_matcher.rb +78 -0
  47. data/lib/karafka/helpers/config_retriever.rb +46 -0
  48. data/lib/karafka/helpers/multi_delegator.rb +33 -0
  49. data/lib/karafka/instrumentation/listener.rb +112 -0
  50. data/lib/karafka/instrumentation/logger.rb +55 -0
  51. data/lib/karafka/instrumentation/monitor.rb +64 -0
  52. data/lib/karafka/loader.rb +28 -0
  53. data/lib/karafka/params/dsl.rb +158 -0
  54. data/lib/karafka/params/params_batch.rb +46 -0
  55. data/lib/karafka/parsers/json.rb +38 -0
  56. data/lib/karafka/patches/dry_configurable.rb +33 -0
  57. data/lib/karafka/patches/ruby_kafka.rb +34 -0
  58. data/lib/karafka/persistence/client.rb +25 -0
  59. data/lib/karafka/persistence/consumer.rb +38 -0
  60. data/lib/karafka/persistence/topic.rb +29 -0
  61. data/lib/karafka/process.rb +62 -0
  62. data/lib/karafka/responders/builder.rb +36 -0
  63. data/lib/karafka/responders/topic.rb +57 -0
  64. data/lib/karafka/routing/builder.rb +61 -0
  65. data/lib/karafka/routing/consumer_group.rb +61 -0
  66. data/lib/karafka/routing/consumer_mapper.rb +34 -0
  67. data/lib/karafka/routing/proxy.rb +37 -0
  68. data/lib/karafka/routing/router.rb +29 -0
  69. data/lib/karafka/routing/topic.rb +60 -0
  70. data/lib/karafka/routing/topic_mapper.rb +55 -0
  71. data/lib/karafka/schemas/config.rb +24 -0
  72. data/lib/karafka/schemas/consumer_group.rb +78 -0
  73. data/lib/karafka/schemas/consumer_group_topic.rb +18 -0
  74. data/lib/karafka/schemas/responder_usage.rb +39 -0
  75. data/lib/karafka/schemas/server_cli_options.rb +43 -0
  76. data/lib/karafka/server.rb +85 -0
  77. data/lib/karafka/setup/config.rb +193 -0
  78. data/lib/karafka/setup/configurators/base.rb +29 -0
  79. data/lib/karafka/setup/configurators/params.rb +25 -0
  80. data/lib/karafka/setup/configurators/water_drop.rb +32 -0
  81. data/lib/karafka/setup/dsl.rb +22 -0
  82. data/lib/karafka/status.rb +25 -0
  83. data/lib/karafka/templates/application_consumer.rb.example +6 -0
  84. data/lib/karafka/templates/application_responder.rb.example +11 -0
  85. data/lib/karafka/templates/karafka.rb.example +54 -0
  86. data/lib/karafka/version.rb +7 -0
  87. data/log/.gitkeep +0 -0
  88. metadata +303 -0
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: 5c2e53b615dda9e231595e49d3ec0be42c18fa37c0143a1387e5e86b7e2250c6
4
+ data.tar.gz: 27221b33684a4418b111eea86e088367de20a6d2e27157e7358685c3b4fc88eb
5
+ SHA512:
6
+ metadata.gz: 2c3279ad72202031be303b64e8505c578fac252364f0bd78351251d4468e465ffad0427fc3218adc00d36cfb4dce5b5d6df33a8db60f1bf05f33f64c9fbeba75
7
+ data.tar.gz: 9009bf7b22eaac06faa714f1a349f05807a82f5fc2dbd75ddc73681027bd1ce6bf71e244b42a51349afa024c516ac3388bf2e81370cf12f5f12cccbbc9000418
@@ -0,0 +1,3 @@
1
+ api_key: <%= ENV['CODITSU_API_KEY'] %>
2
+ api_secret: <%= ENV['CODITSU_API_SECRET'] %>
3
+ repository_id: <%= ENV['CODITSU_REPOSITORY_ID'] %>
@@ -0,0 +1,13 @@
1
+ # irbrc for Karafka console
2
+ require 'karafka'
3
+ require Karafka.boot_file
4
+
5
+ IRB.conf[:AUTO_INDENT] = true
6
+ IRB.conf[:SAVE_HISTORY] = 1000
7
+ IRB.conf[:USE_READLINE] = true
8
+ IRB.conf[:HISTORY_FILE] = "#{Karafka::App.root}/.irb-history"
9
+ IRB.conf[:LOAD_MODULES] = [] unless IRB.conf.key?(:LOAD_MODULES)
10
+
11
+ unless IRB.conf[:LOAD_MODULES].include?('irb/completion')
12
+ IRB.conf[:LOAD_MODULES] << 'irb/completion'
13
+ end
@@ -0,0 +1,68 @@
1
+ # bundler state
2
+ /.bundle
3
+ /vendor/bundle/
4
+ /vendor/ruby/
5
+ /ruby/
6
+ app.god
7
+
8
+ # minimal Rails specific artifacts
9
+ db/*.sqlite3
10
+ /log/development.log
11
+ /log/production.log
12
+ /log/test.log
13
+ /tmp/*
14
+ *.gem
15
+ *.~
16
+
17
+ # various artifacts
18
+ **.war
19
+ *.rbc
20
+ *.sassc
21
+ .byebug_history
22
+ .redcar/
23
+ .capistrano/
24
+ .sass-cache
25
+ /config/god/sidekiq.rb
26
+ /config/puma.rb
27
+ /coverage.data
28
+ /coverage/
29
+ /doc/api/
30
+ /doc/app/
31
+ /doc/yard
32
+ /doc/features.html
33
+ /doc/specs.html
34
+ /spec/tmp/*
35
+ /cache
36
+ /capybara*
37
+ /capybara-*.html
38
+ /gems
39
+ /specifications
40
+ rerun.txt
41
+ pickle-email-*.html
42
+
43
+ # If you find yourself ignoring temporary files generated by your text editor
44
+ # or operating system, you probably want to add a global ignore instead:
45
+ # git config --global core.excludesfile ~/.gitignore_global
46
+ #
47
+ # Here are some files you may want to ignore globally:
48
+
49
+ # scm revert files
50
+ **.orig
51
+
52
+ # Mac finder artifacts
53
+ .DS_Store
54
+
55
+ # Netbeans project directory
56
+ /nbproject
57
+
58
+ # RubyMine project files
59
+ .idea
60
+
61
+ # Textmate project files
62
+ /*.tmproj
63
+
64
+ # vim artifacts
65
+ **.swp
66
+
67
+ # documentation
68
+ .yardoc
data/.rspec ADDED
@@ -0,0 +1 @@
1
+ --require spec_helper
@@ -0,0 +1 @@
1
+ karafka
@@ -0,0 +1 @@
1
+ 2.6.1
@@ -0,0 +1,49 @@
1
+ services:
2
+ - docker
3
+
4
+ dist: trusty
5
+ sudo: false
6
+ cache: bundler
7
+
8
+ git:
9
+ depth: false
10
+
11
+ test: &test
12
+ stage: Test
13
+ language: ruby
14
+ before_install:
15
+ - gem install bundler
16
+ - gem update --system
17
+ script: bundle exec rspec
18
+
19
+ jobs:
20
+ include:
21
+ - <<: *test
22
+ rvm: 2.6.1
23
+ - <<: *test
24
+ rvm: 2.5.3
25
+ - <<: *test
26
+ rvm: 2.4.5
27
+ - <<: *test
28
+ rvm: 2.3.8
29
+
30
+ - stage: coditsu
31
+ language: ruby
32
+ rvm: 2.6.1
33
+ before_install:
34
+ - gem update --system
35
+ - gem install bundler
36
+ before_script:
37
+ - docker create -v /sources --name sources alpine:3.4 /bin/true
38
+ - docker cp ./ sources:/sources
39
+ script: >
40
+ docker run
41
+ -e CODITSU_API_KEY
42
+ -e CODITSU_API_SECRET
43
+ -e CODITSU_REPOSITORY_ID
44
+ --volumes-from sources
45
+ coditsu/build-runner:latest
46
+
47
+ stages:
48
+ - coditsu
49
+ - test
@@ -0,0 +1,458 @@
1
+ # Karafka framework changelog
2
+
3
+ ## 1.2.11
4
+ - [#470](https://github.com/karafka/karafka/issues/470) Karafka not working with dry-configurable 0.8
5
+
6
+ ## 1.2.10
7
+ - [#453](https://github.com/karafka/karafka/pull/453) require `Forwardable` module
8
+
9
+ ## 1.2.9
10
+ - Critical exceptions now will cause consumer to stop instead of retrying without a break
11
+ - #412 - Fix dry-inflector dependency lock in gemspec
12
+ - #414 - Backport to 1.2 the delayed retry upon failure
13
+ - #437 - Raw message is no longer added to params after ParserError raised
14
+
15
+ ## 1.2.8
16
+ - #408 - Responder Topic Lookup Bug on Heroku
17
+
18
+ ## 1.2.7
19
+ - Unlock Ruby-kafka version with a warning
20
+
21
+ ## 1.2.6
22
+ - Lock WaterDrop to 1.2.3
23
+ - Lock Ruby-Kafka to 0.6.x (support for 0.7 will be added in Karafka 1.3)
24
+
25
+ ## 1.2.5
26
+ - #354 - Expose consumer heartbeat
27
+ - #373 - Async producer not working properly with responders
28
+
29
+ ## 1.2.4
30
+ - #332 - Fetcher for max queue size
31
+
32
+ ## 1.2.3
33
+ - #313 - support PLAINTEXT and SSL for scheme
34
+ #320 - Pausing indefinitely with nil pause timeout doesn't work
35
+ - #318 - Partition pausing doesn't work with custom topic mappers
36
+ - Rename ConfigAdapter to ApiAdapter to better reflect what it does
37
+ - #317 - Manual offset committing doesn't work with custom topic mappers
38
+
39
+ ## 1.2.2
40
+ - #312 - Broken for ActiveSupport 5.2.0
41
+
42
+ ## 1.2.1
43
+ - #304 - Unification of error instrumentation event details
44
+ - #306 - Using file logger from within a trap context upon shutdown is impossible
45
+
46
+ ## 1.2.0
47
+ - Spec improvements
48
+ - #260 - Specs missing randomization
49
+ - #251 - Shutdown upon non responding (unreachable) cluster is not possible
50
+ - #258 - Investigate lowering requirements on activesupport
51
+ - #246 - Alias consumer#mark_as_consumed on controller
52
+ - #259 - Allow forcing key/partition key on responders
53
+ - #267 - Styling inconsistency
54
+ - #242 - Support setting the max bytes to fetch per request
55
+ - #247 - Support SCRAM once released
56
+ - #271 - Provide an after_init option to pass a configuration block
57
+ - #262 - Error in the monitor code for NewRelic
58
+ - #241 - Performance metrics
59
+ - #274 - Rename controllers to consumers
60
+ - #184 - Seek to
61
+ - #284 - Dynamic Params parent class
62
+ - #275 - ssl_ca_certs_from_system
63
+ - #296 - Instrument forceful exit with an error
64
+ - Replaced some of the activesupport parts with dry-inflector
65
+ - Lower ActiveSupport dependency
66
+ - Remove configurators in favor of the after_init block configurator
67
+ - Ruby 2.5.0 support
68
+ - Renamed Karafka::Connection::Processor to Karafka::Connection::Delegator to match incoming naming conventions
69
+ - Renamed Karafka::Connection::Consumer to Karafka::Connection::Client due to #274
70
+ - Removed HashWithIndifferentAccess in favor of a regular hash
71
+ - JSON parsing defaults now to string keys
72
+ - Lower memory usage due to less params data internal details
73
+ - Support multiple ```after_init``` blocks in favor of a single one
74
+ - Renamed ```received_at``` to ```receive_time``` to follow ruby-kafka and WaterDrop conventions
75
+ - Adjust internal setup to easier map Ruby-Kafka config changes
76
+ - System callbacks reorganization
77
+ - Added ```before_fetch_loop``` configuration block for early client usage (```#seek```, etc)
78
+ - Renamed ```after_fetched``` to ```after_fetch``` to normalize the naming convention
79
+ - Instrumentation on a connection delegator level
80
+ - Added ```params_batch#last``` method to retrieve last element after unparsing
81
+ - All params keys are now strings
82
+
83
+ ## 1.1.2
84
+ - #256 - Default kafka.seed_brokers configuration is created in invalid format
85
+
86
+ ## 1.1.1
87
+ - #253 - Allow providing a global per app parser in config settings
88
+
89
+ ## 1.1.0
90
+ - Gem bump
91
+ - Switch from Celluloid to native Thread management
92
+ - Improved shutdown process
93
+ - Introduced optional fetch callbacks and moved current the ```after_received``` there as well
94
+ - Karafka will raise Errors::InvalidPauseTimeout exception when trying to pause but timeout set to 0
95
+ - Allow float for timeouts and other time based second settings
96
+ - Renamed MessagesProcessor to Processor and MessagesConsumer to Consumer - we don't process and don't consumer anything else so it was pointless to keep this "namespace"
97
+ - #232 - Remove unused ActiveSupport require
98
+ - #214 - Expose consumer on a controller layer
99
+ - #193 - Process shutdown callbacks
100
+ - Fixed accessibility of ```#params_batch``` from the outside of the controller
101
+ - connection_pool config options are no longer required
102
+ - celluloid config options are no longer required
103
+ - ```#perform``` is now renamed to ```#consume``` with warning level on using the old one (deprecated)
104
+ - #235 - Rename perform to consume
105
+ - Upgrade to ruby-kafka 0.5
106
+ - Due to redesign of Waterdrop concurrency setting is no longer needed
107
+ - #236 - Manual offset management
108
+ - WaterDrop 1.0.0 support with async
109
+ Renamed ```batch_consuming``` option to ```batch_fetching``` as it is not a consumption (with processing) but a process of fetching messages from Kafka. The message is considered consumed when it is processed.
110
+ - Renamed ```batch_processing``` to ```batch_consuming``` to resemble Kafka concept of consuming messages.
111
+ - Renamed ```after_received``` to ```after_fetched``` to normalize the naming conventions.
112
+ - Responders support the per topic ```async``` option.
113
+
114
+ ## 1.0.1
115
+ - #210 - LoadError: cannot load such file -- [...]/karafka.rb
116
+ - Ruby 2.4.2 as a default (+travis integration)
117
+ - JRuby upgrade
118
+ - Expanded persistence layer (moved to a namespace for easier future development)
119
+ - #213 - Misleading error when non-existing dependency is required
120
+ - #212 - Make params react to #topic, #partition, #offset
121
+ - #215 - Consumer group route dynamic options are ignored
122
+ - #217 - check RUBY_ENGINE constant if RUBY_VERSION is missing (#217)
123
+ - #218 - add configuration setting to control Celluloid's shutdown timeout
124
+ - Renamed Karafka::Routing::Mapper to Karafka::Routing::TopicMapper to match naming conventions
125
+ - #219 - Allow explicit consumer group names, without prefixes
126
+ - Fix to early removed pid upon shutdown of demonized process
127
+ - max_wait_time updated to match https://github.com/zendesk/ruby-kafka/issues/433
128
+ - #230 - Better uri validation for seed brokers (incompatibility as the kafka:// or kafka+ssl:// is required)
129
+ - Small internal docs fixes
130
+ - Dry::Validation::MissingMessageError: message for broker_schema? was not found
131
+ - #238 - warning: already initialized constant Karafka::Schemas::URI_SCHEMES
132
+
133
+ ## 1.0.0
134
+
135
+ ### Closed issues:
136
+
137
+ - #103 - Env for logger is loaded 2 early (on gem load not on app init)
138
+ - #142 - Possibility to better control Kafka consumers (consumer groups management)
139
+ - #150 - Add support for start_from_beginning on a per topic basis
140
+ - #154 - Support for min_bytes and max_wait_time on messages consuming
141
+ - #160 - Reorganize settings to better resemble ruby-kafka requirements
142
+ - #164 - If we decide to have configuration per topic, topic uniqueness should be removed
143
+ - #165 - Router validator
144
+ - #166 - Params and route reorganization (new API)
145
+ - #167 - Remove Sidekiq UI from Karafka
146
+ - #168 - Introduce unique IDs of routes
147
+ - #171 - Add kafka message metadata to params
148
+ - #176 - Transform Karafka::Connection::Consumer into a module
149
+ - #177 - Monitor not reacting when kafka killed with -9
150
+ - #175 - Allow single consumer to subscribe to multiple topics
151
+ - #178 - Remove parsing failover when cannot unparse data
152
+ - #174 - Extended config validation
153
+ - ~~#180 - Switch from JSON parser to yajl-ruby~~
154
+ - #181 - When responder is defined and not used due to ```respond_with``` not being triggered in the perform, it won't raise an exception.
155
+ - #188 - Rename name in config to client id
156
+ - #186 - Support ruby-kafka ```ssl_ca_cert_file_path``` config
157
+ - #189 - karafka console does not preserve history on exit
158
+ - #191 - Karafka 0.6.0rc1 does not work with jruby / now it does :-)
159
+ - Switch to multi json so everyone can use their favourite JSON parser
160
+ - Added jruby support in general and in Travis
161
+ - #196 - Topic mapper does not map topics when subscribing thanks to @webandtech
162
+ #96 - Karafka server - possibility to run it only for certain topics
163
+ - ~~karafka worker cli option is removed (please use sidekiq directly)~~ - restored, bad idea
164
+ - (optional) pausing upon processing failures ```pause_timeout```
165
+ - Karafka console main process no longer intercepts irb errors
166
+ - Wiki updates
167
+ - #204 - Long running controllers
168
+ - Better internal API to handle multiple usage cases using ```Karafka::Controllers::Includer```
169
+ - #207 - Rename before_enqueued to after_received
170
+ - #147 - Deattach Karafka from Sidekiq by extracting Sidekiq backend
171
+
172
+ ### New features and improvements
173
+
174
+ - batch processing thanks to ```#batch_consuming``` flag and ```#params_batch``` on controllers
175
+ - ```#topic``` method on an controller instance to make a clear distinction in between params and route details
176
+ - Changed routing model (still compatible with 0.5) to allow better resources management
177
+ - Lower memory requirements due to object creation limitation (2-3 times less objects on each new message)
178
+ - Introduced the ```#batch_consuming``` config flag (config for #126) that can be set per each consumer_group
179
+ - Added support for partition, offset and partition key in the params hash
180
+ - ```name``` option in config renamed to ```client_id```
181
+ - Long running controllers with ```persistent``` flag on a topic config level, to make controller instances persistent between messages batches (single controller instance per topic per partition no per messages batch) - turned on by default
182
+
183
+ ### Incompatibilities
184
+
185
+ - Default boot file is renamed from app.rb to karafka.rb
186
+ - Removed worker glass as dependency (now and independent gem)
187
+ - ```kafka.hosts``` option renamed to ```kafka.seed_brokers``` - you don't need to provide all the hosts to work with Kafka
188
+ - ```start_from_beginning``` moved into kafka scope (```kafka.start_from_beginning```)
189
+ - Router no longer checks for route uniqueness - now you can define same routes for multiple kafkas and do a lot of crazy stuff, so it's your responsibility to check uniqueness
190
+ - Change in the way we identify topics in between Karafka and Sidekiq workers. If you upgrade, please make sure, all the jobs scheduled in Sidekiq are finished before the upgrade.
191
+ - ```batch_mode``` renamed to ```batch_fetching```
192
+ - Renamed content to value to better resemble ruby-kafka internal messages naming convention
193
+ - When having a responder with ```required``` topics and not using ```#respond_with``` at all, it will raise an exception
194
+ - Renamed ```inline_mode``` to ```inline_processing``` to resemble other settings conventions
195
+ - Renamed ```inline_processing``` to ```backend``` to reach 1.0 future compatibility
196
+ - Single controller **needs** to be used for a single topic consumption
197
+ - Renamed ```before_enqueue``` to ```after_received``` to better resemble internal logic, since for inline backend, there is no enqueue.
198
+ - Due to the level on which topic and controller are related (class level), the dynamic worker selection is no longer available.
199
+ - Renamed params #retrieve to params #retrieve! to better reflect what it does
200
+
201
+ ### Other changes
202
+ - PolishGeeksDevTools removed (in favour of Coditsu)
203
+ - Waaaaaay better code quality thanks to switching from dev tools to Coditsu
204
+ - Gem bump
205
+ - Cleaner internal API
206
+ - SRP
207
+ - Better settings proxying and management between ruby-kafka and karafka
208
+ - All internal validations are now powered by dry-validation
209
+ - Better naming conventions to reflect Kafka reality
210
+ - Removed Karafka::Connection::Message in favour of direct message details extraction from Kafka::FetchedMessage
211
+
212
+ ## 0.5.0.3
213
+ - #132 - When Kafka is gone, should reconnect after a time period
214
+ - #136 - new ruby-kafka version + other gem bumps
215
+ - ruby-kafka update
216
+ - #135 - NonMatchingRouteError - better error description in the code
217
+ - #140 - Move Capistrano Karafka to a different specific gem
218
+ - #110 - Add call method on a responder class to alias instance build and call
219
+ - #76 - Configs validator
220
+ - #138 - Possibility to have no worker class defined if inline_mode is being used
221
+ - #145 - Topic Mapper
222
+ - Ruby update to 2.4.1
223
+ - Gem bump x2
224
+ - #158 - Update docs section on heroku usage
225
+ - #150 - Add support for start_from_beginning on a per topic basis
226
+ - #148 - Lower Karafka Sidekiq dependency
227
+ - Allow karafka root to be specified from ENV
228
+ - Handle SIGTERM as a shutdown command for kafka server to support Heroku deployment
229
+
230
+ ## 0.5.0.2
231
+ - Gems update x3
232
+ - Default Ruby set to 2.3.3
233
+ - ~~Default Ruby set to 2.4.0~~
234
+ - Readme updates to match bug fixes and resolved issues
235
+ - #95 - Allow options into responder
236
+ - #98 - Use parser when responding on a topic
237
+ - #114 - Option to configure waterdrop connection pool timeout and concurrency
238
+ - #118 - Added dot in topic validation format
239
+ - #119 - add support for authentication using SSL
240
+ - #121 - JSON as a default for standalone responders usage
241
+ - #122 - Allow on capistrano role customization
242
+ - #125 - Add support to batch incoming messages
243
+ - #130 - start_from_beginning flag on routes and default
244
+ - #128 - Monitor caller_label not working with super on inheritance
245
+ - Renamed *inline* to *inline_mode* to stay consistent with flags that change the way karafka works (#125)
246
+ - Dry-configurable bump to 0.5 with fixed proc value evaluation on retrieve patch (internal change)
247
+
248
+ ## 0.5.0.1
249
+ - Fixed inconsistency in responders non-required topic definition. Now only required: false available
250
+ - #101 - Responders fail when multiple_usage true and required false
251
+ - fix error on startup from waterdrop #102
252
+ - Waterdrop 0.3.2.1 with kafka.hosts instead of kafka_hosts
253
+ - #105 - Karafka::Monitor#caller_label not working with inherited monitors
254
+ - #99 - Standalone mode (without Sidekiq)
255
+ - #97 - Buffer responders single topics before send (prevalidation)
256
+ - Better control over consumer thanks to additional config options
257
+ - #111 - Dynamic worker assignment based on the income params
258
+ - Long shutdown time fix
259
+
260
+ ## 0.5.0
261
+ - Removed Zookeeper totally as dependency
262
+ - Better group and partition rebalancing
263
+ Automatic thread management (no need for tuning) - each topic is a separate actor/thread
264
+ - Moved from Poseidon into Ruby-Kafka
265
+ - No more max_concurrency setting
266
+ - After you define your App class and routes (and everything else) you need to add execute App.boot!
267
+ - Manual consuming is no longer available (no more karafka consume command)
268
+ - Karafka topics CLI is no longer available. No Zookeeper - no global topic discovery
269
+ - Dropped ZK as dependency
270
+ - karafka info command no longer prints details about Zookeeper
271
+ - Better shutdown
272
+ - No more autodiscovery via Zookeeper - instead, the whole cluster will be discovered directly from Kafka
273
+ - No more support for Kafka 0.8
274
+ - Support for Kafka 0.9
275
+ - No more need for ActorCluster, since now we have a single thread (and Kafka connection) per topic
276
+ - Ruby 2.2.* support dropped
277
+ - Using App name as a Kafka client_id
278
+ - Automatic Capistrano integration
279
+ - Responders support for handling better responses pipelining and better responses flow description and design (see README for more details)
280
+ - Gem bump
281
+ - Readme updates
282
+ - karafka flow CLI command for printing the application flow
283
+ - Some internal refactorings
284
+
285
+ ## 0.4.2
286
+ - #87 - Reconsume mode with crone for better Rails/Rack integration
287
+ - Moved Karafka server related stuff into separate Karafka::Server class
288
+ - Renamed Karafka::Runner into Karafka::Fetcher
289
+ - Gem bump
290
+ - Added chroot option to Zookeeper options
291
+ - Moved BROKERS_PATH into config from constant
292
+ - Added Karafka consume CLI action for a short running single consumption round
293
+ - Small fixes to close broken connections
294
+ - Readme updates
295
+
296
+ ## 0.4.1
297
+ - Explicit throw(:abort) required to halt before_enqueue (like in Rails 5)
298
+ - #61 - Autodiscover Kafka brokers based on Zookeeper data
299
+ - #63 - Graceful shutdown with current offset state during data processing
300
+ - #65 - Example of NewRelic monitor is outdated
301
+ - #71 - Setup should be executed after user code is loaded
302
+ - Gem bump x3
303
+ - Rubocop remarks
304
+ - worker_timeout config option has been removed. It now needs to be defined manually by the framework user because WorkerGlass::Timeout can be disabled and we cannot use Karafka settings on a class level to initialize user code stuff
305
+ - Moved setup logic under setup/Setup namespace
306
+ - Better defaults handling
307
+ - #75 - Kafka and Zookeeper options as a hash
308
+ - #82 - Karafka autodiscovery fails upon caching of configs
309
+ - #81 - Switch config management to dry configurable
310
+ - Version fix
311
+ - Dropped support for Ruby 2.1.*
312
+ - Ruby bump to 2.3.1
313
+
314
+ ## 0.4.0
315
+ - Added WaterDrop gem with default configuration
316
+ - Refactoring of config logic to simplify adding new dependencies that need to be configured based on #setup data
317
+ - Gem bump
318
+ - Readme updates
319
+ - Renamed cluster to actor_cluster for method names
320
+ - Replaced SidekiqGlass with generic WorkerGlass lib
321
+ - Application bootstrap in app.rb no longer required
322
+ - Karafka.boot needs to be executed after all the application files are loaded (template updated)
323
+ - Small loader refactor (no API changes)
324
+ - Ruby 2.3.0 support (default)
325
+ - No more rake tasks
326
+ - Karafka CLI instead of rake tasks
327
+ - Worker cli command allows passing additional options directly to Sidekiq
328
+ - Renamed concurrency to max_concurrency - it describes better what happens - Karafka will use this number of threads only when required
329
+ - Added wait_timeout that allows us to tune how long should we wait on a single socket connection (single topic) for new messages before going to next one (this applies to each thread separately)
330
+ - Rubocop remarks
331
+ - Removed Sinatra and Puma dependencies
332
+ - Karafka Cli internal reorganization
333
+ - Karafka Cli routes task
334
+ - #37 - warn log for failed parsing of a message
335
+ - #43 - wrong constant name
336
+ - #44 - Method name conflict
337
+ - #48 - Cannot load such file -- celluloid/current
338
+ - #46 - Loading application
339
+ - #45 - Set up monitor in config
340
+ - #47 - rake karafka:run uses app.rb only
341
+ - #53 - README update with Sinatra/Rails integration description
342
+ - #41 - New Routing engine
343
+ - #54 - Move Karafka::Workers::BaseWorker to Karafka::BaseWorker
344
+ - #55 - ApplicationController and ApplicationWorker
345
+
346
+ ## 0.3.2
347
+ - Karafka::Params::Params lazy load merge keys with string/symbol names priorities fix
348
+
349
+ ## 0.3.1
350
+ - Renamed Karafka::Monitor to Karafka::Process to represent a Karafka process wrapper
351
+ - Added Karafka::Monitoring that allows to add custom logging and monitoring with external libraries and systems
352
+ - Moved logging functionality into Karafka::Monitoring default monitoring
353
+ - Added possibility to provide own monitoring as long as in responds to #notice and #notice_error
354
+ Standardized logging format for all logs
355
+
356
+ ## 0.3.0
357
+ - Switched from custom ParserError for each parser to general catching of Karafka::Errors::ParseError and its descendants
358
+ - Gem bump
359
+ - Fixed #32 - now when using custom workers that does not inherit from Karafka::BaseWorker perform method is not required. Using custom workers means that the logic that would normally lie under #perform, needs to be executed directly from the worker.
360
+ - Fixed #31 - Technically didn't fix because this is how Sidekiq is meant to work, but provided possibility to assign custom interchangers that allow to bypass JSON encoding issues by converting data that goes to Redis to a required format (and parsing it back when it is fetched)
361
+ - Added full parameters lazy load - content is no longer loaded during #perform_async if params are not used in before_enqueue
362
+ - No more namespaces for Redis by default (use separate DBs)
363
+
364
+ ## 0.1.21
365
+ - Sidekiq 4.0.1 bump
366
+ - Gem bump
367
+ - Added direct celluloid requirement to Karafka (removed from Sidekiq)
368
+
369
+ ## 0.1.19
370
+ - Internal call - schedule naming change
371
+ - Enqueue to perform_async naming in controller to follow Sidekiqs naming convention
372
+ - Gem bump
373
+
374
+ ## 0.1.18
375
+ - Changed Redis configuration options into a single hash that is directly passed to Redis setup for Sidekiq
376
+ - Added config.ru to provide a Sidekiq web UI (see README for more details)
377
+
378
+ ## 0.1.17
379
+ - Changed Karafka::Connection::Cluster tp Karafka::Connection::ActorCluster to distinguish between a single thread actor cluster for multiple topic connection and a future feature that will allow process clusterization.
380
+ - Add an ability to use user-defined parsers for a messages
381
+ - Lazy load params for before callbacks
382
+ Automatic loading/initializing all worker classes during startup (so Sidekiq won't fail with unknown workers exception)
383
+ - Params are now private to controller
384
+ - Added bootstrap method to app.rb
385
+
386
+ ## 0.1.16
387
+ Cluster level error catching for all exceptions so actor is not killed
388
+ - Cluster level error logging
389
+ - Listener refactoring (QueueConsumer extracted)
390
+ - Karafka::Connection::QueueConsumer to wrap around fetching logic - technically we could replace Kafka with any other messaging engine as long as we preserve the same API
391
+ - Added debug env for debugging purpose in applications
392
+
393
+ ## 0.1.15
394
+ - Fixed max_wait_ms vs socket_timeout_ms issue
395
+ - Fixed closing queue connection after Poseidon::Errors::ProtocolError failure
396
+ - Fixed wrong logging file selection based on env
397
+ - Extracted Karafka::Connection::QueueConsumer object to wrap around queue connection
398
+
399
+ ## 0.1.14
400
+ - Rake tasks for listing all the topics on Kafka server (rake kafka:topics)
401
+
402
+ ## 0.1.13
403
+ - Ability to assign custom workers and use them bypassing Karafka::BaseWorker (or its descendants)
404
+ - Gem bump
405
+
406
+ ## 0.1.12
407
+ - All internal errors went to Karafka::Errors namespace
408
+
409
+ ## 0.1.11
410
+ - Rescuing all the "before Sidekiq" processing so errors won't affect other incoming messages
411
+ - Fixed dying actors after connection error
412
+ - Added a new app status - "initializing"
413
+ - Karafka::Status model cleanup
414
+
415
+ ## 0.1.10
416
+ - Added possibility to specify redis namespace in configuration (failover to app name)
417
+ - Renamed redis_host to redis_url in configuration
418
+
419
+ ## 0.1.9
420
+ - Added worker logger
421
+
422
+ ## 0.1.8
423
+ Dropped local env support in favour of [Envlogic](https://github.com/karafka/envlogic) - no changes in API
424
+
425
+ ## 0.1.7
426
+ - Karafka option for Redis hosts (not localhost only)
427
+
428
+ ## 0.1.6
429
+ Added better concurrency by clusterization of listeners
430
+ - Added graceful shutdown
431
+ Added concurrency that allows handling bigger applications with celluloid
432
+ - Karafka controllers no longer require group to be defined (created based on the topic and app name)
433
+ - Karafka controllers no longer require topic to be defined (created based on the controller name)
434
+ - Readme updates
435
+
436
+ ## 0.1.5
437
+ - Celluloid support for listeners
438
+ - Multi target logging (STDOUT and file)
439
+
440
+ ## 0.1.4
441
+ - Renamed events to messages to follow Apache Kafka naming convention
442
+
443
+ ## 0.1.3
444
+ - Karafka::App.logger moved to Karafka.logger
445
+ - README updates (Usage section was added)
446
+
447
+ ## 0.1.2
448
+ - Logging to log/environment.log
449
+ - Karafka::Runner
450
+
451
+ ## 0.1.1
452
+ - README updates
453
+ - Raketasks updates
454
+ - Rake installation task
455
+ - Changelog file added
456
+
457
+ ## 0.1.0
458
+ - Initial framework code