karafka 1.3.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (99) hide show
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +11 -0
  6. data/.github/FUNDING.yml +3 -0
  7. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  8. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  9. data/.gitignore +69 -0
  10. data/.rspec +1 -0
  11. data/.ruby-gemset +1 -0
  12. data/.ruby-version +1 -0
  13. data/.travis.yml +36 -0
  14. data/CHANGELOG.md +520 -0
  15. data/CODE_OF_CONDUCT.md +46 -0
  16. data/CONTRIBUTING.md +41 -0
  17. data/Gemfile +12 -0
  18. data/Gemfile.lock +137 -0
  19. data/MIT-LICENCE +18 -0
  20. data/README.md +101 -0
  21. data/bin/karafka +19 -0
  22. data/certs/mensfeld.pem +25 -0
  23. data/config/errors.yml +39 -0
  24. data/karafka.gemspec +44 -0
  25. data/lib/karafka.rb +71 -0
  26. data/lib/karafka/app.rb +53 -0
  27. data/lib/karafka/attributes_map.rb +68 -0
  28. data/lib/karafka/backends/inline.rb +16 -0
  29. data/lib/karafka/base_consumer.rb +57 -0
  30. data/lib/karafka/base_responder.rb +226 -0
  31. data/lib/karafka/cli.rb +54 -0
  32. data/lib/karafka/cli/base.rb +78 -0
  33. data/lib/karafka/cli/console.rb +31 -0
  34. data/lib/karafka/cli/flow.rb +45 -0
  35. data/lib/karafka/cli/info.rb +31 -0
  36. data/lib/karafka/cli/install.rb +64 -0
  37. data/lib/karafka/cli/server.rb +71 -0
  38. data/lib/karafka/code_reloader.rb +67 -0
  39. data/lib/karafka/connection/api_adapter.rb +155 -0
  40. data/lib/karafka/connection/batch_delegator.rb +51 -0
  41. data/lib/karafka/connection/builder.rb +16 -0
  42. data/lib/karafka/connection/client.rb +117 -0
  43. data/lib/karafka/connection/listener.rb +71 -0
  44. data/lib/karafka/connection/message_delegator.rb +36 -0
  45. data/lib/karafka/consumers/callbacks.rb +71 -0
  46. data/lib/karafka/consumers/includer.rb +63 -0
  47. data/lib/karafka/consumers/metadata.rb +10 -0
  48. data/lib/karafka/consumers/responders.rb +24 -0
  49. data/lib/karafka/consumers/single_params.rb +15 -0
  50. data/lib/karafka/contracts.rb +10 -0
  51. data/lib/karafka/contracts/config.rb +21 -0
  52. data/lib/karafka/contracts/consumer_group.rb +206 -0
  53. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  54. data/lib/karafka/contracts/responder_usage.rb +54 -0
  55. data/lib/karafka/contracts/server_cli_options.rb +29 -0
  56. data/lib/karafka/errors.rb +51 -0
  57. data/lib/karafka/fetcher.rb +42 -0
  58. data/lib/karafka/helpers/class_matcher.rb +88 -0
  59. data/lib/karafka/helpers/config_retriever.rb +46 -0
  60. data/lib/karafka/helpers/inflector.rb +26 -0
  61. data/lib/karafka/helpers/multi_delegator.rb +32 -0
  62. data/lib/karafka/instrumentation/logger.rb +57 -0
  63. data/lib/karafka/instrumentation/monitor.rb +70 -0
  64. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  65. data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
  66. data/lib/karafka/params/builders/metadata.rb +33 -0
  67. data/lib/karafka/params/builders/params.rb +36 -0
  68. data/lib/karafka/params/builders/params_batch.rb +25 -0
  69. data/lib/karafka/params/metadata.rb +35 -0
  70. data/lib/karafka/params/params.rb +68 -0
  71. data/lib/karafka/params/params_batch.rb +61 -0
  72. data/lib/karafka/patches/ruby_kafka.rb +47 -0
  73. data/lib/karafka/persistence/client.rb +29 -0
  74. data/lib/karafka/persistence/consumers.rb +45 -0
  75. data/lib/karafka/persistence/topics.rb +48 -0
  76. data/lib/karafka/process.rb +60 -0
  77. data/lib/karafka/responders/builder.rb +36 -0
  78. data/lib/karafka/responders/topic.rb +55 -0
  79. data/lib/karafka/routing/builder.rb +89 -0
  80. data/lib/karafka/routing/consumer_group.rb +61 -0
  81. data/lib/karafka/routing/consumer_mapper.rb +34 -0
  82. data/lib/karafka/routing/proxy.rb +46 -0
  83. data/lib/karafka/routing/router.rb +29 -0
  84. data/lib/karafka/routing/topic.rb +62 -0
  85. data/lib/karafka/routing/topic_mapper.rb +53 -0
  86. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  87. data/lib/karafka/serialization/json/serializer.rb +31 -0
  88. data/lib/karafka/server.rb +83 -0
  89. data/lib/karafka/setup/config.rb +221 -0
  90. data/lib/karafka/setup/configurators/water_drop.rb +36 -0
  91. data/lib/karafka/setup/dsl.rb +21 -0
  92. data/lib/karafka/status.rb +29 -0
  93. data/lib/karafka/templates/application_consumer.rb.erb +7 -0
  94. data/lib/karafka/templates/application_responder.rb.erb +11 -0
  95. data/lib/karafka/templates/karafka.rb.erb +92 -0
  96. data/lib/karafka/version.rb +7 -0
  97. data/log/.gitkeep +0 -0
  98. metadata +336 -0
  99. metadata.gz.sig +0 -0
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: ffe10d1ca48b0b218191231ec6969f3a767f5a1a4010dca0ef00fa44c2eaab67
4
+ data.tar.gz: 27d1b52ba3782b562176b7a6de73563bdd48c250bc0576a1821c55bfcec07bff
5
+ SHA512:
6
+ metadata.gz: 85bdedfe0791c7d17abbc54ed49df83e2c9cbfb048124107d403559e72b2884909bcc5974c98a738744f80184315b2ee31cc8bd1048cc5a10230e6b6f0184e3d
7
+ data.tar.gz: 382074bee041ed27776571816a982fbd7f0af6b6f9cfa8fa393113acf1c2d4cbfb0a7573d2bcd16f2662667d1b8175546e281deb5a7b66ce30b5d9c4c19d005d
@@ -0,0 +1,2 @@
1
+ ���k�W%g
2
+ �9�RHj��ia/�ۋ#���O��#�K��rS�����d��!q�:�v��}��.��Th�-S�j�T$�.�3 �C+�
Binary file
@@ -0,0 +1,3 @@
1
+ repository_id: 'd4482d42-f6b5-44ba-a5e4-00989ac519ee'
2
+ api_key: <%= ENV['CODITSU_API_KEY'] %>
3
+ api_secret: <%= ENV['CODITSU_API_SECRET'] %>
@@ -0,0 +1,11 @@
1
+ # irbrc for Karafka console
2
+
3
+ IRB.conf[:AUTO_INDENT] = true
4
+ IRB.conf[:SAVE_HISTORY] = 1000
5
+ IRB.conf[:USE_READLINE] = true
6
+ IRB.conf[:HISTORY_FILE] = ".irb-history"
7
+ IRB.conf[:LOAD_MODULES] = [] unless IRB.conf.key?(:LOAD_MODULES)
8
+
9
+ unless IRB.conf[:LOAD_MODULES].include?('irb/completion')
10
+ IRB.conf[:LOAD_MODULES] << 'irb/completion'
11
+ end
@@ -0,0 +1,3 @@
1
+ # These are supported funding model platforms
2
+
3
+ open_collective: karafka
@@ -0,0 +1,50 @@
1
+ ---
2
+ name: Bug Report
3
+ about: Report an issue with Karafka you've discovered.
4
+ ---
5
+
6
+ *Be clear, concise and precise in your description of the problem.
7
+ Open an issue with a descriptive title and a summary in grammatically correct,
8
+ complete sentences.*
9
+
10
+ *Use the template below when reporting bugs. Please, make sure that
11
+ you're running the latest stable Karafka and that the problem you're reporting
12
+ hasn't been reported (and potentially fixed) already.*
13
+
14
+ *Before filing the ticket you should replace all text above the horizontal
15
+ rule with your own words.*
16
+
17
+ --------
18
+
19
+ ## Expected behavior
20
+
21
+ Describe here how you expected Karafka to behave in this particular situation.
22
+
23
+ ## Actual behavior
24
+
25
+ Describe here what actually happened.
26
+
27
+ ## Steps to reproduce the problem
28
+
29
+ This is extremely important! Providing us with a reliable way to reproduce
30
+ a problem will expedite its solution.
31
+
32
+ ## Your setup details
33
+
34
+ Please provide kafka version and the output of `karafka info` or `bundle exec karafka info` if using Bundler.
35
+
36
+ Here's an example:
37
+
38
+ ```
39
+ $ [bundle exec] karafka info
40
+ Karafka version: 1.3.0
41
+ Ruby version: 2.6.3
42
+ Ruby-kafka version: 0.7.9
43
+ Application client id: karafka-local
44
+ Backend: inline
45
+ Batch fetching: true
46
+ Batch consuming: true
47
+ Boot file: /app/karafka/karafka.rb
48
+ Environment: development
49
+ Kafka seed brokers: ["kafka://kafka:9092"]
50
+ ```
@@ -0,0 +1,20 @@
1
+ ---
2
+ name: Feature Request
3
+ about: Suggest new Karafka features or improvements to existing features.
4
+ ---
5
+
6
+ ## Is your feature request related to a problem? Please describe.
7
+
8
+ A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
9
+
10
+ ## Describe the solution you'd like
11
+
12
+ A clear and concise description of what you want to happen.
13
+
14
+ ## Describe alternatives you've considered
15
+
16
+ A clear and concise description of any alternative solutions or features you've considered.
17
+
18
+ ## Additional context
19
+
20
+ Add any other context or screenshots about the feature request here.
@@ -0,0 +1,69 @@
1
+ # bundler state
2
+ /.bundle
3
+ /vendor/bundle/
4
+ /vendor/ruby/
5
+ /ruby/
6
+ app.god
7
+
8
+ # minimal Rails specific artifacts
9
+ /.coditsu/local.yml
10
+ db/*.sqlite3
11
+ /log/development.log
12
+ /log/production.log
13
+ /log/test.log
14
+ /tmp/*
15
+ *.gem
16
+ *.~
17
+
18
+ # various artifacts
19
+ **.war
20
+ *.rbc
21
+ *.sassc
22
+ .byebug_history
23
+ .redcar/
24
+ .capistrano/
25
+ .sass-cache
26
+ /config/god/sidekiq.rb
27
+ /config/puma.rb
28
+ /coverage.data
29
+ /coverage/
30
+ /doc/api/
31
+ /doc/app/
32
+ /doc/yard
33
+ /doc/features.html
34
+ /doc/specs.html
35
+ /spec/tmp/*
36
+ /cache
37
+ /capybara*
38
+ /capybara-*.html
39
+ /gems
40
+ /specifications
41
+ rerun.txt
42
+ pickle-email-*.html
43
+
44
+ # If you find yourself ignoring temporary files generated by your text editor
45
+ # or operating system, you probably want to add a global ignore instead:
46
+ # git config --global core.excludesfile ~/.gitignore_global
47
+ #
48
+ # Here are some files you may want to ignore globally:
49
+
50
+ # scm revert files
51
+ **.orig
52
+
53
+ # Mac finder artifacts
54
+ .DS_Store
55
+
56
+ # Netbeans project directory
57
+ /nbproject
58
+
59
+ # RubyMine project files
60
+ .idea
61
+
62
+ # Textmate project files
63
+ /*.tmproj
64
+
65
+ # vim artifacts
66
+ **.swp
67
+
68
+ # documentation
69
+ .yardoc
data/.rspec ADDED
@@ -0,0 +1 @@
1
+ --require spec_helper
@@ -0,0 +1 @@
1
+ karafka
@@ -0,0 +1 @@
1
+ 2.6.3
@@ -0,0 +1,36 @@
1
+ services:
2
+ - docker
3
+
4
+ dist: trusty
5
+ sudo: false
6
+ cache: bundler
7
+
8
+ git:
9
+ depth: false
10
+
11
+ test: &test
12
+ stage: Test
13
+ language: ruby
14
+ before_install:
15
+ - gem install bundler
16
+ - gem update --system
17
+ script: bundle exec rspec
18
+
19
+ jobs:
20
+ include:
21
+ - <<: *test
22
+ rvm: 2.6.3
23
+ - <<: *test
24
+ rvm: 2.5.5
25
+
26
+ - stage: coditsu
27
+ language: ruby
28
+ rvm: 2.6.3
29
+ before_install:
30
+ - gem update --system
31
+ - gem install bundler
32
+ script: \curl -sSL https://api.coditsu.io/run/ci | bash
33
+
34
+ stages:
35
+ - coditsu
36
+ - test
@@ -0,0 +1,520 @@
1
+ # Karafka framework changelog
2
+
3
+ ## 1.3.0 (2019-09-09)
4
+ - Drop support for Ruby 2.4
5
+ - YARD docs tags cleanup
6
+
7
+ ## 1.3.0.rc1 (2019-07-31)
8
+ - Drop support for Kafka 0.10 in favor of native support for Kafka 0.11.
9
+ - Update ruby-kafka to the 0.7 version
10
+ - Support messages headers receiving
11
+ - Message bus unification
12
+ - Parser available in metadata
13
+ - Cleanup towards moving to a non-global state app management
14
+ - Drop Ruby 2.3 support
15
+ - Support for Ruby 2.6.3
16
+ - `Karafka::Loader` has been removed in favor of Zeitwerk
17
+ - Schemas are now contracts
18
+ - #393 - Reorganize responders - removed `multiple_usage` constrain
19
+ - #388 - ssl_client_cert_chain sync
20
+ - #300 - Store value in a value key and replace its content with parsed version - without root merge
21
+ - #331 - Disallow building groups without topics
22
+ - #340 - Instrumentation unification. Better and more consistent naming
23
+ - #340 - Procline instrumentation for a nicer process name
24
+ - #342 - Change default for `fetcher_max_queue_size` from `100` to `10` to lower max memory usage
25
+ - #345 - Cleanup exceptions names
26
+ - #341 - Split connection delegator into batch delegator and single_delegator
27
+ - #351 - Rename `#retrieve!` to `#parse!` on params and `#parsed` to `parse!` on params batch.
28
+ - #351 - Adds '#first' for params_batch that returns parsed first element from the params_batch object.
29
+ - #360 - Single params consuming mode automatically parses data specs
30
+ - #359 - Divide mark_as_consumed into mark_as_consumed and mark_as_consumed!
31
+ - #356 - Provide a `#values` for params_batch to extract only values of objects from the params_batch
32
+ - #363 - Too shallow ruby-kafka version lock
33
+ - #354 - Expose consumer heartbeat
34
+ - #377 - Remove the persistent setup in favor of persistence
35
+ - #375 - Sidekiq Backend parser mismatch
36
+ - #369 - Single consumer can support more than one topic
37
+ - #288 - Drop dependency on `activesupport` gem
38
+ - #371 - SASL over SSL
39
+ - #392 - Move params redundant data to metadata
40
+ - #335 - Metadata access from within the consumer
41
+ - #402 - Delayed reconnection upon critical failures
42
+ - #405 - `reconnect_timeout` value is now being validated
43
+ - #437 - Specs ensuring that the `#437` won't occur in the `1.3` release
44
+ - #426 - ssl client cert key password
45
+ - #444 - add certificate and private key validation
46
+ - #460 - Decouple responder "parser" (generator?) from topic.parser (benissimo)
47
+ - #463 - Split parsers into serializers / deserializers
48
+ - #473 - Support SASL OAuthBearer Authentication
49
+ - #475 - Disallow subscribing to the same topic with multiple consumers
50
+ - #485 - Setting shutdown_timeout to nil kills the app without waiting for anything
51
+ - #487 - Make listeners as instances
52
+ - #29 - Consumer class names must have the word "Consumer" in it in order to work (Sidekiq backend)
53
+ - #491 - irb is missing for console to work
54
+ - #502 - Karafka process hangs when sending multiple sigkills
55
+ - #506 - ssl_verify_hostname sync
56
+ - #483 - Upgrade dry-validation before releasing 1.3
57
+ - #492 - Use Zeitwerk for code reload in development
58
+ - #508 - Reset the consumers instances upon reconnecting to a cluster
59
+ - [#530](https://github.com/karafka/karafka/pull/530) - expose ruby and ruby-kafka version
60
+ - [534](https://github.com/karafka/karafka/pull/534) - Allow to use headers in the deserializer object
61
+
62
+ ## 1.2.11
63
+ - [#470](https://github.com/karafka/karafka/issues/470) Karafka not working with dry-configurable 0.8
64
+
65
+ ## 1.2.10
66
+ - [#453](https://github.com/karafka/karafka/pull/453) require `Forwardable` module
67
+
68
+ ## 1.2.9
69
+ - Critical exceptions now will cause consumer to stop instead of retrying without a break
70
+ - #412 - Fix dry-inflector dependency lock in gemspec
71
+ - #414 - Backport to 1.2 the delayed retry upon failure
72
+ - #437 - Raw message is no longer added to params after ParserError raised
73
+
74
+ ## 1.2.8
75
+ - #408 - Responder Topic Lookup Bug on Heroku
76
+
77
+ ## 1.2.7
78
+ - Unlock Ruby-kafka version with a warning
79
+
80
+ ## 1.2.6
81
+ - Lock WaterDrop to 1.2.3
82
+ - Lock Ruby-Kafka to 0.6.x (support for 0.7 will be added in Karafka 1.3)
83
+ - #382 - Full logging with AR, etc for development mode when there is Rails integration
84
+
85
+ ## 1.2.5
86
+ - #354 - Expose consumer heartbeat
87
+ - #373 - Async producer not working properly with responders
88
+
89
+ ## 1.2.4
90
+ - #332 - Fetcher for max queue size
91
+
92
+ ## 1.2.3
93
+ - #313 - support PLAINTEXT and SSL for scheme
94
+ - #288 - drop activesupport callbacks in favor of notifications
95
+ - #320 - Pausing indefinetely with nil pause timeout doesn't work
96
+ - #318 - Partition pausing doesn't work with custom topic mappers
97
+ - Rename ConfigAdapter to ApiAdapter to better reflect what it does
98
+ - #317 - Manual offset committing doesn't work with custom topic mappers
99
+ - #319 - Support for exponential backoff in pause
100
+
101
+ ## 1.2.2
102
+ - #312 - Broken for ActiveSupport 5.2.0
103
+
104
+ ## 1.2.1
105
+ - #304 - Unification of error instrumentation event details
106
+ - #306 - Using file logger from within a trap context upon shutdown is impossible
107
+
108
+ ## 1.2.0
109
+ - Spec improvements
110
+ - #260 - Specs missing randomization
111
+ - #251 - Shutdown upon non responding (unreachable) cluster is not possible
112
+ - #258 - Investigate lowering requirements on activesupport
113
+ - #246 - Alias consumer#mark_as_consumed on controller
114
+ - #259 - Allow forcing key/partition key on responders
115
+ - #267 - Styling inconsistency
116
+ - #242 - Support setting the max bytes to fetch per request
117
+ - #247 - Support SCRAM once released
118
+ - #271 - Provide an after_init option to pass a configuration block
119
+ - #262 - Error in the monitor code for NewRelic
120
+ - #241 - Performance metrics
121
+ - #274 - Rename controllers to consumers
122
+ - #184 - Seek to
123
+ - #284 - Dynamic Params parent class
124
+ - #275 - ssl_ca_certs_from_system
125
+ - #296 - Instrument forceful exit with an error
126
+ - Replaced some of the activesupport parts with dry-inflector
127
+ - Lower ActiveSupport dependency
128
+ - Remove configurators in favor of the after_init block configurator
129
+ - Ruby 2.5.0 support
130
+ - Renamed Karafka::Connection::Processor to Karafka::Connection::Delegator to match incoming naming conventions
131
+ - Renamed Karafka::Connection::Consumer to Karafka::Connection::Client due to #274
132
+ - Removed HashWithIndifferentAccess in favor of a regular hash
133
+ - JSON parsing defaults now to string keys
134
+ - Lower memory usage due to less params data internal details
135
+ - Support multiple ```after_init``` blocks in favor of a single one
136
+ - Renamed ```received_at``` to ```receive_time``` to follow ruby-kafka and WaterDrop conventions
137
+ - Adjust internal setup to easier map Ruby-Kafka config changes
138
+ - System callbacks reorganization
139
+ - Added ```before_fetch_loop``` configuration block for early client usage (```#seek```, etc)
140
+ - Renamed ```after_fetched``` to ```after_fetch``` to normalize the naming convention
141
+ - Instrumentation on a connection delegator level
142
+ - Added ```params_batch#last``` method to retrieve last element after unparsing
143
+ - All params keys are now strings
144
+
145
+ ## 1.1.2
146
+ - #256 - Default kafka.seed_brokers configuration is created in invalid format
147
+
148
+ ## 1.1.1
149
+ - #253 - Allow providing a global per app parser in config settings
150
+
151
+ ## 1.1.0
152
+ - Gem bump
153
+ - Switch from Celluloid to native Thread management
154
+ - Improved shutdown process
155
+ - Introduced optional fetch callbacks and moved current the ```after_received``` there as well
156
+ - Karafka will raise Errors::InvalidPauseTimeout exception when trying to pause but timeout set to 0
157
+ - Allow float for timeouts and other time based second settings
158
+ - Renamed MessagesProcessor to Processor and MessagesConsumer to Consumer - we don't process and don't consumer anything else so it was pointless to keep this "namespace"
159
+ - #232 - Remove unused ActiveSupport require
160
+ - #214 - Expose consumer on a controller layer
161
+ - #193 - Process shutdown callbacks
162
+ - Fixed accessibility of ```#params_batch``` from the outside of the controller
163
+ - connection_pool config options are no longer required
164
+ - celluloid config options are no longer required
165
+ - ```#perform``` is now renamed to ```#consume``` with warning level on using the old one (deprecated)
166
+ - #235 - Rename perform to consume
167
+ - Upgrade to ruby-kafka 0.5
168
+ - Due to redesign of Waterdrop concurrency setting is no longer needed
169
+ - #236 - Manual offset management
170
+ - WaterDrop 1.0.0 support with async
171
+ - Renamed ```batch_consuming``` option to ```batch_fetching``` as it is not a consumption (with processing) but a process of fetching messages from Kafka. The messages is considered consumed, when it is processed.
172
+ - Renamed ```batch_processing``` to ```batch_consuming``` to resemble Kafka concept of consuming messages.
173
+ - Renamed ```after_received``` to ```after_fetched``` to normalize the naming conventions.
174
+ - Responders support the per topic ```async``` option.
175
+
176
+ ## 1.0.1
177
+ - #210 - LoadError: cannot load such file -- [...]/karafka.rb
178
+ - Ruby 2.4.2 as a default (+travis integration)
179
+ - JRuby upgrade
180
+ - Expanded persistence layer (moved to a namespace for easier future development)
181
+ - #213 - Misleading error when non-existing dependency is required
182
+ - #212 - Make params react to #topic, #partition, #offset
183
+ - #215 - Consumer group route dynamic options are ignored
184
+ - #217 - check RUBY_ENGINE constant if RUBY_VERSION is missing (#217)
185
+ - #218 - add configuration setting to control Celluloid's shutdown timeout
186
+ - Renamed Karafka::Routing::Mapper to Karafka::Routing::TopicMapper to match naming conventions
187
+ - #219 - Allow explicit consumer group names, without prefixes
188
+ - Fix to early removed pid upon shutdown of demonized process
189
+ - max_wait_time updated to match https://github.com/zendesk/ruby-kafka/issues/433
190
+ - #230 - Better uri validation for seed brokers (incompatibility as the kafka:// or kafka+ssl:// is required)
191
+ - Small internal docs fixes
192
+ - Dry::Validation::MissingMessageError: message for broker_schema? was not found
193
+ - #238 - warning: already initialized constant Karafka::Schemas::URI_SCHEMES
194
+
195
+ ## 1.0.0
196
+
197
+ ### Closed issues:
198
+
199
+ - #103 - Env for logger is loaded 2 early (on gem load not on app init)
200
+ - #142 - Possibility to better control Kafka consumers (consumer groups management)
201
+ - #150 - Add support for start_from_beginning on a per topic basis
202
+ - #154 - Support for min_bytes and max_wait_time on messages consuming
203
+ - #160 - Reorganize settings to better resemble ruby-kafka requirements
204
+ - #164 - If we decide to have configuration per topic, topic uniqueness should be removed
205
+ - #165 - Router validator
206
+ - #166 - Params and route reorganization (new API)
207
+ - #167 - Remove Sidekiq UI from Karafka
208
+ - #168 - Introduce unique IDs of routes
209
+ - #171 - Add kafka message metadata to params
210
+ - #176 - Transform Karafka::Connection::Consumer into a module
211
+ - #177 - Monitor not reacting when kafka killed with -9
212
+ - #175 - Allow single consumer to subscribe to multiple topics
213
+ - #178 - Remove parsing failover when cannot unparse data
214
+ - #174 - Extended config validation
215
+ - ~~#180 - Switch from JSON parser to yajl-ruby~~
216
+ - #181 - When responder is defined and not used due to ```respond_with``` not being triggered in the perform, it won't raise an exception.
217
+ - #188 - Rename name in config to client id
218
+ - #186 - Support ruby-kafka ```ssl_ca_cert_file_path``` config
219
+ - #189 - karafka console does not preserve history on exit
220
+ - #191 - Karafka 0.6.0rc1 does not work with jruby / now it does :-)
221
+ - Switch to multi json so everyone can use their favourite JSON parser
222
+ - Added jruby support in general and in Travis
223
+ - #196 - Topic mapper does not map topics when subscribing thanks to @webandtech
224
+ - #96 - Karafka server - possiblity to run it only for a certain topics
225
+ - ~~karafka worker cli option is removed (please use sidekiq directly)~~ - restored, bad idea
226
+ - (optional) pausing upon processing failures ```pause_timeout```
227
+ - Karafka console main process no longer intercepts irb errors
228
+ - Wiki updates
229
+ - #204 - Long running controllers
230
+ - Better internal API to handle multiple usage cases using ```Karafka::Controllers::Includer```
231
+ - #207 - Rename before_enqueued to after_received
232
+ - #147 - Deattach Karafka from Sidekiq by extracting Sidekiq backend
233
+
234
+ ### New features and improvements
235
+
236
+ - batch processing thanks to ```#batch_consuming``` flag and ```#params_batch``` on controllers
237
+ - ```#topic``` method on an controller instance to make a clear distinction in between params and route details
238
+ - Changed routing model (still compatible with 0.5) to allow better resources management
239
+ - Lower memory requirements due to object creation limitation (2-3 times less objects on each new message)
240
+ - Introduced the ```#batch_consuming``` config flag (config for #126) that can be set per each consumer_group
241
+ - Added support for partition, offset and partition key in the params hash
242
+ - ```name``` option in config renamed to ```client_id```
243
+ - Long running controllers with ```persistent``` flag on a topic config level, to make controller instances persistent between messages batches (single controller instance per topic per partition no per messages batch) - turned on by default
244
+
245
+ ### Incompatibilities
246
+
247
+ - Default boot file is renamed from app.rb to karafka.rb
248
+ - Removed worker glass as dependency (now and independent gem)
249
+ - ```kafka.hosts``` option renamed to ```kafka.seed_brokers``` - you don't need to provide all the hosts to work with Kafka
250
+ - ```start_from_beginning``` moved into kafka scope (```kafka.start_from_beginning```)
251
+ - Router no longer checks for route uniqueness - now you can define same routes for multiple kafkas and do a lot of crazy stuff, so it's your responsibility to check uniqueness
252
+ - Change in the way we identify topics in between Karafka and Sidekiq workers. If you upgrade, please make sure, all the jobs scheduled in Sidekiq are finished before the upgrade.
253
+ - ```batch_mode``` renamed to ```batch_fetching```
254
+ - Renamed content to value to better resemble ruby-kafka internal messages naming convention
255
+ - When having a responder with ```required``` topics and not using ```#respond_with``` at all, it will raise an exception
256
+ - Renamed ```inline_mode``` to ```inline_processing``` to resemble other settings conventions
257
+ - Renamed ```inline_processing``` to ```backend``` to reach 1.0 future compatibility
258
+ - Single controller **needs** to be used for a single topic consumption
259
+ - Renamed ```before_enqueue``` to ```after_received``` to better resemble internal logic, since for inline backend, there is no enqueue.
260
+ - Due to the level on which topic and controller are related (class level), the dynamic worker selection is no longer available.
261
+ - Renamed params #retrieve to params #retrieve! to better reflect what it does
262
+
263
+ ### Other changes
264
+ - PolishGeeksDevTools removed (in favour of Coditsu)
265
+ - Waaaaaay better code quality thanks to switching from dev tools to Coditsu
266
+ - Gem bump
267
+ - Cleaner internal API
268
+ - SRP
269
+ - Better settings proxying and management between ruby-kafka and karafka
270
+ - All internal validations are now powered by dry-validation
271
+ - Better naming conventions to reflect Kafka reality
272
+ - Removed Karafka::Connection::Message in favour of direct message details extraction from Kafka::FetchedMessage
273
+
274
+ ## 0.5.0.3
275
+ - #132 - When Kafka is gone, should reconnect after a time period
276
+ - #136 - new ruby-kafka version + other gem bumps
277
+ - ruby-kafka update
278
+ - #135 - NonMatchingRouteError - better error description in the code
279
+ - #140 - Move Capistrano Karafka to a different specific gem
280
+ - #110 - Add call method on a responder class to alias instance build and call
281
+ - #76 - Configs validator
282
+ - #138 - Possibility to have no worker class defined if inline_mode is being used
283
+ - #145 - Topic Mapper
284
+ - Ruby update to 2.4.1
285
+ - Gem bump x2
286
+ - #158 - Update docs section on heroku usage
287
+ - #150 - Add support for start_from_beginning on a per topic basis
288
+ - #148 - Lower Karafka Sidekiq dependency
289
+ - Allow karafka root to be specified from ENV
290
+ - Handle SIGTERM as a shutdown command for kafka server to support Heroku deployment
291
+
292
+ ## 0.5.0.2
293
+ - Gems update x3
294
+ - Default Ruby set to 2.3.3
295
+ - ~~Default Ruby set to 2.4.0~~
296
+ - Readme updates to match bug fixes and resolved issues
297
+ - #95 - Allow options into responder
298
+ - #98 - Use parser when responding on a topic
299
+ - #114 - Option to configure waterdrop connection pool timeout and concurrency
300
+ - #118 - Added dot in topic validation format
301
+ - #119 - add support for authentication using SSL
302
+ - #121 - JSON as a default for standalone responders usage
303
+ - #122 - Allow on capistrano role customization
304
+ - #125 - Add support to batch incoming messages
305
+ - #130 - start_from_beginning flag on routes and default
306
+ - #128 - Monitor caller_label not working with super on inheritance
307
+ - Renamed *inline* to *inline_mode* to stay consistent with flags that change the way karafka works (#125)
308
+ - Dry-configurable bump to 0.5 with fixed proc value evaluation on retrieve patch (internal change)
309
+
310
+ ## 0.5.0.1
311
+ - Fixed inconsistency in responders non-required topic definition. Now only required: false available
312
+ - #101 - Responders fail when multiple_usage true and required false
313
+ - fix error on startup from waterdrop #102
314
+ - Waterdrop 0.3.2.1 with kafka.hosts instead of kafka_hosts
315
+ - #105 - Karafka::Monitor#caller_label not working with inherited monitors
316
+ - #99 - Standalone mode (without Sidekiq)
317
+ - #97 - Buffer responders single topics before send (prevalidation)
318
+ - Better control over consumer thanks to additional config options
319
+ - #111 - Dynamic worker assignment based on the income params
320
+ - Long shutdown time fix
321
+
322
+ ## 0.5.0
323
+ - Removed Zookeeper totally as dependency
324
+ - Better group and partition rebalancing
325
+ - Automatic thread management (no need for tunning) - each topic is a separate actor/thread
326
+ - Moved from Poseidon into Ruby-Kafka
327
+ - No more max_concurrency setting
328
+ - After you define your App class and routes (and everything else) you need to add execute App.boot!
329
+ - Manual consuming is no longer available (no more karafka consume command)
330
+ - Karafka topics CLI is no longer available. No Zookeeper - no global topic discovery
331
+ - Dropped ZK as dependency
332
+ - karafka info command no longer prints details about Zookeeper
333
+ - Better shutdown
334
+ - No more autodiscovery via Zookeeper - instead, the whole cluster will be discovered directly from Kafka
335
+ - No more support for Kafka 0.8
336
+ - Support for Kafka 0.9
337
+ - No more need for ActorCluster, since now we have a single thread (and Kafka connection) per topic
338
+ - Ruby 2.2.* support dropped
339
+ - Using App name as a Kafka client_id
340
+ - Automatic Capistrano integration
341
+ - Responders support for handling better responses pipelining and better responses flow description and design (see README for more details)
342
+ - Gem bump
343
+ - Readme updates
344
+ - karafka flow CLI command for printing the application flow
345
+ - Some internal refactorings
346
+
347
+ ## 0.4.2
348
+ - #87 - Reconsume mode with crone for better Rails/Rack integration
349
+ - Moved Karafka server related stuff into separate Karafka::Server class
350
+ - Renamed Karafka::Runner into Karafka::Fetcher
351
+ - Gem bump
352
+ - Added chroot option to Zookeeper options
353
+ - Moved BROKERS_PATH into config from constant
354
+ - Added Karafka consume CLI action for a short running single consumption round
355
+ - Small fixes to close broken connections
356
+ - Readme updates
357
+
358
+ ## 0.4.1
359
+ - Explicit throw(:abort) required to halt before_enqueue (like in Rails 5)
360
+ - #61 - Autodiscover Kafka brokers based on Zookeeper data
361
+ - #63 - Graceful shutdown with current offset state during data processing
362
+ - #65 - Example of NewRelic monitor is outdated
363
+ - #71 - Setup should be executed after user code is loaded
364
+ - Gem bump x3
365
+ - Rubocop remarks
366
+ - worker_timeout config option has been removed. It now needs to be defined manually by the framework user because WorkerGlass::Timeout can be disabled and we cannot use Karafka settings on a class level to initialize user code stuff
367
+ - Moved setup logic under setup/Setup namespace
368
+ - Better defaults handling
369
+ - #75 - Kafka and Zookeeper options as a hash
370
+ - #82 - Karafka autodiscovery fails upon caching of configs
371
+ - #81 - Switch config management to dry configurable
372
+ - Version fix
373
+ - Dropped support for Ruby 2.1.*
374
+ - Ruby bump to 2.3.1
375
+
376
+ ## 0.4.0
377
+ - Added WaterDrop gem with default configuration
378
+ - Refactoring of config logic to simplify adding new dependencies that need to be configured based on #setup data
379
+ - Gem bump
380
+ - Readme updates
381
+ - Renamed cluster to actor_cluster for method names
382
+ - Replaced SidekiqGlass with generic WorkerGlass lib
383
+ - Application bootstrap in app.rb no longer required
384
+ - Karafka.boot needs to be executed after all the application files are loaded (template updated)
385
+ - Small loader refactor (no API changes)
386
+ - Ruby 2.3.0 support (default)
387
+ - No more rake tasks
388
+ - Karafka CLI instead of rake tasks
389
+ - Worker cli command allows passing additional options directly to Sidekiq
390
+ - Renamed concurrency to max_concurrency - it describes better what happens - Karafka will use this number of threads only when required
391
+ - Added wait_timeout that allows us to tune how long should we wait on a single socket connection (single topic) for new messages before going to next one (this applies to each thread separately)
392
+ - Rubocop remarks
393
+ - Removed Sinatra and Puma dependencies
394
+ - Karafka Cli internal reorganization
395
+ - Karafka Cli routes task
396
+ - #37 - warn log for failed parsing of a message
397
+ - #43 - wrong constant name
398
+ - #44 - Method name conflict
399
+ - #48 - Cannot load such file -- celluloid/current
400
+ - #46 - Loading application
401
+ - #45 - Set up monitor in config
402
+ - #47 - rake karafka:run uses app.rb only
403
+ - #53 - README update with Sinatra/Rails integration description
404
+ - #41 - New Routing engine
405
+ - #54 - Move Karafka::Workers::BaseWorker to Karafka::BaseWorker
406
+ - #55 - ApplicationController and ApplicationWorker
407
+
408
+ ## 0.3.2
409
+ - Karafka::Params::Params lazy load merge keys with string/symbol names priorities fix
410
+
411
+ ## 0.3.1
412
+ - Renamed Karafka::Monitor to Karafka::Process to represent a Karafka process wrapper
413
+ - Added Karafka::Monitoring that allows to add custom logging and monitoring with external libraries and systems
414
+ - Moved logging functionality into Karafka::Monitoring default monitoring
415
+ - Added possibility to provide own monitoring as long as it responds to #notice and #notice_error
416
+ - Standardized logging format for all logs
417
+
418
+ ## 0.3.0
419
+ - Switched from custom ParserError for each parser to general catching of Karafka::Errors::ParseError and its descendants
420
+ - Gem bump
421
+ - Fixed #32 - now when using custom workers that does not inherit from Karafka::BaseWorker perform method is not required. Using custom workers means that the logic that would normally lie under #perform, needs to be executed directly from the worker.
422
+ - Fixed #31 - Technically didn't fix because this is how Sidekiq is meant to work, but provided possibility to assign custom interchangers that allow to bypass JSON encoding issues by converting data that goes to Redis to a required format (and parsing it back when it is fetched)
423
+ - Added full parameters lazy load - content is no longer loaded during #perform_async if params are not used in before_enqueue
424
+ - No more namespaces for Redis by default (use separate DBs)
425
+
426
+ ## 0.1.21
427
+ - Sidekiq 4.0.1 bump
428
+ - Gem bump
429
+ - Added direct celluloid requirement to Karafka (removed from Sidekiq)
430
+
431
+ ## 0.1.19
432
+ - Internal call - schedule naming change
433
+ - Enqueue to perform_async naming in controller to follow Sidekiq's naming convention
434
+ - Gem bump
435
+
436
+ ## 0.1.18
437
+ - Changed Redis configuration options into a single hash that is directly passed to Redis setup for Sidekiq
438
+ - Added config.ru to provide a Sidekiq web UI (see README for more details)
439
+
440
+ ## 0.1.17
441
+ - Changed Karafka::Connection::Cluster to Karafka::Connection::ActorCluster to distinguish between a single thread actor cluster for multiple topic connection and a future feature that will allow process clusterization.
442
+ - Add an ability to use user-defined parsers for messages
443
+ - Lazy load params for before callbacks
444
+ - Automatic loading/initializing of all worker classes during startup (so Sidekiq won't fail with unknown workers exception)
445
+ - Params are now private to controller
446
+ - Added bootstrap method to app.rb
447
+
448
+ ## 0.1.16
449
+ - Cluster level error catching for all exceptions so the actor is not killed
450
+ - Cluster level error logging
451
+ - Listener refactoring (QueueConsumer extracted)
452
+ - Karafka::Connection::QueueConsumer to wrap around fetching logic - technically we could replace Kafka with any other messaging engine as long as we preserve the same API
453
+ - Added debug env for debugging purpose in applications
454
+
455
+ ## 0.1.15
456
+ - Fixed max_wait_ms vs socket_timeout_ms issue
457
+ - Fixed closing queue connection after Poseidon::Errors::ProtocolError failure
458
+ - Fixed wrong logging file selection based on env
459
+ - Extracted Karafka::Connection::QueueConsumer object to wrap around queue connection
460
+
461
+ ## 0.1.14
462
+ - Rake tasks for listing all the topics on Kafka server (rake kafka:topics)
463
+
464
+ ## 0.1.13
465
+ - Ability to assign custom workers and use them bypassing Karafka::BaseWorker (or its descendants)
466
+ - Gem bump
467
+
468
+ ## 0.1.12
469
+ - All internal errors went to Karafka::Errors namespace
470
+
471
+ ## 0.1.11
472
+ - Rescuing all the "before Sidekiq" processing so errors won't affect other incoming messages
473
+ - Fixed dying actors after connection error
474
+ - Added a new app status - "initializing"
475
+ - Karafka::Status model cleanup
476
+
477
+ ## 0.1.10
478
+ - Added possibility to specify redis namespace in configuration (failover to app name)
479
+ - Renamed redis_host to redis_url in configuration
480
+
481
+ ## 0.1.9
482
+ - Added worker logger
483
+
484
+ ## 0.1.8
485
+ - Dropped local env support in favour of [Envlogic](https://github.com/karafka/envlogic) - no changes in API
486
+
487
+ ## 0.1.7
488
+ - Karafka option for Redis hosts (not localhost only)
489
+
490
+ ## 0.1.6
491
+ - Added better concurrency by clusterization of listeners
492
+ - Added graceful shutdown
493
+ - Added concurrency that allows to handle bigger applications with celluloid
494
+ - Karafka controllers no longer require group to be defined (created based on the topic and app name)
495
+ - Karafka controllers no longer require topic to be defined (created based on the controller name)
496
+ - Readme updates
497
+
498
+ ## 0.1.5
499
+ - Celluloid support for listeners
500
+ - Multi target logging (STDOUT and file)
501
+
502
+ ## 0.1.4
503
+ - Renamed events to messages to follow Apache Kafka naming convention
504
+
505
+ ## 0.1.3
506
+ - Karafka::App.logger moved to Karafka.logger
507
+ - README updates (Usage section was added)
508
+
509
+ ## 0.1.2
510
+ - Logging to log/environment.log
511
+ - Karafka::Runner
512
+
513
+ ## 0.1.1
514
+ - README updates
515
+ - Raketasks updates
516
+ - Rake installation task
517
+ - Changelog file added
518
+
519
+ ## 0.1.0
520
+ - Initial framework code