karafka-enterprise 0.0.1

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of karafka-enterprise might be problematic. Click here for more details.

Files changed (104) hide show
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +11 -0
  6. data/.diffend.yml +3 -0
  7. data/.github/FUNDING.yml +3 -0
  8. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  9. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  10. data/.github/workflows/ci.yml +74 -0
  11. data/.gitignore +69 -0
  12. data/.rspec +1 -0
  13. data/.ruby-gemset +1 -0
  14. data/.ruby-version +1 -0
  15. data/CHANGELOG.md +573 -0
  16. data/CODE_OF_CONDUCT.md +46 -0
  17. data/CONTRIBUTING.md +41 -0
  18. data/Gemfile +14 -0
  19. data/Gemfile.lock +137 -0
  20. data/MIT-LICENCE +18 -0
  21. data/README.md +99 -0
  22. data/bin/karafka +19 -0
  23. data/certs/mensfeld.pem +25 -0
  24. data/config/errors.yml +41 -0
  25. data/docker-compose.yml +17 -0
  26. data/karafka.gemspec +43 -0
  27. data/lib/karafka.rb +72 -0
  28. data/lib/karafka/app.rb +53 -0
  29. data/lib/karafka/assignment_strategies/round_robin.rb +13 -0
  30. data/lib/karafka/attributes_map.rb +63 -0
  31. data/lib/karafka/backends/inline.rb +16 -0
  32. data/lib/karafka/base_consumer.rb +57 -0
  33. data/lib/karafka/base_responder.rb +226 -0
  34. data/lib/karafka/cli.rb +62 -0
  35. data/lib/karafka/cli/base.rb +78 -0
  36. data/lib/karafka/cli/console.rb +31 -0
  37. data/lib/karafka/cli/flow.rb +48 -0
  38. data/lib/karafka/cli/info.rb +31 -0
  39. data/lib/karafka/cli/install.rb +66 -0
  40. data/lib/karafka/cli/missingno.rb +19 -0
  41. data/lib/karafka/cli/server.rb +71 -0
  42. data/lib/karafka/code_reloader.rb +67 -0
  43. data/lib/karafka/connection/api_adapter.rb +161 -0
  44. data/lib/karafka/connection/batch_delegator.rb +55 -0
  45. data/lib/karafka/connection/builder.rb +18 -0
  46. data/lib/karafka/connection/client.rb +117 -0
  47. data/lib/karafka/connection/listener.rb +71 -0
  48. data/lib/karafka/connection/message_delegator.rb +36 -0
  49. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  50. data/lib/karafka/consumers/callbacks.rb +71 -0
  51. data/lib/karafka/consumers/includer.rb +64 -0
  52. data/lib/karafka/consumers/responders.rb +24 -0
  53. data/lib/karafka/consumers/single_params.rb +15 -0
  54. data/lib/karafka/contracts.rb +10 -0
  55. data/lib/karafka/contracts/config.rb +21 -0
  56. data/lib/karafka/contracts/consumer_group.rb +211 -0
  57. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  58. data/lib/karafka/contracts/responder_usage.rb +54 -0
  59. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  60. data/lib/karafka/errors.rb +51 -0
  61. data/lib/karafka/fetcher.rb +42 -0
  62. data/lib/karafka/helpers/class_matcher.rb +88 -0
  63. data/lib/karafka/helpers/config_retriever.rb +46 -0
  64. data/lib/karafka/helpers/inflector.rb +26 -0
  65. data/lib/karafka/helpers/multi_delegator.rb +32 -0
  66. data/lib/karafka/instrumentation/logger.rb +58 -0
  67. data/lib/karafka/instrumentation/monitor.rb +70 -0
  68. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  69. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  70. data/lib/karafka/params/batch_metadata.rb +26 -0
  71. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  72. data/lib/karafka/params/builders/params.rb +38 -0
  73. data/lib/karafka/params/builders/params_batch.rb +25 -0
  74. data/lib/karafka/params/metadata.rb +20 -0
  75. data/lib/karafka/params/params.rb +54 -0
  76. data/lib/karafka/params/params_batch.rb +60 -0
  77. data/lib/karafka/patches/ruby_kafka.rb +47 -0
  78. data/lib/karafka/persistence/client.rb +29 -0
  79. data/lib/karafka/persistence/consumers.rb +45 -0
  80. data/lib/karafka/persistence/topics.rb +48 -0
  81. data/lib/karafka/process.rb +60 -0
  82. data/lib/karafka/responders/builder.rb +36 -0
  83. data/lib/karafka/responders/topic.rb +55 -0
  84. data/lib/karafka/routing/builder.rb +90 -0
  85. data/lib/karafka/routing/consumer_group.rb +63 -0
  86. data/lib/karafka/routing/consumer_mapper.rb +34 -0
  87. data/lib/karafka/routing/proxy.rb +46 -0
  88. data/lib/karafka/routing/router.rb +29 -0
  89. data/lib/karafka/routing/topic.rb +62 -0
  90. data/lib/karafka/routing/topic_mapper.rb +53 -0
  91. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  92. data/lib/karafka/serialization/json/serializer.rb +31 -0
  93. data/lib/karafka/server.rb +86 -0
  94. data/lib/karafka/setup/config.rb +226 -0
  95. data/lib/karafka/setup/configurators/water_drop.rb +36 -0
  96. data/lib/karafka/setup/dsl.rb +21 -0
  97. data/lib/karafka/status.rb +29 -0
  98. data/lib/karafka/templates/application_consumer.rb.erb +7 -0
  99. data/lib/karafka/templates/application_responder.rb.erb +11 -0
  100. data/lib/karafka/templates/karafka.rb.erb +92 -0
  101. data/lib/karafka/version.rb +7 -0
  102. data/log/.gitkeep +0 -0
  103. metadata +325 -0
  104. metadata.gz.sig +0 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: 5f0aee4213dffa91cf4cab1b67a58b07b0bf9809bbaf04f346741bf6d8c5ae75
4
+ data.tar.gz: b922109bde03701ef789f035d20e76ad642a271e69d8f3628d9790dd20ede025
5
+ SHA512:
6
+ metadata.gz: 23f59a7c48d34c7395ea4c51dba354cfa7b9c8ea61501011c769dd537a52cf63cfebbf7a8d4b061b6878c3febe2a707ecf26cfb849b327dd032f8945036e7eab
7
+ data.tar.gz: 3d508b6a97a3ec0bd0be05895ec00296eb7ee421cba9b57762e1af2e8e4c73adc6284cf7d520a2a7461895301cadae5fdab056e54dd926569b36401b6d5d6633
checksums.yaml.gz.sig ADDED
Binary file
data.tar.gz.sig ADDED
Binary file
data/.coditsu/ci.yml ADDED
@@ -0,0 +1,3 @@
1
+ repository_id: 'd4482d42-f6b5-44ba-a5e4-00989ac519ee'
2
+ api_key: <%= ENV['CODITSU_API_KEY'] %>
3
+ api_secret: <%= ENV['CODITSU_API_SECRET'] %>
data/.console_irbrc ADDED
@@ -0,0 +1,11 @@
1
+ # irbrc for Karafka console
2
+
3
+ IRB.conf[:AUTO_INDENT] = true
4
+ IRB.conf[:SAVE_HISTORY] = 1000
5
+ IRB.conf[:USE_READLINE] = true
6
+ IRB.conf[:HISTORY_FILE] = ".irb-history"
7
+ IRB.conf[:LOAD_MODULES] = [] unless IRB.conf.key?(:LOAD_MODULES)
8
+
9
+ unless IRB.conf[:LOAD_MODULES].include?('irb/completion')
10
+ IRB.conf[:LOAD_MODULES] << 'irb/completion'
11
+ end
data/.diffend.yml ADDED
@@ -0,0 +1,3 @@
1
+ project_id: 'de9b9933-7610-4cc4-b69b-f7e3e3c5e797'
2
+ shareable_id: '68a8c626-b605-40ad-ac45-e3961ad7c57d'
3
+ shareable_key: 'a3ec2dac-fba2-4b6c-b181-49e927b15057'
@@ -0,0 +1,3 @@
1
+ # These are supported funding model platforms
2
+
3
+ open_collective: karafka
@@ -0,0 +1,50 @@
1
+ ---
2
+ name: Bug Report
3
+ about: Report an issue with Karafka you've discovered.
4
+ ---
5
+
6
+ *Be clear, concise and precise in your description of the problem.
7
+ Open an issue with a descriptive title and a summary in grammatically correct,
8
+ complete sentences.*
9
+
10
+ *Use the template below when reporting bugs. Please, make sure that
11
+ you're running the latest stable Karafka and that the problem you're reporting
12
+ hasn't been reported (and potentially fixed) already.*
13
+
14
+ *Before filing the ticket you should replace all text above the horizontal
15
+ rule with your own words.*
16
+
17
+ --------
18
+
19
+ ## Expected behavior
20
+
21
+ Describe here how you expected Karafka to behave in this particular situation.
22
+
23
+ ## Actual behavior
24
+
25
+ Describe here what actually happened.
26
+
27
+ ## Steps to reproduce the problem
28
+
29
+ This is extremely important! Providing us with a reliable way to reproduce
30
+ a problem will expedite its solution.
31
+
32
+ ## Your setup details
33
+
34
+ Please provide kafka version and the output of `karafka info` or `bundle exec karafka info` if using Bundler.
35
+
36
+ Here's an example:
37
+
38
+ ```
39
+ $ [bundle exec] karafka info
40
+ Karafka version: 1.3.0
41
+ Ruby version: 2.6.3
42
+ Ruby-kafka version: 0.7.9
43
+ Application client id: karafka-local
44
+ Backend: inline
45
+ Batch fetching: true
46
+ Batch consuming: true
47
+ Boot file: /app/karafka/karafka.rb
48
+ Environment: development
49
+ Kafka seed brokers: ["kafka://kafka:9092"]
50
+ ```
@@ -0,0 +1,20 @@
1
+ ---
2
+ name: Feature Request
3
+ about: Suggest new Karafka features or improvements to existing features.
4
+ ---
5
+
6
+ ## Is your feature request related to a problem? Please describe.
7
+
8
+ A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
9
+
10
+ ## Describe the solution you'd like
11
+
12
+ A clear and concise description of what you want to happen.
13
+
14
+ ## Describe alternatives you've considered
15
+
16
+ A clear and concise description of any alternative solutions or features you've considered.
17
+
18
+ ## Additional context
19
+
20
+ Add any other context or screenshots about the feature request here.
@@ -0,0 +1,74 @@
1
+ name: ci
2
+
3
+ on:
4
+ push:
5
+ schedule:
6
+ - cron: '0 1 * * *'
7
+
8
+ jobs:
9
+ specs:
10
+ runs-on: ubuntu-latest
11
+ needs: diffend
12
+ strategy:
13
+ fail-fast: false
14
+ matrix:
15
+ ruby:
16
+ - '3.0'
17
+ - '2.7'
18
+ - '2.6'
19
+ - '2.5'
20
+ include:
21
+ - ruby: '3.0'
22
+ coverage: 'true'
23
+ steps:
24
+ - uses: actions/checkout@v2
25
+ - name: Install package dependencies
26
+ run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
27
+ - name: Set up Ruby
28
+ uses: ruby/setup-ruby@v1
29
+ with:
30
+ ruby-version: ${{matrix.ruby}}
31
+ - name: Install latest bundler
32
+ run: |
33
+ gem install bundler --no-document
34
+ bundle config set without 'tools benchmarks docs'
35
+ - name: Bundle install
36
+ run: |
37
+ bundle config set without development
38
+ bundle install --jobs 4 --retry 3
39
+ - name: Run Kafka with docker-compose
40
+ run: docker-compose up -d
41
+ - name: Run all tests
42
+ env:
43
+ GITHUB_COVERAGE: ${{matrix.coverage}}
44
+ run: bundle exec rspec
45
+
46
+ diffend:
47
+ runs-on: ubuntu-latest
48
+ strategy:
49
+ fail-fast: false
50
+ steps:
51
+ - uses: actions/checkout@v2
52
+ with:
53
+ fetch-depth: 0
54
+ - name: Set up Ruby
55
+ uses: ruby/setup-ruby@v1
56
+ with:
57
+ ruby-version: 3.0
58
+ - name: Install latest bundler
59
+ run: gem install bundler --no-document
60
+ - name: Install Diffend plugin
61
+ run: bundle plugin install diffend
62
+ - name: Bundle Secure
63
+ run: bundle secure
64
+
65
+ coditsu:
66
+ runs-on: ubuntu-latest
67
+ strategy:
68
+ fail-fast: false
69
+ steps:
70
+ - uses: actions/checkout@v2
71
+ with:
72
+ fetch-depth: 0
73
+ - name: Run Coditsu
74
+ run: \curl -sSL https://api.coditsu.io/run/ci | bash
data/.gitignore ADDED
@@ -0,0 +1,69 @@
1
+ # bundler state
2
+ /.bundle
3
+ /vendor/bundle/
4
+ /vendor/ruby/
5
+ /ruby/
6
+ app.god
7
+
8
+ # minimal Rails specific artifacts
9
+ /.coditsu/local.yml
10
+ db/*.sqlite3
11
+ /log/development.log
12
+ /log/production.log
13
+ /log/test.log
14
+ /tmp/*
15
+ *.gem
16
+ *.~
17
+
18
+ # various artifacts
19
+ **.war
20
+ *.rbc
21
+ *.sassc
22
+ .byebug_history
23
+ .redcar/
24
+ .capistrano/
25
+ .sass-cache
26
+ /config/god/sidekiq.rb
27
+ /config/puma.rb
28
+ /coverage.data
29
+ /coverage/
30
+ /doc/api/
31
+ /doc/app/
32
+ /doc/yard
33
+ /doc/features.html
34
+ /doc/specs.html
35
+ /spec/tmp/*
36
+ /cache
37
+ /capybara*
38
+ /capybara-*.html
39
+ /gems
40
+ /specifications
41
+ rerun.txt
42
+ pickle-email-*.html
43
+
44
+ # If you find yourself ignoring temporary files generated by your text editor
45
+ # or operating system, you probably want to add a global ignore instead:
46
+ # git config --global core.excludesfile ~/.gitignore_global
47
+ #
48
+ # Here are some files you may want to ignore globally:
49
+
50
+ # scm revert files
51
+ **.orig
52
+
53
+ # Mac finder artifacts
54
+ .DS_Store
55
+
56
+ # Netbeans project directory
57
+ /nbproject
58
+
59
+ # RubyMine project files
60
+ .idea
61
+
62
+ # Textmate project files
63
+ /*.tmproj
64
+
65
+ # vim artifacts
66
+ **.swp
67
+
68
+ # documentation
69
+ .yardoc
data/.rspec ADDED
@@ -0,0 +1 @@
1
+ --require spec_helper
data/.ruby-gemset ADDED
@@ -0,0 +1 @@
1
+ karafka
data/.ruby-version ADDED
@@ -0,0 +1 @@
1
+ 3.0.0
data/CHANGELOG.md ADDED
@@ -0,0 +1,573 @@
1
+ # Karafka framework changelog
2
+
3
+ ## 1.4.2 (2021-02-16)
4
+ - Rescue Errno::EROFS in ensure_dir_exists (unasuke)
5
+
6
+ ## 1.4.1 (2020-12-04)
7
+ - Return non-zero exit code when printing usage
8
+ - Add support for :assignment_strategy for consumers
9
+
10
+ ## 1.4.0 (2020-09-05)
11
+ - Rename `Karafka::Params::Metadata` to `Karafka::Params::BatchMetadata`
12
+ - Rename consumer `#metadata` to `#batch_metadata`
13
+ - Separate metadata (including Karafka native metadata) from the root of params (backwards compatibility preserved thanks to rabotyaga)
14
+ - Remove metadata hash dependency
15
+ - Remove params dependency on a hash in favour of PORO
16
+ - Remove batch metadata dependency on a hash
17
+ - Remove MultiJson in favour of JSON in the default deserializer
18
+ - allow accessing all the metadata without accessing the payload
19
+ - freeze params and underlying elements except for the mutable payload
20
+ - provide access to raw payload after serialization
21
+ - fixes a bug where a non-deserializable (error) params would be marked as deserialized after first unsuccessful deserialization attempt
22
+ - fixes bug where karafka would mutate internal ruby-kafka state
23
+ - fixes bug where topic name in metadata would not be mapped using topic mappers
24
+ - simplifies the params and params batch API, before `#payload` usage, it won't be deserialized
25
+ - removes the `#[]` API from params to prevent from accessing raw data in a different way than #raw_payload
26
+ - makes the params batch operations consistent as params payload is deserialized only when accessed explicitly
27
+
28
+ ## 1.3.7 (2020-08-11)
29
+ - #599 - Allow metadata access without deserialization attempt (rabotyaga)
30
+ - Sync with ruby-kafka `1.2.0` api
31
+
32
+ ## 1.3.6 (2020-04-24)
33
+ - #583 - Use Karafka.logger for CLI messages (prikha)
34
+ - #582 - Cannot only define seed brokers in consumer groups
35
+
36
+ ## 1.3.5 (2020-04-02)
37
+ - #578 - ThreadError: can't be called from trap context patch
38
+
39
+ ## 1.3.4 (2020-02-17)
40
+ - `dry-configurable` upgrade (solnic)
41
+ - Remove temporary `thor` patches that are no longer needed
42
+
43
+ ## 1.3.3 (2019-12-23)
44
+ - Require `delegate` to fix missing dependency in `ruby-kafka`
45
+
46
+ ## 1.3.2 (2019-12-23)
47
+ - #561 - Allow `thor` 1.0.x usage in Karafka
48
+ - #567 - Ruby 2.7.0 support + unfreeze of a frozen string fix
49
+
50
+ ## 1.3.1 (2019-11-11)
51
+ - #545 - Makes sure the log directory exists when is possible (robertomiranda)
52
+ - Ruby 2.6.5 support
53
+ - #551 - add support for DSA keys
54
+ - #549 - Missing directories after `karafka install` (nijikon)
55
+
56
+ ## 1.3.0 (2019-09-09)
57
+ - Drop support for Ruby 2.4
58
+ - YARD docs tags cleanup
59
+
60
+ ## 1.3.0.rc1 (2019-07-31)
61
+ - Drop support for Kafka 0.10 in favor of native support for Kafka 0.11.
62
+ - Update ruby-kafka to the 0.7 version
63
+ - Support messages headers receiving
64
+ - Message bus unification
65
+ - Parser available in metadata
66
+ - Cleanup towards moving to a non-global state app management
67
+ - Drop Ruby 2.3 support
68
+ - Support for Ruby 2.6.3
69
+ - `Karafka::Loader` has been removed in favor of Zeitwerk
70
+ - Schemas are now contracts
71
+ - #393 - Reorganize responders - removed `multiple_usage` constrain
72
+ - #388 - ssl_client_cert_chain sync
73
+ - #300 - Store value in a value key and replace its content with parsed version - without root merge
74
+ - #331 - Disallow building groups without topics
75
+ - #340 - Instrumentation unification. Better and more consistent naming
76
+ - #340 - Procline instrumentation for a nicer process name
77
+ - #342 - Change default for `fetcher_max_queue_size` from `100` to `10` to lower max memory usage
78
+ - #345 - Cleanup exceptions names
79
+ - #341 - Split connection delegator into batch delegator and single_delegator
80
+ - #351 - Rename `#retrieve!` to `#parse!` on params and `#parsed` to `parse!` on params batch.
81
+ - #351 - Adds '#first' for params_batch that returns parsed first element from the params_batch object.
82
+ - #360 - Single params consuming mode automatically parses data specs
83
+ - #359 - Divide mark_as_consumed into mark_as_consumed and mark_as_consumed!
84
+ - #356 - Provide a `#values` for params_batch to extract only values of objects from the params_batch
85
+ - #363 - Too shallow ruby-kafka version lock
86
+ - #354 - Expose consumer heartbeat
87
+ - #377 - Remove the persistent setup in favor of persistence
88
+ - #375 - Sidekiq Backend parser mismatch
89
+ - #369 - Single consumer can support more than one topic
90
+ - #288 - Drop dependency on `activesupport` gem
91
+ - #371 - SASL over SSL
92
+ - #392 - Move params redundant data to metadata
93
+ - #335 - Metadata access from within the consumer
94
+ - #402 - Delayed reconnection upon critical failures
95
+ - #405 - `reconnect_timeout` value is now being validated
96
+ - #437 - Specs ensuring that the `#437` won't occur in the `1.3` release
97
+ - #426 - ssl client cert key password
98
+ - #444 - add certificate and private key validation
99
+ - #460 - Decouple responder "parser" (generator?) from topic.parser (benissimo)
100
+ - #463 - Split parsers into serializers / deserializers
101
+ - #473 - Support SASL OAuthBearer Authentication
102
+ - #475 - Disallow subscribing to the same topic with multiple consumers
103
+ - #485 - Setting shutdown_timeout to nil kills the app without waiting for anything
104
+ - #487 - Make listeners as instances
105
+ - #29 - Consumer class names must have the word "Consumer" in it in order to work (Sidekiq backend)
106
+ - #491 - irb is missing for console to work
107
+ - #502 - Karafka process hangs when sending multiple sigkills
108
+ - #506 - ssl_verify_hostname sync
109
+ - #483 - Upgrade dry-validation before releasing 1.3
110
+ - #492 - Use Zeitwerk for code reload in development
111
+ - #508 - Reset the consumers instances upon reconnecting to a cluster
112
+ - [#530](https://github.com/karafka/karafka/pull/530) - expose ruby and ruby-kafka version
113
+ [#534](https://github.com/karafka/karafka/pull/534) - Allow to use headers in the deserializer object
114
+ - [#319](https://github.com/karafka/karafka/pull/328) - Support for exponential backoff in pause
115
+
116
+ ## 1.2.11
117
+ - [#470](https://github.com/karafka/karafka/issues/470) Karafka not working with dry-configurable 0.8
118
+
119
+ ## 1.2.10
120
+ - [#453](https://github.com/karafka/karafka/pull/453) require `Forwardable` module
121
+
122
+ ## 1.2.9
123
+ - Critical exceptions now will cause consumer to stop instead of retrying without a break
124
+ - #412 - Fix dry-inflector dependency lock in gemspec
125
+ - #414 - Backport to 1.2 the delayed retry upon failure
126
+ - #437 - Raw message is no longer added to params after ParserError raised
127
+
128
+ ## 1.2.8
129
+ - #408 - Responder Topic Lookup Bug on Heroku
130
+
131
+ ## 1.2.7
132
+ - Unlock Ruby-kafka version with a warning
133
+
134
+ ## 1.2.6
135
+ - Lock WaterDrop to 1.2.3
136
+ - Lock Ruby-Kafka to 0.6.x (support for 0.7 will be added in Karafka 1.3)
137
+ - #382 - Full logging with AR, etc for development mode when there is Rails integration
138
+
139
+ ## 1.2.5
140
+ - #354 - Expose consumer heartbeat
141
+ - #373 - Async producer not working properly with responders
142
+
143
+ ## 1.2.4
144
+ - #332 - Fetcher for max queue size
145
+
146
+ ## 1.2.3
147
+ - #313 - support PLAINTEXT and SSL for scheme
148
+ - #288 - drop activesupport callbacks in favor of notifications
149
+ - #320 - Pausing indefinitely with nil pause timeout doesn't work
150
+ - #318 - Partition pausing doesn't work with custom topic mappers
151
+ - Rename ConfigAdapter to ApiAdapter to better reflect what it does
152
+ - #317 - Manual offset committing doesn't work with custom topic mappers
153
+
154
+ ## 1.2.2
155
+ - #312 - Broken for ActiveSupport 5.2.0
156
+
157
+ ## 1.2.1
158
+ - #304 - Unification of error instrumentation event details
159
+ - #306 - Using file logger from within a trap context upon shutdown is impossible
160
+
161
+ ## 1.2.0
162
+ - Spec improvements
163
+ - #260 - Specs missing randomization
164
+ - #251 - Shutdown upon non responding (unreachable) cluster is not possible
165
+ - #258 - Investigate lowering requirements on activesupport
166
+ - #246 - Alias consumer#mark_as_consumed on controller
167
+ - #259 - Allow forcing key/partition key on responders
168
+ - #267 - Styling inconsistency
169
+ - #242 - Support setting the max bytes to fetch per request
170
+ - #247 - Support SCRAM once released
171
+ - #271 - Provide an after_init option to pass a configuration block
172
+ - #262 - Error in the monitor code for NewRelic
173
+ - #241 - Performance metrics
174
+ - #274 - Rename controllers to consumers
175
+ - #184 - Seek to
176
+ - #284 - Dynamic Params parent class
177
+ - #275 - ssl_ca_certs_from_system
178
+ - #296 - Instrument forceful exit with an error
179
+ - Replaced some of the activesupport parts with dry-inflector
180
+ - Lower ActiveSupport dependency
181
+ - Remove configurators in favor of the after_init block configurator
182
+ - Ruby 2.5.0 support
183
+ - Renamed Karafka::Connection::Processor to Karafka::Connection::Delegator to match incoming naming conventions
184
+ - Renamed Karafka::Connection::Consumer to Karafka::Connection::Client due to #274
185
+ - Removed HashWithIndifferentAccess in favor of a regular hash
186
+ - JSON parsing defaults now to string keys
187
+ - Lower memory usage due to less params data internal details
188
+ - Support multiple ```after_init``` blocks in favor of a single one
189
+ - Renamed ```received_at``` to ```receive_time``` to follow ruby-kafka and WaterDrop conventions
190
+ - Adjust internal setup to easier map Ruby-Kafka config changes
191
+ - System callbacks reorganization
192
+ - Added ```before_fetch_loop``` configuration block for early client usage (```#seek```, etc)
193
+ - Renamed ```after_fetched``` to ```after_fetch``` to normalize the naming convention
194
+ - Instrumentation on a connection delegator level
195
+ - Added ```params_batch#last``` method to retrieve last element after unparsing
196
+ - All params keys are now strings
197
+
198
+ ## 1.1.2
199
+ - #256 - Default kafka.seed_brokers configuration is created in invalid format
200
+
201
+ ## 1.1.1
202
+ - #253 - Allow providing a global per app parser in config settings
203
+
204
+ ## 1.1.0
205
+ - Gem bump
206
+ - Switch from Celluloid to native Thread management
207
+ - Improved shutdown process
208
+ - Introduced optional fetch callbacks and moved current the ```after_received``` there as well
209
+ - Karafka will raise Errors::InvalidPauseTimeout exception when trying to pause but timeout set to 0
210
+ - Allow float for timeouts and other time based second settings
211
+ - Renamed MessagesProcessor to Processor and MessagesConsumer to Consumer - we don't process and don't consume anything else so it was pointless to keep this "namespace"
212
+ - #232 - Remove unused ActiveSupport require
213
+ - #214 - Expose consumer on a controller layer
214
+ - #193 - Process shutdown callbacks
215
+ - Fixed accessibility of ```#params_batch``` from the outside of the controller
216
+ - connection_pool config options are no longer required
217
+ - celluloid config options are no longer required
218
+ - ```#perform``` is now renamed to ```#consume``` with warning level on using the old one (deprecated)
219
+ - #235 - Rename perform to consume
220
+ - Upgrade to ruby-kafka 0.5
221
+ - Due to redesign of Waterdrop concurrency setting is no longer needed
222
+ - #236 - Manual offset management
223
+ - WaterDrop 1.0.0 support with async
224
+ - Renamed ```batch_consuming``` option to ```batch_fetching``` as it is not a consumption (with processing) but a process of fetching messages from Kafka. The messages is considered consumed, when it is processed.
225
+ - Renamed ```batch_processing``` to ```batch_consuming``` to resemble Kafka concept of consuming messages.
226
+ - Renamed ```after_received``` to ```after_fetched``` to normalize the naming conventions.
227
+ - Responders support the per topic ```async``` option.
228
+
229
+ ## 1.0.1
230
+ - #210 - LoadError: cannot load such file -- [...]/karafka.rb
231
+ - Ruby 2.4.2 as a default (+travis integration)
232
+ - JRuby upgrade
233
+ - Expanded persistence layer (moved to a namespace for easier future development)
234
+ - #213 - Misleading error when non-existing dependency is required
235
+ - #212 - Make params react to #topic, #partition, #offset
236
+ - #215 - Consumer group route dynamic options are ignored
237
+ - #217 - check RUBY_ENGINE constant if RUBY_VERSION is missing (#217)
238
+ - #218 - add configuration setting to control Celluloid's shutdown timeout
239
+ - Renamed Karafka::Routing::Mapper to Karafka::Routing::TopicMapper to match naming conventions
240
+ - #219 - Allow explicit consumer group names, without prefixes
241
+ - Fix to early removed pid upon shutdown of demonized process
242
+ - max_wait_time updated to match https://github.com/zendesk/ruby-kafka/issues/433
243
+ - #230 - Better uri validation for seed brokers (incompatibility as the kafka:// or kafka+ssl:// is required)
244
+ - Small internal docs fixes
245
+ - Dry::Validation::MissingMessageError: message for broker_schema? was not found
246
+ - #238 - warning: already initialized constant Karafka::Schemas::URI_SCHEMES
247
+
248
+ ## 1.0.0
249
+
250
+ ### Closed issues:
251
+
252
+ - #103 - Env for logger is loaded too early (on gem load not on app init)
253
+ - #142 - Possibility to better control Kafka consumers (consumer groups management)
254
+ - #150 - Add support for start_from_beginning on a per topic basis
255
+ - #154 - Support for min_bytes and max_wait_time on messages consuming
256
+ - #160 - Reorganize settings to better resemble ruby-kafka requirements
257
+ - #164 - If we decide to have configuration per topic, topic uniqueness should be removed
258
+ - #165 - Router validator
259
+ - #166 - Params and route reorganization (new API)
260
+ - #167 - Remove Sidekiq UI from Karafka
261
+ - #168 - Introduce unique IDs of routes
262
+ - #171 - Add kafka message metadata to params
263
+ - #176 - Transform Karafka::Connection::Consumer into a module
264
+ - #177 - Monitor not reacting when kafka killed with -9
265
+ - #175 - Allow single consumer to subscribe to multiple topics
266
+ - #178 - Remove parsing failover when cannot unparse data
267
+ - #174 - Extended config validation
268
+ - ~~#180 - Switch from JSON parser to yajl-ruby~~
269
+ - #181 - When responder is defined and not used due to ```respond_with``` not being triggered in the perform, it won't raise an exception.
270
+ - #188 - Rename name in config to client id
271
+ - #186 - Support ruby-kafka ```ssl_ca_cert_file_path``` config
272
+ - #189 - karafka console does not preserve history on exit
273
+ - #191 - Karafka 0.6.0rc1 does not work with jruby / now it does :-)
274
+ - Switch to multi json so everyone can use their favourite JSON parser
275
+ - Added jruby support in general and in Travis
276
+ - #196 - Topic mapper does not map topics when subscribing thanks to @webandtech
277
+ - #96 - Karafka server - possibility to run it only for a certain topics
278
+ - ~~karafka worker cli option is removed (please use sidekiq directly)~~ - restored, bad idea
279
+ - (optional) pausing upon processing failures ```pause_timeout```
280
+ - Karafka console main process no longer intercepts irb errors
281
+ - Wiki updates
282
+ - #204 - Long running controllers
283
+ - Better internal API to handle multiple usage cases using ```Karafka::Controllers::Includer```
284
+ - #207 - Rename before_enqueued to after_received
285
+ - #147 - De-attach Karafka from Sidekiq by extracting Sidekiq backend
286
+
287
+ ### New features and improvements
288
+
289
+ - batch processing thanks to ```#batch_consuming``` flag and ```#params_batch``` on controllers
290
+ - ```#topic``` method on an controller instance to make a clear distinction in between params and route details
291
+ - Changed routing model (still compatible with 0.5) to allow better resources management
292
+ - Lower memory requirements due to object creation limitation (2-3 times less objects on each new message)
293
+ - Introduced the ```#batch_consuming``` config flag (config for #126) that can be set per each consumer_group
294
+ - Added support for partition, offset and partition key in the params hash
295
+ - ```name``` option in config renamed to ```client_id```
296
+ - Long running controllers with ```persistent``` flag on a topic config level, to make controller instances persistent between messages batches (single controller instance per topic per partition, not per messages batch) - turned on by default
297
+
298
+ ### Incompatibilities
299
+
300
+ - Default boot file is renamed from app.rb to karafka.rb
301
+ - Removed worker glass as dependency (now an independent gem)
302
+ - ```kafka.hosts``` option renamed to ```kafka.seed_brokers``` - you don't need to provide all the hosts to work with Kafka
303
+ - ```start_from_beginning``` moved into kafka scope (```kafka.start_from_beginning```)
304
+ - Router no longer checks for route uniqueness - now you can define same routes for multiple kafkas and do a lot of crazy stuff, so it's your responsibility to check uniqueness
305
+ - Change in the way we identify topics in between Karafka and Sidekiq workers. If you upgrade, please make sure, all the jobs scheduled in Sidekiq are finished before the upgrade.
306
+ - ```batch_mode``` renamed to ```batch_fetching```
307
+ - Renamed content to value to better resemble ruby-kafka internal messages naming convention
308
+ - When having a responder with ```required``` topics and not using ```#respond_with``` at all, it will raise an exception
309
+ - Renamed ```inline_mode``` to ```inline_processing``` to resemble other settings conventions
310
+ - Renamed ```inline_processing``` to ```backend``` to reach 1.0 future compatibility
311
+ - Single controller **needs** to be used for a single topic consumption
312
+ - Renamed ```before_enqueue``` to ```after_received``` to better resemble internal logic, since for inline backend, there is no enqueue.
313
+ - Due to the level on which topic and controller are related (class level), the dynamic worker selection is no longer available.
314
+ - Renamed params #retrieve to params #retrieve! to better reflect what it does
315
+
316
+ ### Other changes
317
+ - PolishGeeksDevTools removed (in favour of Coditsu)
318
+ - Waaaaaay better code quality thanks to switching from dev tools to Coditsu
319
+ - Gem bump
320
+ - Cleaner internal API
321
+ - SRP
322
+ - Better settings proxying and management between ruby-kafka and karafka
323
+ - All internal validations are now powered by dry-validation
324
+ - Better naming conventions to reflect Kafka reality
325
+ - Removed Karafka::Connection::Message in favour of direct message details extraction from Kafka::FetchedMessage
326
+
327
+ ## 0.5.0.3
328
+ - #132 - When Kafka is gone, should reconnect after a time period
329
+ - #136 - new ruby-kafka version + other gem bumps
330
+ - ruby-kafka update
331
+ - #135 - NonMatchingRouteError - better error description in the code
332
+ - #140 - Move Capistrano Karafka to a different specific gem
333
+ - #110 - Add call method on a responder class to alias instance build and call
334
+ - #76 - Configs validator
335
+ - #138 - Possibility to have no worker class defined if inline_mode is being used
336
+ - #145 - Topic Mapper
337
+ - Ruby update to 2.4.1
338
+ - Gem bump x2
339
+ - #158 - Update docs section on heroku usage
340
+ - #150 - Add support for start_from_beginning on a per topic basis
341
+ - #148 - Lower Karafka Sidekiq dependency
342
+ - Allow karafka root to be specified from ENV
343
+ - Handle SIGTERM as a shutdown command for kafka server to support Heroku deployment
344
+
345
+ ## 0.5.0.2
346
+ - Gems update x3
347
+ - Default Ruby set to 2.3.3
348
+ - ~~Default Ruby set to 2.4.0~~
349
+ - Readme updates to match bug fixes and resolved issues
350
+ - #95 - Allow options into responder
351
+ - #98 - Use parser when responding on a topic
352
+ - #114 - Option to configure waterdrop connection pool timeout and concurrency
353
+ - #118 - Added dot in topic validation format
354
+ - #119 - add support for authentication using SSL
355
+ - #121 - JSON as a default for standalone responders usage
356
+ - #122 - Allow on capistrano role customization
357
+ - #125 - Add support to batch incoming messages
358
+ - #130 - start_from_beginning flag on routes and default
359
+ - #128 - Monitor caller_label not working with super on inheritance
360
+ - Renamed *inline* to *inline_mode* to stay consistent with flags that change the way karafka works (#125)
361
+ - Dry-configurable bump to 0.5 with fixed proc value evaluation on retrieve patch (internal change)
362
+
363
+ ## 0.5.0.1
364
+ - Fixed inconsistency in responders non-required topic definition. Now only required: false available
365
+ - #101 - Responders fail when multiple_usage true and required false
366
+ - fix error on startup from waterdrop #102
367
+ - Waterdrop 0.3.2.1 with kafka.hosts instead of kafka_hosts
368
+ - #105 - Karafka::Monitor#caller_label not working with inherited monitors
369
+ - #99 - Standalone mode (without Sidekiq)
370
+ - #97 - Buffer responders single topics before send (pre-validation)
371
+ - Better control over consumer thanks to additional config options
372
+ - #111 - Dynamic worker assignment based on the income params
373
+ - Long shutdown time fix
374
+
375
+ ## 0.5.0
376
+ - Removed Zookeeper totally as dependency
377
+ - Better group and partition rebalancing
378
+ - Automatic thread management (no need for tuning) - each topic is a separate actor/thread
379
+ - Moved from Poseidon into Ruby-Kafka
380
+ - No more max_concurrency setting
381
+ - After you define your App class and routes (and everything else) you need to add execute App.boot!
382
+ - Manual consuming is no longer available (no more karafka consume command)
383
+ - Karafka topics CLI is no longer available. No Zookeeper - no global topic discovery
384
+ - Dropped ZK as dependency
385
+ - karafka info command no longer prints details about Zookeeper
386
+ - Better shutdown
387
+ - No more autodiscovery via Zookeeper - instead, the whole cluster will be discovered directly from Kafka
388
+ - No more support for Kafka 0.8
389
+ - Support for Kafka 0.9
390
+ - No more need for ActorCluster, since now we have a single thread (and Kafka connection) per topic
391
+ - Ruby 2.2.* support dropped
392
+ - Using App name as a Kafka client_id
393
+ - Automatic Capistrano integration
394
+ - Responders support for handling better responses pipe-lining and better responses flow description and design (see README for more details)
395
+ - Gem bump
396
+ - Readme updates
397
+ - karafka flow CLI command for printing the application flow
398
+ - Some internal refactoring
399
+
400
+ ## 0.4.2
401
+ - #87 - Re-consume mode with cron for better Rails/Rack integration
402
+ - Moved Karafka server related stuff into separate Karafka::Server class
403
+ - Renamed Karafka::Runner into Karafka::Fetcher
404
+ - Gem bump
405
+ - Added chroot option to Zookeeper options
406
+ - Moved BROKERS_PATH into config from constant
407
+ - Added Karafka consume CLI action for a short running single consumption round
408
+ - Small fixes to close broken connections
409
+ - Readme updates
410
+
411
+ ## 0.4.1
412
+ - Explicit throw(:abort) required to halt before_enqueue (like in Rails 5)
413
+ - #61 - autodiscovery of Kafka brokers based on Zookeeper data
414
+ - #63 - Graceful shutdown with current offset state during data processing
415
+ - #65 - Example of NewRelic monitor is outdated
416
+ - #71 - Setup should be executed after user code is loaded
417
+ - Gem bump x3
418
+ - Rubocop remarks
419
+ - worker_timeout config option has been removed. It now needs to be defined manually by the framework user because WorkerGlass::Timeout can be disabled and we cannot use Karafka settings on a class level to initialize user code stuff
420
+ - Moved setup logic under setup/Setup namespace
421
+ - Better defaults handling
422
+ - #75 - Kafka and Zookeeper options as a hash
423
+ - #82 - Karafka autodiscovery fails upon caching of configs
424
+ - #81 - Switch config management to dry configurable
425
+ - Version fix
426
+ - Dropped support for Ruby 2.1.*
427
+ - Ruby bump to 2.3.1
428
+
429
+ ## 0.4.0
430
+ - Added WaterDrop gem with default configuration
431
+ - Refactoring of config logic to simplify adding new dependencies that need to be configured based on #setup data
432
+ - Gem bump
433
+ - Readme updates
434
+ - Renamed cluster to actor_cluster for method names
435
+ - Replaced SidekiqGlass with generic WorkerGlass lib
436
+ - Application bootstrap in app.rb no longer required
437
+ - Karafka.boot needs to be executed after all the application files are loaded (template updated)
438
+ - Small loader refactor (no API changes)
439
+ - Ruby 2.3.0 support (default)
440
+ - No more rake tasks
441
+ - Karafka CLI instead of rake tasks
442
+ - Worker cli command allows passing additional options directly to Sidekiq
443
+ - Renamed concurrency to max_concurrency - it describes better what happens - Karafka will use this number of threads only when required
444
+ - Added wait_timeout that allows us to tune how long should we wait on a single socket connection (single topic) for new messages before going to next one (this applies to each thread separately)
445
+ - Rubocop remarks
446
+ - Removed Sinatra and Puma dependencies
447
+ - Karafka Cli internal reorganization
448
+ - Karafka Cli routes task
449
+ - #37 - warn log for failed parsing of a message
450
+ - #43 - wrong constant name
451
+ - #44 - Method name conflict
452
+ - #48 - Cannot load such file -- celluloid/current
453
+ - #46 - Loading application
454
+ - #45 - Set up monitor in config
455
+ - #47 - rake karafka:run uses app.rb only
456
+ - #53 - README update with Sinatra/Rails integration description
457
+ - #41 - New Routing engine
458
+ - #54 - Move Karafka::Workers::BaseWorker to Karafka::BaseWorker
459
+ - #55 - ApplicationController and ApplicationWorker
460
+
461
+ ## 0.3.2
462
+ - Karafka::Params::Params lazy load merge keys with string/symbol names priorities fix
463
+
464
+ ## 0.3.1
465
+ - Renamed Karafka::Monitor to Karafka::Process to represent a Karafka process wrapper
466
+ - Added Karafka::Monitoring that allows to add custom logging and monitoring with external libraries and systems
467
+ - Moved logging functionality into Karafka::Monitoring default monitoring
468
+ - Added possibility to provide own monitoring as long as it responds to #notice and #notice_error
469
+ - Standardized logging format for all logs
470
+
471
+ ## 0.3.0
472
+ - Switched from custom ParserError for each parser to general catching of Karafka::Errors::ParseError and its descendants
473
+ - Gem bump
474
+ - Fixed #32 - now when using custom workers that does not inherit from Karafka::BaseWorker perform method is not required. Using custom workers means that the logic that would normally lie under #perform, needs to be executed directly from the worker.
475
+ - Fixed #31 - Technically didn't fix because this is how Sidekiq is meant to work, but provided possibility to assign custom interchangers that allow to bypass JSON encoding issues by converting data that goes to Redis to a required format (and parsing it back when it is fetched)
476
+ - Added full parameters lazy load - content is no longer loaded during #perform_async if params are not used in before_enqueue
477
+ - No more namespaces for Redis by default (use separate DBs)
478
+
479
+ ## 0.1.21
480
+ - Sidekiq 4.0.1 bump
481
+ - Gem bump
482
+ - Added direct celluloid requirement to Karafka (removed from Sidekiq)
483
+
484
+ ## 0.1.19
485
+ - Internal call - schedule naming change
486
+ - Enqueue to perform_async naming in controller to follow Sidekiq naming convention
487
+ - Gem bump
488
+
489
+ ## 0.1.18
490
+ - Changed Redis configuration options into a single hash that is directly passed to Redis setup for Sidekiq
491
+ - Added config.ru to provide a Sidekiq web UI (see README for more details)
492
+
493
+ ## 0.1.17
494
+ - Changed Karafka::Connection::Cluster to Karafka::Connection::ActorCluster to distinguish between a single thread actor cluster for multiple topic connection and a future feature that will allow process clusterization.
495
+ - Add an ability to use user-defined parsers for messages
496
+ - Lazy load params for before callbacks
497
+ - Automatic loading/initializing all workers classes during startup (so Sidekiq won't fail with unknown workers exception)
498
+ - Params are now private to controller
499
+ - Added bootstrap method to app.rb
500
+
501
+ ## 0.1.16
502
+ - Cluster level error catching for all exceptions so actor is not killed
503
+ - Cluster level error logging
504
+ - Listener refactoring (QueueConsumer extracted)
505
+ - Karafka::Connection::QueueConsumer to wrap around fetching logic - technically we could replace Kafka with any other messaging engine as long as we preserve the same API
506
+ - Added debug env for debugging purpose in applications
507
+
508
+ ## 0.1.15
509
+ - Fixed max_wait_ms vs socket_timeout_ms issue
510
+ - Fixed closing queue connection after Poseidon::Errors::ProtocolError failure
511
+ - Fixed wrong logging file selection based on env
512
+ - Extracted Karafka::Connection::QueueConsumer object to wrap around queue connection
513
+
514
+ ## 0.1.14
515
+ - Rake tasks for listing all the topics on Kafka server (rake kafka:topics)
516
+
517
+ ## 0.1.13
518
+ - Ability to assign custom workers and use them bypassing Karafka::BaseWorker (or its descendants)
519
+ - Gem bump
520
+
521
+ ## 0.1.12
522
+ - All internal errors went to Karafka::Errors namespace
523
+
524
+ ## 0.1.11
525
+ - Rescuing all the "before Sidekiq" processing so errors won't affect other incoming messages
526
+ - Fixed dying actors after connection error
527
+ - Added a new app status - "initializing"
528
+ - Karafka::Status model cleanup
529
+
530
+ ## 0.1.10
531
+ - Added possibility to specify redis namespace in configuration (failover to app name)
532
+ - Renamed redis_host to redis_url in configuration
533
+
534
+ ## 0.1.9
535
+ - Added worker logger
536
+
537
+ ## 0.1.8
538
+ - Dropped local env support in favour of [Envlogic](https://github.com/karafka/envlogic) - no changes in API
539
+
540
+ ## 0.1.7
541
+ - Karafka option for Redis hosts (not localhost only)
542
+
543
+ ## 0.1.6
544
+ - Added better concurrency by clusterization of listeners
545
+ - Added graceful shutdown
546
+ - Added concurrency that allows handling bigger applications with celluloid
547
+ - Karafka controllers no longer require group to be defined (created based on the topic and app name)
548
+ - Karafka controllers no longer require topic to be defined (created based on the controller name)
549
+ - Readme updates
550
+
551
+ ## 0.1.5
552
+ - Celluloid support for listeners
553
+ - Multi target logging (STDOUT and file)
554
+
555
+ ## 0.1.4
556
+ - Renamed events to messages to follow Apache Kafka naming convention
557
+
558
+ ## 0.1.3
559
+ - Karafka::App.logger moved to Karafka.logger
560
+ - README updates (Usage section was added)
561
+
562
+ ## 0.1.2
563
+ - Logging to log/environment.log
564
+ - Karafka::Runner
565
+
566
+ ## 0.1.1
567
+ - README updates
568
+ - Rake tasks updates
569
+ - Rake installation task
570
+ - Changelog file added
571
+
572
+ ## 0.1.0
573
+ - Initial framework code