karafka-pro 0.0.1

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of karafka-pro might be problematic. Click here for more details.

Files changed (104) hide show
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +11 -0
  6. data/.diffend.yml +3 -0
  7. data/.github/FUNDING.yml +3 -0
  8. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  9. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  10. data/.github/workflows/ci.yml +74 -0
  11. data/.gitignore +69 -0
  12. data/.rspec +1 -0
  13. data/.ruby-gemset +1 -0
  14. data/.ruby-version +1 -0
  15. data/CHANGELOG.md +570 -0
  16. data/CODE_OF_CONDUCT.md +46 -0
  17. data/CONTRIBUTING.md +41 -0
  18. data/Gemfile +14 -0
  19. data/Gemfile.lock +137 -0
  20. data/MIT-LICENCE +18 -0
  21. data/README.md +99 -0
  22. data/bin/karafka +19 -0
  23. data/certs/mensfeld.pem +25 -0
  24. data/config/errors.yml +41 -0
  25. data/docker-compose.yml +17 -0
  26. data/karafka.gemspec +43 -0
  27. data/lib/karafka.rb +72 -0
  28. data/lib/karafka/app.rb +53 -0
  29. data/lib/karafka/assignment_strategies/round_robin.rb +13 -0
  30. data/lib/karafka/attributes_map.rb +63 -0
  31. data/lib/karafka/backends/inline.rb +16 -0
  32. data/lib/karafka/base_consumer.rb +57 -0
  33. data/lib/karafka/base_responder.rb +226 -0
  34. data/lib/karafka/cli.rb +62 -0
  35. data/lib/karafka/cli/base.rb +78 -0
  36. data/lib/karafka/cli/console.rb +31 -0
  37. data/lib/karafka/cli/flow.rb +48 -0
  38. data/lib/karafka/cli/info.rb +31 -0
  39. data/lib/karafka/cli/install.rb +66 -0
  40. data/lib/karafka/cli/missingno.rb +19 -0
  41. data/lib/karafka/cli/server.rb +71 -0
  42. data/lib/karafka/code_reloader.rb +67 -0
  43. data/lib/karafka/connection/api_adapter.rb +161 -0
  44. data/lib/karafka/connection/batch_delegator.rb +55 -0
  45. data/lib/karafka/connection/builder.rb +18 -0
  46. data/lib/karafka/connection/client.rb +117 -0
  47. data/lib/karafka/connection/listener.rb +71 -0
  48. data/lib/karafka/connection/message_delegator.rb +36 -0
  49. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  50. data/lib/karafka/consumers/callbacks.rb +71 -0
  51. data/lib/karafka/consumers/includer.rb +64 -0
  52. data/lib/karafka/consumers/responders.rb +24 -0
  53. data/lib/karafka/consumers/single_params.rb +15 -0
  54. data/lib/karafka/contracts.rb +10 -0
  55. data/lib/karafka/contracts/config.rb +21 -0
  56. data/lib/karafka/contracts/consumer_group.rb +211 -0
  57. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  58. data/lib/karafka/contracts/responder_usage.rb +54 -0
  59. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  60. data/lib/karafka/errors.rb +51 -0
  61. data/lib/karafka/fetcher.rb +42 -0
  62. data/lib/karafka/helpers/class_matcher.rb +88 -0
  63. data/lib/karafka/helpers/config_retriever.rb +46 -0
  64. data/lib/karafka/helpers/inflector.rb +26 -0
  65. data/lib/karafka/helpers/multi_delegator.rb +32 -0
  66. data/lib/karafka/instrumentation/logger.rb +58 -0
  67. data/lib/karafka/instrumentation/monitor.rb +70 -0
  68. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  69. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  70. data/lib/karafka/params/batch_metadata.rb +26 -0
  71. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  72. data/lib/karafka/params/builders/params.rb +38 -0
  73. data/lib/karafka/params/builders/params_batch.rb +25 -0
  74. data/lib/karafka/params/metadata.rb +20 -0
  75. data/lib/karafka/params/params.rb +54 -0
  76. data/lib/karafka/params/params_batch.rb +60 -0
  77. data/lib/karafka/patches/ruby_kafka.rb +47 -0
  78. data/lib/karafka/persistence/client.rb +29 -0
  79. data/lib/karafka/persistence/consumers.rb +45 -0
  80. data/lib/karafka/persistence/topics.rb +48 -0
  81. data/lib/karafka/process.rb +60 -0
  82. data/lib/karafka/responders/builder.rb +36 -0
  83. data/lib/karafka/responders/topic.rb +55 -0
  84. data/lib/karafka/routing/builder.rb +90 -0
  85. data/lib/karafka/routing/consumer_group.rb +63 -0
  86. data/lib/karafka/routing/consumer_mapper.rb +34 -0
  87. data/lib/karafka/routing/proxy.rb +46 -0
  88. data/lib/karafka/routing/router.rb +29 -0
  89. data/lib/karafka/routing/topic.rb +62 -0
  90. data/lib/karafka/routing/topic_mapper.rb +53 -0
  91. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  92. data/lib/karafka/serialization/json/serializer.rb +31 -0
  93. data/lib/karafka/server.rb +86 -0
  94. data/lib/karafka/setup/config.rb +226 -0
  95. data/lib/karafka/setup/configurators/water_drop.rb +36 -0
  96. data/lib/karafka/setup/dsl.rb +21 -0
  97. data/lib/karafka/status.rb +29 -0
  98. data/lib/karafka/templates/application_consumer.rb.erb +7 -0
  99. data/lib/karafka/templates/application_responder.rb.erb +11 -0
  100. data/lib/karafka/templates/karafka.rb.erb +92 -0
  101. data/lib/karafka/version.rb +7 -0
  102. data/log/.gitkeep +0 -0
  103. metadata +325 -0
  104. metadata.gz.sig +0 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: 2b686fa1ab1a8e92c68ea065095f98a8e1ca678d339ac9c9a8329efca49350fe
4
+ data.tar.gz: d8c76865b8583ba61fea3adb2ca4a4e8c05f9f72e53cd57ce0491de7566ea23e
5
+ SHA512:
6
+ metadata.gz: b74d7a8e1d0482901eb41817dfcc23d3f26f257d46b437614dbf05cb01753392ee90c9b2bc7b3376a8851d19293e7043a3bd37df392f88a591d7b1903621ba3b
7
+ data.tar.gz: 0170dd05cb912e42d8a598400ba2cef4188a6c442be5a41d414e02460f80b00d4ecfa4691cc81db9dbb441439c6cfeb41544f5cb344990beb3f517fbc6be95f4
checksums.yaml.gz.sig ADDED
Binary file
data.tar.gz.sig ADDED
Binary file
data/.coditsu/ci.yml ADDED
@@ -0,0 +1,3 @@
1
+ repository_id: 'd4482d42-f6b5-44ba-a5e4-00989ac519ee'
2
+ api_key: <%= ENV['CODITSU_API_KEY'] %>
3
+ api_secret: <%= ENV['CODITSU_API_SECRET'] %>
data/.console_irbrc ADDED
@@ -0,0 +1,11 @@
1
+ # irbrc for Karafka console
2
+
3
+ IRB.conf[:AUTO_INDENT] = true
4
+ IRB.conf[:SAVE_HISTORY] = 1000
5
+ IRB.conf[:USE_READLINE] = true
6
+ IRB.conf[:HISTORY_FILE] = ".irb-history"
7
+ IRB.conf[:LOAD_MODULES] = [] unless IRB.conf.key?(:LOAD_MODULES)
8
+
9
+ unless IRB.conf[:LOAD_MODULES].include?('irb/completion')
10
+ IRB.conf[:LOAD_MODULES] << 'irb/completion'
11
+ end
data/.diffend.yml ADDED
@@ -0,0 +1,3 @@
1
+ project_id: 'de9b9933-7610-4cc4-b69b-f7e3e3c5e797'
2
+ shareable_id: '68a8c626-b605-40ad-ac45-e3961ad7c57d'
3
+ shareable_key: 'a3ec2dac-fba2-4b6c-b181-49e927b15057'
@@ -0,0 +1,3 @@
1
+ # These are supported funding model platforms
2
+
3
+ open_collective: karafka
@@ -0,0 +1,50 @@
1
+ ---
2
+ name: Bug Report
3
+ about: Report an issue with Karafka you've discovered.
4
+ ---
5
+
6
+ *Be clear, concise and precise in your description of the problem.
7
+ Open an issue with a descriptive title and a summary in grammatically correct,
8
+ complete sentences.*
9
+
10
+ *Use the template below when reporting bugs. Please, make sure that
11
+ you're running the latest stable Karafka and that the problem you're reporting
12
+ hasn't been reported (and potentially fixed) already.*
13
+
14
+ *Before filing the ticket you should replace all text above the horizontal
15
+ rule with your own words.*
16
+
17
+ --------
18
+
19
+ ## Expected behavior
20
+
21
+ Describe here how you expected Karafka to behave in this particular situation.
22
+
23
+ ## Actual behavior
24
+
25
+ Describe here what actually happened.
26
+
27
+ ## Steps to reproduce the problem
28
+
29
+ This is extremely important! Providing us with a reliable way to reproduce
30
+ a problem will expedite its solution.
31
+
32
+ ## Your setup details
33
+
34
+ Please provide kafka version and the output of `karafka info` or `bundle exec karafka info` if using Bundler.
35
+
36
+ Here's an example:
37
+
38
+ ```
39
+ $ [bundle exec] karafka info
40
+ Karafka version: 1.3.0
41
+ Ruby version: 2.6.3
42
+ Ruby-kafka version: 0.7.9
43
+ Application client id: karafka-local
44
+ Backend: inline
45
+ Batch fetching: true
46
+ Batch consuming: true
47
+ Boot file: /app/karafka/karafka.rb
48
+ Environment: development
49
+ Kafka seed brokers: ["kafka://kafka:9092"]
50
+ ```
@@ -0,0 +1,20 @@
1
+ ---
2
+ name: Feature Request
3
+ about: Suggest new Karafka features or improvements to existing features.
4
+ ---
5
+
6
+ ## Is your feature request related to a problem? Please describe.
7
+
8
+ A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
9
+
10
+ ## Describe the solution you'd like
11
+
12
+ A clear and concise description of what you want to happen.
13
+
14
+ ## Describe alternatives you've considered
15
+
16
+ A clear and concise description of any alternative solutions or features you've considered.
17
+
18
+ ## Additional context
19
+
20
+ Add any other context or screenshots about the feature request here.
@@ -0,0 +1,74 @@
1
+ name: ci
2
+
3
+ on:
4
+ push:
5
+ schedule:
6
+ - cron: '0 1 * * *'
7
+
8
+ jobs:
9
+ specs:
10
+ runs-on: ubuntu-latest
11
+ needs: diffend
12
+ strategy:
13
+ fail-fast: false
14
+ matrix:
15
+ ruby:
16
+ - '3.0'
17
+ - '2.7'
18
+ - '2.6'
19
+ - '2.5'
20
+ include:
21
+ - ruby: '3.0'
22
+ coverage: 'true'
23
+ steps:
24
+ - uses: actions/checkout@v2
25
+ - name: Install package dependencies
26
+ run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
27
+ - name: Set up Ruby
28
+ uses: ruby/setup-ruby@v1
29
+ with:
30
+ ruby-version: ${{matrix.ruby}}
31
+ - name: Install latest bundler
32
+ run: |
33
+ gem install bundler --no-document
34
+ bundle config set without 'tools benchmarks docs'
35
+ - name: Bundle install
36
+ run: |
37
+ bundle config set without development
38
+ bundle install --jobs 4 --retry 3
39
+ - name: Run Kafka with docker-compose
40
+ run: docker-compose up -d
41
+ - name: Run all tests
42
+ env:
43
+ GITHUB_COVERAGE: ${{matrix.coverage}}
44
+ run: bundle exec rspec
45
+
46
+ diffend:
47
+ runs-on: ubuntu-latest
48
+ strategy:
49
+ fail-fast: false
50
+ steps:
51
+ - uses: actions/checkout@v2
52
+ with:
53
+ fetch-depth: 0
54
+ - name: Set up Ruby
55
+ uses: ruby/setup-ruby@v1
56
+ with:
57
+ ruby-version: 3.0
58
+ - name: Install latest bundler
59
+ run: gem install bundler --no-document
60
+ - name: Install Diffend plugin
61
+ run: bundle plugin install diffend
62
+ - name: Bundle Secure
63
+ run: bundle secure
64
+
65
+ coditsu:
66
+ runs-on: ubuntu-latest
67
+ strategy:
68
+ fail-fast: false
69
+ steps:
70
+ - uses: actions/checkout@v2
71
+ with:
72
+ fetch-depth: 0
73
+ - name: Run Coditsu
74
+ run: \curl -sSL https://api.coditsu.io/run/ci | bash
data/.gitignore ADDED
@@ -0,0 +1,69 @@
1
+ # bundler state
2
+ /.bundle
3
+ /vendor/bundle/
4
+ /vendor/ruby/
5
+ /ruby/
6
+ app.god
7
+
8
+ # minimal Rails specific artifacts
9
+ /.coditsu/local.yml
10
+ db/*.sqlite3
11
+ /log/development.log
12
+ /log/production.log
13
+ /log/test.log
14
+ /tmp/*
15
+ *.gem
16
+ *.~
17
+
18
+ # various artifacts
19
+ **.war
20
+ *.rbc
21
+ *.sassc
22
+ .byebug_history
23
+ .redcar/
24
+ .capistrano/
25
+ .sass-cache
26
+ /config/god/sidekiq.rb
27
+ /config/puma.rb
28
+ /coverage.data
29
+ /coverage/
30
+ /doc/api/
31
+ /doc/app/
32
+ /doc/yard
33
+ /doc/features.html
34
+ /doc/specs.html
35
+ /spec/tmp/*
36
+ /cache
37
+ /capybara*
38
+ /capybara-*.html
39
+ /gems
40
+ /specifications
41
+ rerun.txt
42
+ pickle-email-*.html
43
+
44
+ # If you find yourself ignoring temporary files generated by your text editor
45
+ # or operating system, you probably want to add a global ignore instead:
46
+ # git config --global core.excludesfile ~/.gitignore_global
47
+ #
48
+ # Here are some files you may want to ignore globally:
49
+
50
+ # scm revert files
51
+ **.orig
52
+
53
+ # Mac finder artifacts
54
+ .DS_Store
55
+
56
+ # Netbeans project directory
57
+ /nbproject
58
+
59
+ # RubyMine project files
60
+ .idea
61
+
62
+ # Textmate project files
63
+ /*.tmproj
64
+
65
+ # vim artifacts
66
+ **.swp
67
+
68
+ # documentation
69
+ .yardoc
data/.rspec ADDED
@@ -0,0 +1 @@
1
+ --require spec_helper
data/.ruby-gemset ADDED
@@ -0,0 +1 @@
1
+ karafka
data/.ruby-version ADDED
@@ -0,0 +1 @@
1
+ 3.0.0
data/CHANGELOG.md ADDED
@@ -0,0 +1,570 @@
1
+ # Karafka framework changelog
2
+
3
+ ## 1.4.1 (2020-12-04)
4
+ - Return non-zero exit code when printing usage
5
+ - Add support for :assignment_strategy for consumers
6
+
7
+ ## 1.4.0 (2020-09-05)
8
+ - Rename `Karafka::Params::Metadata` to `Karafka::Params::BatchMetadata`
9
+ - Rename consumer `#metadata` to `#batch_metadata`
10
+ - Separate metadata (including Karafka native metadata) from the root of params (backwards compatibility preserved thanks to rabotyaga)
11
+ - Remove metadata hash dependency
12
+ - Remove params dependency on a hash in favour of PORO
13
+ - Remove batch metadata dependency on a hash
14
+ - Remove MultiJson in favour of JSON in the default deserializer
15
+ - allow accessing all the metadata without accessing the payload
16
+ - freeze params and underlying elements except for the mutable payload
17
+ - provide access to raw payload after serialization
18
+ - fixes a bug where a non-deserializable (error) params would be marked as deserialized after first unsuccessful deserialization attempt
19
+ - fixes bug where karafka would mutate internal ruby-kafka state
20
+ - fixes bug where topic name in metadata would not be mapped using topic mappers
21
+ - simplifies the params and params batch API, before `#payload` usage, it won't be deserialized
22
+ - removes the `#[]` API from params to prevent from accessing raw data in a different way than #raw_payload
23
+ - makes the params batch operations consistent as params payload is deserialized only when accessed explicitly
24
+
25
+ ## 1.3.7 (2020-08-11)
26
+ - #599 - Allow metadata access without deserialization attempt (rabotyaga)
27
+ - Sync with ruby-kafka `1.2.0` api
28
+
29
+ ## 1.3.6 (2020-04-24)
30
+ - #583 - Use Karafka.logger for CLI messages (prikha)
31
+ - #582 - Cannot only define seed brokers in consumer groups
32
+
33
+ ## 1.3.5 (2020-04-02)
34
+ - #578 - ThreadError: can't be called from trap context patch
35
+
36
+ ## 1.3.4 (2020-02-17)
37
+ - `dry-configurable` upgrade (solnic)
38
+ - Remove temporary `thor` patches that are no longer needed
39
+
40
+ ## 1.3.3 (2019-12-23)
41
+ - Require `delegate` to fix missing dependency in `ruby-kafka`
42
+
43
+ ## 1.3.2 (2019-12-23)
44
+ - #561 - Allow `thor` 1.0.x usage in Karafka
45
+ - #567 - Ruby 2.7.0 support + unfreeze of a frozen string fix
46
+
47
+ ## 1.3.1 (2019-11-11)
48
+ - #545 - Makes sure the log directory exists when is possible (robertomiranda)
49
+ - Ruby 2.6.5 support
50
+ - #551 - add support for DSA keys
51
+ - #549 - Missing directories after `karafka install` (nijikon)
52
+
53
+ ## 1.3.0 (2019-09-09)
54
+ - Drop support for Ruby 2.4
55
+ - YARD docs tags cleanup
56
+
57
+ ## 1.3.0.rc1 (2019-07-31)
58
+ - Drop support for Kafka 0.10 in favor of native support for Kafka 0.11.
59
+ - Update ruby-kafka to the 0.7 version
60
+ - Support messages headers receiving
61
+ - Message bus unification
62
+ - Parser available in metadata
63
+ - Cleanup towards moving to a non-global state app management
64
+ - Drop Ruby 2.3 support
65
+ - Support for Ruby 2.6.3
66
+ - `Karafka::Loader` has been removed in favor of Zeitwerk
67
+ - Schemas are now contracts
68
+ - #393 - Reorganize responders - removed `multiple_usage` constrain
69
+ - #388 - ssl_client_cert_chain sync
70
+ - #300 - Store value in a value key and replace its content with parsed version - without root merge
71
+ - #331 - Disallow building groups without topics
72
+ - #340 - Instrumentation unification. Better and more consistent naming
73
+ - #340 - Procline instrumentation for a nicer process name
74
+ - #342 - Change default for `fetcher_max_queue_size` from `100` to `10` to lower max memory usage
75
+ - #345 - Cleanup exceptions names
76
+ - #341 - Split connection delegator into batch delegator and single_delegator
77
+ - #351 - Rename `#retrieve!` to `#parse!` on params and `#parsed` to `parse!` on params batch.
78
+ - #351 - Adds '#first' for params_batch that returns parsed first element from the params_batch object.
79
+ - #360 - Single params consuming mode automatically parses data specs
80
+ - #359 - Divide mark_as_consumed into mark_as_consumed and mark_as_consumed!
81
+ - #356 - Provide a `#values` for params_batch to extract only values of objects from the params_batch
82
+ - #363 - Too shallow ruby-kafka version lock
83
+ - #354 - Expose consumer heartbeat
84
+ - #377 - Remove the persistent setup in favor of persistence
85
+ - #375 - Sidekiq Backend parser mismatch
86
+ - #369 - Single consumer can support more than one topic
87
+ - #288 - Drop dependency on `activesupport` gem
88
+ - #371 - SASL over SSL
89
+ - #392 - Move params redundant data to metadata
90
+ - #335 - Metadata access from within the consumer
91
+ - #402 - Delayed reconnection upon critical failures
92
+ - #405 - `reconnect_timeout` value is now being validated
93
+ - #437 - Specs ensuring that the `#437` won't occur in the `1.3` release
94
+ - #426 - ssl client cert key password
95
+ - #444 - add certificate and private key validation
96
+ - #460 - Decouple responder "parser" (generator?) from topic.parser (benissimo)
97
+ - #463 - Split parsers into serializers / deserializers
98
+ - #473 - Support SASL OAuthBearer Authentication
99
+ - #475 - Disallow subscribing to the same topic with multiple consumers
100
+ - #485 - Setting shutdown_timeout to nil kills the app without waiting for anything
101
+ - #487 - Make listeners as instances
102
+ - #29 - Consumer class names must have the word "Consumer" in it in order to work (Sidekiq backend)
103
+ - #491 - irb is missing for console to work
104
+ - #502 - Karafka process hangs when sending multiple sigkills
105
+ - #506 - ssl_verify_hostname sync
106
+ - #483 - Upgrade dry-validation before releasing 1.3
107
+ - #492 - Use Zeitwerk for code reload in development
108
+ - #508 - Reset the consumers instances upon reconnecting to a cluster
109
+ - [#530](https://github.com/karafka/karafka/pull/530) - expose ruby and ruby-kafka version
110
+ [#534](https://github.com/karafka/karafka/pull/534) - Allow to use headers in the deserializer object
111
+ - [#319](https://github.com/karafka/karafka/pull/328) - Support for exponential backoff in pause
112
+
113
+ ## 1.2.11
114
+ - [#470](https://github.com/karafka/karafka/issues/470) Karafka not working with dry-configurable 0.8
115
+
116
+ ## 1.2.10
117
+ - [#453](https://github.com/karafka/karafka/pull/453) require `Forwardable` module
118
+
119
+ ## 1.2.9
120
+ - Critical exceptions now will cause consumer to stop instead of retrying without a break
121
+ - #412 - Fix dry-inflector dependency lock in gemspec
122
+ - #414 - Backport to 1.2 the delayed retry upon failure
123
+ - #437 - Raw message is no longer added to params after ParserError raised
124
+
125
+ ## 1.2.8
126
+ - #408 - Responder Topic Lookup Bug on Heroku
127
+
128
+ ## 1.2.7
129
+ - Unlock Ruby-kafka version with a warning
130
+
131
+ ## 1.2.6
132
+ - Lock WaterDrop to 1.2.3
133
+ - Lock Ruby-Kafka to 0.6.x (support for 0.7 will be added in Karafka 1.3)
134
+ - #382 - Full logging with AR, etc for development mode when there is Rails integration
135
+
136
+ ## 1.2.5
137
+ - #354 - Expose consumer heartbeat
138
+ - #373 - Async producer not working properly with responders
139
+
140
+ ## 1.2.4
141
+ - #332 - Fetcher for max queue size
142
+
143
+ ## 1.2.3
144
+ - #313 - support PLAINTEXT and SSL for scheme
145
+ - #288 - drop activesupport callbacks in favor of notifications
146
+ - #320 - Pausing indefinitely with nil pause timeout doesn't work
147
+ - #318 - Partition pausing doesn't work with custom topic mappers
148
+ - Rename ConfigAdapter to ApiAdapter to better reflect what it does
149
+ - #317 - Manual offset committing doesn't work with custom topic mappers
150
+
151
+ ## 1.2.2
152
+ - #312 - Broken for ActiveSupport 5.2.0
153
+
154
+ ## 1.2.1
155
+ - #304 - Unification of error instrumentation event details
156
+ - #306 - Using file logger from within a trap context upon shutdown is impossible
157
+
158
+ ## 1.2.0
159
+ - Spec improvements
160
+ - #260 - Specs missing randomization
161
+ - #251 - Shutdown upon non responding (unreachable) cluster is not possible
162
+ - #258 - Investigate lowering requirements on activesupport
163
+ - #246 - Alias consumer#mark_as_consumed on controller
164
+ - #259 - Allow forcing key/partition key on responders
165
+ - #267 - Styling inconsistency
166
+ - #242 - Support setting the max bytes to fetch per request
167
+ - #247 - Support SCRAM once released
168
+ - #271 - Provide an after_init option to pass a configuration block
169
+ - #262 - Error in the monitor code for NewRelic
170
+ - #241 - Performance metrics
171
+ - #274 - Rename controllers to consumers
172
+ - #184 - Seek to
173
+ - #284 - Dynamic Params parent class
174
+ - #275 - ssl_ca_certs_from_system
175
+ - #296 - Instrument forceful exit with an error
176
+ - Replaced some of the activesupport parts with dry-inflector
177
+ - Lower ActiveSupport dependency
178
+ - Remove configurators in favor of the after_init block configurator
179
+ - Ruby 2.5.0 support
180
+ - Renamed Karafka::Connection::Processor to Karafka::Connection::Delegator to match incoming naming conventions
181
+ - Renamed Karafka::Connection::Consumer to Karafka::Connection::Client due to #274
182
+ - Removed HashWithIndifferentAccess in favor of a regular hash
183
+ - JSON parsing defaults now to string keys
184
+ - Lower memory usage due to less params data internal details
185
+ - Support multiple ```after_init``` blocks in favor of a single one
186
+ - Renamed ```received_at``` to ```receive_time``` to follow ruby-kafka and WaterDrop conventions
187
+ - Adjust internal setup to easier map Ruby-Kafka config changes
188
+ - System callbacks reorganization
189
+ - Added ```before_fetch_loop``` configuration block for early client usage (```#seek```, etc)
190
+ - Renamed ```after_fetched``` to ```after_fetch``` to normalize the naming convention
191
+ - Instrumentation on a connection delegator level
192
+ - Added ```params_batch#last``` method to retrieve last element after unparsing
193
+ - All params keys are now strings
194
+
195
+ ## 1.1.2
196
+ - #256 - Default kafka.seed_brokers configuration is created in invalid format
197
+
198
+ ## 1.1.1
199
+ - #253 - Allow providing a global per app parser in config settings
200
+
201
+ ## 1.1.0
202
+ - Gem bump
203
+ - Switch from Celluloid to native Thread management
204
+ - Improved shutdown process
205
+ - Introduced optional fetch callbacks and moved current the ```after_received``` there as well
206
+ - Karafka will raise Errors::InvalidPauseTimeout exception when trying to pause but timeout set to 0
207
+ - Allow float for timeouts and other time based second settings
208
+ Renamed MessagesProcessor to Processor and MessagesConsumer to Consumer - we don't process and don't consume anything else so it was pointless to keep this "namespace"
209
+ - #232 - Remove unused ActiveSupport require
210
+ - #214 - Expose consumer on a controller layer
211
+ - #193 - Process shutdown callbacks
212
+ - Fixed accessibility of ```#params_batch``` from the outside of the controller
213
+ - connection_pool config options are no longer required
214
+ - celluloid config options are no longer required
215
+ - ```#perform``` is now renamed to ```#consume``` with warning level on using the old one (deprecated)
216
+ - #235 - Rename perform to consume
217
+ - Upgrade to ruby-kafka 0.5
218
+ - Due to redesign of Waterdrop concurrency setting is no longer needed
219
+ - #236 - Manual offset management
220
+ - WaterDrop 1.0.0 support with async
221
+ Renamed ```batch_consuming``` option to ```batch_fetching``` as it is not a consumption (with processing) but a process of fetching messages from Kafka. The message is considered consumed, when it is processed.
222
+ - Renamed ```batch_processing``` to ```batch_consuming``` to resemble Kafka concept of consuming messages.
223
+ - Renamed ```after_received``` to ```after_fetched``` to normalize the naming conventions.
224
+ - Responders support the per topic ```async``` option.
225
+
226
+ ## 1.0.1
227
+ - #210 - LoadError: cannot load such file -- [...]/karafka.rb
228
+ - Ruby 2.4.2 as a default (+travis integration)
229
+ - JRuby upgrade
230
+ - Expanded persistence layer (moved to a namespace for easier future development)
231
+ - #213 - Misleading error when non-existing dependency is required
232
+ - #212 - Make params react to #topic, #partition, #offset
233
+ - #215 - Consumer group route dynamic options are ignored
234
+ - #217 - check RUBY_ENGINE constant if RUBY_VERSION is missing (#217)
235
+ - #218 - add configuration setting to control Celluloid's shutdown timeout
236
+ - Renamed Karafka::Routing::Mapper to Karafka::Routing::TopicMapper to match naming conventions
237
+ - #219 - Allow explicit consumer group names, without prefixes
238
+ - Fix to early removed pid upon shutdown of demonized process
239
+ - max_wait_time updated to match https://github.com/zendesk/ruby-kafka/issues/433
240
+ - #230 - Better uri validation for seed brokers (incompatibility as the kafka:// or kafka+ssl:// is required)
241
+ - Small internal docs fixes
242
+ - Dry::Validation::MissingMessageError: message for broker_schema? was not found
243
+ - #238 - warning: already initialized constant Karafka::Schemas::URI_SCHEMES
244
+
245
+ ## 1.0.0
246
+
247
+ ### Closed issues:
248
+
249
+ - #103 - Env for logger is loaded 2 early (on gem load not on app init)
250
+ - #142 - Possibility to better control Kafka consumers (consumer groups management)
251
+ - #150 - Add support for start_from_beginning on a per topic basis
252
+ - #154 - Support for min_bytes and max_wait_time on messages consuming
253
+ - #160 - Reorganize settings to better resemble ruby-kafka requirements
254
+ - #164 - If we decide to have configuration per topic, topic uniqueness should be removed
255
+ - #165 - Router validator
256
+ - #166 - Params and route reorganization (new API)
257
+ - #167 - Remove Sidekiq UI from Karafka
258
+ - #168 - Introduce unique IDs of routes
259
+ - #171 - Add kafka message metadata to params
260
+ - #176 - Transform Karafka::Connection::Consumer into a module
261
+ - #177 - Monitor not reacting when kafka killed with -9
262
+ - #175 - Allow single consumer to subscribe to multiple topics
263
+ - #178 - Remove parsing failover when cannot unparse data
264
+ - #174 - Extended config validation
265
+ - ~~#180 - Switch from JSON parser to yajl-ruby~~
266
+ - #181 - When responder is defined and not used due to ```respond_with``` not being triggered in the perform, it won't raise an exception.
267
+ - #188 - Rename name in config to client id
268
+ - #186 - Support ruby-kafka ```ssl_ca_cert_file_path``` config
269
+ - #189 - karafka console does not preserve history on exit
270
+ - #191 - Karafka 0.6.0rc1 does not work with jruby / now it does :-)
271
+ - Switch to multi json so everyone can use their favourite JSON parser
272
+ - Added jruby support in general and in Travis
273
+ - #196 - Topic mapper does not map topics when subscribing thanks to @webandtech
274
+ - #96 - Karafka server - possibility to run it only for a certain topics
275
+ - ~~karafka worker cli option is removed (please use sidekiq directly)~~ - restored, bad idea
276
+ - (optional) pausing upon processing failures ```pause_timeout```
277
+ - Karafka console main process no longer intercepts irb errors
278
+ - Wiki updates
279
+ - #204 - Long running controllers
280
+ - Better internal API to handle multiple usage cases using ```Karafka::Controllers::Includer```
281
+ - #207 - Rename before_enqueued to after_received
282
+ - #147 - De-attach Karafka from Sidekiq by extracting Sidekiq backend
283
+
284
+ ### New features and improvements
285
+
286
+ - batch processing thanks to ```#batch_consuming``` flag and ```#params_batch``` on controllers
287
+ - ```#topic``` method on an controller instance to make a clear distinction in between params and route details
288
+ - Changed routing model (still compatible with 0.5) to allow better resources management
289
+ - Lower memory requirements due to object creation limitation (2-3 times less objects on each new message)
290
+ - Introduced the ```#batch_consuming``` config flag (config for #126) that can be set per each consumer_group
291
+ - Added support for partition, offset and partition key in the params hash
292
+ - ```name``` option in config renamed to ```client_id```
293
+ - Long running controllers with ```persistent``` flag on a topic config level, to make controller instances persistent between messages batches (single controller instance per topic per partition no per messages batch) - turned on by default
294
+
295
+ ### Incompatibilities
296
+
297
+ - Default boot file is renamed from app.rb to karafka.rb
298
+ Removed worker glass as dependency (now an independent gem)
299
+ - ```kafka.hosts``` option renamed to ```kafka.seed_brokers``` - you don't need to provide all the hosts to work with Kafka
300
+ - ```start_from_beginning``` moved into kafka scope (```kafka.start_from_beginning```)
301
+ - Router no longer checks for route uniqueness - now you can define same routes for multiple kafkas and do a lot of crazy stuff, so it's your responsibility to check uniqueness
302
+ - Change in the way we identify topics in between Karafka and Sidekiq workers. If you upgrade, please make sure, all the jobs scheduled in Sidekiq are finished before the upgrade.
303
+ - ```batch_mode``` renamed to ```batch_fetching```
304
+ - Renamed content to value to better resemble ruby-kafka internal messages naming convention
305
+ - When having a responder with ```required``` topics and not using ```#respond_with``` at all, it will raise an exception
306
+ - Renamed ```inline_mode``` to ```inline_processing``` to resemble other settings conventions
307
+ - Renamed ```inline_processing``` to ```backend``` to reach 1.0 future compatibility
308
+ - Single controller **needs** to be used for a single topic consumption
309
+ - Renamed ```before_enqueue``` to ```after_received``` to better resemble internal logic, since for inline backend, there is no enqueue.
310
+ - Due to the level on which topic and controller are related (class level), the dynamic worker selection is no longer available.
311
+ - Renamed params #retrieve to params #retrieve! to better reflect what it does
312
+
313
+ ### Other changes
314
+ - PolishGeeksDevTools removed (in favour of Coditsu)
315
+ - Waaaaaay better code quality thanks to switching from dev tools to Coditsu
316
+ - Gem bump
317
+ - Cleaner internal API
318
+ - SRP
319
+ - Better settings proxying and management between ruby-kafka and karafka
320
+ - All internal validations are now powered by dry-validation
321
+ - Better naming conventions to reflect Kafka reality
322
+ - Removed Karafka::Connection::Message in favour of direct message details extraction from Kafka::FetchedMessage
323
+
324
+ ## 0.5.0.3
325
+ - #132 - When Kafka is gone, should reconnect after a time period
326
+ - #136 - new ruby-kafka version + other gem bumps
327
+ - ruby-kafka update
328
+ - #135 - NonMatchingRouteError - better error description in the code
329
+ - #140 - Move Capistrano Karafka to a different specific gem
330
+ - #110 - Add call method on a responder class to alias instance build and call
331
+ - #76 - Configs validator
332
+ - #138 - Possibility to have no worker class defined if inline_mode is being used
333
+ - #145 - Topic Mapper
334
+ - Ruby update to 2.4.1
335
+ - Gem bump x2
336
+ - #158 - Update docs section on heroku usage
337
+ - #150 - Add support for start_from_beginning on a per topic basis
338
+ - #148 - Lower Karafka Sidekiq dependency
339
+ - Allow karafka root to be specified from ENV
340
+ - Handle SIGTERM as a shutdown command for kafka server to support Heroku deployment
341
+
342
+ ## 0.5.0.2
343
+ - Gems update x3
344
+ - Default Ruby set to 2.3.3
345
+ - ~~Default Ruby set to 2.4.0~~
346
+ - Readme updates to match bug fixes and resolved issues
347
+ - #95 - Allow options into responder
348
+ - #98 - Use parser when responding on a topic
349
+ - #114 - Option to configure waterdrop connection pool timeout and concurrency
350
+ - #118 - Added dot in topic validation format
351
+ - #119 - add support for authentication using SSL
352
+ - #121 - JSON as a default for standalone responders usage
353
+ - #122 - Allow on capistrano role customization
354
+ - #125 - Add support to batch incoming messages
355
+ - #130 - start_from_beginning flag on routes and default
356
+ - #128 - Monitor caller_label not working with super on inheritance
357
+ - Renamed *inline* to *inline_mode* to stay consistent with flags that change the way karafka works (#125)
358
+ - Dry-configurable bump to 0.5 with fixed proc value evaluation on retrieve patch (internal change)
359
+
360
+ ## 0.5.0.1
361
+ - Fixed inconsistency in responders non-required topic definition. Now only required: false available
362
+ - #101 - Responders fail when multiple_usage true and required false
363
+ - fix error on startup from waterdrop #102
364
+ - Waterdrop 0.3.2.1 with kafka.hosts instead of kafka_hosts
365
+ - #105 - Karafka::Monitor#caller_label not working with inherited monitors
366
+ - #99 - Standalone mode (without Sidekiq)
367
+ - #97 - Buffer responders single topics before send (pre-validation)
368
+ - Better control over consumer thanks to additional config options
369
+ - #111 - Dynamic worker assignment based on the income params
370
+ - Long shutdown time fix
371
+
372
+ ## 0.5.0
373
+ - Removed Zookeeper totally as dependency
374
+ - Better group and partition rebalancing
375
+ - Automatic thread management (no need for tuning) - each topic is a separate actor/thread
376
+ - Moved from Poseidon into Ruby-Kafka
377
+ - No more max_concurrency setting
378
+ - After you define your App class and routes (and everything else) you need to add execute App.boot!
379
+ - Manual consuming is no longer available (no more karafka consume command)
380
+ - Karafka topics CLI is no longer available. No Zookeeper - no global topic discovery
381
+ - Dropped ZK as dependency
382
+ - karafka info command no longer prints details about Zookeeper
383
+ - Better shutdown
384
+ - No more autodiscovery via Zookeeper - instead, the whole cluster will be discovered directly from Kafka
385
+ - No more support for Kafka 0.8
386
+ - Support for Kafka 0.9
387
+ - No more need for ActorCluster, since now we have a single thread (and Kafka connection) per topic
388
+ - Ruby 2.2.* support dropped
389
+ - Using App name as a Kafka client_id
390
+ - Automatic Capistrano integration
391
+ - Responders support for handling better responses pipe-lining and better responses flow description and design (see README for more details)
392
+ - Gem bump
393
+ - Readme updates
394
+ - karafka flow CLI command for printing the application flow
395
+ - Some internal refactoring
396
+
397
+ ## 0.4.2
398
+ - #87 - Re-consume mode with crone for better Rails/Rack integration
399
+ - Moved Karafka server related stuff into separate Karafka::Server class
400
+ - Renamed Karafka::Runner into Karafka::Fetcher
401
+ - Gem bump
402
+ - Added chroot option to Zookeeper options
403
+ - Moved BROKERS_PATH into config from constant
404
+ - Added Karafka consume CLI action for a short running single consumption round
405
+ - Small fixes to close broken connections
406
+ - Readme updates
407
+
408
+ ## 0.4.1
409
+ - Explicit throw(:abort) required to halt before_enqueue (like in Rails 5)
410
+ - #61 - autodiscovery of Kafka brokers based on Zookeeper data
411
+ - #63 - Graceful shutdown with current offset state during data processing
412
+ - #65 - Example of NewRelic monitor is outdated
413
+ - #71 - Setup should be executed after user code is loaded
414
+ - Gem bump x3
415
+ - Rubocop remarks
416
+ - worker_timeout config option has been removed. It now needs to be defined manually by the framework user because WorkerGlass::Timeout can be disabled and we cannot use Karafka settings on a class level to initialize user code stuff
417
+ - Moved setup logic under setup/Setup namespace
418
+ - Better defaults handling
419
+ - #75 - Kafka and Zookeeper options as a hash
420
+ - #82 - Karafka autodiscovery fails upon caching of configs
421
+ - #81 - Switch config management to dry configurable
422
+ - Version fix
423
+ - Dropped support for Ruby 2.1.*
424
+ - Ruby bump to 2.3.1
425
+
426
+ ## 0.4.0
427
+ - Added WaterDrop gem with default configuration
428
+ - Refactoring of config logic to simplify adding new dependencies that need to be configured based on #setup data
429
+ - Gem bump
430
+ - Readme updates
431
+ - Renamed cluster to actor_cluster for method names
432
+ - Replaced SidekiqGlass with generic WorkerGlass lib
433
+ - Application bootstrap in app.rb no longer required
434
+ - Karafka.boot needs to be executed after all the application files are loaded (template updated)
435
+ - Small loader refactor (no API changes)
436
+ - Ruby 2.3.0 support (default)
437
+ - No more rake tasks
438
+ - Karafka CLI instead of rake tasks
439
+ - Worker cli command allows passing additional options directly to Sidekiq
440
+ - Renamed concurrency to max_concurrency - it describes better what happens - Karafka will use this number of threads only when required
441
+ - Added wait_timeout that allows us to tune how long should we wait on a single socket connection (single topic) for new messages before going to next one (this applies to each thread separately)
442
+ - Rubocop remarks
443
+ - Removed Sinatra and Puma dependencies
444
+ - Karafka Cli internal reorganization
445
+ - Karafka Cli routes task
446
+ - #37 - warn log for failed parsing of a message
447
+ - #43 - wrong constant name
448
+ - #44 - Method name conflict
449
+ - #48 - Cannot load such file -- celluloid/current
450
+ - #46 - Loading application
451
+ - #45 - Set up monitor in config
452
+ - #47 - rake karafka:run uses app.rb only
453
+ - #53 - README update with Sinatra/Rails integration description
454
+ - #41 - New Routing engine
455
+ - #54 - Move Karafka::Workers::BaseWorker to Karafka::BaseWorker
456
+ - #55 - ApplicationController and ApplicationWorker
457
+
458
+ ## 0.3.2
459
+ - Karafka::Params::Params lazy load merge keys with string/symbol names priorities fix
460
+
461
+ ## 0.3.1
462
+ - Renamed Karafka::Monitor to Karafka::Process to represent a Karafka process wrapper
463
+ - Added Karafka::Monitoring that allows to add custom logging and monitoring with external libraries and systems
464
+ - Moved logging functionality into Karafka::Monitoring default monitoring
465
+ - Added possibility to provide own monitoring as long as it responds to #notice and #notice_error
466
+ - Standardized logging format for all logs
467
+
468
+ ## 0.3.0
469
+ - Switched from custom ParserError for each parser to general catching of Karafka::Errors::ParseError and its descendants
470
+ - Gem bump
471
+ - Fixed #32 - now when using custom workers that do not inherit from Karafka::BaseWorker perform method is not required. Using custom workers means that the logic that would normally lie under #perform, needs to be executed directly from the worker.
472
+ - Fixed #31 - Technically didn't fix because this is how Sidekiq is meant to work, but provided possibility to assign custom interchangers that allow to bypass JSON encoding issues by converting data that goes to Redis to a required format (and parsing it back when it is fetched)
473
+ - Added full parameters lazy load - content is no longer loaded during #perform_async if params are not used in before_enqueue
474
+ - No more namespaces for Redis by default (use separate DBs)
475
+
476
+ ## 0.1.21
477
+ - Sidekiq 4.0.1 bump
478
+ - Gem bump
479
+ - Added direct celluloid requirement to Karafka (removed from Sidekiq)
480
+
481
+ ## 0.1.19
482
+ - Internal call - schedule naming change
483
+ - Enqueue to perform_async naming in controller to follow Sidekiq naming convention
484
+ - Gem bump
485
+
486
+ ## 0.1.18
487
+ - Changed Redis configuration options into a single hash that is directly passed to Redis setup for Sidekiq
488
+ - Added config.ru to provide a Sidekiq web UI (see README for more details)
489
+
490
+ ## 0.1.17
491
+ - Changed Karafka::Connection::Cluster to Karafka::Connection::ActorCluster to distinguish between a single thread actor cluster for multiple topic connection and a future feature that will allow process clusterization.
492
+ - Add an ability to use user-defined parsers for messages
493
+ - Lazy load params for before callbacks
494
+ - Automatic loading/initializing all workers classes during startup (so Sidekiq won't fail with unknown workers exception)
495
+ - Params are now private to controller
496
+ - Added bootstrap method to app.rb
497
+
498
+ ## 0.1.16
499
+ - Cluster level error catching for all exceptions so actor is not killed
500
+ - Cluster level error logging
501
+ - Listener refactoring (QueueConsumer extracted)
502
+ - Karafka::Connection::QueueConsumer to wrap around fetching logic - technically we could replace Kafka with any other messaging engine as long as we preserve the same API
503
+ - Added debug env for debugging purpose in applications
504
+
505
+ ## 0.1.15
506
+ - Fixed max_wait_ms vs socket_timeout_ms issue
507
+ - Fixed closing queue connection after Poseidon::Errors::ProtocolError failure
508
+ - Fixed wrong logging file selection based on env
509
+ - Extracted Karafka::Connection::QueueConsumer object to wrap around queue connection
510
+
511
+ ## 0.1.14
512
+ - Rake tasks for listing all the topics on Kafka server (rake kafka:topics)
513
+
514
+ ## 0.1.13
515
+ - Ability to assign custom workers and use them bypassing Karafka::BaseWorker (or its descendants)
516
+ - Gem bump
517
+
518
+ ## 0.1.12
519
+ - All internal errors went to Karafka::Errors namespace
520
+
521
+ ## 0.1.11
522
+ - Rescuing all the "before Sidekiq" processing so errors won't affect other incoming messages
523
+ - Fixed dying actors after connection error
524
+ - Added a new app status - "initializing"
525
+ - Karafka::Status model cleanup
526
+
527
+ ## 0.1.10
528
+ - Added possibility to specify redis namespace in configuration (failover to app name)
529
+ - Renamed redis_host to redis_url in configuration
530
+
531
+ ## 0.1.9
532
+ - Added worker logger
533
+
534
+ ## 0.1.8
535
+ - Dropped local env support in favour of [Envlogic](https://github.com/karafka/envlogic) - no changes in API
536
+
537
+ ## 0.1.7
538
+ - Karafka option for Redis hosts (not localhost only)
539
+
540
+ ## 0.1.6
541
+ - Added better concurrency by clusterization of listeners
542
+ - Added graceful shutdown
543
+ - Added concurrency that allows to handle bigger applications with celluloid
544
+ - Karafka controllers no longer require group to be defined (created based on the topic and app name)
545
+ - Karafka controllers no longer require topic to be defined (created based on the controller name)
546
+ - Readme updates
547
+
548
+ ## 0.1.5
549
+ - Celluloid support for listeners
550
+ - Multi target logging (STDOUT and file)
551
+
552
+ ## 0.1.4
553
+ - Renamed events to messages to follow Apache Kafka naming convention
554
+
555
+ ## 0.1.3
556
+ - Karafka::App.logger moved to Karafka.logger
557
+ - README updates (Usage section was added)
558
+
559
+ ## 0.1.2
560
+ - Logging to log/environment.log
561
+ - Karafka::Runner
562
+
563
+ ## 0.1.1
564
+ - README updates
565
+ - Rake tasks updates
566
+ - Rake installation task
567
+ - Changelog file added
568
+
569
+ ## 0.1.0
570
+ - Initial framework code