karafka 1.3.0 → 1.4.14

Files changed (58)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.diffend.yml +3 -0
  4. data/.github/workflows/ci.yml +76 -0
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +112 -15
  7. data/CODE_OF_CONDUCT.md +1 -1
  8. data/Gemfile +2 -0
  9. data/Gemfile.lock +87 -98
  10. data/README.md +28 -31
  11. data/certs/mensfeld.pem +24 -23
  12. data/config/errors.yml +2 -0
  13. data/docker-compose.yml +17 -0
  14. data/karafka.gemspec +22 -14
  15. data/lib/karafka/assignment_strategies/round_robin.rb +13 -0
  16. data/lib/karafka/attributes_map.rb +3 -8
  17. data/lib/karafka/cli/base.rb +4 -4
  18. data/lib/karafka/cli/flow.rb +9 -6
  19. data/lib/karafka/cli/info.rb +1 -1
  20. data/lib/karafka/cli/install.rb +5 -2
  21. data/lib/karafka/cli/missingno.rb +19 -0
  22. data/lib/karafka/cli/server.rb +8 -8
  23. data/lib/karafka/cli.rb +9 -1
  24. data/lib/karafka/connection/api_adapter.rb +27 -24
  25. data/lib/karafka/connection/batch_delegator.rb +5 -1
  26. data/lib/karafka/connection/builder.rb +9 -2
  27. data/lib/karafka/connection/client.rb +9 -6
  28. data/lib/karafka/connection/listener.rb +2 -2
  29. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  30. data/lib/karafka/consumers/includer.rb +5 -4
  31. data/lib/karafka/contracts/consumer_group.rb +10 -5
  32. data/lib/karafka/contracts/server_cli_options.rb +2 -0
  33. data/lib/karafka/contracts.rb +1 -1
  34. data/lib/karafka/helpers/class_matcher.rb +2 -2
  35. data/lib/karafka/instrumentation/logger.rb +6 -9
  36. data/lib/karafka/instrumentation/stdout_listener.rb +6 -4
  37. data/lib/karafka/params/batch_metadata.rb +26 -0
  38. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  39. data/lib/karafka/params/builders/params.rb +17 -15
  40. data/lib/karafka/params/builders/params_batch.rb +2 -2
  41. data/lib/karafka/params/metadata.rb +14 -29
  42. data/lib/karafka/params/params.rb +27 -41
  43. data/lib/karafka/params/params_batch.rb +15 -16
  44. data/lib/karafka/routing/builder.rb +1 -0
  45. data/lib/karafka/routing/consumer_group.rb +5 -3
  46. data/lib/karafka/serialization/json/deserializer.rb +2 -2
  47. data/lib/karafka/server.rb +4 -1
  48. data/lib/karafka/setup/config.rb +60 -52
  49. data/lib/karafka/templates/karafka.rb.erb +1 -1
  50. data/lib/karafka/version.rb +1 -1
  51. data/lib/karafka.rb +3 -1
  52. data.tar.gz.sig +0 -0
  53. metadata +75 -93
  54. metadata.gz.sig +0 -0
  55. data/.github/FUNDING.yml +0 -3
  56. data/.travis.yml +0 -36
  57. data/lib/karafka/consumers/metadata.rb +0 -10
  58. data/lib/karafka/params/builders/metadata.rb +0 -33
@@ -24,83 +24,89 @@ module Karafka
  # default Kafka groups namespaces and identify that app in kafka
  setting :client_id
  # What backend do we want to use to process messages
- setting :backend, :inline
+ setting :backend, default: :inline
  # option logger [Instance] logger that we want to use
- setting :logger, ::Karafka::Instrumentation::Logger.new
+ setting :logger, default: ::Karafka::Instrumentation::Logger.new
  # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
- setting :monitor, ::Karafka::Instrumentation::Monitor.new
+ setting :monitor, default: ::Karafka::Instrumentation::Monitor.new
  # Mapper used to remap consumer groups ids, so in case users migrate from other tools
  # or they need to maintain their own internal consumer group naming conventions, they
  # can easily do it, replacing the default client_id + consumer name pattern concept
- setting :consumer_mapper, Routing::ConsumerMapper.new
+ setting :consumer_mapper, default: Routing::ConsumerMapper.new
  # Mapper used to remap names of topics, so we can have a clean internal topic naming
  # despite using any Kafka provider that uses namespacing, etc
  # It needs to implement two methods:
  # - #incoming - for remapping from the incoming message to our internal format
  # - #outgoing - for remapping from internal topic name into outgoing message
- setting :topic_mapper, Routing::TopicMapper.new
+ setting :topic_mapper, default: Routing::TopicMapper.new
  # Default serializer for converting whatever we want to send to kafka to json
- setting :serializer, Karafka::Serialization::Json::Serializer.new
+ setting :serializer, default: Karafka::Serialization::Json::Serializer.new
  # Default deserializer for converting incoming data into ruby objects
- setting :deserializer, Karafka::Serialization::Json::Deserializer.new
+ setting :deserializer, default: Karafka::Serialization::Json::Deserializer.new
  # If batch_fetching is true, we will fetch kafka messages in batches instead of 1 by 1
  # @note Fetching does not equal consuming, see batch_consuming description for details
- setting :batch_fetching, true
+ setting :batch_fetching, default: true
  # If batch_consuming is true, we will have access to #params_batch instead of #params.
  # #params_batch will contain params received from Kafka (may be more than 1) so we can
  # process them in batches
- setting :batch_consuming, false
+ setting :batch_consuming, default: false
  # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
  # longer wait for the consumers to stop gracefully but instead we force terminate
  # everything.
- setting :shutdown_timeout, 60
+ setting :shutdown_timeout, default: 60

  # option kafka [Hash] - optional - kafka configuration options
  setting :kafka do
  # Array with at least one host
- setting :seed_brokers, %w[kafka://127.0.0.1:9092]
+ setting :seed_brokers, default: %w[kafka://127.0.0.1:9092]
  # option session_timeout [Integer] the number of seconds after which, if a client
  # hasn't contacted the Kafka cluster, it will be kicked out of the group.
- setting :session_timeout, 30
+ setting :session_timeout, default: 30
  # Time that a given partition will be paused from fetching messages, when message
  # consumption fails. It allows us to process other partitions, while the error is being
  # resolved and also "slows" things down, so it prevents from "eating" up all messages and
  # consuming them with failed code. Use `nil` if you want to pause forever and never retry.
- setting :pause_timeout, 10
+ setting :pause_timeout, default: 10
  # option pause_max_timeout [Integer, nil] the maximum number of seconds to pause for,
  # or `nil` if no maximum should be enforced.
- setting :pause_max_timeout, nil
+ setting :pause_max_timeout, default: nil
  # option pause_exponential_backoff [Boolean] whether to enable exponential backoff
- setting :pause_exponential_backoff, false
+ setting :pause_exponential_backoff, default: false
  # option offset_commit_interval [Integer] the interval between offset commits,
  # in seconds.
- setting :offset_commit_interval, 10
+ setting :offset_commit_interval, default: 10
  # option offset_commit_threshold [Integer] the number of messages that can be
  # processed before their offsets are committed. If zero, offset commits are
  # not triggered by message consumption.
- setting :offset_commit_threshold, 0
+ setting :offset_commit_threshold, default: 0
  # option heartbeat_interval [Integer] the interval between heartbeats; must be less
  # than the session window.
- setting :heartbeat_interval, 10
+ setting :heartbeat_interval, default: 10
  # option offset_retention_time [Integer] The length of the retention window, known as
  # offset retention time
- setting :offset_retention_time, nil
+ setting :offset_retention_time, default: nil
  # option fetcher_max_queue_size [Integer] max number of items in the fetch queue that
  # are stored for further processing. Note, that each item in the queue represents a
  # response from a single broker
- setting :fetcher_max_queue_size, 10
+ setting :fetcher_max_queue_size, default: 10
+ # option assignment_strategy [Object] a strategy determining the assignment of
+ # partitions to the consumers.
+ setting :assignment_strategy, default: Karafka::AssignmentStrategies::RoundRobin.new
  # option max_bytes_per_partition [Integer] the maximum amount of data fetched
  # from a single partition at a time.
- setting :max_bytes_per_partition, 1_048_576
+ setting :max_bytes_per_partition, default: 1_048_576
  # whether to consume messages starting at the beginning or to just consume new messages
- setting :start_from_beginning, true
+ setting :start_from_beginning, default: true
+ # option resolve_seed_brokers [Boolean] whether to resolve each hostname of the seed
+ # brokers
+ setting :resolve_seed_brokers, default: false
  # option min_bytes [Integer] the minimum number of bytes to read before
  # returning messages from the server; if `max_wait_time` is reached, this
  # is ignored.
- setting :min_bytes, 1
+ setting :min_bytes, default: 1
  # option max_bytes [Integer] the maximum number of bytes to read before returning messages
  # from each broker.
- setting :max_bytes, 10_485_760
+ setting :max_bytes, default: 10_485_760
  # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
  # wait before returning data from a single message fetch. By setting this high you also
  # increase the fetching throughput - and by setting it low you set a bound on latency.
@@ -108,63 +114,65 @@ module Karafka
  # time specified. The default value is one second. If you want to have at most five
  # seconds of latency, set `max_wait_time` to 5. You should make sure
  # max_wait_time * num brokers + heartbeat_interval is less than session_timeout.
- setting :max_wait_time, 1
+ setting :max_wait_time, default: 1
  # option automatically_mark_as_consumed [Boolean] should we automatically mark received
  # messages as consumed (processed) after non-error consumption
- setting :automatically_mark_as_consumed, true
+ setting :automatically_mark_as_consumed, default: true
  # option reconnect_timeout [Integer] How long should we wait before trying to reconnect to
  # Kafka cluster that went down (in seconds)
- setting :reconnect_timeout, 5
+ setting :reconnect_timeout, default: 5
  # option connect_timeout [Integer] Sets the number of seconds to wait while connecting to
  # a broker for the first time. When ruby-kafka initializes, it needs to connect to at
  # least one host.
- setting :connect_timeout, 10
+ setting :connect_timeout, default: 10
  # option socket_timeout [Integer] Sets the number of seconds to wait when reading from or
  # writing to a socket connection to a broker. After this timeout expires the connection
  # will be killed. Note that some Kafka operations are by definition long-running, such as
  # waiting for new messages to arrive in a partition, so don't set this value too low
- setting :socket_timeout, 30
+ setting :socket_timeout, default: 30
+ # option partitioner [Object, nil] the partitioner that should be used by the client
+ setting :partitioner, default: nil

  # SSL authentication related settings
  # option ca_cert [String, nil] SSL CA certificate
- setting :ssl_ca_cert, nil
+ setting :ssl_ca_cert, default: nil
  # option ssl_ca_cert_file_path [String, nil] SSL CA certificate file path
- setting :ssl_ca_cert_file_path, nil
+ setting :ssl_ca_cert_file_path, default: nil
  # option ssl_ca_certs_from_system [Boolean] Use the CA certs from your system's default
  # certificate store
- setting :ssl_ca_certs_from_system, false
+ setting :ssl_ca_certs_from_system, default: false
  # option ssl_verify_hostname [Boolean] Verify the hostname for client certs
- setting :ssl_verify_hostname, true
+ setting :ssl_verify_hostname, default: true
  # option ssl_client_cert [String, nil] SSL client certificate
- setting :ssl_client_cert, nil
+ setting :ssl_client_cert, default: nil
  # option ssl_client_cert_key [String, nil] SSL client certificate password
- setting :ssl_client_cert_key, nil
+ setting :ssl_client_cert_key, default: nil
  # option sasl_gssapi_principal [String, nil] sasl principal
- setting :sasl_gssapi_principal, nil
+ setting :sasl_gssapi_principal, default: nil
  # option sasl_gssapi_keytab [String, nil] sasl keytab
- setting :sasl_gssapi_keytab, nil
+ setting :sasl_gssapi_keytab, default: nil
  # option sasl_plain_authzid [String] The authorization identity to use
- setting :sasl_plain_authzid, ''
+ setting :sasl_plain_authzid, default: ''
  # option sasl_plain_username [String, nil] The username used to authenticate
- setting :sasl_plain_username, nil
+ setting :sasl_plain_username, default: nil
  # option sasl_plain_password [String, nil] The password used to authenticate
- setting :sasl_plain_password, nil
+ setting :sasl_plain_password, default: nil
  # option sasl_scram_username [String, nil] The username used to authenticate
- setting :sasl_scram_username, nil
+ setting :sasl_scram_username, default: nil
  # option sasl_scram_password [String, nil] The password used to authenticate
- setting :sasl_scram_password, nil
+ setting :sasl_scram_password, default: nil
  # option sasl_scram_mechanism [String, nil] Scram mechanism, either 'sha256' or 'sha512'
- setting :sasl_scram_mechanism, nil
+ setting :sasl_scram_mechanism, default: nil
  # option sasl_over_ssl [Boolean] whether to enforce SSL with SASL
- setting :sasl_over_ssl, true
+ setting :sasl_over_ssl, default: true
  # option ssl_client_cert_chain [String, nil] client cert chain or nil if not used
- setting :ssl_client_cert_chain, nil
+ setting :ssl_client_cert_chain, default: nil
  # option ssl_client_cert_key_password [String, nil] the password required to read
  # the ssl_client_cert_key
- setting :ssl_client_cert_key_password, nil
+ setting :ssl_client_cert_key_password, default: nil
  # @param sasl_oauth_token_provider [Object, nil] OAuthBearer Token Provider instance that
  # implements method token.
- setting :sasl_oauth_token_provider, nil
+ setting :sasl_oauth_token_provider, default: nil
  end

  # Namespace for internal settings that should not be modified
@@ -172,18 +180,18 @@ module Karafka
  # non global state
  setting :internal do
  # option routing_builder [Karafka::Routing::Builder] builder instance
- setting :routing_builder, Routing::Builder.new
+ setting :routing_builder, default: Routing::Builder.new
  # option status [Karafka::Status] app status
- setting :status, Status.new
+ setting :status, default: Status.new
  # option process [Karafka::Process] process status
  # @note In the future, we need to have a single process representation for all the karafka
  # instances
- setting :process, Process.new
+ setting :process, default: Process.new
  # option fetcher [Karafka::Fetcher] fetcher instance
- setting :fetcher, Fetcher.new
+ setting :fetcher, default: Fetcher.new
  # option configurators [Array<Object>] all configurators that we want to run after
  # the setup
- setting :configurators, [Configurators::WaterDrop.new]
+ setting :configurators, default: [Configurators::WaterDrop.new]
  end

  class << self
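
The setting calls above now pass their defaults through the default: keyword required by newer dry-configurable releases, and 1.4 introduces three new kafka-scoped options: assignment_strategy, resolve_seed_brokers and partitioner. Below is a minimal sketch of overriding them from an application boot file, assuming the standard Karafka::App setup block that the 1.x template generates; the class name and values are illustrative.

    # karafka.rb boot file fragment (hypothetical application)
    class ExampleApp < Karafka::App
      setup do |config|
        config.client_id = 'example_app'
        config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
        # New in 1.4: resolve seed broker hostnames before connecting
        config.kafka.resolve_seed_brokers = true
        # New in 1.4: pluggable partition assignment strategy (round robin by default)
        config.kafka.assignment_strategy = Karafka::AssignmentStrategies::RoundRobin.new
        # New in 1.4: optional custom partitioner handed down to ruby-kafka (nil by default)
        config.kafka.partitioner = nil
      end
    end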
data/lib/karafka/templates/karafka.rb.erb CHANGED
@@ -54,7 +54,7 @@ class KarafkaApp < Karafka::App
  # listen to only what you really need for given environment.
  Karafka.monitor.subscribe(WaterDrop::Instrumentation::StdoutListener.new)
  Karafka.monitor.subscribe(Karafka::Instrumentation::StdoutListener.new)
- Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)
+ # Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)

  # Uncomment that in order to achieve code reload in development mode
  # Be aware, that this might have some side-effects. Please refer to the wiki
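
The regenerated boot file no longer subscribes the process-title listener by default. The listener class still ships with the gem (see lib/karafka/instrumentation/proctitle_listener.rb in the file list above), so an application that wants process-title updates can keep the subscription in its own karafka.rb:

    # Optional: restore process-title instrumentation in your own boot file
    Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)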
data/lib/karafka/version.rb CHANGED
@@ -3,5 +3,5 @@
  # Main module namespace
  module Karafka
  # Current Karafka version
- VERSION = '1.3.0'
+ VERSION = '1.4.14'
  end
data/lib/karafka.rb CHANGED
@@ -1,14 +1,16 @@
  # frozen_string_literal: true

  %w[
+ delegate
  English
  waterdrop
  kafka
  envlogic
+ json
  thor
  forwardable
  fileutils
- multi_json
+ concurrent
  dry-configurable
  dry-validation
  dry/events/publisher
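
The require list swaps multi_json for the json standard library (plus delegate and concurrent), so the bundled JSON deserializer now parses payloads with JSON directly. A custom deserializer sketch, assuming the 1.4 contract of responding to #call(params) and reading params.raw_payload; the class name and parse options are illustrative:

    # Hypothetical drop-in alternative to the default JSON deserializer
    class SymbolizedJsonDeserializer
      # @param params [Karafka::Params::Params] a single message envelope
      # @return [Hash] payload parsed with the json stdlib, with symbolized keys
      def call(params)
        ::JSON.parse(params.raw_payload, symbolize_names: true)
      end
    end

Such a deserializer is typically attached per topic through the routing's deserializer option.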
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka
  version: !ruby/object:Gem::Version
- version: 1.3.0
+ version: 1.4.14
  platform: ruby
  authors:
  - Maciej Mensfeld
@@ -12,203 +12,176 @@ bindir: bin
  cert_chain:
  - |
  -----BEGIN CERTIFICATE-----
- MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
- ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMTkwNzMwMTQ1NDU0WhcNMjAwNzI5MTQ1
- NDU0WjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
- CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQC9fCwtaHZG2SyyNXiH8r0QbJQx/xxl
- dkvwWz9QGJO+O8rEx20FB1Ab+MVkfOscwIv5jWpmk1U9whzDPl1uFtIbgu+sk+Zb
- uQlZyK/DPN6c+/BbBL+RryTBRyvkPLoCVwm7uxc/JZ1n4AI6eF4cCZ2ieZ9QgQbU
- MQs2QPqs9hT50Ez/40GnOdadVfiDDGz+NME2C4ms0BriXwZ1tcRTfJIHe2xjIbbb
- y5qRGfsLKcgMzvLQR24olixyX1MR0s4+Wveq3QL/gBhL4veUcv+UABJA8IJR0kyB
- seHHutusiwZ1v3SjjjW1xLLrc2ARV0mgCb0WaK2T4iA3oFTGLh6Ydz8LNl31KQFv
- 94nRd8IhmJxrhQ6dQ/WT9IXoa5S9lfT5lPJeINemH4/6QPABzf9W2IZlCdI9wCdB
- TBaw57MKneGAYZiKjw6OALSy2ltQUCl3RqFl3VP7n8uFy1U987Q5VIIQ3O1UUsQD
- Oe/h+r7GUU4RSPKgPlrwvW9bD/UQ+zF51v8CAwEAAaN3MHUwCQYDVR0TBAIwADAL
- BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFJNIBHdfEUD7TqHqIer2YhWaWhwcMB0GA1Ud
- EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
- c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBAKA4eqko6BTNhlysip6rfBkVTGri
- ZXsL+kRb2hLvsQJS/kLyM21oMlu+LN0aPj3qEFR8mE/YeDD8rLAfruBRTltPNbR7
- xA5eE1gkxY5LfExUtK3b2wPqfmo7mZgfcsMwfYg/tUXw1WpBCnrhAJodpGH6SXmp
- A40qFUZst0vjiOoO+aTblIHPmMJXoZ3K42dTlNKlEiDKUWMRKSgpjjYGEYalFNWI
- hHfCz2r8L2t+dYdMZg1JGbEkq4ADGsAA8ioZIpJd7V4hI17u5TCdi7X5wh/0gN0E
- CgP+nLox3D+l2q0QuQEkayr+auFYkzTCkF+BmEk1D0Ru4mcf3F4CJvEmW4Pzbjqt
- i1tsCWPtJ4E/UUKnKaWKqGbjrjHJ0MuShYzHkodox5IOiCXIQg+1+YSzfXUV6WEK
- KJG/fhg1JV5vVDdVy6x+tv5SQ5ctU0feCsVfESi3rE3zRd+nvzE9HcZ5aXeL1UtJ
- nT5Xrioegu2w1jPyVEgyZgTZC5rvD0nNS5sFNQ==
+ MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
+ YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
+ MB4XDTIyMDgxOTE3MjEzN1oXDTIzMDgxOTE3MjEzN1owPzEQMA4GA1UEAwwHY29u
+ dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
+ bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAODzeO3L6lxdATzMHKNW
+ jFA/GGunoPuylO/BMzy8RiQHh7VIvysAKs0tHhTx3g2D0STDpF+hcQcPELFikiT2
+ F+1wOHj/SsrK7VKqfA8+gq04hKc5sQoX2Egf9k3V0YJ3eZ6R/koHkQ8A0TVt0w6F
+ ZQckoV4MqnEAx0g/FZN3mnHTlJ3VFLSBqJEIe+S6FZMl92mSv+hTrlUG8VaYxSfN
+ lTCvnKk284F6QZq5XIENLRmcDd/3aPBLnLwNnyMyhB+6gK8cUO+CFlDO5tjo/aBA
+ rUnl++wGG0JooF1ed0v+evOn9KoMBG6rHewcf79qJbVOscbD8qSAmo+sCXtcFryr
+ KRMTB8gNbowJkFRJDEe8tfRy11u1fYzFg/qNO82FJd62rKAw2wN0C29yCeQOPRb1
+ Cw9Y4ZwK9VFNEcV9L+3pHTHn2XfuZHtDaG198VweiF6raFO4yiEYccodH/USP0L5
+ cbcCFtmu/4HDSxL1ByQXO84A0ybJuk3/+aPUSXe9C9U8fwIDAQABo3cwdTAJBgNV
+ HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQUSlcEakb7gfn/5E2WY6z73BF/
+ iZkwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
+ bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEA1aS+E7RXJ1w9g9mJ
+ G0NzFxe64OEuENosNlvYQCbRKGCXAU1qqelYkBQHseRgRKxLICrnypRo9IEobyHa
+ vDnJ4r7Tsb34dleqQW2zY/obG+cia3Ym2JsegXWF7dDOzCXJ4FN8MFoT2jHlqLLw
+ yrap0YO5zx0GSQ0Dwy8h2n2v2vanMEeCx7iNm3ERgR5WuN5sjzWoz2A/JLEEcK0C
+ EnAGKCWAd1fuG8IemDjT1edsd5FyYR4bIX0m+99oDuFZyPiiIbalmyYiSBBp59Yb
+ Q0P8zeBi4OfwCZNcxqz0KONmw9JLNv6DgyEAH5xe/4JzhMEgvIRiPj0pHfA7oqQF
+ KUNqvD1KlxbEC+bZfE5IZhnqYLdld/Ksqd22FI1RBhiS1Ejfsj99LVIm9cBuZEY2
+ Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
+ MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
  -----END CERTIFICATE-----
- date: 2019-09-09 00:00:00.000000000 Z
+ date: 2022-10-14 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- name: dry-configurable
+ name: concurrent-ruby
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">="
  - !ruby/object:Gem::Version
- version: '0.8'
+ version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">="
  - !ruby/object:Gem::Version
- version: '0.8'
+ version: '0'
  - !ruby/object:Gem::Dependency
- name: dry-inflector
+ name: dry-configurable
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '0.1'
+ version: '0.16'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '0.1'
+ version: '0.16'
  - !ruby/object:Gem::Dependency
- name: dry-monitor
+ name: dry-inflector
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '0.3'
+ version: '0.2'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '0.3'
+ version: '0.2'
  - !ruby/object:Gem::Dependency
- name: dry-validation
+ name: dry-monitor
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.2'
+ version: '0.5'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.2'
+ version: '0.5'
  - !ruby/object:Gem::Dependency
- name: envlogic
+ name: dry-validation
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.1'
+ version: '1.7'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.1'
+ version: '1.7'
  - !ruby/object:Gem::Dependency
- name: irb
+ name: envlogic
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.0'
+ version: '1.1'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.0'
- - !ruby/object:Gem::Dependency
- name: multi_json
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '1.12'
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '1.12'
- - !ruby/object:Gem::Dependency
- name: rake
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '11.3'
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '11.3'
+ version: '1.1'
  - !ruby/object:Gem::Dependency
  name: ruby-kafka
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.7.8
+ version: 1.3.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.7.8
+ version: 1.3.0
  - !ruby/object:Gem::Dependency
  name: thor
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">="
  - !ruby/object:Gem::Version
- version: '0.20'
+ version: '1.1'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">="
  - !ruby/object:Gem::Version
- version: '0.20'
+ version: '1.1'
  - !ruby/object:Gem::Dependency
  name: waterdrop
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 1.3.0
+ version: '1.4'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 1.3.0
+ version: '1.4'
  - !ruby/object:Gem::Dependency
  name: zeitwerk
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.1'
+ version: '2.6'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.1'
+ version: '2.6'
  description: Framework used to simplify Apache Kafka based Ruby applications development
  email:
- - maciej@coditsu.io
+ - maciej@mensfeld.pl
  - pavlo.vavruk@gmail.com
  - adam99g@gmail.com
  executables:
@@ -218,14 +191,14 @@ extra_rdoc_files: []
  files:
  - ".coditsu/ci.yml"
  - ".console_irbrc"
- - ".github/FUNDING.yml"
+ - ".diffend.yml"
  - ".github/ISSUE_TEMPLATE/bug_report.md"
  - ".github/ISSUE_TEMPLATE/feature_request.md"
+ - ".github/workflows/ci.yml"
  - ".gitignore"
  - ".rspec"
  - ".ruby-gemset"
  - ".ruby-version"
- - ".travis.yml"
  - CHANGELOG.md
  - CODE_OF_CONDUCT.md
  - CONTRIBUTING.md
@@ -236,9 +209,11 @@ files:
  - bin/karafka
  - certs/mensfeld.pem
  - config/errors.yml
+ - docker-compose.yml
  - karafka.gemspec
  - lib/karafka.rb
  - lib/karafka/app.rb
+ - lib/karafka/assignment_strategies/round_robin.rb
  - lib/karafka/attributes_map.rb
  - lib/karafka/backends/inline.rb
  - lib/karafka/base_consumer.rb
@@ -249,6 +224,7 @@ files:
  - lib/karafka/cli/flow.rb
  - lib/karafka/cli/info.rb
  - lib/karafka/cli/install.rb
+ - lib/karafka/cli/missingno.rb
  - lib/karafka/cli/server.rb
  - lib/karafka/code_reloader.rb
  - lib/karafka/connection/api_adapter.rb
@@ -257,9 +233,9 @@ files:
  - lib/karafka/connection/client.rb
  - lib/karafka/connection/listener.rb
  - lib/karafka/connection/message_delegator.rb
+ - lib/karafka/consumers/batch_metadata.rb
  - lib/karafka/consumers/callbacks.rb
  - lib/karafka/consumers/includer.rb
- - lib/karafka/consumers/metadata.rb
  - lib/karafka/consumers/responders.rb
  - lib/karafka/consumers/single_params.rb
  - lib/karafka/contracts.rb
@@ -278,7 +254,8 @@ files:
  - lib/karafka/instrumentation/monitor.rb
  - lib/karafka/instrumentation/proctitle_listener.rb
  - lib/karafka/instrumentation/stdout_listener.rb
- - lib/karafka/params/builders/metadata.rb
+ - lib/karafka/params/batch_metadata.rb
+ - lib/karafka/params/builders/batch_metadata.rb
  - lib/karafka/params/builders/params.rb
  - lib/karafka/params/builders/params_batch.rb
  - lib/karafka/params/metadata.rb
@@ -310,11 +287,16 @@ files:
  - lib/karafka/templates/karafka.rb.erb
  - lib/karafka/version.rb
  - log/.gitkeep
- homepage: https://github.com/karafka/karafka
+ homepage: https://karafka.io
  licenses:
  - MIT
- metadata: {}
- post_install_message:
+ metadata:
+ source_code_uri: https://github.com/karafka/karafka
+ rubygems_mfa_required: 'true'
+ post_install_message: |
+ WARN: Karafka 1.4 will reach the end of life soon.
+ We highly recommend updating to Karafka 2.0.
+ Visit this page for more details: https://karafka.io/docs/Versions-Lifecycle-and-EOL
  rdoc_options: []
  require_paths:
  - lib
@@ -322,14 +304,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 2.4.0
+ version: '2.7'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.0.3
+ rubygems_version: 3.3.7
  signing_key:
  specification_version: 4
  summary: Ruby based framework for working with Apache Kafka
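
The gemspec now requires Ruby 2.7+, floors ruby-kafka at 1.3.0, moves waterdrop to ~> 1.4 and adds a post-install end-of-life warning pointing at Karafka 2.0. A Gemfile sketch for applications tracking this release line; the pins mirror the constraints above and are otherwise illustrative:

    # Gemfile
    source 'https://rubygems.org'

    # Karafka 1.4.x requires Ruby >= 2.7 (see required_ruby_version above)
    gem 'karafka', '~> 1.4'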
metadata.gz.sig CHANGED
Binary file
data/.github/FUNDING.yml DELETED
@@ -1,3 +0,0 @@
- # These are supported funding model platforms
-
- open_collective: karafka