karafka 1.3.0 → 1.4.0.rc1

Files changed (48)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +2 -2
  3. data.tar.gz.sig +0 -0
  4. data/.diffend.yml +3 -0
  5. data/.github/workflows/ci.yml +52 -0
  6. data/.ruby-version +1 -1
  7. data/CHANGELOG.md +61 -15
  8. data/CODE_OF_CONDUCT.md +1 -1
  9. data/Gemfile +2 -0
  10. data/Gemfile.lock +59 -57
  11. data/README.md +3 -5
  12. data/certs/mensfeld.pem +21 -21
  13. data/docker-compose.yml +17 -0
  14. data/karafka.gemspec +5 -6
  15. data/lib/karafka.rb +2 -1
  16. data/lib/karafka/attributes_map.rb +2 -8
  17. data/lib/karafka/cli.rb +1 -1
  18. data/lib/karafka/cli/flow.rb +9 -6
  19. data/lib/karafka/cli/info.rb +1 -1
  20. data/lib/karafka/cli/install.rb +2 -0
  21. data/lib/karafka/connection/api_adapter.rb +12 -6
  22. data/lib/karafka/connection/batch_delegator.rb +5 -1
  23. data/lib/karafka/connection/builder.rb +4 -2
  24. data/lib/karafka/connection/client.rb +1 -1
  25. data/lib/karafka/connection/listener.rb +2 -2
  26. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  27. data/lib/karafka/consumers/includer.rb +5 -4
  28. data/lib/karafka/contracts/consumer_group.rb +2 -2
  29. data/lib/karafka/contracts/server_cli_options.rb +2 -0
  30. data/lib/karafka/helpers/class_matcher.rb +1 -1
  31. data/lib/karafka/instrumentation/logger.rb +4 -3
  32. data/lib/karafka/instrumentation/stdout_listener.rb +4 -2
  33. data/lib/karafka/params/batch_metadata.rb +26 -0
  34. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  35. data/lib/karafka/params/builders/params.rb +17 -15
  36. data/lib/karafka/params/builders/params_batch.rb +2 -2
  37. data/lib/karafka/params/metadata.rb +14 -29
  38. data/lib/karafka/params/params.rb +24 -42
  39. data/lib/karafka/params/params_batch.rb +15 -16
  40. data/lib/karafka/serialization/json/deserializer.rb +2 -2
  41. data/lib/karafka/server.rb +4 -1
  42. data/lib/karafka/setup/config.rb +2 -0
  43. data/lib/karafka/version.rb +1 -1
  44. metadata +40 -51
  45. metadata.gz.sig +0 -0
  46. data/.travis.yml +0 -36
  47. data/lib/karafka/consumers/metadata.rb +0 -10
  48. data/lib/karafka/params/builders/metadata.rb +0 -33
data/lib/karafka/params/builders/batch_metadata.rb ADDED
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Params
+     module Builders
+       # Builder for creating batch metadata object based on the batch informations
+       module BatchMetadata
+         class << self
+           # Creates metadata based on the kafka batch data
+           # @param kafka_batch [Kafka::FetchedBatch] kafka batch details
+           # @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
+           # @return [Karafka::Params::BatchMetadata] batch metadata object
+           def from_kafka_batch(kafka_batch, topic)
+             Karafka::Params::BatchMetadata.new(
+               batch_size: kafka_batch.messages.count,
+               first_offset: kafka_batch.first_offset,
+               highwater_mark_offset: kafka_batch.highwater_mark_offset,
+               unknown_last_offset: kafka_batch.unknown_last_offset?,
+               last_offset: kafka_batch.last_offset,
+               offset_lag: kafka_batch.offset_lag,
+               deserializer: topic.deserializer,
+               partition: kafka_batch.partition,
+               topic: topic.name
+             ).freeze
+           end
+         end
+       end
+     end
+   end
+ end
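In 1.4 the per-batch details that used to live under `metadata` are built by this new builder and exposed to batch-consuming consumers as `batch_metadata`. A minimal consumer sketch (class name and logging are illustrative only, assuming batch fetching is enabled for the topic):

  # Hypothetical consumer showing the 1.4 batch_metadata accessor
  class EventsConsumer < ApplicationConsumer
    def consume
      # batch_metadata is a frozen Karafka::Params::BatchMetadata object
      Karafka.logger.info(
        "Received #{batch_metadata.batch_size} messages from " \
        "#{batch_metadata.topic}##{batch_metadata.partition} " \
        "(lag: #{batch_metadata.offset_lag})"
      )
    end
  end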
data/lib/karafka/params/builders/params.rb CHANGED
@@ -12,22 +12,24 @@ module Karafka
        class << self
          # @param kafka_message [Kafka::FetchedMessage] message fetched from Kafka
          # @param topic [Karafka::Routing::Topic] topic for which this message was fetched
-         # @return [Karafka::Params::Params] params object
+         # @return [Karafka::Params::Params] params object with payload and message metadata
          def from_kafka_message(kafka_message, topic)
-           Karafka::Params::Params
-             .new
-             .merge!(
-               'create_time' => kafka_message.create_time,
-               'headers' => kafka_message.headers || {},
-               'is_control_record' => kafka_message.is_control_record,
-               'key' => kafka_message.key,
-               'offset' => kafka_message.offset,
-               'deserializer' => topic.deserializer,
-               'partition' => kafka_message.partition,
-               'receive_time' => Time.now,
-               'topic' => kafka_message.topic,
-               'payload' => kafka_message.value
-             )
+           metadata = Karafka::Params::Metadata.new(
+             create_time: kafka_message.create_time,
+             headers: kafka_message.headers || {},
+             is_control_record: kafka_message.is_control_record,
+             key: kafka_message.key,
+             offset: kafka_message.offset,
+             deserializer: topic.deserializer,
+             partition: kafka_message.partition,
+             receive_time: Time.now,
+             topic: topic.name
+           ).freeze
+
+           Karafka::Params::Params.new(
+             kafka_message.value,
+             metadata
+           )
          end
        end
      end
data/lib/karafka/params/builders/params_batch.rb CHANGED
@@ -12,11 +12,11 @@ module Karafka
          # @param topic [Karafka::Routing::Topic] topic for which we're received messages
          # @return [Karafka::Params::ParamsBatch<Karafka::Params::Params>] batch with params
          def from_kafka_messages(kafka_messages, topic)
-           params_array = kafka_messages.map! do |message|
+           params_array = kafka_messages.map do |message|
              Karafka::Params::Builders::Params.from_kafka_message(message, topic)
            end

-           Karafka::Params::ParamsBatch.new(params_array)
+           Karafka::Params::ParamsBatch.new(params_array).freeze
          end
        end
      end
data/lib/karafka/params/metadata.rb CHANGED
@@ -2,34 +2,19 @@

  module Karafka
    module Params
-     # Simple metadata object that stores all non-message information received from Kafka cluster
-     # while fetching the data
-     class Metadata < Hash
-       # Attributes that should be accessible as methods as well (not only hash)
-       METHOD_ATTRIBUTES = %w[
-         batch_size
-         first_offset
-         highwater_mark_offset
-         last_offset
-         offset_lag
-         deserializer
-         partition
-         topic
-       ].freeze
-
-       private_constant :METHOD_ATTRIBUTES
-
-       METHOD_ATTRIBUTES.each do |attr|
-         # Defines a method call accessor to a particular hash field.
-         define_method(attr) do
-           self[attr]
-         end
-       end
-
-       # @return [Boolean] is the last offset known or unknown
-       def unknown_last_offset?
-         self['unknown_last_offset']
-       end
-     end
+     # Single message / params metadata details that can be accessed without the need for the
+     # payload deserialization
+     Metadata = Struct.new(
+       :create_time,
+       :headers,
+       :is_control_record,
+       :key,
+       :offset,
+       :deserializer,
+       :partition,
+       :receive_time,
+       :topic,
+       keyword_init: true
+     )
    end
  end
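Because Metadata is now a keyword-initialized Struct rather than a Hash subclass, per-message details are read through accessors instead of string keys. A small sketch of the access change inside a consumer that receives single messages (the variable usage is illustrative):

  # 1.3 (Hash-based):   params['topic'], params['partition'], params['headers']
  # 1.4 (Struct-based): accessors on params.metadata
  topic     = params.metadata.topic
  partition = params.metadata.partition
  headers   = params.metadata.headers
  key       = params.metadata.key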
data/lib/karafka/params/params.rb CHANGED
@@ -6,58 +6,40 @@ module Karafka
      # It provides lazy loading not only until the first usage, but also allows us to skip
      # using deserializer until we execute our logic. That way we can operate with
      # heavy-deserialization data without slowing down the whole application.
-     class Params < Hash
-       # Params attributes that should be available via a method call invocation for Kafka
-       # client compatibility.
-       # Kafka passes internally Kafka::FetchedMessage object and the ruby-kafka consumer
-       # uses those fields via method calls, so in order to be able to pass there our params
-       # objects, have to have same api.
-       METHOD_ATTRIBUTES = %w[
-         create_time
-         headers
-         is_control_record
-         key
-         offset
-         deserializer
-         deserialized
-         partition
-         receive_time
-         topic
-         payload
-       ].freeze
+     class Params
+       attr_reader :raw_payload, :metadata

-       private_constant :METHOD_ATTRIBUTES
-
-       METHOD_ATTRIBUTES.each do |attr|
-         # Defines a method call accessor to a particular hash field.
-         # @note Won't work for complex key names that contain spaces, etc
-         # @param key [Symbol] name of a field that we want to retrieve with a method call
-         # @example
-         #   key_attr_reader :example
-         #   params.example #=> 'my example payload'
-         define_method(attr) do
-           self[attr]
-         end
-       end
+       # @param raw_payload [Object] incoming payload before deserialization
+       # @param metadata [Karafka::Params::Metadata] message metadata object
+       def initialize(raw_payload, metadata)
+         @raw_payload = raw_payload
+         @metadata = metadata
+         @deserialized = false
+         @payload = nil
        end

-       # @return [Karafka::Params::Params] This method will trigger deserializer execution. If we
-       #   decide to retrieve data, deserializer will be executed to get data. Output of that will
-       #   be merged to the current object. This object will be also marked as already deserialized,
-       #   so we won't deserialize it again.
-       def deserialize!
-         return self if self['deserialized']
+       # @return [Object] lazy-deserialized data (deserialized upon first request)
+       def payload
+         return @payload if deserialized?
+
+         @payload = deserialize
+         # We mark deserialization as successful after deserialization, as in case of an error
+         # this won't be falsely set to true
+         @deserialized = true
+         @payload
+       end

-         self['deserialized'] = true
-         self['payload'] = deserialize
-         self
+       # @return [Boolean] did given params payload were deserialized already
+       def deserialized?
+         @deserialized
        end

        private

-       # @return [Object] deserialized data
+       # @return [Object] tries de-serializes data
        def deserialize
          Karafka.monitor.instrument('params.params.deserialize', caller: self) do
-           self['deserializer'].call(self)
+           metadata.deserializer.call(self)
          end
        rescue ::StandardError => e
          Karafka.monitor.instrument('params.params.deserialize.error', caller: self, error: e)
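With Params no longer a Hash, it is the payload accessor that triggers deserialization, and the result is memoized per message. A sketch of the 1.4 access pattern inside a consumer (the persistence call is hypothetical and assumes a JSON payload with the default deserializer):

  def consume
    params_batch.to_a.each do |params|
      next if params.metadata.is_control_record

      raw  = params.raw_payload   # untouched Kafka message value, no deserialization
      data = params.payload       # first call runs the deserializer, result is memoized
      params.deserialized?        #=> true after the call above

      EventStore.persist(data.fetch('id'), raw) # hypothetical application call
    end
  end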
data/lib/karafka/params/params_batch.rb CHANGED
@@ -15,47 +15,46 @@ module Karafka
        @params_array = params_array
      end

-     # @yieldparam [Karafka::Params::Params] each deserialized and loaded params instance
-     # @note Invocation of this method will cause loading and deserializing each param after
-     #   another. If you want to get access without deserializing, please access params_array
-     #   directly
+     # @yieldparam [Karafka::Params::Params] each params instance
+     # @note Invocation of this method will not cause loading and deserializing each param after
+     #   another.
      def each
-       @params_array.each { |param| yield(param.deserialize!) }
+       @params_array.each { |param| yield(param) }
      end

      # @return [Array<Karafka::Params::Params>] returns all the params in a loaded state, so they
      #   can be used for batch insert, etc. Without invoking all, up until first use, they won't
      #   be deserialized
      def deserialize!
-       each(&:itself)
+       each(&:payload)
      end

      # @return [Array<Object>] array with deserialized payloads. This method can be useful when
      #   we don't care about metadata and just want to extract all the data payloads from the
      #   batch
      def payloads
-       deserialize!.map(&:payload)
+       map(&:payload)
      end

-     # @return [Karafka::Params::Params] first element after the deserialization process
+     # @return [Karafka::Params::Params] first element
      def first
-       @params_array.first.deserialize!
+       @params_array.first
      end

-     # @return [Karafka::Params::Params] last element after the deserialization process
+     # @return [Karafka::Params::Params] last element
      def last
-       @params_array.last.deserialize!
-     end
-
-     # @return [Array<Karafka::Params::Params>] pure array with params (not deserialized)
-     def to_a
-       @params_array
+       @params_array.last
      end

      # @return [Integer] number of messages in the batch
      def size
        @params_array.size
      end
+
+     # @return [Array<Karafka::Params::Params>] pure array with params
+     def to_a
+       @params_array
+     end
    end
  end
 end
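Iterating a batch therefore no longer deserializes anything up front: each yields lazy params, and payloads deserializes on demand. A sketch of both styles in a consume method (field names are illustrative, assuming JSON payloads):

  def consume
    # Metadata-only pass: no deserialization happens here
    params_batch.each do |params|
      Karafka.logger.debug("offset=#{params.metadata.offset}")
    end

    # Deserializes each message payload on first access
    ids = params_batch.payloads.map { |payload| payload['id'] }
  end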
data/lib/karafka/serialization/json/deserializer.rb CHANGED
@@ -17,8 +17,8 @@ module Karafka
        #   }
        #   Deserializer.call(params) #=> { 'a' => 1 }
        def call(params)
-         ::MultiJson.load(params['payload'])
-       rescue ::MultiJson::ParseError => e
+         params.raw_payload.nil? ? nil : ::JSON.parse(params.raw_payload)
+       rescue ::JSON::ParserError => e
          raise ::Karafka::Errors::DeserializationError, e
        end
      end
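The bundled JSON deserializer now reads params.raw_payload (and the gem drops its multi_json dependency, see the metadata diff below). A custom deserializer follows the same call(params) contract; a sketch, with the class and topic names purely hypothetical:

  # Hypothetical custom deserializer: same contract, symbolized keys
  class SymbolizedJsonDeserializer
    def call(params)
      return nil if params.raw_payload.nil?

      ::JSON.parse(params.raw_payload, symbolize_names: true)
    rescue ::JSON::ParserError => e
      raise ::Karafka::Errors::DeserializationError, e
    end
  end

  # Wired up per topic in the routing:
  #   topic :users_events do
  #     consumer UsersEventsConsumer
  #     deserializer SymbolizedJsonDeserializer.new
  #   end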
data/lib/karafka/server.rb CHANGED
@@ -58,9 +58,12 @@ module Karafka
      def stop_supervised
        Karafka::App.stop!

+       # Temporary patch until https://github.com/dry-rb/dry-configurable/issues/93 is fixed
+       timeout = Thread.new { Karafka::App.config.shutdown_timeout }.join.value
+
        # We check from time to time (for the timeout period) if all the threads finished
        # their work and if so, we can just return and normal shutdown process will take place
-       (Karafka::App.config.shutdown_timeout * SUPERVISION_CHECK_FACTOR).to_i.times do
+       (timeout * SUPERVISION_CHECK_FACTOR).to_i.times do
          if consumer_threads.count(&:alive?).zero?
            Thread.new { Karafka.monitor.instrument('app.stopped') }.join
            return
data/lib/karafka/setup/config.rb CHANGED
@@ -124,6 +124,8 @@ module Karafka
        # will be killed. Note that some Kafka operations are by definition long-running, such as
        # waiting for new messages to arrive in a partition, so don't set this value too low
        setting :socket_timeout, 30
+       # option partitioner [Object, nil] the partitioner that should be used by the client
+       setting :partitioner, nil

        # SSL authentication related settings
        # option ca_cert [String, nil] SSL CA certificate
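The new kafka.partitioner option is handed to the underlying ruby-kafka client, which (as of ruby-kafka 1.0) calls the partitioner with the partition count and the message being produced. A sketch of wiring one in; the partitioning logic itself is made up:

  require 'zlib'

  # Hypothetical partitioner: route by a stable digest of the partition key
  class DigestPartitioner
    def call(partition_count, message)
      key = message.partition_key || message.key
      key ? Zlib.crc32(key) % partition_count : rand(partition_count)
    end
  end

  class KarafkaApp < Karafka::App
    setup do |config|
      config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
      config.kafka.partitioner = DigestPartitioner.new
    end
  end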
data/lib/karafka/version.rb CHANGED
@@ -3,5 +3,5 @@
  # Main module namespace
  module Karafka
    # Current Karafka version
-   VERSION = '1.3.0'
+   VERSION = '1.4.0.rc1'
  end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka
 version: !ruby/object:Gem::Version
-  version: 1.3.0
+  version: 1.4.0.rc1
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -13,30 +13,30 @@ cert_chain:
 - |
   -----BEGIN CERTIFICATE-----
   MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
-  ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMTkwNzMwMTQ1NDU0WhcNMjAwNzI5MTQ1
-  NDU0WjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
-  CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQC9fCwtaHZG2SyyNXiH8r0QbJQx/xxl
-  dkvwWz9QGJO+O8rEx20FB1Ab+MVkfOscwIv5jWpmk1U9whzDPl1uFtIbgu+sk+Zb
-  uQlZyK/DPN6c+/BbBL+RryTBRyvkPLoCVwm7uxc/JZ1n4AI6eF4cCZ2ieZ9QgQbU
-  MQs2QPqs9hT50Ez/40GnOdadVfiDDGz+NME2C4ms0BriXwZ1tcRTfJIHe2xjIbbb
-  y5qRGfsLKcgMzvLQR24olixyX1MR0s4+Wveq3QL/gBhL4veUcv+UABJA8IJR0kyB
-  seHHutusiwZ1v3SjjjW1xLLrc2ARV0mgCb0WaK2T4iA3oFTGLh6Ydz8LNl31KQFv
-  94nRd8IhmJxrhQ6dQ/WT9IXoa5S9lfT5lPJeINemH4/6QPABzf9W2IZlCdI9wCdB
-  TBaw57MKneGAYZiKjw6OALSy2ltQUCl3RqFl3VP7n8uFy1U987Q5VIIQ3O1UUsQD
-  Oe/h+r7GUU4RSPKgPlrwvW9bD/UQ+zF51v8CAwEAAaN3MHUwCQYDVR0TBAIwADAL
-  BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFJNIBHdfEUD7TqHqIer2YhWaWhwcMB0GA1Ud
+  ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMjAwODExMDkxNTM3WhcNMjEwODExMDkx
+  NTM3WjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
+  CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDCpXsCgmINb6lHBXXBdyrgsBPSxC4/
+  2H+weJ6L9CruTiv2+2/ZkQGtnLcDgrD14rdLIHK7t0o3EKYlDT5GhD/XUVhI15JE
+  N7IqnPUgexe1fbZArwQ51afxz2AmPQN2BkB2oeQHXxnSWUGMhvcEZpfbxCCJH26w
+  hS0Ccsma8yxA6hSlGVhFVDuCr7c2L1di6cK2CtIDpfDaWqnVNJEwBYHIxrCoWK5g
+  sIGekVt/admS9gRhIMaIBg+Mshth5/DEyWO2QjteTodItlxfTctrfmiAl8X8T5JP
+  VXeLp5SSOJ5JXE80nShMJp3RFnGw5fqjX/ffjtISYh78/By4xF3a25HdWH9+qO2Z
+  tx0wSGc9/4gqNM0APQnjN/4YXrGZ4IeSjtE+OrrX07l0TiyikzSLFOkZCAp8oBJi
+  Fhlosz8xQDJf7mhNxOaZziqASzp/hJTU/tuDKl5+ql2icnMv5iV/i6SlmvU29QNg
+  LCV71pUv0pWzN+OZbHZKWepGhEQ3cG9MwvkCAwEAAaN3MHUwCQYDVR0TBAIwADAL
+  BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFImGed2AXS070ohfRidiCEhXEUN+MB0GA1Ud
   EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
-  c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBAKA4eqko6BTNhlysip6rfBkVTGri
-  ZXsL+kRb2hLvsQJS/kLyM21oMlu+LN0aPj3qEFR8mE/YeDD8rLAfruBRTltPNbR7
-  xA5eE1gkxY5LfExUtK3b2wPqfmo7mZgfcsMwfYg/tUXw1WpBCnrhAJodpGH6SXmp
-  A40qFUZst0vjiOoO+aTblIHPmMJXoZ3K42dTlNKlEiDKUWMRKSgpjjYGEYalFNWI
-  hHfCz2r8L2t+dYdMZg1JGbEkq4ADGsAA8ioZIpJd7V4hI17u5TCdi7X5wh/0gN0E
-  CgP+nLox3D+l2q0QuQEkayr+auFYkzTCkF+BmEk1D0Ru4mcf3F4CJvEmW4Pzbjqt
-  i1tsCWPtJ4E/UUKnKaWKqGbjrjHJ0MuShYzHkodox5IOiCXIQg+1+YSzfXUV6WEK
-  KJG/fhg1JV5vVDdVy6x+tv5SQ5ctU0feCsVfESi3rE3zRd+nvzE9HcZ5aXeL1UtJ
-  nT5Xrioegu2w1jPyVEgyZgTZC5rvD0nNS5sFNQ==
+  c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBAKiHpwoENVrMi94V1zD4o8/6G3AU
+  gWz4udkPYHTZLUy3dLznc/sNjdkJFWT3E6NKYq7c60EpJ0m0vAEg5+F5pmNOsvD3
+  2pXLj9kisEeYhR516HwXAvtngboUcb75skqvBCU++4Pu7BRAPjO1/ihLSBexbwSS
+  fF+J5OWNuyHHCQp+kGPLtXJe2yUYyvSWDj3I2//Vk0VhNOIlaCS1+5/P3ZJThOtm
+  zJUBI7h3HgovwRpcnmk2mXTmU4Zx/bCzX8EA6VY0khEvnmiq7S6eBF0H9qH8KyQ6
+  EkVLpvmUDFcf/uNaBQdazEMB5jYtwoA8gQlANETNGPi51KlkukhKgaIEDMkBDJOx
+  65N7DzmkcyY0/GwjIVIxmRhcrCt1YeCUElmfFx0iida1/YRm6sB2AXqScc1+ECRi
+  2DND//YJUikn1zwbz1kT70XmHd97B4Eytpln7K+M1u2g1pHVEPW4owD/ammXNpUy
+  nt70FcDD4yxJQ+0YNiHd0N8IcVBM1TMIVctMNQ==
   -----END CERTIFICATE-----
-date: 2019-09-09 00:00:00.000000000 Z
+date: 2020-08-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: dry-configurable
@@ -122,20 +122,6 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '1.0'
-- !ruby/object:Gem::Dependency
-  name: multi_json
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '1.12'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '1.12'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
@@ -156,26 +142,26 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 0.7.8
+        version: 1.0.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 0.7.8
+        version: 1.0.0
 - !ruby/object:Gem::Dependency
   name: thor
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
         version: '0.20'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
         version: '0.20'
 - !ruby/object:Gem::Dependency
@@ -184,14 +170,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 1.3.0
+        version: 1.4.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 1.3.0
+        version: 1.4.0
 - !ruby/object:Gem::Dependency
   name: zeitwerk
   requirement: !ruby/object:Gem::Requirement
@@ -208,7 +194,7 @@ dependencies:
       version: '2.1'
 description: Framework used to simplify Apache Kafka based Ruby applications development
 email:
-- maciej@coditsu.io
+- maciej@mensfeld.pl
 - pavlo.vavruk@gmail.com
 - adam99g@gmail.com
 executables:
@@ -218,14 +204,15 @@ extra_rdoc_files: []
 files:
 - ".coditsu/ci.yml"
 - ".console_irbrc"
+- ".diffend.yml"
 - ".github/FUNDING.yml"
 - ".github/ISSUE_TEMPLATE/bug_report.md"
 - ".github/ISSUE_TEMPLATE/feature_request.md"
+- ".github/workflows/ci.yml"
 - ".gitignore"
 - ".rspec"
 - ".ruby-gemset"
 - ".ruby-version"
-- ".travis.yml"
 - CHANGELOG.md
 - CODE_OF_CONDUCT.md
 - CONTRIBUTING.md
@@ -236,6 +223,7 @@ files:
 - bin/karafka
 - certs/mensfeld.pem
 - config/errors.yml
+- docker-compose.yml
 - karafka.gemspec
 - lib/karafka.rb
 - lib/karafka/app.rb
@@ -257,9 +245,9 @@ files:
 - lib/karafka/connection/client.rb
 - lib/karafka/connection/listener.rb
 - lib/karafka/connection/message_delegator.rb
+- lib/karafka/consumers/batch_metadata.rb
 - lib/karafka/consumers/callbacks.rb
 - lib/karafka/consumers/includer.rb
-- lib/karafka/consumers/metadata.rb
 - lib/karafka/consumers/responders.rb
 - lib/karafka/consumers/single_params.rb
 - lib/karafka/contracts.rb
@@ -278,7 +266,8 @@ files:
 - lib/karafka/instrumentation/monitor.rb
 - lib/karafka/instrumentation/proctitle_listener.rb
 - lib/karafka/instrumentation/stdout_listener.rb
-- lib/karafka/params/builders/metadata.rb
+- lib/karafka/params/batch_metadata.rb
+- lib/karafka/params/builders/batch_metadata.rb
 - lib/karafka/params/builders/params.rb
 - lib/karafka/params/builders/params_batch.rb
 - lib/karafka/params/metadata.rb
@@ -322,14 +311,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: 2.4.0
+      version: 2.5.0
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - ">"
     - !ruby/object:Gem::Version
-      version: '0'
+      version: 1.3.1
 requirements: []
-rubygems_version: 3.0.3
+rubygems_version: 3.1.4
 signing_key:
 specification_version: 4
 summary: Ruby based framework for working with Apache Kafka