karafka 1.3.0 → 1.4.0.rc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +2 -2
- data.tar.gz.sig +0 -0
- data/.diffend.yml +3 -0
- data/.github/workflows/ci.yml +52 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +61 -15
- data/CODE_OF_CONDUCT.md +1 -1
- data/Gemfile +2 -0
- data/Gemfile.lock +59 -57
- data/README.md +3 -5
- data/certs/mensfeld.pem +21 -21
- data/docker-compose.yml +17 -0
- data/karafka.gemspec +5 -6
- data/lib/karafka.rb +2 -1
- data/lib/karafka/attributes_map.rb +2 -8
- data/lib/karafka/cli.rb +1 -1
- data/lib/karafka/cli/flow.rb +9 -6
- data/lib/karafka/cli/info.rb +1 -1
- data/lib/karafka/cli/install.rb +2 -0
- data/lib/karafka/connection/api_adapter.rb +12 -6
- data/lib/karafka/connection/batch_delegator.rb +5 -1
- data/lib/karafka/connection/builder.rb +4 -2
- data/lib/karafka/connection/client.rb +1 -1
- data/lib/karafka/connection/listener.rb +2 -2
- data/lib/karafka/consumers/batch_metadata.rb +10 -0
- data/lib/karafka/consumers/includer.rb +5 -4
- data/lib/karafka/contracts/consumer_group.rb +2 -2
- data/lib/karafka/contracts/server_cli_options.rb +2 -0
- data/lib/karafka/helpers/class_matcher.rb +1 -1
- data/lib/karafka/instrumentation/logger.rb +4 -3
- data/lib/karafka/instrumentation/stdout_listener.rb +4 -2
- data/lib/karafka/params/batch_metadata.rb +26 -0
- data/lib/karafka/params/builders/batch_metadata.rb +30 -0
- data/lib/karafka/params/builders/params.rb +17 -15
- data/lib/karafka/params/builders/params_batch.rb +2 -2
- data/lib/karafka/params/metadata.rb +14 -29
- data/lib/karafka/params/params.rb +24 -42
- data/lib/karafka/params/params_batch.rb +15 -16
- data/lib/karafka/serialization/json/deserializer.rb +2 -2
- data/lib/karafka/server.rb +4 -1
- data/lib/karafka/setup/config.rb +2 -0
- data/lib/karafka/version.rb +1 -1
- metadata +40 -51
- metadata.gz.sig +0 -0
- data/.travis.yml +0 -36
- data/lib/karafka/consumers/metadata.rb +0 -10
- data/lib/karafka/params/builders/metadata.rb +0 -33
data/README.md
CHANGED
@@ -2,11 +2,9 @@
 
 [](https://travis-ci.org/karafka/karafka)
 
-
+**Note**: Documentation presented here refers to **not yet** released Karafka `1.4.x`.
 
-
-
-If you are looking for the documentation for Karafka `1.2.*`, it can be found [here](https://github.com/karafka/wiki/tree/1.2).
+If you are looking for the documentation for Karafka `1.3.x`, it can be found [here](https://github.com/karafka/wiki/tree/1.3).
 
 ## About Karafka
 
@@ -98,4 +96,4 @@ This project exists thanks to all the people who contribute.
 
 We are looking for sustainable sponsorship. If your company is relying on Karafka framework or simply want to see Karafka evolve faster to meet your requirements, please consider backing the project.
 
-Please contact [Maciej Mensfeld](mailto:maciej@
+Please contact [Maciej Mensfeld](mailto:maciej@mensfeld.pl) directly for more details.
data/certs/mensfeld.pem
CHANGED
@@ -1,25 +1,25 @@
 -----BEGIN CERTIFICATE-----
 MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
-(previous certificate body, 12 lines)
+ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMjAwODExMDkxNTM3WhcNMjEwODExMDkx
+NTM3WjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
+CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDCpXsCgmINb6lHBXXBdyrgsBPSxC4/
+2H+weJ6L9CruTiv2+2/ZkQGtnLcDgrD14rdLIHK7t0o3EKYlDT5GhD/XUVhI15JE
+N7IqnPUgexe1fbZArwQ51afxz2AmPQN2BkB2oeQHXxnSWUGMhvcEZpfbxCCJH26w
+hS0Ccsma8yxA6hSlGVhFVDuCr7c2L1di6cK2CtIDpfDaWqnVNJEwBYHIxrCoWK5g
+sIGekVt/admS9gRhIMaIBg+Mshth5/DEyWO2QjteTodItlxfTctrfmiAl8X8T5JP
+VXeLp5SSOJ5JXE80nShMJp3RFnGw5fqjX/ffjtISYh78/By4xF3a25HdWH9+qO2Z
+tx0wSGc9/4gqNM0APQnjN/4YXrGZ4IeSjtE+OrrX07l0TiyikzSLFOkZCAp8oBJi
+Fhlosz8xQDJf7mhNxOaZziqASzp/hJTU/tuDKl5+ql2icnMv5iV/i6SlmvU29QNg
+LCV71pUv0pWzN+OZbHZKWepGhEQ3cG9MwvkCAwEAAaN3MHUwCQYDVR0TBAIwADAL
+BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFImGed2AXS070ohfRidiCEhXEUN+MB0GA1Ud
 EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
-(previous certificate body, 9 lines)
+c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBAKiHpwoENVrMi94V1zD4o8/6G3AU
+gWz4udkPYHTZLUy3dLznc/sNjdkJFWT3E6NKYq7c60EpJ0m0vAEg5+F5pmNOsvD3
+2pXLj9kisEeYhR516HwXAvtngboUcb75skqvBCU++4Pu7BRAPjO1/ihLSBexbwSS
+fF+J5OWNuyHHCQp+kGPLtXJe2yUYyvSWDj3I2//Vk0VhNOIlaCS1+5/P3ZJThOtm
+zJUBI7h3HgovwRpcnmk2mXTmU4Zx/bCzX8EA6VY0khEvnmiq7S6eBF0H9qH8KyQ6
+EkVLpvmUDFcf/uNaBQdazEMB5jYtwoA8gQlANETNGPi51KlkukhKgaIEDMkBDJOx
+65N7DzmkcyY0/GwjIVIxmRhcrCt1YeCUElmfFx0iida1/YRm6sB2AXqScc1+ECRi
+2DND//YJUikn1zwbz1kT70XmHd97B4Eytpln7K+M1u2g1pHVEPW4owD/ammXNpUy
+nt70FcDD4yxJQ+0YNiHd0N8IcVBM1TMIVctMNQ==
 -----END CERTIFICATE-----
data/docker-compose.yml
ADDED
@@ -0,0 +1,17 @@
+version: '2'
+services:
+  zookeeper:
+    image: wurstmeister/zookeeper
+    ports:
+      - "2181:2181"
+  kafka:
+    image: wurstmeister/kafka:1.0.1
+    ports:
+      - "9092:9092"
+    environment:
+      KAFKA_ADVERTISED_HOST_NAME: localhost
+      KAFKA_ADVERTISED_PORT: 9092
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
data/karafka.gemspec
CHANGED
@@ -11,7 +11,7 @@ Gem::Specification.new do |spec|
   spec.version = ::Karafka::VERSION
   spec.platform = Gem::Platform::RUBY
   spec.authors = ['Maciej Mensfeld', 'Pavlo Vavruk', 'Adam Gwozdowski']
-  spec.email = %w[maciej@
+  spec.email = %w[maciej@mensfeld.pl pavlo.vavruk@gmail.com adam99g@gmail.com]
   spec.homepage = 'https://github.com/karafka/karafka'
   spec.summary = 'Ruby based framework for working with Apache Kafka'
   spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
@@ -23,14 +23,13 @@ Gem::Specification.new do |spec|
   spec.add_dependency 'dry-validation', '~> 1.2'
   spec.add_dependency 'envlogic', '~> 1.1'
   spec.add_dependency 'irb', '~> 1.0'
-  spec.add_dependency 'multi_json', '>= 1.12'
   spec.add_dependency 'rake', '>= 11.3'
-  spec.add_dependency 'ruby-kafka', '>= 0.
-  spec.add_dependency 'thor', '
-  spec.add_dependency 'waterdrop', '~> 1.
+  spec.add_dependency 'ruby-kafka', '>= 1.0.0'
+  spec.add_dependency 'thor', '>= 0.20'
+  spec.add_dependency 'waterdrop', '~> 1.4.0'
   spec.add_dependency 'zeitwerk', '~> 2.1'
 
-  spec.required_ruby_version = '>= 2.
+  spec.required_ruby_version = '>= 2.5.0'
 
   if $PROGRAM_NAME.end_with?('gem')
     spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
data/lib/karafka.rb
CHANGED
@@ -52,14 +52,8 @@ module Karafka
     ignored_settings = api_adapter[:subscribe]
     defined_settings = api_adapter.values.flatten
     karafka_settings = %i[batch_fetching]
-
-    dynamically_proxied = Karafka::Setup::Config
-                          ._settings
-                          .settings
-                          .find { |s| s.name == :kafka }
-                          .value
-                          .names
-                          .to_a
+
+    dynamically_proxied = Karafka::Setup::Config.config.kafka.to_h.keys
 
     (defined_settings + dynamically_proxied).uniq + karafka_settings - ignored_settings
   end
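The replacement relies only on dry-configurable's public interface: a nested `setting` block serializes to a hash, so its keys are exactly the dynamically proxied option names. A minimal standalone sketch (the `ExampleConfig` class and its settings are invented; assumes the dry-configurable 0.8-era API that Karafka 1.4 targets):

require 'dry/configurable'

class ExampleConfig
  extend Dry::Configurable

  # Nested block mirrors Karafka's `setting :kafka` namespace
  setting :kafka do
    setting :seed_brokers, %w[kafka://localhost:9092]
    setting :start_from_beginning, true
  end
end

# Same call shape as Karafka::Setup::Config.config.kafka.to_h.keys above
ExampleConfig.config.kafka.to_h.keys #=> [:seed_brokers, :start_from_beginning]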
data/lib/karafka/cli.rb
CHANGED
data/lib/karafka/cli/flow.rb
CHANGED
@@ -11,19 +11,22 @@ module Karafka
       def call
         topics.each do |topic|
           any_topics = !topic.responder&.topics.nil?
+          log_messages = []
 
           if any_topics
-
+            log_messages << "#{topic.name} =>"
 
             topic.responder.topics.each_value do |responder_topic|
               features = []
               features << (responder_topic.required? ? 'always' : 'conditionally')
 
-
+              log_messages << format(responder_topic.name, "(#{features.join(', ')})")
             end
           else
-
+            log_messages << "#{topic.name} => (nothing)"
           end
+
+          Karafka.logger.info(log_messages.join("\n"))
         end
       end
 
@@ -34,11 +37,11 @@ module Karafka
         Karafka::App.consumer_groups.map(&:topics).flatten.sort_by(&:name)
       end
 
-      #
+      # Formats a given value with label in a nice way
       # @param label [String] label describing value
      # @param value [String] value that should be printed
-      def
-
+      def format(label, value)
+        " - #{label}: #{value}"
       end
     end
   end
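To illustrate the rewritten `call`: with a hypothetical routing in which topic `visitors` responds to `visitors_stats`, the buffered lines reach the logger as a single entry per topic rather than line by line:

# What `call` effectively builds for that hypothetical topic
log_messages = []
log_messages << 'visitors =>'
log_messages << ' - visitors_stats: (always)' # produced by format(label, value)
Karafka.logger.info(log_messages.join("\n"))
# Logged as one entry:
#   visitors =>
#    - visitors_stats: (always)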
data/lib/karafka/cli/info.rb
CHANGED
data/lib/karafka/cli/install.rb
CHANGED
data/lib/karafka/connection/api_adapter.rb
CHANGED
@@ -14,11 +14,12 @@ module Karafka
     module ApiAdapter
       class << self
         # Builds all the configuration settings for Kafka.new method
+        # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
         # @return [Array<Hash>] Array with all the client arguments including hash with all
         #   the settings required by Kafka.new method
         # @note We return array, so we can inject any arguments we want, in case of changes in the
         #   raw driver
-        def client
+        def client(consumer_group)
          # This one is a default that takes all the settings except special
          # cases defined in the map
          settings = {
@@ -26,14 +27,17 @@ module Karafka
            client_id: ::Karafka::App.config.client_id
          }
 
-          kafka_configs.
+          kafka_configs.each_key do |setting_name|
            # All options for config adapter should be ignored as we're just interested
            # in what is left, as we want to pass all the options that are "typical"
            # and not listed in the api_adapter special cases mapping. All the values
            # from the api_adapter mapping go somewhere else, not to the client directly
            next if AttributesMap.api_adapter.values.flatten.include?(setting_name)
 
-
+            # Settings for each consumer group are either defined per consumer group or are
+            # inherited from the global/general settings level, thus we don't have to fetch them
+            # from the kafka settings as they are already on a consumer group level
+            settings[setting_name] = consumer_group.public_send(setting_name)
          end
 
          settings_hash = sanitize(settings)
@@ -105,11 +109,13 @@ module Karafka
        # Majority of users don't use custom topic mappers. No need to change anything when it
        # is a default mapper that does not change anything. Only some cloud providers require
        # topics to be remapped
-        return [params] if Karafka::App.config.topic_mapper.is_a?(
+        return [params.metadata] if Karafka::App.config.topic_mapper.is_a?(
+          Karafka::Routing::TopicMapper
+        )
 
        # @note We don't use tap as it is around 13% slower than non-dup version
-        dupped = params.dup
-        dupped['topic'] = Karafka::App.config.topic_mapper.outgoing(params.topic)
+        dupped = params.metadata.dup
+        dupped['topic'] = Karafka::App.config.topic_mapper.outgoing(params.metadata.topic)
        [dupped]
      end
 
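Passing the consumer group into `ApiAdapter.client` means every "typical" kafka option is read from the group via `public_send`, which either carries its own override or falls back to the inherited global value. A hedged routing sketch of what that enables (group, topic, consumer class, and the overridden setting are all illustrative; assumes the Karafka 1.x routing DSL):

class ExampleApp < Karafka::App
  consumer_groups.draw do
    consumer_group :critical do
      # Hypothetical per-group override; anything not set here is inherited
      # from the global config.kafka defaults
      session_timeout 20

      topic :payments do
        consumer PaymentsConsumer # hypothetical consumer class
      end
    end
  end
end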
data/lib/karafka/connection/batch_delegator.rb
CHANGED
@@ -23,7 +23,11 @@ module Karafka
         ) do
           # Due to how ruby-kafka is built, we have the metadata that is stored on the batch
           # level only available for batch consuming
-          consumer.
+          consumer.batch_metadata = Params::Builders::BatchMetadata.from_kafka_batch(
+            kafka_batch,
+            topic
+          )
+
           kafka_messages = kafka_batch.messages
 
           # Depending on a case (persisted or not) we might use new consumer instance per
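On the consuming side, the assigned object is reachable as `batch_metadata`. A minimal consumer sketch (class and topic are invented; requires `batch_fetching true` for the topic, per the includer change below):

class EventsConsumer < Karafka::BaseConsumer
  def consume
    # Assigned by the batch delegator right before #consume is called
    Karafka.logger.info(
      "Batch of #{batch_metadata.batch_size} messages from " \
      "#{batch_metadata.topic}/#{batch_metadata.partition}, " \
      "lag: #{batch_metadata.offset_lag}"
    )

    params_batch.each do |params|
      Karafka.logger.debug(params.payload)
    end
  end
end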
data/lib/karafka/connection/builder.rb
CHANGED
@@ -6,9 +6,11 @@ module Karafka
     module Builder
       class << self
         # Builds a Kafka::Client instance that we use to work with Kafka cluster
+        # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group for which we want
+        #   to have a new Kafka client
         # @return [::Kafka::Client] returns a Kafka client
-        def call
-          Kafka.new(*ApiAdapter.client)
+        def call(consumer_group)
+          Kafka.new(*ApiAdapter.client(consumer_group))
         end
       end
     end
data/lib/karafka/connection/client.rb
CHANGED
@@ -97,7 +97,7 @@ module Karafka
       def kafka_consumer
         # @note We don't cache the connection internally because we cache kafka_consumer that uses
         #   kafka client object instance
-        @kafka_consumer ||= Builder.call.consumer(
+        @kafka_consumer ||= Builder.call(consumer_group).consumer(
           *ApiAdapter.consumer(consumer_group)
         ).tap do |consumer|
           consumer_group.topics.each do |topic|
data/lib/karafka/connection/listener.rb
CHANGED
@@ -47,10 +47,10 @@ module Karafka
         end
       end
       # This is on purpose - see the notes for this method
-      # rubocop:disable RescueException
+      # rubocop:disable Lint/RescueException
     rescue Exception => e
       Karafka.monitor.instrument('connection.listener.fetch_loop.error', caller: self, error: e)
-      # rubocop:enable RescueException
+      # rubocop:enable Lint/RescueException
       # We can stop client without a problem, as it will reinitialize itself when running the
       # `fetch_loop` again
       @client.stop
data/lib/karafka/consumers/includer.rb
CHANGED
@@ -16,7 +16,7 @@ module Karafka
 
       bind_backend(consumer, topic)
       bind_params(consumer, topic)
-
+      bind_batch_metadata(consumer, topic)
       bind_responders(consumer, topic)
     end
 
@@ -40,13 +40,14 @@ module Karafka
       consumer.extend(SingleParams)
     end
 
-    # Adds an option to work with metadata for consumer instances that have
+    # Adds an option to work with batch metadata for consumer instances that have
+    # batch fetching enabled
     # @param consumer [Karafka::BaseConsumer] consumer instance
     # @param topic [Karafka::Routing::Topic] topic of a consumer class
-    def
+    def bind_batch_metadata(consumer, topic)
       return unless topic.batch_fetching
 
-      consumer.extend(
+      consumer.extend(BatchMetadata)
     end
 
     # Adds responders support for topics and consumers with responders defined for them
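The binding relies on plain `Object#extend`: the module lands in one instance's singleton class, so batch-only helpers exist only on consumers whose topic has batch fetching enabled. A self-contained illustration of that mechanism (module body simplified to an accessor):

module BatchMetadataExample
  attr_accessor :batch_metadata
end

consumer = Object.new
consumer.extend(BatchMetadataExample)

consumer.respond_to?(:batch_metadata)   #=> true
Object.new.respond_to?(:batch_metadata) #=> false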
data/lib/karafka/contracts/consumer_group.rb
CHANGED
@@ -178,9 +178,9 @@ module Karafka
       # @param value [String] potential RSA key value
       # @return [Boolean] is the given string a valid RSA key
       def valid_private_key?(value)
-        OpenSSL::PKey
+        OpenSSL::PKey.read(value)
         true
-      rescue OpenSSL::PKey::
+      rescue OpenSSL::PKey::PKeyError
         false
       end
 
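`OpenSSL::PKey.read` parses any supported PEM/DER encoded key and raises `OpenSSL::PKey::PKeyError` on invalid input, which is what the repaired predicate leans on. A standalone version of the same check:

require 'openssl'

def valid_private_key?(value)
  OpenSSL::PKey.read(value)
  true
rescue OpenSSL::PKey::PKeyError
  false
end

valid_private_key?(OpenSSL::PKey::RSA.new(2048).to_pem) #=> true
valid_private_key?('definitely not a key')              #=> false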
data/lib/karafka/contracts/server_cli_options.rb
CHANGED
@@ -6,6 +6,8 @@ module Karafka
     # We validate some basics + the list of consumer_groups on which we want to use, to make
     # sure that all of them are defined, plus that a pidfile does not exist
     class ServerCliOptions < Dry::Validation::Contract
+      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
+
       params do
         optional(:pid).filled(:str?)
         optional(:daemon).filled(:bool?)
data/lib/karafka/helpers/class_matcher.rb
CHANGED
@@ -44,7 +44,7 @@ module Karafka
       # @example From Namespaced::Super2Consumer matching responder
       #   matcher.name #=> Super2Responder
       def name
-        inflected =
+        inflected = +@klass.to_s.split('::').last.to_s
         # We inject the from into the name just in case it is missing as in a situation like
         # that it would just sanitize the name without adding the "to" postfix.
         # It could create cases when we want to build for example a responder to a consumer
data/lib/karafka/instrumentation/logger.rb
CHANGED
@@ -36,10 +36,11 @@ module Karafka
         .to(STDOUT, file)
     end
 
-    # Makes sure the log directory exists
+    # Makes sure the log directory exists as long as we can write to it
     def ensure_dir_exists
-
-
+      FileUtils.mkdir_p(File.dirname(log_path))
+    rescue Errno::EACCES
+      nil
     end
 
     # @return [Pathname] Path to a file to which we should log
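The rescue turns logger setup into a no-op on filesystems where the log directory cannot be created (for example read-only containers), while the STDOUT target configured above keeps working. A standalone equivalent (the path is illustrative; Karafka derives `log_path` from the app root):

require 'fileutils'

log_path = 'log/karafka.log' # illustrative

begin
  FileUtils.mkdir_p(File.dirname(log_path))
rescue Errno::EACCES
  nil # no write access: skip file logging, keep STDOUT
end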
data/lib/karafka/instrumentation/stdout_listener.rb
CHANGED
@@ -43,7 +43,7 @@ module Karafka
       #   so it returns a topic as a string, not a routing topic
       debug(
         <<~MSG.chomp.tr("\n", ' ')
-          Params deserialization for #{event[:caller].topic} topic
+          Params deserialization for #{event[:caller].metadata.topic} topic
           successful in #{event[:time]} ms
         MSG
       )
@@ -52,7 +52,9 @@ module Karafka
     # Logs unsuccessful deserialization attempts of incoming data
     # @param event [Dry::Events::Event] event details including payload
     def on_params_params_deserialize_error(event)
-
+      topic = event[:caller].metadata.topic
+      error = event[:error]
+      error "Params deserialization error for #{topic} topic: #{error}"
     end
 
     # Logs errors that occurred in a listener fetch loop
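Listener methods are matched to instrumentation events by name, so `params.params.deserialize_error` ends up in `on_params_params_deserialize_error` once the listener is subscribed, as the standard Karafka boot file does:

Karafka.monitor.subscribe(Karafka::Instrumentation::StdoutListener.new)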
data/lib/karafka/params/batch_metadata.rb
ADDED
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Params
+    # Simple batch metadata object that stores all non-message information received from Kafka
+    # cluster while fetching the data
+    # @note This metadata object refers to per batch metadata, not `#params.metadata`
+    BatchMetadata = Struct.new(
+      :batch_size,
+      :first_offset,
+      :highwater_mark_offset,
+      :unknown_last_offset,
+      :last_offset,
+      :offset_lag,
+      :deserializer,
+      :partition,
+      :topic,
+      keyword_init: true
+    ) do
+      # @return [Boolean] is the last offset known or unknown
+      def unknown_last_offset?
+        unknown_last_offset
+      end
+    end
+  end
+end
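A construction sketch for the new struct (all values invented): `keyword_init: true`, which requires Ruby 2.5 and thus matches the gemspec bump above, lets the builder pass fields by name, and the predicate wraps the raw boolean field:

metadata = Karafka::Params::BatchMetadata.new(
  batch_size: 3,
  first_offset: 100,
  highwater_mark_offset: 103,
  unknown_last_offset: false,
  last_offset: 102,
  offset_lag: 1,
  deserializer: nil,
  partition: 0,
  topic: 'events'
)

metadata.unknown_last_offset? #=> false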