karafka-rdkafka 0.20.0.rc3-x86_64-linux-gnu
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.github/CODEOWNERS +3 -0
- data/.github/FUNDING.yml +1 -0
- data/.github/workflows/ci_linux_x86_64_gnu.yml +248 -0
- data/.github/workflows/ci_macos_arm64.yml +301 -0
- data/.github/workflows/push_linux_x86_64_gnu.yml +60 -0
- data/.github/workflows/push_ruby.yml +37 -0
- data/.github/workflows/verify-action-pins.yml +16 -0
- data/.gitignore +15 -0
- data/.rspec +2 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/.yardopts +2 -0
- data/CHANGELOG.md +323 -0
- data/Gemfile +5 -0
- data/MIT-LICENSE +22 -0
- data/README.md +177 -0
- data/Rakefile +96 -0
- data/docker-compose.yml +25 -0
- data/ext/README.md +19 -0
- data/ext/Rakefile +131 -0
- data/ext/build_common.sh +361 -0
- data/ext/build_linux_x86_64_gnu.sh +306 -0
- data/ext/build_macos_arm64.sh +550 -0
- data/ext/librdkafka.so +0 -0
- data/karafka-rdkafka.gemspec +61 -0
- data/lib/rdkafka/abstract_handle.rb +116 -0
- data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
- data/lib/rdkafka/admin/config_binding_result.rb +30 -0
- data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
- data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
- data/lib/rdkafka/admin/create_acl_report.rb +24 -0
- data/lib/rdkafka/admin/create_partitions_handle.rb +30 -0
- data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
- data/lib/rdkafka/admin/create_topic_handle.rb +32 -0
- data/lib/rdkafka/admin/create_topic_report.rb +24 -0
- data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
- data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
- data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
- data/lib/rdkafka/admin/delete_topic_handle.rb +32 -0
- data/lib/rdkafka/admin/delete_topic_report.rb +24 -0
- data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/describe_acl_report.rb +24 -0
- data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
- data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
- data/lib/rdkafka/admin.rb +832 -0
- data/lib/rdkafka/bindings.rb +582 -0
- data/lib/rdkafka/callbacks.rb +415 -0
- data/lib/rdkafka/config.rb +398 -0
- data/lib/rdkafka/consumer/headers.rb +79 -0
- data/lib/rdkafka/consumer/message.rb +86 -0
- data/lib/rdkafka/consumer/partition.rb +57 -0
- data/lib/rdkafka/consumer/topic_partition_list.rb +190 -0
- data/lib/rdkafka/consumer.rb +663 -0
- data/lib/rdkafka/error.rb +201 -0
- data/lib/rdkafka/helpers/oauth.rb +58 -0
- data/lib/rdkafka/helpers/time.rb +14 -0
- data/lib/rdkafka/metadata.rb +115 -0
- data/lib/rdkafka/native_kafka.rb +139 -0
- data/lib/rdkafka/producer/delivery_handle.rb +48 -0
- data/lib/rdkafka/producer/delivery_report.rb +45 -0
- data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
- data/lib/rdkafka/producer.rb +492 -0
- data/lib/rdkafka/version.rb +7 -0
- data/lib/rdkafka.rb +54 -0
- data/renovate.json +92 -0
- data/spec/rdkafka/abstract_handle_spec.rb +117 -0
- data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
- data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +54 -0
- data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +54 -0
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
- data/spec/rdkafka/admin_spec.rb +769 -0
- data/spec/rdkafka/bindings_spec.rb +222 -0
- data/spec/rdkafka/callbacks_spec.rb +20 -0
- data/spec/rdkafka/config_spec.rb +258 -0
- data/spec/rdkafka/consumer/headers_spec.rb +73 -0
- data/spec/rdkafka/consumer/message_spec.rb +139 -0
- data/spec/rdkafka/consumer/partition_spec.rb +57 -0
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +248 -0
- data/spec/rdkafka/consumer_spec.rb +1299 -0
- data/spec/rdkafka/error_spec.rb +95 -0
- data/spec/rdkafka/metadata_spec.rb +79 -0
- data/spec/rdkafka/native_kafka_spec.rb +130 -0
- data/spec/rdkafka/producer/delivery_handle_spec.rb +60 -0
- data/spec/rdkafka/producer/delivery_report_spec.rb +25 -0
- data/spec/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
- data/spec/rdkafka/producer/partitions_count_spec.rb +359 -0
- data/spec/rdkafka/producer_spec.rb +1234 -0
- data/spec/spec_helper.rb +181 -0
- metadata +244 -0
data/lib/rdkafka/consumer/topic_partition_list.rb
@@ -0,0 +1,190 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  class Consumer
+    # A list of topics with their partition information
+    class TopicPartitionList
+      # Create a topic partition list.
+      #
+      # @param data [Hash{String => nil,Partition}] The topic and partition data or nil to create an empty list
+      #
+      # @return [TopicPartitionList]
+      def initialize(data=nil)
+        @data = data || {}
+      end
+
+      # Number of items in the list
+      # @return [Integer]
+      def count
+        i = 0
+        @data.each do |_topic, partitions|
+          if partitions
+            i += partitions.count
+          else
+            i += 1
+          end
+        end
+        i
+      end
+
+      # Whether this list is empty
+      # @return [Boolean]
+      def empty?
+        @data.empty?
+      end
+
+      # Add a topic, optionally with partitions, to the list.
+      # Calling this method multiple times for the same topic will overwrite the previous configuration.
+      #
+      # @param topic [String] The topic's name
+      # @param partitions [Array<Integer>, Range<Integer>, Integer] The topic's partitions or partition count
+      #
+      # @return [nil]
+      #
+      # @example Add a topic with unassigned partitions
+      #   tpl.add_topic("topic")
+      #
+      # @example Add a topic with assigned partitions
+      #   tpl.add_topic("topic", (0..8))
+      #
+      # @example Add a topic with all partitions up to a count
+      #   tpl.add_topic("topic", 9)
+      #
+      def add_topic(topic, partitions=nil)
+        if partitions.nil?
+          @data[topic.to_s] = nil
+        else
+          if partitions.is_a? Integer
+            partitions = (0..partitions - 1)
+          end
+          @data[topic.to_s] = partitions.map { |p| Partition.new(p, nil, 0) }
+        end
+      end
+
+      # Add a topic with partitions and offsets set to the list
+      # Calling this method multiple times for the same topic will overwrite the previous configuration.
+      #
+      # @param topic [String] The topic's name
+      # @param partitions_with_offsets [Hash<Integer, Integer>] The topic's partitions and offsets
+      # @param partitions_with_offsets [Array<Consumer::Partition>] The topic's partitions with offsets
+      #   and metadata (if any)
+      #
+      # @return [nil]
+      def add_topic_and_partitions_with_offsets(topic, partitions_with_offsets)
+        @data[topic.to_s] = partitions_with_offsets.map do |p, o|
+          p.is_a?(Partition) ? p : Partition.new(p, o)
+        end
+      end
+
+      # Return a `Hash` with the topics as keys and an array of partition information as the value if present.
+      #
+      # @return [Hash{String => Array<Partition>,nil}]
+      def to_h
+        @data
+      end
+
+      # Human readable representation of this list.
+      # @return [String]
+      def to_s
+        "<TopicPartitionList: #{to_h}>"
+      end
+
+      def ==(other)
+        self.to_h == other.to_h
+      end
+
+      # Create a new topic partition list based on a native one.
+      #
+      # @private
+      #
+      # @param pointer [FFI::Pointer] Optional pointer to an existing native list. Its contents will be copied.
+      #
+      # @return [TopicPartitionList]
+      def self.from_native_tpl(pointer)
+        # Data to be moved into the tpl
+        data = {}
+
+        # Create struct and copy its contents
+        native_tpl = Rdkafka::Bindings::TopicPartitionList.new(pointer)
+        native_tpl[:cnt].times do |i|
+          ptr = native_tpl[:elems] + (i * Rdkafka::Bindings::TopicPartition.size)
+          elem = Rdkafka::Bindings::TopicPartition.new(ptr)
+          if elem[:partition] == -1
+            data[elem[:topic]] = nil
+          else
+            partitions = data[elem[:topic]] || []
+            offset = if elem[:offset] == Rdkafka::Bindings::RD_KAFKA_OFFSET_INVALID
+                       nil
+                     else
+                       elem[:offset]
+                     end
+
+            partition = Partition.new(
+              elem[:partition],
+              offset,
+              elem[:err],
+              elem[:metadata].null? ? nil : elem[:metadata].read_string(elem[:metadata_size])
+            )
+            partitions.push(partition)
+            data[elem[:topic]] = partitions
+          end
+        end
+
+        # Return the created object
+        TopicPartitionList.new(data)
+      end
+
+      # Create a native tpl with the contents of this object added.
+      #
+      # The pointer will be cleaned by `rd_kafka_topic_partition_list_destroy` when GC releases it.
+      #
+      # @private
+      # @return [FFI::Pointer]
+      def to_native_tpl
+        tpl = Rdkafka::Bindings.rd_kafka_topic_partition_list_new(count)
+
+        @data.each do |topic, partitions|
+          if partitions
+            partitions.each do |p|
+              ref = Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
+                tpl,
+                topic,
+                p.partition
+              )
+
+              # Remove the respond_to? check after karafka 2.3.0 is released
+              if p.respond_to?(:metadata) && p.metadata
+                part = Rdkafka::Bindings::TopicPartition.new(ref)
+                str_ptr = FFI::MemoryPointer.from_string(p.metadata)
+                # released here:
+                # https://github.com/confluentinc/librdkafka/blob/e03d3bb91ed92a38f38d9806b8d8deffe78a1de5/src/rdkafka_partition.c#L2682C18-L2682C18
+                str_ptr.autorelease = false
+                part[:metadata] = str_ptr
+                part[:metadata_size] = p.metadata.bytesize
+              end
+
+              if p.offset
+                offset = p.offset.is_a?(Time) ? p.offset.to_f * 1_000 : p.offset
+
+                Rdkafka::Bindings.rd_kafka_topic_partition_list_set_offset(
+                  tpl,
+                  topic,
+                  p.partition,
+                  offset
+                )
+              end
+            end
+          else
+            Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
+              tpl,
+              topic,
+              -1
+            )
+          end
+        end
+
+        tpl
+      end
+    end
+  end
+end
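
For orientation, the public part of the TopicPartitionList class added in this file can be exercised on its own. The sketch below is illustrative only: it assumes the gem from this release is installed and loaded via `require "rdkafka"`, the topic names and offsets are made up, and every method it calls (add_topic, add_topic_and_partitions_with_offsets, count, empty?, to_h) is taken from the diff above.

    # Illustrative use of Rdkafka::Consumer::TopicPartitionList
    # (topic names and offsets below are hypothetical)
    require "rdkafka"

    tpl = Rdkafka::Consumer::TopicPartitionList.new

    # A topic with no explicit partition assignment (stored as nil)
    tpl.add_topic("events")

    # A topic with partitions 0..8; passing the Integer 9 would be equivalent
    tpl.add_topic("logs", (0..8))

    # A topic with explicit per-partition offsets
    tpl.add_topic_and_partitions_with_offsets("metrics", { 0 => 100, 1 => 250 })

    puts tpl.count   # 1 + 9 + 2 => 12 entries
    puts tpl.empty?  # => false
    p tpl.to_h       # Hash of topic name => nil or Array<Partition>

The same object is what the consumer-facing APIs in data/lib/rdkafka/consumer.rb exchange with librdkafka through the private from_native_tpl / to_native_tpl conversions shown above.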