ruby-kafka-aws-iam 1.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Files changed (145)
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +393 -0
  3. data/.github/workflows/stale.yml +19 -0
  4. data/.gitignore +13 -0
  5. data/.readygo +1 -0
  6. data/.rspec +3 -0
  7. data/.rubocop.yml +44 -0
  8. data/.ruby-version +1 -0
  9. data/.yardopts +3 -0
  10. data/CHANGELOG.md +314 -0
  11. data/Gemfile +5 -0
  12. data/ISSUE_TEMPLATE.md +23 -0
  13. data/LICENSE.txt +176 -0
  14. data/Procfile +2 -0
  15. data/README.md +1356 -0
  16. data/Rakefile +8 -0
  17. data/benchmarks/message_encoding.rb +23 -0
  18. data/bin/console +8 -0
  19. data/bin/setup +5 -0
  20. data/docker-compose.yml +39 -0
  21. data/examples/consumer-group.rb +35 -0
  22. data/examples/firehose-consumer.rb +64 -0
  23. data/examples/firehose-producer.rb +54 -0
  24. data/examples/simple-consumer.rb +34 -0
  25. data/examples/simple-producer.rb +42 -0
  26. data/examples/ssl-producer.rb +44 -0
  27. data/lib/kafka/async_producer.rb +297 -0
  28. data/lib/kafka/broker.rb +217 -0
  29. data/lib/kafka/broker_info.rb +16 -0
  30. data/lib/kafka/broker_pool.rb +41 -0
  31. data/lib/kafka/broker_uri.rb +43 -0
  32. data/lib/kafka/client.rb +838 -0
  33. data/lib/kafka/cluster.rb +513 -0
  34. data/lib/kafka/compression.rb +45 -0
  35. data/lib/kafka/compressor.rb +86 -0
  36. data/lib/kafka/connection.rb +228 -0
  37. data/lib/kafka/connection_builder.rb +33 -0
  38. data/lib/kafka/consumer.rb +642 -0
  39. data/lib/kafka/consumer_group/assignor.rb +63 -0
  40. data/lib/kafka/consumer_group.rb +231 -0
  41. data/lib/kafka/crc32_hash.rb +15 -0
  42. data/lib/kafka/datadog.rb +420 -0
  43. data/lib/kafka/digest.rb +22 -0
  44. data/lib/kafka/fetch_operation.rb +115 -0
  45. data/lib/kafka/fetched_batch.rb +58 -0
  46. data/lib/kafka/fetched_batch_generator.rb +120 -0
  47. data/lib/kafka/fetched_message.rb +48 -0
  48. data/lib/kafka/fetched_offset_resolver.rb +48 -0
  49. data/lib/kafka/fetcher.rb +224 -0
  50. data/lib/kafka/gzip_codec.rb +34 -0
  51. data/lib/kafka/heartbeat.rb +25 -0
  52. data/lib/kafka/instrumenter.rb +38 -0
  53. data/lib/kafka/interceptors.rb +33 -0
  54. data/lib/kafka/lz4_codec.rb +27 -0
  55. data/lib/kafka/message_buffer.rb +87 -0
  56. data/lib/kafka/murmur2_hash.rb +17 -0
  57. data/lib/kafka/offset_manager.rb +259 -0
  58. data/lib/kafka/partitioner.rb +40 -0
  59. data/lib/kafka/pause.rb +92 -0
  60. data/lib/kafka/pending_message.rb +29 -0
  61. data/lib/kafka/pending_message_queue.rb +41 -0
  62. data/lib/kafka/produce_operation.rb +205 -0
  63. data/lib/kafka/producer.rb +528 -0
  64. data/lib/kafka/prometheus.rb +316 -0
  65. data/lib/kafka/protocol/add_offsets_to_txn_request.rb +29 -0
  66. data/lib/kafka/protocol/add_offsets_to_txn_response.rb +21 -0
  67. data/lib/kafka/protocol/add_partitions_to_txn_request.rb +34 -0
  68. data/lib/kafka/protocol/add_partitions_to_txn_response.rb +47 -0
  69. data/lib/kafka/protocol/alter_configs_request.rb +44 -0
  70. data/lib/kafka/protocol/alter_configs_response.rb +49 -0
  71. data/lib/kafka/protocol/api_versions_request.rb +21 -0
  72. data/lib/kafka/protocol/api_versions_response.rb +53 -0
  73. data/lib/kafka/protocol/consumer_group_protocol.rb +19 -0
  74. data/lib/kafka/protocol/create_partitions_request.rb +42 -0
  75. data/lib/kafka/protocol/create_partitions_response.rb +28 -0
  76. data/lib/kafka/protocol/create_topics_request.rb +45 -0
  77. data/lib/kafka/protocol/create_topics_response.rb +26 -0
  78. data/lib/kafka/protocol/decoder.rb +175 -0
  79. data/lib/kafka/protocol/delete_topics_request.rb +33 -0
  80. data/lib/kafka/protocol/delete_topics_response.rb +26 -0
  81. data/lib/kafka/protocol/describe_configs_request.rb +35 -0
  82. data/lib/kafka/protocol/describe_configs_response.rb +73 -0
  83. data/lib/kafka/protocol/describe_groups_request.rb +27 -0
  84. data/lib/kafka/protocol/describe_groups_response.rb +73 -0
  85. data/lib/kafka/protocol/encoder.rb +184 -0
  86. data/lib/kafka/protocol/end_txn_request.rb +29 -0
  87. data/lib/kafka/protocol/end_txn_response.rb +19 -0
  88. data/lib/kafka/protocol/fetch_request.rb +70 -0
  89. data/lib/kafka/protocol/fetch_response.rb +136 -0
  90. data/lib/kafka/protocol/find_coordinator_request.rb +29 -0
  91. data/lib/kafka/protocol/find_coordinator_response.rb +29 -0
  92. data/lib/kafka/protocol/heartbeat_request.rb +27 -0
  93. data/lib/kafka/protocol/heartbeat_response.rb +17 -0
  94. data/lib/kafka/protocol/init_producer_id_request.rb +26 -0
  95. data/lib/kafka/protocol/init_producer_id_response.rb +27 -0
  96. data/lib/kafka/protocol/join_group_request.rb +47 -0
  97. data/lib/kafka/protocol/join_group_response.rb +41 -0
  98. data/lib/kafka/protocol/leave_group_request.rb +25 -0
  99. data/lib/kafka/protocol/leave_group_response.rb +17 -0
  100. data/lib/kafka/protocol/list_groups_request.rb +23 -0
  101. data/lib/kafka/protocol/list_groups_response.rb +35 -0
  102. data/lib/kafka/protocol/list_offset_request.rb +53 -0
  103. data/lib/kafka/protocol/list_offset_response.rb +89 -0
  104. data/lib/kafka/protocol/member_assignment.rb +42 -0
  105. data/lib/kafka/protocol/message.rb +172 -0
  106. data/lib/kafka/protocol/message_set.rb +55 -0
  107. data/lib/kafka/protocol/metadata_request.rb +31 -0
  108. data/lib/kafka/protocol/metadata_response.rb +185 -0
  109. data/lib/kafka/protocol/offset_commit_request.rb +47 -0
  110. data/lib/kafka/protocol/offset_commit_response.rb +29 -0
  111. data/lib/kafka/protocol/offset_fetch_request.rb +38 -0
  112. data/lib/kafka/protocol/offset_fetch_response.rb +56 -0
  113. data/lib/kafka/protocol/produce_request.rb +94 -0
  114. data/lib/kafka/protocol/produce_response.rb +63 -0
  115. data/lib/kafka/protocol/record.rb +88 -0
  116. data/lib/kafka/protocol/record_batch.rb +223 -0
  117. data/lib/kafka/protocol/request_message.rb +26 -0
  118. data/lib/kafka/protocol/sasl_handshake_request.rb +33 -0
  119. data/lib/kafka/protocol/sasl_handshake_response.rb +28 -0
  120. data/lib/kafka/protocol/sync_group_request.rb +33 -0
  121. data/lib/kafka/protocol/sync_group_response.rb +26 -0
  122. data/lib/kafka/protocol/txn_offset_commit_request.rb +46 -0
  123. data/lib/kafka/protocol/txn_offset_commit_response.rb +47 -0
  124. data/lib/kafka/protocol.rb +225 -0
  125. data/lib/kafka/round_robin_assignment_strategy.rb +52 -0
  126. data/lib/kafka/sasl/awsmskiam.rb +128 -0
  127. data/lib/kafka/sasl/gssapi.rb +76 -0
  128. data/lib/kafka/sasl/oauth.rb +64 -0
  129. data/lib/kafka/sasl/plain.rb +39 -0
  130. data/lib/kafka/sasl/scram.rb +180 -0
  131. data/lib/kafka/sasl_authenticator.rb +73 -0
  132. data/lib/kafka/snappy_codec.rb +29 -0
  133. data/lib/kafka/socket_with_timeout.rb +96 -0
  134. data/lib/kafka/ssl_context.rb +66 -0
  135. data/lib/kafka/ssl_socket_with_timeout.rb +192 -0
  136. data/lib/kafka/statsd.rb +296 -0
  137. data/lib/kafka/tagged_logger.rb +77 -0
  138. data/lib/kafka/transaction_manager.rb +306 -0
  139. data/lib/kafka/transaction_state_machine.rb +72 -0
  140. data/lib/kafka/version.rb +5 -0
  141. data/lib/kafka/zstd_codec.rb +27 -0
  142. data/lib/kafka.rb +373 -0
  143. data/lib/ruby-kafka.rb +5 -0
  144. data/ruby-kafka.gemspec +54 -0
  145. metadata +520 -0
data/lib/kafka/protocol/create_partitions_response.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    class CreatePartitionsResponse
+      attr_reader :errors
+
+      def initialize(throttle_time_ms:, errors:)
+        @throttle_time_ms = throttle_time_ms
+        @errors = errors
+      end
+
+      def self.decode(decoder)
+        throttle_time_ms = decoder.int32
+        errors = decoder.array do
+          topic = decoder.string
+          error_code = decoder.int16
+          error_message = decoder.string
+          [topic, error_code, error_message]
+        end
+
+        new(throttle_time_ms: throttle_time_ms, errors: errors)
+      end
+    end
+
+  end
+end
data/lib/kafka/protocol/create_topics_request.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    class CreateTopicsRequest
+      def initialize(topics:, timeout:)
+        @topics, @timeout = topics, timeout
+      end
+
+      def api_key
+        CREATE_TOPICS_API
+      end
+
+      def api_version
+        0
+      end
+
+      def response_class
+        Protocol::CreateTopicsResponse
+      end
+
+      def encode(encoder)
+        encoder.write_array(@topics) do |topic, config|
+          encoder.write_string(topic)
+          encoder.write_int32(config.fetch(:num_partitions))
+          encoder.write_int16(config.fetch(:replication_factor))
+
+          # Replica assignments. We don't care.
+          encoder.write_array([])
+
+          encoder.write_array(config.fetch(:config)) do |config_name, config_value|
+            config_value = config_value.to_s unless config_value.nil?
+            encoder.write_string(config_name)
+            encoder.write_string(config_value)
+          end
+        end
+
+        # Timeout is in ms.
+        encoder.write_int32(@timeout * 1000)
+      end
+    end
+
+  end
+end
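The encode method above destructures each element of @topics as |topic, config| and calls config.fetch(:num_partitions), config.fetch(:replication_factor) and config.fetch(:config), so it expects a hash mapping topic names to option hashes. A minimal illustrative sketch of that shape (the topic name and settings below are hypothetical, not taken from the package):

```ruby
require "kafka"

# Hypothetical payload shape inferred from CreateTopicsRequest#encode above.
topics = {
  "greetings" => {
    num_partitions: 3,
    replication_factor: 2,
    config: { "cleanup.policy" => "compact", "retention.ms" => 86_400_000 },
  },
}

# The timeout is given in seconds; #encode multiplies it by 1000 before
# writing it as the request's millisecond timeout field.
request = Kafka::Protocol::CreateTopicsRequest.new(topics: topics, timeout: 30)
```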
data/lib/kafka/protocol/create_topics_response.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    class CreateTopicsResponse
+      attr_reader :errors
+
+      def initialize(errors:)
+        @errors = errors
+      end
+
+      def self.decode(decoder)
+        errors = decoder.array do
+          topic = decoder.string
+          error_code = decoder.int16
+
+          [topic, error_code]
+        end
+
+        new(errors: errors)
+      end
+    end
+
+  end
+end
data/lib/kafka/protocol/decoder.rb
@@ -0,0 +1,175 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    # A decoder wraps an IO object, making it easy to read specific data types
+    # from it. The Kafka protocol is not self-describing, so a client must call
+    # these methods in just the right order for things to work.
+    class Decoder
+      def self.from_string(str)
+        new(StringIO.new(str))
+      end
+
+      # Initializes a new decoder.
+      #
+      # @param io [IO] an object that acts as an IO.
+      def initialize(io)
+        @io = io
+      end
+
+      def eof?
+        @io.eof?
+      end
+
+      # Get some next bytes without touching the current io offset
+      #
+      # @return [Integer]
+      def peek(offset, length)
+        data = @io.read(offset + length)
+        return [] if data.nil?
+        @io.ungetc(data)
+        data.bytes[offset, offset + length] || []
+      end
+
+      # Decodes an 8-bit boolean from the IO object.
+      #
+      # @return [Boolean]
+      def boolean
+        read(1) == 0x1
+      end
+
+      # Decodes an 8-bit integer from the IO object.
+      #
+      # @return [Integer]
+      def int8
+        read(1).unpack("C").first
+      end
+
+      # Decodes a 16-bit integer from the IO object.
+      #
+      # @return [Integer]
+      def int16
+        read(2).unpack("s>").first
+      end
+
+      # Decodes a 32-bit integer from the IO object.
+      #
+      # @return [Integer]
+      def int32
+        read(4).unpack("l>").first
+      end
+
+      # Decodes a 64-bit integer from the IO object.
+      #
+      # @return [Integer]
+      def int64
+        read(8).unpack("q>").first
+      end
+
+      # Decodes an array from the IO object.
+      #
+      # The provided block will be called once for each item in the array. It is
+      # the responsibility of the block to decode the proper type in the block,
+      # since there's no information that allows the type to be inferred
+      # automatically.
+      #
+      # @return [Array]
+      def array(&block)
+        size = int32
+        size.times.map(&block)
+      end
+
+      # Decodes an array from the IO object.
+      # Just like #array except the size is in varint format
+      #
+      # @return [Array]
+      def varint_array(&block)
+        size = varint
+        size.times.map(&block)
+      end
+
+      # Decodes a string from the IO object.
+      #
+      # @return [String]
+      def string
+        size = int16
+
+        if size == -1
+          nil
+        else
+          read(size)
+        end
+      end
+
+      # Decodes a string from the IO object, the size is in varint format
+      #
+      # @return [String]
+      def varint_string
+        size = varint
+
+        if size == -1
+          nil
+        else
+          read(size)
+        end
+      end
+
+      # Read an integer under varints serializing from the IO object.
+      # https://developers.google.com/protocol-buffers/docs/encoding#varints
+      #
+      # @return [Integer]
+      def varint
+        group = 0
+        data = 0
+        while (chunk = int8) & 0x80 != 0
+          data |= (chunk & 0x7f) << group
+          group += 7
+        end
+        data |= chunk << group
+        data & 0b1 != 0 ? ~(data >> 1) : (data >> 1)
+      end
+
+      # Decodes a list of bytes from the IO object.
+      #
+      # @return [String]
+      def bytes
+        size = int32
+
+        if size == -1
+          nil
+        else
+          read(size)
+        end
+      end
+
+      # Decodes a list of bytes from the IO object. The size is in varint format
+      #
+      # @return [String]
+      def varint_bytes
+        size = varint
+
+        if size == -1
+          nil
+        else
+          read(size)
+        end
+      end
+
+      # Reads the specified number of bytes from the IO object, returning them
+      # as a String.
+      #
+      # @return [String]
+      def read(number_of_bytes)
+        return "" if number_of_bytes == 0
+
+        data = @io.read(number_of_bytes) or raise EOFError
+
+        # If the `read` call returned less data than expected we should not
+        # proceed.
+        raise EOFError if data.size != number_of_bytes
+
+        data
+      end
+    end
+  end
+end
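The class comment notes that the protocol is not self-describing, so fields must be read back in exactly the order they were written. A small usage sketch, assuming the gem is loaded with `require "kafka"` and using hand-built byte strings for illustration:

```ruby
require "kafka"

# A big-endian int32 followed by an int16 length-prefixed string, matching
# the wire layout that #int32 and #string expect.
raw = [42].pack("l>") + [5].pack("s>") + "topic"

decoder = Kafka::Protocol::Decoder.from_string(raw)
decoder.int32   # => 42
decoder.string  # => "topic"

# #varint uses zig-zag encoding, so the low bit carries the sign.
Kafka::Protocol::Decoder.from_string("\x02").varint  # => 1
Kafka::Protocol::Decoder.from_string("\x03").varint  # => -2
```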
data/lib/kafka/protocol/delete_topics_request.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    class DeleteTopicsRequest
+      def initialize(topics:, timeout:)
+        @topics, @timeout = topics, timeout
+      end
+
+      def api_key
+        DELETE_TOPICS_API
+      end
+
+      def api_version
+        0
+      end
+
+      def response_class
+        Protocol::DeleteTopicsResponse
+      end
+
+      def encode(encoder)
+        encoder.write_array(@topics) do |topic|
+          encoder.write_string(topic)
+        end
+        # Timeout is in ms.
+        encoder.write_int32(@timeout * 1000)
+      end
+    end
+
+  end
+end
data/lib/kafka/protocol/delete_topics_response.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    class DeleteTopicsResponse
+      attr_reader :errors
+
+      def initialize(errors:)
+        @errors = errors
+      end
+
+      def self.decode(decoder)
+        errors = decoder.array do
+          topic = decoder.string
+          error_code = decoder.int16
+
+          [topic, error_code]
+        end
+
+        new(errors: errors)
+      end
+    end
+
+  end
+end
data/lib/kafka/protocol/describe_configs_request.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    class DescribeConfigsRequest
+      def initialize(resources:)
+        @resources = resources
+      end
+
+      def api_key
+        DESCRIBE_CONFIGS_API
+      end
+
+      def api_version
+        0
+      end
+
+      def response_class
+        Protocol::DescribeConfigsResponse
+      end
+
+      def encode(encoder)
+        encoder.write_array(@resources) do |type, name, configs|
+          encoder.write_int8(type)
+          encoder.write_string(name)
+          encoder.write_array(configs) do |config|
+            encoder.write_string(config)
+          end
+        end
+      end
+    end
+
+  end
+end
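The encode block destructures each resource as |type, name, configs|, so @resources is expected to be a list of [resource_type, resource_name, config_names] triples. A hedged sketch (the resource type id, topic name and config keys below are illustrative):

```ruby
require "kafka"

# Illustrative only: 2 is the topic resource type in the Kafka protocol's
# resource-type table; the topic name and config keys are made up.
resources = [
  [2, "greetings", ["retention.ms", "cleanup.policy"]],
]

request = Kafka::Protocol::DescribeConfigsRequest.new(resources: resources)
```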
data/lib/kafka/protocol/describe_configs_response.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class DescribeConfigsResponse
+      class ResourceDescription
+        attr_reader :name, :type, :error_code, :error_message, :configs
+
+        def initialize(name:, type:, error_code:, error_message:, configs:)
+          @name = name
+          @type = type
+          @error_code = error_code
+          @error_message = error_message
+          @configs = configs
+        end
+      end
+
+      class ConfigEntry
+        attr_reader :name, :value, :read_only, :is_default, :is_sensitive
+
+        def initialize(name:, value:, read_only:, is_default:, is_sensitive:)
+          @name = name
+          @value = value
+          @read_only = read_only
+          @is_default = is_default
+          @is_sensitive = is_sensitive
+        end
+      end
+
+      attr_reader :resources
+
+      def initialize(throttle_time_ms:, resources:)
+        @throttle_time_ms = throttle_time_ms
+        @resources = resources
+      end
+
+      def self.decode(decoder)
+        throttle_time_ms = decoder.int32
+        resources = decoder.array do
+          error_code = decoder.int16
+          error_message = decoder.string
+
+          resource_type = decoder.int8
+          if Kafka::Protocol::RESOURCE_TYPES[resource_type].nil?
+            raise Kafka::ProtocolError, "Resource type not supported: #{resource_type}"
+          end
+          resource_name = decoder.string
+
+          configs = decoder.array do
+            ConfigEntry.new(
+              name: decoder.string,
+              value: decoder.string,
+              read_only: decoder.boolean,
+              is_default: decoder.boolean,
+              is_sensitive: decoder.boolean,
+            )
+          end
+
+          ResourceDescription.new(
+            type: RESOURCE_TYPES[resource_type],
+            name: resource_name,
+            error_code: error_code,
+            error_message: error_message,
+            configs: configs
+          )
+        end
+
+        new(throttle_time_ms: throttle_time_ms, resources: resources)
+      end
+    end
+
+  end
+end
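After decoding, #resources returns ResourceDescription objects whose #configs hold ConfigEntry instances. A short sketch of walking a decoded response (the `response` variable is assumed, not produced by this file):

```ruby
# Assuming `response` is an already-decoded DescribeConfigsResponse:
response.resources.each do |resource|
  resource.configs.each do |entry|
    next if entry.is_sensitive

    puts "#{resource.type}/#{resource.name}: #{entry.name} = #{entry.value} " \
         "(default: #{entry.is_default}, read-only: #{entry.read_only})"
  end
end
```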
data/lib/kafka/protocol/describe_groups_request.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class DescribeGroupsRequest
+      def initialize(group_ids:)
+        @group_ids = group_ids
+      end
+
+      def api_key
+        DESCRIBE_GROUPS_API
+      end
+
+      def api_version
+        0
+      end
+
+      def response_class
+        Protocol::DescribeGroupsResponse
+      end
+
+      def encode(encoder)
+        encoder.write_array(@group_ids) { |group_id| encoder.write_string(group_id) }
+      end
+    end
+  end
+end
data/lib/kafka/protocol/describe_groups_response.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class DescribeGroupsResponse
+      class Group
+        attr_reader :error_code, :group_id, :state, :members, :protocol
+
+        def initialize(error_code:, group_id:, protocol_type:, protocol:, state:, members:)
+          @error_code = error_code
+          @group_id = group_id
+          @protocol_type = protocol_type
+          @protocol = protocol
+          @state = state
+          @members = members
+        end
+      end
+
+      class Member
+        attr_reader :member_id, :client_id, :client_host, :member_assignment
+
+        def initialize(member_id:, client_id:, client_host:, member_assignment:)
+          @member_id = member_id
+          @client_id = client_id
+          @client_host = client_host
+          @member_assignment = member_assignment
+        end
+      end
+
+      attr_reader :error_code, :groups
+
+      def initialize(groups:)
+        @groups = groups
+      end
+
+      def self.decode(decoder)
+        groups = decoder.array do
+          error_code = decoder.int16
+          group_id = decoder.string
+          state = decoder.string
+          protocol_type = decoder.string
+          protocol = decoder.string
+
+          members = decoder.array do
+            member_id = decoder.string
+            client_id = decoder.string
+            client_host = decoder.string
+            _metadata = decoder.bytes
+            assignment = MemberAssignment.decode(Decoder.from_string(decoder.bytes))
+
+            Member.new(
+              member_id: member_id,
+              client_id: client_id,
+              client_host: client_host,
+              member_assignment: assignment
+            )
+          end
+
+          Group.new(
+            error_code: error_code,
+            group_id: group_id,
+            state: state,
+            protocol_type: protocol_type,
+            protocol: protocol,
+            members: members
+          )
+        end
+
+        new(groups: groups)
+      end
+    end
+  end
+end
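The decoded response exposes one Group per consumer group, each holding Member objects with their decoded MemberAssignment. A brief sketch of walking that structure (the `response` variable is assumed, and the #topics reader on the assignment comes from the gem's MemberAssignment class elsewhere in this diff, not from this file):

```ruby
# Assuming `response` is an already-decoded DescribeGroupsResponse:
response.groups.each do |group|
  puts "group=#{group.group_id} state=#{group.state} protocol=#{group.protocol}"

  group.members.each do |member|
    # MemberAssignment#topics maps topic names to assigned partition ids.
    partitions = member.member_assignment.topics
    puts "  #{member.member_id} @ #{member.client_host}: #{partitions.inspect}"
  end
end
```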