ruby-kafka-custom 0.7.7.26

Files changed (105)
  1. checksums.yaml +7 -0
  2. data/lib/kafka/async_producer.rb +279 -0
  3. data/lib/kafka/broker.rb +205 -0
  4. data/lib/kafka/broker_info.rb +16 -0
  5. data/lib/kafka/broker_pool.rb +41 -0
  6. data/lib/kafka/broker_uri.rb +43 -0
  7. data/lib/kafka/client.rb +754 -0
  8. data/lib/kafka/cluster.rb +455 -0
  9. data/lib/kafka/compression.rb +43 -0
  10. data/lib/kafka/compressor.rb +85 -0
  11. data/lib/kafka/connection.rb +220 -0
  12. data/lib/kafka/connection_builder.rb +33 -0
  13. data/lib/kafka/consumer.rb +592 -0
  14. data/lib/kafka/consumer_group.rb +208 -0
  15. data/lib/kafka/datadog.rb +413 -0
  16. data/lib/kafka/fetch_operation.rb +115 -0
  17. data/lib/kafka/fetched_batch.rb +54 -0
  18. data/lib/kafka/fetched_batch_generator.rb +117 -0
  19. data/lib/kafka/fetched_message.rb +47 -0
  20. data/lib/kafka/fetched_offset_resolver.rb +48 -0
  21. data/lib/kafka/fetcher.rb +221 -0
  22. data/lib/kafka/gzip_codec.rb +30 -0
  23. data/lib/kafka/heartbeat.rb +25 -0
  24. data/lib/kafka/instrumenter.rb +38 -0
  25. data/lib/kafka/lz4_codec.rb +23 -0
  26. data/lib/kafka/message_buffer.rb +87 -0
  27. data/lib/kafka/offset_manager.rb +248 -0
  28. data/lib/kafka/partitioner.rb +35 -0
  29. data/lib/kafka/pause.rb +92 -0
  30. data/lib/kafka/pending_message.rb +29 -0
  31. data/lib/kafka/pending_message_queue.rb +41 -0
  32. data/lib/kafka/produce_operation.rb +205 -0
  33. data/lib/kafka/producer.rb +504 -0
  34. data/lib/kafka/protocol.rb +217 -0
  35. data/lib/kafka/protocol/add_partitions_to_txn_request.rb +34 -0
  36. data/lib/kafka/protocol/add_partitions_to_txn_response.rb +47 -0
  37. data/lib/kafka/protocol/alter_configs_request.rb +44 -0
  38. data/lib/kafka/protocol/alter_configs_response.rb +49 -0
  39. data/lib/kafka/protocol/api_versions_request.rb +21 -0
  40. data/lib/kafka/protocol/api_versions_response.rb +53 -0
  41. data/lib/kafka/protocol/consumer_group_protocol.rb +19 -0
  42. data/lib/kafka/protocol/create_partitions_request.rb +42 -0
  43. data/lib/kafka/protocol/create_partitions_response.rb +28 -0
  44. data/lib/kafka/protocol/create_topics_request.rb +45 -0
  45. data/lib/kafka/protocol/create_topics_response.rb +26 -0
  46. data/lib/kafka/protocol/decoder.rb +175 -0
  47. data/lib/kafka/protocol/delete_topics_request.rb +33 -0
  48. data/lib/kafka/protocol/delete_topics_response.rb +26 -0
  49. data/lib/kafka/protocol/describe_configs_request.rb +35 -0
  50. data/lib/kafka/protocol/describe_configs_response.rb +73 -0
  51. data/lib/kafka/protocol/describe_groups_request.rb +27 -0
  52. data/lib/kafka/protocol/describe_groups_response.rb +73 -0
  53. data/lib/kafka/protocol/encoder.rb +184 -0
  54. data/lib/kafka/protocol/end_txn_request.rb +29 -0
  55. data/lib/kafka/protocol/end_txn_response.rb +19 -0
  56. data/lib/kafka/protocol/fetch_request.rb +70 -0
  57. data/lib/kafka/protocol/fetch_response.rb +136 -0
  58. data/lib/kafka/protocol/find_coordinator_request.rb +29 -0
  59. data/lib/kafka/protocol/find_coordinator_response.rb +29 -0
  60. data/lib/kafka/protocol/heartbeat_request.rb +27 -0
  61. data/lib/kafka/protocol/heartbeat_response.rb +17 -0
  62. data/lib/kafka/protocol/init_producer_id_request.rb +26 -0
  63. data/lib/kafka/protocol/init_producer_id_response.rb +27 -0
  64. data/lib/kafka/protocol/join_group_request.rb +41 -0
  65. data/lib/kafka/protocol/join_group_response.rb +33 -0
  66. data/lib/kafka/protocol/leave_group_request.rb +25 -0
  67. data/lib/kafka/protocol/leave_group_response.rb +17 -0
  68. data/lib/kafka/protocol/list_groups_request.rb +23 -0
  69. data/lib/kafka/protocol/list_groups_response.rb +35 -0
  70. data/lib/kafka/protocol/list_offset_request.rb +53 -0
  71. data/lib/kafka/protocol/list_offset_response.rb +89 -0
  72. data/lib/kafka/protocol/member_assignment.rb +42 -0
  73. data/lib/kafka/protocol/message.rb +172 -0
  74. data/lib/kafka/protocol/message_set.rb +55 -0
  75. data/lib/kafka/protocol/metadata_request.rb +31 -0
  76. data/lib/kafka/protocol/metadata_response.rb +185 -0
  77. data/lib/kafka/protocol/offset_commit_request.rb +47 -0
  78. data/lib/kafka/protocol/offset_commit_response.rb +29 -0
  79. data/lib/kafka/protocol/offset_fetch_request.rb +36 -0
  80. data/lib/kafka/protocol/offset_fetch_response.rb +56 -0
  81. data/lib/kafka/protocol/produce_request.rb +92 -0
  82. data/lib/kafka/protocol/produce_response.rb +63 -0
  83. data/lib/kafka/protocol/record.rb +88 -0
  84. data/lib/kafka/protocol/record_batch.rb +222 -0
  85. data/lib/kafka/protocol/request_message.rb +26 -0
  86. data/lib/kafka/protocol/sasl_handshake_request.rb +33 -0
  87. data/lib/kafka/protocol/sasl_handshake_response.rb +28 -0
  88. data/lib/kafka/protocol/sync_group_request.rb +33 -0
  89. data/lib/kafka/protocol/sync_group_response.rb +23 -0
  90. data/lib/kafka/round_robin_assignment_strategy.rb +54 -0
  91. data/lib/kafka/sasl/gssapi.rb +76 -0
  92. data/lib/kafka/sasl/oauth.rb +64 -0
  93. data/lib/kafka/sasl/plain.rb +39 -0
  94. data/lib/kafka/sasl/scram.rb +177 -0
  95. data/lib/kafka/sasl_authenticator.rb +61 -0
  96. data/lib/kafka/snappy_codec.rb +25 -0
  97. data/lib/kafka/socket_with_timeout.rb +96 -0
  98. data/lib/kafka/ssl_context.rb +66 -0
  99. data/lib/kafka/ssl_socket_with_timeout.rb +187 -0
  100. data/lib/kafka/statsd.rb +296 -0
  101. data/lib/kafka/tagged_logger.rb +72 -0
  102. data/lib/kafka/transaction_manager.rb +261 -0
  103. data/lib/kafka/transaction_state_machine.rb +72 -0
  104. data/lib/kafka/version.rb +5 -0
  105. metadata +461 -0
data/lib/kafka/protocol/consumer_group_protocol.rb
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+
+ module Kafka
+   module Protocol
+     class ConsumerGroupProtocol
+       def initialize(version: 0, topics:, user_data: nil)
+         @version = version
+         @topics = topics
+         @user_data = user_data
+       end
+
+       def encode(encoder)
+         encoder.write_int16(@version)
+         encoder.write_array(@topics) {|topic| encoder.write_string(topic) }
+         encoder.write_bytes(@user_data)
+       end
+     end
+   end
+ end
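Below is a minimal usage sketch, not part of the gem's diff, showing how this class could be driven by hand. It assumes Kafka::Protocol::Encoder wraps an IO object the same way the other requests in this changeset use it; the topic names are made up for illustration.

  require "stringio"
  require "kafka"

  buffer  = StringIO.new
  encoder = Kafka::Protocol::Encoder.new(buffer)

  protocol = Kafka::Protocol::ConsumerGroupProtocol.new(
    topics: ["greetings", "events"], # topics this group member wants to consume
    user_data: nil                   # opaque bytes; a null value is allowed
  )

  # Writes: int16 version, array of topic strings, nullable user-data bytes.
  protocol.encode(encoder)
  buffer.string.bytesize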
data/lib/kafka/protocol/create_partitions_request.rb
@@ -0,0 +1,42 @@
+ # frozen_string_literal: true
+
+ module Kafka
+   module Protocol
+
+     class CreatePartitionsRequest
+       def initialize(topics:, timeout:)
+         @topics, @timeout = topics, timeout
+       end
+
+       def api_key
+         CREATE_PARTITIONS_API
+       end
+
+       def api_version
+         0
+       end
+
+       def response_class
+         Protocol::CreatePartitionsResponse
+       end
+
+       def encode(encoder)
+         encoder.write_array(@topics) do |topic, count, assignments|
+           encoder.write_string(topic)
+           encoder.write_int32(count)
+           encoder.write_array(assignments) do |assignment|
+             encoder.write_array(assignment) do |broker|
+               encoder.write_int32(broker)
+             end
+           end
+         end
+         # Timeout is in ms.
+         encoder.write_int32(@timeout * 1000)
+         # validate_only. There isn't any use case for this in real life. So
+         # let's ignore it for now
+         encoder.write_boolean(false)
+       end
+     end
+
+   end
+ end
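A hedged sketch of how this request might be constructed, inferred purely from the encode method above: each entry destructures into [topic, new_partition_count, replica_assignments], and the timeout appears to be taken in seconds, since it is multiplied by 1000 before being written.

  request = Kafka::Protocol::CreatePartitionsRequest.new(
    topics: [
      # Grow "events" to 6 partitions; a nil assignment lets the brokers place replicas.
      ["events", 6, nil],
      # Grow "greetings" to 3 partitions with explicit replica placement per new partition.
      ["greetings", 3, [[1, 2], [2, 3], [3, 1]]],
    ],
    timeout: 30 # seconds; encoded on the wire as 30_000 ms
  )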
data/lib/kafka/protocol/create_partitions_response.rb
@@ -0,0 +1,28 @@
+ # frozen_string_literal: true
+
+ module Kafka
+   module Protocol
+
+     class CreatePartitionsResponse
+       attr_reader :errors
+
+       def initialize(throttle_time_ms:, errors:)
+         @throttle_time_ms = throttle_time_ms
+         @errors = errors
+       end
+
+       def self.decode(decoder)
+         throttle_time_ms = decoder.int32
+         errors = decoder.array do
+           topic = decoder.string
+           error_code = decoder.int16
+           error_message = decoder.string
+           [topic, error_code, error_message]
+         end
+
+         new(throttle_time_ms: throttle_time_ms, errors: errors)
+       end
+     end
+
+   end
+ end
data/lib/kafka/protocol/create_topics_request.rb
@@ -0,0 +1,45 @@
+ # frozen_string_literal: true
+
+ module Kafka
+   module Protocol
+
+     class CreateTopicsRequest
+       def initialize(topics:, timeout:)
+         @topics, @timeout = topics, timeout
+       end
+
+       def api_key
+         CREATE_TOPICS_API
+       end
+
+       def api_version
+         0
+       end
+
+       def response_class
+         Protocol::CreateTopicsResponse
+       end
+
+       def encode(encoder)
+         encoder.write_array(@topics) do |topic, config|
+           encoder.write_string(topic)
+           encoder.write_int32(config.fetch(:num_partitions))
+           encoder.write_int16(config.fetch(:replication_factor))
+
+           # Replica assignments. We don't care.
+           encoder.write_array([])
+
+           encoder.write_array(config.fetch(:config)) do |config_name, config_value|
+             config_value = config_value.to_s unless config_value.nil?
+             encoder.write_string(config_name)
+             encoder.write_string(config_value)
+           end
+         end
+
+         # Timeout is in ms.
+         encoder.write_int32(@timeout * 1000)
+       end
+     end
+
+   end
+ end
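Again inferred from the encode method rather than documented anywhere in this diff: write_array over a Hash yields [name, config] pairs, so topics is most naturally a Hash keyed by topic name whose values answer :num_partitions, :replication_factor and :config. A sketch:

  request = Kafka::Protocol::CreateTopicsRequest.new(
    topics: {
      "greetings" => {
        num_partitions: 3,
        replication_factor: 2,
        config: { "retention.ms" => 86_400_000 }, # values are stringified during encoding
      },
    },
    timeout: 30 # seconds; written to the wire as milliseconds
  )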
data/lib/kafka/protocol/create_topics_response.rb
@@ -0,0 +1,26 @@
+ # frozen_string_literal: true
+
+ module Kafka
+   module Protocol
+
+     class CreateTopicsResponse
+       attr_reader :errors
+
+       def initialize(errors:)
+         @errors = errors
+       end
+
+       def self.decode(decoder)
+         errors = decoder.array do
+           topic = decoder.string
+           error_code = decoder.int16
+
+           [topic, error_code]
+         end
+
+         new(errors: errors)
+       end
+     end
+
+   end
+ end
data/lib/kafka/protocol/decoder.rb
@@ -0,0 +1,175 @@
+ # frozen_string_literal: true
+
+ module Kafka
+   module Protocol
+     # A decoder wraps an IO object, making it easy to read specific data types
+     # from it. The Kafka protocol is not self-describing, so a client must call
+     # these methods in just the right order for things to work.
+     class Decoder
+       def self.from_string(str)
+         new(StringIO.new(str))
+       end
+
+       # Initializes a new decoder.
+       #
+       # @param io [IO] an object that acts as an IO.
+       def initialize(io)
+         @io = io
+       end
+
+       def eof?
+         @io.eof?
+       end
+
+       # Get some next bytes without touching the current io offset
+       #
+       # @return [Integer]
+       def peek(offset, length)
+         data = @io.read(offset + length)
+         return [] if data.nil?
+         @io.ungetc(data)
+         data.bytes[offset, offset + length] || []
+       end
+
+       # Decodes an 8-bit boolean from the IO object.
+       #
+       # @return [Boolean]
+       def boolean
+         read(1) == 0x1
+       end
+
+       # Decodes an 8-bit integer from the IO object.
+       #
+       # @return [Integer]
+       def int8
+         read(1).unpack("C").first
+       end
+
+       # Decodes a 16-bit integer from the IO object.
+       #
+       # @return [Integer]
+       def int16
+         read(2).unpack("s>").first
+       end
+
+       # Decodes a 32-bit integer from the IO object.
+       #
+       # @return [Integer]
+       def int32
+         read(4).unpack("l>").first
+       end
+
+       # Decodes a 64-bit integer from the IO object.
+       #
+       # @return [Integer]
+       def int64
+         read(8).unpack("q>").first
+       end
+
+       # Decodes an array from the IO object.
+       #
+       # The provided block will be called once for each item in the array. It is
+       # the responsibility of the block to decode the proper type in the block,
+       # since there's no information that allows the type to be inferred
+       # automatically.
+       #
+       # @return [Array]
+       def array(&block)
+         size = int32
+         size.times.map(&block)
+       end
+
+       # Decodes an array from the IO object.
+       # Just like #array except the size is in varint format
+       #
+       # @return [Array]
+       def varint_array(&block)
+         size = varint
+         size.times.map(&block)
+       end
+
+       # Decodes a string from the IO object.
+       #
+       # @return [String]
+       def string
+         size = int16
+
+         if size == -1
+           nil
+         else
+           read(size)
+         end
+       end
+
+       # Decodes a string from the IO object, the size is in varint format
+       #
+       # @return [String]
+       def varint_string
+         size = varint
+
+         if size == -1
+           nil
+         else
+           read(size)
+         end
+       end
+
+       # Read an integer under varints serializing from the IO object.
+       # https://developers.google.com/protocol-buffers/docs/encoding#varints
+       #
+       # @return [Integer]
+       def varint
+         group = 0
+         data = 0
+         while (chunk = int8) & 0x80 != 0
+           data |= (chunk & 0x7f) << group
+           group += 7
+         end
+         data |= chunk << group
+         data & 0b1 != 0 ? ~(data >> 1) : (data >> 1)
+       end
+
+       # Decodes a list of bytes from the IO object.
+       #
+       # @return [String]
+       def bytes
+         size = int32
+
+         if size == -1
+           nil
+         else
+           read(size)
+         end
+       end
+
+       # Decodes a list of bytes from the IO object. The size is in varint format
+       #
+       # @return [String]
+       def varint_bytes
+         size = varint
+
+         if size == -1
+           nil
+         else
+           read(size)
+         end
+       end
+
+       # Reads the specified number of bytes from the IO object, returning them
+       # as a String.
+       #
+       # @return [String]
+       def read(number_of_bytes)
+         return "" if number_of_bytes == 0
+
+         data = @io.read(number_of_bytes) or raise EOFError
+
+         # If the `read` call returned less data than expected we should not
+         # proceed.
+         raise EOFError if data.size != number_of_bytes
+
+         data
+       end
+     end
+   end
+ end
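A small round-trip sketch using only methods that appear in this changeset (Encoder's write_* counterparts and Decoder.from_string); the values are arbitrary.

  require "stringio"

  buffer  = StringIO.new
  encoder = Kafka::Protocol::Encoder.new(buffer)
  encoder.write_int16(7)
  encoder.write_string("hello")
  encoder.write_int32(42)

  decoder = Kafka::Protocol::Decoder.from_string(buffer.string)
  decoder.int16  #=> 7
  decoder.string #=> "hello"
  decoder.int32  #=> 42
  decoder.eof?   #=> true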
data/lib/kafka/protocol/delete_topics_request.rb
@@ -0,0 +1,33 @@
+ # frozen_string_literal: true
+
+ module Kafka
+   module Protocol
+
+     class DeleteTopicsRequest
+       def initialize(topics:, timeout:)
+         @topics, @timeout = topics, timeout
+       end
+
+       def api_key
+         DELETE_TOPICS_API
+       end
+
+       def api_version
+         0
+       end
+
+       def response_class
+         Protocol::DeleteTopicsResponse
+       end
+
+       def encode(encoder)
+         encoder.write_array(@topics) do |topic|
+           encoder.write_string(topic)
+         end
+         # Timeout is in ms.
+         encoder.write_int32(@timeout * 1000)
+       end
+     end
+
+   end
+ end
data/lib/kafka/protocol/delete_topics_response.rb
@@ -0,0 +1,26 @@
+ # frozen_string_literal: true
+
+ module Kafka
+   module Protocol
+
+     class DeleteTopicsResponse
+       attr_reader :errors
+
+       def initialize(errors:)
+         @errors = errors
+       end
+
+       def self.decode(decoder)
+         errors = decoder.array do
+           topic = decoder.string
+           error_code = decoder.int16
+
+           [topic, error_code]
+         end
+
+         new(errors: errors)
+       end
+     end
+
+   end
+ end
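One way a caller might check these [topic, error_code] pairs (CreateTopicsResponse#errors has the same shape). This assumes Kafka::Protocol.handle_error from the protocol.rb listed above, which is expected to raise the exception mapped to a non-zero error code.

  response.errors.each do |topic, error_code|
    begin
      Kafka::Protocol.handle_error(error_code) # no-op when error_code is zero
    rescue Kafka::Error => e
      warn "failed to delete topic #{topic}: #{e.class}"
    end
  end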
data/lib/kafka/protocol/describe_configs_request.rb
@@ -0,0 +1,35 @@
+ # frozen_string_literal: true
+
+ module Kafka
+   module Protocol
+
+     class DescribeConfigsRequest
+       def initialize(resources:)
+         @resources = resources
+       end
+
+       def api_key
+         DESCRIBE_CONFIGS_API
+       end
+
+       def api_version
+         0
+       end
+
+       def response_class
+         Protocol::DescribeConfigsResponse
+       end
+
+       def encode(encoder)
+         encoder.write_array(@resources) do |type, name, configs|
+           encoder.write_int8(type)
+           encoder.write_string(name)
+           encoder.write_array(configs) do |config|
+             encoder.write_string(config)
+           end
+         end
+       end
+     end
+
+   end
+ end
data/lib/kafka/protocol/describe_configs_response.rb
@@ -0,0 +1,73 @@
+ # frozen_string_literal: true
+
+ module Kafka
+   module Protocol
+     class DescribeConfigsResponse
+       class ResourceDescription
+         attr_reader :name, :type, :error_code, :error_message, :configs
+
+         def initialize(name:, type:, error_code:, error_message:, configs:)
+           @name = name
+           @type = type
+           @error_code = error_code
+           @error_message = error_message
+           @configs = configs
+         end
+       end
+
+       class ConfigEntry
+         attr_reader :name, :value, :read_only, :is_default, :is_sensitive
+
+         def initialize(name:, value:, read_only:, is_default:, is_sensitive:)
+           @name = name
+           @value = value
+           @read_only = read_only
+           @is_default = is_default
+           @is_sensitive = is_sensitive
+         end
+       end
+
+       attr_reader :resources
+
+       def initialize(throttle_time_ms:, resources:)
+         @throttle_time_ms = throttle_time_ms
+         @resources = resources
+       end
+
+       def self.decode(decoder)
+         throttle_time_ms = decoder.int32
+         resources = decoder.array do
+           error_code = decoder.int16
+           error_message = decoder.string
+
+           resource_type = decoder.int8
+           if Kafka::Protocol::RESOURCE_TYPES[resource_type].nil?
+             raise Kafka::ProtocolError, "Resource type not supported: #{resource_type}"
+           end
+           resource_name = decoder.string
+
+           configs = decoder.array do
+             ConfigEntry.new(
+               name: decoder.string,
+               value: decoder.string,
+               read_only: decoder.boolean,
+               is_default: decoder.boolean,
+               is_sensitive: decoder.boolean,
+             )
+           end
+
+           ResourceDescription.new(
+             type: RESOURCE_TYPES[resource_type],
+             name: resource_name,
+             error_code: error_code,
+             error_message: error_message,
+             configs: configs
+           )
+         end
+
+         new(throttle_time_ms: throttle_time_ms, resources: resources)
+       end
+     end
+
+   end
+ end
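A sketch tying the request and response together. Each resource handed to DescribeConfigsRequest destructures into [type, name, config_names] in its encode method; the RESOURCE_TYPE_TOPIC constant is assumed to live in protocol.rb next to the RESOURCE_TYPES lookup this response already uses, and the topic and config names are illustrative.

  request = Kafka::Protocol::DescribeConfigsRequest.new(
    resources: [
      [Kafka::Protocol::RESOURCE_TYPE_TOPIC, "greetings", ["retention.ms", "cleanup.policy"]],
    ]
  )

  # Given the decoded response:
  response.resources.each do |resource|
    puts "#{resource.type} #{resource.name} (error #{resource.error_code})"
    resource.configs.each do |entry|
      value = entry.is_sensitive ? "[redacted]" : entry.value
      puts "  #{entry.name} = #{value} (default: #{entry.is_default})"
    end
  end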