ruby-kafka-custom 0.7.7.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105)
  1. checksums.yaml +7 -0
  2. data/lib/kafka/async_producer.rb +279 -0
  3. data/lib/kafka/broker.rb +205 -0
  4. data/lib/kafka/broker_info.rb +16 -0
  5. data/lib/kafka/broker_pool.rb +41 -0
  6. data/lib/kafka/broker_uri.rb +43 -0
  7. data/lib/kafka/client.rb +754 -0
  8. data/lib/kafka/cluster.rb +455 -0
  9. data/lib/kafka/compression.rb +43 -0
  10. data/lib/kafka/compressor.rb +85 -0
  11. data/lib/kafka/connection.rb +220 -0
  12. data/lib/kafka/connection_builder.rb +33 -0
  13. data/lib/kafka/consumer.rb +592 -0
  14. data/lib/kafka/consumer_group.rb +208 -0
  15. data/lib/kafka/datadog.rb +413 -0
  16. data/lib/kafka/fetch_operation.rb +115 -0
  17. data/lib/kafka/fetched_batch.rb +54 -0
  18. data/lib/kafka/fetched_batch_generator.rb +117 -0
  19. data/lib/kafka/fetched_message.rb +47 -0
  20. data/lib/kafka/fetched_offset_resolver.rb +48 -0
  21. data/lib/kafka/fetcher.rb +221 -0
  22. data/lib/kafka/gzip_codec.rb +30 -0
  23. data/lib/kafka/heartbeat.rb +25 -0
  24. data/lib/kafka/instrumenter.rb +38 -0
  25. data/lib/kafka/lz4_codec.rb +23 -0
  26. data/lib/kafka/message_buffer.rb +87 -0
  27. data/lib/kafka/offset_manager.rb +248 -0
  28. data/lib/kafka/partitioner.rb +35 -0
  29. data/lib/kafka/pause.rb +92 -0
  30. data/lib/kafka/pending_message.rb +29 -0
  31. data/lib/kafka/pending_message_queue.rb +41 -0
  32. data/lib/kafka/produce_operation.rb +205 -0
  33. data/lib/kafka/producer.rb +504 -0
  34. data/lib/kafka/protocol.rb +217 -0
  35. data/lib/kafka/protocol/add_partitions_to_txn_request.rb +34 -0
  36. data/lib/kafka/protocol/add_partitions_to_txn_response.rb +47 -0
  37. data/lib/kafka/protocol/alter_configs_request.rb +44 -0
  38. data/lib/kafka/protocol/alter_configs_response.rb +49 -0
  39. data/lib/kafka/protocol/api_versions_request.rb +21 -0
  40. data/lib/kafka/protocol/api_versions_response.rb +53 -0
  41. data/lib/kafka/protocol/consumer_group_protocol.rb +19 -0
  42. data/lib/kafka/protocol/create_partitions_request.rb +42 -0
  43. data/lib/kafka/protocol/create_partitions_response.rb +28 -0
  44. data/lib/kafka/protocol/create_topics_request.rb +45 -0
  45. data/lib/kafka/protocol/create_topics_response.rb +26 -0
  46. data/lib/kafka/protocol/decoder.rb +175 -0
  47. data/lib/kafka/protocol/delete_topics_request.rb +33 -0
  48. data/lib/kafka/protocol/delete_topics_response.rb +26 -0
  49. data/lib/kafka/protocol/describe_configs_request.rb +35 -0
  50. data/lib/kafka/protocol/describe_configs_response.rb +73 -0
  51. data/lib/kafka/protocol/describe_groups_request.rb +27 -0
  52. data/lib/kafka/protocol/describe_groups_response.rb +73 -0
  53. data/lib/kafka/protocol/encoder.rb +184 -0
  54. data/lib/kafka/protocol/end_txn_request.rb +29 -0
  55. data/lib/kafka/protocol/end_txn_response.rb +19 -0
  56. data/lib/kafka/protocol/fetch_request.rb +70 -0
  57. data/lib/kafka/protocol/fetch_response.rb +136 -0
  58. data/lib/kafka/protocol/find_coordinator_request.rb +29 -0
  59. data/lib/kafka/protocol/find_coordinator_response.rb +29 -0
  60. data/lib/kafka/protocol/heartbeat_request.rb +27 -0
  61. data/lib/kafka/protocol/heartbeat_response.rb +17 -0
  62. data/lib/kafka/protocol/init_producer_id_request.rb +26 -0
  63. data/lib/kafka/protocol/init_producer_id_response.rb +27 -0
  64. data/lib/kafka/protocol/join_group_request.rb +41 -0
  65. data/lib/kafka/protocol/join_group_response.rb +33 -0
  66. data/lib/kafka/protocol/leave_group_request.rb +25 -0
  67. data/lib/kafka/protocol/leave_group_response.rb +17 -0
  68. data/lib/kafka/protocol/list_groups_request.rb +23 -0
  69. data/lib/kafka/protocol/list_groups_response.rb +35 -0
  70. data/lib/kafka/protocol/list_offset_request.rb +53 -0
  71. data/lib/kafka/protocol/list_offset_response.rb +89 -0
  72. data/lib/kafka/protocol/member_assignment.rb +42 -0
  73. data/lib/kafka/protocol/message.rb +172 -0
  74. data/lib/kafka/protocol/message_set.rb +55 -0
  75. data/lib/kafka/protocol/metadata_request.rb +31 -0
  76. data/lib/kafka/protocol/metadata_response.rb +185 -0
  77. data/lib/kafka/protocol/offset_commit_request.rb +47 -0
  78. data/lib/kafka/protocol/offset_commit_response.rb +29 -0
  79. data/lib/kafka/protocol/offset_fetch_request.rb +36 -0
  80. data/lib/kafka/protocol/offset_fetch_response.rb +56 -0
  81. data/lib/kafka/protocol/produce_request.rb +92 -0
  82. data/lib/kafka/protocol/produce_response.rb +63 -0
  83. data/lib/kafka/protocol/record.rb +88 -0
  84. data/lib/kafka/protocol/record_batch.rb +222 -0
  85. data/lib/kafka/protocol/request_message.rb +26 -0
  86. data/lib/kafka/protocol/sasl_handshake_request.rb +33 -0
  87. data/lib/kafka/protocol/sasl_handshake_response.rb +28 -0
  88. data/lib/kafka/protocol/sync_group_request.rb +33 -0
  89. data/lib/kafka/protocol/sync_group_response.rb +23 -0
  90. data/lib/kafka/round_robin_assignment_strategy.rb +54 -0
  91. data/lib/kafka/sasl/gssapi.rb +76 -0
  92. data/lib/kafka/sasl/oauth.rb +64 -0
  93. data/lib/kafka/sasl/plain.rb +39 -0
  94. data/lib/kafka/sasl/scram.rb +177 -0
  95. data/lib/kafka/sasl_authenticator.rb +61 -0
  96. data/lib/kafka/snappy_codec.rb +25 -0
  97. data/lib/kafka/socket_with_timeout.rb +96 -0
  98. data/lib/kafka/ssl_context.rb +66 -0
  99. data/lib/kafka/ssl_socket_with_timeout.rb +187 -0
  100. data/lib/kafka/statsd.rb +296 -0
  101. data/lib/kafka/tagged_logger.rb +72 -0
  102. data/lib/kafka/transaction_manager.rb +261 -0
  103. data/lib/kafka/transaction_state_machine.rb +72 -0
  104. data/lib/kafka/version.rb +5 -0
  105. metadata +461 -0
data/lib/kafka/protocol.rb
@@ -0,0 +1,217 @@
+# frozen_string_literal: true
+
+module Kafka
+
+  # The protocol layer of the library.
+  #
+  # The Kafka protocol (https://kafka.apache.org/protocol) defines a set of API
+  # requests, each with a well-known numeric API key, as well as a set of error
+  # codes with specific meanings.
+  #
+  # This module, and the classes contained in it, implement the client side of
+  # the protocol.
+  module Protocol
+    # The replica id of non-brokers is always -1.
+    REPLICA_ID = -1
+
+    PRODUCE_API = 0
+    FETCH_API = 1
+    LIST_OFFSET_API = 2
+    TOPIC_METADATA_API = 3
+    OFFSET_COMMIT_API = 8
+    OFFSET_FETCH_API = 9
+    FIND_COORDINATOR_API = 10
+    JOIN_GROUP_API = 11
+    HEARTBEAT_API = 12
+    LEAVE_GROUP_API = 13
+    SYNC_GROUP_API = 14
+    DESCRIBE_GROUPS_API = 15
+    LIST_GROUPS_API = 16
+    SASL_HANDSHAKE_API = 17
+    API_VERSIONS_API = 18
+    CREATE_TOPICS_API = 19
+    DELETE_TOPICS_API = 20
+    INIT_PRODUCER_ID_API = 22
+    ADD_PARTITIONS_TO_TXN_API = 24
+    END_TXN_API = 26
+    DESCRIBE_CONFIGS_API = 32
+    ALTER_CONFIGS_API = 33
+    CREATE_PARTITIONS_API = 37
+
+    # A mapping from numeric API keys to symbolic API names.
+    APIS = {
+      PRODUCE_API => :produce,
+      FETCH_API => :fetch,
+      LIST_OFFSET_API => :list_offset,
+      TOPIC_METADATA_API => :topic_metadata,
+      OFFSET_COMMIT_API => :offset_commit,
+      OFFSET_FETCH_API => :offset_fetch,
+      FIND_COORDINATOR_API => :find_coordinator,
+      JOIN_GROUP_API => :join_group,
+      HEARTBEAT_API => :heartbeat,
+      LEAVE_GROUP_API => :leave_group,
+      SYNC_GROUP_API => :sync_group,
+      SASL_HANDSHAKE_API => :sasl_handshake,
+      API_VERSIONS_API => :api_versions,
+      CREATE_TOPICS_API => :create_topics,
+      DELETE_TOPICS_API => :delete_topics,
+      INIT_PRODUCER_ID_API => :init_producer_id_api,
+      ADD_PARTITIONS_TO_TXN_API => :add_partitions_to_txn_api,
+      END_TXN_API => :end_txn_api,
+      DESCRIBE_CONFIGS_API => :describe_configs_api,
+      CREATE_PARTITIONS_API => :create_partitions
+    }
+
+    # A mapping from numeric error codes to exception classes.
+    ERRORS = {
+      -1 => UnknownError,
+      1 => OffsetOutOfRange,
+      2 => CorruptMessage,
+      3 => UnknownTopicOrPartition,
+      4 => InvalidMessageSize,
+      5 => LeaderNotAvailable,
+      6 => NotLeaderForPartition,
+      7 => RequestTimedOut,
+      8 => BrokerNotAvailable,
+      9 => ReplicaNotAvailable,
+      10 => MessageSizeTooLarge,
+      11 => StaleControllerEpoch,
+      12 => OffsetMetadataTooLarge,
+      13 => NetworkException,
+      14 => CoordinatorLoadInProgress,
+      15 => CoordinatorNotAvailable,
+      16 => NotCoordinatorForGroup,
+      17 => InvalidTopic,
+      18 => RecordListTooLarge,
+      19 => NotEnoughReplicas,
+      20 => NotEnoughReplicasAfterAppend,
+      21 => InvalidRequiredAcks,
+      22 => IllegalGeneration,
+      23 => InconsistentGroupProtocol,
+      24 => InvalidGroupId,
+      25 => UnknownMemberId,
+      26 => InvalidSessionTimeout,
+      27 => RebalanceInProgress,
+      28 => InvalidCommitOffsetSize,
+      29 => TopicAuthorizationFailed,
+      30 => GroupAuthorizationFailed,
+      31 => ClusterAuthorizationFailed,
+      32 => InvalidTimestamp,
+      33 => UnsupportedSaslMechanism,
+      34 => InvalidSaslState,
+      35 => UnsupportedVersion,
+      36 => TopicAlreadyExists,
+      37 => InvalidPartitions,
+      38 => InvalidReplicationFactor,
+      39 => InvalidReplicaAssignment,
+      40 => InvalidConfig,
+      41 => NotController,
+      42 => InvalidRequest,
+      43 => UnsupportedForMessageFormat,
+      44 => PolicyViolation,
+      45 => OutOfOrderSequenceNumberError,
+      46 => DuplicateSequenceNumberError,
+      47 => InvalidProducerEpochError,
+      48 => InvalidTxnStateError,
+      49 => InvalidProducerIDMappingError,
+      50 => InvalidTransactionTimeoutError,
+      51 => ConcurrentTransactionError,
+      52 => TransactionCoordinatorFencedError
+    }
+
+    # A mapping from int to corresponding resource type in symbol.
+    # https://github.com/apache/kafka/blob/trunk/clients/src/main/java/org/apache/kafka/common/resource/ResourceType.java
+    RESOURCE_TYPE_UNKNOWN = 0
+    RESOURCE_TYPE_ANY = 1
+    RESOURCE_TYPE_TOPIC = 2
+    RESOURCE_TYPE_GROUP = 3
+    RESOURCE_TYPE_CLUSTER = 4
+    RESOURCE_TYPE_TRANSACTIONAL_ID = 5
+    RESOURCE_TYPE_DELEGATION_TOKEN = 6
+    RESOURCE_TYPES = {
+      RESOURCE_TYPE_UNKNOWN => :unknown,
+      RESOURCE_TYPE_ANY => :any,
+      RESOURCE_TYPE_TOPIC => :topic,
+      RESOURCE_TYPE_GROUP => :group,
+      RESOURCE_TYPE_CLUSTER => :cluster,
+      RESOURCE_TYPE_TRANSACTIONAL_ID => :transactional_id,
+      RESOURCE_TYPE_DELEGATION_TOKEN => :delegation_token,
+    }
+
+    # Coordinator types. Since Kafka 0.11.0, there are types of coordinators:
+    # Group and Transaction
+    COORDINATOR_TYPE_GROUP = 0
+    COORDINATOR_TYPE_TRANSACTION = 1
+
+    # Handles an error code by either doing nothing (if there was no error) or
+    # by raising an appropriate exception.
+    #
+    # @param error_code Integer
+    # @raise [ProtocolError]
+    # @return [nil]
+    def self.handle_error(error_code, error_message = nil)
+      if error_code == 0
+        # No errors, yay!
+      elsif error = ERRORS[error_code]
+        raise error, error_message
+      else
+        raise UnknownError, "Unknown error with code #{error_code} #{error_message}"
+      end
+    end
+
+    # Returns the symbolic name for an API key.
+    #
+    # @param api_key Integer
+    # @return [Symbol]
+    def self.api_name(api_key)
+      APIS.fetch(api_key, :unknown)
+    end
+  end
+end
+
+require "kafka/protocol/metadata_request"
+require "kafka/protocol/metadata_response"
+require "kafka/protocol/produce_request"
+require "kafka/protocol/produce_response"
+require "kafka/protocol/fetch_request"
+require "kafka/protocol/fetch_response"
+require "kafka/protocol/list_offset_request"
+require "kafka/protocol/list_offset_response"
+require "kafka/protocol/find_coordinator_request"
+require "kafka/protocol/find_coordinator_response"
+require "kafka/protocol/join_group_request"
+require "kafka/protocol/join_group_response"
+require "kafka/protocol/sync_group_request"
+require "kafka/protocol/sync_group_response"
+require "kafka/protocol/leave_group_request"
+require "kafka/protocol/leave_group_response"
+require "kafka/protocol/heartbeat_request"
+require "kafka/protocol/heartbeat_response"
+require "kafka/protocol/offset_fetch_request"
+require "kafka/protocol/offset_fetch_response"
+require "kafka/protocol/offset_commit_request"
+require "kafka/protocol/offset_commit_response"
+require "kafka/protocol/api_versions_request"
+require "kafka/protocol/api_versions_response"
+require "kafka/protocol/sasl_handshake_request"
+require "kafka/protocol/sasl_handshake_response"
+require "kafka/protocol/create_topics_request"
+require "kafka/protocol/create_topics_response"
+require "kafka/protocol/delete_topics_request"
+require "kafka/protocol/delete_topics_response"
+require "kafka/protocol/describe_configs_request"
+require "kafka/protocol/describe_configs_response"
+require "kafka/protocol/alter_configs_request"
+require "kafka/protocol/alter_configs_response"
+require "kafka/protocol/create_partitions_request"
+require "kafka/protocol/create_partitions_response"
+require "kafka/protocol/list_groups_request"
+require "kafka/protocol/list_groups_response"
+require "kafka/protocol/describe_groups_request"
+require "kafka/protocol/describe_groups_response"
+require "kafka/protocol/init_producer_id_request"
+require "kafka/protocol/init_producer_id_response"
+require "kafka/protocol/add_partitions_to_txn_request"
+require "kafka/protocol/add_partitions_to_txn_response"
+require "kafka/protocol/end_txn_request"
+require "kafka/protocol/end_txn_response"
data/lib/kafka/protocol/add_partitions_to_txn_request.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class AddPartitionsToTxnRequest
+      def initialize(transactional_id: nil, producer_id:, producer_epoch:, topics:)
+        @transactional_id = transactional_id
+        @producer_id = producer_id
+        @producer_epoch = producer_epoch
+        @topics = topics
+      end
+
+      def api_key
+        ADD_PARTITIONS_TO_TXN_API
+      end
+
+      def response_class
+        AddPartitionsToTxnResponse
+      end
+
+      def encode(encoder)
+        encoder.write_string(@transactional_id.to_s)
+        encoder.write_int64(@producer_id)
+        encoder.write_int16(@producer_epoch)
+        encoder.write_array(@topics.to_a) do |topic, partitions|
+          encoder.write_string(topic)
+          encoder.write_array(partitions) do |partition|
+            encoder.write_int32(partition)
+          end
+        end
+      end
+    end
+  end
+end
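
As a usage illustration (not part of the package), the encode method above expects @topics to respond to #to_a with [topic, partitions] pairs, so a plain Hash of topic name to partition ids fits; it is assumed here that the Encoder from data/lib/kafka/protocol/encoder.rb (also in this release) wraps an IO object:

require "kafka"
require "stringio"

request = Kafka::Protocol::AddPartitionsToTxnRequest.new(
  transactional_id: "my-transaction",
  producer_id: 4000,
  producer_epoch: 0,
  topics: { "events" => [0, 1, 2] }   # topic name => partition ids
)

buffer = StringIO.new
request.encode(Kafka::Protocol::Encoder.new(buffer))
buffer.string  # the encoded request body (the common request header is written elsewhere)
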
data/lib/kafka/protocol/add_partitions_to_txn_response.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class AddPartitionsToTxnResponse
+      class PartitionError
+        attr_reader :partition, :error_code
+
+        def initialize(partition:, error_code:)
+          @partition = partition
+          @error_code = error_code
+        end
+      end
+
+      class TopicPartitionsError
+        attr_reader :topic, :partitions
+
+        def initialize(topic:, partitions:)
+          @topic = topic
+          @partitions = partitions
+        end
+      end
+
+      attr_reader :errors
+
+      def initialize(errors:)
+        @errors = errors
+      end
+
+      def self.decode(decoder)
+        _throttle_time_ms = decoder.int32
+        errors = decoder.array do
+          TopicPartitionsError.new(
+            topic: decoder.string,
+            partitions: decoder.array do
+              PartitionError.new(
+                partition: decoder.int32,
+                error_code: decoder.int16
+              )
+            end
+          )
+        end
+        new(errors: errors)
+      end
+    end
+  end
+end
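
Once decoded, the response is a nested structure of per-topic, per-partition error codes. Here is a hedged sketch of how a caller might surface them by reusing Protocol.handle_error from protocol.rb above; the check_add_partitions_errors! helper is hypothetical, not package code:

# Raise the mapped exception for the first failed partition, if any.
def check_add_partitions_errors!(response)
  response.errors.each do |topic_error|
    topic_error.partitions.each do |partition_error|
      # error_code 0 does nothing; any other known code raises.
      Kafka::Protocol.handle_error(partition_error.error_code)
    end
  end
end
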
data/lib/kafka/protocol/alter_configs_request.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    class AlterConfigsRequest
+      def initialize(resources:)
+        @resources = resources
+      end
+
+      def api_key
+        ALTER_CONFIGS_API
+      end
+
+      def api_version
+        0
+      end
+
+      def response_class
+        Protocol::AlterConfigsResponse
+      end
+
+      def encode(encoder)
+        encoder.write_array(@resources) do |type, name, configs|
+          encoder.write_int8(type)
+          encoder.write_string(name)
+
+          configs = configs.to_a
+          encoder.write_array(configs) do |config_name, config_value|
+            # Config value is nullable. In other cases, we must write the
+            # stringified value.
+            config_value = config_value.to_s unless config_value.nil?
+
+            encoder.write_string(config_name)
+            encoder.write_string(config_value)
+          end
+        end
+        # validate_only. We'll skip this feature.
+        encoder.write_boolean(false)
+      end
+    end
+
+  end
+end
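
The encode method above destructures each resource into (type, name, configs), which suggests the following shape for the resources argument. This is an illustrative sketch, not package code; the topic name and config values are made up:

require "kafka"

resources = [
  [
    Kafka::Protocol::RESOURCE_TYPE_TOPIC,   # resource type (written as int8)
    "events",                               # resource name
    # Config values are stringified before encoding; nil is written as a null string.
    { "cleanup.policy" => "compact", "retention.ms" => 86_400_000 }
  ]
]

request = Kafka::Protocol::AlterConfigsRequest.new(resources: resources)
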
data/lib/kafka/protocol/alter_configs_response.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class AlterConfigsResponse
+      class ResourceDescription
+        attr_reader :name, :type, :error_code, :error_message
+
+        def initialize(name:, type:, error_code:, error_message:)
+          @name = name
+          @type = type
+          @error_code = error_code
+          @error_message = error_message
+        end
+      end
+
+      attr_reader :resources
+
+      def initialize(throttle_time_ms:, resources:)
+        @throttle_time_ms = throttle_time_ms
+        @resources = resources
+      end
+
+      def self.decode(decoder)
+        throttle_time_ms = decoder.int32
+        resources = decoder.array do
+          error_code = decoder.int16
+          error_message = decoder.string
+
+          resource_type = decoder.int8
+          if Kafka::Protocol::RESOURCE_TYPES[resource_type].nil?
+            raise Kafka::ProtocolError, "Resource type not supported: #{resource_type}"
+          end
+          resource_name = decoder.string
+
+          ResourceDescription.new(
+            type: RESOURCE_TYPES[resource_type],
+            name: resource_name,
+            error_code: error_code,
+            error_message: error_message
+          )
+        end
+
+        new(throttle_time_ms: throttle_time_ms, resources: resources)
+      end
+    end
+
+  end
+end
data/lib/kafka/protocol/api_versions_request.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    class ApiVersionsRequest
+      def api_key
+        API_VERSIONS_API
+      end
+
+      def encode(encoder)
+        # Nothing to do.
+      end
+
+      def response_class
+        Protocol::ApiVersionsResponse
+      end
+    end
+
+  end
+end
data/lib/kafka/protocol/api_versions_response.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    class ApiVersionsResponse
+      class ApiInfo
+        attr_reader :api_key, :min_version, :max_version
+
+        def initialize(api_key:, min_version:, max_version:)
+          @api_key, @min_version, @max_version = api_key, min_version, max_version
+        end
+
+        def api_name
+          Protocol.api_name(api_key)
+        end
+
+        def version_supported?(version)
+          (min_version..max_version).include?(version)
+        end
+
+        def to_s
+          "#{api_name}=#{min_version}..#{max_version}"
+        end
+
+        def inspect
+          "#<Kafka api version #{to_s}>"
+        end
+      end
+
+      attr_reader :error_code, :apis
+
+      def initialize(error_code:, apis:)
+        @error_code = error_code
+        @apis = apis
+      end
+
+      def self.decode(decoder)
+        error_code = decoder.int16
+
+        apis = decoder.array do
+          ApiInfo.new(
+            api_key: decoder.int16,
+            min_version: decoder.int16,
+            max_version: decoder.int16,
+          )
+        end
+
+        new(error_code: error_code, apis: apis)
+      end
+    end
+  end
+end
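
A short, hypothetical sketch of how the decoded response can be used to check broker support for an API version; the broker_supports? helper is not part of the package:

# Returns true if the broker advertises support for `version` of `api_key`.
def broker_supports?(api_versions_response, api_key, version)
  Kafka::Protocol.handle_error(api_versions_response.error_code)

  api = api_versions_response.apis.find { |info| info.api_key == api_key }
  !api.nil? && api.version_supported?(version)
end

# e.g. broker_supports?(response, Kafka::Protocol::FETCH_API, 4)
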