kafka_command 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (159)
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +179 -0
  3. data/.env +1 -0
  4. data/.env.test +1 -0
  5. data/.gitignore +41 -0
  6. data/.rspec +1 -0
  7. data/.rubocop.yml +12 -0
  8. data/.ruby-version +1 -0
  9. data/Gemfile +17 -0
  10. data/Gemfile.lock +194 -0
  11. data/LICENSE +21 -0
  12. data/README.md +138 -0
  13. data/Rakefile +34 -0
  14. data/app/assets/config/manifest.js +3 -0
  15. data/app/assets/images/.keep +0 -0
  16. data/app/assets/images/kafka_command/cluster_view.png +0 -0
  17. data/app/assets/images/kafka_command/kafka.png +0 -0
  18. data/app/assets/images/kafka_command/topic_view.png +0 -0
  19. data/app/assets/javascripts/kafka_command/application.js +14 -0
  20. data/app/assets/stylesheets/kafka_command/application.css +27 -0
  21. data/app/assets/stylesheets/kafka_command/clusters.css +8 -0
  22. data/app/assets/stylesheets/kafka_command/topics.css +3 -0
  23. data/app/channels/application_cable/channel.rb +6 -0
  24. data/app/channels/application_cable/connection.rb +6 -0
  25. data/app/controllers/kafka_command/application_controller.rb +96 -0
  26. data/app/controllers/kafka_command/brokers_controller.rb +26 -0
  27. data/app/controllers/kafka_command/clusters_controller.rb +46 -0
  28. data/app/controllers/kafka_command/consumer_groups_controller.rb +44 -0
  29. data/app/controllers/kafka_command/topics_controller.rb +187 -0
  30. data/app/helpers/kafka_command/application_helper.rb +29 -0
  31. data/app/helpers/kafka_command/consumer_group_helper.rb +13 -0
  32. data/app/jobs/application_job.rb +6 -0
  33. data/app/mailers/application_mailer.rb +8 -0
  34. data/app/models/kafka_command/broker.rb +47 -0
  35. data/app/models/kafka_command/client.rb +102 -0
  36. data/app/models/kafka_command/cluster.rb +172 -0
  37. data/app/models/kafka_command/consumer_group.rb +142 -0
  38. data/app/models/kafka_command/consumer_group_partition.rb +23 -0
  39. data/app/models/kafka_command/group_member.rb +18 -0
  40. data/app/models/kafka_command/partition.rb +36 -0
  41. data/app/models/kafka_command/topic.rb +153 -0
  42. data/app/views/kafka_command/brokers/index.html.erb +38 -0
  43. data/app/views/kafka_command/clusters/_tabs.html.erb +9 -0
  44. data/app/views/kafka_command/clusters/index.html.erb +54 -0
  45. data/app/views/kafka_command/clusters/new.html.erb +115 -0
  46. data/app/views/kafka_command/configuration_error.html.erb +1 -0
  47. data/app/views/kafka_command/consumer_groups/index.html.erb +32 -0
  48. data/app/views/kafka_command/consumer_groups/show.html.erb +115 -0
  49. data/app/views/kafka_command/shared/_alert.html.erb +13 -0
  50. data/app/views/kafka_command/shared/_search_bar.html.erb +31 -0
  51. data/app/views/kafka_command/shared/_title.html.erb +6 -0
  52. data/app/views/kafka_command/topics/_form_fields.html.erb +49 -0
  53. data/app/views/kafka_command/topics/edit.html.erb +17 -0
  54. data/app/views/kafka_command/topics/index.html.erb +46 -0
  55. data/app/views/kafka_command/topics/new.html.erb +36 -0
  56. data/app/views/kafka_command/topics/show.html.erb +126 -0
  57. data/app/views/layouts/kafka_command/application.html.erb +50 -0
  58. data/bin/rails +16 -0
  59. data/config/initializers/kafka.rb +13 -0
  60. data/config/initializers/kafka_command.rb +11 -0
  61. data/config/routes.rb +11 -0
  62. data/docker-compose.yml +18 -0
  63. data/kafka_command.gemspec +27 -0
  64. data/lib/assets/.keep +0 -0
  65. data/lib/core_extensions/kafka/broker/attr_readers.rb +11 -0
  66. data/lib/core_extensions/kafka/broker_pool/attr_readers.rb +11 -0
  67. data/lib/core_extensions/kafka/client/attr_readers.rb +11 -0
  68. data/lib/core_extensions/kafka/cluster/attr_readers.rb +11 -0
  69. data/lib/core_extensions/kafka/protocol/metadata_response/partition_metadata/attr_readers.rb +15 -0
  70. data/lib/kafka_command/configuration.rb +150 -0
  71. data/lib/kafka_command/engine.rb +11 -0
  72. data/lib/kafka_command/errors.rb +6 -0
  73. data/lib/kafka_command/version.rb +5 -0
  74. data/lib/kafka_command.rb +13 -0
  75. data/lib/tasks/.keep +0 -0
  76. data/spec/dummy/Rakefile +6 -0
  77. data/spec/dummy/app/assets/config/manifest.js +4 -0
  78. data/spec/dummy/app/assets/javascripts/application.js +15 -0
  79. data/spec/dummy/app/assets/javascripts/cable.js +13 -0
  80. data/spec/dummy/app/assets/stylesheets/application.css +15 -0
  81. data/spec/dummy/app/channels/application_cable/channel.rb +4 -0
  82. data/spec/dummy/app/channels/application_cable/connection.rb +4 -0
  83. data/spec/dummy/app/controllers/application_controller.rb +2 -0
  84. data/spec/dummy/app/helpers/application_helper.rb +2 -0
  85. data/spec/dummy/app/jobs/application_job.rb +2 -0
  86. data/spec/dummy/app/mailers/application_mailer.rb +4 -0
  87. data/spec/dummy/app/models/application_record.rb +3 -0
  88. data/spec/dummy/app/views/layouts/application.html.erb +15 -0
  89. data/spec/dummy/app/views/layouts/mailer.html.erb +13 -0
  90. data/spec/dummy/app/views/layouts/mailer.text.erb +1 -0
  91. data/spec/dummy/bin/bundle +3 -0
  92. data/spec/dummy/bin/rails +4 -0
  93. data/spec/dummy/bin/rake +4 -0
  94. data/spec/dummy/bin/setup +36 -0
  95. data/spec/dummy/bin/update +31 -0
  96. data/spec/dummy/bin/yarn +11 -0
  97. data/spec/dummy/config/application.rb +19 -0
  98. data/spec/dummy/config/boot.rb +5 -0
  99. data/spec/dummy/config/cable.yml +10 -0
  100. data/spec/dummy/config/database.yml +25 -0
  101. data/spec/dummy/config/environment.rb +5 -0
  102. data/spec/dummy/config/environments/development.rb +61 -0
  103. data/spec/dummy/config/environments/production.rb +94 -0
  104. data/spec/dummy/config/environments/test.rb +46 -0
  105. data/spec/dummy/config/initializers/application_controller_renderer.rb +8 -0
  106. data/spec/dummy/config/initializers/assets.rb +14 -0
  107. data/spec/dummy/config/initializers/backtrace_silencers.rb +7 -0
  108. data/spec/dummy/config/initializers/content_security_policy.rb +25 -0
  109. data/spec/dummy/config/initializers/cookies_serializer.rb +5 -0
  110. data/spec/dummy/config/initializers/filter_parameter_logging.rb +4 -0
  111. data/spec/dummy/config/initializers/inflections.rb +16 -0
  112. data/spec/dummy/config/initializers/mime_types.rb +4 -0
  113. data/spec/dummy/config/initializers/wrap_parameters.rb +14 -0
  114. data/spec/dummy/config/kafka_command.yml +18 -0
  115. data/spec/dummy/config/locales/en.yml +33 -0
  116. data/spec/dummy/config/puma.rb +34 -0
  117. data/spec/dummy/config/routes.rb +3 -0
  118. data/spec/dummy/config/spring.rb +6 -0
  119. data/spec/dummy/config/ssl/test_ca_cert +1 -0
  120. data/spec/dummy/config/ssl/test_client_cert +1 -0
  121. data/spec/dummy/config/ssl/test_client_cert_key +1 -0
  122. data/spec/dummy/config/storage.yml +34 -0
  123. data/spec/dummy/config.ru +5 -0
  124. data/spec/dummy/db/schema.rb +42 -0
  125. data/spec/dummy/db/test.sqlite3 +0 -0
  126. data/spec/dummy/log/development.log +0 -0
  127. data/spec/dummy/log/hey.log +0 -0
  128. data/spec/dummy/log/test.log +2227 -0
  129. data/spec/dummy/package.json +5 -0
  130. data/spec/dummy/public/404.html +67 -0
  131. data/spec/dummy/public/422.html +67 -0
  132. data/spec/dummy/public/500.html +66 -0
  133. data/spec/dummy/public/apple-touch-icon-precomposed.png +0 -0
  134. data/spec/dummy/public/apple-touch-icon.png +0 -0
  135. data/spec/dummy/public/favicon.ico +0 -0
  136. data/spec/examples.txt +165 -0
  137. data/spec/fast_helper.rb +20 -0
  138. data/spec/fixtures/files/kafka_command_sasl.yml +10 -0
  139. data/spec/fixtures/files/kafka_command_ssl.yml +10 -0
  140. data/spec/fixtures/files/kafka_command_ssl_file_paths.yml +11 -0
  141. data/spec/fixtures/files/kafka_command_staging.yml +8 -0
  142. data/spec/lib/kafka_command/configuration_spec.rb +311 -0
  143. data/spec/models/kafka_command/broker_spec.rb +83 -0
  144. data/spec/models/kafka_command/client_spec.rb +306 -0
  145. data/spec/models/kafka_command/cluster_spec.rb +163 -0
  146. data/spec/models/kafka_command/consumer_group_partition_spec.rb +43 -0
  147. data/spec/models/kafka_command/consumer_group_spec.rb +236 -0
  148. data/spec/models/kafka_command/partition_spec.rb +95 -0
  149. data/spec/models/kafka_command/topic_spec.rb +311 -0
  150. data/spec/rails_helper.rb +63 -0
  151. data/spec/requests/json/brokers_spec.rb +50 -0
  152. data/spec/requests/json/clusters_spec.rb +58 -0
  153. data/spec/requests/json/consumer_groups_spec.rb +139 -0
  154. data/spec/requests/json/topics_spec.rb +274 -0
  155. data/spec/spec_helper.rb +109 -0
  156. data/spec/support/factory_bot.rb +5 -0
  157. data/spec/support/json_helper.rb +13 -0
  158. data/spec/support/kafka_helper.rb +93 -0
  159. metadata +326 -0
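The layout above is that of a mountable Rails engine: a namespaced app/ tree, lib/kafka_command/engine.rb, the engine's own config/routes.rb, and a spec/dummy host application configured through config/kafka_command.yml. For reference, a host application would typically wire such an engine in roughly as follows; this is a minimal sketch under conventional Rails-engine assumptions, and the /kafka_command mount path is illustrative rather than taken from this package's README:

# config/routes.rb of a host Rails application (illustrative sketch).
# KafkaCommand::Engine is defined in data/lib/kafka_command/engine.rb of this gem;
# the mount path below is an assumption, not documented in this diff.
Rails.application.routes.draw do
  mount KafkaCommand::Engine, at: '/kafka_command'
end

The cluster settings themselves live in a kafka_command.yml file (see data/spec/dummy/config/kafka_command.yml above); the shape that file must take is spelled out by the configuration spec that follows.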
data/spec/lib/kafka_command/configuration_spec.rb
@@ -0,0 +1,311 @@
+ # frozen_string_literal: true
+
+ RSpec.describe KafkaCommand::Configuration do
+   subject { described_class.new(config_hash) }
+
+   describe '#valid?' do
+     context 'valid' do
+       let(:config_hash) { described_class.parse_yaml('spec/dummy/config/kafka_command.yml') }
+
+       it 'returns true' do
+         expect(subject.valid?).to eq(true)
+         expect(subject.errors).to be_empty
+       end
+
+       context 'ssl' do
+         context 'no file paths' do
+           let(:config_hash) { described_class.parse_yaml('spec/fixtures/files/kafka_command_ssl.yml') }
+
+           it 'returns true' do
+             expect(subject.valid?).to eq(true)
+             expect(subject.errors).to be_empty end
+         end
+
+         context 'file paths' do
+           let(:config_hash) { described_class.parse_yaml('spec/fixtures/files/kafka_command_ssl_file_paths.yml') }
+
+           it 'returns true' do
+             expect(subject.valid?).to eq(true)
+             expect(subject.errors).to be_empty
+           end
+         end
+       end
+
+       context 'sasl' do
+         let(:config_hash) { described_class.parse_yaml('spec/fixtures/files/kafka_command_sasl.yml') }
+
+         it 'returns true' do
+           expect(subject.valid?).to eq(true)
+           expect(subject.errors).to be_empty
+         end
+       end
+     end
+
+     context 'invalid' do
+       context 'wrong environment' do
+         let(:config_hash) { YAML.load(File.read('spec/fixtures/files/kafka_command_staging.yml')) }
+         let(:env) { 'staging' }
+
+         before do
+           expect(env).to_not eq(ENV['RAILS_ENV'])
+         end
+
+         it 'returns false' do
+           expect(subject.valid?).to eq(false)
+           expect(subject.errors).to include('No config specified for environment')
+         end
+       end
+
+       context 'no clusters' do
+         let(:config_hash) do
+           {
+             'test' => {
+               'test_cluster' => {
+                 'seed_brokers' => ['localhost:9092']
+               }
+             }
+           }
+         end
+
+         it 'returns false' do
+           expect(subject.valid?).to eq(false)
+           expect(subject.errors).to include('Cluster must be provided')
+         end
+       end
+
+       context 'invalid clusters' do
+         context 'bad cluster option' do
+           let(:config_hash) do
+             {
+               'test' => {
+                 'clusters' => {
+                   'test_cluster' => {
+                     'bad_option' => 'bad',
+                     'seed_brokers' => ['localhost:9092']
+                   }
+                 }
+               }
+             }
+           end
+
+           it 'returns false' do
+             expect(subject.valid?).to eq(false)
+             expect(subject.errors).to include('Invalid cluster option, bad_option')
+           end
+         end
+
+         context 'no seed brokers' do
+           let(:config_hash) do
+             {
+               'test' => {
+                 'clusters' => {
+                   'test_cluster' => {
+                     'description' => 'production cluster'
+                   }
+                 }
+               }
+             }
+           end
+
+           it 'returns false' do
+             expect(subject.valid?).to eq(false)
+             expect(subject.errors).to include('Must specify a list of seed brokers')
+           end
+
+           context 'bad host/port combination' do
+             let(:config_hash) do
+               {
+                 'test' => {
+                   'clusters' => {
+                     'test_cluster' => {
+                       'seed_brokers' => ['badhost'],
+                     }
+                   }
+                 }
+               }
+             end
+
+             it 'returns false' do
+               expect(subject.valid?).to eq(false)
+               expect(subject.errors).to include('Broker must be a valid host/port combination')
+             end
+           end
+         end
+
+         context 'sasl' do
+           context 'username and no password' do
+             let(:config_hash) do
+               {
+                 'test' => {
+                   'clusters' => {
+                     'test_cluster' => {
+                       'seed_brokers' => ['localhost:9092'],
+                       'sasl_scram_username' => 'jason'
+                     }
+                   }
+                 }
+               }
+             end
+
+             it 'returns false' do
+               expect(subject.valid?).to eq(false)
+               expect(subject.errors).to include('Initialized with `sasl_scram_username` but no `sasl_scram_password`. Please provide both.')
+             end
+           end
+
+           context 'password and no username' do
+             let(:config_hash) do
+               {
+                 'test' => {
+                   'clusters' => {
+                     'test_cluster' => {
+                       'seed_brokers' => ['localhost:9092'],
+                       'sasl_scram_password' => 'jason'
+                     }
+                   }
+                 }
+               }
+             end
+
+             it 'returns false' do
+               expect(subject.valid?).to eq(false)
+               expect(subject.errors).to include('Initialized with `sasl_scram_password` but no `sasl_scram_username`. Please provide both.')
+             end
+           end
+         end
+
+         context 'ssl' do
+           context 'no file paths' do
+             context 'client, no client key' do
+               let(:config_hash) do
+                 {
+                   'test' => {
+                     'clusters' => {
+                       'test_cluster' => {
+                         'seed_brokers' => ['localhost:9092'],
+                         'ssl_ca_cert' => 'test_ca_cert',
+                         'ssl_client_cert' => 'test_client_cert'
+                       }
+                     }
+                   }
+                 }
+               end
+
+               it 'returns false' do
+                 expect(subject.valid?).to eq(false)
+                 expect(subject.errors).to include('Initialized with `ssl_client_cert` but no `ssl_client_cert_key`. Please provide both.')
+               end
+             end
+
+             context 'client key, no client' do
+               let(:config_hash) do
+                 {
+                   'test' => {
+                     'clusters' => {
+                       'test_cluster' => {
+                         'seed_brokers' => ['localhost:9092'],
+                         'ssl_ca_cert' => 'test_ca_cert',
+                         'ssl_client_cert_key' => 'test_client_cert_key'
+                       }
+                     }
+                   }
+                 }
+               end
+
+               it 'returns false' do
+                 expect(subject.valid?).to eq(false)
+                 expect(subject.errors).to include('Initialized with `ssl_client_cert_key`, but no `ssl_client_cert`. Please provide both.')
+               end
+             end
+
+             context 'with client and key, no certificate authority' do
+               let(:config_hash) do
+                 {
+                   'test' => {
+                     'clusters' => {
+                       'test_cluster' => {
+                         'seed_brokers' => ['localhost:9092'],
+                         'ssl_client_cert' => 'test_client_cert',
+                         'ssl_client_cert_key' => 'test_client_cert_key'
+                       }
+                     }
+                   }
+                 }
+               end
+
+               it 'returns false' do
+                 expect(subject.valid?).to eq(false)
+                 expect(subject.errors).to include('Cannot provide client certificate/key without a certificate authority')
+               end
+             end
+           end
+
+           context 'file paths' do
+             context 'client, no client key' do
+               let(:config_hash) do
+                 {
+                   'test' => {
+                     'clusters' => {
+                       'test_cluster' => {
+                         'seed_brokers' => ['localhost:9092'],
+                         'ssl_ca_cert_file_path' => 'test_ca_cert',
+                         'ssl_client_cert_file_path' => 'test_client_cert'
+                       }
+                     }
+                   }
+                 }
+               end
+
+               it 'returns false' do
+                 expect(subject.valid?).to eq(false)
+                 expect(subject.errors).to include('Initialized with `ssl_client_cert` but no `ssl_client_cert_key`. Please provide both.')
+               end
+             end
+
+             context 'client key, no client' do
+               let(:config_hash) do
+                 {
+                   'test' => {
+                     'clusters' => {
+                       'test_cluster' => {
+                         'seed_brokers' => ['localhost:9092'],
+                         'ssl_ca_cert_file_path' => 'test_ca_cert',
+                         'ssl_client_cert_key_file_path' => 'test_client_cert_key'
+                       }
+                     }
+                   }
+                 }
+               end
+
+               it 'returns false' do
+                 expect(subject.valid?).to eq(false)
+                 expect(subject.errors).to include('Initialized with `ssl_client_cert_key`, but no `ssl_client_cert`. Please provide both.')
+               end
+             end
+
+             context 'with client and key, no certificate authority' do
+               let(:config_hash) do
+                 {
+                   'test' => {
+                     'clusters' => {
+                       'test_cluster' => {
+                         'seed_brokers' => ['localhost:9092'],
+                         'ssl_client_cert_file_path' => 'test_client_cert',
+                         'ssl_client_cert_key_file_path' => 'test_client_cert_key'
+                       }
+                     }
+                   }
+                 }
+               end
+
+               it 'returns false' do
+                 expect(subject.valid?).to eq(false)
+                 expect(subject.errors).to include('Cannot provide client certificate/key without a certificate authority')
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
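Read together, the examples above pin down the configuration shape the gem accepts: a top-level key per environment (matching RAILS_ENV), a 'clusters' hash, at least one 'seed_brokers' list of host:port strings, and SASL/SSL options that must be supplied in pairs, with a CA certificate backing any client certificate. The sketch below restates that contract in plain Ruby; it is inferred from the spec expectations only, and the cluster name, description, and credentials are placeholders:

require 'kafka_command'

# Shape implied by the spec: environment key -> 'clusters' -> cluster name -> options.
# Values are placeholders; only keys asserted in the spec above are used.
config_hash = {
  'test' => {
    'clusters' => {
      'test_cluster' => {
        'description'         => 'local development cluster',
        'seed_brokers'        => ['localhost:9092'],   # must be host:port strings
        # SASL credentials are only valid as a pair:
        'sasl_scram_username' => 'jason',
        'sasl_scram_password' => 'secret'
      }
    }
  }
}

config = KafkaCommand::Configuration.new(config_hash)
config.valid?   # => true when RAILS_ENV matches the top-level key ('test' here)
config.errors   # => [] when valid, descriptive messages otherwise

# A YAML file of the same shape can be loaded instead:
# KafkaCommand::Configuration.parse_yaml('spec/dummy/config/kafka_command.yml')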
data/spec/models/kafka_command/broker_spec.rb
@@ -0,0 +1,83 @@
+ # frozen_string_literal: true
+
+ RSpec.describe KafkaCommand::Broker do
+   let(:hostname) { 'localhost' }
+   let(:port) { 9092 }
+   subject { KafkaCommand::Cluster.all.first.brokers.find { |b| b.port == port } }
+
+   describe '#new' do
+     it 'wraps a Kafka::Broker' do
+       expect(subject.broker).to be_an_instance_of(Kafka::Broker)
+     end
+   end
+
+   describe '#connected' do
+     context 'when connected' do
+       it 'returns true' do
+         expect(subject.connected?).to eq(true)
+       end
+     end
+
+     context 'when not connected' do
+       before do
+         allow(subject.broker).to receive(:api_versions).and_raise(Kafka::ConnectionError)
+       end
+
+       it 'returns false' do
+         expect(subject.connected?).to eq(false)
+       end
+     end
+   end
+
+   context 'forwarding' do
+     describe '#port' do
+       it 'forwards port to the Kafka::Broker' do
+         expect(subject.broker).to receive(:port)
+         subject.port
+       end
+
+       it 'returns the port' do
+         expect(subject.port).to eq(port)
+       end
+     end
+
+     describe '#host' do
+       it 'forwards host to the Kafka::Broker' do
+         expect(subject.broker).to receive(:host)
+         subject.host
+       end
+
+       it 'returns the host' do
+         expect(subject.host).to eq('localhost')
+       end
+     end
+
+     describe '#node_id' do
+       it 'forwards node_id to the Kafka::Broker' do
+         expect(subject.broker).to receive(:node_id)
+         subject.node_id
+       end
+
+       it 'returns the node_id' do
+         expect(subject.node_id).to eq(subject.broker.node_id)
+       end
+     end
+
+     describe '#fetch_metadata' do
+       it 'forwards fetch_metadata to the Kafka::Broker' do
+         expect(subject.broker).to receive(:fetch_metadata).and_call_original
+         subject.fetch_metadata
+       end
+
+       it 'returns an instance of Kafka::Protocol::MetadataResponse' do
+         expect(subject.fetch_metadata).to be_an_instance_of(Kafka::Protocol::MetadataResponse)
+       end
+     end
+   end
+
+   describe '#host_with_port' do
+     it 'returns the host and port combination' do
+       expect(subject.host_with_port).to eq("#{hostname}:#{port}")
+     end
+   end
+ end
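The broker spec above describes a thin wrapper: KafkaCommand::Broker exposes the underlying Kafka::Broker, forwards host, port, node_id, and fetch_metadata to it, reports connected? by probing the connection, and formats host_with_port. A rough reconstruction of that wrapper is sketched below; it is derived from the expectations only (the constructor signature in particular is an assumption) and is not copied from data/app/models/kafka_command/broker.rb:

module KafkaCommand
  class Broker
    attr_reader :broker # the wrapped Kafka::Broker

    # Assumed constructor: the spec never builds a Broker directly,
    # it always obtains one through KafkaCommand::Cluster.
    def initialize(broker)
      @broker = broker
    end

    # Plain delegation, as the 'forwarding' examples assert.
    %i[host port node_id fetch_metadata].each do |name|
      define_method(name) { |*args| broker.public_send(name, *args) }
    end

    # connected? probes the broker and treats connection failures as "not connected".
    def connected?
      broker.api_versions
      true
    rescue Kafka::ConnectionError
      false
    end

    def host_with_port
      "#{host}:#{port}"
    end
  end
end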
data/spec/models/kafka_command/client_spec.rb
@@ -0,0 +1,306 @@
+ # frozen_string_literal: true
+
+ RSpec.describe KafkaCommand::Client do
+   let(:host) { 'localhost' }
+   let(:port) { 9092 }
+   let(:brokers) { ["#{host}:#{port}"] }
+   let(:topic_name) { "test-#{SecureRandom.hex(12)}" }
+   let(:group_id) { "test-#{SecureRandom.hex(12)}" }
+   let(:client) { kafka_client }
+
+   subject { described_class.new(brokers: brokers) }
+
+   describe '#new' do
+     it 'wraps a Kafka::Cluster' do
+       expect(subject.cluster).to be_an_instance_of(Kafka::Cluster)
+     end
+   end
+
+   describe '#brokers' do
+     it 'initializes brokers' do
+       expect(subject.brokers).to_not be_empty
+       expect(subject.brokers.count).to eq(ENV['SEED_BROKERS'].split(',').count)
+       expect(subject.brokers.first).to be_an_instance_of(KafkaCommand::Broker)
+       expect(subject.brokers.first.host).to eq('localhost')
+       expect(subject.brokers.map(&:port)).to include(9092)
+     end
+   end
+
+   describe '#topics' do
+     before { create_topic(topic_name) }
+
+     it 'initializes topics' do
+       expect(subject.topics).to_not be_empty
+       expect(subject.topics.first).to be_an_instance_of(KafkaCommand::Topic)
+       expect(subject.topics.map(&:name)).to include(topic_name)
+     end
+   end
+
+   describe '#groups' do
+     before do
+       create_topic(topic_name)
+       run_consumer_group(topic_name, group_id)
+     end
+
+     it 'initializes groups' do
+       expect(subject.groups).to_not be_empty
+       expect(subject.groups.first).to be_an_instance_of(KafkaCommand::ConsumerGroup)
+       expect(subject.groups.map(&:group_id)).to include(group_id)
+     end
+   end
+
+   describe '#refresh_topics!' do
+     it 'refreshes topic information' do
+       expect(subject.topics.map(&:name)).to_not include(topic_name)
+       create_topic(topic_name)
+       subject.refresh_topics!
+       expect(subject.topics).to_not be_empty
+       expect(subject.topics.map(&:name)).to include(topic_name)
+     end
+   end
+
+   describe '#refresh_groups!' do
+     before do
+       subject.groups
+       create_topic(topic_name)
+       run_consumer_group(topic_name, group_id)
+     end
+
+     it 'refreshes group information' do
+       expect(subject.groups.map(&:group_id)).to_not include(group_id)
+       subject.refresh_groups!
+       expect(subject.groups.map(&:group_id)).to include(group_id)
+     end
+   end
+
+   describe '#refresh!' do
+     it 'refreshes cluster information' do
+       expect(subject).to receive(:refresh_topics!).once
+       expect(subject).to receive(:refresh_brokers!).once
+       expect(subject).to receive(:refresh_groups!).once
+       subject.refresh!
+     end
+   end
+
+   describe '#fetch_metadata' do
+     it 'returns a Kafka::Protocol::MetadataResponse' do
+       expect(subject.fetch_metadata).to be_an_instance_of(Kafka::Protocol::MetadataResponse)
+     end
+
+     context 'with topics' do
+       let(:metadata) { subject.fetch_metadata }
+       before { create_topic(topic_name) }
+
+       it 'contains topic and partition metadata' do
+         expect(metadata.topics).to_not be_empty
+         expect(metadata.topics.sample).to be_an_instance_of(Kafka::Protocol::MetadataResponse::TopicMetadata)
+         expect(metadata.topics.sample.partitions.first).to be_an_instance_of(Kafka::Protocol::MetadataResponse::PartitionMetadata)
+       end
+     end
+   end
+
+   describe '#find_topic' do
+     context 'topic exists' do
+       before { create_topic(topic_name) }
+
+       it 'returns the topic' do
+         expect(subject.find_topic(topic_name)).to be_an_instance_of(KafkaCommand::Topic)
+         expect(subject.find_topic(topic_name).name).to eq(topic_name)
+       end
+     end
+
+     context 'topic non-existent' do
+       it 'returns nil' do
+         expect(subject.find_topic(topic_name)).to be_nil
+       end
+     end
+   end
+
+   describe '#connect_to_broker' do
+     let(:broker_id) { subject.brokers.first.node_id }
+
+     it 'returns a KafkaCommand::Broker' do
+       result = subject.connect_to_broker(
+         host: host,
+         port: port,
+         broker_id: broker_id
+       )
+
+       expect(result).to be_an_instance_of(KafkaCommand::Broker)
+       expect(result.port).to eq(port)
+       expect(result.host).to eq(host)
+       expect(result.node_id).to eq(broker_id)
+     end
+   end
+
+   context 'forwarding' do
+     describe '#broker_pool' do
+       it 'forwards broker_pool to the Kafka::Cluster' do
+         expect(subject.cluster).to receive(:broker_pool)
+         subject.broker_pool
+       end
+
+       it 'returns a Kafka::BrokerPool' do
+         expect(subject.broker_pool).to be_an_instance_of(Kafka::BrokerPool)
+       end
+     end
+
+     describe '#delete_topic' do
+       let(:delete_topic_kwargs) do
+         { timeout: 30 }
+       end
+
+       it 'forwards delete_topic to the Kafka::Cluster' do
+         expect(subject.cluster).to receive(:delete_topic).with(topic_name, **delete_topic_kwargs)
+         subject.delete_topic(topic_name, **delete_topic_kwargs)
+       end
+
+       context 'deletion' do
+         before { create_topic(topic_name) }
+
+         it 'deletes the topic' do
+           expect(topic_exists?(topic_name)).to eq(true)
+           subject.delete_topic(topic_name, **delete_topic_kwargs)
+           expect(topic_exists?(topic_name)).to eq(false)
+         end
+       end
+     end
+
+     describe '#alter_topic' do
+       let(:retention_ms) { 1000000 }
+       let(:retention_bytes) { 100000000 }
+       let(:max_message_bytes) { 100000000 }
+       let(:alter_topic_configs) do
+         {
+           'retention.ms' => retention_ms,
+           'retention.bytes' => retention_bytes,
+           'max.message.bytes' => max_message_bytes
+         }
+       end
+
+       it 'forwards alter_topic to the Kafka::Cluster' do
+         expect(subject.cluster).to receive(:alter_topic).with(topic_name, alter_topic_configs)
+         subject.alter_topic(topic_name, alter_topic_configs)
+       end
+
+       context 'altering the topic' do
+         before { create_topic(topic_name) }
+
+         it 'alters the configs' do
+           subject.alter_topic(topic_name, alter_topic_configs)
+           configs = subject.describe_topic(topic_name, alter_topic_configs.keys)
+           expect(configs['max.message.bytes']).to eq(max_message_bytes.to_s)
+           expect(configs['retention.bytes']).to eq(retention_bytes.to_s)
+           expect(configs['retention.ms']).to eq(retention_ms.to_s)
+         end
+       end
+     end
+
+     describe '#describe_topic' do
+       let(:describe_topic_configs) { KafkaCommand::Topic::TOPIC_CONFIGS }
+
+       it 'forwards describe_topic to the Kafka::Cluster' do
+         expect(subject.cluster).to receive(:describe_topic).with(topic_name, describe_topic_configs)
+         subject.describe_topic(topic_name, describe_topic_configs)
+       end
+
+       context 'describing the topic' do
+         before { create_topic(topic_name) }
+
+         it 'describes the topic' do
+           config = subject.describe_topic(topic_name, describe_topic_configs)
+           describe_topic_configs.each { |c| expect(config.key?(c)).to eq(true) }
+         end
+       end
+     end
+
+     describe '#create_partitions_for' do
+       let(:create_partitions_kwargs) do
+         { num_partitions: 5, timeout: 30 }
+       end
+
+       it 'forwards create_partitions_for to the Kafka::Cluster' do
+         expect(subject.cluster).to receive(:create_partitions_for).with(topic_name, **create_partitions_kwargs)
+         subject.create_partitions_for(topic_name, **create_partitions_kwargs)
+       end
+
+       context 'altering partitions' do
+         before { create_topic(topic_name, num_partitions: 1) }
+
+         it 'changes the number of partitions' do
+           expect(partitions_for(topic_name)).to eq(1)
+           subject.create_partitions_for(topic_name, **create_partitions_kwargs)
+           sleep_if_necessary # For flaky spec
+           expect(partitions_for(topic_name)).to eq(5)
+         end
+       end
+     end
+
+     describe '#resolve_offset' do
+       let(:partition_id) { 0 }
+
+       it 'forwards resolve_offset to the Kafka::Cluster' do
+         expect(subject.cluster).to receive(:resolve_offset).with(topic_name, partition_id, :latest)
+         subject.resolve_offset(topic_name, partition_id, :latest)
+       end
+
+       context 'retrieving offsets' do
+         before { create_topic(topic_name) }
+
+         it 'returns the offset' do
+           offset = subject.resolve_offset(topic_name, partition_id, :latest)
+           expect(offset).to eq(0)
+
+           deliver_message('test', topic: topic_name, partition: partition_id)
+           offset = subject.resolve_offset(topic_name, partition_id, :latest)
+           expect(offset).to eq(1)
+         end
+       end
+     end
+
+     describe '#resolve_offsets' do
+       let(:num_partitions) { 2 }
+       let(:partition_ids) { [0, 1] }
+
+       it 'forwards resolve_offsets to the Kafka::Cluster' do
+         expect(subject.cluster).to receive(:resolve_offsets).with(topic_name, partition_ids, :latest)
+         subject.resolve_offsets(topic_name, partition_ids, :latest)
+       end
+
+       context 'retrieving offsets' do
+         before { create_topic(topic_name, num_partitions: num_partitions) }
+
+         it 'returns the offsets' do
+           offsets = subject.resolve_offsets(topic_name, partition_ids, :latest)
+
+           partition_ids.each do |partition_id|
+             expect(offsets[partition_id]).to eq(0)
+
+             deliver_message('test', topic: topic_name, partition: partition_id)
+             offset = subject.resolve_offsets(topic_name, partition_ids, :latest)
+             expect(offset[partition_id]).to eq(1)
+           end
+         end
+       end
+     end
+
+     describe '#describe_group' do
+       it 'forwards describe_group to the Kafka::Cluster' do
+         expect(subject.cluster).to receive(:describe_group).with(group_id)
+         subject.describe_group(group_id)
+       end
+
+       context 'describing' do
+         before do
+           create_topic(topic_name)
+           run_consumer_group(topic_name, group_id)
+         end
+
+         it 'returns the group metadata' do
+           expect(subject.describe_group(group_id)).to be_an_instance_of(Kafka::Protocol::DescribeGroupsResponse::Group)
+           expect(subject.describe_group(group_id).group_id).to eq(group_id)
+         end
+       end
+     end
+   end
+ end
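Finally, the client spec above doubles as a catalogue of the KafkaCommand::Client surface. The usage sketch below is assembled from those expectations; the broker address, topic name, and group id are placeholders, and the commented return values paraphrase the assertions rather than quote the gem's documentation.

client = KafkaCommand::Client.new(brokers: ['localhost:9092'])

client.brokers.map(&:host)     # KafkaCommand::Broker wrappers around Kafka::Broker
client.topics.map(&:name)      # KafkaCommand::Topic wrappers
client.groups.map(&:group_id)  # KafkaCommand::ConsumerGroup wrappers

# Cached cluster state can be reloaded piecemeal or all at once.
client.refresh_topics!
client.refresh_groups!
client.refresh!                # topics + brokers + groups

# Topic administration is forwarded to the underlying Kafka::Cluster.
topic = client.find_topic('events')                     # nil when the topic does not exist
client.create_partitions_for('events', num_partitions: 5, timeout: 30)
client.alter_topic('events', 'retention.ms' => 1_000_000)
client.describe_topic('events', ['retention.ms'])       # => { 'retention.ms' => '1000000' }
client.resolve_offset('events', 0, :latest)             # latest offset of partition 0
client.delete_topic('events', timeout: 30)

# Consumer group metadata comes back as a Kafka::Protocol::DescribeGroupsResponse::Group.
client.describe_group('my-consumer-group')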