kafka_command 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (159) hide show
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +179 -0
  3. data/.env +1 -0
  4. data/.env.test +1 -0
  5. data/.gitignore +41 -0
  6. data/.rspec +1 -0
  7. data/.rubocop.yml +12 -0
  8. data/.ruby-version +1 -0
  9. data/Gemfile +17 -0
  10. data/Gemfile.lock +194 -0
  11. data/LICENSE +21 -0
  12. data/README.md +138 -0
  13. data/Rakefile +34 -0
  14. data/app/assets/config/manifest.js +3 -0
  15. data/app/assets/images/.keep +0 -0
  16. data/app/assets/images/kafka_command/cluster_view.png +0 -0
  17. data/app/assets/images/kafka_command/kafka.png +0 -0
  18. data/app/assets/images/kafka_command/topic_view.png +0 -0
  19. data/app/assets/javascripts/kafka_command/application.js +14 -0
  20. data/app/assets/stylesheets/kafka_command/application.css +27 -0
  21. data/app/assets/stylesheets/kafka_command/clusters.css +8 -0
  22. data/app/assets/stylesheets/kafka_command/topics.css +3 -0
  23. data/app/channels/application_cable/channel.rb +6 -0
  24. data/app/channels/application_cable/connection.rb +6 -0
  25. data/app/controllers/kafka_command/application_controller.rb +96 -0
  26. data/app/controllers/kafka_command/brokers_controller.rb +26 -0
  27. data/app/controllers/kafka_command/clusters_controller.rb +46 -0
  28. data/app/controllers/kafka_command/consumer_groups_controller.rb +44 -0
  29. data/app/controllers/kafka_command/topics_controller.rb +187 -0
  30. data/app/helpers/kafka_command/application_helper.rb +29 -0
  31. data/app/helpers/kafka_command/consumer_group_helper.rb +13 -0
  32. data/app/jobs/application_job.rb +6 -0
  33. data/app/mailers/application_mailer.rb +8 -0
  34. data/app/models/kafka_command/broker.rb +47 -0
  35. data/app/models/kafka_command/client.rb +102 -0
  36. data/app/models/kafka_command/cluster.rb +172 -0
  37. data/app/models/kafka_command/consumer_group.rb +142 -0
  38. data/app/models/kafka_command/consumer_group_partition.rb +23 -0
  39. data/app/models/kafka_command/group_member.rb +18 -0
  40. data/app/models/kafka_command/partition.rb +36 -0
  41. data/app/models/kafka_command/topic.rb +153 -0
  42. data/app/views/kafka_command/brokers/index.html.erb +38 -0
  43. data/app/views/kafka_command/clusters/_tabs.html.erb +9 -0
  44. data/app/views/kafka_command/clusters/index.html.erb +54 -0
  45. data/app/views/kafka_command/clusters/new.html.erb +115 -0
  46. data/app/views/kafka_command/configuration_error.html.erb +1 -0
  47. data/app/views/kafka_command/consumer_groups/index.html.erb +32 -0
  48. data/app/views/kafka_command/consumer_groups/show.html.erb +115 -0
  49. data/app/views/kafka_command/shared/_alert.html.erb +13 -0
  50. data/app/views/kafka_command/shared/_search_bar.html.erb +31 -0
  51. data/app/views/kafka_command/shared/_title.html.erb +6 -0
  52. data/app/views/kafka_command/topics/_form_fields.html.erb +49 -0
  53. data/app/views/kafka_command/topics/edit.html.erb +17 -0
  54. data/app/views/kafka_command/topics/index.html.erb +46 -0
  55. data/app/views/kafka_command/topics/new.html.erb +36 -0
  56. data/app/views/kafka_command/topics/show.html.erb +126 -0
  57. data/app/views/layouts/kafka_command/application.html.erb +50 -0
  58. data/bin/rails +16 -0
  59. data/config/initializers/kafka.rb +13 -0
  60. data/config/initializers/kafka_command.rb +11 -0
  61. data/config/routes.rb +11 -0
  62. data/docker-compose.yml +18 -0
  63. data/kafka_command.gemspec +27 -0
  64. data/lib/assets/.keep +0 -0
  65. data/lib/core_extensions/kafka/broker/attr_readers.rb +11 -0
  66. data/lib/core_extensions/kafka/broker_pool/attr_readers.rb +11 -0
  67. data/lib/core_extensions/kafka/client/attr_readers.rb +11 -0
  68. data/lib/core_extensions/kafka/cluster/attr_readers.rb +11 -0
  69. data/lib/core_extensions/kafka/protocol/metadata_response/partition_metadata/attr_readers.rb +15 -0
  70. data/lib/kafka_command/configuration.rb +150 -0
  71. data/lib/kafka_command/engine.rb +11 -0
  72. data/lib/kafka_command/errors.rb +6 -0
  73. data/lib/kafka_command/version.rb +5 -0
  74. data/lib/kafka_command.rb +13 -0
  75. data/lib/tasks/.keep +0 -0
  76. data/spec/dummy/Rakefile +6 -0
  77. data/spec/dummy/app/assets/config/manifest.js +4 -0
  78. data/spec/dummy/app/assets/javascripts/application.js +15 -0
  79. data/spec/dummy/app/assets/javascripts/cable.js +13 -0
  80. data/spec/dummy/app/assets/stylesheets/application.css +15 -0
  81. data/spec/dummy/app/channels/application_cable/channel.rb +4 -0
  82. data/spec/dummy/app/channels/application_cable/connection.rb +4 -0
  83. data/spec/dummy/app/controllers/application_controller.rb +2 -0
  84. data/spec/dummy/app/helpers/application_helper.rb +2 -0
  85. data/spec/dummy/app/jobs/application_job.rb +2 -0
  86. data/spec/dummy/app/mailers/application_mailer.rb +4 -0
  87. data/spec/dummy/app/models/application_record.rb +3 -0
  88. data/spec/dummy/app/views/layouts/application.html.erb +15 -0
  89. data/spec/dummy/app/views/layouts/mailer.html.erb +13 -0
  90. data/spec/dummy/app/views/layouts/mailer.text.erb +1 -0
  91. data/spec/dummy/bin/bundle +3 -0
  92. data/spec/dummy/bin/rails +4 -0
  93. data/spec/dummy/bin/rake +4 -0
  94. data/spec/dummy/bin/setup +36 -0
  95. data/spec/dummy/bin/update +31 -0
  96. data/spec/dummy/bin/yarn +11 -0
  97. data/spec/dummy/config/application.rb +19 -0
  98. data/spec/dummy/config/boot.rb +5 -0
  99. data/spec/dummy/config/cable.yml +10 -0
  100. data/spec/dummy/config/database.yml +25 -0
  101. data/spec/dummy/config/environment.rb +5 -0
  102. data/spec/dummy/config/environments/development.rb +61 -0
  103. data/spec/dummy/config/environments/production.rb +94 -0
  104. data/spec/dummy/config/environments/test.rb +46 -0
  105. data/spec/dummy/config/initializers/application_controller_renderer.rb +8 -0
  106. data/spec/dummy/config/initializers/assets.rb +14 -0
  107. data/spec/dummy/config/initializers/backtrace_silencers.rb +7 -0
  108. data/spec/dummy/config/initializers/content_security_policy.rb +25 -0
  109. data/spec/dummy/config/initializers/cookies_serializer.rb +5 -0
  110. data/spec/dummy/config/initializers/filter_parameter_logging.rb +4 -0
  111. data/spec/dummy/config/initializers/inflections.rb +16 -0
  112. data/spec/dummy/config/initializers/mime_types.rb +4 -0
  113. data/spec/dummy/config/initializers/wrap_parameters.rb +14 -0
  114. data/spec/dummy/config/kafka_command.yml +18 -0
  115. data/spec/dummy/config/locales/en.yml +33 -0
  116. data/spec/dummy/config/puma.rb +34 -0
  117. data/spec/dummy/config/routes.rb +3 -0
  118. data/spec/dummy/config/spring.rb +6 -0
  119. data/spec/dummy/config/ssl/test_ca_cert +1 -0
  120. data/spec/dummy/config/ssl/test_client_cert +1 -0
  121. data/spec/dummy/config/ssl/test_client_cert_key +1 -0
  122. data/spec/dummy/config/storage.yml +34 -0
  123. data/spec/dummy/config.ru +5 -0
  124. data/spec/dummy/db/schema.rb +42 -0
  125. data/spec/dummy/db/test.sqlite3 +0 -0
  126. data/spec/dummy/log/development.log +0 -0
  127. data/spec/dummy/log/hey.log +0 -0
  128. data/spec/dummy/log/test.log +2227 -0
  129. data/spec/dummy/package.json +5 -0
  130. data/spec/dummy/public/404.html +67 -0
  131. data/spec/dummy/public/422.html +67 -0
  132. data/spec/dummy/public/500.html +66 -0
  133. data/spec/dummy/public/apple-touch-icon-precomposed.png +0 -0
  134. data/spec/dummy/public/apple-touch-icon.png +0 -0
  135. data/spec/dummy/public/favicon.ico +0 -0
  136. data/spec/examples.txt +165 -0
  137. data/spec/fast_helper.rb +20 -0
  138. data/spec/fixtures/files/kafka_command_sasl.yml +10 -0
  139. data/spec/fixtures/files/kafka_command_ssl.yml +10 -0
  140. data/spec/fixtures/files/kafka_command_ssl_file_paths.yml +11 -0
  141. data/spec/fixtures/files/kafka_command_staging.yml +8 -0
  142. data/spec/lib/kafka_command/configuration_spec.rb +311 -0
  143. data/spec/models/kafka_command/broker_spec.rb +83 -0
  144. data/spec/models/kafka_command/client_spec.rb +306 -0
  145. data/spec/models/kafka_command/cluster_spec.rb +163 -0
  146. data/spec/models/kafka_command/consumer_group_partition_spec.rb +43 -0
  147. data/spec/models/kafka_command/consumer_group_spec.rb +236 -0
  148. data/spec/models/kafka_command/partition_spec.rb +95 -0
  149. data/spec/models/kafka_command/topic_spec.rb +311 -0
  150. data/spec/rails_helper.rb +63 -0
  151. data/spec/requests/json/brokers_spec.rb +50 -0
  152. data/spec/requests/json/clusters_spec.rb +58 -0
  153. data/spec/requests/json/consumer_groups_spec.rb +139 -0
  154. data/spec/requests/json/topics_spec.rb +274 -0
  155. data/spec/spec_helper.rb +109 -0
  156. data/spec/support/factory_bot.rb +5 -0
  157. data/spec/support/json_helper.rb +13 -0
  158. data/spec/support/kafka_helper.rb +93 -0
  159. metadata +326 -0
@@ -0,0 +1,274 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'rails_helper'
4
+
5
+ RSpec.describe 'Topics API', type: :request do
6
+ let(:cluster) { KafkaCommand::Cluster.all.first }
7
+ let(:topic_name) { "test-#{SecureRandom.hex(12)}" }
8
+ let(:num_partitions) { 5 }
9
+ let(:replication_factor) { 1 }
10
+ let(:uri_base) { '/clusters' }
11
+ let(:create_topic_kwargs) do
12
+ {
13
+ num_partitions: num_partitions,
14
+ replication_factor: replication_factor
15
+ }
16
+ end
17
+
18
+ before { create_topic(topic_name, **create_topic_kwargs) }
19
+
20
+ describe 'listing all topics' do
21
+ let(:topic_two_name) { "test-#{SecureRandom.hex(12)}" }
22
+
23
+ before { create_topic(topic_two_name) }
24
+
25
+ it 'lists' do
26
+ get "#{uri_base}/#{cluster.id}/topics.json"
27
+ expect(response.status).to eq(200)
28
+ expect(json['data']).to be_an_instance_of(Array)
29
+ expect(json['data'].map { |d| d['name'] }).to include(topic_name)
30
+ expect(json['data'].map { |d| d['name'] }).to include(topic_two_name)
31
+ end
32
+
33
+ context 'filtering' do
34
+ it 'filters by name' do
35
+ get "#{uri_base}/#{cluster.id}/topics.json?name=#{topic_name}"
36
+ expect(response.status).to eq(200)
37
+ expect(json['data']).to be_an_instance_of(Array)
38
+ expect(json['data'].map { |d| d['name'] }).to include(topic_name)
39
+ expect(json['data'].map { |d| d['name'] }).to_not include(topic_two_name)
40
+ end
41
+
42
+ it 'filters by name' do
43
+ get "#{uri_base}/#{cluster.id}/topics.json?name=unknown"
44
+ expect(response.status).to eq(200)
45
+ expect(json['data']).to be_an_instance_of(Array)
46
+ expect(json['data']).to be_empty
47
+ end
48
+ end
49
+ end
50
+
51
+ describe 'showing a topic' do
52
+ context 'topic exists' do
53
+ it 'shows' do
54
+ get "#{uri_base}/#{cluster.id}/topics/#{topic_name}.json"
55
+ expect(response.status).to eq(200)
56
+ expect(json['name']).to eq(topic_name)
57
+ expect(json['partitions']).to be_an_instance_of(Array)
58
+ expect(json['partitions'].count).to eq(num_partitions)
59
+ expect(json['replication_factor']).to eq(replication_factor)
60
+ end
61
+ end
62
+
63
+ context 'topic does not exist' do
64
+ it 'returns 404' do
65
+ get "#{uri_base}/#{cluster.id}/topics/doesnotexist.json"
66
+ expect(response.status).to eq(404)
67
+ expect(response.body).to eq('Topic not found')
68
+ end
69
+ end
70
+ end
71
+
72
+ describe 'creating a topic' do
73
+ let(:topic_two_name) { "test-#{SecureRandom.hex(12)}" }
74
+ let(:new_num_partitions) { num_partitions }
75
+ let(:new_replication_factor) { replication_factor }
76
+ let(:retention_ms) { 1024 }
77
+ let(:retention_bytes) { 10000 }
78
+ let(:max_message_bytes) { 10000 }
79
+ let(:create_topic_params) do
80
+ {
81
+ name: topic_two_name,
82
+ replication_factor: new_replication_factor,
83
+ num_partitions: new_num_partitions,
84
+ retention_bytes: retention_bytes,
85
+ retention_ms: retention_ms,
86
+ max_message_bytes: max_message_bytes
87
+ }
88
+ end
89
+
90
+ it 'creates' do
91
+ expect do
92
+ post "#{uri_base}/#{cluster.id}/topics.json", params: create_topic_params
93
+ expect(response.status).to eq(201)
94
+ expect(json['name']).to eq(topic_two_name)
95
+ expect(json['partitions']).to be_an_instance_of(Array)
96
+ expect(json['partitions'].count).to eq(num_partitions)
97
+ expect(json['replication_factor']).to eq(replication_factor)
98
+ expect(json['config']['retention_ms']).to eq(retention_ms)
99
+ expect(json['config']['retention_bytes']).to eq(retention_bytes)
100
+ expect(json['config']['max_message_bytes']).to eq(max_message_bytes)
101
+ end.to change { cluster.client.refresh_topics!; cluster.topics.count }.by(1)
102
+ end
103
+
104
+ context 'invalid parameters' do
105
+ describe 'no name' do
106
+ let(:topic_two_name) { '' }
107
+
108
+ it 'returns 422' do
109
+ expect do
110
+ post "#{uri_base}/#{cluster.id}/topics.json", params: create_topic_params
111
+ expect(response.status).to eq(422)
112
+ expect(response.body).to eq('Topic must have a name')
113
+ end.to change { cluster.client.refresh_topics!; cluster.topics.count }.by(0)
114
+ end
115
+ end
116
+
117
+ describe 'topic already exists' do
118
+ let(:topic_two_name) { topic_name }
119
+
120
+ it 'returns 422' do
121
+ expect do
122
+ post "#{uri_base}/#{cluster.id}/topics.json", params: create_topic_params
123
+ expect(response.status).to eq(422)
124
+ expect(response.body).to eq('Topic already exists')
125
+ end.to change { cluster.topics.count }.by(0)
126
+ end
127
+ end
128
+
129
+ describe 'invalid max message bytes' do
130
+ let(:max_message_bytes) { -1 }
131
+
132
+ it 'returns 422' do
133
+ expect do
134
+ post "#{uri_base}/#{cluster.id}/topics.json", params: create_topic_params
135
+ expect(response.status).to eq(422)
136
+ expect(response.body).to eq('An unknown error occurred with the request to Kafka. Check any request parameters.')
137
+ end.to change { cluster.topics.count }.by(0)
138
+ end
139
+ end
140
+
141
+ describe 'invalid partitions' do
142
+ let(:new_num_partitions) { -1 }
143
+
144
+ it 'returns 422' do
145
+ expect do
146
+ post "#{uri_base}/#{cluster.id}/topics.json", params: create_topic_params
147
+ expect(response.status).to eq(422)
148
+ expect(response.body).to eq('Num partitions must be > 0 or > current number of partitions')
149
+ end.to change { cluster.topics.count }.by(0)
150
+ end
151
+ end
152
+
153
+ describe 'invalid replication factor' do
154
+ let(:error_message) do
155
+ 'Replication factor must be > 0 and < total number of brokers'
156
+ end
157
+
158
+ describe 'when 0' do
159
+ let(:new_replication_factor) { 0 }
160
+
161
+ it 'returns 422' do
162
+ expect do
163
+ post "#{uri_base}/#{cluster.id}/topics.json", params: create_topic_params
164
+ expect(response.status).to eq(422)
165
+ expect(response.body).to eq(error_message)
166
+ end.to change { cluster.topics.count }.by(0)
167
+ end
168
+ end
169
+
170
+ describe 'when > number of brokers' do
171
+ let(:new_replication_factor) { cluster.brokers.count + 1 }
172
+
173
+ it 'returns 422' do
174
+ expect do
175
+ post "#{uri_base}/#{cluster.id}/topics.json", params: create_topic_params
176
+ expect(response.status).to eq(422)
177
+ expect(response.body).to eq(error_message)
178
+ end.to change { cluster.topics.count }.by(0)
179
+ end
180
+ end
181
+ end
182
+ end
183
+ end
184
+
185
+ describe 'updating a topic' do
186
+ let(:topic) { cluster.topics.find { |t| t.name == topic_name } }
187
+ let(:new_num_partitions) { num_partitions + 1 }
188
+ let(:max_message_bytes) { 1024 }
189
+ let(:retention_ms) { 1024 }
190
+ let(:retention_bytes) { 1024 }
191
+ let(:update_topic_params) do
192
+ {
193
+ num_partitions: new_num_partitions,
194
+ retention_bytes: retention_bytes,
195
+ max_message_bytes: max_message_bytes,
196
+ retention_ms: retention_ms
197
+ }
198
+ end
199
+
200
+ context 'topic exists' do
201
+ it 'updates the topic' do
202
+ patch "#{uri_base}/#{cluster.id}/topics/#{topic_name}.json", params: update_topic_params
203
+ expect(response.status).to eq(200)
204
+ expect(topic.partitions.count).to eq(update_topic_params[:num_partitions])
205
+ expect(topic.retention_ms).to eq(retention_ms)
206
+ expect(topic.retention_bytes).to eq(retention_bytes)
207
+ expect(topic.max_message_bytes).to eq(max_message_bytes)
208
+ end
209
+
210
+ context 'invalid parameters' do
211
+ describe 'invalid num partitions' do
212
+ let(:new_num_partitions) { num_partitions - 1 }
213
+
214
+ it 'returns 422' do
215
+ patch "#{uri_base}/#{cluster.id}/topics/#{topic_name}.json", params: update_topic_params
216
+ expect(response.status).to eq(422)
217
+ expect(response.body).to eq('Num partitions must be > 0 or > current number of partitions')
218
+ end
219
+ end
220
+
221
+ describe 'invalid max message bytes' do
222
+ let(:max_message_bytes) { -1 }
223
+
224
+ it 'returns 422' do
225
+ patch "#{uri_base}/#{cluster.id}/topics/#{topic_name}.json", params: update_topic_params
226
+ expect(response.status).to eq(422)
227
+ expect(response.body).to eq('An unknown error occurred with the request to Kafka. Check any request parameters.')
228
+ end
229
+ end
230
+ end
231
+ end
232
+
233
+ context 'topic does not exist' do
234
+ it 'returns 404' do
235
+ patch "#{uri_base}/#{cluster.id}/topics/nonexistent.json", params: update_topic_params
236
+ expect(response.status).to eq(404)
237
+ end
238
+ end
239
+ end
240
+
241
+ describe 'destroying a topic' do
242
+ context 'topic exists' do
243
+ it 'destroys' do
244
+ expect do
245
+ delete "#{uri_base}/#{cluster.id}/topics/#{topic_name}.json"
246
+ expect(response.status).to eq(204)
247
+ end.to change { cluster.client.refresh_topics!; cluster.topics.count }.by(-1)
248
+ end
249
+
250
+ context 'consumer offsets topic' do
251
+ before do
252
+ allow_any_instance_of(KafkaCommand::Topic).to receive(:name).and_return(KafkaCommand::Topic::CONSUMER_OFFSET_TOPIC)
253
+ end
254
+
255
+ it 'returns 422' do
256
+ delete "#{uri_base}/#{cluster.id}/topics/#{KafkaCommand::Topic::CONSUMER_OFFSET_TOPIC}.json"
257
+ expect(response.status).to eq(422)
258
+ end
259
+ end
260
+ end
261
+
262
+ context 'topic does not exist' do
263
+ before { delete_topic(topic_name) }
264
+
265
+ it 'returns 404' do
266
+ expect do
267
+ delete "#{uri_base}/#{cluster.id}/topics/doesnotexist.json"
268
+ expect(response.status).to eq(404)
269
+ expect(response.body).to eq('Topic not found')
270
+ end.to change { cluster.topics.count }.by(0)
271
+ end
272
+ end
273
+ end
274
+ end
@@ -0,0 +1,109 @@
1
+ # frozen_string_literal: true
2
+
3
+ # This file was generated by the `rails generate rspec:install` command. Conventionally, all
4
+ # specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
5
+ # The generated `.rspec` file contains `--require spec_helper` which will cause
6
+ # this file to always be loaded, without a need to explicitly require it in any
7
+ # files.
8
+ #
9
+ # Given that it is always loaded, you are encouraged to keep this file as
10
+ # light-weight as possible. Requiring heavyweight dependencies from this file
11
+ # will add to the boot time of your test suite on EVERY test run, even for an
12
+ # individual file that may not need all of that loaded. Instead, consider making
13
+ # a separate helper file that requires the additional dependencies and performs
14
+ # the additional setup, and require it from the spec files that actually need
15
+ # it.
16
+ #
17
+ # See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
18
+ #
19
+ # Makes loading files into our tests without loading Rails much easier
20
+ $LOAD_PATH.unshift(File.expand_path('.'))
21
+
22
+ require 'pry'
23
+ require 'json'
24
+ require 'dotenv'
25
+
26
+ Dotenv.load('.env.test.local', '.env.test')
27
+
28
+ require 'support/kafka_helper'
29
+
30
+ RSpec.configure do |config|
31
+ # rspec-expectations config goes here. You can use an alternate
32
+ # assertion/expectation library such as wrong or the stdlib/minitest
33
+ # assertions if you prefer.
34
+ config.expect_with :rspec do |expectations|
35
+ # This option will default to `true` in RSpec 4. It makes the `description`
36
+ # and `failure_message` of custom matchers include text for helper methods
37
+ # defined using `chain`, e.g.:
38
+ # be_bigger_than(2).and_smaller_than(4).description
39
+ # # => "be bigger than 2 and smaller than 4"
40
+ # ...rather than:
41
+ # # => "be bigger than 2"
42
+ expectations.include_chain_clauses_in_custom_matcher_descriptions = true
43
+ end
44
+
45
+ # rspec-mocks config goes here. You can use an alternate test double
46
+ # library (such as bogus or mocha) by changing the `mock_with` option here.
47
+ config.mock_with :rspec do |mocks|
48
+ # Prevents you from mocking or stubbing a method that does not exist on
49
+ # a real object. This is generally recommended, and will default to
50
+ # `true` in RSpec 4.
51
+ mocks.verify_partial_doubles = true
52
+ end
53
+
54
+ # This option will default to `:apply_to_host_groups` in RSpec 4 (and will
55
+ # have no way to turn it off -- the option exists only for backwards
56
+ # compatibility in RSpec 3). It causes shared context metadata to be
57
+ # inherited by the metadata hash of host groups and examples, rather than
58
+ # triggering implicit auto-inclusion in groups with matching metadata.
59
+ config.shared_context_metadata_behavior = :apply_to_host_groups
60
+
61
+ # The settings below are suggested to provide a good initial experience
62
+ # with RSpec, but feel free to customize to your heart's content.
63
+
64
+ # This allows you to limit a spec run to individual examples or groups
65
+ # you care about by tagging them with `:focus` metadata. When nothing
66
+ # is tagged with `:focus`, all examples get run. RSpec also provides
67
+ # aliases for `it`, `describe`, and `context` that include `:focus`
68
+ # metadata: `fit`, `fdescribe` and `fcontext`, respectively.
69
+ config.filter_run_when_matching :focus
70
+
71
+ # Allows RSpec to persist some state between runs in order to support
72
+ # the `--only-failures` and `--next-failure` CLI options. We recommend
73
+ # you configure your source control system to ignore this file.
74
+ config.example_status_persistence_file_path = 'spec/examples.txt'
75
+
76
+ # Limits the available syntax to the non-monkey patched syntax that is
77
+ # recommended. For more details, see:
78
+ # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
79
+ # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
80
+ # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
81
+ config.disable_monkey_patching!
82
+
83
+ # Many RSpec users commonly either run the entire suite or an individual
84
+ # file, and it's useful to allow more verbose output when running an
85
+ # individual spec file.
86
+ if config.files_to_run.one?
87
+ # Use the documentation formatter for detailed output,
88
+ # unless a formatter has already been configured
89
+ # (e.g. via a command-line flag).
90
+ config.default_formatter = 'doc'
91
+ end
92
+
93
+ # Print the 10 slowest examples and example groups at the
94
+ # end of the spec run, to help surface which specs are running
95
+ # particularly slow.
96
+ # config.profile_examples = 10
97
+
98
+ # Run specs in random order to surface order dependencies. If you find an
99
+ # order dependency and want to debug it, you can fix the order by providing
100
+ # the seed, which is printed after each run.
101
+ # --seed 1234
102
+ config.order = :random
103
+
104
+ # Seed global randomization in this process using the `--seed` CLI option.
105
+ # Setting this allows you to use `--seed` to deterministically reproduce
106
+ # test failures related to randomization by passing the same `--seed` value
107
+ # as the one that triggered the failure.
108
+ Kernel.srand config.seed
109
+ end
@@ -0,0 +1,5 @@
1
+ # frozen_string_literal: true
2
+
3
+ RSpec.configure do |config|
4
+ config.include FactoryBot::Syntax::Methods
5
+ end
@@ -0,0 +1,13 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Requests
4
+ module JsonHelper
5
+ def json
6
+ JSON.parse(response.body)
7
+ end
8
+ end
9
+ end
10
+
11
+ RSpec.configure do |config|
12
+ config.include(Requests::JsonHelper)
13
+ end
@@ -0,0 +1,93 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'fast_helper'
4
+ require 'securerandom'
5
+ require 'kafka'
6
+ require 'lib/kafka_command/configuration'
7
+ require 'lib/kafka_command/errors'
8
+ require 'config/initializers/kafka'
9
+ require 'app/models/kafka_command/client'
10
+ require 'app/models/kafka_command/broker'
11
+ require 'app/models/kafka_command/cluster'
12
+ require 'app/models/kafka_command/topic'
13
+ require 'app/models/kafka_command/consumer_group'
14
+ require 'app/models/kafka_command/partition'
15
+ require 'app/models/kafka_command/consumer_group_partition'
16
+ require 'app/models/kafka_command/group_member'
17
+
18
+ $LOAD_PATH.unshift(File.expand_path('.'))
19
+ ENV['RAILS_ENV'] = 'test'
20
+
21
+ KafkaCommand::Configuration.load!('spec/dummy/config/kafka_command.yml')
22
+
23
+ begin
24
+ KafkaCommand::Cluster.all.first.topics
25
+ rescue => e
26
+ puts "#{e.class}. An online kafka cluster is required to run the specs."
27
+ exit(0)
28
+ end
29
+
30
+ module KafkaHelpers
31
+ def kafka_command_cluster
32
+ KafkaCommand::Cluster.all.find { |c| c.name == 'test_cluster' }
33
+ end
34
+
35
+ def kafka_client
36
+ Kafka.new(ENV['SEED_BROKERS'].split(','))
37
+ end
38
+
39
+ def create_topic(topic_name, **kwargs)
40
+ kafka_client.create_topic(topic_name, **kwargs)
41
+ sleep_if_necessary
42
+ end
43
+
44
+ def delete_topic(topic_name)
45
+ kafka_client.delete_topic(topic_name)
46
+ sleep_if_necessary
47
+ end
48
+
49
+ def list_topic_names
50
+ kafka_client.topics
51
+ end
52
+
53
+ def deliver_message(msg, **kwargs)
54
+ kafka_client.deliver_message(msg, **kwargs)
55
+ end
56
+
57
+ def partitions_for(topic_name)
58
+ kafka_client.partitions_for(topic_name)
59
+ end
60
+
61
+ def create_partitions_for(topic_name, **kwargs)
62
+ kafka_client.create_partitions_for(topic_name, **kwargs)
63
+ sleep_if_necessary
64
+ end
65
+
66
+ def topic_exists?(topic_name)
67
+ return false if topic_name.nil? || topic_name.empty?
68
+ list_topic_names.include?(topic_name)
69
+ end
70
+
71
+ def run_consumer_group(topic_name, group_id, num_messages_to_consume: 0)
72
+ deliver_message('test', topic: topic_name)
73
+ consumer = kafka_client.consumer(group_id: group_id)
74
+ consumer.subscribe(topic_name)
75
+
76
+ message_counter = 0
77
+ num_messages_to_consume += 1
78
+ consumer.each_message do |msg|
79
+ yield if block_given?
80
+ message_counter += 1
81
+ consumer.stop if message_counter >= num_messages_to_consume
82
+ end
83
+ end
84
+
85
+ # Sleep if more than one broker is in the cluster to fix flaky tests
86
+ def sleep_if_necessary
87
+ sleep(0.5) if ENV['SEED_BROKERS'].split(',').count > 1
88
+ end
89
+ end
90
+
91
+ RSpec.configure do |config|
92
+ config.include(KafkaHelpers)
93
+ end