aptible-cli 0.19.4 → 0.19.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,334 @@
+ require 'spec_helper'
+
+ describe Aptible::CLI::Helpers::S3LogHelpers do
+   subject { Class.new.send(:include, described_class).new }
+   let(:v2_pfx) { 'mystack/shareable/v2/fakesha' }
+   let(:v3_pfx) { 'mystack/shareable/v3/fakesha' }
+   let(:v2app) do
+     "#{v2_pfx}/apps-321/fakebread-json.log.2022-06-29T18:30:01.bck.gz"
+   end
+   let(:v2app_rotated) do
+     "#{v2_pfx}/apps-321/fakebread-json.1.log.2022-06-29T18:30:01.bck.gz"
+   end
+   let(:v3app) do
+     "#{v3_pfx}/apps-321/service-123/deadbeef-json.log." \
+     '2022-08-24T21:12:33.2022-08-24T21:14:38.archived.gz'
+   end
+   let(:v3db) do
+     "#{v3_pfx}/databases-321/fakebread-json.log." \
+     '2022-08-24T21:12:33.2022-08-24T21:14:38.archived.gz'
+   end
+   let(:v3db_rotated) do
+     "#{v3_pfx}/databases-321/fakebread-json.log.1." \
+     '2022-08-24T21:12:33.2022-08-24T21:14:38.archived.gz'
+   end
+
+   describe '#ensure_aws_creds' do
+     it 'Raises if no keys are provided via ENV' do
+       expect { subject.ensure_aws_creds }
+         .to raise_error(Thor::Error, /Missing environment variable/)
+     end
+
+     it 'Accepts AWS keypair from the ENV' do
+       ENV['AWS_ACCESS_KEY_ID'] = 'foo'
+       ENV['AWS_SECRET_ACCESS_KEY'] = 'bar'
+       expect { subject.ensure_aws_creds }.to_not raise_error
+     end
+   end
+
+   describe '#info_from_path' do
+     context 'time zones are in UTC' do
+       it 'processes v2 upload time in UTC' do
+         result = subject.info_from_path(v2app)
+         expect(result[:uploaded_at].zone).to eq('UTC')
+       end
+
+       it 'processes v3 log times in UTC' do
+         result = subject.info_from_path(v3app)
+         expect(result[:start_time].zone).to eq('UTC')
+         expect(result[:end_time].zone).to eq('UTC')
+       end
+     end
+
+     it 'does not choke on v3 logs with unknown timestamps' do
+       path = "#{v3_pfx}/apps-321/service-123/deadbeef-json.log." \
+              'unknown.unknown.archived.gz'
+       result = subject.info_from_path(path)
+       expect(result[:start_time]).to be(nil)
+       expect(result[:end_time]).to be(nil)
+     end
+
+     it 'can read app data from v2 paths' do
+       result = subject.info_from_path(v2app)
+       expect(result[:schema]).to eq('v2')
+       expect(result[:shasum]).to eq('fakesha')
+       expect(result[:type]).to eq('apps')
+       expect(result[:id]).to eq(321)
+       expect(result[:service_id]).to be(nil)
+       expect(result[:container_id]).to eq('fakebread')
+       expect(result[:uploaded_at]).to eq('2022-06-29T18:30:01')
+       expect(result[:start_time]).to be(nil)
+       expect(result[:end_time]).to be(nil)
+     end
+
+     it 'can read app data from v3 paths' do
+       result = subject.info_from_path(v3app)
+       expect(result[:schema]).to eq('v3')
+       expect(result[:shasum]).to eq('fakesha')
+       expect(result[:type]).to eq('apps')
+       expect(result[:id]).to eq(321)
+       expect(result[:service_id]).to eq(123)
+       expect(result[:container_id]).to eq('deadbeef')
+       expect(result[:uploaded_at]).to be(nil)
+       expect(result[:start_time]).to eq('2022-08-24T21:12:33')
+       expect(result[:end_time]).to eq('2022-08-24T21:14:38')
+     end
+
+     it 'can read db data from v3 paths' do
+       result = subject.info_from_path(v3db)
+       expect(result[:schema]).to eq('v3')
+       expect(result[:shasum]).to eq('fakesha')
+       expect(result[:type]).to eq('databases')
+       expect(result[:id]).to eq(321)
+       expect(result[:service_id]).to be(nil)
+       expect(result[:container_id]).to eq('fakebread')
+       expect(result[:uploaded_at]).to be(nil)
+       expect(result[:start_time]).to eq('2022-08-24T21:12:33')
+       expect(result[:end_time]).to eq('2022-08-24T21:14:38')
+     end
+
+     context 'files that have been rotated by docker (.json.log.1)' do
+       it 'can read data from v3 paths' do
+         result = subject.info_from_path(v3db_rotated)
+         expect(result[:schema]).to eq('v3')
+         expect(result[:shasum]).to eq('fakesha')
+         expect(result[:type]).to eq('databases')
+         expect(result[:id]).to eq(321)
+         expect(result[:service_id]).to be(nil)
+         expect(result[:container_id]).to eq('fakebread')
+         expect(result[:uploaded_at]).to be(nil)
+         expect(result[:start_time]).to eq('2022-08-24T21:12:33')
+         expect(result[:end_time]).to eq('2022-08-24T21:14:38')
+       end
+
+       it 'can read app data from v2 paths' do
+         result = subject.info_from_path(v2app)
+         expect(result[:schema]).to eq('v2')
+         expect(result[:shasum]).to eq('fakesha')
+         expect(result[:type]).to eq('apps')
+         expect(result[:id]).to eq(321)
+         expect(result[:service_id]).to be(nil)
+         expect(result[:container_id]).to eq('fakebread')
+         expect(result[:uploaded_at]).to eq('2022-06-29T18:30:01')
+         expect(result[:start_time]).to be(nil)
+         expect(result[:end_time]).to be(nil)
+       end
+     end
+   end
+
+   describe '#validate_log_search_options' do
+     it 'Forces you to identify the files with a supported option' do
+       opts = {}
+       expect { subject.validate_log_search_options(opts) }
+         .to raise_error(Thor::Error, / specify an option to identify/)
+     end
+
+     it 'Does not let you pass --string-matches and id options' do
+       opts = { string_matches: ['foo'], app_id: 123 }
+       expect { subject.validate_log_search_options(opts) }
+         .to raise_error(Thor::Error, /cannot pass/)
+     end
+
+     it 'Does not let you pass multiple id options' do
+       opts = { database_id: 12, app_id: 23 }
+       expect { subject.validate_log_search_options(opts) }
+         .to raise_error(Thor::Error, /specify only one of/)
+     end
+
+     it 'Does not let you use date options with string-matches' do
+       opts = { string_matches: 12, start_date: 'foo' }
+       expect { subject.validate_log_search_options(opts) }
+         .to raise_error(Thor::Error, /cannot be used when searching by string/)
+     end
+
+     it 'Does not allow an open-ended date range' do
+       opts = { app_id: 123, start_date: 'foo' }
+       expect { subject.validate_log_search_options(opts) }
+         .to raise_error(Thor::Error, /must pass both/)
+     end
+
+     it 'Ensures you have provided a long enough container ID' do
+       opts = { container_id: 'tooshort' }
+       expect { subject.validate_log_search_options(opts) }
+         .to raise_error(Thor::Error, /at least the first 12/)
+     end
+
+     it 'Requires you to pass keys when downloading' do
+       opts = { app_id: 123, download_location: 'asdf' }
+       expect { subject.validate_log_search_options(opts) }
+         .to raise_error(Thor::Error, /You must provide decryption keys/)
+     end
+   end
+
+   describe '#find_s3_files_by_string_match' do
+     client_stub = Aws::S3::Client.new(stub_responses: true)
+     client_stub.stub_responses(
+       :list_buckets, buckets: [{ name: 'bucket' }]
+     )
+     client_stub.stub_responses(
+       :list_objects_v2, contents: [
+         { key: 'stack/it/doesnt/matter' },
+         { key: 'stack/matter/it/does/not/yoda' }
+       ]
+     )
+     before do
+       subject.stub(:s3_client) do
+         Aws::S3::Resource.new(region: 'us-east-1', client: client_stub)
+       end
+     end
+
+     it 'finds files with a single matching string' do
+       strings = %w(yoda)
+       result = subject.find_s3_files_by_string_match('us-east-1', 'bucket',
+                                                      'stack', strings)
+       expect(result).to match_array(%w(stack/matter/it/does/not/yoda))
+     end
+
+     it 'finds files with two matching strings' do
+       strings = %w(it matter)
+       result = subject.find_s3_files_by_string_match('us-east-1', 'bucket',
+                                                      'stack', strings)
+       expect(result).to match_array(%w(stack/it/doesnt/matter
+                                        stack/matter/it/does/not/yoda))
+     end
+
+     it 'only finds files with all matching strings' do
+       strings = %w(it yoda)
+       result = subject.find_s3_files_by_string_match('us-east-1', 'bucket',
+                                                      'stack', strings)
+       expect(result).to match_array(%w(stack/matter/it/does/not/yoda))
+     end
+   end
+
+   describe '#find_s3_files_by_attrs' do
+     before do
+       client_stub = Aws::S3::Client.new(stub_responses: true)
+       client_stub.stub_responses(
+         :list_buckets, buckets: [{ name: 'bucket' }]
+       )
+       client_stub.stub_responses(
+         :list_objects_v2, contents: [
+           { key: v2app },
+           { key: v2app_rotated },
+           { key: v3db_rotated },
+           { key: v3db },
+           { key: v3app }
+         ]
+       )
+       subject.stub(:s3_client) do
+         Aws::S3::Resource.new(region: 'us-east-1', client: client_stub)
+       end
+     end
+
+     it 'can find apps by id' do
+       attrs = { type: 'apps', id: 321 }
+       result = subject.find_s3_files_by_attrs('us-east-1', 'bucket',
+                                               'stack', attrs)
+       expect(result).to match_array([v3app, v2app, v2app_rotated])
+     end
+
+     it 'can find databases by id' do
+       attrs = { type: 'databases', id: 321 }
+       result = subject.find_s3_files_by_attrs('us-east-1', 'bucket',
+                                               'stack', attrs)
+       expect(result).to match_array([v3db, v3db_rotated])
+     end
+
+     it 'can find by other attributes of the log file like container id' do
+       attrs = { container_id: 'deadbeef' }
+       result = subject.find_s3_files_by_attrs('us-east-1', 'bucket',
+                                               'stack', attrs)
+       expect(result).to match_array([v3app])
+     end
+   end
+
+   describe '#time_match?' do
+     # Here's a representation of the test cases. We keep the file timestamps
+     # fixed and move --start-date/--end-date around to all possible combos.
+     # Note that we do force the start to be earlier than the end, which
+     # keeps the logic here quite simple.
+
+     #   |  |se
+     #   | s|e
+     #  s|  |e
+     #   |se|
+     #  s|e |
+     # se|  |
+
+     # s = start / lower bound of search
+     # e = end / upper bound of search
+     # |'s are the first and last timestamp in the file
+
+     let(:first_log) { Time.parse('2022-08-01T00:00:00') }
+     let(:last_log) { Time.parse('2022-09-01T00:00:00') }
+     let(:before) { Time.parse('2022-07-01T00:00:00') }
+     let(:between) { Time.parse('2022-08-15T00:00:00') }
+     let(:after) { Time.parse('2022-10-01T00:00:00') }
+
+     context 'identifies files that may have lines within a range' do
+       it 'before before does not match' do
+         range = [before, before]
+         expect(subject.time_match?(range, first_log, last_log)).to be(false)
+       end
+
+       it 'before between matches' do
+         range = [before, between]
+         expect(subject.time_match?(range, first_log, last_log)).to be(true)
+       end
+
+       it 'between between matches' do
+         range = [between, between]
+         expect(subject.time_match?(range, first_log, last_log)).to be(true)
+       end
+
+       it 'before after matches' do
+         range = [before, after]
+         expect(subject.time_match?(range, first_log, last_log)).to be(true)
+       end
+
+       it 'between after matches' do
+         range = [between, after]
+         expect(subject.time_match?(range, first_log, last_log)).to be(true)
+       end
+
+       it 'after after does not match' do
+         range = [after, after]
+         expect(subject.time_match?(range, first_log, last_log)).to be(false)
+       end
+     end
+   end
+
+   describe '#utc_date' do
+     e = 'Please provide dates in YYYY-MM-DD format'
+
+     it 'converts strings to dates in UTC' do
+       result = subject.utc_date('2022-08-30')
+       expect(result).to be_a(Time)
+       expect(result).to eq(Time.utc(2022, 8, 30, 0, 0, 0))
+     end
+
+     it 'raises an error if the input is a valid date/time in wrong format' do
+       expect do
+         subject.utc_date('2022-08-30 11:32')
+       end.to raise_error(Thor::Error, e)
+     end
+
+     it 'raises an error if the input is wrong' do
+       expect do
+         subject.utc_date('foobar')
+       end.to raise_error(Thor::Error, e)
+     end
+   end
+ end
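
The truth table above pins #time_match? down to a plain interval-overlap check between the search range and the file's first/last timestamps. As a reading aid, here is a minimal Ruby sketch of that logic; the method name and arguments come straight from the spec, while the body is a reconstruction and not necessarily the shipped implementation:

    # Reconstructed from the spec's truth table (an assumption, not the
    # package's actual code): a file matches when its [first_log, last_log]
    # span overlaps the search range at any point.
    def time_match?(range, first_log, last_log)
      search_start, search_end = range
      search_start <= last_log && search_end >= first_log
    end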
@@ -60,4 +60,137 @@ describe Aptible::CLI::Agent do
        expect { subject.send(:logs) }.to raise_error(/only one of/im)
      end
    end
+
+   describe '#logs_from_archive' do
+     context 'using string-matches' do
+       let(:files) { %w(file_1 file_2) }
+
+       before do
+         subject.options = {
+           region: 'some-region',
+           bucket: 'some-bucket',
+           decryption_keys: 'mykey',
+           string_matches: 'foo',
+           download_location: './'
+         }
+         subject.stub(:info_from_path) { { shasum: 'foo' } }
+         subject.stub(:encryption_key) { subject.options[:decryption_keys] }
+       end
+
+       it 'downloads all files' do
+         expect(subject).to receive(:ensure_aws_creds)
+         expect(subject).to receive(:validate_log_search_options)
+           .with(subject.options)
+
+         expect(subject).to receive(:find_s3_files_by_string_match)
+           .with(
+             subject.options[:region],
+             subject.options[:bucket],
+             subject.options[:stack],
+             subject.options[:string_matches]
+           ).and_return(files)
+
+         files.each do |f|
+           expect(subject).to receive(:decrypt_and_translate_s3_file)
+             .with(
+               f,
+               subject.options[:decryption_keys],
+               subject.options[:region],
+               subject.options[:bucket],
+               subject.options[:download_location]
+             )
+         end
+         subject.send('logs_from_archive')
+       end
+     end
+
+     context 'using app/database/endpoint id' do
+       let(:files) { %w(file_1 file_2) }
+
+       before do
+         subject.options = {
+           region: 'some-region',
+           bucket: 'some-bucket',
+           stack: 'mystack',
+           decryption_keys: 'mykey',
+           app_id: 123,
+           download_location: './'
+         }
+         subject.stub(:info_from_path) { { shasum: 'foo' } }
+         subject.stub(:encryption_key) { subject.options[:decryption_keys] }
+       end
+
+       it 'downloads all files' do
+         expect(subject).to receive(:ensure_aws_creds)
+         expect(subject).to receive(:validate_log_search_options)
+           .with(subject.options)
+
+         expect(subject).to receive(:find_s3_files_by_attrs)
+           .with(
+             subject.options[:region],
+             subject.options[:bucket],
+             subject.options[:stack],
+             { type: 'apps', id: 123 },
+             nil
+           ).and_return(files)
+
+         files.each do |f|
+           expect(subject).to receive(:decrypt_and_translate_s3_file)
+             .with(
+               f,
+               subject.options[:decryption_keys],
+               subject.options[:region],
+               subject.options[:bucket],
+               subject.options[:download_location]
+             )
+         end
+         subject.send('logs_from_archive')
+       end
+     end
+
+     context 'using container id' do
+       let(:files) { %w(file_1 file_2) }
+
+       before do
+         subject.options = {
+           region: 'some-region',
+           bucket: 'some-bucket',
+           stack: 'mystack',
+           decryption_keys: 'mykey',
+           container_id:
+             '9080b96447f98b31ef9831d5fd98b09e3c5c545269734e2e825644571152457c',
+           download_location: './'
+         }
+         subject.stub(:info_from_path) { { shasum: 'foo' } }
+         subject.stub(:encryption_key) { subject.options[:decryption_keys] }
+       end
+
+       it 'downloads all files' do
+         expect(subject).to receive(:ensure_aws_creds)
+         expect(subject).to receive(:validate_log_search_options)
+           .with(subject.options)
+
+         expect(subject).to receive(:find_s3_files_by_attrs)
+           .with(
+             subject.options[:region],
+             subject.options[:bucket],
+             subject.options[:stack],
+             { container_id: subject.options[:container_id] },
+             nil
+           ).and_return(files)
+
+         files.each do |f|
+           expect(subject).to receive(:decrypt_and_translate_s3_file)
+             .with(
+               f,
+               subject.options[:decryption_keys],
+               subject.options[:region],
+               subject.options[:bucket],
+               subject.options[:download_location]
+             )
+         end
+         subject.send('logs_from_archive')
+       end
+     end
+   end
  end
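
Read together, these three contexts pin down the control flow of logs_from_archive: check credentials, validate options, pick a search strategy (string match vs. resource attributes), then decrypt each matching file. A rough Ruby outline of that flow as implied by the stubbed call sequence (the helper names come from the expectations above; the glue code and attrs_from_options are assumptions):

    # Assumed outline implied by the mocks above, not the shipped method body.
    def logs_from_archive
      ensure_aws_creds
      validate_log_search_options(options)

      files =
        if options[:string_matches]
          find_s3_files_by_string_match(
            options[:region], options[:bucket], options[:stack],
            options[:string_matches]
          )
        else
          # attrs_from_options is a hypothetical helper standing in for
          # however the command builds e.g. { type: 'apps', id: 123 } or
          # { container_id: ... } from its options.
          find_s3_files_by_attrs(
            options[:region], options[:bucket], options[:stack],
            attrs_from_options, nil
          )
        end

      files.each do |file|
        decrypt_and_translate_s3_file(
          file, options[:decryption_keys], options[:region],
          options[:bucket], options[:download_location]
        )
      end
    end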
@@ -0,0 +1,189 @@
+ require 'spec_helper'
+
+ describe Aptible::CLI::Agent do
+   include Aptible::CLI::Helpers::DateHelpers
+
+   let(:token) { double('token') }
+
+   before do
+     allow(subject).to receive(:ask)
+     allow(subject).to receive(:save_token)
+     allow(subject).to receive(:fetch_token) { token }
+   end
+
+   let(:handle) { 'foobar' }
+   let(:stack) { Fabricate(:stack, internal_domain: 'aptible.in') }
+   let(:account) { Fabricate(:account, stack: stack) }
+   let(:database) { Fabricate(:database, handle: handle, account: account) }
+   let(:staging) { Fabricate(:account, handle: 'staging') }
+   let(:prod) { Fabricate(:account, handle: 'production') }
+   let(:window_start) { '2073-09-05T22:00:00.000Z' }
+   let(:window_end) { '2073-09-05T23:00:00.000Z' }
+   let(:maintenance_dbs) do
+     [
+       [staging, 'staging-redis-db', [window_start, window_end]],
+       [staging, 'staging-postgres-db', nil],
+       [prod, 'prod-elsearch-db', [window_start, window_end]],
+       [prod, 'prod-postgres-db', nil]
+     ].map do |a, h, m|
+       Fabricate(
+         :maintenance_database,
+         account: a,
+         handle: h,
+         maintenance_deadline: m
+       )
+     end
+   end
+   let(:maintenance_apps) do
+     [
+       [staging, 'staging-app-1', [window_start, window_end]],
+       [staging, 'staging-app-2', nil],
+       [prod, 'prod-app-1', [window_start, window_end]],
+       [prod, 'prod-app-2', nil]
+     ].map do |a, h, m|
+       Fabricate(
+         :maintenance_app,
+         account: a,
+         handle: h,
+         maintenance_deadline: m
+       )
+     end
+   end
+
+   describe '#maintenance:dbs' do
+     before do
+       token = 'the-token'
+       allow(subject).to receive(:fetch_token) { token }
+       allow(Aptible::Api::Account).to receive(:all)
+         .with(token: token)
+         .and_return([staging, prod])
+       allow(Aptible::Api::MaintenanceDatabase).to receive(:all)
+         .with(token: token)
+         .and_return(maintenance_dbs)
+     end
+
+     context 'when no account is specified' do
+       it 'prints out the grouped database handles for all accounts' do
+         subject.send('maintenance:dbs')
+
+         expect(captured_output_text).to include('=== staging')
+         expect(captured_output_text).to include('staging-redis-db')
+         expect(captured_output_text).to include('2073-09-05 22:00:00 UTC')
+         expect(captured_output_text).to include('2073-09-05 23:00:00 UTC')
+         expect(captured_output_text).not_to include('staging-postgres-db')
+
+         expect(captured_output_text).to include('=== production')
+         expect(captured_output_text).to include('prod-elsearch-db')
+         expect(captured_output_text).to include('2073-09-05 22:00:00 UTC')
+         expect(captured_output_text).to include('2073-09-05 23:00:00 UTC')
+         expect(captured_output_text).not_to include('prod-postgres-db')
+
+         expect(captured_output_json.to_s)
+           .to include(
+             'aptible db:restart staging-redis-db --environment staging'
+           )
+         expect(captured_output_json.to_s)
+           .to include(
+             'aptible db:restart prod-elsearch-db --environment production'
+           )
+       end
+     end
+
+     context 'when a valid account is specified' do
+       it 'prints out the database handles for the account' do
+         subject.options = { environment: 'staging' }
+         subject.send('maintenance:dbs')
+
+         expect(captured_output_text).to include('=== staging')
+         expect(captured_output_text).to include('staging-redis-db')
+         expect(captured_output_text).to include('2073-09-05 22:00:00 UTC')
+         expect(captured_output_text).to include('2073-09-05 23:00:00 UTC')
+         expect(captured_output_text).not_to include('staging-postgres-db')
+
+         expect(captured_output_text).not_to include('=== production')
+         expect(captured_output_text).not_to include('prod-elsearch-db')
+         expect(captured_output_text).not_to include('prod-postgres-db')
+
+         expect(captured_output_json.to_s)
+           .to include(
+             'aptible db:restart staging-redis-db --environment staging'
+           )
+       end
+     end
+
+     context 'when an invalid account is specified' do
+       it 'prints out an error' do
+         subject.options = { environment: 'foo' }
+         expect { subject.send('maintenance:dbs') }
+           .to raise_error('Specified account does not exist')
+       end
+     end
+   end
+   describe '#maintenance:apps' do
+     before do
+       token = 'the-token'
+       allow(subject).to receive(:fetch_token) { token }
+       allow(Aptible::Api::Account).to receive(:all).with(token: token)
+         .and_return([staging, prod])
+       allow(Aptible::Api::MaintenanceApp).to receive(:all).with(token: token)
+         .and_return(maintenance_apps)
+     end
+
+     context 'when no account is specified' do
+       it 'prints out the grouped app handles for all accounts' do
+         subject.send('maintenance:apps')
+
+         expect(captured_output_text).to include('=== staging')
+         expect(captured_output_text).to include('staging-app-1')
+         expect(captured_output_text).to include('2073-09-05 22:00:00 UTC')
+         expect(captured_output_text).to include('2073-09-05 23:00:00 UTC')
+         expect(captured_output_text).not_to include('staging-app-2')
+
+         expect(captured_output_text).to include('=== production')
+         expect(captured_output_text).to include('prod-app-1')
+         expect(captured_output_text).to include('2073-09-05 22:00:00 UTC')
+         expect(captured_output_text).to include('2073-09-05 23:00:00 UTC')
+         expect(captured_output_text).not_to include('prod-app-2')
+
+         expect(captured_output_json.to_s)
+           .to include(
+             'aptible restart --app staging-app-1 --environment staging'
+           )
+         expect(captured_output_json.to_s)
+           .to include(
+             'aptible restart --app prod-app-1 --environment production'
+           )
+       end
+     end
+
+     context 'when a valid account is specified' do
+       it 'prints out the app handles for the account' do
+         subject.options = { environment: 'staging' }
+         subject.send('maintenance:apps')
+
+         expect(captured_output_text).to include('=== staging')
+         expect(captured_output_text).to include('staging-app-1')
+         expect(captured_output_text).to include('2073-09-05 22:00:00 UTC')
+         expect(captured_output_text).to include('2073-09-05 23:00:00 UTC')
+         expect(captured_output_text).not_to include('staging-app-2')
+
+         expect(captured_output_text).not_to include('=== production')
+         expect(captured_output_text).not_to include('prod-app-1')
+         expect(captured_output_text).not_to include('prod-app-2')
+
+         expect(captured_output_json.to_s)
+           .to include(
+             'aptible restart --app staging-app-1 --environment staging'
+           )
+       end
+     end
+
+     context 'when an invalid account is specified' do
+       it 'prints out an error' do
+         subject.options = { environment: 'foo' }
+         expect { subject.send('maintenance:apps') }
+           .to raise_error('Specified account does not exist')
+       end
+     end
+   end
+ end
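
Both commands exercise the same shape of output: keep only resources with a pending maintenance_deadline, group them under an '=== <account>' heading, print the window in UTC, and emit a copy-pasteable restart command. An illustrative Ruby sketch of the database variant (the method name is hypothetical; only the output strings are taken from the assertions above):

    require 'time'

    # Hypothetical rendering helper reconstructed from the assertions; the
    # CLI's real method and formatting may differ.
    def list_maintenance_dbs(accounts, maintenance_dbs)
      accounts.each do |account|
        say "=== #{account.handle}"
        maintenance_dbs
          .select { |db| db.account == account && db.maintenance_deadline }
          .each do |db|
            window_start, window_end =
              db.maintenance_deadline.map { |t| Time.parse(t).utc }
            say "#{db.handle}: #{window_start} #{window_end}"
            say "aptible db:restart #{db.handle} " \
                "--environment #{account.handle}"
          end
      end
    end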
@@ -86,7 +86,7 @@ describe Aptible::CLI::Agent do
      end

      context 'influxdb:custom' do
-       it 'creates a new InfluxDB metric drain' do
+       it 'creates a new InfluxDB v1 metric drain' do
          opts = {
            handle: 'test-influxdb-custom',
            drain_type: :influxdb,
@@ -111,6 +111,32 @@ describe Aptible::CLI::Agent do
        end
      end

+     context 'influxdb:customv2' do
+       it 'creates a new InfluxDB v2 metric drain' do
+         opts = {
+           handle: 'test-influxdb2-custom',
+           drain_type: :influxdb2,
+           drain_configuration: {
+             address: 'https://test.foo.com:443',
+             org: 'foobar',
+             authToken: 'bar',
+             bucket: 'foo'
+           }
+         }
+         expect_provision_metric_drain(opts)
+
+         subject.options = {
+           environment: account.handle,
+           bucket: 'foo',
+           token: 'bar',
+           org: 'foobar',
+           url: 'https://test.foo.com:443'
+         }
+         subject.send('metric_drain:create:influxdb:customv2',
+                      'test-influxdb2-custom')
+       end
+     end
+
      context 'datadog' do
        it 'creates a new Datadog metric drain' do
          opts = {
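
The new customv2 context also documents how the CLI flags map onto the drain's provision payload: --url becomes address, --token becomes authToken, and --org/--bucket carry over unchanged. A small Ruby sketch of that mapping as implied by the expected opts (the method name is hypothetical, not the package's actual code):

    # Hypothetical helper showing the flag-to-payload mapping the spec
    # expects; reconstructed for illustration only.
    def influxdb2_drain_configuration(options)
      {
        address: options[:url],
        org: options[:org],
        authToken: options[:token],
        bucket: options[:bucket]
      }
    end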