aptible-cli 0.19.3 → 0.19.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -84,6 +84,7 @@ module Aptible
84
84
  version = options[:version]
85
85
 
86
86
  if version && type
87
+ validate_image_type(type)
87
88
  image = find_database_image(type, version)
88
89
  db_opts[:type] = image.type
89
90
  db_opts[:database_image] = image
@@ -91,6 +92,7 @@ module Aptible
91
92
  raise Thor::Error, '--type is required when passing --version'
92
93
  else
93
94
  db_opts[:type] = type || 'postgresql'
95
+ validate_image_type(db_opts[:type])
94
96
  end
95
97
 
96
98
  database = account.create_database!(db_opts)
@@ -346,6 +348,24 @@ module Aptible
346
348
  end
347
349
  end
348
350
  end
351
+
352
+ desc 'db:rename OLD_HANDLE NEW_HANDLE [--environment'\
353
+ ' ENVIRONMENT_HANDLE]', 'Rename a database handle. In order'\
354
+ ' for the new database handle to appear in log drain and'\
355
+ ' metric drain destinations, you must reload the database.'
356
+ option :environment
357
+ define_method 'db:rename' do |old_handle, new_handle|
358
+ env = ensure_environment(options)
359
+ db = ensure_database(options.merge(db: old_handle))
360
+ db.update!(handle: new_handle)
361
+ m1 = "In order for the new database name (#{new_handle}) to"\
362
+ ' appear in log drain and metric drain destinations,'\
363
+ ' you must reload the database.'
364
+ m2 = 'You can reload your database with this command: "aptible'\
365
+ " db:reload #{new_handle} --environment #{env.handle}\""
366
+ CLI.logger.warn m1
367
+ CLI.logger.info m2
368
+ end
349
369
  end
350
370
  end
351
371
  end
@@ -41,6 +41,25 @@ module Aptible
41
41
  end
42
42
  end
43
43
  end
44
+
45
+ desc 'environment:rename OLD_HANDLE NEW_HANDLE',
46
+ 'Rename an environment handle. In order for the new'\
47
+ ' environment handle to appear in log drain/metric'\
48
+ ' destinations, you must restart the apps/databases in'\
49
+ ' this environment.'
50
+ define_method 'environment:rename' do |old_handle, new_handle|
51
+ env = ensure_environment(options.merge(environment: old_handle))
52
+ env.update!(handle: new_handle)
53
+ m1 = "In order for the new environment handle (#{new_handle})"\
54
+ ' to appear in log drain and metric drain destinations,'\
55
+ ' you must restart the apps and databases in this'\
56
+ ' environment. Also be aware of the following resources'\
57
+ ' that may need names adjusted:'
58
+ m2 = "* Git remote URLs (ex: git@beta.aptible.com:#{new_handle}"\
59
+ '/APP_HANDLE.git)'
60
+ m3 = '* Your own external scripts (e.g. for CI/CD)'
61
+ [m1, m2, m3].each { |val| CLI.logger.info val }
62
+ end
44
63
  end
45
64
  end
46
65
  end
@@ -1,4 +1,6 @@
1
+ require 'aws-sdk'
1
2
  require 'shellwords'
3
+ require 'time'
2
4
 
3
5
  module Aptible
4
6
  module CLI
@@ -8,6 +10,7 @@ module Aptible
8
10
  thor.class_eval do
9
11
  include Helpers::Operation
10
12
  include Helpers::AppOrDatabase
13
+ include Helpers::S3LogHelpers
11
14
 
12
15
  desc 'logs [--app APP | --database DATABASE]',
13
16
  'Follows logs from a running app or database'
@@ -25,6 +28,148 @@ module Aptible
25
28
  ENV['ACCESS_TOKEN'] = fetch_token
26
29
  exit_with_ssh_portal(op, '-o', 'SendEnv=ACCESS_TOKEN', '-T')
27
30
  end
31
+
32
+ desc 'logs_from_archive --bucket NAME --region REGION ' \
33
+ '--stack NAME [ --decryption-keys ONE [OR MORE] ] ' \
34
+ '[ --download-location LOCATION ] ' \
35
+ '[ [ --string-matches ONE [OR MORE] ] ' \
36
+ '| [ --app-id ID | --database-id ID | --endpoint-id ID | ' \
37
+ '--container-id ID ] ' \
38
+ '[ --start-date YYYY-MM-DD --end-date YYYY-MM-DD ] ]',
39
+ 'Retrieves container logs from an S3 archive in your own ' \
40
+ 'AWS account. You must provide your AWS credentials via ' \
41
+ 'the environment variables AWS_ACCESS_KEY_ID and ' \
42
+ 'AWS_SECRET_ACCESS_KEY'
43
+
44
+ # Required to retrieve files
45
+ option :region,
46
+ desc: 'The AWS region your S3 bucket resides in',
47
+ type: :string, required: true
48
+ option :bucket,
49
+ desc: 'The name of your S3 bucket',
50
+ type: :string, required: true
51
+ option :stack,
52
+ desc: 'The name of the Stack to download logs from',
53
+ type: :string, required: true
54
+ option :decryption_keys,
55
+ desc: 'The Aptible-provided keys for decryption. ' \
56
+ '(Space separated if multiple)',
57
+ type: :array
58
+
59
+ # For identifying files to download
60
+ option :string_matches,
61
+ desc: 'The strings to match in log file names.' \
62
+ '(Space separated if multiple)',
63
+ type: :array
64
+ option :app_id,
65
+ desc: 'The Application ID to download logs for.',
66
+ type: :numeric
67
+ option :database_id,
68
+ desc: 'The Database ID to download logs for.',
69
+ type: :numeric
70
+ option :endpoint_id,
71
+ desc: 'The Endpoint ID to download logs for.',
72
+ type: :numeric
73
+ option :container_id,
74
+ desc: 'The container ID to download logs for'
75
+ option :start_date,
76
+ desc: 'Get logs starting from this (UTC) date ' \
77
+ '(format: YYYY-MM-DD)',
78
+ type: :string
79
+ option :end_date,
80
+ desc: 'Get logs before this (UTC) date (format: YYYY-MM-DD)',
81
+ type: :string
82
+
83
+ # We don't download by default
84
+ option :download_location,
85
+ desc: 'The local path to place downloaded log files. ' \
86
+ 'If you do not set this option, the file names ' \
87
+ 'will be shown, but not downloaded.',
88
+ type: :string
89
+
90
+ def logs_from_archive
91
+ ensure_aws_creds
92
+ validate_log_search_options(options)
93
+
94
+ id_options = [
95
+ options[:app_id],
96
+ options[:database_id],
97
+ options[:endpoint_id],
98
+ options[:container_id]
99
+ ]
100
+
101
+ date_options = [options[:start_date], options[:end_date]]
102
+
103
+ r_type = 'apps' if options[:app_id]
104
+ r_type = 'databases' if options[:database_id]
105
+ r_type = 'proxy' if options[:endpoint_id]
106
+
107
+ if date_options.any?
108
+ start_date = utc_date(options[:start_date])
109
+ end_date = utc_date(options[:end_date])
110
+ if end_date < start_date
111
+ raise Thor::Error, 'End date must be after start date.'
112
+ end
113
+ time_range = [start_date, end_date]
114
+ CLI.logger.info "Searching from #{start_date} to #{end_date}"
115
+ else
116
+ time_range = nil
117
+ end
118
+
119
+ # --string-matches is useful for matching by partial container id,
120
+ # or for more flexibility than the currently supported id_options
121
+ # may allow for. We should update id_options with new use cases,
122
+ # but leave string_matches as a way to download any named file
123
+ if options[:string_matches]
124
+ files = find_s3_files_by_string_match(
125
+ options[:region],
126
+ options[:bucket],
127
+ options[:stack],
128
+ options[:string_matches]
129
+ )
130
+ elsif id_options.any?
131
+ if options[:container_id]
132
+ search_attrs = { container_id: options[:container_id] }
133
+ else
134
+ search_attrs = { type: r_type, id: id_options.compact.first }
135
+ end
136
+ files = find_s3_files_by_attrs(
137
+ options[:region],
138
+ options[:bucket],
139
+ options[:stack],
140
+ search_attrs,
141
+ time_range
142
+ )
143
+ end
144
+
145
+ unless files.any?
146
+ raise Thor::Error, 'No files found that matched all criteria'
147
+ end
148
+
149
+ CLI.logger.info "Found #{files.count} matching files..."
150
+
151
+ if options[:download_location]
152
+ # Since these files likely contain PHI, we will only download
153
+ # them if the user is explicit about where to save them.
154
+ files.each do |file|
155
+ shasum = info_from_path(file)[:shasum]
156
+ decrypt_and_translate_s3_file(
157
+ file,
158
+ encryption_key(shasum, options[:decryption_keys]),
159
+ options[:region],
160
+ options[:bucket],
161
+ options[:download_location]
162
+ )
163
+ end
164
+ else
165
+ files.each do |file|
166
+ CLI.logger.info file.split('/').drop(4).join('/')
167
+ end
168
+ m = 'No files were downloaded. Please provide a location ' \
169
+ 'with --download-location to download the files.'
170
+ CLI.logger.warn m
171
+ end
172
+ end
28
173
  end
29
174
  end
30
175
  end
@@ -8,6 +8,7 @@ module Aptible
8
8
  'EU1' => 'https://app.datadoghq.eu',
9
9
  'US1-FED' => 'https://app.ddog-gov.com'
10
10
  }.freeze
11
+ PATH = '/api/v1/series'.freeze
11
12
 
12
13
  def self.included(thor)
13
14
  thor.class_eval do
@@ -106,7 +107,7 @@ module Aptible
106
107
  "Valid options are #{sites}"
107
108
  end
108
109
 
109
- config[:series_url] = site
110
+ config[:series_url] = site + PATH
110
111
  end
111
112
  opts = {
112
113
  handle: handle,
@@ -16,6 +16,39 @@ module Aptible
16
16
  CLI.logger.info m
17
17
  o.update!(cancelled: true)
18
18
  end
19
+
20
+ desc 'operation:follow OPERATION_ID',
21
+ 'Follow logs of a running operation'
22
+ define_method 'operation:follow' do |operation_id|
23
+ o = Aptible::Api::Operation.find(operation_id, token: fetch_token)
24
+ raise "Operation ##{operation_id} not found" if o.nil?
25
+
26
+ if %w(failed succeeded).include? o.status
27
+ raise Thor::Error, "This operation has already #{o.status}. " \
28
+ 'Run the following command to retrieve ' \
29
+ "the operation's logs:\n" \
30
+ "aptible operation:logs #{o.id}"
31
+ end
32
+
33
+ CLI.logger.info "Streaming logs for #{prettify_operation(o)}..."
34
+
35
+ attach_to_operation_logs(o)
36
+ end
37
+
38
+ desc 'operation:logs OPERATION_ID', 'View logs for given operation'
39
+ define_method 'operation:logs' do |operation_id|
40
+ o = Aptible::Api::Operation.find(operation_id, token: fetch_token)
41
+ raise "Operation ##{operation_id} not found" if o.nil?
42
+
43
+ unless %w(succeeded failed).include? o.status
44
+ e = 'Error - You can view the logs when operation is complete.'
45
+ raise Thor::Error, e
46
+ end
47
+
48
+ m = "Requesting operation logs for #{prettify_operation(o)}..."
49
+ CLI.logger.info m
50
+ operation_logs(o)
51
+ end
19
52
  end
20
53
  end
21
54
  end
@@ -1,5 +1,5 @@
1
1
  module Aptible
2
2
  module CLI
3
- VERSION = '0.19.3'.freeze
3
+ VERSION = '0.19.6'.freeze
4
4
  end
5
5
  end
@@ -0,0 +1,36 @@
1
+ require 'spec_helper'
2
+
3
+ describe Aptible::CLI::Helpers::Database do
4
+ subject { Class.new.send(:include, described_class).new }
5
+
6
+ describe '#validate_image_type' do
7
+ let(:pg) do
8
+ Fabricate(:database_image, type: 'postgresql', version: '10')
9
+ end
10
+
11
+ let(:redis) do
12
+ Fabricate(:database_image, type: 'redis', version: '9.4')
13
+ end
14
+
15
+ let(:token) { 'some-token' }
16
+
17
+ before do
18
+ allow(subject).to receive(:fetch_token).and_return(token)
19
+ allow(Aptible::Api::DatabaseImage).to receive(:all)
20
+ .and_return([pg, redis])
21
+ end
22
+
23
+ it 'Raises an error if provided an invalid type' do
24
+ bad_type = 'cassandra'
25
+ err = "No Database Image of type \"#{bad_type}\", " \
26
+ "valid types: #{pg.type}, #{redis.type}"
27
+ expect do
28
+ subject.validate_image_type(bad_type)
29
+ end.to raise_error(Thor::Error, err)
30
+ end
31
+
32
+ it 'Returns true when provided a valid type' do
33
+ expect(subject.validate_image_type(pg.type)).to be(true)
34
+ end
35
+ end
36
+ end
@@ -0,0 +1,334 @@
1
+ require 'spec_helper'
2
+
3
+ describe Aptible::CLI::Helpers::S3LogHelpers do
4
+ subject { Class.new.send(:include, described_class).new }
5
+ let(:v2_pfx) { 'mystack/shareable/v2/fakesha' }
6
+ let(:v3_pfx) { 'mystack/shareable/v3/fakesha' }
7
+ let(:v2app) do
8
+ "#{v2_pfx}/apps-321/fakebread-json.log.2022-06-29T18:30:01.bck.gz"
9
+ end
10
+ let(:v2app_rotated) do
11
+ "#{v2_pfx}/apps-321/fakebread-json.1.log.2022-06-29T18:30:01.bck.gz"
12
+ end
13
+ let(:v3app) do
14
+ "#{v3_pfx}/apps-321/service-123/deadbeef-json.log." \
15
+ '2022-08-24T21:12:33.2022-08-24T21:14:38.archived.gz'
16
+ end
17
+ let(:v3db) do
18
+ "#{v3_pfx}/databases-321/fakebread-json.log." \
19
+ '2022-08-24T21:12:33.2022-08-24T21:14:38.archived.gz'
20
+ end
21
+ let(:v3db_rotated) do
22
+ "#{v3_pfx}/databases-321/fakebread-json.log.1." \
23
+ '2022-08-24T21:12:33.2022-08-24T21:14:38.archived.gz'
24
+ end
25
+
26
+ describe '#ensure_aws_creds' do
27
+ it 'Raises if no keys are provided via ENV' do
28
+ expect { subject.ensure_aws_creds }
29
+ .to raise_error(Thor::Error, /Missing environment variable/)
30
+ end
31
+
32
+ it 'Accepts AWS keypair from the ENV' do
33
+ ENV['AWS_ACCESS_KEY_ID'] = 'foo'
34
+ ENV['AWS_SECRET_ACCESS_KEY'] = 'bar'
35
+ expect { subject.ensure_aws_creds }.to_not raise_error
36
+ end
37
+ end
38
+
39
+ describe '#info_from_path' do
40
+ context 'time zones are in UTC' do
41
+ it 'processes v2 upload time in UTC' do
42
+ result = subject.info_from_path(v2app)
43
+ expect(result[:uploaded_at].zone).to eq('UTC')
44
+ end
45
+
46
+ it 'processes v3 log times in UTC' do
47
+ result = subject.info_from_path(v3app)
48
+ expect(result[:start_time].zone).to eq('UTC')
49
+ expect(result[:end_time].zone).to eq('UTC')
50
+ end
51
+ end
52
+
53
+ it 'does not choke on v3 logs with unknown timestamps' do
54
+ path = "#{v3_pfx}/apps-321/service-123/deadbeef-json.log." \
55
+ 'unknown.unknown.archived.gz'
56
+ result = subject.info_from_path(path)
57
+ expect(result[:start_time]).to be(nil)
58
+ expect(result[:end_time]).to be(nil)
59
+ end
60
+
61
+ it 'can read app data from v2 paths' do
62
+ result = subject.info_from_path(v2app)
63
+ expect(result[:schema]).to eq('v2')
64
+ expect(result[:shasum]).to eq('fakesha')
65
+ expect(result[:type]).to eq('apps')
66
+ expect(result[:id]).to eq(321)
67
+ expect(result[:service_id]).to be(nil)
68
+ expect(result[:container_id]).to eq('fakebread')
69
+ expect(result[:uploaded_at]).to eq('2022-06-29T18:30:01')
70
+ expect(result[:container_id]).to eq('fakebread')
71
+ expect(result[:start_time]).to be(nil)
72
+ expect(result[:end_time]).to be(nil)
73
+ end
74
+
75
+ it 'can read app data from v3 paths' do
76
+ result = subject.info_from_path(v3app)
77
+ expect(result[:schema]).to eq('v3')
78
+ expect(result[:shasum]).to eq('fakesha')
79
+ expect(result[:type]).to eq('apps')
80
+ expect(result[:id]).to eq(321)
81
+ expect(result[:service_id]).to eq(123)
82
+ expect(result[:container_id]).to eq('deadbeef')
83
+ expect(result[:uploaded_at]).to be(nil)
84
+ expect(result[:start_time]).to eq('2022-08-24T21:12:33')
85
+ expect(result[:end_time]).to eq('2022-08-24T21:14:38')
86
+ end
87
+
88
+ it 'can read db data from v3 paths' do
89
+ result = subject.info_from_path(v3db)
90
+ expect(result[:schema]).to eq('v3')
91
+ expect(result[:shasum]).to eq('fakesha')
92
+ expect(result[:type]).to eq('databases')
93
+ expect(result[:id]).to eq(321)
94
+ expect(result[:service_id]).to be(nil)
95
+ expect(result[:container_id]).to eq('fakebread')
96
+ expect(result[:uploaded_at]).to be(nil)
97
+ expect(result[:start_time]).to eq('2022-08-24T21:12:33')
98
+ expect(result[:end_time]).to eq('2022-08-24T21:14:38')
99
+ end
100
+
101
+ context 'files that have been rotated by docker (.json.log.1)' do
102
+ it 'can read data from v3 paths' do
103
+ result = subject.info_from_path(v3db_rotated)
104
+ expect(result[:schema]).to eq('v3')
105
+ expect(result[:shasum]).to eq('fakesha')
106
+ expect(result[:type]).to eq('databases')
107
+ expect(result[:id]).to eq(321)
108
+ expect(result[:service_id]).to be(nil)
109
+ expect(result[:container_id]).to eq('fakebread')
110
+ expect(result[:uploaded_at]).to be(nil)
111
+ expect(result[:start_time]).to eq('2022-08-24T21:12:33')
112
+ expect(result[:end_time]).to eq('2022-08-24T21:14:38')
113
+ end
114
+
115
+ it 'can read app data from v2 paths' do
116
+ result = subject.info_from_path(v2app)
117
+ expect(result[:schema]).to eq('v2')
118
+ expect(result[:shasum]).to eq('fakesha')
119
+ expect(result[:type]).to eq('apps')
120
+ expect(result[:id]).to eq(321)
121
+ expect(result[:service_id]).to be(nil)
122
+ expect(result[:container_id]).to eq('fakebread')
123
+ expect(result[:uploaded_at]).to eq('2022-06-29T18:30:01')
124
+ expect(result[:container_id]).to eq('fakebread')
125
+ expect(result[:start_time]).to be(nil)
126
+ expect(result[:end_time]).to be(nil)
127
+ end
128
+ end
129
+ end
130
+
131
+ describe '#validate_log_search_options' do
132
+ it 'Forces you to identify the files with a supported option' do
133
+ opts = {}
134
+ expect { subject.validate_log_search_options(opts) }
135
+ .to raise_error(Thor::Error, / specify an option to identify/)
136
+ end
137
+
138
+ it 'Does not let you pass --string-matches and id options' do
139
+ opts = { string_matches: ['foo'], app_id: 123 }
140
+ expect { subject.validate_log_search_options(opts) }
141
+ .to raise_error(Thor::Error, /cannot pass/)
142
+ end
143
+
144
+ it 'Does not let you pass multiple id options' do
145
+ opts = { database_id: 12, app_id: 23 }
146
+ expect { subject.validate_log_search_options(opts) }
147
+ .to raise_error(Thor::Error, /specify only one of/)
148
+ end
149
+
150
+ it 'Does not let you use date options with string-matches' do
151
+ opts = { string_matches: 12, start_date: 'foo' }
152
+ expect { subject.validate_log_search_options(opts) }
153
+ .to raise_error(Thor::Error, /cannot be used when searching by string/)
154
+ end
155
+
156
+ it 'Does not allow open-ended date range.' do
157
+ opts = { app_id: 123, start_date: 'foo' }
158
+ expect { subject.validate_log_search_options(opts) }
159
+ .to raise_error(Thor::Error, /must pass both/)
160
+ end
161
+
162
+ it 'Ensures you have provided a long enough container ID' do
163
+ opts = { container_id: 'tooshort' }
164
+ expect { subject.validate_log_search_options(opts) }
165
+ .to raise_error(Thor::Error, /at least the first 12/)
166
+ end
167
+
168
+ it 'Requires you to pass keys when downloading' do
169
+ opts = { app_id: 123, download_location: 'asdf' }
170
+ expect { subject.validate_log_search_options(opts) }
171
+ .to raise_error(Thor::Error, /You must provide decryption keys/)
172
+ end
173
+ end
174
+
175
+ describe '#find_s3_files_by_string_match' do
176
+ client_stub = Aws::S3::Client.new(stub_responses: true)
177
+ client_stub.stub_responses(
178
+ :list_buckets, buckets: [{ name: 'bucket' }]
179
+ )
180
+ client_stub.stub_responses(
181
+ :list_objects_v2, contents: [
182
+ { key: 'stack/it/doesnt/matter' },
183
+ { key: 'stack/matter/it/does/not/yoda' }
184
+ ]
185
+ )
186
+ before do
187
+ subject.stub(:s3_client) do
188
+ Aws::S3::Resource.new(region: 'us-east-1', client: client_stub)
189
+ end
190
+ end
191
+
192
+ it 'finds files with a single matching string' do
193
+ strings = %w(yoda)
194
+ result = subject.find_s3_files_by_string_match('us-east-1', 'bucket',
195
+ 'stack', strings)
196
+ expect(result).to match_array(%w(stack/matter/it/does/not/yoda))
197
+ end
198
+
199
+ it 'finds files with two matching strings' do
200
+ strings = %w(it matter)
201
+ result = subject.find_s3_files_by_string_match('us-east-1', 'bucket',
202
+ 'stack', strings)
203
+ expect(result).to match_array(%w(stack/it/doesnt/matter
204
+ stack/matter/it/does/not/yoda))
205
+ end
206
+
207
+ it 'only find files with all matching strings' do
208
+ strings = %w(it yoda)
209
+ result = subject.find_s3_files_by_string_match('us-east-1', 'bucket',
210
+ 'stack', strings)
211
+ expect(result).to match_array(%w(stack/matter/it/does/not/yoda))
212
+ end
213
+ end
214
+
215
+ describe '#find_s3_files_by_attrs' do
216
+ before do
217
+ client_stub = Aws::S3::Client.new(stub_responses: true)
218
+ client_stub.stub_responses(
219
+ :list_buckets, buckets: [{ name: 'bucket' }]
220
+ )
221
+ client_stub.stub_responses(
222
+ :list_objects_v2, contents: [
223
+ { key: v2app },
224
+ { key: v2app_rotated },
225
+ { key: v3db_rotated },
226
+ { key: v3db },
227
+ { key: v3app }
228
+ ]
229
+ )
230
+ subject.stub(:s3_client) do
231
+ Aws::S3::Resource.new(region: 'us-east-1', client: client_stub)
232
+ end
233
+ end
234
+
235
+ it 'can find apps by id' do
236
+ attrs = { type: 'apps', id: 321 }
237
+ result = subject.find_s3_files_by_attrs('us-east-1', 'bucket',
238
+ 'stack', attrs)
239
+ expect(result).to match_array([v3app, v2app, v2app_rotated])
240
+ end
241
+
242
+ it 'can find databases by id' do
243
+ attrs = { type: 'databases', id: 321 }
244
+ result = subject.find_s3_files_by_attrs('us-east-1', 'bucket',
245
+ 'stack', attrs)
246
+ expect(result).to match_array([v3db, v3db_rotated])
247
+ end
248
+
249
+ it 'can find by other attributes of the log file like container id' do
250
+ attrs = { container_id: 'deadbeef' }
251
+ result = subject.find_s3_files_by_attrs('us-east-1', 'bucket',
252
+ 'stack', attrs)
253
+ expect(result).to match_array([v3app])
254
+ end
255
+ end
256
+
257
+ describe '#time_match?' do
258
+ # Here's a representation of the test cases. We keep the file timestamps
259
+ # fixed and move --start-date/--end-date around to all possible combos.
260
+ # Note that we do force the start to be earlier than the end, which keeps the
261
+ # logic here quite simple.
262
+
263
+ # | |se
264
+ # | s|e
265
+ # s| |e
266
+ # |se|
267
+ # s|e |
268
+ # se| |
269
+
270
+ # s = start / lower bound of search
271
+ # e = end / upper bound of search
272
+ # |'s are the first and last timestamp in the file
273
+
274
+ let(:first_log) { Time.parse('2022-08-01T00:00:00') }
275
+ let(:last_log) { Time.parse('2022-09-01T00:00:00') }
276
+ let(:before) { Time.parse('2022-07-01T00:00:00') }
277
+ let(:between) { Time.parse('2022-08-15T00:00:00') }
278
+ let(:after) { Time.parse('2022-10-01T00:00:00') }
279
+
280
+ context 'identifies files that may have lines within a range' do
281
+ it 'before before does not match' do
282
+ range = [before, before]
283
+ expect(subject.time_match?(range, first_log, last_log)).to be(false)
284
+ end
285
+
286
+ it 'before between matches' do
287
+ range = [before, between]
288
+ expect(subject.time_match?(range, first_log, last_log)).to be(true)
289
+ end
290
+
291
+ it 'between between matches' do
292
+ range = [between, between]
293
+ expect(subject.time_match?(range, first_log, last_log)).to be(true)
294
+ end
295
+
296
+ it 'before after matches' do
297
+ range = [before, after]
298
+ expect(subject.time_match?(range, first_log, last_log)).to be(true)
299
+ end
300
+
301
+ it 'between after matches' do
302
+ range = [between, after]
303
+ expect(subject.time_match?(range, first_log, last_log)).to be(true)
304
+ end
305
+
306
+ it 'after after does not match' do
307
+ range = [after, after]
308
+ expect(subject.time_match?(range, first_log, last_log)).to be(false)
309
+ end
310
+ end
311
+ end
312
+
313
+ describe '#utc_date' do
314
+ e = 'Please provide dates in YYYY-MM-DD format'
315
+
316
+ it 'converts strings to dates in UTC' do
317
+ result = subject.utc_date('2022-08-30')
318
+ expect(result).to be_a(Time)
319
+ expect(result).to eq(Time.utc(2022, 8, 30, 0, 0, 0))
320
+ end
321
+
322
+ it 'raises an error if the input is a valid date/time in wrong format' do
323
+ expect do
324
+ subject.utc_date('2022-08-30 11:32')
325
+ end.to raise_error(Thor::Error, e)
326
+ end
327
+
328
+ it 'raises an error if the input is wrong' do
329
+ expect do
330
+ subject.utc_date('foobar')
331
+ end.to raise_error(Thor::Error, e)
332
+ end
333
+ end
334
+ end