aptible-cli 0.19.4 → 0.19.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.travis.yml +7 -1
- data/README.md +78 -76
- data/SECURITY.md +23 -0
- data/aptible-cli.gemspec +3 -1
- data/cleanup_bundler +14 -0
- data/lib/aptible/cli/agent.rb +6 -0
- data/lib/aptible/cli/helpers/date_helpers.rb +34 -0
- data/lib/aptible/cli/helpers/maintenance.rb +19 -0
- data/lib/aptible/cli/helpers/s3_log_helpers.rb +214 -0
- data/lib/aptible/cli/resource_formatter.rb +25 -2
- data/lib/aptible/cli/subcommands/deploy.rb +1 -1
- data/lib/aptible/cli/subcommands/logs.rb +145 -0
- data/lib/aptible/cli/subcommands/maintenance.rb +97 -0
- data/lib/aptible/cli/subcommands/metric_drain.rb +30 -0
- data/lib/aptible/cli/version.rb +1 -1
- data/spec/aptible/cli/helpers/date_helpers_spec.rb +12 -0
- data/spec/aptible/cli/helpers/s3_log_helpers_spec.rb +334 -0
- data/spec/aptible/cli/subcommands/logs_spec.rb +133 -0
- data/spec/aptible/cli/subcommands/maintenance_spec.rb +189 -0
- data/spec/aptible/cli/subcommands/metric_drain_spec.rb +27 -1
- data/spec/fabricators/maintenance_app_fabricator.rb +10 -0
- data/spec/fabricators/maintenance_database_fabricator.rb +10 -0
- metadata +52 -8
@@ -0,0 +1,214 @@
|
|
1
|
+
require 'aws-sdk'
|
2
|
+
require 'pathname'
|
3
|
+
|
4
|
+
module Aptible
|
5
|
+
module CLI
|
6
|
+
module Helpers
|
7
|
+
module S3LogHelpers
|
8
|
+
include Helpers::DateHelpers
|
9
|
+
|
10
|
+
def ensure_aws_creds
|
11
|
+
cred_errors = []
|
12
|
+
unless ENV['AWS_ACCESS_KEY_ID']
|
13
|
+
cred_errors << 'Missing environment variable: AWS_ACCESS_KEY_ID'
|
14
|
+
end
|
15
|
+
unless ENV['AWS_SECRET_ACCESS_KEY']
|
16
|
+
cred_errors << 'Missing environment variable: AWS_SECRET_ACCESS_KEY'
|
17
|
+
end
|
18
|
+
raise Thor::Error, cred_errors.join(' ') if cred_errors.any?
|
19
|
+
end
|
20
|
+
|
21
|
+
def validate_log_search_options(options = {})
|
22
|
+
id_options = [
|
23
|
+
options[:app_id],
|
24
|
+
options[:database_id],
|
25
|
+
options[:endpoint_id],
|
26
|
+
options[:container_id]
|
27
|
+
]
|
28
|
+
date_options = [options[:start_date], options[:end_date]]
|
29
|
+
unless options[:string_matches] || id_options.any?
|
30
|
+
m = 'You must specify an option to identify the logs to download,' \
|
31
|
+
' either: --string-matches, --app-id, --database-id,' \
|
32
|
+
' --endpoint-id, or --container-id'
|
33
|
+
raise Thor::Error, m
|
34
|
+
end
|
35
|
+
|
36
|
+
m = 'You cannot pass --app-id, --database-id, --endpoint-id, or ' \
|
37
|
+
'--container-id when using --string-matches.'
|
38
|
+
raise Thor::Error, m if options[:string_matches] && id_options.any?
|
39
|
+
|
40
|
+
m = 'You must specify only one of ' \
|
41
|
+
'--app-id, --database-id, --endpoint-id or --container-id'
|
42
|
+
raise Thor::Error, m if id_options.any? && !id_options.one?
|
43
|
+
|
44
|
+
m = 'The options --start-date/--end-date cannot be used when ' \
|
45
|
+
'searching by string'
|
46
|
+
raise Thor::Error, m if options[:string_matches] && date_options.any?
|
47
|
+
|
48
|
+
m = 'You must pass both --start-date and --end-date'
|
49
|
+
raise Thor::Error, m if date_options.any? && !date_options.all?
|
50
|
+
|
51
|
+
if options[:container_id] && options[:container_id].length < 12
|
52
|
+
m = 'You must specify at least the first 12 characters of the ' \
|
53
|
+
'container ID'
|
54
|
+
raise Thor::Error, m
|
55
|
+
end
|
56
|
+
|
57
|
+
if options[:download_location] && !options[:decryption_keys]
|
58
|
+
m = 'You must provide decryption keys with the --decryption-keys' \
|
59
|
+
' option in order to download files.'
|
60
|
+
raise Thor::Error, m
|
61
|
+
end
|
62
|
+
end
|
63
|
+
|
64
|
+
def info_from_path(file)
|
65
|
+
properties = {}
|
66
|
+
|
67
|
+
properties[:stack], _, properties[:schema],
|
68
|
+
properties[:shasum], type_id, *remainder = file.split('/')
|
69
|
+
|
70
|
+
properties[:id] = type_id.split('-').last.to_i
|
71
|
+
properties[:type] = type_id.split('-').first
|
72
|
+
|
73
|
+
case properties[:schema]
|
74
|
+
when 'v2'
|
75
|
+
# Eliminate the extensions
|
76
|
+
split_by_dot = remainder.pop.split('.') - %w(log bck gz)
|
77
|
+
properties[:container_id] = split_by_dot.first.delete!('-json')
|
78
|
+
properties[:uploaded_at] = utc_datetime(split_by_dot.last)
|
79
|
+
when 'v3'
|
80
|
+
case properties[:type]
|
81
|
+
when 'apps'
|
82
|
+
properties[:service_id] = remainder.first.split('-').last.to_i
|
83
|
+
file_name = remainder.second
|
84
|
+
else
|
85
|
+
file_name = remainder.first
|
86
|
+
end
|
87
|
+
# The file name may have differing number of elements due to
|
88
|
+
# docker file log rotation. So we eliminate some useless items
|
89
|
+
# and then work from the beginning or end of the remaining to find
|
90
|
+
# known elements, ignoring any .1 .2 (or none at all) extension
|
91
|
+
# found in the middle of the file name. EG:
|
92
|
+
# ['container_id', 'start_time', 'end_time']
|
93
|
+
# or
|
94
|
+
# ['container_id', '.1', 'start_time', 'end_time']]
|
95
|
+
split_by_dot = file_name.split('.') - %w(log gz archived)
|
96
|
+
properties[:container_id] = split_by_dot.first.delete!('-json')
|
97
|
+
properties[:start_time] = utc_datetime(split_by_dot[-2])
|
98
|
+
properties[:end_time] = utc_datetime(split_by_dot[-1])
|
99
|
+
else
|
100
|
+
m = "Cannot determine aptible log naming schema from #{file}"
|
101
|
+
raise Thor::Error, m
|
102
|
+
end
|
103
|
+
properties
|
104
|
+
end
|
105
|
+
|
106
|
+
def decrypt_and_translate_s3_file(file, enc_key, region, bucket, path)
|
107
|
+
# AWS warns us about using the legacy encryption schema
|
108
|
+
s3 = Kernel.silence_warnings do
|
109
|
+
Aws::S3::EncryptionV2::Client.new(
|
110
|
+
encryption_key: enc_key, region: region,
|
111
|
+
key_wrap_schema: :aes_gcm,
|
112
|
+
content_encryption_schema: :aes_gcm_no_padding,
|
113
|
+
security_profile: :v2_and_legacy
|
114
|
+
)
|
115
|
+
end
|
116
|
+
|
117
|
+
# Just write it to a file directly
|
118
|
+
location = File.join(path, file.split('/').drop(4).join('/'))
|
119
|
+
FileUtils.mkdir_p(File.dirname(location))
|
120
|
+
File.open(location, 'wb') do |f|
|
121
|
+
CLI.logger.info location
|
122
|
+
# Is this memory efficient?
|
123
|
+
s3.get_object(bucket: bucket, key: file, response_target: f)
|
124
|
+
end
|
125
|
+
end
|
126
|
+
|
127
|
+
def find_s3_files_by_string_match(region, bucket, stack, strings)
|
128
|
+
# This function just regex matches a provided string anywhere
|
129
|
+
# in the s3 path
|
130
|
+
begin
|
131
|
+
stack_logs = s3_client(region).bucket(bucket)
|
132
|
+
.objects(prefix: stack)
|
133
|
+
.map(&:key)
|
134
|
+
rescue => error
|
135
|
+
raise Thor::Error, error.message
|
136
|
+
end
|
137
|
+
strings.each do |s|
|
138
|
+
stack_logs = stack_logs.select { |f| f =~ /#{s}/ }
|
139
|
+
end
|
140
|
+
stack_logs
|
141
|
+
end
|
142
|
+
|
143
|
+
def find_s3_files_by_attrs(region, bucket, stack,
|
144
|
+
attrs, time_range = nil)
|
145
|
+
# This function uses the known path schema to return files matching
|
146
|
+
# any provided criteria. EG:
|
147
|
+
# * attrs: { :type => 'app', :id => 123 }
|
148
|
+
# * attrs: { :container_id => 'deadbeef' }
|
149
|
+
|
150
|
+
begin
|
151
|
+
stack_logs = s3_client(region).bucket(bucket)
|
152
|
+
.objects(prefix: stack)
|
153
|
+
.map(&:key)
|
154
|
+
rescue => error
|
155
|
+
raise Thor::Error, error.message
|
156
|
+
end
|
157
|
+
attrs.each do |k, v|
|
158
|
+
stack_logs = stack_logs.select do |f|
|
159
|
+
if k == :container_id
|
160
|
+
# Match short container IDs
|
161
|
+
info_from_path(f)[k].start_with?(v)
|
162
|
+
else
|
163
|
+
info_from_path(f)[k] == v
|
164
|
+
end
|
165
|
+
end
|
166
|
+
end
|
167
|
+
|
168
|
+
if time_range
|
169
|
+
# select only logs within the time range
|
170
|
+
stack_logs = stack_logs.select do |f|
|
171
|
+
info = info_from_path(f)
|
172
|
+
first_log = info[:start_time]
|
173
|
+
last_log = info[:end_time]
|
174
|
+
if first_log.nil? || last_log.nil?
|
175
|
+
m = 'Cannot determine precise timestamps of file: ' \
|
176
|
+
"#{f.split('/').drop(4).join('/')}"
|
177
|
+
CLI.logger.warn m
|
178
|
+
false
|
179
|
+
else
|
180
|
+
time_match?(time_range, first_log, last_log)
|
181
|
+
end
|
182
|
+
end
|
183
|
+
end
|
184
|
+
|
185
|
+
stack_logs
|
186
|
+
end
|
187
|
+
|
188
|
+
def time_match?(time_range, start_timestamp, end_timestamp)
|
189
|
+
return false if start_timestamp.nil? || end_timestamp.nil?
|
190
|
+
return false if time_range.last < start_timestamp
|
191
|
+
return false if time_range.first > end_timestamp
|
192
|
+
true
|
193
|
+
end
|
194
|
+
|
195
|
+
def encryption_key(filesum, possible_keys)
|
196
|
+
# The key can be determined from the sum
|
197
|
+
possible_keys.each do |k|
|
198
|
+
keysum = Digest::SHA256.hexdigest(Base64.strict_decode64(k))
|
199
|
+
next unless keysum == filesum
|
200
|
+
return Base64.strict_decode64(k)
|
201
|
+
end
|
202
|
+
m = "Did not find a matching key for shasum #{filesum}"
|
203
|
+
raise Thor::Error, m
|
204
|
+
end
|
205
|
+
|
206
|
+
def s3_client(region)
|
207
|
+
@s3_client ||= Kernel.silence_warnings do
|
208
|
+
Aws::S3::Resource.new(region: region)
|
209
|
+
end
|
210
|
+
end
|
211
|
+
end
|
212
|
+
end
|
213
|
+
end
|
214
|
+
end
|
@@ -2,6 +2,8 @@ module Aptible
|
|
2
2
|
module CLI
|
3
3
|
module ResourceFormatter
|
4
4
|
class << self
|
5
|
+
include Helpers::DateHelpers
|
6
|
+
|
5
7
|
NO_NESTING = Object.new.freeze
|
6
8
|
|
7
9
|
def inject_backup(node, backup, include_db: false)
|
@@ -50,8 +52,7 @@ module Aptible
|
|
50
52
|
end
|
51
53
|
end
|
52
54
|
|
53
|
-
if bu_operation &&
|
54
|
-
backup.manual && !backup.copied_from
|
55
|
+
if bu_operation && !backup.copied_from
|
55
56
|
node.keyed_object('created_from_operation', 'id') do |n|
|
56
57
|
inject_operation(n, bu_operation)
|
57
58
|
end
|
@@ -236,6 +237,28 @@ module Aptible
|
|
236
237
|
attach_account(node, account)
|
237
238
|
end
|
238
239
|
|
240
|
+
def inject_maintenance(
|
241
|
+
node,
|
242
|
+
command_prefix,
|
243
|
+
maintenance_resource,
|
244
|
+
account
|
245
|
+
)
|
246
|
+
node.value('id', maintenance_resource.id)
|
247
|
+
raw_start, raw_end = maintenance_resource.maintenance_deadline
|
248
|
+
window_start = utc_string(raw_start)
|
249
|
+
window_end = utc_string(raw_end)
|
250
|
+
label = "#{maintenance_resource.handle} between #{window_start} "\
|
251
|
+
"and #{window_end}"
|
252
|
+
restart_command = "#{command_prefix}"\
|
253
|
+
" #{maintenance_resource.handle}"\
|
254
|
+
" --environment #{account.handle}"
|
255
|
+
node.value('label', label)
|
256
|
+
node.value('handle', maintenance_resource.handle)
|
257
|
+
node.value('restart_command', restart_command)
|
258
|
+
|
259
|
+
attach_account(node, account)
|
260
|
+
end
|
261
|
+
|
239
262
|
private
|
240
263
|
|
241
264
|
def attach_account(node, account)
|
@@ -41,7 +41,7 @@ module Aptible
|
|
41
41
|
git_ref = options[:git_commitish]
|
42
42
|
if options[:git_detach]
|
43
43
|
if git_ref
|
44
|
-
raise Thor::Error, 'The options --git-
|
44
|
+
raise Thor::Error, 'The options --git-commitish and ' \
|
45
45
|
'--git-detach are incompatible'
|
46
46
|
end
|
47
47
|
git_ref = NULL_SHA1
|
@@ -1,4 +1,6 @@
|
|
1
|
+
require 'aws-sdk'
|
1
2
|
require 'shellwords'
|
3
|
+
require 'time'
|
2
4
|
|
3
5
|
module Aptible
|
4
6
|
module CLI
|
@@ -8,6 +10,7 @@ module Aptible
|
|
8
10
|
thor.class_eval do
|
9
11
|
include Helpers::Operation
|
10
12
|
include Helpers::AppOrDatabase
|
13
|
+
include Helpers::DateHelpers
|
11
14
|
|
12
15
|
desc 'logs [--app APP | --database DATABASE]',
|
13
16
|
'Follows logs from a running app or database'
|
@@ -25,6 +28,148 @@ module Aptible
|
|
25
28
|
ENV['ACCESS_TOKEN'] = fetch_token
|
26
29
|
exit_with_ssh_portal(op, '-o', 'SendEnv=ACCESS_TOKEN', '-T')
|
27
30
|
end
|
31
|
+
|
32
|
+
desc 'logs_from_archive --bucket NAME --region REGION ' \
|
33
|
+
'--stack NAME [ --decryption-keys ONE [OR MORE] ] ' \
|
34
|
+
'[ --download-location LOCATION ] ' \
|
35
|
+
'[ [ --string-matches ONE [OR MORE] ] ' \
|
36
|
+
'| [ --app-id ID | --database-id ID | --endpoint-id ID | ' \
|
37
|
+
'--container-id ID ] ' \
|
38
|
+
'[ --start-date YYYY-MM-DD --end-date YYYY-MM-DD ] ]',
|
39
|
+
'Retrieves container logs from an S3 archive in your own ' \
|
40
|
+
'AWS account. You must provide your AWS credentials via ' \
|
41
|
+
'the environment variables AWS_ACCESS_KEY_ID and ' \
|
42
|
+
'AWS_SECRET_ACCESS_KEY'
|
43
|
+
|
44
|
+
# Required to retrieve files
|
45
|
+
option :region,
|
46
|
+
desc: 'The AWS region your S3 bucket resides in',
|
47
|
+
type: :string, required: true
|
48
|
+
option :bucket,
|
49
|
+
desc: 'The name of your S3 bucket',
|
50
|
+
type: :string, required: true
|
51
|
+
option :stack,
|
52
|
+
desc: 'The name of the Stack to download logs from',
|
53
|
+
type: :string, required: true
|
54
|
+
option :decryption_keys,
|
55
|
+
desc: 'The Aptible-provided keys for decryption. ' \
|
56
|
+
'(Space separated if multiple)',
|
57
|
+
type: :array
|
58
|
+
|
59
|
+
# For identifying files to download
|
60
|
+
option :string_matches,
|
61
|
+
desc: 'The strings to match in log file names.' \
|
62
|
+
' (Space separated if multiple)',
|
63
|
+
type: :array
|
64
|
+
option :app_id,
|
65
|
+
desc: 'The Application ID to download logs for.',
|
66
|
+
type: :numeric
|
67
|
+
option :database_id,
|
68
|
+
desc: 'The Database ID to download logs for.',
|
69
|
+
type: :numeric
|
70
|
+
option :endpoint_id,
|
71
|
+
desc: 'The Endpoint ID to download logs for.',
|
72
|
+
type: :numeric
|
73
|
+
option :container_id,
|
74
|
+
desc: 'The container ID to download logs for'
|
75
|
+
option :start_date,
|
76
|
+
desc: 'Get logs starting from this (UTC) date ' \
|
77
|
+
'(format: YYYY-MM-DD)',
|
78
|
+
type: :string
|
79
|
+
option :end_date,
|
80
|
+
desc: 'Get logs before this (UTC) date (format: YYYY-MM-DD)',
|
81
|
+
type: :string
|
82
|
+
|
83
|
+
# We don't download by default
|
84
|
+
option :download_location,
|
85
|
+
desc: 'The local path to place downloaded log files. ' \
|
86
|
+
'If you do not set this option, the file names ' \
|
87
|
+
'will be shown, but not downloaded.',
|
88
|
+
type: :string
|
89
|
+
|
90
|
+
def logs_from_archive
|
91
|
+
ensure_aws_creds
|
92
|
+
validate_log_search_options(options)
|
93
|
+
|
94
|
+
id_options = [
|
95
|
+
options[:app_id],
|
96
|
+
options[:database_id],
|
97
|
+
options[:endpoint_id],
|
98
|
+
options[:container_id]
|
99
|
+
]
|
100
|
+
|
101
|
+
date_options = [options[:start_date], options[:end_date]]
|
102
|
+
|
103
|
+
r_type = 'apps' if options[:app_id]
|
104
|
+
r_type = 'databases' if options[:database_id]
|
105
|
+
r_type = 'proxy' if options[:endpoint_id]
|
106
|
+
|
107
|
+
if date_options.any?
|
108
|
+
start_date = utc_date(options[:start_date])
|
109
|
+
end_date = utc_date(options[:end_date])
|
110
|
+
if end_date < start_date
|
111
|
+
raise Thor::Error, 'End date must be after start date.'
|
112
|
+
end
|
113
|
+
time_range = [start_date, end_date]
|
114
|
+
CLI.logger.info "Searching from #{start_date} to #{end_date}"
|
115
|
+
else
|
116
|
+
time_range = nil
|
117
|
+
end
|
118
|
+
|
119
|
+
# --string-matches is useful for matching by partial container id,
|
120
|
+
# or for more flexibility than the currently supported id_options
|
121
|
+
# may allow for. We should update id_options with new use cases,
|
122
|
+
# but leave string_matches as a way to download any named file
|
123
|
+
if options[:string_matches]
|
124
|
+
files = find_s3_files_by_string_match(
|
125
|
+
options[:region],
|
126
|
+
options[:bucket],
|
127
|
+
options[:stack],
|
128
|
+
options[:string_matches]
|
129
|
+
)
|
130
|
+
elsif id_options.any?
|
131
|
+
if options[:container_id]
|
132
|
+
search_attrs = { container_id: options[:container_id] }
|
133
|
+
else
|
134
|
+
search_attrs = { type: r_type, id: id_options.compact.first }
|
135
|
+
end
|
136
|
+
files = find_s3_files_by_attrs(
|
137
|
+
options[:region],
|
138
|
+
options[:bucket],
|
139
|
+
options[:stack],
|
140
|
+
search_attrs,
|
141
|
+
time_range
|
142
|
+
)
|
143
|
+
end
|
144
|
+
|
145
|
+
unless files.any?
|
146
|
+
raise Thor::Error, 'No files found that matched all criteria'
|
147
|
+
end
|
148
|
+
|
149
|
+
CLI.logger.info "Found #{files.count} matching files..."
|
150
|
+
|
151
|
+
if options[:download_location]
|
152
|
+
# Since these files likely contain PHI, we will only download
|
153
|
+
# them if the user is explicit about where to save them.
|
154
|
+
files.each do |file|
|
155
|
+
shasum = info_from_path(file)[:shasum]
|
156
|
+
decrypt_and_translate_s3_file(
|
157
|
+
file,
|
158
|
+
encryption_key(shasum, options[:decryption_keys]),
|
159
|
+
options[:region],
|
160
|
+
options[:bucket],
|
161
|
+
options[:download_location]
|
162
|
+
)
|
163
|
+
end
|
164
|
+
else
|
165
|
+
files.each do |file|
|
166
|
+
CLI.logger.info file.split('/').drop(4).join('/')
|
167
|
+
end
|
168
|
+
m = 'No files were downloaded. Please provide a location ' \
|
169
|
+
'with --download-location to download the files.'
|
170
|
+
CLI.logger.warn m
|
171
|
+
end
|
172
|
+
end
|
28
173
|
end
|
29
174
|
end
|
30
175
|
end
|
@@ -0,0 +1,97 @@
|
|
1
|
+
module Aptible
|
2
|
+
module CLI
|
3
|
+
module Subcommands
|
4
|
+
module Maintenance
|
5
|
+
def self.included(thor)
|
6
|
+
thor.class_eval do
|
7
|
+
include Helpers::Environment
|
8
|
+
include Helpers::Maintenance
|
9
|
+
include Helpers::Token
|
10
|
+
|
11
|
+
desc 'maintenance:apps',
|
12
|
+
'List Apps impacted by maintenance schedules where '\
|
13
|
+
'restarts are required'
|
14
|
+
option :environment
|
15
|
+
define_method 'maintenance:apps' do
|
16
|
+
found_maintenance = false
|
17
|
+
m = maintenance_apps
|
18
|
+
Formatter.render(Renderer.current) do |root|
|
19
|
+
root.grouped_keyed_list(
|
20
|
+
{ 'environment' => 'handle' },
|
21
|
+
'label'
|
22
|
+
) do |node|
|
23
|
+
scoped_environments(options).each do |account|
|
24
|
+
m.select { |app| app.account.id == account.id }
|
25
|
+
.each do |app|
|
26
|
+
next unless app.maintenance_deadline
|
27
|
+
found_maintenance = true
|
28
|
+
node.object do |n|
|
29
|
+
ResourceFormatter.inject_maintenance(
|
30
|
+
n,
|
31
|
+
'aptible restart --app',
|
32
|
+
app,
|
33
|
+
account
|
34
|
+
)
|
35
|
+
end
|
36
|
+
end
|
37
|
+
end
|
38
|
+
end
|
39
|
+
end
|
40
|
+
if found_maintenance
|
41
|
+
explanation 'app'
|
42
|
+
else
|
43
|
+
no_maintenances 'app'
|
44
|
+
end
|
45
|
+
end
|
46
|
+
desc 'maintenance:dbs',
|
47
|
+
'List Databases impacted by maintenance schedules where '\
|
48
|
+
'restarts are required'
|
49
|
+
option :environment
|
50
|
+
define_method 'maintenance:dbs' do
|
51
|
+
found_maintenance = false
|
52
|
+
m = maintenance_databases
|
53
|
+
Formatter.render(Renderer.current) do |root|
|
54
|
+
root.grouped_keyed_list(
|
55
|
+
{ 'environment' => 'handle' },
|
56
|
+
'label'
|
57
|
+
) do |node|
|
58
|
+
scoped_environments(options).each do |account|
|
59
|
+
m.select { |db| db.account.id == account.id }
|
60
|
+
.each do |db|
|
61
|
+
next unless db.maintenance_deadline
|
62
|
+
found_maintenance = true
|
63
|
+
node.object do |n|
|
64
|
+
ResourceFormatter.inject_maintenance(
|
65
|
+
n,
|
66
|
+
'aptible db:restart',
|
67
|
+
db,
|
68
|
+
account
|
69
|
+
)
|
70
|
+
end
|
71
|
+
end
|
72
|
+
end
|
73
|
+
end
|
74
|
+
end
|
75
|
+
if found_maintenance
|
76
|
+
explanation 'database'
|
77
|
+
else
|
78
|
+
no_maintenances 'database'
|
79
|
+
end
|
80
|
+
end
|
81
|
+
end
|
82
|
+
end
|
83
|
+
|
84
|
+
def explanation(resource_type)
|
85
|
+
CLI.logger.warn "\nYou may restart these #{resource_type}(s)"\
|
86
|
+
' at any time, or Aptible will restart it'\
|
87
|
+
' during the defined window.'
|
88
|
+
end
|
89
|
+
|
90
|
+
def no_maintenances(resource_type)
|
91
|
+
CLI.logger.info "\nNo #{resource_type}s found affected "\
|
92
|
+
'by maintenance schedules.'
|
93
|
+
end
|
94
|
+
end
|
95
|
+
end
|
96
|
+
end
|
97
|
+
end
|
@@ -5,6 +5,7 @@ module Aptible
|
|
5
5
|
SITES = {
|
6
6
|
'US1' => 'https://app.datadoghq.com',
|
7
7
|
'US3' => 'https://us3.datadoghq.com',
|
8
|
+
'US5' => 'https://us5.datadoghq.com',
|
8
9
|
'EU1' => 'https://app.datadoghq.eu',
|
9
10
|
'US1-FED' => 'https://app.ddog-gov.com'
|
10
11
|
}.freeze
|
@@ -84,6 +85,35 @@ module Aptible
|
|
84
85
|
create_metric_drain(account, opts)
|
85
86
|
end
|
86
87
|
|
88
|
+
desc 'metric_drain:create:influxdb:customv2 HANDLE '\
|
89
|
+
'--org ORGANIZATION --token INFLUX_TOKEN ' \
|
90
|
+
'--url URL_INCLUDING_PORT ' \
|
91
|
+
'--bucket INFLUX_BUCKET_NAME ' \
|
92
|
+
'--environment ENVIRONMENT',
|
93
|
+
'Create an InfluxDB v2 Metric Drain'
|
94
|
+
option :bucket, type: :string
|
95
|
+
option :org, type: :string
|
96
|
+
option :token, type: :string
|
97
|
+
option :url, type: :string
|
98
|
+
option :environment
|
99
|
+
define_method 'metric_drain:create:influxdb:customv2' do |handle|
|
100
|
+
account = ensure_environment(options)
|
101
|
+
|
102
|
+
config = {
|
103
|
+
address: options[:url],
|
104
|
+
org: options[:org],
|
105
|
+
authToken: options[:token],
|
106
|
+
bucket: options[:bucket]
|
107
|
+
}
|
108
|
+
opts = {
|
109
|
+
handle: handle,
|
110
|
+
drain_configuration: config,
|
111
|
+
drain_type: :influxdb2
|
112
|
+
}
|
113
|
+
|
114
|
+
create_metric_drain(account, opts)
|
115
|
+
end
|
116
|
+
|
87
117
|
desc 'metric_drain:create:datadog HANDLE '\
|
88
118
|
'--api_key DATADOG_API_KEY '\
|
89
119
|
'--site DATADOG_SITE ' \
|
data/lib/aptible/cli/version.rb
CHANGED
@@ -0,0 +1,12 @@
|
|
1
|
+
require 'spec_helper'
|
2
|
+
|
3
|
+
describe Aptible::CLI::Helpers::DateHelpers do
|
4
|
+
subject { Class.new.send(:include, described_class).new }
|
5
|
+
|
6
|
+
describe '#utc_string' do
|
7
|
+
it 'should accept a Datetime string from our API and return a UTC string' do
|
8
|
+
result = subject.utc_string('2023-09-05T22:00:00.000Z')
|
9
|
+
expect(result).to eq '2023-09-05 22:00:00 UTC'
|
10
|
+
end
|
11
|
+
end
|
12
|
+
end
|