firespring_dev_commands 2.2.8.pre.alpha.1 → 2.5.0.pre.alpha.1
- checksums.yaml +4 -4
- data/README.md +1 -1
- data/lib/firespring_dev_commands/audit/report.rb +2 -9
- data/lib/firespring_dev_commands/aws/account.rb +1 -1
- data/lib/firespring_dev_commands/aws/cloudformation.rb +3 -10
- data/lib/firespring_dev_commands/aws/login.rb +1 -1
- data/lib/firespring_dev_commands/common.rb +2 -22
- data/lib/firespring_dev_commands/docker/status.rb +0 -20
- data/lib/firespring_dev_commands/docker.rb +16 -86
- data/lib/firespring_dev_commands/eol/aws.rb +2 -10
- data/lib/firespring_dev_commands/eol.rb +2 -22
- data/lib/firespring_dev_commands/git.rb +24 -37
- data/lib/firespring_dev_commands/jira/issue.rb +1 -3
- data/lib/firespring_dev_commands/node.rb +1 -1
- data/lib/firespring_dev_commands/php/audit.rb +0 -4
- data/lib/firespring_dev_commands/php.rb +12 -28
- data/lib/firespring_dev_commands/platform.rb +31 -38
- data/lib/firespring_dev_commands/ruby.rb +3 -6
- data/lib/firespring_dev_commands/target_process/query.rb +4 -30
- data/lib/firespring_dev_commands/target_process/release.rb +1 -1
- data/lib/firespring_dev_commands/target_process/team_assignment.rb +1 -1
- data/lib/firespring_dev_commands/target_process/user.rb +1 -13
- data/lib/firespring_dev_commands/target_process/user_story.rb +1 -1
- data/lib/firespring_dev_commands/target_process/user_story_history.rb +1 -1
- data/lib/firespring_dev_commands/target_process.rb +7 -24
- data/lib/firespring_dev_commands/templates/aws.rb +6 -33
- data/lib/firespring_dev_commands/templates/base_interface.rb +2 -2
- data/lib/firespring_dev_commands/templates/ci.rb +11 -16
- data/lib/firespring_dev_commands/templates/docker/application.rb +2 -2
- data/lib/firespring_dev_commands/templates/docker/node/application.rb +5 -55
- data/lib/firespring_dev_commands/templates/docker/php/application.rb +16 -58
- data/lib/firespring_dev_commands/templates/docker/ruby/application.rb +5 -54
- data/lib/firespring_dev_commands/templates/eol.rb +2 -9
- data/lib/firespring_dev_commands/templates/git.rb +0 -165
- data/lib/firespring_dev_commands/version.rb +1 -1
- data/lib/firespring_dev_commands.rb +1 -1
- metadata +35 -125
- data/lib/firespring_dev_commands/aws/route53.rb +0 -177
- data/lib/firespring_dev_commands/bloom_growth/rock.rb +0 -34
- data/lib/firespring_dev_commands/bloom_growth/seat.rb +0 -16
- data/lib/firespring_dev_commands/bloom_growth/user.rb +0 -43
- data/lib/firespring_dev_commands/bloom_growth.rb +0 -132
- data/lib/firespring_dev_commands/certificate.rb +0 -59
- data/lib/firespring_dev_commands/coverage/base.rb +0 -16
- data/lib/firespring_dev_commands/coverage/cobertura.rb +0 -86
- data/lib/firespring_dev_commands/coverage/none.rb +0 -21
- data/lib/firespring_dev_commands/dns/resource.rb +0 -93
- data/lib/firespring_dev_commands/docker/desktop.rb +0 -61
- data/lib/firespring_dev_commands/eol/node.rb +0 -42
- data/lib/firespring_dev_commands/eol/php.rb +0 -50
- data/lib/firespring_dev_commands/eol/ruby.rb +0 -42
- data/lib/firespring_dev_commands/jira/parent.rb +0 -19
- data/lib/firespring_dev_commands/os.rb +0 -35
- data/lib/firespring_dev_commands/port.rb +0 -24
- data/lib/firespring_dev_commands/target_process/time.rb +0 -32
- data/lib/firespring_dev_commands/templates/aws/services/route53.rb +0 -130
- data/lib/firespring_dev_commands/templates/certificate.rb +0 -41
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e146e8c9d9c3422dfd7641c6a09ca8c2cdfad6ed430d73aec5808de28ed5d880
+  data.tar.gz: 273923aae5ccce231fc77174ed89cbe0e4f5ad59b52219b04303f983e9966e1d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 67223e6ba12a10f798d7fee6b1cab844da41cd3dc65a449330e8c8bd0ef86180cc297889b7365b7b68f5ca8054919e9881693707ecfde02e539d0a7e22c6af51
+  data.tar.gz: ada0e13a3f2d8022687a2542503bad6bc847bf32fc17604116d17e06ed46ee7efa18aea35635c31a26c07f5ccdd6a3c5083235edaafd77684528bc487c17f3c7
data/README.md CHANGED

@@ -28,7 +28,7 @@ Dev::Template::Docker::Node::Application.new('foo')
 ```
 * If you run `rake -T` now, you should have base rake commands and application rake commands for an app called `foo`

-* (
+* (optional) Add AWS login template commands
 ```
 # Configure AWS accounts and create tasks
 Dev::Aws::Account::configure do |c|
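Taken together, the README steps above amount to a Rakefile along these lines. This is a minimal sketch built only from the names visible in the snippets; the app name 'foo' is a placeholder and the body of the configure block is omitted because this diff does not show the available settings.

    # Rakefile (sketch)
    require 'firespring_dev_commands'

    # Base and application rake tasks for an app called 'foo'
    Dev::Template::Docker::Node::Application.new('foo')

    # (optional) Configure AWS accounts and create the AWS login tasks
    Dev::Aws::Account::configure do |c|
      # account settings go here; see the full README for the options
    end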
data/lib/firespring_dev_commands/audit/report.rb CHANGED

@@ -3,12 +3,11 @@ module Dev
   class Audit
     # The class containing standardized information about an audit report
     class Report
-      attr_accessor :items, :min_severity, :
+      attr_accessor :items, :min_severity, :ignorelist, :filtered_items

       def initialize(
         items,
         min_severity: ENV.fetch('MIN_SEVERITY', nil),
-        error_on_unknown: ENV.fetch('ERROR_ON_UNKNOWN', nil),
         ignorelist: ENV['IGNORELIST'].to_s.split(/\s*,\s*/)
       )
         # Items should be an array of Item objects

@@ -16,18 +15,12 @@ module Dev
         raise 'items must all be report items' unless @items.all?(Dev::Audit::Report::Item)

         @min_severity = min_severity || Level::HIGH
-        @error_on_unknown = error_on_unknown
         @ignorelist = Array(ignorelist).compact
       end

       # Get all severities greater than or equal to the minimum severity
       def desired_severities
-
-          -1
-        else
-          -2
-        end
-        LEVELS.slice(LEVELS.find_index(min_severity)..max_severity)
+        LEVELS.slice(LEVELS.find_index(min_severity)..-1)
       end

       # Run the filters against the report items and filter out any which should be excluded
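The net effect in desired_severities is that the severity list is now always sliced from the minimum severity to the end, rather than conditionally trimming the tail based on the removed error_on_unknown flag. A quick sketch of the new slice with an assumed LEVELS ordering (the real constant is not shown in this diff):

    levels = %w(low moderate high critical)   # assumed example ordering, not copied from the gem
    min_severity = 'high'
    levels.slice(levels.find_index(min_severity)..-1)
    # => ["high", "critical"]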
data/lib/firespring_dev_commands/aws/account.rb CHANGED

@@ -94,7 +94,7 @@ module Dev
       # because some projects may be using older versions of the dev_commands library
       # defaultini.delete('mfa_serial')

-      session_name_default = defaultini['role_session_name'] || "#{ENV.fetch('USERNAME',
+      session_name_default = defaultini['role_session_name'] || "#{ENV.fetch('USERNAME', nil)}_cli"
       defaultini['role_session_name'] = Dev::Common.new.ask('Default session name', session_name_default)

       duration_default = defaultini['session_duration'] || 36_000
data/lib/firespring_dev_commands/aws/cloudformation.rb CHANGED

@@ -21,10 +21,9 @@ module Dev
     # Finished status
     FINISHED = :finished

-    attr_accessor :client, :name, :template_filename, :parameters, :capabilities, :failure_behavior, :
+    attr_accessor :client, :name, :template_filename, :parameters, :capabilities, :failure_behavior, :state

-    def initialize(name, template_filename, parameters: Dev::Aws::Cloudformation::Parameters.new, capabilities: [], failure_behavior: 'ROLLBACK'
-                   preserve_parameters_on_update: false)
+    def initialize(name, template_filename, parameters: Dev::Aws::Cloudformation::Parameters.new, capabilities: [], failure_behavior: 'ROLLBACK')
       raise 'parameters must be an intsance of parameters' unless parameters.is_a?(Dev::Aws::Cloudformation::Parameters)

       @client = nil

@@ -33,7 +32,6 @@ module Dev
       @parameters = parameters
       @capabilities = capabilities
       @failure_behavior = failure_behavior
-      @preserve_parameters_on_update = preserve_parameters_on_update
       @state = NOT_STARTED
     end

@@ -83,16 +81,11 @@ module Dev
       # Call upload function to get the s3 url
       template_url = upload(template_filename)

-      update_parameters = if preserve_parameters_on_update
-                            parameters.preserve
-                          else
-                            parameters.default
-                          end
       # Update the cloudformation stack
       client.update_stack(
         stack_name: name,
         template_url:,
-        parameters:
+        parameters: parameters.preserve,
         capabilities:
       )
       @state = STARTED
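In practice this means stack updates now always reuse the stack's existing parameter values, and the preserve_parameters_on_update keyword is gone from the constructor. A hedged sketch of constructing the class with the new signature (the stack name, template file, and capability are placeholder values):

    stack = Dev::Aws::Cloudformation.new('my-stack', 'my-stack.yaml', capabilities: ['CAPABILITY_IAM'])
    # Passing preserve_parameters_on_update: true would now raise ArgumentError (unknown keyword);
    # updates call parameters.preserve unconditionally, as shown in the hunk above.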
data/lib/firespring_dev_commands/aws/login.rb CHANGED

@@ -61,7 +61,7 @@ module Dev
       puts " Logging in to #{account} in #{region} as #{role}".light_yellow
       puts

-      code = ENV['AWS_TOKEN_CODE'] || Dev::Common.new.ask("Enter the MFA code for the #{ENV.fetch('USERNAME', '
+      code = ENV['AWS_TOKEN_CODE'] || Dev::Common.new.ask("Enter the MFA code for the #{ENV.fetch('USERNAME', '')} user serial #{serial}")
       raise 'MFA is required' unless code.to_s.strip

       sts = ::Aws::STS::Client.new(profile: 'default', region:)
data/lib/firespring_dev_commands/common.rb CHANGED

@@ -100,8 +100,8 @@ module Dev
     # Receive a string from the user on stdin unless non_interactive is set to true
     # If a default value was specified and no answer was given, return the default
     def gather_input(default: nil)
-      answer = $stdin.gets unless ENV['NON_INTERACTIVE'] == 'true'
-      answer
+      answer = $stdin.gets.to_s.strip unless ENV['NON_INTERACTIVE'] == 'true'
+      answer.to_s.strip
       return default if default && answer.empty?

       answer

@@ -187,25 +187,5 @@ module Dev
         return false
       end
     end
-
-    # Center the string and pad on either side with the given padding character
-    def center_pad(string = '', pad: '-', len: 80)
-      string = " #{string} " unless string.strip.empty?
-      center_dash = len / 2
-      string = string.to_s
-      center_str = string.length / 2
-      string.rjust(center_dash + center_str - 1, pad).ljust(len - 1, pad)
-    end
-
-    # Print the given filesize using the most appropriate units
-    def filesize(size)
-      return '0.0 B' if size.to_i.zero?
-
-      units = %w(B KB MB GB TB Pb EB)
-      exp = (Math.log(size) / Math.log(1024)).to_i
-      exp = 6 if exp > 6
-
-      format('%.1f %s', size.to_f / (1024**exp), units[exp])
-    end
   end
 end
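One practical consequence of the gather_input change: the answer is stripped before the empty-check, so a bare Enter (which $stdin.gets returns as "\n") now falls through to the default instead of being treated as a non-empty answer. A tiny illustration of the stripping logic in isolation:

    default = 'y'
    answer = "\n".to_s.strip   # what a bare Enter becomes after stripping
    answer.empty?              # => true, so the default 'y' is returned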
data/lib/firespring_dev_commands/docker/status.rb CHANGED

@@ -33,26 +33,6 @@ module Dev
         EXITED,
         DEAD
       ].freeze
-
-      # TODO: Can we use 'curses' here and overwrite the correct line?
-      def response_callback(response)
-        response.split("\n").each do |line|
-          data = JSON.parse(line)
-          if data.include?('status')
-            if data['id']
-              LOG.info "#{data['id']}: #{data['status']}"
-            else
-              LOG.info (data['status']).to_s
-            end
-          elsif data.include?('errorDetail')
-            raise data['errorDetail']['message']
-          elsif data.include?('aux')
-            next
-          else
-            raise "Unrecognized message from docker: #{data}"
-          end
-        end
-      end
     end
   end
 end
data/lib/firespring_dev_commands/docker.rb CHANGED

@@ -103,7 +103,7 @@ module Dev
       LOG.info "\nDeleted #{type.capitalize}"
       deleted_items = info["#{type}Deleted"] || []
       deleted_items.each { |it| LOG.info " #{it}" }
-      LOG.info "Total reclaimed space: #{
+      LOG.info "Total reclaimed space: #{filesize(info['SpaceReclaimed'])}"
     end

     # Print the given filesize using the most appropriate units

@@ -117,44 +117,6 @@ module Dev
       format('%.1f %s', size.to_f / (1024**exp), units[exp])
     end

-    # Push the local version of the docker image to the defined remote repository
-    def push_image(image, name, tag = nil)
-      unless tag
-        if name.include?(':')
-          name, tag = name.split(':')
-        else
-          tag = 'latest'
-        end
-      end
-
-      puts "Pushing to #{name}:#{tag}"
-      image.push(::Docker.creds, repo_tag: "#{name}:#{tag}") { |response| Dev::Docker::Status.new.response_callback(response) }
-    end
-
-    # Push the remote version of the docker image from the defined remote repository
-    def pull_image(name, tag = nil)
-      unless tag
-        if name.include?(':')
-          name, tag = name.split(':')
-        else
-          tag = 'latest'
-        end
-      end
-
-      puts "\nPulling #{name}:#{tag}"
-      opts = {
-        fromImage: "#{name}:#{tag}",
-        platform: Dev::Platform.new.architecture
-      }
-      ::Docker::Image.create(**opts) { |response| Dev::Docker::Status.new.response_callback(response) }
-    end
-
-    # Remove the local version of the given docker image
-    def untag_image(image, name, tag)
-      puts "Untagging #{name}:#{tag}"
-      image.remove(name: "#{name}:#{tag}")
-    end
-
     # Remove docker images with the "force" option set to true
     # This will remove the images even if they are currently in use and cause unintended side effects.
     def force_remove_images(name_and_tag)

@@ -176,59 +138,27 @@ module Dev
       Docker::Compose.new.mapped_public_port(name, private_port)
     end

-    # Gets the default working dir of the container
-    def working_dir(container)
-      container.json['Config']['WorkingDir']
-    end
-
     # Copies the source path on your local machine to the destination path on the container
-    def copy_to_container(container,
-
-
-
-
-
-
-
-
-
-      filetype_cmd = [
-        'bash',
-        '-c',
-        "set -e; [ ! -e '#{destination}' ] && exit #{noexist_code}; [ -f '#{destination}' ] " \
-        "&& exit #{file_code}; [ -d '#{destination}' ] && exit #{directory_code}; exit #{unknown_code}"
-      ]
-      destination_filetype_code = container.exec(filetype_cmd).last
-
-      # If destination_filetype_code is a file - that means the user passed in a destination filename
-      # Unfortunately the archive_in command does not support that so we will strip it off and use it later (if needed)
-      source_filename = File.basename(source)
-      destination_filename = File.basename(source)
-      destination, _, destination_filename = destination.rpartition(File::SEPARATOR) if destination_filetype_code == file_code
-
-      container.archive_in(source, destination, overwrite: true)
-
-      if File.directory?(source)
-        # If the source was a directory, then the archive_in command leaves it as a tar on the system - so we need to unpack it
-        # TODO: Can we find a better solution for this? Seems pretty brittle
-        retcode = container.exec(['bash', '-c', "cd #{destination}; tar -xf #{destination_filename}; rm -f #{destination_filename}"]).last
-        raise 'Unable to unpack on container' unless retcode.zero?
-      elsif destination_filetype_code == file_code && source_filename != destination_filename
-        # If the destination was a file _and_ the filename is different than the source filename, then we need to rename it
-        retcode = container.exec(['bash', '-c', "cd #{destination}; mv #{source_filename} #{destination_filename}"]).last
-        raise "Unable to rename '#{source_filename}' to '#{destination_filename}' on container" unless retcode.zero?
-      end
+    def copy_to_container(container, source_path, dest_path)
+      LOG.info "Copying #{source_path} to #{dest_path}... "
+
+      container.archive_in(source_path, dest_path, overwrite: true)
+      return unless File.directory?(source_path)
+
+      dest_file = File.basename(source_path)
+      # TODO: Can we find a better solution for this? Seems pretty brittle
+      retcode = container.exec(['bash', '-c', "cd #{dest_path}; tar -xf #{dest_file}; rm -f #{dest_file}"])[-1]
+      raise 'Unable to unpack on container' unless retcode.zero?
     end

     # Copies the source path on the container to the destination path on your local machine
     # If required is set to true, the command will fail if the source path does not exist on the container
-    def copy_from_container(container,
-
-      LOG.info "Copying #{source} to #{destination}..."
+    def copy_from_container(container, source_path, dest_path, required: true)
+      LOG.info "Copying #{source_path} to #{dest_path}... "

       tar = StringIO.new
       begin
-        container.archive_out(
+        container.archive_out(source_path) do |chunk|
           tar.write(chunk)
         end
       rescue => e

@@ -237,7 +167,7 @@ module Dev
         puts 'Not Found'
       end

-      Dev::Tar.new(tar).unpack(
+      Dev::Tar.new(tar).unpack(source_path, dest_path)
     end

     # rubocop:disable Metrics/ParameterLists

@@ -275,7 +205,7 @@ module Dev
       arch = "#{arch}/#{variant}" if variant
       id = image.info&.dig('id')&.split(':')&.last&.slice(0..11)
       created = timesince(Time.at(image.info&.dig('Created')))
-      size =
+      size = filesize(image.info&.dig('Size'))

       repo_urls = image.info&.dig('RepoTags')
       repo_urls ||= ["#{image.info&.dig('RepoDigests')&.first&.split(':')&.first&.split('@')&.first}:<none>"]
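The filesize formula visible in the hunks above scales by powers of 1024 and picks the unit from the exponent. A worked example of that arithmetic:

    # exp picks the unit: 0 => B, 1 => KB, 2 => MB, and so on
    size = 1536
    exp = (Math.log(size) / Math.log(1024)).to_i    # => 1
    format('%.1f %s', size.to_f / (1024**exp), %w(B KB MB GB TB PB EB)[exp])
    # => "1.5 KB"

Separately, the rewritten copy_to_container treats dest_path as a directory on the container (it cds into it to unpack the tar), whereas the old version also handled a destination filename.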
data/lib/firespring_dev_commands/eol/aws.rb CHANGED

@@ -72,23 +72,19 @@ module Dev
     # Queries and returns product versions for rds instance products
     def rds_instance_products
       aws_engines = %w(mysql postgresql)
-      aws_sqlserver_engines = %w(sqlserver-ee sqlserver-ex sqlserver-se sqlserver-web)
       client = ::Aws::RDS::Client.new

       [].tap do |ary|
         Dev::Aws.each_page(client, :describe_db_instances) do |response|
           response.db_instances.each do |instance|
             name = instance.db_instance_identifier
-            engine = instance.engine.
+            engine = instance.engine.tr('aurora-', '')
             product = if aws_engines.include?(engine)
                         "amazon-rds-#{engine}"
-                      elsif aws_sqlserver_engines.include?(engine)
-                        'mssqlserver'
                       else
                         engine
                       end
             version = instance.engine_version.reverse.split('.')[-2..].join('.').reverse
-            version.chop! if version.end_with?('.00')
             ary << Dev::EndOfLife::ProductVersion.new(product, version, name)
           end
         end

@@ -98,23 +94,19 @@ module Dev
     # Queries and returns product versions for rds cluster products
     def rds_cluster_products
       aws_engines = %w(mysql postgresql)
-      aws_sqlserver_engines = %w(sqlserver-ee sqlserver-ex sqlserver-se sqlserver-web)
       client = ::Aws::RDS::Client.new

       [].tap do |ary|
         Dev::Aws.each_page(client, :describe_db_clusters) do |response|
           response.db_clusters.each do |cluster|
             name = cluster.db_cluster_identifier
-            engine = cluster.engine.
+            engine = cluster.engine.tr('aurora-', '')
             product = if aws_engines.include?(engine)
                         "amazon-rds-#{engine}"
-                      elsif aws_sqlserver_engines.include?(engine)
-                        'mssqlserver'
                       else
                         engine
                       end
             version = cluster.engine_version.reverse.split('.')[-2..].join('.').reverse
-            version.chop! if version.end_with?('.00')
             ary << Dev::EndOfLife::ProductVersion.new(product, version, name)
           end
         end
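Both RDS helpers keep only the first two dot-separated components of the engine version via the reverse/split/join expression shown above; a quick check with an arbitrary version string:

    engine_version = '15.4.1'    # arbitrary example, not taken from the diff
    engine_version.reverse.split('.')[-2..].join('.').reverse
    # => "15.4"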
data/lib/firespring_dev_commands/eol.rb CHANGED

@@ -46,33 +46,13 @@ module Dev
       @products
     end

-    # Returns true if the given product is supported either in the endoflife api products or a manual product
-    def product?(product)
-      products.include?(product) || self.class.config.manual_dates.any? { |key, _| key.to_s.start_with?("#{product}_") }
-    end
-
-    # Prints all of the product version statuses
-    def status
-      if product_versions.empty?
-        puts ' no tracked products'
-        return
-      end
-
-      product_versions.sort_by(&:name).each(&:print_status)
-    end
-
-    # Returns true if any of the products are EOL
-    def eol?
-      product_versions.any?(&:eol)
-    end
-
     # Prints all of the product version statuses
     # Raises an error if any products are EOL
     def check
       puts
-
+      product_versions.sort_by(&:name).each(&:print_status)
       puts
-      raise 'found EOL versions' if eol
+      raise 'found EOL versions' if product_versions.any?(&:eol)
     end
   end
 end
data/lib/firespring_dev_commands/git.rb CHANGED

@@ -1,6 +1,5 @@
 require 'fileutils'
 require 'git'
-require 'octokit'

 module Dev
   # Class for performing git functions

@@ -97,8 +96,7 @@ module Dev
     def branch_name(dir: default_project_dir)
       return unless File.exist?(dir)

-
-      g.current_branch || "HEAD detached at #{g.object('HEAD').sha[0..7]}"
+      ::Git.open(dir).current_branch
     end

     # Returns true if the remote branch exists, false otherwise

@@ -115,10 +113,10 @@ module Dev
         next unless File.exist?(project_dir)

         repo_basename = File.basename(File.realpath(project_dir))
-        header = "#{repo_basename} (#{original_branches[project_dir]})"
-        puts
+        header = " #{repo_basename} (#{original_branches[project_dir]}) "
+        puts center_pad(header).light_green
         @success &= status(dir: project_dir)
-        puts
+        puts center_pad.light_green
       end
       puts


@@ -167,10 +165,10 @@ module Dev
         next unless File.exist?(project_dir)

         repo_basename = File.basename(File.realpath(project_dir))
-        header = "#{repo_basename} (#{original_branches[project_dir]})"
-        puts
+        header = " #{repo_basename} (#{original_branches[project_dir]}) "
+        puts center_pad(header).light_green
         reset(dir: project_dir)
-        puts
+        puts center_pad.light_green
       end
       puts
     end

@@ -193,9 +191,10 @@ module Dev
         next unless File.exist?(project_dir)

         repo_basename = File.basename(File.realpath(project_dir))
-
+        header = " #{repo_basename} "
+        puts center_pad(header).light_green
         @success &= checkout(branch, dir: project_dir)
-        puts
+        puts center_pad.light_green
       end
       puts


@@ -286,9 +285,10 @@ module Dev
         next unless File.exist?(project_dir)

         repo_basename = File.basename(File.realpath(project_dir))
-
+        header = " #{repo_basename} "
+        puts center_pad(header).light_green
         @success &= merge(branch, dir: project_dir)
-        puts
+        puts center_pad.light_green
       end
       puts


@@ -334,9 +334,10 @@ module Dev
         next unless File.exist?(project_dir)

         repo_basename = File.basename(File.realpath(project_dir))
-
+        header = " #{repo_basename} "
+        puts center_pad(header).light_green
         @success &= pull(dir: project_dir)
-        puts
+        puts center_pad.light_green
       end
       puts


@@ -372,9 +373,10 @@ module Dev
         next unless File.exist?(project_dir)

         repo_basename = File.basename(File.realpath(project_dir))
-
+        header = " #{repo_basename} "
+        puts center_pad(header).light_green
         @success &= push(dir: project_dir)
-        puts
+        puts center_pad.light_green
       end
       puts


@@ -409,10 +411,7 @@ module Dev
     # Clones the repo_name into the dir
     # Optionally specify a repo_org
     # Optionally specify a branch to check out (defaults to the repository default branch)
-    def clone_repo(dir:, repo_name:, repo_org:
-      # TODO: Split out the default of 'firespring' into a configuration variable
-      repo_org = 'firespring' if repo_org.to_s.strip.empty?
-
+    def clone_repo(dir:, repo_name:, repo_org: 'firespring', branch: nil, depth: nil)
       if Dir.exist?("#{dir}/.git")
         puts "#{dir} already cloned".light_green
         return

@@ -429,19 +428,6 @@ module Dev
       g.fetch('origin', prune: true)
     end

-    def commit_status(token:, repo_name:, commit_id:, status:, repo_org: nil, options: {})
-      # TODO: Split out the default of 'firespring' into a configuration variable
-      repo_org = 'firespring' if repo_org.to_s.strip.empty?
-      repo = "#{repo_org}/#{repo_name}"
-
-      # Set up the GitHub client
-      client = Octokit::Client.new(access_token: token)
-
-      # Create the commit status
-      puts "Tagging commit #{commit_id} in #{repo} as #{status} for #{options[:context]}"
-      client.create_status(repo, commit_id, status, options)
-    end
-
     # Builds an ssh repo URL using the org and repo name given
     def ssh_repo_url(name, org)
       "git@github.com:#{org}/#{name}.git"

@@ -453,10 +439,11 @@ module Dev
     end

     # Center the string and pad on either side with the given padding character
-    # @deprecated Please use {Dev::Common#center_pad} instead
     def center_pad(string = '', pad: '-', len: 80)
-
-
+      center_dash = len / 2
+      string = string.to_s
+      center_str = string.length / 2
+      string.rjust(center_dash + center_str - 1, pad).ljust(len - 1, pad)
     end

     # Exclude the command from the message and print all error lines
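The center_pad helper that git.rb now implements inline (instead of delegating to the removed Dev::Common version) builds a 79-character rule with the header roughly centered. A worked example with the default pad '-' and len 80; 'foo' and 'main' are placeholder repo and branch names:

    header = ' foo (main) '    # callers above build " #{repo_basename} (#{branch}) "
    line = header.rjust(40 + (header.length / 2) - 1, '-').ljust(79, '-')
    line.length                # => 79 (33 dashes, the header, then 34 dashes)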
data/lib/firespring_dev_commands/jira/issue.rb CHANGED

@@ -5,13 +5,11 @@ module Dev
     # Issue subtypes which do not map to a story type
     NON_STORY_TYPES = ['epic', 'review', 'sub-task', 'code review sub-task', 'pre-deploy sub-task', 'deploy sub-task', 'devops sub-task'].freeze

-    attr_accessor :data, :project, :
-                  :last_in_progress_history, :first_in_review_history, :last_closed_history
+    attr_accessor :data, :project, :id, :title, :points, :assignee, :resolved_date, :histories, :last_in_progress_history, :first_in_review_history, :last_closed_history

     def initialize(data)
       @data = data
       @project = Jira::Project.new(data)
-      @parent = Jira::Parent.new(data) if data.respond_to?(:parent)
       @id = data.key
       @title = data.summary
       @points = calculate_points(data)
data/lib/firespring_dev_commands/php/audit.rb CHANGED

@@ -16,10 +16,6 @@ module Dev
     def to_report
       Dev::Audit::Report.new(
         data['advisories'].map do |_, v|
-          # If there are multiple advisories for the same package, v changes from an array into a hash
-          v = v.values if v.is_a?(Hash)
-
-          # Iterate over the advisories and turn them into report items
           v.map do |it|
             Dev::Audit::Report::Item.new(
               id: it['advisoryId'],