prima-twig 0.41.6 → 0.42.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/twig-feature +1 -1
- data/bin/twig-update-ami +147 -129
- data/lib/prima_aws_client.rb +38 -0
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2bba0686dbbaea6743f6ae770b2337067e127f50d38c9659922f34d2b55aa38e
+  data.tar.gz: 61986a6d3dd0e96bda0063164212130b3020c54cb48794176b6af5fc1072f531
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 84d9573bfa3aa39a9003336d7312e19187e4432da927087b1a9681666f05263fcc40bf379c2fb25e76d20157f5861ca134ed1090f2afe9eaa4c35c2b736aac95
+  data.tar.gz: 878c964e52e8e24f7ebbef73a60c8df821bd78427b9e99551e9d8e9d68f58a348ceaaddd0bd5f342b1e605c3112dc671ab655fc388dc136cc3032e8e209c41ef
data/bin/twig-feature
CHANGED
@@ -2684,7 +2684,7 @@ class Release
       exec_step 'prepare-docker-compose --directory backoffice && cp docker-compose-qainit.yml docker-compose.yml'
       [
         "docker-compose build",
-        "docker-compose run -w $PWD -e PHANTOMJS_BIN=$PWD/node_modules/grunt-selenium-webdriver/node_modules/phantomjs/lib/phantom/bin/phantomjs web 'sed -i \"s/web-qa-url/#{web_qa_host}/g\" Gruntfile.js && sed -i \"s/web-qa-ri-url/#{webri_qa_host}/g\" Gruntfile.js && sed -i \"s/qa-ip/#{ip_qa_host}/g\" Gruntfile.js &&
+        "docker-compose run -w $PWD -e PHANTOMJS_BIN=$PWD/node_modules/grunt-selenium-webdriver/node_modules/phantomjs/lib/phantom/bin/phantomjs web 'sed -i \"s/web-qa-url/#{web_qa_host}/g\" Gruntfile.js && sed -i \"s/web-qa-ri-url/#{webri_qa_host}/g\" Gruntfile.js && sed -i \"s/qa-ip/#{ip_qa_host}/g\" Gruntfile.js && npm install && bower install --allow-root --config.interactive=false && grunt qa'"
       ].each do |cmd|
         execute_command cmd
       end
data/bin/twig-update-ami
CHANGED
@@ -15,68 +15,103 @@ class TwigUpdateAmi
     exec "gem update prima-twig && twig update-ami #{ARGV.join ' '}" unless `gem outdated`.lines.grep(/^prima-twig \(.*\)/).empty?
     @s3 = Aws::S3::Client.new
     @s3_bucket = 'prima-deploy'
-    @
-    @instances_staging = JSON.parse File.read('../twig-binaries/cloudformation_staging.json')
+    @templates_base_url = "https://s3-eu-west-1.amazonaws.com"
   end
 
   def execute!(args)
-
-    update_amis args[0], args[1], args[2], [@instances_staging]
-    else
-      update_amis args[0], args[1], args[2], [@instances_production] #[@instances_staging, @instances_production]
-    end
+    update_amis(args[0], args[1], args[2], args[3], args[4])
   end
 
   private
 
-  def update_amis(ami_id, ami_name, ami_description,
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  def update_amis(ami_template, ami_id, ami_name, ami_description, env)
+    output "updating instance definition #{ami_template}".light_green
+    Dir.chdir 'ami'
+    update_instance_name(ami_id, ami_name, ami_description, ami_template)
+    output 'running packer update (this could take some time)'.light_green
+    new_ami_id = update_packer(ami_template)
+    Dir.chdir '..'
+    stop_if(new_ami_id.to_s.empty?, 'Failed to generate AMI!'.red)
+    output "new ami id: #{new_ami_id}"
+
+    output 'searching for ami to update...'
+    ami_mappings = JSON.parse(@s3.get_object(bucket: @s3_bucket, key: "ami/ami-mappings.json")["body"].read())
+    old_amis = update_ami_mappings(ami_mappings, ami_template, env, new_ami_id)
+    stop_if(old_amis.empty?, "No ami to update! No #{ami_template} in env #{env}, exiting".yellow)
+
+    output 'retrieving stacks that uses old ami ids'
+    exports = list_exports()
+    stacks = get_stacks_from_exports(exports, old_amis)
+    stop_if(stacks.empty?, "No stack to update found! This means that ami-mapping file is not in sync, please check manually")
+
+    stacks.each do |stack|
+      output "stack to update: #{stack}"
+      if stack.include?('batch')
+        stack_parameters = update_stack_parameters(stack,
+          [
+            { parameter_key: 'AMIID', parameter_value: new_ami_id }
+          ]
+        )
+        update_batch_compute_environment(stack, get_stack_template(stack), stack_parameters, tags_to_hashes(get_stack_tags(stack)), env, old_amis)
+      else
+        stack_parameters = update_stack_parameters(stack,
+          [
+            { parameter_key: 'AMIID', parameter_value: new_ami_id },
+            { parameter_key: 'DesiredCapacity', parameter_value: get_desired_capacity(stack).to_s }
+          ]
+        )
+        if stack.include?('spotfleet')
+          update_spotfleet(stack, get_stack_template(stack), stack_parameters, tags_to_hashes(get_stack_tags(stack)))
+        else
+          update_stack(stack, get_stack_template(stack), stack_parameters)
         end
+      end
+    end
 
-
-
-
-
-
+    stacks.each do |stack|
+      wait_for_stack_ready(stack, ['CREATE_FAILED', 'ROLLBACK_IN_PROGRESS', 'ROLLBACK_FAILED', 'DELETE_IN_PROGRESS', 'DELETE_FAILED', 'DELETE_COMPLETE', 'UPDATE_ROLLBACK_FAILED', 'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS', 'UPDATE_ROLLBACK_COMPLETE', 'ROLLBACK_COMPLETE'])
+    end
+
+    output 'writing new ami mapping'
+    File.open("ami/ami-mappings.json", 'w+') do |f|
+      mapping_file = JSON.pretty_generate(ami_mappings)
+      f.write(mapping_file)
+      @s3.put_object(bucket: @s3_bucket, key: "ami/ami-mappings.json", body: mapping_file)
+    end
 
-
-
-
-
-
-
+    output 'Update finished! ( ͡° ͜ʖ ͡°)'
+  end
+
+  def get_stacks_from_exports(exports, old_amis)
+    stacks = []
+    old_amis.each do |old_ami|
+      exports.each do |export|
+        if export.value.eql?(old_ami)
+          stacks.insert(0,export.exporting_stack_id)
         end
       end
     end
+    stacks
+  end
+
+  def update_ami_mappings(mappings, ami_template, env, new_ami_id)
+    old_values = []
+    mappings.each do |item|
+      if item['ami_template'].eql?(ami_template) and item['env'].eql?(env)
+        old_values.insert(0,item['ami_id'])
+        item['ami_id'] = new_ami_id
+      end
+    end
+    old_values.uniq
+  end
+
+  def update_stack_parameters(stack_name, new_parameters)
+    stack_parameters = get_stack_parameters(stack_name)
+    new_parameters.each do |new_param|
+      stack_parameters.reject{ |k| k["parameter_key"] == new_param["parameter_key"] }
+      stack_parameters.push(new_param)
+    end
+    stack_parameters
   end
 
   def update_instance_name(ami_id, ami_name, ami_description, ecs_json_path)
@@ -108,108 +143,91 @@ class TwigUpdateAmi
     `grep 'artifact,0,id' build.log | cut -d, -f6 | cut -d: -f2`.sub(/\n/, '')
   end
 
-  def
-
-
-
-
-  end
+  def update_body_and_tags(stack_body, stack_tags, old_amis = [])
+    if stack_tags['ArtemideTemplatePath'].include?('spotfleet')
+      if stack_tags['ArtemideTemplatePath'].include?('ci')
+        return
+      end
 
-
-
-
-      handle_version = old_handle.sub('InstanceReadyWaitHandleUpdate', '').to_i + 1
+      stack_tags['SpotFleetHandleVersion'] = stack_tags['SpotFleetHandleVersion'].to_i + 1
+      stack_body.gsub!(/InstanceReadyWaitHandleUpdate[0-9]*/, 'InstanceReadyWaitHandleUpdate' + stack_tags['SpotFleetHandleVersion'].to_s)
+      stack_body.gsub!(/InstanceReadyWaitConditionUpdate[0-9]*/, 'InstanceReadyWaitConditionUpdate' + stack_tags['SpotFleetHandleVersion'].to_s)
 
-
-
+      File.open stack_tags['ArtemideTemplatePath'], 'w' do |f|
+        f.write(stack_body)
+        @s3.put_object(body: stack_body, bucket: @s3_bucket, key: stack_tags['ArtemideTemplatePath'])
+      end
 
-
-
-
+    elsif stack_tags['ArtemideTemplatePath'].include?('batch')
+      stack_body.gsub!(/(\w+:\s+)!(\w+)/i, '\1QuaCeraUnPuntoEsclamativo\2')
+      stack_body_original = stack_body.clone
+      ce_name = stack_tags['ComputeEnvironment'].sub(/[0-9]+/, '')
+      new_ce_version = stack_tags['ComputeEnvironment'].sub(/[a-zA-Z]*/, '').to_i + 1
+      new_ce_name = ce_name + new_ce_version.to_s
+      stack_body.gsub!(/#{ce_name}[0-9]*/, new_ce_name)
+      stack_body_original.gsub!("QuaCeraUnPuntoEsclamativoRef AMIID", old_amis[0])
+      stack_tags['OldComputeEnvironment'] = stack_tags['ComputeEnvironment']
+      stack_tags['ComputeEnvironment'] = new_ce_name
+
+      File.open stack_tags['ArtemideTemplatePath'] + 'new', 'w' do |f|
+        f.write(stack_body)
+      end
 
-
-
-
+      yaml_stack_body = YAML.load(stack_body_original)
+      yaml_stack_body_new = YAML.load(stack_body)
+      yaml_stack_body_merged = (yaml_stack_body.deep_merge(yaml_stack_body_new)).to_yaml.gsub('QuaCeraUnPuntoEsclamativo', '!')
+
+      File.open(stack_tags['ArtemideTemplatePath'], 'w') do |file|
+        file.write yaml_stack_body_merged
+        @s3.put_object(body: yaml_stack_body_merged, bucket: @s3_bucket, key: stack_tags['ArtemideTemplatePath'])
       end
     end
   end
 
-  def
-
-    @
-      body: body,
-      bucket: @s3_bucket,
-      key: s3_key
-    )
+  def update_spotfleet(stack_name, stack_template, stack_parameters, stack_tags)
+    update_body_and_tags(stack_template, stack_tags)
+    update_stack_url(stack_name, "#{@templates_base_url}/#{@s3_bucket}/#{stack_tags['ArtemideTemplatePath']}", stack_parameters, hashes_to_tags(stack_tags))
   end
 
-  def update_batch_compute_environment(
-    output
-
-
-
-
-    old_ce_name = file_content[/#{stack['label']}[0-9]*/]
-    new_ce_version = old_ce_name.sub(stack['label'], '').to_i + 1
-    new_ce_name = stack['label'] + new_ce_version.to_s
-    file_content.gsub!(old_ce_name, new_ce_name)
-    File.open stack['yaml_filename'] + 'new', 'w' do |f|
-      f.write file_content
-    end
-    update_yml_files(ami_id, stack['yaml_filename'] + 'new')
+  def update_batch_compute_environment(stack_name, stack_body, stack_parameters, stack_tags, env, old_amis)
+    output "updating #{stack_name} to add a new compute environment"
+    update_body_and_tags(stack_body, stack_tags, old_amis)
+    output 'updating stack on cloudformation (step 1)'
+    update_stack_url(stack_name, "#{@templates_base_url}/#{@s3_bucket}/#{stack_tags['ArtemideTemplatePath']}", stack_parameters, hashes_to_tags(stack_tags))
+    wait_for_stack_ready(stack_name, ['CREATE_FAILED', 'ROLLBACK_IN_PROGRESS', 'ROLLBACK_FAILED', 'DELETE_IN_PROGRESS', 'DELETE_FAILED', 'DELETE_COMPLETE', 'UPDATE_ROLLBACK_FAILED', 'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS', 'UPDATE_ROLLBACK_COMPLETE', 'ROLLBACK_COMPLETE'])
 
-
-
-    stack_body_merged = stack_body.deep_merge stack_body_new
-    File.open(stack['yaml_filename'], 'w') do |file|
-      file.write stack_body_merged.to_yaml.gsub('QuaCeraUnPuntoEsclamativo', '!')
-    end
-
-    output 'updating stack on cloudformation, (step 1)'
-    copy_yml_files_to_s3(stack['yaml_filename'], stack['s3_key'])
-    if not stack['stack_name'] or not stack_exists?(stack['stack_name'])
-      return false
-    end
-
-    update_stack_url(stack['stack_name'], stack['template_url'], get_stack_parameters(stack['stack_name']))
-    wait_for_stack_ready(stack['stack_name'], ['CREATE_FAILED', 'ROLLBACK_IN_PROGRESS', 'ROLLBACK_FAILED', 'DELETE_IN_PROGRESS', 'DELETE_FAILED', 'DELETE_COMPLETE', 'UPDATE_ROLLBACK_FAILED', 'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS', 'UPDATE_ROLLBACK_COMPLETE', 'ROLLBACK_COMPLETE'])
-
-    output "retrieving the list of stacks that are currently using the stack #{stack['stack_name']}"
-    if stack['stack_name'].include?('staging')
-      job_stacks = list_import_stacks old_ce_name + '-staging'
-    else
-      job_stacks = list_import_stacks old_ce_name + '-production'
-    end
-    job_stacks = list_import_stacks old_ce_name + '-production'
+    output "retrieving the list of stacks that are currently using the stack #{stack_name}"
+    job_stacks = list_import_stacks(stack_tags['OldComputeEnvironment'] + "-" + env)
     job_stacks.each do |job_stack_name|
       output "updating the stack #{job_stack_name} to use to the new compute environment"
-
-
-
-
-
-
-
+      job_stack_parameters = update_stack_parameters(job_stack_name,
+        [
+          {
+            parameter_key: "ComputeEnvironmentExportName",
+            parameter_value: stack_tags['ComputeEnvironment']
+          }
+        ]
       )
-      update_stack(job_stack_name,
+      update_stack(job_stack_name, get_stack_template(job_stack_name), job_stack_parameters)
     end
     job_stacks.each do |job_stack_name|
       wait_for_stack_ready(job_stack_name, ['CREATE_FAILED', 'ROLLBACK_IN_PROGRESS', 'ROLLBACK_FAILED', 'DELETE_IN_PROGRESS', 'DELETE_FAILED', 'DELETE_COMPLETE', 'UPDATE_ROLLBACK_FAILED', 'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS', 'UPDATE_ROLLBACK_COMPLETE', 'ROLLBACK_COMPLETE'])
     end
 
-
-    File.open
-      f.write
+    stack_body = File.read(stack_tags['ArtemideTemplatePath'] + 'new').gsub('QuaCeraUnPuntoEsclamativo', '!')
+    File.open stack_tags['ArtemideTemplatePath'], 'w' do |f|
+      f.write stack_body
+      @s3.put_object(body: stack_body, bucket: @s3_bucket, key: stack_tags['ArtemideTemplatePath'])
     end
+    stack_tags.delete('OldComputeEnvironment')
 
-    output "updating stack #{
-
-
-    wait_for_stack_ready(stack['stack_name'], ['CREATE_FAILED', 'ROLLBACK_IN_PROGRESS', 'ROLLBACK_FAILED', 'DELETE_IN_PROGRESS', 'DELETE_FAILED', 'DELETE_COMPLETE', 'UPDATE_ROLLBACK_FAILED', 'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS', 'UPDATE_ROLLBACK_COMPLETE', 'ROLLBACK_COMPLETE'])
+    output "updating stack #{stack_name} on cloudformation to remove the old compute environment"
+    update_stack_url(stack_name, "#{@templates_base_url}/#{@s3_bucket}/#{stack_tags['ArtemideTemplatePath']}", stack_parameters, hashes_to_tags(stack_tags))
+    wait_for_stack_ready(stack_name, ['CREATE_FAILED', 'ROLLBACK_IN_PROGRESS', 'ROLLBACK_FAILED', 'DELETE_IN_PROGRESS', 'DELETE_FAILED', 'DELETE_COMPLETE', 'UPDATE_ROLLBACK_FAILED', 'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS', 'UPDATE_ROLLBACK_COMPLETE', 'ROLLBACK_COMPLETE'])
 
-    File.delete(
+    File.delete(stack_tags['ArtemideTemplatePath'] + 'new')
 
-    output "cloudformation stack update for #{
+    output "cloudformation stack update for #{stack_name} done!"
   end
 
   def help_content
@@ -229,7 +247,7 @@ class TwigUpdateAmi
 -----------
 
 from artemide main folder run
-`twig-update-ami ${AMI_ID} ${AMI_NAME} ${AMI_DESCRIPTION}`
+`twig-update-ami ${AMI_TEMPLATE} ${AMI_ID} ${AMI_NAME} ${AMI_DESCRIPTION} ${ENV}`
 
 Subcommand for Twig: <http://rondevera.github.io/twig/>
 Author: Eugenio Laghi <https://github.com/eugeniolaghi>
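
Taken together, the twig-update-ami changes drop the per-environment cloudformation_*.json instance lists in favour of a single ami/ami-mappings.json lookup keyed on template and environment, and the command now takes five arguments, as the updated help text shows. Below is a minimal, self-contained sketch of that lookup, mirroring the new update_ami_mappings method above. The shape of the mapping file (entries with ami_template, env and ami_id keys) is an assumption inferred from the fields the method reads, and the AMI ids are made up.

require 'json'

# Hypothetical mapping file content, shaped after the keys read by
# update_ami_mappings (ami_template, env, ami_id) -- not the real data.
mappings = JSON.parse(<<~JSON)
  [
    { "ami_template": "ecs.json", "env": "production", "ami_id": "ami-0000aaaa" },
    { "ami_template": "ecs.json", "env": "staging",    "ami_id": "ami-0000bbbb" }
  ]
JSON

# Same logic as update_ami_mappings: collect the AMI ids currently mapped to
# this template/env pair and swap in the freshly baked one.
def update_ami_mappings(mappings, ami_template, env, new_ami_id)
  old_values = []
  mappings.each do |item|
    if item['ami_template'].eql?(ami_template) && item['env'].eql?(env)
      old_values.insert(0, item['ami_id'])
      item['ami_id'] = new_ami_id
    end
  end
  old_values.uniq
end

old_amis = update_ami_mappings(mappings, 'ecs.json', 'production', 'ami-1111cccc')
puts old_amis.inspect                # => ["ami-0000aaaa"]
puts JSON.pretty_generate(mappings)  # the production entry now points at ami-1111cccc

The returned old AMI ids are what the tool then matches against CloudFormation exports (via the new list_exports helper in prima_aws_client.rb) to find the stacks that still reference the outgoing image.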
data/lib/prima_aws_client.rb
CHANGED
@@ -40,6 +40,27 @@ module PrimaAwsClient
     stacks
   end
 
+  def list_exports
+    exports = []
+    next_token = ''
+    loop do
+      print '.'.yellow; STDOUT.flush
+      options = next_token != '' ? { next_token: next_token } : {}
+      begin
+        resp = cf_client.list_exports(options)
+      rescue Aws::CloudFormation::Errors::Throttling => e
+        output 'Throttling, retrying in 15 seconds'.red
+        sleep 15
+        resp = cf_client.list_exports(options)
+      end
+      exports += resp.exports
+      break unless resp.next_token
+      next_token = resp.next_token
+    end
+    puts '.'.yellow; STDOUT.flush
+    exports
+  end
+
   def create_stack(stack_name, stack_body, parameters = [], tags = [], role = nil)
     cf_args = {
       stack_name: stack_name,
@@ -273,4 +294,21 @@ module PrimaAwsClient
     resp = appscaling_client.describe_scalable_targets(service_namespace: 'ec2', resource_ids: ["spot-fleet-request/#{fleet_arn}"])
     resp.scalable_targets[0].min_capacity
   end
+
+  def hashes_to_tags(hashes)
+    tags = []
+    hkeys = hashes.keys
+    hkeys.each do |hkey|
+      tags.insert(0, { key: hkey, value: hashes[hkey].to_s })
+    end
+    tags
+  end
+
+  def tags_to_hashes(tags)
+    hash = Hash.new
+    tags.each do |tags_obj|
+      hash[tags_obj.key] = tags_obj.value
+    end
+    hash
+  end
 end
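
The two new helpers convert between the tag objects the CloudFormation client returns and a plain Hash that twig-update-ami can edit in place (bumping SpotFleetHandleVersion, swapping ComputeEnvironment) before handing the tags back to update_stack_url. A minimal round-trip sketch, using a Struct as a stand-in for the SDK tag type (an assumption for illustration; the real code presumably receives tag objects from get_stack_tags) and made-up tag values:

# Stand-in for the SDK tag objects (key/value readers); illustrative only.
SdkTag = Struct.new(:key, :value)

def tags_to_hashes(tags)
  hash = Hash.new
  tags.each do |tags_obj|
    hash[tags_obj.key] = tags_obj.value
  end
  hash
end

def hashes_to_tags(hashes)
  tags = []
  hkeys = hashes.keys
  hkeys.each do |hkey|
    tags.insert(0, { key: hkey, value: hashes[hkey].to_s })
  end
  tags
end

stack_tags = tags_to_hashes([SdkTag.new('ComputeEnvironment', 'spotweb10'),
                             SdkTag.new('SpotFleetHandleVersion', '3')])
stack_tags['SpotFleetHandleVersion'] = stack_tags['SpotFleetHandleVersion'].to_i + 1
p hashes_to_tags(stack_tags)
# => [{:key=>"SpotFleetHandleVersion", :value=>"4"}, {:key=>"ComputeEnvironment", :value=>"spotweb10"}]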
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: prima-twig
 version: !ruby/object:Gem::Version
-  version: 0.41.6
+  version: 0.42.0
 platform: ruby
 authors:
 - Matteo Giachino
@@ -13,7 +13,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-10-
+date: 2018-10-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: aws-sdk