prima-twig 0.54.235 → 0.55.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/twig-deploy +69 -14
- data/bin/twig-feature +2408 -48
- data/bin/twig-update-ami +29 -29
- data/lib/prima_aws_client.rb +1 -159
- data/lib/prima_twig.rb +2 -4
- metadata +5 -118
- data/bin/twig-build +0 -2152
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-metadata.gz:
-data.tar.gz:
+metadata.gz: 250f32133d43076e476ac7c927122fcafc6bd697c6cd35ceb878dac84588ba43
+data.tar.gz: ea8a5aa33842d8a256e351c649c8ad3a056bc088568d07cfa955aabf29cbcf93
 SHA512:
-metadata.gz:
-data.tar.gz:
+metadata.gz: feaf2df5582620b7499708b6d8f09c74f92e5943b8aa92e9cd5c661257d2984606a8e0dc55e4b4d8a4c01fa11f43f6292579163ff649f56e085dbde1cf00a7a2
+data.tar.gz: 5ef83a72778cc6aea4d9f48e3c70be50df00082d84abee36c2817eb348e6b5b958b07017519ddcab9651dd9b3a86cf0f4cda10769a4a30a2d859aef67f081649
data/bin/twig-deploy
CHANGED
@@ -5,10 +5,7 @@ require_relative '../lib/prima_twig.rb'
 require_relative '../lib/prima_aws_client.rb'
 require 'colorize'
 require 'highline/import'
-require 'aws-sdk
-require 'aws-sdk-cloudformation'
-require 'aws-sdk-ecs'
-require 'aws-sdk-s3'
+require 'aws-sdk'
 require 'redcarpet'
 require 'mail'
 require 'erb'
@@ -58,7 +55,6 @@ class Review
 @cf = Aws::CloudFormation::Client.new
 @ecs = Aws::ECS::Client.new
 @s3 = Aws::S3::Client.new
-@batch = Aws::Batch::Client.new
 @s3_bucket = "prima-artifacts-encrypted"
 end

@@ -98,6 +94,15 @@ class Review
 artifact = artifacts.select {|v| v[:rev] == artifact_rev}.first

 do_deploy! artifact_rev
+# exec_step "terminal-notifier -message 'Deploy terminato, vuoi lanciare paparatzinger?'" if which 'terminal-notifier'
+#
+# confirm_message = "Vuoi lanciare paparatzinger?"
+# launch_paparatzinger = @prima.yesno confirm_message.blue
+#
+# if launch_paparatzinger
+# output "Avvio paparatzinger per gli screenshot".yellow
+# job_name = launch_paparatzinger(artifact[:commit_msg])
+# end

 mail = Mail.new do
 from 'deploy@prima.it'
@@ -113,6 +118,7 @@ class Review
 body << "Revision: [#{artifact[:rev]}](https://github.com/primait/prima/commit/#{artifact[:rev]}) del #{artifact[:created_at].strftime('%d/%m/%Y %H:%M:%S')}\n\n"
 body << "Branch: [#{artifact[:branch]}](https://github.com/primait/prima/tree/#{artifact[:branch]})\n\n"
 body << "Commit: #{commit_msg.gsub(/_/, '\_')}\n\n"
+#body << "Screenshots (tra qualche minuto): [BrowserStack](https://www.browserstack.com/automate) (Filtrare per: \"#{get_paparatzinger_job_name(commit_msg).gsub(/_/, '\_')}\")" if launch_paparatzinger

 htmlBody = Redcarpet::Markdown.new(Redcarpet::Render::HTML.new).render body

@@ -138,8 +144,6 @@ class Review

 invalidate_prismic_cache

-launch_crawler
-
 exec_step "terminal-notifier -message 'Deploy terminato'" if which 'terminal-notifier'
 end

@@ -213,16 +217,63 @@ class Review
 artifacts.sort_by { |v| v[:created_at] }.reverse
 end

-def
-
+def launch_paparatzinger(job_name)
+@s3.get_object(
+response_target: '/tmp/paparatzinger_twig.yml',
+bucket: 'prima-deploy',
+key: 'paparatzinger_twig.yml')
+
+paparatzinger_config = YAML.load_file '/tmp/paparatzinger_twig.yml'
+
+uri = URI.parse(paparatzinger_config['prima_api_search_url'])
+body = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
+req = Net::HTTP::Get.new(uri)
+req["x-apikey"] = paparatzinger_config['prima_api_token']
+response = http.request req
+response.body
+end
+
+saves = JSON.parse body

-
-
-
-
+save_code = saves.sample['unique_identifier']
+url_garanzie = "https://www.prima.it/preventivo/auto/#{save_code}/garanzie?browserstack=true"
+job_name = get_paparatzinger_job_name(clean_commit_message(job_name))
+
+logical_resource_id = 'TaskDefinitionPaparatzinger'
+resp = @cf.describe_stack_resource({
+stack_name: 'ecs-task-paparatzinger-production',
+logical_resource_id: logical_resource_id
+})
+
+resp = @ecs.run_task({
+cluster: 'ecs-cluster-tools-vpc-production-ECSCluster-1WJQLW5EVLYEB',
+task_definition: resp.stack_resource_detail.physical_resource_id,
+overrides: {
+container_overrides: [
+{
+name: 'paparatzinger',
+environment: [
+{
+name: 'JOB_NAME',
+value: job_name,
+},
+{
+name: 'VERSION',
+value: paparatzinger_config['version'],
+},
+{
+name: 'URL_GARANZIE',
+value: url_garanzie
+}
+]
+}
+]
+},
+count: 1
 })
+output "Paparatzinger lanciato con successo. URL: #{url_garanzie}\n".green

-
+job_name
 end

 end
@@ -234,6 +285,10 @@ def clean_commit_message(commit_msg)
 commit_msg[0..99]
 end

+def get_paparatzinger_job_name(job_name)
+job_name.gsub /[^0-9a-z]/i, '-'
+end
+
 def which(cmd)
 exts = ENV['PATHEXT'] ? ENV['PATHEXT'].split(';') : ['']
 ENV['PATH'].split(File::PATH_SEPARATOR).each do |path|
data/bin/twig-feature
CHANGED
@@ -22,9 +22,23 @@ class Release
 exec "twig feature #{ARGV.join ' '}"
 end
 end
+@cf = Aws::CloudFormation::Client.new
+@alb = Aws::ElasticLoadBalancingV2::Client.new
+@ec2 = Aws::EC2::Client.new
+@ecs = Aws::ECS::Client.new
+@batch = Aws::Batch::Client.new
+@asg = Aws::AutoScaling::Client.new
+@s3 = Aws::S3::Client.new
+@s3_bucket = 'prima-artifacts'
+@artifact_path = '/tmp/prima-artifact.zip'
+@import_db_task = 'arn:aws:ecs:eu-west-1:001575623345:task-definition/ecs-task-db-restore2-TaskDefinitionDbRestore-1BXH13XEVLPP0:1'
+@cf_role = 'arn:aws:iam::001575623345:role/qainit-service-role-cloudformat-CloudformationRole-18KBZQIS148R9'
 @dns_record_identifier = nil
 @ecs_cluster_name = nil
 @deploy_update = false
+@qainit = false
+@qainit_host_folder = "/var/ci/#{ENV['DRONE_REPO']}/#{ENV['DRONE_BRANCH']}/#{ENV['DRONE_BUILD_NUMBER']}"
+@qainit_folder = "/drone/src/github.com/project/primait/qainit"
 @projects = {
 'prima' => {},
 'urania' => {},
@@ -33,7 +47,7 @@ class Release
 'hal9000' => {},
 'fidaty' => {},
 'peano' => {},
-
+'rogoreport' => {},
 'assange' => {},
 'borat' => {},
 'crash' => {},
@@ -44,14 +58,11 @@ class Release
 'leftorium' => {},
 'pyxis-npm' => {},
 'starsky' => {},
-'hutch' => {}
-'maia' => {},
-'legion' => {}
+'hutch' => {}
 }
 @base_stack_name_alb = 'ecs-alb-http-public-qa-'
 @base_stack_name_alb_ws = 'ecs-alb-ws-public-qa-'
 @git_branch = ''
-@cloudflare = Rubyflare.connect_with(ENV['CLOUDFLARE_EMAIL'], ENV['CLOUDFLARE_APIKEY'])
 end

 def execute!(args)
@@ -66,6 +77,10 @@ class Release
 qainit_deploy_shutdown!
 elsif 'update' == args[1]
 qainit_deploy_update!
+elsif 'read' == args[1]
+qainit_read_config! args[2]
+elsif 'minimal' == args[1]
+qainit_minimal_deploy! args[2]
 else
 if args[1]
 select_branches(args[1..-1])
@@ -79,11 +94,26 @@ class Release
 if 'deploy' == args[1]
 suite_py_branches(args[2])
 qainit_deploy!(true)
+else
+qainit_deploy_shutdown!(args[2])
 end
 when 'deploy'
 abort('Non sei nella cartella di artemide') unless Dir.pwd.match 'artemide$'
-if '
+if ['terminate', 'stop', 'shutdown', 'halt', 'destroy'].include? args[1]
+deploy_shutdown!
+elsif 'update' == args[1]
+deploy_update!
+elsif 'lock' == args[1]
 deploy_lock!
+elsif 'minimal' == args[1]
+qainit_drone_minimal_deploy!
+else
+if args[1]
+select_branches(args[1])
+else
+select_branches
+end
+deploy_feature!
 end
 when 'aggregator'
 if 'enable' == args[1]
@@ -119,6 +149,7 @@ class Release
 output 'Disable aggregator'

 output "Recupero le informazioni relative al puntamento dei record DNS..."
+cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
 output "Recupero le informazioni sui QA attivi..."
 stack_list, envs = get_stacks()

@@ -130,7 +161,7 @@ class Release
 end.is_a?(Aws::CloudFormation::Types::Tag)
 aggregator_enabled
 end[0]
-dns_records =
+dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
 stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori non stanno puntando ad un QA".red
 change_hostname_priority(env_hash, hostname_pattern_priority())
 dns_to_staging(env_hash)
@@ -146,7 +177,8 @@ class Release
 output 'Enable aggregator'

 output 'Recupero le informazioni relative al puntamento dei record DNS...'
-
+cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
+dns_records = cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records', { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
 stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori stanno gia' puntando ad un QA".red

 output "Recupero le informazioni sui QA attivi..."
@@ -176,7 +208,7 @@ class Release
 dns_records.body[:result].each do |dns|
 if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
 output "Changing #{dns[:name]} DNS record"
-
+cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: get_alb_host(@base_stack_name_alb + env_hash[3..8]), proxied: true, ttl: 1})
 end
 end

@@ -235,11 +267,12 @@ class Release

 def dns_to_staging(env_hash)
 output "Recupero le informazioni relative al puntamento dei record DNS..."
-
+cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
+dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
 dns_records.body[:result].each do |dns|
 if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
 output "Changing #{dns[:name]} DNS record"
-
+cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1})
 end
 end
 end
@@ -406,20 +439,26 @@ class Release
 end

 def get_default_branch_name(projects)
-projects
-return projects[
+if !projects['prima'][:default_branch]
+return projects['prima'][:name]
+elsif ![nil, 'master'].include? projects['crash'][:name]
+return projects['crash'][:name]
+else
+projects.each_key do |project_key|
+return projects[project_key][:name] if projects[project_key][:name] != 'master'
+end
 end
 end

 def suite_py_branches(args_json)
-
+args = JSON.parse(args_json)

-
+args['projects'].each_key do |project|
+@projects[project] = { name: args['projects'][project]['branch'], revision: args['projects'][project]['revision'], committer: '', default_branch: false }
+end

 @projects.each_key do |project|
-
-@projects[project] = choose_branch_to_deploy(project, true)
-end
+@projects[project] = choose_branch_to_deploy(project, true) unless args['projects'].key? project
 end
 end

@@ -449,19 +488,53 @@ class Release
 `git checkout -b #{branch_name}`
 end

+branches = ''
 @git_branch = branch_name

-
+@projects.each_key do |project_key|
+if @projects[project_key][:revision]
+branches += "#{project_key}:#{@projects[project_key][:name]}:#{@projects[project_key][:revision]}:#{@projects[project_key][:default_branch]}\n"
+end
+end

-
+File.open('branch_names', 'w') { |file| file.write(branches) }

 `git add projects && \
-git add branch_names
+git add branch_names && \
 git commit -m '#{branch_name}' && \
 git push -f --set-upstream origin #{branch_name} && \
 git checkout master`
 end

+def qainit_minimal_deploy!(project)
+abort('L\'unico progetto permesso è prima') unless ['prima'].include? project
+project_definition = choose_branch_to_deploy(project)
+
+`git checkout master && git pull && git remote prune origin`
+
+default_name = project_definition[:name]
+output "Inserisci la feature a cui si riferisce il QA: [#{default_name}]".cyan
+feature_number = String(STDIN.gets.chomp)
+feature_number = default_name if feature_number.empty?
+
+if `git branch -l | grep #{feature_number}`.size > 0
+`git checkout #{feature_number} && git pull`
+else
+`git checkout -b #{feature_number}`
+end
+
+# così recupero le informazioni sul branch, poi vado a scrivere il file branch_names con una sola riga
+branch = "#{project}:#{project_definition[:name]}:#{project_definition[:revision]}:#{project_definition[:default_branch]}"
+
+File.open('branch_names', 'w') { |file| file.write(branch) }
+
+`git add projects && \
+git add branch_names && \
+git commit -m 'minimal_#{feature_number}' && \
+git push --set-upstream origin #{feature_number} && \
+git checkout master`
+end
+
 def qainit_deploy_update!
 `git checkout master && git pull`
 # cancelliamo tutti i branch che non sono più sul repo remoto
@@ -486,24 +559,29 @@ class Release
 # aggiornare il commit (revision a cui fa riferimento)

 # leggo il file branch_names / recupero i nomi dei branch / riscrivo tutto
-projects = ''
 File.open('branch_names', 'r') do |file|
 file.each_line do |line|
-
+project = line.split(':')
+@projects[project[0]] = select_branch_to_deploy(project[0], project[1])
+@projects[project[0]][:default_branch] = project[3]
 end
 end

-
-@projects[key] = select_branch_to_deploy(key, project['name'])
-@projects[key]['default_branch'] = project['default_branch']
-end
+branches = ''

-
+@projects.each_key do |project_key|
+if @projects[project_key][:revision]
+branches += "#{project_key}:#{@projects[project_key][:name]}:#{@projects[project_key][:revision]}:#{@projects[project_key][:default_branch]}"
+end
+end

-
+File.open('branch_names', 'w') { |file| file.write(branches) }

-`git
-
+if `git log -1` =~ /minimal_/
+`git commit -am 'minimal_update'`
+else
+`git commit -am 'update'`
+end
 `git push && git checkout master`
 end

@@ -578,9 +656,11 @@ class Release
 delete_stack(@base_stack_name_alb + env_hash[3..8]) if stack_exists?(@base_stack_name_alb + env_hash[3..8])
 delete_stack(@base_stack_name_alb_ws + env_hash[3..8]) if stack_exists?(@base_stack_name_alb_ws + env_hash[3..8])
 `git checkout master && git push origin --delete ${DRONE_BRANCH}`
-output "Cancello il record DNS utilizzato da Lighthouse"
-delete_lighthouse_dns()
 output "Finito!".green
+
+if @qainit
+qainit_write_output("QA #{ENV["DRONE_BRANCH"]} spento.", 'Indirizzi scritti su ')
+end
 end

 def qainit_write_output(file_message, output_message)
@@ -590,16 +670,43 @@ class Release
 output "#{output_message} #{qa_file_name}".green
 end

-def
-
-
-
+def qainit_read_config!(action)
+File.open('branch_names', 'r') do |file|
+file.each_line do |line|
+project = line.gsub("\n", '').split(':')
+if project[3] == 'true'
+@projects[project[0]] = {:name=> project[1], :revision=> project[2], :default_branch=> true}
+elsif project[3] == 'false'
+@projects[project[0]] = {:name=> project[1], :revision=> project[2], :default_branch=> false}
+end
+end
 end
-
-
+get_s3_config_files
+@qainit = true
+case action
+when 'shutdown'
+output 'Shutting down'.green
+qainit_drone_shutdown!
+when 'minimal'
+output 'Starting minimal deploy'.green
+qainit_drone_minimal_deploy!
+else
+output 'Starting standard deploy'.green
+deploy_feature!
 end
 end

+def get_s3_config_files
+# manteniamo la struttura per lanciarlo facilmente anche da locale
+`mkdir -p cloudformation/stacks/task cloudformation/stacks/route53 cloudformation/stacks/asg cloudformation/stacks/elb`
+@s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/task/db.yml', response_target: 'cloudformation/stacks/task/db.yml'})
+@s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/qa.yml', response_target: 'cloudformation/stacks/route53/qa.yml'})
+@s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/minimal_qa.yml', response_target: 'cloudformation/stacks/route53/minimal_qa.yml'})
+@s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/asg/ecs-asg-allinone.yml', response_target: 'cloudformation/stacks/asg/ecs-asg-allinone.yml'})
+@s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/ecs-cluster.yml', response_target: 'cloudformation/stacks/ecs-cluster.yml'})
+@s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/elb/alb-public-qa.yml', response_target: 'cloudformation/stacks/elb/alb-public-qa.yml'})
+end
+
 def get_deploy_id
 if @deploy_id
 @deploy_id
@@ -609,6 +716,1349 @@ class Release
|
|
609
716
|
end
|
610
717
|
end
|
611
718
|
|
719
|
+
def qainit_drone_minimal_deploy!
|
720
|
+
# tenere solo il project da deployare (l'unico project è prima)
|
721
|
+
@ami_id = get_ami_id("ecs-fleet-allinone-staging")
|
722
|
+
project = ''
|
723
|
+
@projects.each_key do |project_key|
|
724
|
+
if @projects[project_key][:revision]
|
725
|
+
project = project_key
|
726
|
+
git_checkout_version(project_key, @projects[project_key][:revision])
|
727
|
+
end
|
728
|
+
end
|
729
|
+
deploy_id = get_deploy_id
|
730
|
+
|
731
|
+
@git_branch = ENV['DRONE_BRANCH']
|
732
|
+
@dns_record_identifier = deploy_id
|
733
|
+
hostname_pattern_priority = hostname_pattern_priority()
|
734
|
+
tags = [
|
735
|
+
{
|
736
|
+
key: "qainit",
|
737
|
+
value: @git_branch
|
738
|
+
},
|
739
|
+
{
|
740
|
+
key: project,
|
741
|
+
value: @projects[project][:name]
|
742
|
+
},
|
743
|
+
{
|
744
|
+
key: "hostname_pattern_priority",
|
745
|
+
value: hostname_pattern_priority
|
746
|
+
}
|
747
|
+
]
|
748
|
+
|
749
|
+
cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
|
750
|
+
|
751
|
+
if stack_exists?(cluster_stack_name)
|
752
|
+
tags = get_stack_tags(cluster_stack_name)
|
753
|
+
hostname_pattern_priority = tags.detect do |tag|
|
754
|
+
tag.key == 'hostname_pattern_priority'
|
755
|
+
end.value
|
756
|
+
end
|
757
|
+
|
758
|
+
stack_name_alb = @base_stack_name_alb + deploy_id[0..5]
|
759
|
+
stack_name_alb_ws = @base_stack_name_alb_ws + deploy_id[0..5]
|
760
|
+
|
761
|
+
create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
|
762
|
+
wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
|
763
|
+
|
764
|
+
create_alb_stack(stack_name_alb, "http", deploy_id, 'qa-minimal') unless stack_exists?(stack_name_alb)
|
765
|
+
create_alb_stack(stack_name_alb_ws, "websocket", deploy_id, 'qa-minimal') unless stack_exists?(stack_name_alb_ws)
|
766
|
+
|
767
|
+
resp = @cf.describe_stack_resource({stack_name: cluster_stack_name, logical_resource_id: 'ECSCluster'})
|
768
|
+
@ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
|
769
|
+
|
770
|
+
asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
|
771
|
+
create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
|
772
|
+
|
773
|
+
deploy_id = get_deploy_id
|
774
|
+
create_pyxis_artifact(@projects["pyxis-npm"][:revision], deploy_id)
|
775
|
+
create_prima_artifact(@projects["prima"][:revision], @projects["prima"][:name], deploy_id, true) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"][:revision]}.tar.gz")
|
776
|
+
|
777
|
+
wait_for_stack_ready(stack_name_alb) unless stack_ready?(stack_name_alb)
|
778
|
+
wait_for_stack_ready(stack_name_alb_ws) unless stack_ready?(stack_name_alb_ws)
|
779
|
+
|
780
|
+
stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
|
781
|
+
stack_body = IO.read('cloudformation/stacks/route53/minimal_qa.yml')
|
782
|
+
parameters = [
|
783
|
+
{
|
784
|
+
parameter_key: "DnsRecordIdentifier",
|
785
|
+
parameter_value: @dns_record_identifier
|
786
|
+
},
|
787
|
+
{
|
788
|
+
parameter_key: "PrimaElbHostname",
|
789
|
+
parameter_value: get_alb_host(stack_name_alb)
|
790
|
+
},
|
791
|
+
{
|
792
|
+
parameter_key: 'CrashElbHostname',
|
793
|
+
parameter_value: get_alb_host(stack_name_alb_ws)
|
794
|
+
}
|
795
|
+
]
|
796
|
+
|
797
|
+
create_stack(stack_name_route53, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_route53)
|
798
|
+
wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
|
799
|
+
|
800
|
+
stack_name_web = "ecs-task-web-qa-#{deploy_id}"
|
801
|
+
git_checkout_version('prima', @projects["prima"][:revision])
|
802
|
+
stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
|
803
|
+
parameters = [
|
804
|
+
{
|
805
|
+
parameter_key: "Environment",
|
806
|
+
parameter_value: "qa-minimal"
|
807
|
+
},
|
808
|
+
{
|
809
|
+
parameter_key: "ReleaseVersion",
|
810
|
+
parameter_value: "#{@projects["prima"][:revision]}"
|
811
|
+
},
|
812
|
+
{
|
813
|
+
parameter_key: "TaskDesiredCount",
|
814
|
+
parameter_value: "1"
|
815
|
+
},
|
816
|
+
{
|
817
|
+
parameter_key: "ECSClusterName",
|
818
|
+
parameter_value: @ecs_cluster_name
|
819
|
+
},
|
820
|
+
{
|
821
|
+
parameter_key: "ALBShortName",
|
822
|
+
parameter_value: "web-qa-#{deploy_id}"[0..27]
|
823
|
+
},
|
824
|
+
{
|
825
|
+
parameter_key: "WebQaBaseHostname",
|
826
|
+
parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
|
827
|
+
},
|
828
|
+
{
|
829
|
+
parameter_key: "HostnamePattern",
|
830
|
+
parameter_value: "ww*-#{@dns_record_identifier}.qa.colaster.com"
|
831
|
+
},
|
832
|
+
{
|
833
|
+
parameter_key: "HostnamePatternPriority",
|
834
|
+
parameter_value: hostname_pattern_priority
|
835
|
+
},
|
836
|
+
{
|
837
|
+
parameter_key: "HostnamePatternAggregatorPriority",
|
838
|
+
parameter_value: (hostname_pattern_priority.to_i + 1).to_s
|
839
|
+
},
|
840
|
+
{
|
841
|
+
parameter_key: "EnvHash",
|
842
|
+
parameter_value: deploy_id
|
843
|
+
},
|
844
|
+
{
|
845
|
+
parameter_key: "AssangeHostname",
|
846
|
+
parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
|
847
|
+
},
|
848
|
+
{
|
849
|
+
parameter_key: "BackofficeHostname",
|
850
|
+
parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
|
851
|
+
},
|
852
|
+
{
|
853
|
+
parameter_key: "WebHostname",
|
854
|
+
parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
|
855
|
+
},
|
856
|
+
{
|
857
|
+
parameter_key: "FePrimaDomain",
|
858
|
+
parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
|
859
|
+
},
|
860
|
+
{
|
861
|
+
parameter_key: "HostnamePattern",
|
862
|
+
parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
|
863
|
+
}
|
864
|
+
]
|
865
|
+
if stack_exists?(stack_name_web)
|
866
|
+
cur_version = get_currently_deployed_version(stack_name_web)
|
867
|
+
update_stack(stack_name_web, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"][:revision])
|
868
|
+
else
|
869
|
+
create_stack(stack_name_web, stack_body, parameters, tags, @cf_role)
|
870
|
+
end
|
871
|
+
wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
|
872
|
+
update_service_defaults(stack_name_web)
|
873
|
+
prima_hostname = get_route53_hostname(stack_name_web)
|
874
|
+
|
875
|
+
projects_text = "
|
876
|
+
> Prima url: https://#{prima_hostname}
|
877
|
+
> SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
|
878
|
+
|
879
|
+
output projects_text.cyan
|
880
|
+
output "Deploy effettuato, everything is awesome!\n".green
|
881
|
+
if @qainit
|
882
|
+
qainit_write_output(projects_text, 'Indirizzi scritti su ')
|
883
|
+
end
|
884
|
+
end
|
885
|
+
|
886
|
+
def deploy_feature!
|
887
|
+
`git pull && git submodule init && git submodule update`
|
888
|
+
@ami_id = get_ami_id("ecs-fleet-allinone-staging")
|
889
|
+
deploy_id = get_deploy_id
|
890
|
+
stack_name_alb = 'ecs-alb-http-public-qa-' + deploy_id[0..5]
|
891
|
+
stack_name_alb_ws = 'ecs-alb-ws-public-qa-' + deploy_id[0..5]
|
892
|
+
unless @qainit
|
893
|
+
@projects.each_key do |project_key|
|
894
|
+
if @projects[project_key][:revision]
|
895
|
+
git_checkout_version(project_key, @projects[project_key][:revision])
|
896
|
+
end
|
897
|
+
end
|
898
|
+
end
|
899
|
+
@dns_record_identifier = deploy_id
|
900
|
+
@git_branch = ENV['DRONE_BRANCH']
|
901
|
+
hostname_pattern_priority = hostname_pattern_priority()
|
902
|
+
tags = [
|
903
|
+
{
|
904
|
+
key: "qainit",
|
905
|
+
value: @git_branch
|
906
|
+
},
|
907
|
+
{
|
908
|
+
key: "hostname_pattern_priority",
|
909
|
+
value: hostname_pattern_priority
|
910
|
+
}
|
911
|
+
]
|
912
|
+
@projects.each do |key, value|
|
913
|
+
case key.to_s
|
914
|
+
when 'crash'
|
915
|
+
tags << { key: 'crash', value: @projects['crash'][:name] } if deploy_crash?
|
916
|
+
when 'starsky', 'hutch'
|
917
|
+
tags << { key: key.to_s, value: @projects[key.to_s][:name] } if deploy_starsky_hutch?
|
918
|
+
else
|
919
|
+
tags << { key: key, value: value[:name] }
|
920
|
+
end
|
921
|
+
end
|
922
|
+
|
923
|
+
cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
|
924
|
+
|
925
|
+
if stack_exists?(cluster_stack_name)
|
926
|
+
tags = get_stack_tags(cluster_stack_name)
|
927
|
+
hostname_pattern_priority = tags.detect do |tag|
|
928
|
+
tag.key == 'hostname_pattern_priority'
|
929
|
+
end.value
|
930
|
+
end
|
931
|
+
|
932
|
+
create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
|
933
|
+
wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
|
934
|
+
|
935
|
+
create_alb_stack(stack_name_alb, "http", deploy_id) unless stack_exists?(stack_name_alb)
|
936
|
+
create_alb_stack(stack_name_alb_ws, "websocket", deploy_id) unless stack_exists?(stack_name_alb_ws)
|
937
|
+
|
938
|
+
resp = @cf.describe_stack_resource({stack_name: cluster_stack_name, logical_resource_id: 'ECSCluster'})
|
939
|
+
@ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
|
940
|
+
|
941
|
+
asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
|
942
|
+
create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
|
943
|
+
|
944
|
+
stack_name_db = "ecs-task-db-qa-#{deploy_id}"
|
945
|
+
stack_body = IO.read('cloudformation/stacks/task/db.yml')
|
946
|
+
parameters = [
|
947
|
+
{
|
948
|
+
parameter_key: "Environment",
|
949
|
+
parameter_value: "qa"
|
950
|
+
},
|
951
|
+
{
|
952
|
+
parameter_key: "ECSClusterName",
|
953
|
+
parameter_value: @ecs_cluster_name
|
954
|
+
}
|
955
|
+
]
|
956
|
+
create_stack(stack_name_db, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_db) # creazione asincrona stack dei db (~4 min)
|
957
|
+
|
958
|
+
output "check pyxis \n".yellow
|
959
|
+
|
960
|
+
create_pyxis_artifact(@projects["pyxis-npm"][:revision], deploy_id) unless @projects["pyxis-npm"].nil? # deve essere creato prima di quello di prima, per avere la versione
|
961
|
+
create_prima_artifact(@projects["prima"][:revision], @projects["prima"][:name], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"][:revision]}.tar.gz")
|
962
|
+
# l'artefatto di prima viene creato sempre (puntamenti all'ambiente compilati nel js) e richiede molto più di 4 minuti
|
963
|
+
wait_for_stack_ready(stack_name_db) unless stack_ready?(stack_name_db) # dovrebbe essere istantaneo
|
964
|
+
db_task = ''
|
965
|
+
db_task = import_dbs(ec2_ip_address(asg_stack_name)) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # import asincrono dei dati
|
966
|
+
|
967
|
+
create_crash_artifact(@projects['crash'][:revision], deploy_id) unless !deploy_crash? || artifact_exists?('prima-artifacts-encrypted', "microservices/crash/#{@projects['crash'][:revision]}-qa.tar.gz")
|
968
|
+
create_urania_artifact(@projects["urania"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/urania/#{@projects["urania"][:revision]}-qa.tar.gz")
|
969
|
+
create_roger_artifact(@projects["roger"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/roger/#{@projects["roger"][:revision]}-qa.tar.gz")
|
970
|
+
create_ermes_artifact(@projects["ermes"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/ermes/#{@projects["ermes"][:revision]}-qa.tar.gz")
|
971
|
+
create_bburago_artifact(@projects["bburago"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/bburago/#{@projects["bburago"][:revision]}-qa.tar.gz")
|
972
|
+
create_hal9000_artifact(@projects["hal9000"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/hal9000/#{@projects["hal9000"][:revision]}-qa.tar.gz")
|
973
|
+
create_rachele_artifact(@projects["rachele"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rachele/#{@projects["rachele"][:revision]}-qa.tar.gz")
|
974
|
+
create_fidaty_artifact(@projects["fidaty"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/fidaty/#{@projects["fidaty"][:revision]}-qa.tar.gz")
|
975
|
+
create_peano_artifact(@projects["peano"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/peano/#{@projects["peano"][:revision]}-qa.tar.gz")
|
976
|
+
create_rogoreport_artifact(@projects["rogoreport"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rogoreport/rogoreport-#{@projects["rogoreport"][:revision]}-qa.tar.gz")
|
977
|
+
create_assange_artifact(@projects["assange"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/assange/#{@projects["assange"][:revision]}-qa.tar.gz")
|
978
|
+
create_borat_artifact(@projects["borat"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/borat/#{@projects["borat"][:revision]}-qa.tar.gz")
|
979
|
+
create_activia_artifact(@projects["activia"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/activia/#{@projects["activia"][:revision]}-qa.tar.gz")
|
980
|
+
create_leftorium_artifact(@projects["leftorium"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/leftorium/#{@projects["leftorium"][:revision]}-qa.tar.gz")
|
981
|
+
create_skynet_artifact(@projects["skynet"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/skynet/#{@projects["skynet"][:revision]}-qa.tar.gz")
|
982
|
+
create_starsky_artifact(@projects["starsky"][:revision]) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/starsky/#{@projects["starsky"][:revision]}-qa.tar.gz")
|
983
|
+
create_hutch_artifact(@projects["hutch"][:revision]) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/hutch/#{@projects["hutch"][:revision]}-qa.tar.gz")
|
984
|
+
|
985
|
+
wait_for_db_import(db_task) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # dovrebbe essere istantaneo
|
986
|
+
|
987
|
+
import_redis_crash(ec2_ip_address(asg_stack_name)) if deploy_crash? && !stack_exists?("ecs-task-web-qa-#{deploy_id}")
|
988
|
+
|
989
|
+
wait_for_stack_ready(stack_name_alb) unless stack_ready?(stack_name_alb)
|
990
|
+
wait_for_stack_ready(stack_name_alb_ws) unless stack_ready?(stack_name_alb_ws)
|
991
|
+
|
992
|
+
stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
|
993
|
+
stack_body = IO.read('cloudformation/stacks/route53/qa.yml')
|
994
|
+
parameters = [
|
995
|
+
{
|
996
|
+
parameter_key: "DnsRecordIdentifier",
|
997
|
+
parameter_value: @dns_record_identifier
|
998
|
+
},
|
999
|
+
{
|
1000
|
+
parameter_key: "PrimaElbHostname",
|
1001
|
+
parameter_value: get_alb_host(stack_name_alb)
|
1002
|
+
},
|
1003
|
+
{
|
1004
|
+
parameter_key: "UraniaIp",
|
1005
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1006
|
+
},
|
1007
|
+
{
|
1008
|
+
parameter_key: "BburagoIp",
|
1009
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1010
|
+
},
|
1011
|
+
{
|
1012
|
+
parameter_key: "Hal9000Ip",
|
1013
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1014
|
+
},
|
1015
|
+
{
|
1016
|
+
parameter_key: "FidatyIp",
|
1017
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1018
|
+
},
|
1019
|
+
{
|
1020
|
+
parameter_key: "PeanoIp",
|
1021
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1022
|
+
},
|
1023
|
+
{
|
1024
|
+
parameter_key: "ErmesIp",
|
1025
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1026
|
+
},
|
1027
|
+
{
|
1028
|
+
parameter_key: "ActiviaIp",
|
1029
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1030
|
+
},
|
1031
|
+
{
|
1032
|
+
parameter_key: "SkynetIp",
|
1033
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1034
|
+
},
|
1035
|
+
{
|
1036
|
+
parameter_key: "RogerIp",
|
1037
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1038
|
+
},
|
1039
|
+
{
|
1040
|
+
parameter_key: "LeftoriumIp",
|
1041
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1042
|
+
},
|
1043
|
+
{
|
1044
|
+
parameter_key: "RacheleIp",
|
1045
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1046
|
+
},
|
1047
|
+
{
|
1048
|
+
parameter_key: "RedisIp",
|
1049
|
+
parameter_value: ec2_ip_address(asg_stack_name)
|
1050
|
+
},
|
1051
|
+
{
|
1052
|
+
parameter_key: "AssangeElbHostname",
|
1053
|
+
parameter_value: get_alb_host(stack_name_alb)
|
1054
|
+
},
|
1055
|
+
{
|
1056
|
+
parameter_key: "BoratElbHostname",
|
1057
|
+
parameter_value: get_alb_host(stack_name_alb_ws)
|
1058
|
+
},
|
1059
|
+
{
|
1060
|
+
parameter_key: 'CrashElbHostname',
|
1061
|
+
parameter_value: get_alb_host(stack_name_alb_ws)
|
1062
|
+
},
|
1063
|
+
{
|
1064
|
+
parameter_key: 'StarskyElbHostname',
|
1065
|
+
parameter_value: get_alb_host(stack_name_alb)
|
1066
|
+
},
|
1067
|
+
{
|
1068
|
+
parameter_key: 'HutchElbHostname',
|
1069
|
+
parameter_value: get_alb_host(stack_name_alb)
|
1070
|
+
}
|
1071
|
+
]
|
1072
|
+
|
1073
|
+
create_stack(stack_name_route53, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_route53)
|
1074
|
+
wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
|
1075
|
+
|
1076
|
+
stack_name_skynet = "ecs-task-skynet-qa-#{deploy_id}"
|
1077
|
+
git_checkout_version('skynet', @projects["skynet"][:revision])
|
1078
|
+
stack_body = File.read('projects/skynet/deploy/task.yml')
|
1079
|
+
parameters = [
|
1080
|
+
{
|
1081
|
+
parameter_key: "Environment",
|
1082
|
+
parameter_value: "qa"
|
1083
|
+
},
|
1084
|
+
{
|
1085
|
+
parameter_key: "ReleaseVersion",
|
1086
|
+
parameter_value: @projects["skynet"][:revision]
|
1087
|
+
},
|
1088
|
+
{
|
1089
|
+
parameter_key: "TaskDesiredCount",
|
1090
|
+
parameter_value: "1"
|
1091
|
+
},
|
1092
|
+
{
|
1093
|
+
parameter_key: "ECSClusterName",
|
1094
|
+
parameter_value: @ecs_cluster_name
|
1095
|
+
},
|
1096
|
+
{
|
1097
|
+
parameter_key: "HostnamePattern",
|
1098
|
+
parameter_value: "skynet-#{@dns_record_identifier}.qa.colaster.com"
|
1099
|
+
},
|
1100
|
+
{
|
1101
|
+
parameter_key: "HostnamePatternPriority",
|
1102
|
+
parameter_value: hostname_pattern_priority
|
1103
|
+
}
|
1104
|
+
]
|
1105
|
+
if stack_exists?(stack_name_skynet)
|
1106
|
+
cur_version = get_currently_deployed_version(stack_name_skynet)
|
1107
|
+
update_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["skynet"][:revision])
|
1108
|
+
else
|
1109
|
+
create_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role)
|
1110
|
+
end
|
1111
|
+
|
1112
|
+
stack_name_urania = "ecs-task-urania-qa-#{deploy_id}"
|
1113
|
+
git_checkout_version('urania', @projects["urania"][:revision])
|
1114
|
+
stack_body = File.read('projects/urania/deploy/task.yml')
|
1115
|
+
parameters = [
|
1116
|
+
{
|
1117
|
+
parameter_key: "Environment",
|
1118
|
+
parameter_value: "qa"
|
1119
|
+
},
|
1120
|
+
{
|
1121
|
+
parameter_key: "ReleaseVersion",
|
1122
|
+
parameter_value: @projects["urania"][:revision]
|
1123
|
+
},
|
1124
|
+
{
|
1125
|
+
parameter_key: "TaskDesiredCount",
|
1126
|
+
parameter_value: "1"
|
1127
|
+
},
|
1128
|
+
{
|
1129
|
+
parameter_key: "ECSClusterName",
|
1130
|
+
parameter_value: @ecs_cluster_name
|
1131
|
+
},
|
1132
|
+
{
|
1133
|
+
parameter_key: "HostnamePattern",
|
1134
|
+
parameter_value: "urania-#{@dns_record_identifier}.qa.colaster.com"
|
1135
|
+
},
|
1136
|
+
{
|
1137
|
+
parameter_key: "HostnamePatternPriority",
|
1138
|
+
parameter_value: hostname_pattern_priority
|
1139
|
+
}
|
1140
|
+
]
|
1141
|
+
if stack_exists?(stack_name_urania)
|
1142
|
+
cur_version = get_currently_deployed_version(stack_name_urania)
|
1143
|
+
update_stack(stack_name_urania, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["urania"][:revision])
|
1144
|
+
else
|
1145
|
+
create_stack(stack_name_urania, stack_body, parameters, tags, @cf_role)
|
1146
|
+
end
|
1147
|
+
|
1148
|
+
stack_name_ermes = "ecs-task-ermes-qa-#{deploy_id}"
|
1149
|
+
git_checkout_version('ermes', @projects["ermes"][:revision])
|
1150
|
+
stack_body = File.read('projects/ermes/deploy/task.yml')
|
1151
|
+
parameters = [
|
1152
|
+
{
|
1153
|
+
parameter_key: "Environment",
|
1154
|
+
parameter_value: "qa"
|
1155
|
+
},
|
1156
|
+
{
|
1157
|
+
parameter_key: "ReleaseVersion",
|
1158
|
+
parameter_value: "#{@projects['ermes'][:revision]}"
|
1159
|
+
},
|
1160
|
+
{
|
1161
|
+
parameter_key: "TaskDesiredCount",
|
1162
|
+
parameter_value: "1"
|
1163
|
+
},
|
1164
|
+
{
|
1165
|
+
parameter_key: "ECSClusterName",
|
1166
|
+
parameter_value: @ecs_cluster_name
|
1167
|
+
},
|
1168
|
+
{
|
1169
|
+
parameter_key: "HostnamePattern",
|
1170
|
+
parameter_value: "ermes-#{@dns_record_identifier}.qa.colaster.com"
|
1171
|
+
},
|
1172
|
+
{
|
1173
|
+
parameter_key: "HostnamePatternPriority",
|
1174
|
+
parameter_value: hostname_pattern_priority
|
1175
|
+
},
|
1176
|
+
{
|
1177
|
+
parameter_key: "WebHost",
|
1178
|
+
parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
|
1179
|
+
},
|
1180
|
+
{
|
1181
|
+
parameter_key: "PeanoHost",
|
1182
|
+
parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
|
1183
|
+
}
|
1184
|
+
]
|
1185
|
+
if stack_exists?(stack_name_ermes)
|
1186
|
+
cur_version = get_currently_deployed_version(stack_name_ermes)
|
1187
|
+
update_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["ermes"][:revision])
|
1188
|
+
else
|
1189
|
+
create_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role)
|
1190
|
+
end
|
1191
|
+
|
1192
|
+
stack_name_bburago = "ecs-task-bburago-qa-#{deploy_id}"
|
1193
|
+
git_checkout_version('bburago', @projects["bburago"][:revision])
|
1194
|
+
stack_body = File.read('projects/bburago/deploy/task.yml')
|
1195
|
+
parameters = [
|
1196
|
+
{
|
1197
|
+
parameter_key: "Environment",
|
1198
|
+
parameter_value: "qa"
|
1199
|
+
},
|
1200
|
+
{
|
1201
|
+
parameter_key: "ReleaseVersion",
|
1202
|
+
parameter_value: @projects["bburago"][:revision]
|
1203
|
+
},
|
1204
|
+
{
|
1205
|
+
parameter_key: "ECSClusterName",
|
1206
|
+
parameter_value: @ecs_cluster_name
|
1207
|
+
},
|
1208
|
+
{
|
1209
|
+
parameter_key: "TaskDesiredCount",
|
1210
|
+
parameter_value: "1"
|
1211
|
+
},
|
1212
|
+
{
|
1213
|
+
parameter_key: "HostnamePattern",
|
1214
|
+
parameter_value: "bburago-#{@dns_record_identifier}.qa.colaster.com"
|
1215
|
+
},
|
1216
|
+
{
|
1217
|
+
parameter_key: "HostnamePatternPriority",
|
1218
|
+
parameter_value: hostname_pattern_priority
|
1219
|
+
}
|
1220
|
+
]
|
1221
|
+
if stack_exists?(stack_name_bburago)
|
1222
|
+
cur_version = get_currently_deployed_version(stack_name_bburago)
|
1223
|
+
update_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["bburago"][:revision])
|
1224
|
+
else
|
1225
|
+
create_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role)
|
1226
|
+
end
|
1227
|
+
|
1228
|
+
stack_name_hal9000 = "ecs-task-hal9000-qa-#{deploy_id}"
|
1229
|
+
git_checkout_version('hal9000', @projects["hal9000"][:revision])
|
1230
|
+
stack_body = File.read('projects/hal9000/deploy/task.yml')
|
1231
|
+
parameters = [
|
1232
|
+
{
|
1233
|
+
parameter_key: "Environment",
|
1234
|
+
parameter_value: "qa"
|
1235
|
+
},
|
1236
|
+
{
|
1237
|
+
parameter_key: "ReleaseVersion",
|
1238
|
+
parameter_value: @projects["hal9000"][:revision]
|
1239
|
+
},
|
1240
|
+
{
|
1241
|
+
parameter_key: "ECSClusterName",
|
1242
|
+
parameter_value: @ecs_cluster_name
|
1243
|
+
},
|
1244
|
+
{
|
1245
|
+
parameter_key: "TaskDesiredCount",
|
1246
|
+
parameter_value: "1"
|
1247
|
+
},
|
1248
|
+
{
|
1249
|
+
parameter_key: "HostnamePattern",
|
1250
|
+
parameter_value: "hal9000-#{@dns_record_identifier}.qa.colaster.com"
|
1251
|
+
},
|
1252
|
+
{
|
1253
|
+
parameter_key: "HostnamePatternPriority",
|
1254
|
+
parameter_value: hostname_pattern_priority
|
1255
|
+
}
|
1256
|
+
]
|
1257
|
+
if stack_exists?(stack_name_hal9000)
|
1258
|
+
cur_version = get_currently_deployed_version(stack_name_hal9000)
|
1259
|
+
update_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hal9000"][:revision])
|
1260
|
+
else
|
1261
|
+
create_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role)
|
1262
|
+
end
|
1263
|
+
|
1264
|
+
stack_name_fidaty = "ecs-task-fidaty-qa-#{deploy_id}"
|
1265
|
+
git_checkout_version('fidaty', @projects["fidaty"][:revision])
|
1266
|
+
stack_body = File.read('projects/fidaty/deploy/task.yml')
|
1267
|
+
parameters = [
|
1268
|
+
{
|
1269
|
+
parameter_key: "Environment",
|
1270
|
+
parameter_value: "qa"
|
1271
|
+
},
|
1272
|
+
{
|
1273
|
+
parameter_key: "ReleaseVersion",
|
1274
|
+
parameter_value: "#{@projects["fidaty"][:revision]}"
|
1275
|
+
},
|
1276
|
+
{
|
1277
|
+
parameter_key: "ECSClusterName",
|
1278
|
+
parameter_value: @ecs_cluster_name
|
1279
|
+
},
|
1280
|
+
{
|
1281
|
+
parameter_key: "TaskDesiredCount",
|
1282
|
+
parameter_value: "1"
|
1283
|
+
},
|
1284
|
+
{
|
1285
|
+
parameter_key: "HostnamePattern",
|
1286
|
+
parameter_value: "fidaty-#{@dns_record_identifier}.qa.colaster.com"
|
1287
|
+
},
|
1288
|
+
{
|
1289
|
+
parameter_key: "HostnamePatternPriority",
|
1290
|
+
parameter_value: hostname_pattern_priority
|
1291
|
+
},
|
1292
|
+
{
|
1293
|
+
parameter_key: "PeanoHost",
|
1294
|
+
parameter_value: get_route53_hostname("ecs-task-peano-qa-notneeded")
|
1295
|
+
}
|
1296
|
+
]
|
1297
|
+
if stack_exists?(stack_name_fidaty)
|
1298
|
+
cur_version = get_currently_deployed_version(stack_name_fidaty)
|
1299
|
+
update_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["fidaty"][:revision])
|
1300
|
+
else
|
1301
|
+
create_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role)
|
1302
|
+
end
|
1303
|
+
|
1304
|
+
stack_name_peano = "ecs-task-peano-qa-#{deploy_id}"
|
1305
|
+
git_checkout_version('peano', @projects["peano"][:revision])
|
1306
|
+
stack_body = File.read('projects/peano/deploy/task.yml')
|
1307
|
+
parameters = [
|
1308
|
+
{
|
1309
|
+
parameter_key: "Environment",
|
1310
|
+
parameter_value: "qa"
|
1311
|
+
},
|
1312
|
+
{
|
1313
|
+
parameter_key: "ReleaseVersion",
|
1314
|
+
parameter_value: "#{@projects['peano'][:revision]}"
|
1315
|
+
},
|
1316
|
+
{
|
1317
|
+
parameter_key: "ECSClusterName",
|
1318
|
+
parameter_value: @ecs_cluster_name
|
1319
|
+
},
|
1320
|
+
{
|
1321
|
+
parameter_key: "TaskDesiredCount",
|
1322
|
+
parameter_value: "1"
|
1323
|
+
},
|
1324
|
+
{
|
1325
|
+
parameter_key: "HostnamePattern",
|
1326
|
+
parameter_value: "peano-#{@dns_record_identifier}.qa.colaster.com"
|
1327
|
+
},
|
1328
|
+
{
|
1329
|
+
parameter_key: "HostnamePatternPriority",
|
1330
|
+
parameter_value: hostname_pattern_priority
|
1331
|
+
},
|
1332
|
+
{
|
1333
|
+
parameter_key: "WebHost",
|
1334
|
+
parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
|
1335
|
+
},
|
1336
|
+
{
|
1337
|
+
parameter_key: "AssangeHost",
|
1338
|
+
parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
|
1339
|
+
}
|
1340
|
+
]
|
1341
|
+
if stack_exists?(stack_name_peano)
|
1342
|
+
cur_version = get_currently_deployed_version(stack_name_peano)
|
1343
|
+
update_stack(stack_name_peano, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["peano"][:revision])
|
1344
|
+
else
|
1345
|
+
create_stack(stack_name_peano, stack_body, parameters, tags, @cf_role)
|
1346
|
+
end
|
1347
|
+
|
1348
|
+
stack_name_rogoreport = "ecs-task-rogoreport-qa-#{deploy_id}"
|
1349
|
+
git_checkout_version('rogoreport', @projects["rogoreport"][:revision])
|
1350
|
+
stack_body = IO.read('projects/rogoreport/deploy/task.yml')
|
1351
|
+
parameters = [
|
1352
|
+
{
|
1353
|
+
parameter_key: "Environment",
|
1354
|
+
parameter_value: "qa"
|
1355
|
+
},
|
1356
|
+
{
|
1357
|
+
parameter_key: "ReleaseVersion",
|
1358
|
+
parameter_value: "#{@projects["rogoreport"][:revision]}"
|
1359
|
+
},
|
1360
|
+
{
|
1361
|
+
parameter_key: "ReleaseName",
|
1362
|
+
parameter_value: "rogoreport"
|
1363
|
+
},
|
1364
|
+
{
|
1365
|
+
parameter_key: "ECSClusterName",
|
1366
|
+
parameter_value: @ecs_cluster_name
|
1367
|
+
}
|
1368
|
+
]
|
1369
|
+
if stack_exists?(stack_name_rogoreport)
|
1370
|
+
cur_version = get_currently_deployed_version(stack_name_rogoreport)
|
1371
|
+
update_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["rogoreport"][:revision])
|
1372
|
+
else
|
1373
|
+
create_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role)
|
1374
|
+
end
|
1375
|
+
|
1376
|
+
stack_name_assange = "ecs-task-assange-qa-#{deploy_id}"
|
1377
|
+
git_checkout_version('assange', @projects["assange"][:revision])
|
1378
|
+
stack_body = IO.read('projects/assange/deploy/task.yml')
|
1379
|
+
parameters = [
|
1380
|
+
{
|
1381
|
+
parameter_key: "Environment",
|
1382
|
+
parameter_value: "qa"
|
1383
|
+
},
|
1384
|
+
{
|
1385
|
+
parameter_key: "ReleaseVersion",
|
1386
|
+
parameter_value: "#{@projects["assange"][:revision]}"
|
1387
|
+
},
|
1388
|
+
{
|
1389
|
+
parameter_key: "ECSClusterName",
|
1390
|
+
parameter_value: @ecs_cluster_name
|
1391
|
+
},
|
1392
|
+
{
|
1393
|
+
parameter_key: "TaskDesiredCount",
|
1394
|
+
parameter_value: "1"
|
1395
|
+
},
|
1396
|
+
{
|
1397
|
+
parameter_key: "ALBShortName",
|
1398
|
+
parameter_value: "assange-qa-#{deploy_id}"[0..27]
|
1399
|
+
},
|
1400
|
+
{
|
1401
|
+
parameter_key: "HostnamePattern",
|
1402
|
+
parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
|
1403
|
+
},
|
1404
|
+
{
|
1405
|
+
parameter_key: "HostnamePatternPriority",
|
1406
|
+
parameter_value: (hostname_pattern_priority.to_i + 20).to_s
|
1407
|
+
},
|
1408
|
+
{
|
1409
|
+
parameter_key: "EnvHash",
|
1410
|
+
parameter_value: deploy_id
|
1411
|
+
},
|
1412
|
+
{
|
1413
|
+
parameter_key: "WebHost",
|
1414
|
+
parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
|
1415
|
+
},
|
1416
|
+
{
|
1417
|
+
parameter_key: "AssangeHost",
|
1418
|
+
parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
|
1419
|
+
}
|
1420
|
+
]
|
1421
|
+
if stack_exists?(stack_name_assange)
|
1422
|
+
cur_version = get_currently_deployed_version(stack_name_assange)
|
1423
|
+
update_stack(stack_name_assange, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["assange"][:revision])
|
1424
|
+
else
|
1425
|
+
create_stack(stack_name_assange, stack_body, parameters, tags, @cf_role)
|
1426
|
+
end
|
1427
|
+
|
1428
|
+
stack_name_leftorium = "ecs-task-leftorium-qa-#{deploy_id}"
|
1429
|
+
git_checkout_version('leftorium', @projects["leftorium"][:revision])
|
1430
|
+
stack_body = File.read('projects/leftorium/deploy/task.yml')
|
1431
|
+
parameters = [
|
1432
|
+
{
|
1433
|
+
parameter_key: "Environment",
|
1434
|
+
parameter_value: "qa"
|
1435
|
+
},
|
1436
|
+
{
|
1437
|
+
parameter_key: "ReleaseVersion",
|
1438
|
+
parameter_value: "#{@projects["leftorium"][:revision]}"
|
1439
|
+
},
|
1440
|
+
{
|
1441
|
+
parameter_key: "ECSClusterName",
|
1442
|
+
parameter_value: @ecs_cluster_name
|
1443
|
+
},
|
1444
|
+
{
|
1445
|
+
parameter_key: "TaskDesiredCount",
|
1446
|
+
parameter_value: "1"
|
1447
|
+
},
|
1448
|
+
{
|
1449
|
+
parameter_key: "HostnamePattern",
|
1450
|
+
parameter_value: "leftorium-#{@dns_record_identifier}.qa.colaster.com"
|
1451
|
+
},
|
1452
|
+
{
|
1453
|
+
parameter_key: "HostnamePatternPriority",
|
1454
|
+
parameter_value: hostname_pattern_priority
|
1455
|
+
}
|
1456
|
+
]
|
1457
|
+
if stack_exists?(stack_name_leftorium)
|
1458
|
+
cur_version = get_currently_deployed_version(stack_name_leftorium)
|
1459
|
+
update_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["leftorium"][:revision])
|
1460
|
+
else
|
1461
|
+
create_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role)
|
1462
|
+
end
|
1463
|
+
|
1464
|
+
stack_name_rachele = "ecs-task-rachele-qa-#{deploy_id}"
|
1465
|
+
git_checkout_version('rachele', @projects["rachele"][:revision])
|
1466
|
+
stack_body = File.read('projects/rachele/deploy/task.yml')
|
1467
|
+
parameters = [
|
1468
|
+
{
|
1469
|
+
parameter_key: "Environment",
|
1470
|
+
parameter_value: "qa"
|
1471
|
+
},
|
1472
|
+
{
|
1473
|
+
parameter_key: "ReleaseVersion",
|
1474
|
+
parameter_value: "#{@projects["rachele"][:revision]}"
|
1475
|
+
},
|
1476
|
+
{
|
1477
|
+
parameter_key: "ECSClusterName",
|
1478
|
+
parameter_value: @ecs_cluster_name
|
1479
|
+
},
|
1480
|
+
{
|
1481
|
+
parameter_key: "TaskDesiredCount",
|
1482
|
+
parameter_value: "1"
|
1483
|
+
},
|
1484
|
+
{
|
1485
|
+
parameter_key: "WebHost",
|
1486
|
+
parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
|
1487
|
+
},
|
1488
|
+
{
|
1489
|
+
parameter_key: "HostnamePattern",
|
1490
|
+
parameter_value: "rachele-#{@dns_record_identifier}.qa.colaster.com"
|
1491
|
+
},
|
1492
|
+
{
|
1493
|
+
parameter_key: "HostnamePatternPriority",
|
1494
|
+
parameter_value: hostname_pattern_priority
|
1495
|
+
}
|
1496
|
+
]
|
1497
|
+
if stack_exists?(stack_name_rachele)
|
1498
|
+
cur_version = get_currently_deployed_version(stack_name_rachele)
|
1499
|
+
update_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["rachele"][:revision])
|
1500
|
+
else
|
1501
|
+
create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
|
1502
|
+
end
|
1503
|
+
|
1504
|
+
stack_name_borat = "ecs-task-borat-qa-#{deploy_id}"
|
1505
|
+
git_checkout_version('borat', @projects["borat"][:revision])
|
1506
|
+
stack_body = IO.read('projects/borat/deploy/task.yml')
|
1507
|
+
parameters = [
|
1508
|
+
{
|
1509
|
+
parameter_key: "Environment",
|
1510
|
+
parameter_value: "qa"
|
1511
|
+
},
|
1512
|
+
{
|
1513
|
+
parameter_key: "ReleaseVersion",
|
1514
|
+
parameter_value: "#{@projects["borat"][:revision]}"
|
1515
|
+
},
|
1516
|
+
{
|
1517
|
+
parameter_key: "ECSClusterName",
|
1518
|
+
parameter_value: @ecs_cluster_name
|
1519
|
+
},
|
1520
|
+
{
|
1521
|
+
parameter_key: "TaskDesiredCount",
|
1522
|
+
parameter_value: "1"
|
1523
|
+
},
|
1524
|
+
{
|
1525
|
+
parameter_key: "ALBShortName",
|
1526
|
+
parameter_value: "borat-qa-#{deploy_id}"[0..27]
|
1527
|
+
},
|
1528
|
+
{
|
1529
|
+
parameter_key: "HostnamePattern",
|
1530
|
+
parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
|
1531
|
+
},
|
1532
|
+
{
|
1533
|
+
parameter_key: "HostnamePatternPriority",
|
1534
|
+
parameter_value: (hostname_pattern_priority.to_i + 30).to_s
|
1535
|
+
},
|
1536
|
+
{
|
1537
|
+
parameter_key: "EnvHash",
|
1538
|
+
parameter_value: deploy_id
|
1539
|
+
},
|
1540
|
+
{
|
1541
|
+
parameter_key: "WsEndpoint",
|
1542
|
+
parameter_value: "wss://backoffice-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
|
1543
|
+
},
|
1544
|
+
{
|
1545
|
+
parameter_key: "GraphqlEndpoint",
|
1546
|
+
parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql"
|
1547
|
+
},
|
1548
|
+
{
|
1549
|
+
parameter_key: "AuthEndpoint",
|
1550
|
+
parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/auth"
|
1551
|
+
},
|
1552
|
+
{
|
1553
|
+
parameter_key: "FrontendEndpoint",
|
1554
|
+
parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
|
1555
|
+
}
|
1556
|
+
]
|
1557
|
+
if stack_exists?(stack_name_borat)
|
1558
|
+
cur_version = get_currently_deployed_version(stack_name_borat)
|
1559
|
+
update_stack(stack_name_borat, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["borat"][:revision])
|
1560
|
+
else
|
1561
|
+
create_stack(stack_name_borat, stack_body, parameters, tags, @cf_role)
|
1562
|
+
end
|
1563
|
+
|
1564
|
+
if deploy_crash?
|
1565
|
+
git_checkout_version('crash', @projects['crash'][:revision])
|
1566
|
+
stack_name_crash = "ecs-task-crash-qa-#{deploy_id}"
|
1567
|
+
stack_body = IO.read('projects/crash/deploy/task.yml')
|
1568
|
+
parameters = [
|
1569
|
+
{
|
1570
|
+
parameter_key: 'Environment',
|
1571
|
+
parameter_value: 'qa'
|
1572
|
+
},
|
1573
|
+
{
|
1574
|
+
parameter_key: 'ReleaseVersion',
|
1575
|
+
parameter_value: "#{@projects['crash'][:revision]}"
|
1576
|
+
},
|
1577
|
+
{
|
1578
|
+
parameter_key: 'TaskDesiredCount',
|
1579
|
+
parameter_value: '1'
|
1580
|
+
},
|
1581
|
+
{
|
1582
|
+
parameter_key: 'ECSClusterName',
|
1583
|
+
parameter_value: @ecs_cluster_name
|
1584
|
+
},
|
1585
|
+
{
|
1586
|
+
parameter_key: 'ALBShortName',
|
1587
|
+
parameter_value: "crash-qa-#{deploy_id}"[0..27]
|
1588
|
+
},
|
1589
|
+
{
|
1590
|
+
parameter_key: 'HostnamePattern',
|
1591
|
+
parameter_value: "crash-#{@dns_record_identifier}.qa.colaster.com"
|
1592
|
+
},
|
1593
|
+
{
|
1594
|
+
parameter_key: 'HostnamePatternPriority',
|
1595
|
+
parameter_value: (hostname_pattern_priority.to_i + 10).to_s
|
1596
|
+
},
|
1597
|
+
{
|
1598
|
+
parameter_key: "EnvHash",
|
1599
|
+
parameter_value: deploy_id
|
1600
|
+
},
|
1601
|
+
{
|
1602
|
+
parameter_key: "WsEndpoint",
|
1603
|
+
parameter_value: "wss://crash-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
|
1604
|
+
},
|
1605
|
+
{
|
1606
|
+
parameter_key: "GraphqlEndpoint",
|
1607
|
+
parameter_value: "https://crash-#{@dns_record_identifier}.qa.colaster.com/graphql"
|
1608
|
+
},
|
1609
|
+
{
|
1610
|
+
parameter_key: "AuthDomain",
|
1611
|
+
parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
|
1612
|
+
},
|
1613
|
+
]
|
1614
|
+
if stack_exists?(stack_name_crash)
|
1615
|
+
cur_version = get_currently_deployed_version(stack_name_crash)
|
1616
|
+
update_stack(stack_name_crash, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["crash"][:revision])
|
1617
|
+
else
|
1618
|
+
create_stack(stack_name_crash, stack_body, parameters, tags, @cf_role)
|
1619
|
+
end
|
1620
|
+
end
|
1621
|
+
|
1622
|
+
+      if deploy_starsky_hutch?
+        stack_name_starsky = "ecs-task-starsky-qa-#{deploy_id}"
+        git_checkout_version('starsky', @projects["starsky"][:revision])
+        stack_body = IO.read('projects/starsky/deploy/task.yml')
+        parameters = [
+          {
+            parameter_key: "Environment",
+            parameter_value: "qa"
+          },
+          {
+            parameter_key: "ReleaseVersion",
+            parameter_value: "#{@projects["starsky"][:revision]}"
+          },
+          {
+            parameter_key: "TaskDesiredCount",
+            parameter_value: "1"
+          },
+          {
+            parameter_key: "ECSClusterName",
+            parameter_value: @ecs_cluster_name
+          },
+          {
+            parameter_key: "ALBShortName",
+            parameter_value: "starsky-qa-#{deploy_id}"[0..27]
+          },
+          {
+            parameter_key: "EnvHash",
+            parameter_value: deploy_id
+          },
+          {
+            parameter_key: "HostnamePattern",
+            parameter_value: "starsky-#{@dns_record_identifier}.qa.colaster.com"
+          },
+          {
+            parameter_key: "HostnamePatternPriority",
+            parameter_value: (hostname_pattern_priority.to_i + 74).to_s
+          }
+        ]
+        if stack_exists?(stack_name_starsky)
+          cur_version = get_currently_deployed_version(stack_name_starsky)
+          update_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["starsky"][:revision])
+        else
+          create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
+        end
+      end
+
+      stack_name_activia = "ecs-task-activia-qa-#{deploy_id}"
+      git_checkout_version('activia', @projects["activia"][:revision])
+      stack_body = File.read('projects/activia/deploy/task.yml')
+      parameters = [
+        {
+          parameter_key: "Environment",
+          parameter_value: "qa"
+        },
+        {
+          parameter_key: "ReleaseVersion",
+          parameter_value: "#{@projects["activia"][:revision]}"
+        },
+        {
+          parameter_key: "ECSClusterName",
+          parameter_value: @ecs_cluster_name
+        },
+        {
+          parameter_key: "TaskDesiredCount",
+          parameter_value: "1"
+        },
+        {
+          parameter_key: "HostnamePattern",
+          parameter_value: "activia-#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "HostnamePatternPriority",
+          parameter_value: hostname_pattern_priority
+        },
+        {
+          parameter_key: "WebHost",
+          parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
+        },
+        {
+          parameter_key: "PeanoHost",
+          parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
+        }
+      ]
+      if stack_exists?(stack_name_activia)
+        cur_version = get_currently_deployed_version(stack_name_activia)
+        update_stack(stack_name_activia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["activia"][:revision])
+      else
+        create_stack(stack_name_activia, stack_body, parameters, tags, @cf_role)
+      end
+
+      # Waiting for prima healtcheck dependencies
+      wait_for_stack_ready(stack_name_skynet) unless stack_ready?(stack_name_skynet)
+      wait_for_stack_ready(stack_name_urania) unless stack_ready?(stack_name_urania)
+      wait_for_stack_ready(stack_name_bburago) unless stack_ready?(stack_name_bburago)
+      wait_for_stack_ready(stack_name_hal9000) unless stack_ready?(stack_name_hal9000)
+      wait_for_stack_ready(stack_name_assange) unless stack_ready?(stack_name_assange)
+      wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
+      wait_for_stack_ready(stack_name_leftorium) unless stack_ready?(stack_name_leftorium)
+      wait_for_stack_ready(stack_name_rachele) unless stack_ready?(stack_name_rachele)
+
+      stack_name_web = "ecs-task-web-qa-#{deploy_id}"
+      git_checkout_version('prima', @projects["prima"][:revision])
+      stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
+      parameters = [
+        {
+          parameter_key: "Environment",
+          parameter_value: "qa"
+        },
+        {
+          parameter_key: "ReleaseVersion",
+          parameter_value: "#{@projects["prima"][:revision]}"
+        },
+        {
+          parameter_key: "TaskDesiredCount",
+          parameter_value: "1"
+        },
+        {
+          parameter_key: "ECSClusterName",
+          parameter_value: @ecs_cluster_name
+        },
+        {
+          parameter_key: "ALBShortName",
+          parameter_value: "web-qa-#{deploy_id}"[0..27]
+        },
+        {
+          parameter_key: "WebQaBaseHostname",
+          parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "HostnamePattern",
+          parameter_value: "ww*-#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "HostnamePatternPriority",
+          parameter_value: hostname_pattern_priority
+        },
+        {
+          parameter_key: "HostnamePatternAggregatorPriority",
+          parameter_value: (hostname_pattern_priority.to_i + 1).to_s
+        },
+        {
+          parameter_key: "EnvHash",
+          parameter_value: deploy_id
+        },
+        {
+          parameter_key: "AssangeHostname",
+          parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "BackofficeHostname",
+          parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "WebHostname",
+          parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "FePrimaDomain",
+          parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "HostnamePattern",
+          parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
+        }
+      ]
+      if stack_exists?(stack_name_web)
+        cur_version = get_currently_deployed_version(stack_name_web)
+        update_stack(stack_name_web, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"][:revision])
+      else
+        create_stack(stack_name_web, stack_body, parameters, tags, @cf_role)
+      end
+
+      stack_name_consumer = "ecs-task-consumer-qa-#{deploy_id}"
+      git_checkout_version('prima', @projects["prima"][:revision])
+      stack_body = IO.read('projects/prima/app/cloudformation/tasks/consumer.yml')
+      parameters = [
+        {
+          parameter_key: "Environment",
+          parameter_value: "qa"
+        },
+        {
+          parameter_key: "ReleaseVersion",
+          parameter_value: "#{@projects["prima"][:revision]}"
+        },
+        {
+          parameter_key: "ECSClusterName",
+          parameter_value: @ecs_cluster_name
+        },
+        {
+          parameter_key: "NginxHttpHost",
+          parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "AssangeHostname",
+          parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "BackofficeHostname",
+          parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "WebHostname",
+          parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "FePrimaDomain",
+          parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
+        },
+        {
+          parameter_key: "HostnamePattern",
+          parameter_value: "consumer-#{@dns_record_identifier}.qa.colaster.com"
+        }
+      ]
+      if stack_exists?(stack_name_consumer)
+        cur_version = get_currently_deployed_version(stack_name_consumer)
+        update_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"][:revision])
+      else
+        create_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role)
+      end
+
stack_name_roger = "ecs-task-roger-qa-#{deploy_id}"
|
1843
|
+
git_checkout_version('roger', @projects["roger"][:revision])
|
1844
|
+
stack_body = File.read('projects/roger/deploy/task.yml')
|
1845
|
+
parameters = [
|
1846
|
+
{
|
1847
|
+
parameter_key: "Environment",
|
1848
|
+
parameter_value: "qa"
|
1849
|
+
},
|
1850
|
+
{
|
1851
|
+
parameter_key: "ReleaseVersion",
|
1852
|
+
parameter_value: @projects["roger"][:revision]
|
1853
|
+
},
|
1854
|
+
{
|
1855
|
+
parameter_key: "TaskDesiredCount",
|
1856
|
+
parameter_value: "1"
|
1857
|
+
},
|
1858
|
+
{
|
1859
|
+
parameter_key: "ECSClusterName",
|
1860
|
+
parameter_value: @ecs_cluster_name
|
1861
|
+
},
|
1862
|
+
{
|
1863
|
+
parameter_key: "HostnamePattern",
|
1864
|
+
parameter_value: "roger-#{@dns_record_identifier}.qa.colaster.com"
|
1865
|
+
},
|
1866
|
+
{
|
1867
|
+
parameter_key: "HostnamePatternPriority",
|
1868
|
+
parameter_value: hostname_pattern_priority
|
1869
|
+
}
|
1870
|
+
]
|
1871
|
+
if stack_exists?(stack_name_roger)
|
1872
|
+
cur_version = get_currently_deployed_version(stack_name_roger)
|
1873
|
+
update_stack(stack_name_roger, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["roger"][:revision])
|
1874
|
+
else
|
1875
|
+
create_stack(stack_name_roger, stack_body, parameters, tags, @cf_role)
|
1876
|
+
end
|
1877
|
+
|
1878
|
+
|
1879
|
+
if deploy_starsky_hutch?
|
1880
|
+
wait_for_stack_ready(stack_name_starsky) unless stack_ready?(stack_name_starsky) unless !deploy_starsky_hutch?
|
1881
|
+
|
1882
|
+
stack_name_hutch = "ecs-task-hutch-qa-#{deploy_id}"
|
1883
|
+
git_checkout_version('hutch', @projects["hutch"][:revision])
|
1884
|
+
stack_body = File.read('projects/hutch/deploy/task.yml')
|
1885
|
+
parameters = [
|
1886
|
+
{
|
1887
|
+
parameter_key: "Environment",
|
1888
|
+
parameter_value: "qa"
|
1889
|
+
},
|
1890
|
+
{
|
1891
|
+
parameter_key: "ReleaseVersion",
|
1892
|
+
parameter_value: "#{@projects["hutch"][:revision]}"
|
1893
|
+
},
|
1894
|
+
{
|
1895
|
+
parameter_key: "ALBShortName",
|
1896
|
+
parameter_value: "hutch-qa-#{deploy_id}"[0..27]
|
1897
|
+
},
|
1898
|
+
{
|
1899
|
+
parameter_key: "ECSClusterName",
|
1900
|
+
parameter_value: @ecs_cluster_name
|
1901
|
+
},
|
1902
|
+
{
|
1903
|
+
parameter_key: "EnvHash",
|
1904
|
+
parameter_value: deploy_id
|
1905
|
+
},
|
1906
|
+
{
|
1907
|
+
parameter_key: "HostnamePattern",
|
1908
|
+
parameter_value: "hutch-#{@dns_record_identifier}.qa.colaster.com"
|
1909
|
+
},
|
1910
|
+
{
|
1911
|
+
parameter_key: "HostnamePatternPriority",
|
1912
|
+
parameter_value: (hostname_pattern_priority.to_i + 254).to_s
|
1913
|
+
},
|
1914
|
+
{
|
1915
|
+
parameter_key: "StarskyUrl",
|
1916
|
+
parameter_value: "https://#{get_route53_hostname('ecs-task-starsky-qa-notneeded')}"
|
1917
|
+
}
|
1918
|
+
]
|
1919
|
+
if stack_exists?(stack_name_hutch)
|
1920
|
+
cur_version = get_currently_deployed_version(stack_name_hutch)
|
1921
|
+
update_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hutch"][:revision])
|
1922
|
+
else
|
1923
|
+
create_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role)
|
1924
|
+
end
|
1925
|
+
end
|
1926
|
+
|
1927
|
+
wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
|
1928
|
+
wait_for_stack_ready(stack_name_consumer) unless stack_ready?(stack_name_consumer)
|
1929
|
+
wait_for_stack_ready(stack_name_ermes) unless stack_ready?(stack_name_ermes)
|
1930
|
+
wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
|
1931
|
+
wait_for_stack_ready(stack_name_peano) unless stack_ready?(stack_name_peano)
|
1932
|
+
wait_for_stack_ready(stack_name_rogoreport) unless stack_ready?(stack_name_rogoreport)
|
1933
|
+
wait_for_stack_ready(stack_name_borat) unless stack_ready?(stack_name_borat)
|
1934
|
+
wait_for_stack_ready(stack_name_activia) unless stack_ready?(stack_name_activia)
|
1935
|
+
wait_for_stack_ready(stack_name_crash) unless stack_ready?(stack_name_crash) unless !deploy_crash?
|
1936
|
+
wait_for_stack_ready(stack_name_hutch) unless stack_ready?(stack_name_hutch) unless !deploy_starsky_hutch?
|
1937
|
+
|
1938
|
+
update_service_defaults(stack_name_web)
|
1939
|
+
update_service_defaults(stack_name_consumer)
|
1940
|
+
update_service_defaults(stack_name_urania)
|
1941
|
+
update_service_defaults(stack_name_ermes)
|
1942
|
+
update_service_defaults(stack_name_bburago)
|
1943
|
+
update_service_defaults(stack_name_hal9000)
|
1944
|
+
update_service_defaults(stack_name_fidaty)
|
1945
|
+
update_service_defaults(stack_name_peano)
|
1946
|
+
update_service_defaults(stack_name_rogoreport)
|
1947
|
+
update_service_defaults(stack_name_assange)
|
1948
|
+
update_service_defaults(stack_name_borat)
|
1949
|
+
update_service_defaults(stack_name_activia)
|
1950
|
+
update_service_defaults(stack_name_skynet)
|
1951
|
+
update_service_defaults(stack_name_leftorium)
|
1952
|
+
update_service_defaults(stack_name_rachele)
|
1953
|
+
update_service_defaults(stack_name_starsky) unless !deploy_starsky_hutch?
|
1954
|
+
update_service_defaults(stack_name_hutch) unless !deploy_starsky_hutch?
|
1955
|
+
update_service_defaults(stack_name_crash) unless !deploy_crash?
|
1956
|
+
|
1957
|
+
activia_hostname = get_route53_hostname(stack_name_activia)
|
1958
|
+
assange_hostname = get_route53_hostname(stack_name_assange)
|
1959
|
+
bburago_hostname = get_route53_hostname(stack_name_bburago)
|
1960
|
+
borat_hostname = get_route53_hostname(stack_name_borat)
|
1961
|
+
ermes_hostname = get_route53_hostname(stack_name_ermes)
|
1962
|
+
fidaty_hostname = get_route53_hostname(stack_name_fidaty)
|
1963
|
+
hal9000_hostname = get_route53_hostname(stack_name_hal9000)
|
1964
|
+
prima_hostname = get_route53_hostname(stack_name_web)
|
1965
|
+
peano_hostname = get_route53_hostname(stack_name_peano)
|
1966
|
+
skynet_hostname = get_route53_hostname(stack_name_skynet)
|
1967
|
+
urania_hostname = get_route53_hostname(stack_name_urania)
|
1968
|
+
roger_hostname = get_route53_hostname(stack_name_roger)
|
1969
|
+
leftorium_hostname = get_route53_hostname(stack_name_leftorium)
|
1970
|
+
rachele_hostname = get_route53_hostname(stack_name_rachele)
|
1971
|
+
crash_hostname = get_route53_hostname(stack_name_crash) unless !deploy_crash?
|
1972
|
+
starsky_hostname = get_route53_hostname(stack_name_starsky) unless !deploy_starsky_hutch?
|
1973
|
+
hutch_hostname = get_route53_hostname(stack_name_hutch) unless !deploy_starsky_hutch?
|
1974
|
+
|
1975
|
+
# launch_marley ec2_ip_address(asg_stack_name), prima_hostname, borat_hostname
|
1976
|
+
|
1977
|
+
projects_text = "
|
1978
|
+
> Prima url: https://#{prima_hostname}
|
1979
|
+
> Backoffice (Borat) url: https://#{borat_hostname}
|
1980
|
+
> Urania url: http://#{urania_hostname}:81
|
1981
|
+
> Bburago url: http://#{bburago_hostname}:83
|
1982
|
+
> Ermes url: http://#{ermes_hostname}:10002
|
1983
|
+
> Hal9000 url: http://#{hal9000_hostname}:10031
|
1984
|
+
> Fidaty url: http://#{fidaty_hostname}:10021
|
1985
|
+
> Peano url: http://#{peano_hostname}:10039
|
1986
|
+
> Assange url: https://#{assange_hostname}
|
1987
|
+
> Activia url: http://#{activia_hostname}:10041
|
1988
|
+
> Skynet url: http://#{skynet_hostname}:8050
|
1989
|
+
> Roger url: http://#{roger_hostname}:10051
|
1990
|
+
> Leftorium url: http://#{leftorium_hostname}:10061
|
1991
|
+
> Rachele url: http://#{rachele_hostname}:10040"
|
1992
|
+
projects_text.concat "
|
1993
|
+
> Crash url: https://#{crash_hostname}" if deploy_crash?
|
1994
|
+
projects_text.concat "
|
1995
|
+
> Starsky url: https://#{starsky_hostname}
|
1996
|
+
> Hutch url: https://#{hutch_hostname}" if deploy_starsky_hutch?
|
1997
|
+
projects_text.concat "
|
1998
|
+
> RabbitMQ url: http://#{ec2_ip_address(asg_stack_name)}:15672
|
1999
|
+
> Supervisor url: http://#{ec2_ip_address(asg_stack_name)}:9001
|
2000
|
+
> Elasticsearch url: http://#{ec2_ip_address(asg_stack_name)}:9200
|
2001
|
+
> SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
|
2002
|
+
output projects_text.cyan
|
2003
|
+
output "Deploy effettuato, everything is awesome!\n".green
|
2004
|
+
|
2005
|
+
qainit_write_output(projects_text, 'Indirizzi scritti su ')
|
2006
|
+
end
|
2007
|
+
|
2008
|
+
+    def get_route53_hostname(stack_name)
+      case
+      when stack_name.include?('web')
+        host = "www-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('urania')
+        host = "urania-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('bburago')
+        host = "bburago-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('hal9000')
+        host = "hal9000-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('fidaty')
+        host = "fidaty-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('peano')
+        host = "peano-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('assange')
+        host = "assange-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('borat')
+        host = "backoffice-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('crash')
+        host = "crash-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('ermes')
+        host = "ermes-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('activia')
+        host = "activia-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('skynet')
+        host = "skynet-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('roger')
+        host = "roger-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('leftorium')
+        host = "leftorium-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('rachele')
+        host = "rachele-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('starsky')
+        host = "starsky-#{@dns_record_identifier}.qa.colaster.com"
+      when stack_name.include?('hutch')
+        host = "hutch-#{@dns_record_identifier}.qa.colaster.com"
+      end
+      host
+    end
+
+    def ec2_ip_address(asg_stack_name)
+      resp = @cf.describe_stack_resource({
+        stack_name: asg_stack_name,
+        logical_resource_id: 'ECSAutoScalingGroup'
+      })
+      resp = @asg.describe_auto_scaling_groups({
+        auto_scaling_group_names: [resp.stack_resource_detail.physical_resource_id],
+        max_records: 1
+      })
+      instance_id = resp.auto_scaling_groups[0].instances[0].instance_id
+      resp = @ec2.describe_instances({instance_ids: [instance_id]})
+      resp.reservations[0].instances[0].private_ip_address
+    end
+
     def get_alb_host(stack_name)
       case
       when stack_name.include?('web')
@@ -649,35 +2099,868 @@ class Release
         logical_resource_id = 'EcsApplicationLoadBalancerPublic'
       when stack_name.include?('hutch')
         logical_resource_id = 'EcsApplicationLoadBalancerPublic'
-      when stack_name.include?('maia')
-        logical_resource_id = 'EcsApplicationLoadBalancerPublic'
-      when stack_name.include?('legion')
-        logical_resource_id = 'EcsApplicationLoadBalancerInternal'
       end
-      resp = describe_stack_resource(
-
+      resp = @cf.describe_stack_resource({
+        stack_name: stack_name,
+        logical_resource_id: logical_resource_id
+      })
+      resp = @alb.describe_load_balancers({
+        load_balancer_arns: [resp.stack_resource_detail.physical_resource_id]
+      })
       resp.load_balancers[0].dns_name
     end

+    def update_service_defaults(stack_name)
+      case
+      when stack_name.include?('web')
+        logical_resource_id = 'ECSServiceWebQA'
+      when stack_name.include?('consumer')
+        logical_resource_id = 'ECSServiceConsumerQa'
+      when stack_name.include?('urania')
+        logical_resource_id = 'ECSServiceUraniaQA'
+      when stack_name.include?('backoffice')
+        logical_resource_id = 'ECSServiceBackoffice'
+      when stack_name.include?('ermes')
+        logical_resource_id = 'ECSServiceErmesQA'
+      when stack_name.include?('bburago')
+        logical_resource_id = 'ECSServiceBburagoQA'
+      when stack_name.include?('hal9000')
+        logical_resource_id = 'ECSServiceHal9000QA'
+      when stack_name.include?('fidaty')
+        logical_resource_id = 'ECSServiceFidatyQA'
+      when stack_name.include?('skynet')
+        logical_resource_id = 'ECSServiceSkynetQA'
+      when stack_name.include?('roger')
+        logical_resource_id = 'ECSServiceRogerQA'
+      when stack_name.include?('activia')
+        logical_resource_id = 'ECSServiceActiviaQA'
+      when stack_name.include?('peano')
+        logical_resource_id = 'ECSServicePeanoQA'
+      when stack_name.include?('rogoreport')
+        logical_resource_id = 'ECSServiceRogoreport'
+      when stack_name.include?('assange')
+        logical_resource_id = 'ECSServiceAssangeQA'
+      when stack_name.include?('borat')
+        logical_resource_id = 'ECSServiceBorat'
+      when stack_name.include?('leftorium')
+        logical_resource_id = 'ECSServiceLeftoriumQA'
+      when stack_name.include?('rachele')
+        logical_resource_id = 'ECSServiceRacheleQA'
+      when stack_name.include?('crash')
+        logical_resource_id = 'ECSServiceCrashQA'
+      when stack_name.include?('starsky')
+        logical_resource_id = 'ECSServiceStarskyQA'
+      when stack_name.include?('hutch')
+        logical_resource_id = 'ECSServiceHutch'
+      else
+        raise "Service name non gestito per lo stack #{stack_name}"
+      end
+      resp = @cf.describe_stack_resource(
+        stack_name: stack_name,
+        logical_resource_id: logical_resource_id
+      )
+      @ecs.update_service(
+        cluster: @ecs_cluster_name,
+        service: resp.stack_resource_detail.physical_resource_id,
+        deployment_configuration: {
+          minimum_healthy_percent: 0,
+          maximum_percent: 100
+        }
+      )
+    end
+
+    def create_activia_artifact(revision)
+      output "Preparo l'artifact activia .zip\n".yellow
+
+      git_checkout_version('activia', revision)
+
+      Dir.chdir 'projects/activia'
+
+      decrypt_secrets()
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory activia && cp docker-compose-qainit.yml docker-compose.yml'
+      [
+        "docker-compose build web",
+        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+          '-c' 'mix local.hex --force && mix hex.info && \
+          mix deps.get && mix compile && mix deps.compile && \
+          cd assets && \
+          rm -rf node_modules && \
+          yarn --cache-folder ~/.cache/yarn && \
+          sysconfcpus -n 2 ./node_modules/.bin/webpack --mode production && \
+          cd .. && \
+          mix phx.digest && \
+          rm -rf _build/qa/rel/ && \
+          mix release --env=qa'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob("_build/qa/rel/activia/releases/*/activia.tar.gz").first
+
+      upload_artifact(artifact_path, "microservices/activia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_assange_artifact(revision)
+      output "Preparo l'artifact assange .zip\n".yellow
+
+      git_checkout_version('assange', revision)
+
+      Dir.chdir 'projects/assange'
+
+      decrypt_secrets()
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory assange && cp docker-compose-qainit.yml docker-compose.yml'
+      [
+        "docker-compose build web",
+        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+          '-c' 'mix local.hex --force && mix hex.info && \
+          mix deps.get && mix compile && mix deps.compile && \
+          mix phx.digest && \
+          rm -rf _build/qa/rel/ && \
+          mix release --env=qa'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob("_build/qa/rel/assange/releases/*/assange.tar.gz").first
+      upload_artifact(artifact_path, "microservices/assange/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_bburago_artifact(revision)
+      output "Preparo l'artifact bburago .zip\n".yellow
+
+      git_checkout_version('bburago', revision)
+
+      Dir.chdir 'projects/bburago'
+
+      decrypt_secrets()
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory bburago && cp docker-compose-qainit.yml docker-compose.yml'
+      [
+        "docker-compose build web",
+        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint deploy/create_qa_artifact web"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob('_build/qa/rel/bburago/releases/*/bburago.tar.gz').first
+      upload_artifact(artifact_path, "microservices/bburago/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_borat_artifact(revision)
+      output "Preparo l'artifact borat .zip\n".yellow
+
+      git_checkout_version('borat', revision)
+
+      Dir.chdir 'projects/borat'
+
+      decrypt_secrets()
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory borat && cp docker-compose-qainit.yml docker-compose.yml'
+      [
+        "docker network create borat_network || true",
+        "docker-compose build web",
+        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+          '-c' 'mix local.hex --force && mix hex.info && \
+          mix deps.get && \
+          cd assets && \
+          yarn --cache-folder ~/.cache/yarn && \
+          sysconfcpus -n 2 ./node_modules/.bin/webpack -p --config config/webpack.config.prod.js && \
+          cd ../ && \
+          mix phx.digest && \
+          mix compile && mix deps.compile && \
+          rm -rf _build/qa/rel/ && \
+          mix distillery.release --env=qa'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob("_build/qa/rel/borat/releases/*/borat.tar.gz").first
+      upload_artifact(artifact_path, "microservices/borat/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_crash_artifact(revision, deploy_id)
+      output "Preparo l'artifact crash .zip\n".yellow
+
+      git_checkout_version('crash', revision)
+
+      Dir.chdir 'projects/crash'
+
+      crash_qa_host = get_route53_hostname('ecs-task-crash-qa-notneeded')
+
+      decrypt_secrets()
+
+      `mv docker-compose-ci.yml docker-compose.yml`
+      exec_step 'prepare-docker-compose --directory crash && cp docker-compose-qainit.yml docker-compose.yml'
+
+      if File.exists? 'deploy/build_qa_artifact'
+        `deploy/build_qa_artifact #{deploy_id}`
+      else # TODO remove when deploy/build_qa_artifact is merged
+        [
+          'docker-compose build web',
+          "docker-compose run -w $PWD -u root -e MIX_ENV=qa -e ENV_HASH=#{deploy_id} web \
+            '-c' 'mix local.hex --force && mix hex.info && \
+            mix deps.get && \
+            cd assets && \
+            yarn --cache-folder ~/.cache/yarn && \
+            NODE_ENV=production sysconfcpus -n 1 yarn run build && \
+            cd ../ && \
+            mix release.clean --implode --no-confirm && \
+            mix phx.digest && \
+            mix deps.clean --all && \
+            mix deps.get && \
+            mix compile && mix release --env=qa'",
+          'docker-compose down'
+        ].each do |cmd|
+          execute_command cmd
+        end
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob('_build/qa/rel/crash/releases/*/crash.tar.gz').first
+      upload_artifact(artifact_path, "microservices/crash/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_ermes_artifact(revision)
+      output "Preparo l'artifact ermes .zip\n".yellow
+
+      git_checkout_version('ermes', revision)
+
+      Dir.chdir 'projects/ermes'
+
+      decrypt_secrets()
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory ermes && cp docker-compose-qainit.yml docker-compose.yml'
+
+      [
+        "if echo `docker network ls` | grep crash_default; \
+          then echo 'crash_default network already existing'; \
+          else docker network create crash_default; fi",
+        'docker-compose build web'
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      [ "docker-compose run --rm -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+          '-c' 'mix local.hex --force && mix hex.info && \
+          mix deps.get && mix compile && mix deps.compile && \
+          mix phx.digest && \
+          MIX_ENV=dev mix compile.sms && \
+          MIX_ENV=dev mix compile.html && \
+          MIX_ENV=dev mix compile.heml && \
+          MIX_ENV=dev mix compile.app_notification && \
+          rm -rf _build/qa/rel/ && \
+          mix release --env=qa'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob("_build/qa/rel/ermes/releases/*/ermes.tar.gz").first
+      upload_artifact(artifact_path, "microservices/ermes/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_fidaty_artifact(revision)
+      output "Preparo l'artifact fidaty .zip\n".yellow
+
+      git_checkout_version('fidaty', revision)
+
+      Dir.chdir 'projects/fidaty'
+
+      decrypt_secrets()
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory fidaty && cp docker-compose-qainit.yml docker-compose.yml'
+      [
+        "docker-compose build web",
+        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+          '-c' 'mix local.hex --force && mix hex.info && \
+          mix deps.get && mix compile && mix deps.compile && \
+          mix phx.digest && \
+          rm -rf _build/qa/rel/ && \
+          mix release --env=qa'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob("_build/qa/rel/fidaty/releases/*/fidaty.tar.gz").first
+      upload_artifact(artifact_path, "microservices/fidaty/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_hal9000_artifact(revision)
+      output "Preparo l'artifact hal9000 .zip\n".yellow
+
+      git_checkout_version('hal9000', revision)
+
+      Dir.chdir 'projects/hal9000'
+
+      decrypt_secrets()
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory hal9000 && cp docker-compose-qainit.yml docker-compose.yml'
+      [
+        "docker-compose build web",
+        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+          '-c' 'mix local.hex --force && mix hex.info && \
+          mix deps.get && mix compile && mix deps.compile && \
+          mix phx.digest assets -o priv/static && \
+          rm -rf _build/qa/rel/ && \
+          mix release --env=qa'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob("_build/qa/rel/hal9000/releases/*/hal9000.tar.gz").first
+      upload_artifact(artifact_path, "microservices/hal9000/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_hutch_artifact(revision)
+      output "Preparo l'artifact hutch\n".yellow
+
+      git_checkout_version('hutch', revision)
+
+      Dir.chdir 'projects/hutch'
+
+      version = `git rev-parse HEAD`
+
+      decrypt_secrets() unless File.exist?('config/secrets.yml')
+
+      exec_step 'git submodule update'
+      exec_step 'cp docker-compose-ci.yml docker-compose.yml'
+      exec_step 'prepare-docker-compose --directory hutch && cp docker-compose-qainit.yml docker-compose.yml'
+      exec_step "sed s/qa_deploy_id/#{get_deploy_id}/ .env.dist.qa"
+      exec_step "cp .env.dist.qa .env"
+      exec_step "rm -fr peano"
+
+      [
+        "sed -i 's/USER app/USER root/g' Dockerfile",
+        "docker-compose build web",
+        "docker-compose run -w $PWD -u root -e ELM_APP_STARSKY_URL=https://#{get_route53_hostname("starsky")} --entrypoint /bin/sh web \
+          '-c' 'yarn && yarn run build \
+          && tar cfz #{revision}-qa.tar.gz *'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      artifact_path = "./#{revision}-qa.tar.gz"
+
+      upload_artifact(artifact_path, "microservices/hutch/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_leftorium_artifact(revision)
+      output "Preparo l'artifact leftorium .zip\n".yellow
+
+      git_checkout_version('leftorium', revision)
+
+      Dir.chdir 'projects/leftorium'
+
+      decrypt_secrets()
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory leftorium && cp docker-compose-qainit.yml docker-compose.yml'
+      [
+        "docker-compose build web",
+        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+          '-c' 'mix local.hex --force && mix hex.info && \
+          mix deps.get && mix compile && mix deps.compile && \
+          rm -rf _build/qa/rel/ && \
+          mix release --env=qa'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob('_build/qa/rel/leftorium/releases/*/leftorium.tar.gz').first
+      upload_artifact(artifact_path, "microservices/leftorium/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_peano_artifact(revision)
+      output "Preparo l'artifact peano .zip\n".yellow
+
+      git_checkout_version('peano', revision)
+
+      Dir.chdir 'projects/peano'
+
+      decrypt_secrets() unless File.exist?('config/secrets.yml')
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory peano && cp docker-compose-qainit.yml docker-compose.yml'
+
+      if File.exists? 'deploy/build_qa_artifact'
+        `deploy/build_qa_artifact`
+      else # TODO remove when deploy/build_qa_artifact is merged
+        [
+          "docker-compose build web",
+          "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+            '-c' 'mix local.hex --force && mix hex.info && \
+            mix deps.get && mix compile && mix deps.compile && \
+            rm -rf _build/qa/rel/ && \
+            mix release --env=qa'"
+        ].each do |cmd|
+          execute_command cmd
+        end
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob("_build/qa/rel/peano/releases/*/peano.tar.gz").first
+      upload_artifact(artifact_path, "microservices/peano/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_prima_artifact(revision, branch_name, deploy_id, minimal = false)
+      output "Preparo l'artifact prima .zip\n".yellow
+
+      git_checkout_version('prima', revision)
+
+      Dir.chdir 'projects/prima'
+
+      ['vendor'].each do |dir|
+        unless File.directory?(dir)
+          if File.directory?("../../../prima/#{dir}")
+            exec_step "rsync -a ../../../prima/#{dir} ."
+          end
+        end
+      end
+
+      exec_step 'mv docker-compose-ci.yml docker-compose.yml'
+      exec_step 'prepare-docker-compose --directory prima'
+      exec_step 'sed -i "s/\\/home\\/app/\\/root/g" docker-compose.yml'
+      `sed -i 's/"@prima-assicurazioni/pyxis-npm": ".*",/"@prima-assicurazioni/pyxis-npm": "#{@pyxis_version}",/' package.json` if deploy_pyxis?
+      [
+        "bin/qa_build_artifact.sh #{branch_name} #{'update' if @deploy_update}"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      Dir.chdir "../../"
+    end
+
+    def create_pyxis_artifact(revision, deploy_id)
+      if (deploy_pyxis?)
+        output "Preparo l'artifact pyxis\n".yellow
+
+        git_checkout_version('pyxis-npm', revision)
+
+        Dir.chdir 'projects/pyxis-npm'
+
+        decrypt_secrets()
+
+        exec_step 'mv .fakenpmrc .npmrc'
+        exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+        exec_step 'prepare-docker-compose --directory pyxis-npm && cp docker-compose-qainit.yml docker-compose.yml'
+        exec_step 'docker-compose build web'
+
+        exec_step "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
+          '-c' 'npm view prima-assicurazioni/pyxis-npm versions --json > versions.json'" # posso recuperare le versioni pubblicate solo da dentro al container, scrivo su un file che leggo subito dopo
+        published_versions = `cat versions.json`
+        qa_versions = JSON.parse(published_versions).select{ |version| version.include? get_pyxis_version(deploy_id) }
+
+        @pyxis_version = "0.#{get_pyxis_version(deploy_id)}.#{qa_versions.size}"
+
+        `sed -i '3s/".*/"version": "#{@pyxis_version}",/' package.json`
+        [
+          "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
+            '-c' 'yarn install && \
+            yarn build:prod && \
+            npm publish'"
+        ].each do |cmd|
+          execute_command cmd
+        end
+
+        cleanup_containers
+        Dir.chdir '../../'
+      end
+    end
+
+    def create_rachele_artifact(revision)
+      output "Preparo l'artifact rachele .zip\n".yellow
+
+      git_checkout_version('rachele', revision)
+
+      Dir.chdir 'projects/rachele'
+
+      decrypt_secrets()
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory rachele && cp docker-compose-qainit.yml docker-compose.yml'
+
+      execute_command "docker-compose build web"
+
+      [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+          '-c' 'mix local.hex --force && mix hex.info && \
+          mix deps.get && mix compile && mix deps.compile && \
+          rm -rf _build/qa/rel/ && \
+          mix release --env=qa'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob("_build/qa/rel/rachele/releases/*/rachele.tar.gz").first
+      upload_artifact(artifact_path, "microservices/rachele/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_roger_artifact(revision)
+      output "Preparo l'artifact roger .zip\n".yellow
+
+      git_checkout_version('roger', revision)
+
+      Dir.chdir 'projects/roger'
+
+      decrypt_secrets()
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory roger && cp docker-compose-qainit.yml docker-compose.yml'
+      [
+        "docker-compose build web",
+        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+          '-c' 'mix local.hex --force && mix hex.info && \
+          mix deps.get && mix compile && mix deps.compile && \
+          mix phx.digest && \
+          rm -rf _build/qa/rel/ && \
+          mix distillery.release --env=qa'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob("_build/qa/rel/roger/releases/*/roger.tar.gz").first
+      upload_artifact(artifact_path, "microservices/roger/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_rogoreport_artifact(revision)
+      output "Preparo l'artifact rogoreport .zip\n".yellow
+
+      git_checkout_version('rogoreport', revision)
+
+      Dir.chdir 'projects/rogoreport'
+
+      decrypt_secrets() unless File.exist?('config/secrets.yml')
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory rogoreport && cp docker-compose-qainit.yml docker-compose.yml'
+      [
+        "docker-compose build web",
+        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+          '-c' 'mix local.hex --force && mix hex.info && \
+          mix deps.get && mix compile && mix deps.compile && \
+          rm -rf _build/qa/rel/ && \
+          mix release --name=rogoreport --env=qa'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob("_build/qa/rel/rogoreport/releases/*/rogoreport.tar.gz").first
+      upload_artifact(artifact_path, "microservices/rogoreport/rogoreport-#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_skynet_artifact(revision)
+      output "Preparo l'artifact skynet\n".yellow
+
+      git_checkout_version('skynet', revision)
+
+      Dir.chdir 'projects/skynet'
+
+      version = `git rev-parse HEAD`
+
+      artifact_path = "/tmp/skynet-#{revision}-qa.tar.gz"
+
+      exec_step "tar cfz #{artifact_path} core config oracles pickles web tests logstash application_log configuration.py wsgi.py requirements.txt requirements_training.txt"
+
+      upload_artifact(artifact_path, "microservices/skynet/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_starsky_artifact(revision)
+      output "Preparo l'artifact starsky\n".yellow
+
+      git_checkout_version('starsky', revision)
+
+      Dir.chdir 'projects/starsky'
+
+      version = `git rev-parse HEAD`
+
+      #artifact_path = "/tmp/starsky-#{revision}-qa.tar.gz"
+
+      decrypt_secrets() unless File.exist?('config/secrets.yml')
+
+      `mv docker-compose-ci.yml docker-compose.yml`
+      exec_step 'prepare-docker-compose --directory starsky && cp docker-compose-qainit.yml docker-compose.yml'
+      exec_step "sed s/qa_deploy_id/#{get_deploy_id}/ .env.dist.qa"
+      exec_step "cp .env.dist.qa .env"
+
+      [
+        "sed -i 's/USER app/USER root/g' Dockerfile",
+        "if echo `docker network ls` | grep peano_default; \
+          then echo 'peano_default network already existing'; \
+          else docker network create peano_default; fi",
+        "docker-compose build web",
+        "docker-compose run -w $PWD -e BUILD_ENV=qa -u root --entrypoint /bin/sh web \
+          '-c' 'cargo build --release -vv --features=qa \
+          && cargo build --bin migrate --release --features=qa \
+          && cargo build --bin rabbit_worker --release --features=qa \
+          && cp -p target/release/starsky . \
+          && cp -p target/release/migrate . \
+          && cp -p target/release/rabbit_worker . \
+          && tar cfz #{revision}-qa.tar.gz config starsky migrate rabbit_worker .env.dist.*'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      artifact_path = "./#{revision}-qa.tar.gz"
+
+      upload_artifact(artifact_path, "microservices/starsky/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
+    def create_urania_artifact(revision)
+      output "Preparo l'artifact urania .zip\n".yellow
+
+      git_checkout_version('urania', revision)
+
+      Dir.chdir 'projects/urania'
+
+      decrypt_secrets()
+
+      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
+      exec_step 'prepare-docker-compose --directory urania && cp docker-compose-qainit.yml docker-compose.yml'
+
+      execute_command "docker-compose build web"
+
+      [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+          '-c' 'mix local.hex --force && mix hex.info && \
+          mix deps.get && mix compile && mix deps.compile && \
+          rm -rf _build/qa/rel/ && \
+          mix release --env=qa'"
+      ].each do |cmd|
+        execute_command cmd
+      end
+
+      cleanup_containers
+
+      artifact_path = Dir.glob("_build/qa/rel/urania/releases/*/urania.tar.gz").first
+      upload_artifact(artifact_path, "microservices/urania/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+
+      Dir.chdir '../../'
+    end
+
def deploy_pyxis?
|
663
2805
|
if defined? @deploy_pyxis
|
664
2806
|
@deploy_pyxis
|
665
2807
|
else
|
666
2808
|
pyxis_updated = `git log -p -1 --unified=0 | grep pyxis-npm:`.length > 0
|
667
2809
|
|
668
|
-
update_pyxis = !@projects['pyxis-npm'].empty? && @projects['pyxis-npm'][
|
2810
|
+
update_pyxis = !@projects['pyxis-npm'].empty? && @projects['pyxis-npm'][:name] != 'master' && pyxis_updated
|
669
2811
|
|
670
2812
|
@deploy_pyxis = update_pyxis
|
671
2813
|
return update_pyxis
|
672
2814
|
end
|
673
2815
|
end
|
674
2816
|
|
2817
|
+
def deploy_crash?
|
2818
|
+
crash_present = !@projects['crash'].empty? && @projects['crash'][:name] != 'master' && !@projects['crash'][:default_branch]
|
2819
|
+
leftorium_present = !@projects['leftorium'].empty? && @projects['leftorium'][:name] != 'master' && !@projects['leftorium'][:default_branch]
|
2820
|
+
crash_present || leftorium_present
|
2821
|
+
end
|
2822
|
+
|
2823
|
+
def deploy_starsky_hutch?
|
2824
|
+
starsky_present = !@projects['starsky'].nil? && !@projects['starsky'].empty? && @projects['starsky'][:name] != 'master' && !@projects['starsky'][:default_branch]
|
2825
|
+
hutch_present = !@projects['hutch'].nil? && !@projects['hutch'].empty? && @projects['hutch'][:name] != 'master' && !@projects['hutch'][:default_branch]
|
2826
|
+
starsky_present || hutch_present
|
2827
|
+
end
|
2828
|
+
|
2829
|
+
def get_pyxis_version(deploy_id)
|
2830
|
+
(deploy_id.delete '[a-z0]')[0..9]
|
2831
|
+
end
|
2832
|
+
|
2833
|
+
def cleanup_containers
|
2834
|
+
`docker-compose kill && docker-compose down -v --remove-orphans`
|
2835
|
+
`docker rm $(docker ps -q -f status=exited)`
|
2836
|
+
end
|
2837
|
+
|
2838
|
+
def git_checkout_version(project, revision)
|
2839
|
+
Dir.chdir "projects/#{project}"
|
2840
|
+
exec_step "git checkout -- . && git checkout #{revision}"
|
2841
|
+
Dir.chdir "../../"
|
2842
|
+
end
|
2843
|
+
|
2844
|
+
def create_asg_stack(stack_name, tags = [])
|
2845
|
+
stack_body = IO.read('cloudformation/stacks/asg/ecs-asg-allinone.yml')
|
2846
|
+
parameters = [
|
2847
|
+
{
|
2848
|
+
parameter_key: "Environment",
|
2849
|
+
parameter_value: "qa"
|
2850
|
+
},
|
2851
|
+
{
|
2852
|
+
parameter_key: "InstanceType",
|
2853
|
+
parameter_value: "t3.large"
|
2854
|
+
},
|
2855
|
+
{
|
2856
|
+
parameter_key: "ECSClusterName",
|
2857
|
+
parameter_value: @ecs_cluster_name
|
2858
|
+
},
|
2859
|
+
{
|
2860
|
+
parameter_key: "AMIID",
|
2861
|
+
parameter_value: @ami_id
|
2862
|
+
}
|
2863
|
+
]
|
2864
|
+
create_stack(stack_name, stack_body, parameters, tags, @cf_role)
|
2865
|
+
end
|
2866
|
+
|
2867
|
+
+  def create_cluster_stack(stack_name, tags = [])
+    stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
+    create_stack(stack_name, stack_body, [], tags)
+  end
+
   def update_cluster_stack(stack_name, tags = [])
     stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
     update_stack(stack_name, stack_body, [], tags)
   end

+  def create_alb_stack(stack_name, role, hash, environment = 'qa')
+    stack_body = IO.read('cloudformation/stacks/elb/alb-public-qa.yml')
+    parameters = [
+      {
+        parameter_key: "Environment",
+        parameter_value: environment
+      },
+      {
+        parameter_key: "Role",
+        parameter_value: role
+      },
+      {
+        parameter_key: "EnvHash",
+        parameter_value: hash
+      }
+    ]
+    create_stack(stack_name, stack_body, parameters, [], @cf_role)
+  end
+
+  def import_redis_crash(qa_ip_address)
+    output "Importo chiavi di Redis da staging\n".yellow
+
+    prefixes = ['CODICI', 'fun_with_flags']
+    redis_qa = Redis.new(:url => "redis://#{qa_ip_address}:6379/10")
+    redis_staging = Redis.new(:url => 'redis://staging.cache-1.prima.it:6379/10')
+
+    prefixes.each do |prefix|
+      redis_staging.keys("#{prefix}*").each do |key|
+        next unless redis_qa.keys(key).empty?
+        output "Importo #{key} dal Redis di staging\n".yellow
+        dump_staging = redis_staging.dump key
+        redis_qa.restore key, 0, dump_staging
+      end
+    end
+  end
+
+  def import_dbs(ip_address)
+    resp = @ecs.run_task({
+      cluster: @ecs_cluster_name,
+      task_definition: @import_db_task,
+      overrides: {
+        container_overrides: [
+          {
+            name: 'dbrestore',
+            environment: [
+              {
+                name: 'EC2_IP_ADDRESS',
+                value: ip_address
+              }
+            ]
+          }
+        ]
+      },
+      count: 1
+    })
+    return resp
+  end
+
+  def wait_for_db_import(task)
+    output "Attendo che i DB vengano importati...\n".yellow
+    stopped_at = nil
+    sleep 15 # altrimenti non trova il task appena avviato...
+    while stopped_at.nil?
+      if task.tasks[0].nil?
+        pp @ecs_cluster_name
+        pp task
+        stop_if true, "Task di import DB lanciato, ma risposta vuota!".red
+      end
+      task = @ecs.describe_tasks({
+        cluster: task.tasks[0].cluster_arn,
+        tasks: [task.tasks[0].task_arn]
+      })
+      stopped_at = task.tasks[0].stopped_at unless task.tasks[0].nil?
+      sleep_seconds = 10
+      seconds_elapsed = 0
+      while true && stopped_at.nil?
+        break if seconds_elapsed >= sleep_seconds
+        print '.'.yellow; STDOUT.flush
+        sleep 1
+        seconds_elapsed += 1
+      end
+    end
+    print "\n"
+  end
+
   def choose_branch_to_deploy(project_name, select_master = false)
+    return {name: 'master', revision: '399653d555b8864', committer: 'crash@prima.it', default_branch: true} if project_name == 'crash' && select_master
     Dir.chdir "projects/#{project_name}"
     output "Recupero la lista dei branch del progetto #{project_name}..."
     `git remote prune origin`
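
The new import_dbs / wait_for_db_import pair starts the dbrestore task on ECS and then polls describe_tasks until the task reports a stopped_at timestamp. For reference, a minimal standalone sketch of the same start-and-wait flow using the SDK's built-in :tasks_stopped waiter instead of a hand-rolled loop; the cluster name, task definition and IP address below are illustrative placeholders, not values taken from the package:

    require 'aws-sdk'

    ecs = Aws::ECS::Client.new

    # Start a one-off restore task (names and IP are hypothetical).
    resp = ecs.run_task(
      cluster: 'ecs-cluster-qa-example',
      task_definition: 'dbrestore-example',
      count: 1,
      overrides: {
        container_overrides: [
          { name: 'dbrestore', environment: [{ name: 'EC2_IP_ADDRESS', value: '10.0.0.1' }] }
        ]
      }
    )

    # Block until ECS marks the task as stopped; the waiter raises if it times out.
    ecs.wait_until(:tasks_stopped, cluster: 'ecs-cluster-qa-example', tasks: [resp.tasks.first.task_arn])

The waiter covers the same ground as the manual sleep loop above; the script keeps its own loop, presumably so it can print progress dots while waiting.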
@@ -717,7 +3000,8 @@ class Release
     name = branch_name.split(' ')[0]
     revision = branch_name.split(' ')[1]
     committer_email = branch_name.split(' ')[2].tr('<>', '')
-    {
+    return { name: 'crash', default_branch: true } if project_name == 'crash' && branch_name == 'master' #rimuovere questa riga se mai nei qa servirà crash con un branch diverso da master
+    { name: name, revision: revision[0..14], committer: committer_email, default_branch: select_master }
   end

   def select_branch_to_deploy(project_name, branch_name)
@@ -733,7 +3017,7 @@ class Release
     name = branch_name.split(' ')[0]
     revision = branch_name.split(' ')[1]
     committer_email = branch_name.split(' ')[2].tr('<>', '')
-    {
+    { name: name, revision: revision[0..14], committer: committer_email }
   end

   def get_stacks()
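
Both choose_branch_to_deploy and select_branch_to_deploy split each candidate line into a branch name, a revision and a committer e-mail before building the hash the rest of the release flow consumes. A small illustration of that parsing with a made-up input line; the format shown is an assumption inferred from the splitting above:

    # Hypothetical line in the 'name revision <committer>' format the two methods expect.
    branch_line = 'feature/new-quote 1a2b3c4d5e6f7a8b9c0d1e2f <dev@prima.it>'

    name, revision, committer = branch_line.split(' ')
    {
      name: name,
      revision: revision[0..14],          # first 15 characters of the commit SHA
      committer: committer.tr('<>', '')   # strip the angle brackets around the e-mail
    }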
@@ -764,6 +3048,73 @@ class Release
     (Time.now.to_i.to_s[-4..-1].to_i + Random.rand(40000)).to_s
   end

+  def launch_marley(ip_address, prima_hostname, borat_hostname)
+    resp = @cf.describe_stack_resource({
+      stack_name: 'batch-job-marley',
+      logical_resource_id: 'JobDefinition'
+    })
+
+    @batch.submit_job({
+      job_name: "marley-#{@dns_record_identifier}", # required
+      job_queue: "tools-production", # required
+      job_definition: resp.stack_resource_detail.physical_resource_id, # required
+      container_overrides: {
+        environment: [
+          {
+            name: 'PRIMA_URL',
+            value: "https://#{prima_hostname}/?superprima"
+          },
+          {
+            name: 'PRIMA_IP',
+            value: ip_address
+          },
+          {
+            name: 'PROJECTS_JSON',
+            value: @projects.to_json
+          },
+          {
+            name: 'BACKOFFICE_URL',
+            value: "https://#{borat_hostname}"
+          }
+        ]
+      }
+    })
+
+    output "Marley lanciato con successo!\n".green
+  end
+
+  def get_currently_deployed_version(stack_name)
+    parameters = get_stack_parameters(stack_name)
+    currently_deployed_version = nil
+    parameters.each do |parameter|
+      if parameter.parameter_key == "ReleaseVersion"
+        currently_deployed_version = parameter.parameter_value
+      end
+    end
+    currently_deployed_version
+  end
+
+  def decrypt_secrets()
+    docker_image = "prima/biscuit_populate_configs"
+    [
+      "docker pull #{docker_image}",
+      "docker run -t --rm -v $HOME/.aws:/root/.aws -w $PWD --volumes-from #{get_host_container_name} #{docker_image}"
+    ].each do |cmd|
+      execute_command cmd
+    end
+  end
+
+  def get_host_container_name()
+    if @host_container_name
+      @host_container_name
+    else
+      hostname = `cat /etc/hostname`.gsub("\n", '')
+      execute_command "docker ps | grep #{hostname} | awk '{print $NF}'"
+      @host_container_name = `docker ps | grep #{hostname} | awk '{print $NF}'`.gsub("\n", '')
+      # @host_container_name = `docker ps | grep #{hostname} | sed -r "s/.+ ([^\s].+)$/\1/p"`
+    end
+  end
+
   def select_branches(project_names = nil)
     output "Deploy feature menu"
     if project_names.nil?
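
get_currently_deployed_version (like get_ami_id further down) scans the parameters returned by get_stack_parameters, a helper defined elsewhere in the gem. A minimal sketch of the underlying lookup done directly against the SDK; the stack name here is an illustrative placeholder:

    require 'aws-sdk'

    cf = Aws::CloudFormation::Client.new

    # Fetch the stack and pick a single parameter by key (stack name is hypothetical).
    stack = cf.describe_stacks(stack_name: 'ecs-task-web-qa-example').stacks.first
    release_param = stack.parameters.find { |p| p.parameter_key == 'ReleaseVersion' }
    release_version = release_param && release_param.parameter_value

get_ami_id applies the same scan to the AMIID key instead of ReleaseVersion.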
@@ -777,6 +3128,14 @@ class Release
       end
     end
   end
+
+  def get_ami_id(stack_name)
+    get_stack_parameters(stack_name).each do |param|
+      if param.parameter_key == "AMIID"
+        return param.parameter_value
+      end
+    end
+  end
 end

 def help_content
@@ -802,6 +3161,7 @@ Description
   finish finishes the feature by merging to dev and master
   qainit deploys a new environment with selected branches from every project
   qainit $PROJECT_NAME deploys a new environment allowing to selected a branch from the input project (everything else is master)
+  qainit minimal prima deploys a new copy of prima project, using staging microservices and database
   qainit shutdown deletes a specific qa environment

 Available only to devops (from artemide)