prima-twig 0.54.46 → 0.54.230

@@ -5,7 +5,10 @@ require_relative '../lib/prima_twig.rb'
  require_relative '../lib/prima_aws_client.rb'
  require 'colorize'
  require 'highline/import'
- require 'aws-sdk'
+ require 'aws-sdk-batch'
+ require 'aws-sdk-cloudformation'
+ require 'aws-sdk-ecs'
+ require 'aws-sdk-s3'
  require 'redcarpet'
  require 'mail'
  require 'erb'
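
The monolithic aws-sdk gem is replaced by the service-specific V3 gems, so only the clients this script actually uses get loaded. A minimal sketch of what the new requires provide (the explicit region is illustrative; the script itself relies on the default AWS configuration):

    require 'aws-sdk-batch'
    require 'aws-sdk-cloudformation'

    # Each modular gem ships its client under the same Aws:: namespace.
    batch = Aws::Batch::Client.new(region: 'eu-west-1')
    cf    = Aws::CloudFormation::Client.new(region: 'eu-west-1')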
@@ -55,6 +58,7 @@ class Review
  @cf = Aws::CloudFormation::Client.new
  @ecs = Aws::ECS::Client.new
  @s3 = Aws::S3::Client.new
+ @batch = Aws::Batch::Client.new
  @s3_bucket = "prima-artifacts-encrypted"
  end

@@ -94,15 +98,6 @@ class Review
  artifact = artifacts.select {|v| v[:rev] == artifact_rev}.first

  do_deploy! artifact_rev
- # exec_step "terminal-notifier -message 'Deploy terminato, vuoi lanciare paparatzinger?'" if which 'terminal-notifier'
- #
- # confirm_message = "Vuoi lanciare paparatzinger?"
- # launch_paparatzinger = @prima.yesno confirm_message.blue
- #
- # if launch_paparatzinger
- # output "Avvio paparatzinger per gli screenshot".yellow
- # job_name = launch_paparatzinger(artifact[:commit_msg])
- # end

  mail = Mail.new do
  from 'deploy@prima.it'
@@ -118,7 +113,6 @@ class Review
  body << "Revision: [#{artifact[:rev]}](https://github.com/primait/prima/commit/#{artifact[:rev]}) del #{artifact[:created_at].strftime('%d/%m/%Y %H:%M:%S')}\n\n"
  body << "Branch: [#{artifact[:branch]}](https://github.com/primait/prima/tree/#{artifact[:branch]})\n\n"
  body << "Commit: #{commit_msg.gsub(/_/, '\_')}\n\n"
- #body << "Screenshots (tra qualche minuto): [BrowserStack](https://www.browserstack.com/automate) (Filtrare per: \"#{get_paparatzinger_job_name(commit_msg).gsub(/_/, '\_')}\")" if launch_paparatzinger

  htmlBody = Redcarpet::Markdown.new(Redcarpet::Render::HTML.new).render body

@@ -144,6 +138,8 @@ class Review

  invalidate_prismic_cache

+ launch_crawler
+
  exec_step "terminal-notifier -message 'Deploy terminato'" if which 'terminal-notifier'
  end

@@ -217,63 +213,16 @@ class Review
  artifacts.sort_by { |v| v[:created_at] }.reverse
  end

- def launch_paparatzinger(job_name)
- @s3.get_object(
- response_target: '/tmp/paparatzinger_twig.yml',
- bucket: 'prima-deploy',
- key: 'paparatzinger_twig.yml')
-
- paparatzinger_config = YAML.load_file '/tmp/paparatzinger_twig.yml'
-
- uri = URI.parse(paparatzinger_config['prima_api_search_url'])
- body = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
- req = Net::HTTP::Get.new(uri)
- req["x-apikey"] = paparatzinger_config['prima_api_token']
- response = http.request req
- response.body
- end
-
- saves = JSON.parse body
+ def launch_crawler()
+ resp = describe_stack_resource('batch-job-crawler-production', 'JobDefinition')

- save_code = saves.sample['unique_identifier']
- url_garanzie = "https://www.prima.it/preventivo/auto/#{save_code}/garanzie?browserstack=true"
- job_name = get_paparatzinger_job_name(clean_commit_message(job_name))
-
- logical_resource_id = 'TaskDefinitionPaparatzinger'
- resp = @cf.describe_stack_resource({
- stack_name: 'ecs-task-paparatzinger-production',
- logical_resource_id: logical_resource_id
- })
-
- resp = @ecs.run_task({
- cluster: 'ecs-cluster-tools-vpc-production-ECSCluster-1WJQLW5EVLYEB',
- task_definition: resp.stack_resource_detail.physical_resource_id,
- overrides: {
- container_overrides: [
- {
- name: 'paparatzinger',
- environment: [
- {
- name: 'JOB_NAME',
- value: job_name,
- },
- {
- name: 'VERSION',
- value: paparatzinger_config['version'],
- },
- {
- name: 'URL_GARANZIE',
- value: url_garanzie
- }
- ]
- }
- ]
- },
- count: 1
+ @batch.submit_job({
+ job_name: "crawler", # required
+ job_queue: "tools-production", # required
+ job_definition: resp.stack_resource_detail.physical_resource_id # required
  })
- output "Paparatzinger lanciato con successo. URL: #{url_garanzie}\n".green

- job_name
+ output "Crawler lanciato con successo!\n".green
  end

  end
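
The BrowserStack/paparatzinger screenshot run is dropped in favour of an AWS Batch job. A standalone sketch of the new flow, assuming (as the code above does) that the batch-job-crawler-production stack exposes a JobDefinition resource and that a tools-production job queue exists; the describe_stack_resource helper used above presumably wraps the same CloudFormation call shown here:

    require 'aws-sdk-batch'
    require 'aws-sdk-cloudformation'

    cf    = Aws::CloudFormation::Client.new
    batch = Aws::Batch::Client.new

    # Resolve the registered job definition ARN from the CloudFormation stack...
    detail = cf.describe_stack_resource(
      stack_name: 'batch-job-crawler-production',
      logical_resource_id: 'JobDefinition'
    ).stack_resource_detail

    # ...and queue a single crawler run on the production tools queue.
    batch.submit_job(
      job_name: 'crawler',
      job_queue: 'tools-production',
      job_definition: detail.physical_resource_id
    )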
@@ -285,10 +234,6 @@ def clean_commit_message(commit_msg)
  commit_msg[0..99]
  end

- def get_paparatzinger_job_name(job_name)
- job_name.gsub /[^0-9a-z]/i, '-'
- end
-
  def which(cmd)
  exts = ENV['PATHEXT'] ? ENV['PATHEXT'].split(';') : ['']
  ENV['PATH'].split(File::PATH_SEPARATOR).each do |path|
@@ -22,23 +22,9 @@ class Release
  exec "twig feature #{ARGV.join ' '}"
  end
  end
- @cf = Aws::CloudFormation::Client.new
- @alb = Aws::ElasticLoadBalancingV2::Client.new
- @ec2 = Aws::EC2::Client.new
- @ecs = Aws::ECS::Client.new
- @batch = Aws::Batch::Client.new
- @asg = Aws::AutoScaling::Client.new
- @s3 = Aws::S3::Client.new
- @s3_bucket = 'prima-artifacts'
- @artifact_path = '/tmp/prima-artifact.zip'
- @import_db_task = 'arn:aws:ecs:eu-west-1:001575623345:task-definition/ecs-task-db-restore2-TaskDefinitionDbRestore-1BXH13XEVLPP0:1'
- @cf_role = 'arn:aws:iam::001575623345:role/qainit-service-role-cloudformat-CloudformationRole-18KBZQIS148R9'
  @dns_record_identifier = nil
  @ecs_cluster_name = nil
  @deploy_update = false
- @qainit = false
- @qainit_host_folder = "/var/ci/#{ENV['DRONE_REPO']}/#{ENV['DRONE_BRANCH']}/#{ENV['DRONE_BUILD_NUMBER']}"
- @qainit_folder = "/drone/src/github.com/project/primait/qainit"
  @projects = {
  'prima' => {},
  'urania' => {},
@@ -47,7 +33,7 @@ class Release
  'hal9000' => {},
  'fidaty' => {},
  'peano' => {},
- 'rogoreport' => {},
+ # 'rogoreport' => {},
  'assange' => {},
  'borat' => {},
  'crash' => {},
@@ -59,11 +45,13 @@ class Release
  'pyxis-npm' => {},
  'starsky' => {},
  'hutch' => {},
- 'maia' => {}
+ 'maia' => {},
+ 'legion' => {}
  }
  @base_stack_name_alb = 'ecs-alb-http-public-qa-'
  @base_stack_name_alb_ws = 'ecs-alb-ws-public-qa-'
  @git_branch = ''
+ @cloudflare = Rubyflare.connect_with(ENV['CLOUDFLARE_EMAIL'], ENV['CLOUDFLARE_APIKEY'])
  end

  def execute!(args)
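
A single Rubyflare client is now built once in the constructor from the CLOUDFLARE_EMAIL/CLOUDFLARE_APIKEY environment variables and shared by every DNS helper, instead of each method reconnecting with credentials from @prima.config. Roughly, assuming the rubyflare gem used by prima-twig is already loaded and the environment variables are set (the query below mirrors the calls that appear later in this diff):

    @cloudflare = Rubyflare.connect_with(ENV['CLOUDFLARE_EMAIL'], ENV['CLOUDFLARE_APIKEY'])

    # Any later helper can reuse the same connection, e.g. to list CNAMEs:
    records = @cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records',
                              { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
    records.body[:result].each { |dns| puts dns[:name] }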
@@ -78,10 +66,6 @@ class Release
  qainit_deploy_shutdown!
  elsif 'update' == args[1]
  qainit_deploy_update!
- elsif 'read' == args[1]
- qainit_read_config! args[2]
- elsif 'minimal' == args[1]
- qainit_minimal_deploy! args[2]
  else
  if args[1]
  select_branches(args[1..-1])
@@ -95,26 +79,11 @@ class Release
  if 'deploy' == args[1]
  suite_py_branches(args[2])
  qainit_deploy!(true)
- else
- qainit_deploy_shutdown!(args[2])
  end
  when 'deploy'
  abort('Non sei nella cartella di artemide') unless Dir.pwd.match 'artemide$'
- if ['terminate', 'stop', 'shutdown', 'halt', 'destroy'].include? args[1]
- deploy_shutdown!
- elsif 'update' == args[1]
- deploy_update!
- elsif 'lock' == args[1]
+ if 'lock' == args[1]
  deploy_lock!
- elsif 'minimal' == args[1]
- qainit_drone_minimal_deploy!
- else
- if args[1]
- select_branches(args[1])
- else
- select_branches
- end
- deploy_feature!
  end
  when 'aggregator'
  if 'enable' == args[1]
@@ -150,7 +119,6 @@ class Release
  output 'Disable aggregator'

  output "Recupero le informazioni relative al puntamento dei record DNS..."
- cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
  output "Recupero le informazioni sui QA attivi..."
  stack_list, envs = get_stacks()

@@ -162,7 +130,7 @@ class Release
  end.is_a?(Aws::CloudFormation::Types::Tag)
  aggregator_enabled
  end[0]
- dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
+ dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
  stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori non stanno puntando ad un QA".red
  change_hostname_priority(env_hash, hostname_pattern_priority())
  dns_to_staging(env_hash)
@@ -178,8 +146,7 @@ class Release
  output 'Enable aggregator'

  output 'Recupero le informazioni relative al puntamento dei record DNS...'
- cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
- dns_records = cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records', { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
+ dns_records = @cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records', { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
  stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori stanno gia' puntando ad un QA".red

  output "Recupero le informazioni sui QA attivi..."
@@ -209,7 +176,7 @@ class Release
  dns_records.body[:result].each do |dns|
  if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
  output "Changing #{dns[:name]} DNS record"
- cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: get_alb_host(@base_stack_name_alb + env_hash[3..8]), proxied: true, ttl: 1})
+ @cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: get_alb_host(@base_stack_name_alb + env_hash[3..8]), proxied: true, ttl: 1})
  end
  end

@@ -268,12 +235,11 @@ class Release

  def dns_to_staging(env_hash)
  output "Recupero le informazioni relative al puntamento dei record DNS..."
- cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
- dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
+ dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
  dns_records.body[:result].each do |dns|
  if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
  output "Changing #{dns[:name]} DNS record"
- cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1})
+ @cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1})
  end
  end
  end
@@ -440,26 +406,20 @@ class Release
  end

  def get_default_branch_name(projects)
- if !projects['prima'][:default_branch]
- return projects['prima'][:name]
- elsif ![nil, 'master'].include? projects['crash'][:name]
- return projects['crash'][:name]
- else
- projects.each_key do |project_key|
- return projects[project_key][:name] if projects[project_key][:name] != 'master'
- end
+ projects.each_key do |project|
+ return projects[project]['name'] if not projects[project]['default_branch']
  end
  end

  def suite_py_branches(args_json)
- args = JSON.parse(args_json)
+ arg_projects = JSON.parse(args_json)

- args['projects'].each_key do |project|
- @projects[project] = { name: args['projects'][project]['branch'], revision: args['projects'][project]['revision'], committer: '', default_branch: false }
- end
+ @projects.merge!(arg_projects)

  @projects.each_key do |project|
- @projects[project] = choose_branch_to_deploy(project, true) unless args['projects'].key? project
+ if @projects[project].empty?
+ @projects[project] = choose_branch_to_deploy(project, true)
+ end
  end
  end

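suite_py_branches now expects the JSON argument to already be keyed by project, with per-project hashes that merge straight into @projects; any project left as an empty hash falls back to the interactive choose_branch_to_deploy. An illustrative shape (the exact payload produced by suite_py is assumed here, inferred from how the string keys are read back later in this file):

    require 'json'

    # Trimmed-down version of the constructor's @projects hash.
    @projects = { 'prima' => {}, 'urania' => {} }

    args_json = '{"prima": {"name": "feature/foo", "revision": "abc1234", "default_branch": false}}'
    @projects.merge!(JSON.parse(args_json))

    @projects['prima']   #=> {"name"=>"feature/foo", "revision"=>"abc1234", "default_branch"=>false}
    @projects['urania']  #=> {}   (still empty, so choose_branch_to_deploy would be asked for it)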
@@ -489,53 +449,19 @@ class Release
  `git checkout -b #{branch_name}`
  end

- branches = ''
  @git_branch = branch_name

- @projects.each_key do |project_key|
- if @projects[project_key][:revision]
- branches += "#{project_key}:#{@projects[project_key][:name]}:#{@projects[project_key][:revision]}:#{@projects[project_key][:default_branch]}\n"
- end
- end
+ File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }

- File.open('branch_names', 'w') { |file| file.write(branches) }
+ update_drone_yml!

  `git add projects && \
- git add branch_names && \
+ git add branch_names .drone.yml && \
  git commit -m '#{branch_name}' && \
  git push -f --set-upstream origin #{branch_name} && \
  git checkout master`
  end

- def qainit_minimal_deploy!(project)
- abort('L\'unico progetto permesso è prima') unless ['prima'].include? project
- project_definition = choose_branch_to_deploy(project)
-
- `git checkout master && git pull && git remote prune origin`
-
- default_name = project_definition[:name]
- output "Inserisci la feature a cui si riferisce il QA: [#{default_name}]".cyan
- feature_number = String(STDIN.gets.chomp)
- feature_number = default_name if feature_number.empty?
-
- if `git branch -l | grep #{feature_number}`.size > 0
- `git checkout #{feature_number} && git pull`
- else
- `git checkout -b #{feature_number}`
- end
-
- # così recupero le informazioni sul branch, poi vado a scrivere il file branch_names con una sola riga
- branch = "#{project}:#{project_definition[:name]}:#{project_definition[:revision]}:#{project_definition[:default_branch]}"
-
- File.open('branch_names', 'w') { |file| file.write(branch) }
-
- `git add projects && \
- git add branch_names && \
- git commit -m 'minimal_#{feature_number}' && \
- git push --set-upstream origin #{feature_number} && \
- git checkout master`
- end
-
  def qainit_deploy_update!
  `git checkout master && git pull`
  # cancelliamo tutti i branch che non sono più sul repo remoto
@@ -560,29 +486,24 @@ class Release
  # aggiornare il commit (revision a cui fa riferimento)

  # leggo il file branch_names / recupero i nomi dei branch / riscrivo tutto
+ projects = ''
  File.open('branch_names', 'r') do |file|
  file.each_line do |line|
- project = line.split(':')
- @projects[project[0]] = select_branch_to_deploy(project[0], project[1])
- @projects[project[0]][:default_branch] = project[3]
+ projects = JSON.parse(line)
  end
  end

- branches = ''
-
- @projects.each_key do |project_key|
- if @projects[project_key][:revision]
- branches += "#{project_key}:#{@projects[project_key][:name]}:#{@projects[project_key][:revision]}:#{@projects[project_key][:default_branch]}"
- end
+ projects.each do |key, project|
+ @projects[key] = select_branch_to_deploy(key, project['name'])
+ @projects[key]['default_branch'] = project['default_branch']
  end

- File.open('branch_names', 'w') { |file| file.write(branches) }
+ File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }

- if `git log -1` =~ /minimal_/
- `git commit -am 'minimal_update'`
- else
- `git commit -am 'update'`
- end
+ update_drone_yml!
+
+ `git add branch_names .drone.yml`
+ `git commit -m 'update'`
  `git push && git checkout master`
  end

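branch_names is no longer a list of colon-separated lines but a single JSON object, so the update path parses it back with JSON.parse, refreshes each entry via select_branch_to_deploy, and rewrites it with JSON.generate. A minimal sketch of that round trip (file contents are illustrative, and reading the whole file at once stands in for the line-by-line loop above):

    require 'json'

    # Read: the file holds one JSON line keyed by project.
    projects = JSON.parse(File.read('branch_names'))

    projects.each do |key, project|
      puts "#{key}: #{project['name']} (default branch: #{project['default_branch']})"
    end

    # Write it back in the same format.
    File.write('branch_names', JSON.generate(projects))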
@@ -657,11 +578,9 @@ class Release
  delete_stack(@base_stack_name_alb + env_hash[3..8]) if stack_exists?(@base_stack_name_alb + env_hash[3..8])
  delete_stack(@base_stack_name_alb_ws + env_hash[3..8]) if stack_exists?(@base_stack_name_alb_ws + env_hash[3..8])
  `git checkout master && git push origin --delete ${DRONE_BRANCH}`
+ output "Cancello il record DNS utilizzato da Lighthouse"
+ delete_lighthouse_dns()
  output "Finito!".green
-
- if @qainit
- qainit_write_output("QA #{ENV["DRONE_BRANCH"]} spento.", 'Indirizzi scritti su ')
- end
  end

  def qainit_write_output(file_message, output_message)
@@ -671,43 +590,16 @@ class Release
  output "#{output_message} #{qa_file_name}".green
  end

- def qainit_read_config!(action)
- File.open('branch_names', 'r') do |file|
- file.each_line do |line|
- project = line.gsub("\n", '').split(':')
- if project[3] == 'true'
- @projects[project[0]] = {:name=> project[1], :revision=> project[2], :default_branch=> true}
- elsif project[3] == 'false'
- @projects[project[0]] = {:name=> project[1], :revision=> project[2], :default_branch=> false}
- end
- end
+ def update_drone_yml!()
+ drone_yml = File.read('.drone.yml')
+ @projects.each do |key, project|
+ drone_yml = drone_yml.gsub(/#{key}@.+\n/, "#{key}@#{project['revision']}\n")
  end
- get_s3_config_files
- @qainit = true
- case action
- when 'shutdown'
- output 'Shutting down'.green
- qainit_drone_shutdown!
- when 'minimal'
- output 'Starting minimal deploy'.green
- qainit_drone_minimal_deploy!
- else
- output 'Starting standard deploy'.green
- deploy_feature!
+ File.open(".drone.yml", "w") do |f|
+ f.write(drone_yml)
  end
  end

- def get_s3_config_files
- # manteniamo la struttura per lanciarlo facilmente anche da locale
- `mkdir -p cloudformation/stacks/task cloudformation/stacks/route53 cloudformation/stacks/asg cloudformation/stacks/elb`
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/task/db.yml', response_target: 'cloudformation/stacks/task/db.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/qa.yml', response_target: 'cloudformation/stacks/route53/qa.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/minimal_qa.yml', response_target: 'cloudformation/stacks/route53/minimal_qa.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/asg/ecs-asg-allinone.yml', response_target: 'cloudformation/stacks/asg/ecs-asg-allinone.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/ecs-cluster.yml', response_target: 'cloudformation/stacks/ecs-cluster.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/elb/alb-public-qa.yml', response_target: 'cloudformation/stacks/elb/alb-public-qa.yml'})
- end
-
  def get_deploy_id
  if @deploy_id
  @deploy_id
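
The new update_drone_yml! keeps .drone.yml pointing at the revisions recorded in @projects by rewriting every "<project>@<revision>" occurrence. A worked example of the substitution (the .drone.yml line shape shown here is hypothetical):

    drone_yml = "  image: prima@1a2b3c4\n  image: hutch@9f8e7d6\n"
    projects  = { 'prima' => { 'revision' => 'abc1234' } }

    projects.each do |key, project|
      # Everything after "<key>@" up to the end of the line is replaced
      # with the new revision; other projects' lines are left untouched.
      drone_yml = drone_yml.gsub(/#{key}@.+\n/, "#{key}@#{project['revision']}\n")
    end

    drone_yml #=> "  image: prima@abc1234\n  image: hutch@9f8e7d6\n"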
@@ -717,1412 +609,6 @@ class Release
  end
  end

- def qainit_drone_minimal_deploy!
721
- # tenere solo il project da deployare (l'unico project è prima)
722
- @ami_id = get_ami_id("ecs-fleet-allinone-staging")
723
- project = ''
724
- @projects.each_key do |project_key|
725
- if @projects[project_key][:revision]
726
- project = project_key
727
- git_checkout_version(project_key, @projects[project_key][:revision])
728
- end
729
- end
730
- deploy_id = get_deploy_id
731
-
732
- @git_branch = ENV['DRONE_BRANCH']
733
- @dns_record_identifier = deploy_id
734
- hostname_pattern_priority = hostname_pattern_priority()
735
- tags = [
736
- {
737
- key: "qainit",
738
- value: @git_branch
739
- },
740
- {
741
- key: project,
742
- value: @projects[project][:name]
743
- },
744
- {
745
- key: "hostname_pattern_priority",
746
- value: hostname_pattern_priority
747
- }
748
- ]
749
-
750
- cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
751
-
752
- if stack_exists?(cluster_stack_name)
753
- tags = get_stack_tags(cluster_stack_name)
754
- hostname_pattern_priority = tags.detect do |tag|
755
- tag.key == 'hostname_pattern_priority'
756
- end.value
757
- end
758
-
759
- stack_name_alb = @base_stack_name_alb + deploy_id[0..5]
760
- stack_name_alb_ws = @base_stack_name_alb_ws + deploy_id[0..5]
761
-
762
- create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
763
- wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
764
-
765
- create_alb_stack(stack_name_alb, "http", deploy_id, 'qa-minimal') unless stack_exists?(stack_name_alb)
766
- create_alb_stack(stack_name_alb_ws, "websocket", deploy_id, 'qa-minimal') unless stack_exists?(stack_name_alb_ws)
767
-
768
- resp = @cf.describe_stack_resource({stack_name: cluster_stack_name, logical_resource_id: 'ECSCluster'})
769
- @ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
770
-
771
- asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
772
- create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
773
-
774
- deploy_id = get_deploy_id
775
- create_pyxis_artifact(@projects["pyxis-npm"][:revision], deploy_id)
776
- create_prima_artifact(@projects["prima"][:revision], @projects["prima"][:name], deploy_id, true) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"][:revision]}.tar.gz")
777
-
778
- wait_for_stack_ready(stack_name_alb) unless stack_ready?(stack_name_alb)
779
- wait_for_stack_ready(stack_name_alb_ws) unless stack_ready?(stack_name_alb_ws)
780
-
781
- stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
782
- stack_body = IO.read('cloudformation/stacks/route53/minimal_qa.yml')
783
- parameters = [
784
- {
785
- parameter_key: "DnsRecordIdentifier",
786
- parameter_value: @dns_record_identifier
787
- },
788
- {
789
- parameter_key: "PrimaElbHostname",
790
- parameter_value: get_alb_host(stack_name_alb)
791
- },
792
- {
793
- parameter_key: 'CrashElbHostname',
794
- parameter_value: get_alb_host(stack_name_alb_ws)
795
- }
796
- ]
797
-
798
- create_stack(stack_name_route53, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_route53)
799
- wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
800
-
801
- stack_name_web = "ecs-task-web-qa-#{deploy_id}"
802
- git_checkout_version('prima', @projects["prima"][:revision])
803
- stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
804
- parameters = [
805
- {
806
- parameter_key: "Environment",
807
- parameter_value: "qa-minimal"
808
- },
809
- {
810
- parameter_key: "ReleaseVersion",
811
- parameter_value: "#{@projects["prima"][:revision]}"
812
- },
813
- {
814
- parameter_key: "TaskDesiredCount",
815
- parameter_value: "1"
816
- },
817
- {
818
- parameter_key: "ECSClusterName",
819
- parameter_value: @ecs_cluster_name
820
- },
821
- {
822
- parameter_key: "ALBShortName",
823
- parameter_value: "web-qa-#{deploy_id}"[0..27]
824
- },
825
- {
826
- parameter_key: "WebQaBaseHostname",
827
- parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
828
- },
829
- {
830
- parameter_key: "HostnamePattern",
831
- parameter_value: "ww*-#{@dns_record_identifier}.qa.colaster.com"
832
- },
833
- {
834
- parameter_key: "HostnamePatternPriority",
835
- parameter_value: hostname_pattern_priority
836
- },
837
- {
838
- parameter_key: "HostnamePatternAggregatorPriority",
839
- parameter_value: (hostname_pattern_priority.to_i + 1).to_s
840
- },
841
- {
842
- parameter_key: "EnvHash",
843
- parameter_value: deploy_id
844
- },
845
- {
846
- parameter_key: "AssangeHostname",
847
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
848
- },
849
- {
850
- parameter_key: "BackofficeHostname",
851
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
852
- },
853
- {
854
- parameter_key: "WebHostname",
855
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
856
- },
857
- {
858
- parameter_key: "FePrimaDomain",
859
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
860
- },
861
- {
862
- parameter_key: "HostnamePattern",
863
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
864
- }
865
- ]
866
- if stack_exists?(stack_name_web)
867
- cur_version = get_currently_deployed_version(stack_name_web)
868
- update_stack(stack_name_web, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"][:revision])
869
- else
870
- create_stack(stack_name_web, stack_body, parameters, tags, @cf_role)
871
- end
872
- wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
873
- update_service_defaults(stack_name_web)
874
- prima_hostname = get_route53_hostname(stack_name_web)
875
-
876
- projects_text = "
877
- > Prima url: https://#{prima_hostname}
878
- > SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
879
-
880
- output projects_text.cyan
881
- output "Deploy effettuato, everything is awesome!\n".green
882
- if @qainit
883
- qainit_write_output(projects_text, 'Indirizzi scritti su ')
884
- end
885
- end
886
-
887
- def deploy_feature!
888
- `git pull && git submodule init && git submodule update`
889
- @ami_id = get_ami_id("ecs-fleet-allinone-staging")
890
- deploy_id = get_deploy_id
891
- stack_name_alb = 'ecs-alb-http-public-qa-' + deploy_id[0..5]
892
- stack_name_alb_ws = 'ecs-alb-ws-public-qa-' + deploy_id[0..5]
893
- unless @qainit
894
- @projects.each_key do |project_key|
895
- if @projects[project_key][:revision]
896
- git_checkout_version(project_key, @projects[project_key][:revision])
897
- end
898
- end
899
- end
900
- @dns_record_identifier = deploy_id
901
- @git_branch = ENV['DRONE_BRANCH']
902
- hostname_pattern_priority = hostname_pattern_priority()
903
- tags = [
904
- {
905
- key: "qainit",
906
- value: @git_branch
907
- },
908
- {
909
- key: "hostname_pattern_priority",
910
- value: hostname_pattern_priority
911
- }
912
- ]
913
- @projects.each do |key, value|
914
- case key.to_s
915
- when 'crash'
916
- tags << { key: 'crash', value: @projects['crash'][:name] } if deploy_crash?
917
- when 'starsky', 'hutch', 'maia'
918
- tags << { key: key.to_s, value: @projects[key.to_s][:name] } if deploy_starsky_hutch_maia?
919
- else
920
- tags << { key: key, value: value[:name] }
921
- end
922
- end
923
-
924
- cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
925
-
926
- if stack_exists?(cluster_stack_name)
927
- tags = get_stack_tags(cluster_stack_name)
928
- hostname_pattern_priority = tags.detect do |tag|
929
- tag.key == 'hostname_pattern_priority'
930
- end.value
931
- end
932
-
933
- create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
934
- wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
935
-
936
- create_alb_stack(stack_name_alb, "http", deploy_id) unless stack_exists?(stack_name_alb)
937
- create_alb_stack(stack_name_alb_ws, "websocket", deploy_id) unless stack_exists?(stack_name_alb_ws)
938
-
939
- resp = @cf.describe_stack_resource({stack_name: cluster_stack_name, logical_resource_id: 'ECSCluster'})
940
- @ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
941
-
942
- asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
943
- create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
944
-
945
- stack_name_db = "ecs-task-db-qa-#{deploy_id}"
946
- stack_body = IO.read('cloudformation/stacks/task/db.yml')
947
- parameters = [
948
- {
949
- parameter_key: "Environment",
950
- parameter_value: "qa"
951
- },
952
- {
953
- parameter_key: "ECSClusterName",
954
- parameter_value: @ecs_cluster_name
955
- }
956
- ]
957
- create_stack(stack_name_db, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_db) # creazione asincrona stack dei db (~4 min)
958
-
959
- output "check pyxis \n".yellow
960
-
961
- create_pyxis_artifact(@projects["pyxis-npm"][:revision], deploy_id) unless @projects["pyxis-npm"].nil? # deve essere creato prima di quello di prima, per avere la versione
962
- create_prima_artifact(@projects["prima"][:revision], @projects["prima"][:name], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"][:revision]}.tar.gz")
963
- # l'artefatto di prima viene creato sempre (puntamenti all'ambiente compilati nel js) e richiede molto più di 4 minuti
964
- wait_for_stack_ready(stack_name_db) unless stack_ready?(stack_name_db) # dovrebbe essere istantaneo
965
- db_task = ''
966
- db_task = import_dbs(ec2_ip_address(asg_stack_name)) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # import asincrono dei dati
967
-
968
- create_crash_artifact(@projects['crash'][:revision], deploy_id) unless !deploy_crash? || artifact_exists?('prima-artifacts-encrypted', "microservices/crash/#{@projects['crash'][:revision]}-qa.tar.gz")
969
- create_urania_artifact(@projects["urania"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/urania/#{@projects["urania"][:revision]}-qa.tar.gz")
970
- create_roger_artifact(@projects["roger"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/roger/#{@projects["roger"][:revision]}-qa.tar.gz")
971
- create_ermes_artifact(@projects["ermes"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/ermes/#{@projects["ermes"][:revision]}-qa.tar.gz")
972
- create_bburago_artifact(@projects["bburago"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/bburago/#{@projects["bburago"][:revision]}-qa.tar.gz")
973
- create_hal9000_artifact(@projects["hal9000"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/hal9000/#{@projects["hal9000"][:revision]}-qa.tar.gz")
974
- create_rachele_artifact(@projects["rachele"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rachele/#{@projects["rachele"][:revision]}-qa.tar.gz")
975
- create_fidaty_artifact(@projects["fidaty"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/fidaty/#{@projects["fidaty"][:revision]}-qa.tar.gz")
976
- create_peano_artifact(@projects["peano"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/peano/#{@projects["peano"][:revision]}-qa.tar.gz")
977
- create_rogoreport_artifact(@projects["rogoreport"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rogoreport/rogoreport-#{@projects["rogoreport"][:revision]}-qa.tar.gz")
978
- create_assange_artifact(@projects["assange"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/assange/#{@projects["assange"][:revision]}-qa.tar.gz")
979
- create_borat_artifact(@projects["borat"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/borat/#{@projects["borat"][:revision]}-qa.tar.gz")
980
- create_activia_artifact(@projects["activia"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/activia/#{@projects["activia"][:revision]}-qa.tar.gz")
981
- create_leftorium_artifact(@projects["leftorium"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/leftorium/#{@projects["leftorium"][:revision]}-qa.tar.gz")
982
- create_skynet_artifact(@projects["skynet"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/skynet/#{@projects["skynet"][:revision]}-qa.tar.gz")
983
- create_starsky_artifact(@projects["starsky"][:revision]) unless !deploy_starsky_hutch_maia? || artifact_exists?('prima-artifacts-encrypted', "microservices/starsky/#{@projects["starsky"][:revision]}-qa.tar.gz")
984
- create_hutch_artifact(@projects["hutch"][:revision]) unless !deploy_starsky_hutch_maia? || artifact_exists?('prima-artifacts-encrypted', "microservices/hutch/#{@projects["hutch"][:revision]}-qa.tar.gz")
985
- create_maia_artifact(@projects["maia"][:revision]) unless !deploy_starsky_hutch_maia? || artifact_exists?('prima-artifacts-encrypted', "microservices/maia/#{@projects["maia"][:revision]}-qa.tar.gz")
986
-
987
- wait_for_db_import(db_task) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # dovrebbe essere istantaneo
988
-
989
- import_redis_crash(ec2_ip_address(asg_stack_name)) if deploy_crash? && !stack_exists?("ecs-task-web-qa-#{deploy_id}")
990
-
991
- wait_for_stack_ready(stack_name_alb) unless stack_ready?(stack_name_alb)
992
- wait_for_stack_ready(stack_name_alb_ws) unless stack_ready?(stack_name_alb_ws)
993
-
994
- stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
995
- stack_body = IO.read('cloudformation/stacks/route53/qa.yml')
996
- parameters = [
997
- {
998
- parameter_key: "DnsRecordIdentifier",
999
- parameter_value: @dns_record_identifier
1000
- },
1001
- {
1002
- parameter_key: "PrimaElbHostname",
1003
- parameter_value: get_alb_host(stack_name_alb)
1004
- },
1005
- {
1006
- parameter_key: "UraniaIp",
1007
- parameter_value: ec2_ip_address(asg_stack_name)
1008
- },
1009
- {
1010
- parameter_key: "BburagoIp",
1011
- parameter_value: ec2_ip_address(asg_stack_name)
1012
- },
1013
- {
1014
- parameter_key: "Hal9000Ip",
1015
- parameter_value: ec2_ip_address(asg_stack_name)
1016
- },
1017
- {
1018
- parameter_key: "FidatyIp",
1019
- parameter_value: ec2_ip_address(asg_stack_name)
1020
- },
1021
- {
1022
- parameter_key: "PeanoIp",
1023
- parameter_value: ec2_ip_address(asg_stack_name)
1024
- },
1025
- {
1026
- parameter_key: "ErmesIp",
1027
- parameter_value: ec2_ip_address(asg_stack_name)
1028
- },
1029
- {
1030
- parameter_key: "ActiviaIp",
1031
- parameter_value: ec2_ip_address(asg_stack_name)
1032
- },
1033
- {
1034
- parameter_key: "SkynetIp",
1035
- parameter_value: ec2_ip_address(asg_stack_name)
1036
- },
1037
- {
1038
- parameter_key: "RogerIp",
1039
- parameter_value: ec2_ip_address(asg_stack_name)
1040
- },
1041
- {
1042
- parameter_key: "LeftoriumIp",
1043
- parameter_value: ec2_ip_address(asg_stack_name)
1044
- },
1045
- {
1046
- parameter_key: "RacheleIp",
1047
- parameter_value: ec2_ip_address(asg_stack_name)
1048
- },
1049
- {
1050
- parameter_key: "RedisIp",
1051
- parameter_value: ec2_ip_address(asg_stack_name)
1052
- },
1053
- {
1054
- parameter_key: "AssangeElbHostname",
1055
- parameter_value: get_alb_host(stack_name_alb)
1056
- },
1057
- {
1058
- parameter_key: "BoratElbHostname",
1059
- parameter_value: get_alb_host(stack_name_alb_ws)
1060
- },
1061
- {
1062
- parameter_key: 'CrashElbHostname',
1063
- parameter_value: get_alb_host(stack_name_alb_ws)
1064
- },
1065
- {
1066
- parameter_key: 'StarskyElbHostname',
1067
- parameter_value: get_alb_host(stack_name_alb)
1068
- },
1069
- {
1070
- parameter_key: 'HutchElbHostname',
1071
- parameter_value: get_alb_host(stack_name_alb)
1072
- },
1073
- {
1074
- parameter_key: 'MaiaElbHostname',
1075
- parameter_value: get_alb_host(stack_name_alb)
1076
- }
1077
- ]
1078
-
1079
- create_stack(stack_name_route53, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_route53)
1080
- wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
1081
-
1082
- stack_name_skynet = "ecs-task-skynet-qa-#{deploy_id}"
1083
- git_checkout_version('skynet', @projects["skynet"][:revision])
1084
- stack_body = File.read('projects/skynet/deploy/task.yml')
1085
- parameters = [
1086
- {
1087
- parameter_key: "Environment",
1088
- parameter_value: "qa"
1089
- },
1090
- {
1091
- parameter_key: "ReleaseVersion",
1092
- parameter_value: @projects["skynet"][:revision]
1093
- },
1094
- {
1095
- parameter_key: "TaskDesiredCount",
1096
- parameter_value: "1"
1097
- },
1098
- {
1099
- parameter_key: "ECSClusterName",
1100
- parameter_value: @ecs_cluster_name
1101
- },
1102
- {
1103
- parameter_key: "HostnamePattern",
1104
- parameter_value: "skynet-#{@dns_record_identifier}.qa.colaster.com"
1105
- },
1106
- {
1107
- parameter_key: "HostnamePatternPriority",
1108
- parameter_value: hostname_pattern_priority
1109
- }
1110
- ]
1111
- if stack_exists?(stack_name_skynet)
1112
- cur_version = get_currently_deployed_version(stack_name_skynet)
1113
- update_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["skynet"][:revision])
1114
- else
1115
- create_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role)
1116
- end
1117
-
1118
- stack_name_urania = "ecs-task-urania-qa-#{deploy_id}"
1119
- git_checkout_version('urania', @projects["urania"][:revision])
1120
- stack_body = File.read('projects/urania/deploy/task.yml')
1121
- parameters = [
1122
- {
1123
- parameter_key: "Environment",
1124
- parameter_value: "qa"
1125
- },
1126
- {
1127
- parameter_key: "ReleaseVersion",
1128
- parameter_value: @projects["urania"][:revision]
1129
- },
1130
- {
1131
- parameter_key: "TaskDesiredCount",
1132
- parameter_value: "1"
1133
- },
1134
- {
1135
- parameter_key: "ECSClusterName",
1136
- parameter_value: @ecs_cluster_name
1137
- },
1138
- {
1139
- parameter_key: "HostnamePattern",
1140
- parameter_value: "urania-#{@dns_record_identifier}.qa.colaster.com"
1141
- },
1142
- {
1143
- parameter_key: "HostnamePatternPriority",
1144
- parameter_value: hostname_pattern_priority
1145
- }
1146
- ]
1147
- if stack_exists?(stack_name_urania)
1148
- cur_version = get_currently_deployed_version(stack_name_urania)
1149
- update_stack(stack_name_urania, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["urania"][:revision])
1150
- else
1151
- create_stack(stack_name_urania, stack_body, parameters, tags, @cf_role)
1152
- end
1153
-
1154
- stack_name_ermes = "ecs-task-ermes-qa-#{deploy_id}"
1155
- git_checkout_version('ermes', @projects["ermes"][:revision])
1156
- stack_body = File.read('projects/ermes/deploy/task.yml')
1157
- parameters = [
1158
- {
1159
- parameter_key: "Environment",
1160
- parameter_value: "qa"
1161
- },
1162
- {
1163
- parameter_key: "ReleaseVersion",
1164
- parameter_value: "#{@projects['ermes'][:revision]}"
1165
- },
1166
- {
1167
- parameter_key: "TaskDesiredCount",
1168
- parameter_value: "1"
1169
- },
1170
- {
1171
- parameter_key: "ECSClusterName",
1172
- parameter_value: @ecs_cluster_name
1173
- },
1174
- {
1175
- parameter_key: "HostnamePattern",
1176
- parameter_value: "ermes-#{@dns_record_identifier}.qa.colaster.com"
1177
- },
1178
- {
1179
- parameter_key: "HostnamePatternPriority",
1180
- parameter_value: hostname_pattern_priority
1181
- },
1182
- {
1183
- parameter_key: "WebHost",
1184
- parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
1185
- },
1186
- {
1187
- parameter_key: "PeanoHost",
1188
- parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
1189
- }
1190
- ]
1191
- if stack_exists?(stack_name_ermes)
1192
- cur_version = get_currently_deployed_version(stack_name_ermes)
1193
- update_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["ermes"][:revision])
1194
- else
1195
- create_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role)
1196
- end
1197
-
1198
- stack_name_bburago = "ecs-task-bburago-qa-#{deploy_id}"
1199
- git_checkout_version('bburago', @projects["bburago"][:revision])
1200
- stack_body = File.read('projects/bburago/deploy/task.yml')
1201
- parameters = [
1202
- {
1203
- parameter_key: "Environment",
1204
- parameter_value: "qa"
1205
- },
1206
- {
1207
- parameter_key: "ReleaseVersion",
1208
- parameter_value: @projects["bburago"][:revision]
1209
- },
1210
- {
1211
- parameter_key: "ECSClusterName",
1212
- parameter_value: @ecs_cluster_name
1213
- },
1214
- {
1215
- parameter_key: "TaskDesiredCount",
1216
- parameter_value: "1"
1217
- },
1218
- {
1219
- parameter_key: "HostnamePattern",
1220
- parameter_value: "bburago-#{@dns_record_identifier}.qa.colaster.com"
1221
- },
1222
- {
1223
- parameter_key: "HostnamePatternPriority",
1224
- parameter_value: hostname_pattern_priority
1225
- }
1226
- ]
1227
- if stack_exists?(stack_name_bburago)
1228
- cur_version = get_currently_deployed_version(stack_name_bburago)
1229
- update_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["bburago"][:revision])
1230
- else
1231
- create_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role)
1232
- end
1233
-
1234
- stack_name_hal9000 = "ecs-task-hal9000-qa-#{deploy_id}"
1235
- git_checkout_version('hal9000', @projects["hal9000"][:revision])
1236
- stack_body = File.read('projects/hal9000/deploy/task.yml')
1237
- parameters = [
1238
- {
1239
- parameter_key: "Environment",
1240
- parameter_value: "qa"
1241
- },
1242
- {
1243
- parameter_key: "ReleaseVersion",
1244
- parameter_value: @projects["hal9000"][:revision]
1245
- },
1246
- {
1247
- parameter_key: "ECSClusterName",
1248
- parameter_value: @ecs_cluster_name
1249
- },
1250
- {
1251
- parameter_key: "TaskDesiredCount",
1252
- parameter_value: "1"
1253
- },
1254
- {
1255
- parameter_key: "HostnamePattern",
1256
- parameter_value: "hal9000-#{@dns_record_identifier}.qa.colaster.com"
1257
- },
1258
- {
1259
- parameter_key: "HostnamePatternPriority",
1260
- parameter_value: hostname_pattern_priority
1261
- }
1262
- ]
1263
- if stack_exists?(stack_name_hal9000)
1264
- cur_version = get_currently_deployed_version(stack_name_hal9000)
1265
- update_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hal9000"][:revision])
1266
- else
1267
- create_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role)
1268
- end
1269
-
1270
- stack_name_fidaty = "ecs-task-fidaty-qa-#{deploy_id}"
1271
- git_checkout_version('fidaty', @projects["fidaty"][:revision])
1272
- stack_body = File.read('projects/fidaty/deploy/task.yml')
1273
- parameters = [
1274
- {
1275
- parameter_key: "Environment",
1276
- parameter_value: "qa"
1277
- },
1278
- {
1279
- parameter_key: "ReleaseVersion",
1280
- parameter_value: "#{@projects["fidaty"][:revision]}"
1281
- },
1282
- {
1283
- parameter_key: "ECSClusterName",
1284
- parameter_value: @ecs_cluster_name
1285
- },
1286
- {
1287
- parameter_key: "TaskDesiredCount",
1288
- parameter_value: "1"
1289
- },
1290
- {
1291
- parameter_key: "HostnamePattern",
1292
- parameter_value: "fidaty-#{@dns_record_identifier}.qa.colaster.com"
1293
- },
1294
- {
1295
- parameter_key: "HostnamePatternPriority",
1296
- parameter_value: hostname_pattern_priority
1297
- },
1298
- {
1299
- parameter_key: "PeanoHost",
1300
- parameter_value: get_route53_hostname("ecs-task-peano-qa-notneeded")
1301
- }
1302
- ]
1303
- if stack_exists?(stack_name_fidaty)
1304
- cur_version = get_currently_deployed_version(stack_name_fidaty)
1305
- update_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["fidaty"][:revision])
1306
- else
1307
- create_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role)
1308
- end
1309
-
1310
- stack_name_peano = "ecs-task-peano-qa-#{deploy_id}"
1311
- git_checkout_version('peano', @projects["peano"][:revision])
1312
- stack_body = File.read('projects/peano/deploy/task.yml')
1313
- parameters = [
1314
- {
1315
- parameter_key: "Environment",
1316
- parameter_value: "qa"
1317
- },
1318
- {
1319
- parameter_key: "ReleaseVersion",
1320
- parameter_value: "#{@projects['peano'][:revision]}"
1321
- },
1322
- {
1323
- parameter_key: "ECSClusterName",
1324
- parameter_value: @ecs_cluster_name
1325
- },
1326
- {
1327
- parameter_key: "TaskDesiredCount",
1328
- parameter_value: "1"
1329
- },
1330
- {
1331
- parameter_key: "HostnamePattern",
1332
- parameter_value: "peano-#{@dns_record_identifier}.qa.colaster.com"
1333
- },
1334
- {
1335
- parameter_key: "HostnamePatternPriority",
1336
- parameter_value: hostname_pattern_priority
1337
- },
1338
- {
1339
- parameter_key: "WebHost",
1340
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1341
- },
1342
- {
1343
- parameter_key: "AssangeHost",
1344
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1345
- }
1346
- ]
1347
- if stack_exists?(stack_name_peano)
1348
- cur_version = get_currently_deployed_version(stack_name_peano)
1349
- update_stack(stack_name_peano, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["peano"][:revision])
1350
- else
1351
- create_stack(stack_name_peano, stack_body, parameters, tags, @cf_role)
1352
- end
1353
-
1354
- stack_name_rogoreport = "ecs-task-rogoreport-qa-#{deploy_id}"
1355
- git_checkout_version('rogoreport', @projects["rogoreport"][:revision])
1356
- stack_body = IO.read('projects/rogoreport/deploy/task.yml')
1357
- parameters = [
1358
- {
1359
- parameter_key: "Environment",
1360
- parameter_value: "qa"
1361
- },
1362
- {
1363
- parameter_key: "ReleaseVersion",
1364
- parameter_value: "#{@projects["rogoreport"][:revision]}"
1365
- },
1366
- {
1367
- parameter_key: "ReleaseName",
1368
- parameter_value: "rogoreport"
1369
- },
1370
- {
1371
- parameter_key: "ECSClusterName",
1372
- parameter_value: @ecs_cluster_name
1373
- }
1374
- ]
1375
- if stack_exists?(stack_name_rogoreport)
1376
- cur_version = get_currently_deployed_version(stack_name_rogoreport)
1377
- update_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["rogoreport"][:revision])
1378
- else
1379
- create_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role)
1380
- end
1381
-
1382
- stack_name_assange = "ecs-task-assange-qa-#{deploy_id}"
1383
- git_checkout_version('assange', @projects["assange"][:revision])
1384
- stack_body = IO.read('projects/assange/deploy/task.yml')
1385
- parameters = [
1386
- {
1387
- parameter_key: "Environment",
1388
- parameter_value: "qa"
1389
- },
1390
- {
1391
- parameter_key: "ReleaseVersion",
1392
- parameter_value: "#{@projects["assange"][:revision]}"
1393
- },
1394
- {
1395
- parameter_key: "ECSClusterName",
1396
- parameter_value: @ecs_cluster_name
1397
- },
1398
- {
1399
- parameter_key: "TaskDesiredCount",
1400
- parameter_value: "1"
1401
- },
1402
- {
1403
- parameter_key: "ALBShortName",
1404
- parameter_value: "assange-qa-#{deploy_id}"[0..27]
1405
- },
1406
- {
1407
- parameter_key: "HostnamePattern",
1408
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1409
- },
1410
- {
1411
- parameter_key: "HostnamePatternPriority",
1412
- parameter_value: (hostname_pattern_priority.to_i + 20).to_s
1413
- },
1414
- {
1415
- parameter_key: "EnvHash",
1416
- parameter_value: deploy_id
1417
- },
1418
- {
1419
- parameter_key: "WebHost",
1420
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1421
- },
1422
- {
1423
- parameter_key: "AssangeHost",
1424
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1425
- }
1426
- ]
1427
- if stack_exists?(stack_name_assange)
1428
- cur_version = get_currently_deployed_version(stack_name_assange)
1429
- update_stack(stack_name_assange, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["assange"][:revision])
1430
- else
1431
- create_stack(stack_name_assange, stack_body, parameters, tags, @cf_role)
1432
- end
1433
-
1434
- stack_name_leftorium = "ecs-task-leftorium-qa-#{deploy_id}"
1435
- git_checkout_version('leftorium', @projects["leftorium"][:revision])
1436
- stack_body = File.read('projects/leftorium/deploy/task.yml')
1437
- parameters = [
1438
- {
1439
- parameter_key: "Environment",
1440
- parameter_value: "qa"
1441
- },
1442
- {
1443
- parameter_key: "ReleaseVersion",
1444
- parameter_value: "#{@projects["leftorium"][:revision]}"
1445
- },
1446
- {
1447
- parameter_key: "ECSClusterName",
1448
- parameter_value: @ecs_cluster_name
1449
- },
1450
- {
1451
- parameter_key: "TaskDesiredCount",
1452
- parameter_value: "1"
1453
- },
1454
- {
1455
- parameter_key: "HostnamePattern",
1456
- parameter_value: "leftorium-#{@dns_record_identifier}.qa.colaster.com"
1457
- },
1458
- {
1459
- parameter_key: "HostnamePatternPriority",
1460
- parameter_value: hostname_pattern_priority
1461
- }
1462
- ]
1463
- if stack_exists?(stack_name_leftorium)
1464
- cur_version = get_currently_deployed_version(stack_name_leftorium)
1465
- update_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["leftorium"][:revision])
1466
- else
1467
- create_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role)
1468
- end
1469
-
1470
- stack_name_rachele = "ecs-task-rachele-qa-#{deploy_id}"
1471
- git_checkout_version('rachele', @projects["rachele"][:revision])
1472
- stack_body = File.read('projects/rachele/deploy/task.yml')
1473
- parameters = [
1474
- {
1475
- parameter_key: "Environment",
1476
- parameter_value: "qa"
1477
- },
1478
- {
1479
- parameter_key: "ReleaseVersion",
1480
- parameter_value: "#{@projects["rachele"][:revision]}"
1481
- },
1482
- {
1483
- parameter_key: "ECSClusterName",
1484
- parameter_value: @ecs_cluster_name
1485
- },
1486
- {
1487
- parameter_key: "TaskDesiredCount",
1488
- parameter_value: "1"
1489
- },
1490
- {
1491
- parameter_key: "WebHost",
1492
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1493
- },
1494
- {
1495
- parameter_key: "HostnamePattern",
1496
- parameter_value: "rachele-#{@dns_record_identifier}.qa.colaster.com"
1497
- },
1498
- {
1499
- parameter_key: "HostnamePatternPriority",
1500
- parameter_value: hostname_pattern_priority
1501
- }
1502
- ]
1503
- if stack_exists?(stack_name_rachele)
1504
- cur_version = get_currently_deployed_version(stack_name_rachele)
1505
- update_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["rachele"][:revision])
1506
- else
1507
- create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1508
- end
1509
-
1510
- stack_name_borat = "ecs-task-borat-qa-#{deploy_id}"
1511
- git_checkout_version('borat', @projects["borat"][:revision])
1512
- stack_body = IO.read('projects/borat/deploy/task.yml')
1513
- parameters = [
1514
- {
1515
- parameter_key: "Environment",
1516
- parameter_value: "qa"
1517
- },
1518
- {
1519
- parameter_key: "ReleaseVersion",
1520
- parameter_value: "#{@projects["borat"][:revision]}"
1521
- },
1522
- {
1523
- parameter_key: "ECSClusterName",
1524
- parameter_value: @ecs_cluster_name
1525
- },
1526
- {
1527
- parameter_key: "TaskDesiredCount",
1528
- parameter_value: "1"
1529
- },
1530
- {
1531
- parameter_key: "ALBShortName",
1532
- parameter_value: "borat-qa-#{deploy_id}"[0..27]
1533
- },
1534
- {
1535
- parameter_key: "HostnamePattern",
1536
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1537
- },
1538
- {
1539
- parameter_key: "HostnamePatternPriority",
1540
- parameter_value: (hostname_pattern_priority.to_i + 30).to_s
1541
- },
1542
- {
1543
- parameter_key: "EnvHash",
1544
- parameter_value: deploy_id
1545
- },
1546
- {
1547
- parameter_key: "WsEndpoint",
1548
- parameter_value: "wss://backoffice-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1549
- },
1550
- {
1551
- parameter_key: "GraphqlEndpoint",
1552
- parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql"
1553
- },
1554
- {
1555
- parameter_key: "AuthEndpoint",
1556
- parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/auth"
1557
- },
1558
- {
1559
- parameter_key: "FrontendEndpoint",
1560
- parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1561
- }
1562
- ]
1563
- if stack_exists?(stack_name_borat)
1564
- cur_version = get_currently_deployed_version(stack_name_borat)
1565
- update_stack(stack_name_borat, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["borat"][:revision])
1566
- else
1567
- create_stack(stack_name_borat, stack_body, parameters, tags, @cf_role)
1568
- end
1569
-
1570
- if deploy_crash?
1571
- git_checkout_version('crash', @projects['crash'][:revision])
1572
- stack_name_crash = "ecs-task-crash-qa-#{deploy_id}"
1573
- stack_body = IO.read('projects/crash/deploy/task.yml')
1574
- parameters = [
1575
- {
1576
- parameter_key: 'Environment',
1577
- parameter_value: 'qa'
1578
- },
1579
- {
1580
- parameter_key: 'ReleaseVersion',
1581
- parameter_value: "#{@projects['crash'][:revision]}"
1582
- },
1583
- {
1584
- parameter_key: 'TaskDesiredCount',
1585
- parameter_value: '1'
1586
- },
1587
- {
1588
- parameter_key: 'ECSClusterName',
1589
- parameter_value: @ecs_cluster_name
1590
- },
1591
- {
1592
- parameter_key: 'ALBShortName',
1593
- parameter_value: "crash-qa-#{deploy_id}"[0..27]
1594
- },
1595
- {
1596
- parameter_key: 'HostnamePattern',
1597
- parameter_value: "crash-#{@dns_record_identifier}.qa.colaster.com"
1598
- },
1599
- {
1600
- parameter_key: 'HostnamePatternPriority',
1601
- parameter_value: (hostname_pattern_priority.to_i + 10).to_s
1602
- },
1603
- {
1604
- parameter_key: "EnvHash",
1605
- parameter_value: deploy_id
1606
- },
1607
- {
1608
- parameter_key: "WsEndpoint",
1609
- parameter_value: "wss://crash-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1610
- },
1611
- {
1612
- parameter_key: "GraphqlEndpoint",
1613
- parameter_value: "https://crash-#{@dns_record_identifier}.qa.colaster.com/graphql"
1614
- },
1615
- {
1616
- parameter_key: "AuthDomain",
1617
- parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1618
- },
1619
- ]
1620
- if stack_exists?(stack_name_crash)
1621
- cur_version = get_currently_deployed_version(stack_name_crash)
1622
- update_stack(stack_name_crash, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["crash"][:revision])
1623
- else
1624
- create_stack(stack_name_crash, stack_body, parameters, tags, @cf_role)
1625
- end
1626
- end
1627
-
1628
- if deploy_starsky_hutch_maia?
1629
- stack_name_starsky = "ecs-task-starsky-qa-#{deploy_id}"
1630
- git_checkout_version('starsky', @projects["starsky"][:revision])
1631
- stack_body = IO.read('projects/starsky/deploy/task.yml')
1632
- parameters = [
1633
- {
1634
- parameter_key: "Environment",
1635
- parameter_value: "qa"
1636
- },
1637
- {
1638
- parameter_key: "ReleaseVersion",
1639
- parameter_value: "#{@projects["starsky"][:revision]}"
1640
- },
1641
- {
1642
- parameter_key: "TaskDesiredCount",
1643
- parameter_value: "1"
1644
- },
1645
- {
1646
- parameter_key: "ECSClusterName",
1647
- parameter_value: @ecs_cluster_name
1648
- },
1649
- {
1650
- parameter_key: "ALBShortName",
1651
- parameter_value: "starsky-qa-#{deploy_id}"[0..27]
1652
- },
1653
- {
1654
- parameter_key: "EnvHash",
1655
- parameter_value: deploy_id
1656
- },
1657
- {
1658
- parameter_key: "HostnamePattern",
1659
- parameter_value: "starsky-#{@dns_record_identifier}.qa.colaster.com"
1660
- },
1661
- {
1662
- parameter_key: "HostnamePatternPriority",
1663
- parameter_value: (hostname_pattern_priority.to_i + 74).to_s
1664
- }
1665
- ]
1666
- if stack_exists?(stack_name_starsky)
1667
- cur_version = get_currently_deployed_version(stack_name_starsky)
1668
- update_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["starsky"][:revision])
1669
- else
1670
- create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
1671
- end
1672
- end
1673
-
1674
- stack_name_activia = "ecs-task-activia-qa-#{deploy_id}"
1675
- git_checkout_version('activia', @projects["activia"][:revision])
1676
- stack_body = File.read('projects/activia/deploy/task.yml')
1677
- parameters = [
1678
- {
1679
- parameter_key: "Environment",
1680
- parameter_value: "qa"
1681
- },
1682
- {
1683
- parameter_key: "ReleaseVersion",
1684
- parameter_value: "#{@projects["activia"][:revision]}"
1685
- },
1686
- {
1687
- parameter_key: "ECSClusterName",
1688
- parameter_value: @ecs_cluster_name
1689
- },
1690
- {
1691
- parameter_key: "TaskDesiredCount",
1692
- parameter_value: "1"
1693
- },
1694
- {
1695
- parameter_key: "HostnamePattern",
1696
- parameter_value: "activia-#{@dns_record_identifier}.qa.colaster.com"
1697
- },
1698
- {
1699
- parameter_key: "HostnamePatternPriority",
1700
- parameter_value: hostname_pattern_priority
1701
- },
1702
- {
1703
- parameter_key: "WebHost",
1704
- parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
1705
- },
1706
- {
1707
- parameter_key: "PeanoHost",
1708
- parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
1709
- }
1710
- ]
1711
- if stack_exists?(stack_name_activia)
1712
- cur_version = get_currently_deployed_version(stack_name_activia)
1713
- update_stack(stack_name_activia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["activia"][:revision])
1714
- else
1715
- create_stack(stack_name_activia, stack_body, parameters, tags, @cf_role)
1716
- end
1717
-
1718
- # Waiting for prima healthcheck dependencies
1719
- wait_for_stack_ready(stack_name_skynet) unless stack_ready?(stack_name_skynet)
1720
- wait_for_stack_ready(stack_name_urania) unless stack_ready?(stack_name_urania)
1721
- wait_for_stack_ready(stack_name_bburago) unless stack_ready?(stack_name_bburago)
1722
- wait_for_stack_ready(stack_name_hal9000) unless stack_ready?(stack_name_hal9000)
1723
- wait_for_stack_ready(stack_name_assange) unless stack_ready?(stack_name_assange)
1724
- wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1725
- wait_for_stack_ready(stack_name_leftorium) unless stack_ready?(stack_name_leftorium)
1726
- wait_for_stack_ready(stack_name_rachele) unless stack_ready?(stack_name_rachele)
1727
-
1728
- stack_name_web = "ecs-task-web-qa-#{deploy_id}"
1729
- git_checkout_version('prima', @projects["prima"][:revision])
1730
- stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
1731
- parameters = [
1732
- {
1733
- parameter_key: "Environment",
1734
- parameter_value: "qa"
1735
- },
1736
- {
1737
- parameter_key: "ReleaseVersion",
1738
- parameter_value: "#{@projects["prima"][:revision]}"
1739
- },
1740
- {
1741
- parameter_key: "TaskDesiredCount",
1742
- parameter_value: "1"
1743
- },
1744
- {
1745
- parameter_key: "ECSClusterName",
1746
- parameter_value: @ecs_cluster_name
1747
- },
1748
- {
1749
- parameter_key: "ALBShortName",
1750
- parameter_value: "web-qa-#{deploy_id}"[0..27]
1751
- },
1752
- {
1753
- parameter_key: "WebQaBaseHostname",
1754
- parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
1755
- },
1756
- {
1757
- parameter_key: "HostnamePattern",
1758
- parameter_value: "ww*-#{@dns_record_identifier}.qa.colaster.com"
1759
- },
1760
- {
1761
- parameter_key: "HostnamePatternPriority",
1762
- parameter_value: hostname_pattern_priority
1763
- },
1764
- {
1765
- parameter_key: "HostnamePatternAggregatorPriority",
1766
- parameter_value: (hostname_pattern_priority.to_i + 1).to_s
1767
- },
1768
- {
1769
- parameter_key: "EnvHash",
1770
- parameter_value: deploy_id
1771
- },
1772
- {
1773
- parameter_key: "AssangeHostname",
1774
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1775
- },
1776
- {
1777
- parameter_key: "BackofficeHostname",
1778
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1779
- },
1780
- {
1781
- parameter_key: "WebHostname",
1782
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1783
- },
1784
- {
1785
- parameter_key: "FePrimaDomain",
1786
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1787
- },
1788
- {
1789
- parameter_key: "HostnamePattern",
1790
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1791
- }
1792
- ]
1793
- if stack_exists?(stack_name_web)
1794
- cur_version = get_currently_deployed_version(stack_name_web)
1795
- update_stack(stack_name_web, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"][:revision])
1796
- else
1797
- create_stack(stack_name_web, stack_body, parameters, tags, @cf_role)
1798
- end
1799
-
1800
- stack_name_consumer = "ecs-task-consumer-qa-#{deploy_id}"
1801
- git_checkout_version('prima', @projects["prima"][:revision])
1802
- stack_body = IO.read('projects/prima/app/cloudformation/tasks/consumer.yml')
1803
- parameters = [
1804
- {
1805
- parameter_key: "Environment",
1806
- parameter_value: "qa"
1807
- },
1808
- {
1809
- parameter_key: "ReleaseVersion",
1810
- parameter_value: "#{@projects["prima"][:revision]}"
1811
- },
1812
- {
1813
- parameter_key: "ECSClusterName",
1814
- parameter_value: @ecs_cluster_name
1815
- },
1816
- {
1817
- parameter_key: "NginxHttpHost",
1818
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1819
- },
1820
- {
1821
- parameter_key: "AssangeHostname",
1822
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1823
- },
1824
- {
1825
- parameter_key: "BackofficeHostname",
1826
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1827
- },
1828
- {
1829
- parameter_key: "WebHostname",
1830
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1831
- },
1832
- {
1833
- parameter_key: "FePrimaDomain",
1834
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1835
- },
1836
- {
1837
- parameter_key: "HostnamePattern",
1838
- parameter_value: "consumer-#{@dns_record_identifier}.qa.colaster.com"
1839
- }
1840
- ]
1841
- if stack_exists?(stack_name_consumer)
1842
- cur_version = get_currently_deployed_version(stack_name_consumer)
1843
- update_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"][:revision])
1844
- else
1845
- create_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role)
1846
- end
1847
-
1848
- stack_name_roger = "ecs-task-roger-qa-#{deploy_id}"
1849
- git_checkout_version('roger', @projects["roger"][:revision])
1850
- stack_body = File.read('projects/roger/deploy/task.yml')
1851
- parameters = [
1852
- {
1853
- parameter_key: "Environment",
1854
- parameter_value: "qa"
1855
- },
1856
- {
1857
- parameter_key: "ReleaseVersion",
1858
- parameter_value: @projects["roger"][:revision]
1859
- },
1860
- {
1861
- parameter_key: "TaskDesiredCount",
1862
- parameter_value: "1"
1863
- },
1864
- {
1865
- parameter_key: "ECSClusterName",
1866
- parameter_value: @ecs_cluster_name
1867
- },
1868
- {
1869
- parameter_key: "HostnamePattern",
1870
- parameter_value: "roger-#{@dns_record_identifier}.qa.colaster.com"
1871
- },
1872
- {
1873
- parameter_key: "HostnamePatternPriority",
1874
- parameter_value: hostname_pattern_priority
1875
- }
1876
- ]
1877
- if stack_exists?(stack_name_roger)
1878
- cur_version = get_currently_deployed_version(stack_name_roger)
1879
- update_stack(stack_name_roger, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["roger"][:revision])
1880
- else
1881
- create_stack(stack_name_roger, stack_body, parameters, tags, @cf_role)
1882
- end
1883
-
1884
-
1885
- if deploy_starsky_hutch_maia?
1886
- wait_for_stack_ready(stack_name_starsky) unless stack_ready?(stack_name_starsky) unless !deploy_starsky_hutch_maia?
1887
-
1888
- stack_name_hutch = "ecs-task-hutch-qa-#{deploy_id}"
1889
- git_checkout_version('hutch', @projects["hutch"][:revision])
1890
- stack_body = File.read('projects/hutch/deploy/task.yml')
1891
- parameters = [
1892
- {
1893
- parameter_key: "Environment",
1894
- parameter_value: "qa"
1895
- },
1896
- {
1897
- parameter_key: "ReleaseVersion",
1898
- parameter_value: "#{@projects["hutch"][:revision]}"
1899
- },
1900
- {
1901
- parameter_key: "ALBShortName",
1902
- parameter_value: "hutch-qa-#{deploy_id}"[0..27]
1903
- },
1904
- {
1905
- parameter_key: "ECSClusterName",
1906
- parameter_value: @ecs_cluster_name
1907
- },
1908
- {
1909
- parameter_key: "EnvHash",
1910
- parameter_value: deploy_id
1911
- },
1912
- {
1913
- parameter_key: "HostnamePattern",
1914
- parameter_value: "hutch-#{@dns_record_identifier}.qa.colaster.com"
1915
- },
1916
- {
1917
- parameter_key: "HostnamePatternPriority",
1918
- parameter_value: (hostname_pattern_priority.to_i + 254).to_s
1919
- },
1920
- {
1921
- parameter_key: "StarskyUrl",
1922
- parameter_value: "https://#{get_route53_hostname('ecs-task-starsky-qa-notneeded')}"
1923
- }
1924
- ]
1925
- if stack_exists?(stack_name_hutch)
1926
- cur_version = get_currently_deployed_version(stack_name_hutch)
1927
- update_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hutch"][:revision])
1928
- else
1929
- create_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role)
1930
- end
1931
- end
1932
-
1933
- stack_name_maia = "ecs-task-maia-qa-#{deploy_id}"
1934
- git_checkout_version('maia', @projects["maia"][:revision])
1935
- stack_body = File.read('projects/maia/deploy/task.yml')
1936
- parameters = [
1937
- {
1938
- parameter_key: "Environment",
1939
- parameter_value: "qa"
1940
- },
1941
- {
1942
- parameter_key: "ReleaseVersion",
1943
- parameter_value: "#{@projects["maia"][:revision]}"
1944
- },
1945
- {
1946
- parameter_key: "ALBShortName",
1947
- parameter_value: "maia-qa-#{deploy_id}"[0..27]
1948
- },
1949
- {
1950
- parameter_key: "ECSClusterName",
1951
- parameter_value: @ecs_cluster_name
1952
- },
1953
- {
1954
- parameter_key: "EnvHash",
1955
- parameter_value: deploy_id
1956
- },
1957
- {
1958
- parameter_key: "HostnamePatternPublic",
1959
- parameter_value: "api*-#{@dns_record_identifier}.qa.colaster.com"
1960
- },
1961
- {
1962
- parameter_key: "HostnamePatternPriority",
1963
- parameter_value: (hostname_pattern_priority.to_i + 128).to_s
1964
- },
1965
- {
1966
- parameter_key: "ProxyHostnameIntermediari",
1967
- parameter_value: "api-intermediari-#{@dns_record_identifier}.qa.colaster.com"
1968
- },
1969
- {
1970
- parameter_key: "ProxyHostnameApp",
1971
- parameter_value: "api-#{@dns_record_identifier}.qa.colaster.com"
1972
- }
1973
- ]
1974
- if stack_exists?(stack_name_maia)
1975
- cur_version = get_currently_deployed_version(stack_name_maia)
1976
- update_stack(stack_name_maia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["maia"][:revision])
1977
- else
1978
- create_stack(stack_name_maia, stack_body, parameters, tags, @cf_role)
1979
- end
1980
-
1981
- wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
1982
- wait_for_stack_ready(stack_name_consumer) unless stack_ready?(stack_name_consumer)
1983
- wait_for_stack_ready(stack_name_ermes) unless stack_ready?(stack_name_ermes)
1984
- wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1985
- wait_for_stack_ready(stack_name_peano) unless stack_ready?(stack_name_peano)
1986
- wait_for_stack_ready(stack_name_rogoreport) unless stack_ready?(stack_name_rogoreport)
1987
- wait_for_stack_ready(stack_name_borat) unless stack_ready?(stack_name_borat)
1988
- wait_for_stack_ready(stack_name_activia) unless stack_ready?(stack_name_activia)
1989
- wait_for_stack_ready(stack_name_crash) unless stack_ready?(stack_name_crash) unless !deploy_crash?
1990
- wait_for_stack_ready(stack_name_hutch) unless stack_ready?(stack_name_hutch) unless !deploy_starsky_hutch_maia?
1991
- wait_for_stack_ready(stack_name_maia) unless stack_ready?(stack_name_maia) unless !deploy_starsky_hutch_maia?
1992
-
1993
- update_service_defaults(stack_name_web)
1994
- update_service_defaults(stack_name_consumer)
1995
- update_service_defaults(stack_name_urania)
1996
- update_service_defaults(stack_name_ermes)
1997
- update_service_defaults(stack_name_bburago)
1998
- update_service_defaults(stack_name_hal9000)
1999
- update_service_defaults(stack_name_fidaty)
2000
- update_service_defaults(stack_name_peano)
2001
- update_service_defaults(stack_name_rogoreport)
2002
- update_service_defaults(stack_name_assange)
2003
- update_service_defaults(stack_name_borat)
2004
- update_service_defaults(stack_name_activia)
2005
- update_service_defaults(stack_name_skynet)
2006
- update_service_defaults(stack_name_leftorium)
2007
- update_service_defaults(stack_name_rachele)
2008
- update_service_defaults(stack_name_starsky) unless !deploy_starsky_hutch_maia?
2009
- update_service_defaults(stack_name_hutch) unless !deploy_starsky_hutch_maia?
2010
- update_service_defaults(stack_name_maia) unless !deploy_starsky_hutch_maia?
2011
- update_service_defaults(stack_name_crash) unless !deploy_crash?
2012
-
2013
- activia_hostname = get_route53_hostname("activia")
2014
- assange_hostname = get_route53_hostname("assange")
2015
- bburago_hostname = get_route53_hostname("bburago")
2016
- borat_hostname = get_route53_hostname("borat")
2017
- ermes_hostname = get_route53_hostname("ermes")
2018
- fidaty_hostname = get_route53_hostname("fidaty")
2019
- hal9000_hostname = get_route53_hostname("hal9000")
2020
- prima_hostname = get_route53_hostname("web")
2021
- peano_hostname = get_route53_hostname("peano")
2022
- skynet_hostname = get_route53_hostname("skynet")
2023
- urania_hostname = get_route53_hostname("urania")
2024
- roger_hostname = get_route53_hostname("roger")
2025
- leftorium_hostname = get_route53_hostname("leftorium")
2026
- rachele_hostname = get_route53_hostname("rachele")
2027
- crash_hostname = get_route53_hostname("crash") unless !deploy_crash?
2028
- starsky_hostname = get_route53_hostname("starsky") unless !deploy_starsky_hutch_maia?
2029
- hutch_hostname = get_route53_hostname("hutch") unless !deploy_starsky_hutch_maia?
2030
- maia_app_hostname = get_route53_hostname("maia-app") unless !deploy_starsky_hutch_maia?
2031
- maia_intermediari_hostname = get_route53_hostname("maia-intermediari") unless !deploy_starsky_hutch_maia?
2032
-
2033
- # launch_marley ec2_ip_address(asg_stack_name), prima_hostname, borat_hostname
2034
-
2035
- projects_text = "
2036
- > Prima url: https://#{prima_hostname}
2037
- > Backoffice (Borat) url: https://#{borat_hostname}
2038
- > Urania url: http://#{urania_hostname}:81
2039
- > Bburago url: http://#{bburago_hostname}:83
2040
- > Ermes url: http://#{ermes_hostname}:10002
2041
- > Hal9000 url: http://#{hal9000_hostname}:10031
2042
- > Fidaty url: http://#{fidaty_hostname}:10021
2043
- > Peano url: http://#{peano_hostname}:10039
2044
- > Assange url: https://#{assange_hostname}
2045
- > Activia url: http://#{activia_hostname}:10041
2046
- > Skynet url: http://#{skynet_hostname}:8050
2047
- > Roger url: http://#{roger_hostname}:10051
2048
- > Leftorium url: http://#{leftorium_hostname}:10061
2049
- > Rachele url: http://#{rachele_hostname}:10040"
2050
- projects_text.concat "
2051
- > Crash url: https://#{crash_hostname}" if deploy_crash?
2052
- projects_text.concat "
2053
- > Starsky url: https://#{starsky_hostname}
2054
- > Hutch url: https://#{hutch_hostname}
2055
- > Maia App url: https://#{maia_app_hostname}
2056
- > Maia Intermediari url: https://#{maia_intermediari_hostname}" if deploy_starsky_hutch_maia?
2057
- projects_text.concat "
2058
- > RabbitMQ url: http://#{ec2_ip_address(asg_stack_name)}:15672
2059
- > Supervisor url: http://#{ec2_ip_address(asg_stack_name)}:9001
2060
- > Elasticsearch url: http://#{ec2_ip_address(asg_stack_name)}:9200
2061
- > SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
2062
- output projects_text.cyan
2063
- output "Deploy effettuato, everything is awesome!\n".green
2064
-
2065
- qainit_write_output(projects_text, 'Indirizzi scritti su ')
2066
- end
2067
-
2068
- def get_route53_hostname(project)
2069
- case
2070
- when project.include?('web')
2071
- host = "www-#{@dns_record_identifier}.qa.colaster.com"
2072
- when project.include?('urania')
2073
- host = "urania-#{@dns_record_identifier}.qa.colaster.com"
2074
- when project.include?('bburago')
2075
- host = "bburago-#{@dns_record_identifier}.qa.colaster.com"
2076
- when project.include?('hal9000')
2077
- host = "hal9000-#{@dns_record_identifier}.qa.colaster.com"
2078
- when project.include?('fidaty')
2079
- host = "fidaty-#{@dns_record_identifier}.qa.colaster.com"
2080
- when project.include?('peano')
2081
- host = "peano-#{@dns_record_identifier}.qa.colaster.com"
2082
- when project.include?('assange')
2083
- host = "assange-#{@dns_record_identifier}.qa.colaster.com"
2084
- when project.include?('borat')
2085
- host = "backoffice-#{@dns_record_identifier}.qa.colaster.com"
2086
- when project.include?('crash')
2087
- host = "crash-#{@dns_record_identifier}.qa.colaster.com"
2088
- when project.include?('ermes')
2089
- host = "ermes-#{@dns_record_identifier}.qa.colaster.com"
2090
- when project.include?('activia')
2091
- host = "activia-#{@dns_record_identifier}.qa.colaster.com"
2092
- when project.include?('skynet')
2093
- host = "skynet-#{@dns_record_identifier}.qa.colaster.com"
2094
- when project.include?('roger')
2095
- host = "roger-#{@dns_record_identifier}.qa.colaster.com"
2096
- when project.include?('leftorium')
2097
- host = "leftorium-#{@dns_record_identifier}.qa.colaster.com"
2098
- when project.include?('rachele')
2099
- host = "rachele-#{@dns_record_identifier}.qa.colaster.com"
2100
- when project.include?('starsky')
2101
- host = "starsky-#{@dns_record_identifier}.qa.colaster.com"
2102
- when project.include?('hutch')
2103
- host = "hutch-#{@dns_record_identifier}.qa.colaster.com"
2104
- when project.include?('maia-app')
2105
- host = "api-#{@dns_record_identifier}.qa.colaster.com"
2106
- when project.include?('maia-intermediari')
2107
- host = "api-intermediari-#{@dns_record_identifier}.qa.colaster.com"
2108
- end
2109
- host
2110
- end
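get_route53_hostname is a long case/when that mostly derives the hostname from the project name itself; only a handful of projects (web, borat, maia-app, maia-intermediari) use a different prefix. A hypothetical table-driven equivalent, assuming the caller passes the bare project name as in the calls above:

# Hypothetical, condensed equivalent of the case/when above.
IRREGULAR_QA_PREFIXES = {
  'web'               => 'www',
  'borat'             => 'backoffice',
  'maia-app'          => 'api',
  'maia-intermediari' => 'api-intermediari'
}.freeze

def qa_hostname(project, dns_record_identifier)
  prefix = IRREGULAR_QA_PREFIXES.fetch(project, project)
  "#{prefix}-#{dns_record_identifier}.qa.colaster.com"
end

# qa_hostname('borat', 'abc123')  # => "backoffice-abc123.qa.colaster.com" (made-up identifier)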
2111
-
2112
- def ec2_ip_address(asg_stack_name)
2113
- resp = @cf.describe_stack_resource({
2114
- stack_name: asg_stack_name,
2115
- logical_resource_id: 'ECSAutoScalingGroup'
2116
- })
2117
- resp = @asg.describe_auto_scaling_groups({
2118
- auto_scaling_group_names: [resp.stack_resource_detail.physical_resource_id],
2119
- max_records: 1
2120
- })
2121
- instance_id = resp.auto_scaling_groups[0].instances[0].instance_id
2122
- resp = @ec2.describe_instances({instance_ids: [instance_id]})
2123
- resp.reservations[0].instances[0].private_ip_address
2124
- end
2125
-
2126
612
  def get_alb_host(stack_name)
2127
613
  case
2128
614
  when stack_name.include?('web')
@@ -2165,896 +651,33 @@ class Release
2165
651
  logical_resource_id = 'EcsApplicationLoadBalancerPublic'
2166
652
  when stack_name.include?('maia')
2167
653
  logical_resource_id = 'EcsApplicationLoadBalancerPublic'
654
+ when stack_name.include?('legion')
655
+ logical_resource_id = 'EcsApplicationLoadBalancerInternal'
2168
656
  end
2169
- resp = @cf.describe_stack_resource({
2170
- stack_name: stack_name,
2171
- logical_resource_id: logical_resource_id
2172
- })
2173
- resp = @alb.describe_load_balancers({
2174
- load_balancer_arns: [resp.stack_resource_detail.physical_resource_id]
2175
- })
657
+ resp = describe_stack_resource(stack_name, logical_resource_id)
658
+ resp = describe_load_balancers([resp.stack_resource_detail.physical_resource_id])
2176
659
  resp.load_balancers[0].dns_name
2177
660
  end
2178
661
 
2179
- def update_service_defaults(stack_name)
2180
- case
2181
- when stack_name.include?('web')
2182
- logical_resource_id = 'ECSServiceWebQA'
2183
- when stack_name.include?('consumer')
2184
- logical_resource_id = 'ECSServiceConsumerQa'
2185
- when stack_name.include?('urania')
2186
- logical_resource_id = 'ECSServiceUraniaQA'
2187
- when stack_name.include?('backoffice')
2188
- logical_resource_id = 'ECSServiceBackoffice'
2189
- when stack_name.include?('ermes')
2190
- logical_resource_id = 'ECSServiceErmesQA'
2191
- when stack_name.include?('bburago')
2192
- logical_resource_id = 'ECSServiceBburagoQA'
2193
- when stack_name.include?('hal9000')
2194
- logical_resource_id = 'ECSServiceHal9000QA'
2195
- when stack_name.include?('fidaty')
2196
- logical_resource_id = 'ECSServiceFidatyQA'
2197
- when stack_name.include?('skynet')
2198
- logical_resource_id = 'ECSServiceSkynetQA'
2199
- when stack_name.include?('roger')
2200
- logical_resource_id = 'ECSServiceRogerQA'
2201
- when stack_name.include?('activia')
2202
- logical_resource_id = 'ECSServiceActiviaQA'
2203
- when stack_name.include?('peano')
2204
- logical_resource_id = 'ECSServicePeanoQA'
2205
- when stack_name.include?('rogoreport')
2206
- logical_resource_id = 'ECSServiceRogoreport'
2207
- when stack_name.include?('assange')
2208
- logical_resource_id = 'ECSServiceAssangeQA'
2209
- when stack_name.include?('borat')
2210
- logical_resource_id = 'ECSServiceBorat'
2211
- when stack_name.include?('leftorium')
2212
- logical_resource_id = 'ECSServiceLeftoriumQA'
2213
- when stack_name.include?('rachele')
2214
- logical_resource_id = 'ECSServiceRacheleQA'
2215
- when stack_name.include?('crash')
2216
- logical_resource_id = 'ECSServiceCrashQA'
2217
- when stack_name.include?('starsky')
2218
- logical_resource_id = 'ECSServiceStarskyQA'
2219
- when stack_name.include?('hutch')
2220
- logical_resource_id = 'ECSServiceHutch'
2221
- when stack_name.include?('maia')
2222
- logical_resource_id = 'ECSServiceMaia'
2223
- else
2224
- raise "Service name non gestito per lo stack #{stack_name}"
2225
- end
2226
- resp = @cf.describe_stack_resource(
2227
- stack_name: stack_name,
2228
- logical_resource_id: logical_resource_id
2229
- )
2230
- @ecs.update_service(
2231
- cluster: @ecs_cluster_name,
2232
- service: resp.stack_resource_detail.physical_resource_id,
2233
- deployment_configuration: {
2234
- minimum_healthy_percent: 0,
2235
- maximum_percent: 100
2236
- }
2237
- )
2238
- end
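update_service only rewrites the service's deployment configuration (minimum_healthy_percent 0 lets the single QA instance stop the old task before the new one starts) and returns immediately. If one wanted to block until the redeployed service settles, aws-sdk-ecs ships a services_stable waiter; the snippet below is an optional illustration, not something this script does, and the cluster and service names are placeholders.

require 'aws-sdk-ecs'

ecs = Aws::ECS::Client.new
# Placeholder names; in the script they come from @ecs_cluster_name and
# describe_stack_resource respectively.
ecs.wait_until(:services_stable,
               cluster: 'ecs-cluster-qa-example',
               services: ['ecs-service-web-qa-example']) do |w|
  w.delay        = 15   # seconds between polls
  w.max_attempts = 40   # give up after roughly 10 minutes
end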
2239
-
2240
- def create_activia_artifact(revision)
2241
- output "Preparo l'artifact activia .zip\n".yellow
2242
-
2243
- git_checkout_version('activia', revision)
2244
-
2245
- Dir.chdir 'projects/activia'
2246
-
2247
- decrypt_secrets()
2248
-
2249
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2250
- exec_step 'prepare-docker-compose --directory activia && cp docker-compose-qainit.yml docker-compose.yml'
2251
- [
2252
- "docker-compose build web",
2253
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2254
- '-c' 'mix local.hex --force && mix hex.info && \
2255
- mix deps.get && mix compile && mix deps.compile && \
2256
- cd assets && \
2257
- rm -rf node_modules && \
2258
- yarn --cache-folder ~/.cache/yarn && \
2259
- sysconfcpus -n 2 ./node_modules/.bin/webpack --mode production && \
2260
- cd .. && \
2261
- mix phx.digest && \
2262
- rm -rf _build/qa/rel/ && \
2263
- mix release --env=qa'"
2264
- ].each do |cmd|
2265
- execute_command cmd
2266
- end
2267
-
2268
- cleanup_containers
2269
-
2270
- artifact_path = Dir.glob("_build/qa/rel/activia/releases/*/activia.tar.gz").first
2271
-
2272
- upload_artifact(artifact_path, "microservices/activia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2273
-
2274
- Dir.chdir '../../'
2275
- end
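create_activia_artifact and most of the create_*_artifact methods removed below share the same skeleton: check out the revision, decrypt the secrets, switch to the QA docker-compose file, run mix release inside the web container, then upload the resulting tarball to S3. The condensed sketch below shows that shared shape; the generic method itself is hypothetical, while exec_step, execute_command, cleanup_containers, upload_artifact, decrypt_secrets and git_checkout_version are the script's own helpers.

# Hypothetical generic form of the per-project Elixir artifact builders.
def create_elixir_artifact(project, revision, s3_bucket)
  git_checkout_version(project, revision)
  Dir.chdir "projects/#{project}"
  decrypt_secrets()
  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step "prepare-docker-compose --directory #{project} && cp docker-compose-qainit.yml docker-compose.yml"
  execute_command 'docker-compose build web'
  execute_command "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web " \
                  "'-c' 'mix local.hex --force && mix deps.get && mix compile && mix deps.compile && " \
                  "rm -rf _build/qa/rel/ && mix release --env=qa'"
  cleanup_containers
  artifact_path = Dir.glob("_build/qa/rel/#{project}/releases/*/#{project}.tar.gz").first
  upload_artifact(artifact_path, "microservices/#{project}/#{revision}-qa.tar.gz", "#{s3_bucket}-encrypted")
  Dir.chdir '../../'
end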
2276
-
2277
- def create_assange_artifact(revision)
2278
- output "Preparo l'artifact assange .zip\n".yellow
2279
-
2280
- git_checkout_version('assange', revision)
2281
-
2282
- Dir.chdir 'projects/assange'
2283
-
2284
- decrypt_secrets()
2285
-
2286
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2287
- exec_step 'prepare-docker-compose --directory assange && cp docker-compose-qainit.yml docker-compose.yml'
2288
- exec_step 'deploy/build_qa_artifact'
2289
-
2290
- cleanup_containers
2291
-
2292
- artifact_path = Dir.glob("_build/qa/rel/assange/releases/*/assange.tar.gz").first
2293
- upload_artifact(artifact_path, "microservices/assange/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2294
-
2295
- Dir.chdir '../../'
2296
- end
2297
-
2298
- def create_bburago_artifact(revision)
2299
- output "Preparo l'artifact bburago .zip\n".yellow
2300
-
2301
- git_checkout_version('bburago', revision)
2302
-
2303
- Dir.chdir 'projects/bburago'
2304
-
2305
- decrypt_secrets()
2306
-
2307
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2308
- exec_step 'prepare-docker-compose --directory bburago && cp docker-compose-qainit.yml docker-compose.yml'
2309
- [
2310
- "docker-compose build web",
2311
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint deploy/create_qa_artifact web"
2312
- ].each do |cmd|
2313
- execute_command cmd
2314
- end
2315
-
2316
- cleanup_containers
2317
-
2318
- artifact_path = Dir.glob('_build/qa/rel/bburago/releases/*/bburago.tar.gz').first
2319
- upload_artifact(artifact_path, "microservices/bburago/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2320
-
2321
- Dir.chdir '../../'
2322
- end
2323
-
2324
- def create_borat_artifact(revision)
2325
- output "Preparo l'artifact borat .zip\n".yellow
2326
-
2327
- git_checkout_version('borat', revision)
2328
-
2329
- Dir.chdir 'projects/borat'
2330
-
2331
- decrypt_secrets()
2332
-
2333
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2334
- exec_step 'prepare-docker-compose --directory borat && cp docker-compose-qainit.yml docker-compose.yml'
2335
- [
2336
- "docker network create borat_network || true",
2337
- "docker-compose build web",
2338
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2339
- '-c' 'mix local.hex --force && mix hex.info && \
2340
- mix deps.get && \
2341
- cd assets && \
2342
- yarn --cache-folder ~/.cache/yarn && \
2343
- sysconfcpus -n 2 ./node_modules/.bin/webpack -p --config config/webpack.config.prod.js && \
2344
- cd ../ && \
2345
- mix phx.digest && \
2346
- mix compile && mix deps.compile && \
2347
- rm -rf _build/qa/rel/ && \
2348
- mix distillery.release --env=qa'"
2349
- ].each do |cmd|
2350
- execute_command cmd
2351
- end
2352
-
2353
- cleanup_containers
2354
-
2355
- artifact_path = Dir.glob("_build/qa/rel/borat/releases/*/borat.tar.gz").first
2356
- upload_artifact(artifact_path, "microservices/borat/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2357
-
2358
- Dir.chdir '../../'
2359
- end
2360
-
2361
- def create_crash_artifact(revision, deploy_id)
2362
- output "Preparo l'artifact crash .zip\n".yellow
2363
-
2364
- git_checkout_version('crash', revision)
2365
-
2366
- Dir.chdir 'projects/crash'
2367
-
2368
- crash_qa_host = get_route53_hostname('ecs-task-crash-qa-notneeded')
2369
-
2370
- decrypt_secrets()
2371
-
2372
- `mv docker-compose-ci.yml docker-compose.yml`
2373
- exec_step 'prepare-docker-compose --directory crash && cp docker-compose-qainit.yml docker-compose.yml'
2374
-
2375
- if File.exists? 'deploy/build_qa_artifact'
2376
- `deploy/build_qa_artifact #{deploy_id}`
2377
- else # TODO remove when deploy/build_qa_artifact is merged
2378
- [
2379
- 'docker-compose build web',
2380
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa -e ENV_HASH=#{deploy_id} web \
2381
- '-c' 'mix local.hex --force && mix hex.info && \
2382
- mix deps.get && \
2383
- cd assets && \
2384
- yarn --cache-folder ~/.cache/yarn && \
2385
- NODE_ENV=production sysconfcpus -n 1 yarn run build && \
2386
- cd ../ && \
2387
- mix release.clean --implode --no-confirm && \
2388
- mix phx.digest && \
2389
- mix deps.clean --all && \
2390
- mix deps.get && \
2391
- mix compile && mix release --env=qa'",
2392
- 'docker-compose down'
2393
- ].each do |cmd|
2394
- execute_command cmd
2395
- end
2396
- end
2397
-
2398
- cleanup_containers
2399
-
2400
- artifact_path = Dir.glob('_build/qa/rel/crash/releases/*/crash.tar.gz').first
2401
- upload_artifact(artifact_path, "microservices/crash/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2402
-
2403
- Dir.chdir '../../'
2404
- end
2405
-
2406
- def create_ermes_artifact(revision)
2407
- output "Preparo l'artifact ermes .zip\n".yellow
2408
-
2409
- git_checkout_version('ermes', revision)
2410
-
2411
- Dir.chdir 'projects/ermes'
2412
-
2413
- decrypt_secrets()
2414
-
2415
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2416
- exec_step 'prepare-docker-compose --directory ermes && cp docker-compose-qainit.yml docker-compose.yml'
2417
-
2418
- [
2419
- "if echo `docker network ls` | grep crash_default; \
2420
- then echo 'crash_default network already existing'; \
2421
- else docker network create crash_default; fi",
2422
- 'docker-compose build web'
2423
- ].each do |cmd|
2424
- execute_command cmd
2425
- end
2426
-
2427
- [ "docker-compose run --rm -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2428
- '-c' 'mix local.hex --force && mix hex.info && \
2429
- mix deps.get && mix compile && mix deps.compile && \
2430
- mix phx.digest && \
2431
- MIX_ENV=dev mix compile.sms && \
2432
- MIX_ENV=dev mix compile.html && \
2433
- MIX_ENV=dev mix compile.heml && \
2434
- MIX_ENV=dev mix compile.app_notification && \
2435
- rm -rf _build/qa/rel/ && \
2436
- mix release --env=qa'"
2437
- ].each do |cmd|
2438
- execute_command cmd
2439
- end
2440
-
2441
- cleanup_containers
2442
-
2443
- artifact_path = Dir.glob("_build/qa/rel/ermes/releases/*/ermes.tar.gz").first
2444
- upload_artifact(artifact_path, "microservices/ermes/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2445
-
2446
- Dir.chdir '../../'
2447
- end
2448
-
2449
- def create_fidaty_artifact(revision)
2450
- output "Preparo l'artifact fidaty .zip\n".yellow
2451
-
2452
- git_checkout_version('fidaty', revision)
2453
-
2454
- Dir.chdir 'projects/fidaty'
2455
-
2456
- decrypt_secrets()
2457
-
2458
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2459
- exec_step 'prepare-docker-compose --directory fidaty && cp docker-compose-qainit.yml docker-compose.yml'
2460
- [
2461
- "docker-compose build web",
2462
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2463
- '-c' 'mix local.hex --force && mix hex.info && \
2464
- mix deps.get && mix compile && mix deps.compile && \
2465
- mix phx.digest && \
2466
- rm -rf _build/qa/rel/ && \
2467
- mix release --env=qa'"
2468
- ].each do |cmd|
2469
- execute_command cmd
2470
- end
2471
-
2472
- cleanup_containers
2473
-
2474
- artifact_path = Dir.glob("_build/qa/rel/fidaty/releases/*/fidaty.tar.gz").first
2475
- upload_artifact(artifact_path, "microservices/fidaty/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2476
-
2477
- Dir.chdir '../../'
2478
- end
2479
-
2480
- def create_hal9000_artifact(revision)
2481
- output "Preparo l'artifact hal9000 .zip\n".yellow
2482
-
2483
- git_checkout_version('hal9000', revision)
2484
-
2485
- Dir.chdir 'projects/hal9000'
2486
-
2487
- decrypt_secrets()
2488
-
2489
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2490
- exec_step 'prepare-docker-compose --directory hal9000 && cp docker-compose-qainit.yml docker-compose.yml'
2491
- [
2492
- "docker-compose build web",
2493
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2494
- '-c' 'mix local.hex --force && mix hex.info && \
2495
- mix deps.get && mix compile && mix deps.compile && \
2496
- mix phx.digest assets -o priv/static && \
2497
- rm -rf _build/qa/rel/ && \
2498
- mix release --env=qa'"
2499
- ].each do |cmd|
2500
- execute_command cmd
2501
- end
2502
-
2503
- cleanup_containers
2504
-
2505
- artifact_path = Dir.glob("_build/qa/rel/hal9000/releases/*/hal9000.tar.gz").first
2506
- upload_artifact(artifact_path, "microservices/hal9000/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2507
-
2508
- Dir.chdir '../../'
2509
- end
2510
-
2511
- def create_hutch_artifact(revision)
2512
- output "Preparo l'artifact hutch\n".yellow
2513
-
2514
- git_checkout_version('hutch', revision)
2515
-
2516
- Dir.chdir 'projects/hutch'
2517
-
2518
- version = `git rev-parse HEAD`
2519
-
2520
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2521
-
2522
- exec_step 'git submodule update'
2523
- exec_step 'cp docker-compose-ci.yml docker-compose.yml'
2524
- exec_step 'prepare-docker-compose --directory hutch && cp docker-compose-qainit.yml docker-compose.yml'
2525
- exec_step "sed s/qa_deploy_id/#{get_deploy_id}/ .env.dist.qa"
2526
- exec_step "cp .env.dist.qa .env"
2527
- exec_step "rm -fr peano"
2528
-
2529
- [
2530
- "sed -i 's/USER app/USER root/g' Dockerfile",
2531
- "docker-compose build web",
2532
- "docker-compose run -w $PWD -u root -e ELM_APP_STARSKY_URL=https://#{get_route53_hostname("starsky")} --entrypoint /bin/sh web \
2533
- '-c' 'yarn && yarn run build \
2534
- && tar cfz #{revision}-qa.tar.gz *'"
2535
- ].each do |cmd|
2536
- execute_command cmd
2537
- end
2538
-
2539
- artifact_path = "./#{revision}-qa.tar.gz"
2540
-
2541
- upload_artifact(artifact_path, "microservices/hutch/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2542
-
2543
- Dir.chdir '../../'
2544
- end
2545
-
2546
- def create_leftorium_artifact(revision)
2547
- output "Preparo l'artifact leftorium .zip\n".yellow
2548
-
2549
- git_checkout_version('leftorium', revision)
2550
-
2551
- Dir.chdir 'projects/leftorium'
2552
-
2553
- decrypt_secrets()
2554
-
2555
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2556
- exec_step 'prepare-docker-compose --directory leftorium && cp docker-compose-qainit.yml docker-compose.yml'
2557
- [
2558
- "docker-compose build web",
2559
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2560
- '-c' 'mix local.hex --force && mix hex.info && \
2561
- mix deps.get && mix compile && mix deps.compile && \
2562
- rm -rf _build/qa/rel/ && \
2563
- mix release --env=qa'"
2564
- ].each do |cmd|
2565
- execute_command cmd
2566
- end
2567
-
2568
- cleanup_containers
2569
-
2570
- artifact_path = Dir.glob('_build/qa/rel/leftorium/releases/*/leftorium.tar.gz').first
2571
- upload_artifact(artifact_path, "microservices/leftorium/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2572
-
2573
- Dir.chdir '../../'
2574
- end
2575
-
2576
- def create_maia_artifact(revision)
2577
- output "Preparo l'artifact maia .zip\n".yellow
2578
-
2579
- git_checkout_version('maia', revision)
2580
-
2581
- Dir.chdir 'projects/maia'
2582
-
2583
- decrypt_secrets()
2584
-
2585
- exec_step 'prepare-docker-compose --directory maia'
2586
-
2587
- if File.exists? 'deploy/build_qa_artifact'
2588
- exec_step 'deploy/build_qa_artifact'
2589
- else
2590
- [
2591
- "docker-compose build web",
2592
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/bash web \
2593
- '-c' 'pwd && ls -al && \
2594
- mix local.hex --force && \
2595
- mix deps.get && \
2596
- mix release.clean --implode --no-confirm && \
2597
- mix release --env=qa'"
2598
- ].each do |cmd|
2599
- execute_command cmd
2600
- end
2601
- end
2602
-
2603
- cleanup_containers
2604
-
2605
- artifact_path = Dir.glob('_build/qa/rel/maia/releases/*/maia.tar.gz').first
2606
- upload_artifact(artifact_path, "microservices/maia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2607
-
2608
- Dir.chdir '../../'
2609
- end
2610
-
2611
- def create_peano_artifact(revision)
2612
- output "Preparo l'artifact peano .zip\n".yellow
2613
-
2614
- git_checkout_version('peano', revision)
2615
-
2616
- Dir.chdir 'projects/peano'
2617
-
2618
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2619
-
2620
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2621
- exec_step 'prepare-docker-compose --directory peano && cp docker-compose-qainit.yml docker-compose.yml'
2622
-
2623
- if File.exists? 'deploy/build_qa_artifact'
2624
- `deploy/build_qa_artifact`
2625
- else # TODO remove when deploy/build_qa_artifact is merged
2626
- [
2627
- "docker-compose build web",
2628
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2629
- '-c' 'mix local.hex --force && mix hex.info && \
2630
- mix deps.get && mix compile && mix deps.compile && \
2631
- rm -rf _build/qa/rel/ && \
2632
- mix release --env=qa'"
2633
- ].each do |cmd|
2634
- execute_command cmd
2635
- end
2636
- end
2637
-
2638
- cleanup_containers
2639
-
2640
- artifact_path = Dir.glob("_build/qa/rel/peano/releases/*/peano.tar.gz").first
2641
- upload_artifact(artifact_path, "microservices/peano/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2642
-
2643
- Dir.chdir '../../'
2644
- end
2645
-
2646
- def create_prima_artifact(revision, branch_name, deploy_id, minimal = false)
2647
- output "Preparo l'artifact prima .zip\n".yellow
2648
-
2649
- git_checkout_version('prima', revision)
2650
-
2651
- Dir.chdir 'projects/prima'
2652
-
2653
- ['vendor'].each do |dir|
2654
- unless File.directory?(dir)
2655
- if File.directory?("../../../prima/#{dir}")
2656
- exec_step "rsync -a ../../../prima/#{dir} ."
2657
- end
2658
- end
2659
- end
2660
-
2661
- exec_step 'mv docker-compose-ci.yml docker-compose.yml'
2662
- exec_step 'prepare-docker-compose --directory prima'
2663
- exec_step 'sed -i "s/\\/home\\/app/\\/root/g" docker-compose.yml'
2664
- `sed -i 's/"@prima-assicurazioni/pyxis-npm": ".*",/"@prima-assicurazioni/pyxis-npm": "#{@pyxis_version}",/' package.json` if deploy_pyxis?
2665
- [
2666
- "bin/qa_build_artifact.sh #{branch_name} #{'update' if @deploy_update}"
2667
- ].each do |cmd|
2668
- execute_command cmd
2669
- end
2670
-
2671
- cleanup_containers
2672
-
2673
- Dir.chdir "../../"
2674
- end
2675
-
2676
- def create_pyxis_artifact(revision, deploy_id)
2677
- if (deploy_pyxis?)
2678
- output "Preparo l'artifact pyxis\n".yellow
2679
-
2680
- git_checkout_version('pyxis-npm', revision)
2681
-
2682
- Dir.chdir 'projects/pyxis-npm'
2683
-
2684
- decrypt_secrets()
2685
-
2686
- exec_step 'mv .fakenpmrc .npmrc'
2687
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2688
- exec_step 'prepare-docker-compose --directory pyxis-npm && cp docker-compose-qainit.yml docker-compose.yml'
2689
- exec_step 'docker-compose build web'
2690
-
2691
- exec_step "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
2692
- '-c' 'npm view prima-assicurazioni/pyxis-npm versions --json > versions.json'" # the published versions can only be read from inside the container, so write them to a file and read it back right afterwards
2693
- published_versions = `cat versions.json`
2694
- qa_versions = JSON.parse(published_versions).select{ |version| version.include? get_pyxis_version(deploy_id) }
2695
-
2696
- @pyxis_version = "0.#{get_pyxis_version(deploy_id)}.#{qa_versions.size}"
2697
-
2698
- `sed -i '3s/".*/"version": "#{@pyxis_version}",/' package.json`
2699
- [
2700
- "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
2701
- '-c' 'yarn install && \
2702
- yarn build:prod && \
2703
- npm publish'"
2704
- ].each do |cmd|
2705
- execute_command cmd
2706
- end
2707
-
2708
- cleanup_containers
2709
- Dir.chdir '../../'
2710
- end
2711
- end
2712
-
2713
- def create_rachele_artifact(revision)
2714
- output "Preparo l'artifact rachele .zip\n".yellow
2715
-
2716
- git_checkout_version('rachele', revision)
2717
-
2718
- Dir.chdir 'projects/rachele'
2719
-
2720
- decrypt_secrets()
2721
-
2722
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2723
- exec_step 'prepare-docker-compose --directory rachele && cp docker-compose-qainit.yml docker-compose.yml'
2724
-
2725
- execute_command "docker-compose build web"
2726
-
2727
- [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2728
- '-c' 'mix local.hex --force && mix hex.info && \
2729
- mix deps.get && mix compile && mix deps.compile && \
2730
- rm -rf _build/qa/rel/ && \
2731
- mix release --env=qa'"
2732
- ].each do |cmd|
2733
- execute_command cmd
2734
- end
2735
-
2736
- cleanup_containers
2737
-
2738
- artifact_path = Dir.glob("_build/qa/rel/rachele/releases/*/rachele.tar.gz").first
2739
- upload_artifact(artifact_path, "microservices/rachele/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2740
-
2741
- Dir.chdir '../../'
2742
- end
2743
-
2744
- def create_roger_artifact(revision)
2745
- output "Preparo l'artifact roger .zip\n".yellow
2746
-
2747
- git_checkout_version('roger', revision)
2748
-
2749
- Dir.chdir 'projects/roger'
2750
-
2751
- decrypt_secrets()
2752
-
2753
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2754
- exec_step 'prepare-docker-compose --directory roger && cp docker-compose-qainit.yml docker-compose.yml'
2755
- [
2756
- "docker-compose build web",
2757
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2758
- '-c' 'mix local.hex --force && mix hex.info && \
2759
- mix deps.get && mix compile && mix deps.compile && \
2760
- mix phx.digest && \
2761
- rm -rf _build/qa/rel/ && \
2762
- mix distillery.release --env=qa'"
2763
- ].each do |cmd|
2764
- execute_command cmd
2765
- end
2766
-
2767
- cleanup_containers
2768
-
2769
- artifact_path = Dir.glob("_build/qa/rel/roger/releases/*/roger.tar.gz").first
2770
- upload_artifact(artifact_path, "microservices/roger/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2771
-
2772
- Dir.chdir '../../'
2773
- end
2774
-
2775
- def create_rogoreport_artifact(revision)
2776
- output "Preparo l'artifact rogoreport .zip\n".yellow
2777
-
2778
- git_checkout_version('rogoreport', revision)
2779
-
2780
- Dir.chdir 'projects/rogoreport'
2781
-
2782
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2783
-
2784
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2785
- exec_step 'prepare-docker-compose --directory rogoreport && cp docker-compose-qainit.yml docker-compose.yml'
2786
- [
2787
- "docker-compose build web",
2788
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2789
- '-c' 'mix local.hex --force && mix hex.info && \
2790
- mix deps.get && mix compile && mix deps.compile && \
2791
- rm -rf _build/qa/rel/ && \
2792
- mix release --name=rogoreport --env=qa'"
2793
- ].each do |cmd|
2794
- execute_command cmd
2795
- end
2796
-
2797
- cleanup_containers
2798
-
2799
- artifact_path = Dir.glob("_build/qa/rel/rogoreport/releases/*/rogoreport.tar.gz").first
2800
- upload_artifact(artifact_path, "microservices/rogoreport/rogoreport-#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2801
-
2802
- Dir.chdir '../../'
2803
- end
2804
-
2805
- def create_skynet_artifact(revision)
2806
- output "Preparo l'artifact skynet\n".yellow
2807
-
2808
- git_checkout_version('skynet', revision)
2809
-
2810
- Dir.chdir 'projects/skynet'
2811
-
2812
- version = `git rev-parse HEAD`
2813
-
2814
- artifact_path = "/tmp/skynet-#{revision}-qa.tar.gz"
2815
-
2816
- exec_step "tar cfz #{artifact_path} core config oracles pickles web tests logstash application_log configuration.py wsgi.py requirements.txt requirements_training.txt"
2817
-
2818
- upload_artifact(artifact_path, "microservices/skynet/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2819
-
2820
- Dir.chdir '../../'
2821
- end
2822
-
2823
- def create_starsky_artifact(revision)
2824
- output "Preparo l'artifact starsky\n".yellow
2825
-
2826
- git_checkout_version('starsky', revision)
2827
-
2828
- Dir.chdir 'projects/starsky'
2829
-
2830
- version = `git rev-parse HEAD`
2831
-
2832
- #artifact_path = "/tmp/starsky-#{revision}-qa.tar.gz"
2833
-
2834
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2835
-
2836
- `mv docker-compose-ci.yml docker-compose.yml`
2837
- exec_step 'prepare-docker-compose --directory starsky && cp docker-compose-qainit.yml docker-compose.yml'
2838
- exec_step "sed s/qa_deploy_id/#{get_deploy_id}/ .env.dist.qa"
2839
- exec_step "cp .env.dist.qa .env"
2840
-
2841
- [
2842
- "sed -i 's/USER app/USER root/g' Dockerfile",
2843
- "if echo `docker network ls` | grep peano_default; \
2844
- then echo 'peano_default network already existing'; \
2845
- else docker network create peano_default; fi",
2846
- "docker-compose build web",
2847
- "docker-compose run -w $PWD -e BUILD_ENV=qa -u root --entrypoint /bin/sh web \
2848
- '-c' 'cargo build --release -vv --features=qa \
2849
- && cargo build --bin migrate --release --features=qa \
2850
- && cargo build --bin rabbit_worker --release --features=qa \
2851
- && cp -p target/release/starsky . \
2852
- && cp -p target/release/migrate . \
2853
- && cp -p target/release/rabbit_worker . \
2854
- && tar cfz #{revision}-qa.tar.gz config starsky migrate rabbit_worker .env.dist.*'"
2855
- ].each do |cmd|
2856
- execute_command cmd
2857
- end
2858
-
2859
- artifact_path = "./#{revision}-qa.tar.gz"
2860
-
2861
- upload_artifact(artifact_path, "microservices/starsky/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2862
-
2863
- Dir.chdir '../../'
2864
- end
2865
-
2866
- def create_urania_artifact(revision)
2867
- output "Preparo l'artifact urania .zip\n".yellow
2868
-
2869
- git_checkout_version('urania', revision)
2870
-
2871
- Dir.chdir 'projects/urania'
2872
-
2873
- decrypt_secrets()
2874
-
2875
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2876
- exec_step 'prepare-docker-compose --directory urania && cp docker-compose-qainit.yml docker-compose.yml'
2877
-
2878
- execute_command "docker-compose build web"
2879
-
2880
- [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2881
- '-c' 'mix local.hex --force && mix hex.info && \
2882
- mix deps.get && mix compile && mix deps.compile && \
2883
- rm -rf _build/qa/rel/ && \
2884
- mix release --env=qa'"
2885
- ].each do |cmd|
2886
- execute_command cmd
2887
- end
2888
-
2889
- cleanup_containers
2890
-
2891
- artifact_path = Dir.glob("_build/qa/rel/urania/releases/*/urania.tar.gz").first
2892
- upload_artifact(artifact_path, "microservices/urania/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2893
-
2894
- Dir.chdir '../../'
2895
- end
2896
-
2897
662
  def deploy_pyxis?
2898
663
  if defined? @deploy_pyxis
2899
664
  @deploy_pyxis
2900
665
  else
2901
666
  pyxis_updated = `git log -p -1 --unified=0 | grep pyxis-npm:`.length > 0
2902
667
 
2903
- update_pyxis = !@projects['pyxis-npm'].empty? && @projects['pyxis-npm'][:name] != 'master' && pyxis_updated
668
+ update_pyxis = !@projects['pyxis-npm'].empty? && @projects['pyxis-npm']['name'] != 'master' && pyxis_updated
2904
669
 
2905
670
  @deploy_pyxis = update_pyxis
2906
671
  return update_pyxis
2907
672
  end
2908
673
  end
2909
674
 
2910
- def deploy_crash?
2911
- crash_present = !@projects['crash'].empty? && @projects['crash'][:name] != 'master' && !@projects['crash'][:default_branch]
2912
- leftorium_present = !@projects['leftorium'].empty? && @projects['leftorium'][:name] != 'master' && !@projects['leftorium'][:default_branch]
2913
- crash_present || leftorium_present
2914
- end
2915
-
2916
- def deploy_starsky_hutch_maia?
2917
- starsky_present = !@projects['starsky'].nil? && !@projects['starsky'].empty? && @projects['starsky'][:name] != 'master' && !@projects['starsky'][:default_branch]
2918
- hutch_present = !@projects['hutch'].nil? && !@projects['hutch'].empty? && @projects['hutch'][:name] != 'master' && !@projects['hutch'][:default_branch]
2919
- maia_present = !@projects['maia'].nil? && !@projects['maia'].empty? && @projects['maia'][:name] != 'master' && !@projects['maia'][:default_branch]
2920
- starsky_present || hutch_present || maia_present
2921
- end
2922
-
2923
- def get_pyxis_version(deploy_id)
2924
- (deploy_id.delete '[a-z0]')[0..9]
2925
- end
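get_pyxis_version strips every lowercase letter and every zero from the deploy id (String#delete uses tr-like syntax, so the literal bracket characters are removed as well) and keeps at most the first ten remaining characters. A quick worked example with a made-up deploy id:

deploy_id = 'a1b2c3d4e5f6a7b8c9d0e1f2'   # hypothetical deploy id
deploy_id.delete('[a-z0]')               # => "12345678912"
deploy_id.delete('[a-z0]')[0..9]         # => "1234567891"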
2926
-
2927
- def cleanup_containers
2928
- `docker-compose kill && docker-compose down -v --remove-orphans`
2929
- `docker rm $(docker ps -q -f status=exited)`
2930
- end
2931
-
2932
- def git_checkout_version(project, revision)
2933
- Dir.chdir "projects/#{project}"
2934
- exec_step "git checkout -- . && git checkout #{revision}"
2935
- Dir.chdir "../../"
2936
- end
2937
-
2938
- def create_asg_stack(stack_name, tags = [])
2939
- stack_body = IO.read('cloudformation/stacks/asg/ecs-asg-allinone.yml')
2940
- parameters = [
2941
- {
2942
- parameter_key: "Environment",
2943
- parameter_value: "qa"
2944
- },
2945
- {
2946
- parameter_key: "InstanceType",
2947
- parameter_value: "t3.large"
2948
- },
2949
- {
2950
- parameter_key: "ECSClusterName",
2951
- parameter_value: @ecs_cluster_name
2952
- },
2953
- {
2954
- parameter_key: "AMIID",
2955
- parameter_value: @ami_id
2956
- }
2957
- ]
2958
- create_stack(stack_name, stack_body, parameters, tags, @cf_role)
2959
- end
2960
-
2961
- def create_cluster_stack(stack_name, tags = [])
2962
- stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
2963
- create_stack(stack_name, stack_body, [], tags)
2964
- end
2965
-
2966
675
  def update_cluster_stack(stack_name, tags = [])
2967
676
  stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
2968
677
  update_stack(stack_name, stack_body, [], tags)
2969
678
  end
2970
679
 
2971
- def create_alb_stack(stack_name, role, hash, environment = 'qa')
2972
- stack_body = IO.read('cloudformation/stacks/elb/alb-public-qa.yml')
2973
- parameters = [
2974
- {
2975
- parameter_key: "Environment",
2976
- parameter_value: environment
2977
- },
2978
- {
2979
- parameter_key: "Role",
2980
- parameter_value: role
2981
- },
2982
- {
2983
- parameter_key: "EnvHash",
2984
- parameter_value: hash
2985
- }
2986
- ]
2987
- create_stack(stack_name, stack_body, parameters, [], @cf_role)
2988
- end
2989
-
2990
- def import_redis_crash(qa_ip_address)
2991
- output "Importo chiavi di Redis da staging\n".yellow
2992
-
2993
- prefixes = ['CODICI', 'fun_with_flags']
2994
- redis_qa = Redis.new(:url => "redis://#{qa_ip_address}:6379/10")
2995
- redis_staging = Redis.new(:url => 'redis://staging.cache-1.prima.it:6379/10')
2996
-
2997
- prefixes.each do |prefix|
2998
- redis_staging.keys("#{prefix}*").each do |key|
2999
- next unless redis_qa.keys(key).empty?
3000
- output "Importo #{key} dal Redis di staging\n".yellow
3001
- dump_staging = redis_staging.dump key
3002
- redis_qa.restore key, 0, dump_staging
3003
- end
3004
- end
3005
- end
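import_redis_crash copies selected key prefixes from the staging Redis into the QA instance with DUMP/RESTORE, skipping keys that already exist; the second argument to restore is a TTL in milliseconds, so 0 means the copied keys never expire. A minimal stand-alone version of the same copy loop (both URLs are placeholders):

require 'redis'

source = Redis.new(url: 'redis://staging.example.com:6379/10')  # placeholder URL
target = Redis.new(url: 'redis://10.0.0.1:6379/10')             # placeholder URL

source.keys('CODICI*').each do |key|
  next unless target.keys(key).empty?       # keep keys that are already in QA
  target.restore(key, 0, source.dump(key))  # ttl 0 = the copy never expires
end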
3006
-
3007
- def import_dbs(ip_address)
3008
- resp = @ecs.run_task({
3009
- cluster: @ecs_cluster_name,
3010
- task_definition: @import_db_task,
3011
- overrides: {
3012
- container_overrides: [
3013
- {
3014
- name: 'dbrestore',
3015
- environment: [
3016
- {
3017
- name: 'EC2_IP_ADDRESS',
3018
- value: ip_address
3019
- }
3020
- ]
3021
- }
3022
- ]
3023
- },
3024
- count: 1
3025
- })
3026
- return resp
3027
- end
3028
-
3029
- def wait_for_db_import(task)
3030
- output "Attendo che i DB vengano importati...\n".yellow
3031
- stopped_at = nil
3032
- sleep 15 # otherwise the freshly started task cannot be found yet...
3033
- while stopped_at.nil?
3034
- if task.tasks[0].nil?
3035
- pp @ecs_cluster_name
3036
- pp task
3037
- stop_if true, "Task di import DB lanciato, ma risposta vuota!".red
3038
- end
3039
- task = @ecs.describe_tasks({
3040
- cluster: task.tasks[0].cluster_arn,
3041
- tasks: [task.tasks[0].task_arn]
3042
- })
3043
- stopped_at = task.tasks[0].stopped_at unless task.tasks[0].nil?
3044
- sleep_seconds = 10
3045
- seconds_elapsed = 0
3046
- while true && stopped_at.nil?
3047
- break if seconds_elapsed >= sleep_seconds
3048
- print '.'.yellow; STDOUT.flush
3049
- sleep 1
3050
- seconds_elapsed += 1
3051
- end
3052
- end
3053
- print "\n"
3054
- end
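wait_for_db_import polls describe_tasks by hand until the dbrestore task reports a stopped_at timestamp. aws-sdk-ecs also exposes a tasks_stopped waiter that expresses the same wait; the snippet below is shown only as an alternative sketch, where task stands for the run_task response returned by import_dbs.

require 'aws-sdk-ecs'

ecs = Aws::ECS::Client.new
# `task` is the response object returned by run_task in import_dbs above.
ecs.wait_until(:tasks_stopped,
               cluster: task.tasks[0].cluster_arn,
               tasks: [task.tasks[0].task_arn]) do |w|
  w.delay        = 10
  w.max_attempts = 180   # allow up to ~30 minutes for the restore to finish
end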
3055
-
3056
680
  def choose_branch_to_deploy(project_name, select_master = false)
3057
- return {name: 'master', revision: '399653d555b8864', committer: 'crash@prima.it', default_branch: true} if project_name == 'crash' && select_master
3058
681
  Dir.chdir "projects/#{project_name}"
3059
682
  output "Recupero la lista dei branch del progetto #{project_name}..."
3060
683
  `git remote prune origin`
@@ -3094,8 +717,7 @@ class Release
3094
717
  name = branch_name.split(' ')[0]
3095
718
  revision = branch_name.split(' ')[1]
3096
719
  committer_email = branch_name.split(' ')[2].tr('<>', '')
3097
- return { name: 'crash', default_branch: true } if project_name == 'crash' && branch_name == 'master' # remove this line if QA ever needs crash on a branch other than master
3098
- { name: name, revision: revision[0..14], committer: committer_email, default_branch: select_master }
720
+ { 'name' => name, 'revision' => revision[0..14], 'committer' => committer_email, 'default_branch' => select_master }
3099
721
  end
3100
722
 
3101
723
  def select_branch_to_deploy(project_name, branch_name)
@@ -3111,7 +733,7 @@ class Release
3111
733
  name = branch_name.split(' ')[0]
3112
734
  revision = branch_name.split(' ')[1]
3113
735
  committer_email = branch_name.split(' ')[2].tr('<>', '')
3114
- { name: name, revision: revision[0..14], committer: committer_email }
736
+ { 'name' => name, 'revision' => revision[0..14], 'committer' => committer_email }
3115
737
  end
3116
738
 
3117
739
  def get_stacks()
@@ -3142,73 +764,6 @@ class Release
3142
764
  (Time.now.to_i.to_s[-4..-1].to_i + Random.rand(40000)).to_s
3143
765
  end
3144
766
 
3145
- def launch_marley(ip_address, prima_hostname, borat_hostname)
3146
- resp = @cf.describe_stack_resource({
3147
- stack_name: 'batch-job-marley',
3148
- logical_resource_id: 'JobDefinition'
3149
- })
3150
-
3151
- @batch.submit_job({
3152
- job_name: "marley-#{@dns_record_identifier}", # required
3153
- job_queue: "tools-production", # required
3154
- job_definition: resp.stack_resource_detail.physical_resource_id, # required
3155
- container_overrides: {
3156
- environment: [
3157
- {
3158
- name: 'PRIMA_URL',
3159
- value: "https://#{prima_hostname}/?superprima"
3160
- },
3161
- {
3162
- name: 'PRIMA_IP',
3163
- value: ip_address
3164
- },
3165
- {
3166
- name: 'PROJECTS_JSON',
3167
- value: @projects.to_json
3168
- },
3169
- {
3170
- name: 'BACKOFFICE_URL',
3171
- value: "https://#{borat_hostname}"
3172
- }
3173
- ]
3174
- }
3175
- })
3176
-
3177
- output "Marley lanciato con successo!\n".green
3178
- end
3179
-
3180
- def get_currently_deployed_version(stack_name)
3181
- parameters = get_stack_parameters(stack_name)
3182
- currently_deployed_version = nil
3183
- parameters.each do |parameter|
3184
- if parameter.parameter_key == "ReleaseVersion"
3185
- currently_deployed_version = parameter.parameter_value
3186
- end
3187
- end
3188
- currently_deployed_version
3189
- end
3190
-
3191
- def decrypt_secrets()
3192
- docker_image = "prima/biscuit_populate_configs"
3193
- [
3194
- "docker pull #{docker_image}",
3195
- "docker run -t --rm -v $HOME/.aws:/root/.aws -w $PWD --volumes-from #{get_host_container_name} #{docker_image}"
3196
- ].each do |cmd|
3197
- execute_command cmd
3198
- end
3199
- end
3200
-
3201
- def get_host_container_name()
3202
- if @host_container_name
3203
- @host_container_name
3204
- else
3205
- hostname = `cat /etc/hostname`.gsub("\n", '')
3206
- execute_command "docker ps | grep #{hostname} | awk '{print $NF}'"
3207
- @host_container_name = `docker ps | grep #{hostname} | awk '{print $NF}'`.gsub("\n", '')
3208
- # @host_container_name = `docker ps | grep #{hostname} | sed -r "s/.+ ([^\s].+)$/\1/p"`
3209
- end
3210
- end
3211
-
3212
767
  def select_branches(project_names = nil)
3213
768
  output "Deploy feature menu"
3214
769
  if project_names.nil?
@@ -3222,14 +777,6 @@ class Release
3222
777
  end
3223
778
  end
3224
779
  end
3225
-
3226
- def get_ami_id(stack_name)
3227
- get_stack_parameters(stack_name).each do |param|
3228
- if param.parameter_key == "AMIID"
3229
- return param.parameter_value
3230
- end
3231
- end
3232
- end
3233
780
  end
3234
781
 
3235
782
  def help_content
@@ -3255,7 +802,6 @@ Description
3255
802
  finish finishes the feature by merging to dev and master
3256
803
  qainit deploys a new environment with selected branches from every project
3257
804
  qainit $PROJECT_NAME deploys a new environment allowing to selected a branch from the input project (everything else is master)
3258
- qainit minimal prima deploys a new copy of prima project, using staging microservices and database
3259
805
  qainit shutdown deletes a specific qa environment
3260
806
 
3261
807
  Available only to devops (from artemide)