prima-twig 0.59.124 → 0.60.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: a2e4e4e6e6fdbd8467adac60aef5e92500af1515ef201ff8a578775625d3c6e5
4
- data.tar.gz: dca974a49aaef9e663b427f65752a47b83276c8906bd55aea55451254fabeeb6
3
+ metadata.gz: 468ffd1ffc03af67b0865e0f0ad9ec9fd94b624693d43c11bed994781c1bcc8c
4
+ data.tar.gz: 0dfbce429edc483f0ca9b954ecab8f3b7bc02f7801fcfb57f54f2a0a968497be
5
5
  SHA512:
6
- metadata.gz: f84e2c97c22a62d75863195e2ee5ebc0f6d849ec56cebe521827b6ffc60761b692a9c0a74463ea029e4e0a8eecf9c456640d3a7f093d2ffb206bd8258be74d06
7
- data.tar.gz: 93fc155420cb41331796aa3a3135a5502115ad97d282836caa9b4456663ba11b7d7cb9d3c10c472416fd8243afb779032ce10e45035fc938d762eb37e869d8c4
6
+ metadata.gz: 80c1a779b946a2cee6d522456eedb26ba8edc4cd0cbaf891703871b245793f5fdea95394f4228e8dbec40ebafbe0d835624454745571dd327a636fdf14e541b0
7
+ data.tar.gz: ec46fd5a1c9895e99abe72c91ab0b919808e5ea6c541e7171ccb4edeb871c98385496c75dc2874c1b378d8692764650cc6b2b2ccc38d10db4d6e601b518fb917
@@ -5,10 +5,7 @@ require_relative '../lib/prima_twig.rb'
5
5
  require_relative '../lib/prima_aws_client.rb'
6
6
  require 'colorize'
7
7
  require 'highline/import'
8
- require 'aws-sdk-batch'
9
- require 'aws-sdk-cloudformation'
10
- require 'aws-sdk-ecs'
11
- require 'aws-sdk-s3'
8
+ require 'aws-sdk'
12
9
  require 'redcarpet'
13
10
  require 'mail'
14
11
  require 'erb'
@@ -22,9 +22,18 @@ class Release
22
22
  exec "twig feature #{ARGV.join ' '}"
23
23
  end
24
24
  end
25
+ @batch = Aws::Batch::Client.new
26
+ @s3 = Aws::S3::Client.new
27
+ @s3_bucket = 'prima-artifacts'
28
+ @artifact_path = '/tmp/prima-artifact.zip'
29
+ @import_db_task = 'arn:aws:ecs:eu-west-1:001575623345:task-definition/ecs-task-db-restore2-TaskDefinitionDbRestore-1BXH13XEVLPP0:1'
30
+ @cf_role = 'arn:aws:iam::001575623345:role/qainit-service-role-cloudformat-CloudformationRole-18KBZQIS148R9'
25
31
  @dns_record_identifier = nil
26
32
  @ecs_cluster_name = nil
27
33
  @deploy_update = false
34
+ @qainit = false
35
+ @qainit_host_folder = "/var/ci/#{ENV['DRONE_REPO']}/#{ENV['DRONE_BRANCH']}/#{ENV['DRONE_BUILD_NUMBER']}"
36
+ @qainit_folder = "/drone/src/github.com/project/primait/qainit"
28
37
  @projects = {
29
38
  'prima' => {},
30
39
  'urania' => {},
@@ -33,7 +42,7 @@ class Release
33
42
  'hal9000' => {},
34
43
  'fidaty' => {},
35
44
  'peano' => {},
36
- # 'rogoreport' => {},
45
+ 'rogoreport' => {},
37
46
  'assange' => {},
38
47
  'borat' => {},
39
48
  'crash' => {},
@@ -50,7 +59,6 @@ class Release
50
59
  @base_stack_name_alb = 'ecs-alb-http-public-qa-'
51
60
  @base_stack_name_alb_ws = 'ecs-alb-ws-public-qa-'
52
61
  @git_branch = ''
53
- @cloudflare = Rubyflare.connect_with(ENV['CLOUDFLARE_EMAIL'], ENV['CLOUDFLARE_APIKEY'])
54
62
  end
55
63
 
56
64
  def execute!(args)
@@ -65,6 +73,8 @@ class Release
65
73
  qainit_deploy_shutdown!
66
74
  elsif 'update' == args[1]
67
75
  qainit_deploy_update!
76
+ elsif 'read' == args[1]
77
+ qainit_read_config! args[2]
68
78
  else
69
79
  if args[1]
70
80
  select_branches(args[1..-1])
@@ -81,8 +91,19 @@ class Release
81
91
  end
82
92
  when 'deploy'
83
93
  abort('Non sei nella cartella di artemide') unless Dir.pwd.match 'artemide$'
84
- if 'lock' == args[1]
94
+ if ['terminate', 'stop', 'shutdown', 'halt', 'destroy'].include? args[1]
95
+ deploy_shutdown!
96
+ elsif 'update' == args[1]
97
+ deploy_update!
98
+ elsif 'lock' == args[1]
85
99
  deploy_lock!
100
+ else
101
+ if args[1]
102
+ select_branches(args[1])
103
+ else
104
+ select_branches
105
+ end
106
+ deploy_feature!
86
107
  end
87
108
  when 'aggregator'
88
109
  if 'enable' == args[1]
@@ -118,6 +139,7 @@ class Release
118
139
  output 'Disable aggregator'
119
140
 
120
141
  output "Recupero le informazioni relative al puntamento dei record DNS..."
142
+ cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
121
143
  output "Recupero le informazioni sui QA attivi..."
122
144
  stack_list, envs = get_stacks()
123
145
 
@@ -129,7 +151,7 @@ class Release
129
151
  end.is_a?(Aws::CloudFormation::Types::Tag)
130
152
  aggregator_enabled
131
153
  end[0]
132
- dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
154
+ dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
133
155
  stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori non stanno puntando ad un QA".red
134
156
  change_hostname_priority(env_hash, hostname_pattern_priority())
135
157
  dns_to_staging(env_hash)
@@ -145,7 +167,8 @@ class Release
145
167
  output 'Enable aggregator'
146
168
 
147
169
  output 'Recupero le informazioni relative al puntamento dei record DNS...'
148
- dns_records = @cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records', { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
170
+ cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
171
+ dns_records = cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records', { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
149
172
  stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori stanno gia' puntando ad un QA".red
150
173
 
151
174
  output "Recupero le informazioni sui QA attivi..."
@@ -175,7 +198,7 @@ class Release
175
198
  dns_records.body[:result].each do |dns|
176
199
  if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
177
200
  output "Changing #{dns[:name]} DNS record"
178
- @cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: get_alb_host(@base_stack_name_alb + env_hash[3..8]), proxied: true, ttl: 1})
201
+ cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: get_alb_host(@base_stack_name_alb + env_hash[3..8]), proxied: true, ttl: 1})
179
202
  end
180
203
  end
181
204
 
@@ -234,11 +257,12 @@ class Release
234
257
 
235
258
  def dns_to_staging(env_hash)
236
259
  output "Recupero le informazioni relative al puntamento dei record DNS..."
237
- dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
260
+ cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
261
+ dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
238
262
  dns_records.body[:result].each do |dns|
239
263
  if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
240
264
  output "Changing #{dns[:name]} DNS record"
241
- @cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1})
265
+ cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1})
242
266
  end
243
267
  end
244
268
  end
@@ -452,10 +476,8 @@ class Release
452
476
 
453
477
  File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }
454
478
 
455
- update_drone_yml!
456
-
457
479
  `git add projects && \
458
- git add branch_names .drone.yml && \
480
+ git add branch_names && \
459
481
  git commit -m '#{branch_name}' && \
460
482
  git push -f --set-upstream origin #{branch_name} && \
461
483
  git checkout master`
@@ -499,10 +521,7 @@ class Release
499
521
 
500
522
  File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }
501
523
 
502
- update_drone_yml!
503
-
504
- `git add branch_names .drone.yml`
505
- `git commit -m 'update'`
524
+ `git commit -am 'update'`
506
525
  `git push && git checkout master`
507
526
  end
508
527
 
@@ -577,9 +596,11 @@ class Release
577
596
  delete_stack(@base_stack_name_alb + env_hash[3..8]) if stack_exists?(@base_stack_name_alb + env_hash[3..8])
578
597
  delete_stack(@base_stack_name_alb_ws + env_hash[3..8]) if stack_exists?(@base_stack_name_alb_ws + env_hash[3..8])
579
598
  `git checkout master && git push origin --delete ${DRONE_BRANCH}`
580
- output "Cancello il record DNS utilizzato da Lighthouse"
581
- delete_lighthouse_dns()
582
599
  output "Finito!".green
600
+
601
+ if @qainit
602
+ qainit_write_output("QA #{ENV["DRONE_BRANCH"]} spento.", 'Indirizzi scritti su ')
603
+ end
583
604
  end
584
605
 
585
606
  def qainit_write_output(file_message, output_message)
@@ -589,16 +610,41 @@ class Release
589
610
  output "#{output_message} #{qa_file_name}".green
590
611
  end
591
612
 
592
- def update_drone_yml!()
593
- drone_yml = File.read('.drone.yml')
594
- @projects.each do |key, project|
595
- drone_yml = drone_yml.gsub(/#{key}@.+\n/, "#{key}@#{project['revision']}\n")
613
+ def qainit_read_config!(action)
614
+ projects = ''
615
+
616
+ File.open('branch_names', 'r') do |file|
617
+ file.each_line do |line|
618
+ projects = JSON.parse(line)
619
+ end
620
+ end
621
+
622
+ projects.each do |key, project|
623
+ @projects[key] = project
596
624
  end
597
- File.open(".drone.yml", "w") do |f|
598
- f.write(drone_yml)
625
+
626
+ get_s3_config_files
627
+ @qainit = true
628
+ case action
629
+ when 'shutdown'
630
+ output 'Shutting down'.green
631
+ qainit_drone_shutdown!
632
+ else
633
+ output 'Starting standard deploy'.green
634
+ deploy_feature!
599
635
  end
600
636
  end
601
637
 
638
+ def get_s3_config_files
639
+ # manteniamo la struttura per lanciarlo facilmente anche da locale
640
+ `mkdir -p cloudformation/stacks/task cloudformation/stacks/route53 cloudformation/stacks/asg cloudformation/stacks/elb`
641
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/task/db.yml', response_target: 'cloudformation/stacks/task/db.yml'})
642
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/qa.yml', response_target: 'cloudformation/stacks/route53/qa.yml'})
643
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/asg/ecs-asg-allinone.yml', response_target: 'cloudformation/stacks/asg/ecs-asg-allinone.yml'})
644
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/ecs-cluster.yml', response_target: 'cloudformation/stacks/ecs-cluster.yml'})
645
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/elb/alb-public-qa.yml', response_target: 'cloudformation/stacks/elb/alb-public-qa.yml'})
646
+ end
647
+
602
648
  def get_deploy_id
603
649
  if @deploy_id
604
650
  @deploy_id
@@ -608,6 +654,1277 @@ class Release
608
654
  end
609
655
  end
610
656
 
657
+ def deploy_feature!
658
+ `git pull && git submodule init && git submodule update`
659
+ @ami_id = get_ami_id("ecs-fleet-allinone-staging")
660
+ deploy_id = get_deploy_id
661
+ stack_name_alb = 'ecs-alb-http-public-qa-' + deploy_id[0..5]
662
+ stack_name_alb_ws = 'ecs-alb-ws-public-qa-' + deploy_id[0..5]
663
+ unless @qainit
664
+ @projects.each_key do |project_key|
665
+ if @projects[project_key]['revision']
666
+ git_checkout_version(project_key, @projects[project_key]['revision'])
667
+ end
668
+ end
669
+ end
670
+ @dns_record_identifier = deploy_id
671
+ @git_branch = ENV['DRONE_BRANCH']
672
+ hostname_pattern_priority = hostname_pattern_priority()
673
+ tags = [
674
+ {
675
+ key: "qainit",
676
+ value: @git_branch
677
+ },
678
+ {
679
+ key: "hostname_pattern_priority",
680
+ value: hostname_pattern_priority
681
+ }
682
+ ]
683
+ @projects.each do |key, value|
684
+ case key.to_s
685
+ when 'crash'
686
+ tags << { key: 'crash', value: @projects['crash']['name'] } if deploy_crash?
687
+ when 'starsky', 'hutch'
688
+ tags << { key: key.to_s, value: @projects[key.to_s]['name'] } if deploy_starsky_hutch?
689
+ else
690
+ tags << { key: key, value: value['name'] }
691
+ end
692
+ end
693
+
694
+ cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
695
+
696
+ if stack_exists?(cluster_stack_name)
697
+ tags = get_stack_tags(cluster_stack_name)
698
+ hostname_pattern_priority = tags.detect do |tag|
699
+ tag.key == 'hostname_pattern_priority'
700
+ end.value
701
+ end
702
+
703
+ create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
704
+ wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
705
+
706
+ create_alb_stack(stack_name_alb, "http", deploy_id) unless stack_exists?(stack_name_alb)
707
+ create_alb_stack(stack_name_alb_ws, "websocket", deploy_id) unless stack_exists?(stack_name_alb_ws)
708
+
709
+ resp = describe_stack_resource(cluster_stack_name, 'ECSCluster')
710
+ @ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
711
+
712
+ asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
713
+ create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
714
+
715
+ stack_name_db = "ecs-task-db-qa-#{deploy_id}"
716
+ stack_body = IO.read('cloudformation/stacks/task/db.yml')
717
+ parameters = [
718
+ {
719
+ parameter_key: "Environment",
720
+ parameter_value: "qa"
721
+ },
722
+ {
723
+ parameter_key: "ECSClusterName",
724
+ parameter_value: @ecs_cluster_name
725
+ }
726
+ ]
727
+ create_stack(stack_name_db, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_db) # creazione asincrona stack dei db (~4 min)
728
+
729
+ output "check pyxis \n".yellow
730
+
731
+ create_pyxis_artifact(@projects["pyxis-npm"]['revision'], deploy_id) unless @projects["pyxis-npm"].nil? # deve essere creato prima di quello di prima, per avere la versione
732
+ create_prima_artifact(@projects["prima"]['revision'], @projects["prima"]['name'], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"]['revision']}.tar.gz")
733
+ # l'artefatto di prima viene creato sempre (puntamenti all'ambiente compilati nel js) e richiede molto più di 4 minuti
734
+ wait_for_stack_ready(stack_name_db) unless stack_ready?(stack_name_db) # dovrebbe essere istantaneo
735
+ db_task = ''
736
+ db_task = import_dbs(ec2_ip_address(asg_stack_name)) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # import asincrono dei dati
737
+
738
+ create_crash_artifact(@projects['crash']['revision'], deploy_id) unless !deploy_crash? || artifact_exists?('prima-artifacts-encrypted', "microservices/crash/#{@projects['crash']['revision']}-qa.tar.gz")
739
+ create_urania_artifact(@projects["urania"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/urania/#{@projects["urania"]['revision']}-qa.tar.gz")
740
+ create_roger_artifact(@projects["roger"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/roger/#{@projects["roger"]['revision']}-qa.tar.gz")
741
+ create_ermes_artifact(@projects["ermes"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/ermes/#{@projects["ermes"]['revision']}-qa.tar.gz")
742
+ create_bburago_artifact(@projects["bburago"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/bburago/#{@projects["bburago"]['revision']}-qa.tar.gz")
743
+ create_hal9000_artifact(@projects["hal9000"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/hal9000/#{@projects["hal9000"]['revision']}-qa.tar.gz")
744
+ create_rachele_artifact(@projects["rachele"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rachele/#{@projects["rachele"]['revision']}-qa.tar.gz")
745
+ create_fidaty_artifact(@projects["fidaty"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/fidaty/#{@projects["fidaty"]['revision']}-qa.tar.gz")
746
+ create_peano_artifact(@projects["peano"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/peano/#{@projects["peano"]['revision']}-qa.tar.gz")
747
+ create_rogoreport_artifact(@projects["rogoreport"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rogoreport/rogoreport-#{@projects["rogoreport"]['revision']}-qa.tar.gz")
748
+ create_assange_artifact(@projects["assange"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/assange/#{@projects["assange"]['revision']}-qa.tar.gz")
749
+ create_borat_artifact(@projects["borat"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/borat/#{@projects["borat"]['revision']}-qa.tar.gz")
750
+ create_activia_artifact(@projects["activia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/activia/#{@projects["activia"]['revision']}-qa.tar.gz")
751
+ create_leftorium_artifact(@projects["leftorium"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/leftorium/#{@projects["leftorium"]['revision']}-qa.tar.gz")
752
+ create_skynet_artifact(@projects["skynet"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/skynet/#{@projects["skynet"]['revision']}-qa.tar.gz")
753
+ create_maia_artifact(@projects["maia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/maia/#{@projects["maia"]['revision']}-qa.tar.gz")
754
+ create_starsky_artifact(@projects["starsky"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/starsky/#{@projects["starsky"]['revision']}-qa.tar.gz")
755
+ create_hutch_artifact(@projects["hutch"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/hutch/#{@projects["hutch"]['revision']}-qa.tar.gz")
756
+
757
+
758
+ wait_for_db_import(db_task) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # dovrebbe essere istantaneo
759
+
760
+ import_redis_crash(ec2_ip_address(asg_stack_name)) if deploy_crash? && !stack_exists?("ecs-task-web-qa-#{deploy_id}")
761
+
762
+ wait_for_stack_ready(stack_name_alb) unless stack_ready?(stack_name_alb)
763
+ wait_for_stack_ready(stack_name_alb_ws) unless stack_ready?(stack_name_alb_ws)
764
+
765
+ stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
766
+ stack_body = IO.read('cloudformation/stacks/route53/qa.yml')
767
+ parameters = [
768
+ {
769
+ parameter_key: "DnsRecordIdentifier",
770
+ parameter_value: @dns_record_identifier
771
+ },
772
+ {
773
+ parameter_key: "PrimaElbHostname",
774
+ parameter_value: get_alb_host(stack_name_alb)
775
+ },
776
+ {
777
+ parameter_key: "UraniaIp",
778
+ parameter_value: ec2_ip_address(asg_stack_name)
779
+ },
780
+ {
781
+ parameter_key: "BburagoIp",
782
+ parameter_value: ec2_ip_address(asg_stack_name)
783
+ },
784
+ {
785
+ parameter_key: "Hal9000Ip",
786
+ parameter_value: ec2_ip_address(asg_stack_name)
787
+ },
788
+ {
789
+ parameter_key: "FidatyIp",
790
+ parameter_value: ec2_ip_address(asg_stack_name)
791
+ },
792
+ {
793
+ parameter_key: "PeanoIp",
794
+ parameter_value: ec2_ip_address(asg_stack_name)
795
+ },
796
+ {
797
+ parameter_key: "ErmesIp",
798
+ parameter_value: ec2_ip_address(asg_stack_name)
799
+ },
800
+ {
801
+ parameter_key: "ActiviaIp",
802
+ parameter_value: ec2_ip_address(asg_stack_name)
803
+ },
804
+ {
805
+ parameter_key: "SkynetIp",
806
+ parameter_value: ec2_ip_address(asg_stack_name)
807
+ },
808
+ {
809
+ parameter_key: "RogerIp",
810
+ parameter_value: ec2_ip_address(asg_stack_name)
811
+ },
812
+ {
813
+ parameter_key: "LeftoriumIp",
814
+ parameter_value: ec2_ip_address(asg_stack_name)
815
+ },
816
+ {
817
+ parameter_key: "RacheleIp",
818
+ parameter_value: ec2_ip_address(asg_stack_name)
819
+ },
820
+ {
821
+ parameter_key: "RedisIp",
822
+ parameter_value: ec2_ip_address(asg_stack_name)
823
+ },
824
+ {
825
+ parameter_key: "AssangeElbHostname",
826
+ parameter_value: get_alb_host(stack_name_alb)
827
+ },
828
+ {
829
+ parameter_key: "BoratElbHostname",
830
+ parameter_value: get_alb_host(stack_name_alb_ws)
831
+ },
832
+ {
833
+ parameter_key: 'CrashElbHostname',
834
+ parameter_value: get_alb_host(stack_name_alb_ws)
835
+ },
836
+ {
837
+ parameter_key: 'StarskyElbHostname',
838
+ parameter_value: get_alb_host(stack_name_alb)
839
+ },
840
+ {
841
+ parameter_key: 'HutchElbHostname',
842
+ parameter_value: get_alb_host(stack_name_alb)
843
+ },
844
+ {
845
+ parameter_key: 'MaiaElbHostname',
846
+ parameter_value: get_alb_host(stack_name_alb)
847
+ }
848
+ ]
849
+
850
+ create_stack(stack_name_route53, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_route53)
851
+ wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
852
+
853
+ stack_name_skynet = "ecs-task-skynet-qa-#{deploy_id}"
854
+ git_checkout_version('skynet', @projects["skynet"]['revision'])
855
+ stack_body = File.read('projects/skynet/deploy/task.yml')
856
+ parameters = [
857
+ {
858
+ parameter_key: "Environment",
859
+ parameter_value: "qa"
860
+ },
861
+ {
862
+ parameter_key: "ReleaseVersion",
863
+ parameter_value: @projects["skynet"]['revision']
864
+ },
865
+ {
866
+ parameter_key: "TaskDesiredCount",
867
+ parameter_value: "1"
868
+ },
869
+ {
870
+ parameter_key: "ECSClusterName",
871
+ parameter_value: @ecs_cluster_name
872
+ },
873
+ {
874
+ parameter_key: "HostnamePattern",
875
+ parameter_value: "skynet-#{@dns_record_identifier}.qa.colaster.com"
876
+ },
877
+ {
878
+ parameter_key: "HostnamePatternPriority",
879
+ parameter_value: hostname_pattern_priority
880
+ }
881
+ ]
882
+ if stack_exists?(stack_name_skynet)
883
+ cur_version = get_currently_deployed_version(stack_name_skynet)
884
+ update_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["skynet"]['revision'])
885
+ else
886
+ create_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role)
887
+ end
888
+
889
+ stack_name_urania = "ecs-task-urania-qa-#{deploy_id}"
890
+ git_checkout_version('urania', @projects["urania"]['revision'])
891
+ stack_body = File.read('projects/urania/deploy/task.yml')
892
+ parameters = [
893
+ {
894
+ parameter_key: "Environment",
895
+ parameter_value: "qa"
896
+ },
897
+ {
898
+ parameter_key: "ReleaseVersion",
899
+ parameter_value: @projects["urania"]['revision']
900
+ },
901
+ {
902
+ parameter_key: "TaskDesiredCount",
903
+ parameter_value: "1"
904
+ },
905
+ {
906
+ parameter_key: "ECSClusterName",
907
+ parameter_value: @ecs_cluster_name
908
+ },
909
+ {
910
+ parameter_key: "HostnamePattern",
911
+ parameter_value: "urania-#{@dns_record_identifier}.qa.colaster.com"
912
+ },
913
+ {
914
+ parameter_key: "HostnamePatternPriority",
915
+ parameter_value: hostname_pattern_priority
916
+ }
917
+ ]
918
+ if stack_exists?(stack_name_urania)
919
+ cur_version = get_currently_deployed_version(stack_name_urania)
920
+ update_stack(stack_name_urania, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["urania"]['revision'])
921
+ else
922
+ create_stack(stack_name_urania, stack_body, parameters, tags, @cf_role)
923
+ end
924
+
925
+ stack_name_ermes = "ecs-task-ermes-qa-#{deploy_id}"
926
+ git_checkout_version('ermes', @projects["ermes"]['revision'])
927
+ stack_body = File.read('projects/ermes/deploy/task.yml')
928
+ parameters = [
929
+ {
930
+ parameter_key: "Environment",
931
+ parameter_value: "qa"
932
+ },
933
+ {
934
+ parameter_key: "ReleaseVersion",
935
+ parameter_value: "#{@projects['ermes']['revision']}"
936
+ },
937
+ {
938
+ parameter_key: "TaskDesiredCount",
939
+ parameter_value: "1"
940
+ },
941
+ {
942
+ parameter_key: "ECSClusterName",
943
+ parameter_value: @ecs_cluster_name
944
+ },
945
+ {
946
+ parameter_key: "HostnamePattern",
947
+ parameter_value: "ermes-#{@dns_record_identifier}.qa.colaster.com"
948
+ },
949
+ {
950
+ parameter_key: "HostnamePatternPriority",
951
+ parameter_value: hostname_pattern_priority
952
+ },
953
+ {
954
+ parameter_key: "WebHost",
955
+ parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
956
+ },
957
+ {
958
+ parameter_key: "PeanoHost",
959
+ parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
960
+ }
961
+ ]
962
+ if stack_exists?(stack_name_ermes)
963
+ cur_version = get_currently_deployed_version(stack_name_ermes)
964
+ update_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["ermes"]['revision'])
965
+ else
966
+ create_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role)
967
+ end
968
+
969
+ stack_name_bburago = "ecs-task-bburago-qa-#{deploy_id}"
970
+ git_checkout_version('bburago', @projects["bburago"]['revision'])
971
+ stack_body = File.read('projects/bburago/deploy/task.yml')
972
+ parameters = [
973
+ {
974
+ parameter_key: "Environment",
975
+ parameter_value: "qa"
976
+ },
977
+ {
978
+ parameter_key: "ReleaseVersion",
979
+ parameter_value: @projects["bburago"]['revision']
980
+ },
981
+ {
982
+ parameter_key: "ECSClusterName",
983
+ parameter_value: @ecs_cluster_name
984
+ },
985
+ {
986
+ parameter_key: "TaskDesiredCount",
987
+ parameter_value: "1"
988
+ },
989
+ {
990
+ parameter_key: "HostnamePattern",
991
+ parameter_value: "bburago-#{@dns_record_identifier}.qa.colaster.com"
992
+ },
993
+ {
994
+ parameter_key: "HostnamePatternPriority",
995
+ parameter_value: hostname_pattern_priority
996
+ }
997
+ ]
998
+ if stack_exists?(stack_name_bburago)
999
+ cur_version = get_currently_deployed_version(stack_name_bburago)
1000
+ update_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["bburago"]['revision'])
1001
+ else
1002
+ create_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role)
1003
+ end
1004
+
1005
+ stack_name_hal9000 = "ecs-task-hal9000-qa-#{deploy_id}"
1006
+ git_checkout_version('hal9000', @projects["hal9000"]['revision'])
1007
+ stack_body = File.read('projects/hal9000/deploy/task.yml')
1008
+ parameters = [
1009
+ {
1010
+ parameter_key: "Environment",
1011
+ parameter_value: "qa"
1012
+ },
1013
+ {
1014
+ parameter_key: "ReleaseVersion",
1015
+ parameter_value: @projects["hal9000"]['revision']
1016
+ },
1017
+ {
1018
+ parameter_key: "ECSClusterName",
1019
+ parameter_value: @ecs_cluster_name
1020
+ },
1021
+ {
1022
+ parameter_key: "TaskDesiredCount",
1023
+ parameter_value: "1"
1024
+ },
1025
+ {
1026
+ parameter_key: "HostnamePattern",
1027
+ parameter_value: "hal9000-#{@dns_record_identifier}.qa.colaster.com"
1028
+ },
1029
+ {
1030
+ parameter_key: "HostnamePatternPriority",
1031
+ parameter_value: hostname_pattern_priority
1032
+ }
1033
+ ]
1034
+ if stack_exists?(stack_name_hal9000)
1035
+ cur_version = get_currently_deployed_version(stack_name_hal9000)
1036
+ update_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hal9000"]['revision'])
1037
+ else
1038
+ create_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role)
1039
+ end
1040
+
1041
+ stack_name_fidaty = "ecs-task-fidaty-qa-#{deploy_id}"
1042
+ git_checkout_version('fidaty', @projects["fidaty"]['revision'])
1043
+ stack_body = File.read('projects/fidaty/deploy/task.yml')
1044
+ parameters = [
1045
+ {
1046
+ parameter_key: "Environment",
1047
+ parameter_value: "qa"
1048
+ },
1049
+ {
1050
+ parameter_key: "ReleaseVersion",
1051
+ parameter_value: "#{@projects["fidaty"]['revision']}"
1052
+ },
1053
+ {
1054
+ parameter_key: "ECSClusterName",
1055
+ parameter_value: @ecs_cluster_name
1056
+ },
1057
+ {
1058
+ parameter_key: "TaskDesiredCount",
1059
+ parameter_value: "1"
1060
+ },
1061
+ {
1062
+ parameter_key: "HostnamePattern",
1063
+ parameter_value: "fidaty-#{@dns_record_identifier}.qa.colaster.com"
1064
+ },
1065
+ {
1066
+ parameter_key: "HostnamePatternPriority",
1067
+ parameter_value: hostname_pattern_priority
1068
+ },
1069
+ {
1070
+ parameter_key: "PeanoHost",
1071
+ parameter_value: get_route53_hostname("ecs-task-peano-qa-notneeded")
1072
+ }
1073
+ ]
1074
+ if stack_exists?(stack_name_fidaty)
1075
+ cur_version = get_currently_deployed_version(stack_name_fidaty)
1076
+ update_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["fidaty"]['revision'])
1077
+ else
1078
+ create_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role)
1079
+ end
1080
+
1081
+ stack_name_peano = "ecs-task-peano-qa-#{deploy_id}"
1082
+ git_checkout_version('peano', @projects["peano"]['revision'])
1083
+ stack_body = File.read('projects/peano/deploy/task.yml')
1084
+ parameters = [
1085
+ {
1086
+ parameter_key: "Environment",
1087
+ parameter_value: "qa"
1088
+ },
1089
+ {
1090
+ parameter_key: "ReleaseVersion",
1091
+ parameter_value: "#{@projects['peano']['revision']}"
1092
+ },
1093
+ {
1094
+ parameter_key: "ECSClusterName",
1095
+ parameter_value: @ecs_cluster_name
1096
+ },
1097
+ {
1098
+ parameter_key: "TaskDesiredCount",
1099
+ parameter_value: "1"
1100
+ },
1101
+ {
1102
+ parameter_key: "HostnamePattern",
1103
+ parameter_value: "peano-#{@dns_record_identifier}.qa.colaster.com"
1104
+ },
1105
+ {
1106
+ parameter_key: "HostnamePatternPriority",
1107
+ parameter_value: hostname_pattern_priority
1108
+ },
1109
+ {
1110
+ parameter_key: "WebHost",
1111
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1112
+ },
1113
+ {
1114
+ parameter_key: "AssangeHost",
1115
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1116
+ }
1117
+ ]
1118
+ if stack_exists?(stack_name_peano)
1119
+ cur_version = get_currently_deployed_version(stack_name_peano)
1120
+ update_stack(stack_name_peano, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["peano"]['revision'])
1121
+ else
1122
+ create_stack(stack_name_peano, stack_body, parameters, tags, @cf_role)
1123
+ end
1124
+
1125
+ stack_name_rogoreport = "ecs-task-rogoreport-qa-#{deploy_id}"
1126
+ git_checkout_version('rogoreport', @projects["rogoreport"]['revision'])
1127
+ stack_body = IO.read('projects/rogoreport/deploy/task.yml')
1128
+ parameters = [
1129
+ {
1130
+ parameter_key: "Environment",
1131
+ parameter_value: "qa"
1132
+ },
1133
+ {
1134
+ parameter_key: "ReleaseVersion",
1135
+ parameter_value: "#{@projects["rogoreport"]['revision']}"
1136
+ },
1137
+ {
1138
+ parameter_key: "ReleaseName",
1139
+ parameter_value: "rogoreport"
1140
+ },
1141
+ {
1142
+ parameter_key: "ECSClusterName",
1143
+ parameter_value: @ecs_cluster_name
1144
+ }
1145
+ ]
1146
+ if stack_exists?(stack_name_rogoreport)
1147
+ cur_version = get_currently_deployed_version(stack_name_rogoreport)
1148
+ update_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["rogoreport"]['revision'])
1149
+ else
1150
+ create_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role)
1151
+ end
1152
+
1153
+ stack_name_assange = "ecs-task-assange-qa-#{deploy_id}"
1154
+ git_checkout_version('assange', @projects["assange"]['revision'])
1155
+ stack_body = IO.read('projects/assange/deploy/task.yml')
1156
+ parameters = [
1157
+ {
1158
+ parameter_key: "Environment",
1159
+ parameter_value: "qa"
1160
+ },
1161
+ {
1162
+ parameter_key: "ReleaseVersion",
1163
+ parameter_value: "#{@projects["assange"]['revision']}"
1164
+ },
1165
+ {
1166
+ parameter_key: "ECSClusterName",
1167
+ parameter_value: @ecs_cluster_name
1168
+ },
1169
+ {
1170
+ parameter_key: "TaskDesiredCount",
1171
+ parameter_value: "1"
1172
+ },
1173
+ {
1174
+ parameter_key: "ALBShortName",
1175
+ parameter_value: "assange-qa-#{deploy_id}"[0..27]
1176
+ },
1177
+ {
1178
+ parameter_key: "HostnamePattern",
1179
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1180
+ },
1181
+ {
1182
+ parameter_key: "HostnamePatternPriority",
1183
+ parameter_value: (hostname_pattern_priority.to_i + 20).to_s
1184
+ },
1185
+ {
1186
+ parameter_key: "EnvHash",
1187
+ parameter_value: deploy_id
1188
+ },
1189
+ {
1190
+ parameter_key: "WebHost",
1191
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1192
+ },
1193
+ {
1194
+ parameter_key: "AssangeHost",
1195
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1196
+ }
1197
+ ]
1198
+ if stack_exists?(stack_name_assange)
1199
+ cur_version = get_currently_deployed_version(stack_name_assange)
1200
+ update_stack(stack_name_assange, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["assange"]['revision'])
1201
+ else
1202
+ create_stack(stack_name_assange, stack_body, parameters, tags, @cf_role)
1203
+ end
1204
+
1205
+ stack_name_leftorium = "ecs-task-leftorium-qa-#{deploy_id}"
1206
+ git_checkout_version('leftorium', @projects["leftorium"]['revision'])
1207
+ stack_body = File.read('projects/leftorium/deploy/task.yml')
1208
+ parameters = [
1209
+ {
1210
+ parameter_key: "Environment",
1211
+ parameter_value: "qa"
1212
+ },
1213
+ {
1214
+ parameter_key: "ReleaseVersion",
1215
+ parameter_value: "#{@projects["leftorium"]['revision']}"
1216
+ },
1217
+ {
1218
+ parameter_key: "ECSClusterName",
1219
+ parameter_value: @ecs_cluster_name
1220
+ },
1221
+ {
1222
+ parameter_key: "TaskDesiredCount",
1223
+ parameter_value: "1"
1224
+ },
1225
+ {
1226
+ parameter_key: "HostnamePattern",
1227
+ parameter_value: "leftorium-#{@dns_record_identifier}.qa.colaster.com"
1228
+ },
1229
+ {
1230
+ parameter_key: "HostnamePatternPriority",
1231
+ parameter_value: hostname_pattern_priority
1232
+ }
1233
+ ]
1234
+ if stack_exists?(stack_name_leftorium)
1235
+ cur_version = get_currently_deployed_version(stack_name_leftorium)
1236
+ update_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["leftorium"]['revision'])
1237
+ else
1238
+ create_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role)
1239
+ end
1240
+
1241
+ stack_name_rachele = "ecs-task-rachele-qa-#{deploy_id}"
1242
+ git_checkout_version('rachele', @projects["rachele"]['revision'])
1243
+ stack_body = File.read('projects/rachele/deploy/task.yml')
1244
+ parameters = [
1245
+ {
1246
+ parameter_key: "Environment",
1247
+ parameter_value: "qa"
1248
+ },
1249
+ {
1250
+ parameter_key: "ReleaseVersion",
1251
+ parameter_value: "#{@projects["rachele"]['revision']}"
1252
+ },
1253
+ {
1254
+ parameter_key: "ECSClusterName",
1255
+ parameter_value: @ecs_cluster_name
1256
+ },
1257
+ {
1258
+ parameter_key: "TaskDesiredCount",
1259
+ parameter_value: "1"
1260
+ },
1261
+ {
1262
+ parameter_key: "WebHost",
1263
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1264
+ },
1265
+ {
1266
+ parameter_key: "HostnamePattern",
1267
+ parameter_value: "rachele-#{@dns_record_identifier}.qa.colaster.com"
1268
+ },
1269
+ {
1270
+ parameter_key: "HostnamePatternPriority",
1271
+ parameter_value: hostname_pattern_priority
1272
+ }
1273
+ ]
1274
+ if stack_exists?(stack_name_rachele)
1275
+ cur_version = get_currently_deployed_version(stack_name_rachele)
1276
+ unless cur_version.include?(@projects["rachele"]['revision'])
1277
+ delete_stack(stack_name_rachele)
1278
+ wait_for_stack_removal(stack_name_rachele)
1279
+ create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1280
+ end
1281
+ else
1282
+ create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1283
+ end
1284
+
1285
+ stack_name_borat = "ecs-task-borat-qa-#{deploy_id}"
1286
+ git_checkout_version('borat', @projects["borat"]['revision'])
1287
+ stack_body = IO.read('projects/borat/deploy/task.yml')
1288
+ parameters = [
1289
+ {
1290
+ parameter_key: "Environment",
1291
+ parameter_value: "qa"
1292
+ },
1293
+ {
1294
+ parameter_key: "ReleaseVersion",
1295
+ parameter_value: "#{@projects["borat"]['revision']}"
1296
+ },
1297
+ {
1298
+ parameter_key: "ECSClusterName",
1299
+ parameter_value: @ecs_cluster_name
1300
+ },
1301
+ {
1302
+ parameter_key: "TaskDesiredCount",
1303
+ parameter_value: "1"
1304
+ },
1305
+ {
1306
+ parameter_key: "ALBShortName",
1307
+ parameter_value: "borat-qa-#{deploy_id}"[0..27]
1308
+ },
1309
+ {
1310
+ parameter_key: "HostnamePattern",
1311
+ parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1312
+ },
1313
+ {
1314
+ parameter_key: "HostnamePatternPriority",
1315
+ parameter_value: (hostname_pattern_priority.to_i + 30).to_s
1316
+ },
1317
+ {
1318
+ parameter_key: "EnvHash",
1319
+ parameter_value: deploy_id
1320
+ },
1321
+ {
1322
+ parameter_key: "WsEndpoint",
1323
+ parameter_value: "wss://backoffice-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1324
+ },
1325
+ {
1326
+ parameter_key: "GraphqlEndpoint",
1327
+ parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql"
1328
+ },
1329
+ {
1330
+ parameter_key: "AuthEndpoint",
1331
+ parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/auth"
1332
+ },
1333
+ {
1334
+ parameter_key: "FrontendEndpoint",
1335
+ parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1336
+ }
1337
+ ]
1338
+ if stack_exists?(stack_name_borat)
1339
+ cur_version = get_currently_deployed_version(stack_name_borat)
1340
+ update_stack(stack_name_borat, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["borat"]['revision'])
1341
+ else
1342
+ create_stack(stack_name_borat, stack_body, parameters, tags, @cf_role)
1343
+ end
1344
+
1345
+ if deploy_crash?
1346
+ git_checkout_version('crash', @projects['crash']['revision'])
1347
+ stack_name_crash = "ecs-task-crash-qa-#{deploy_id}"
1348
+ stack_body = IO.read('projects/crash/deploy/task.yml')
1349
+ parameters = [
1350
+ {
1351
+ parameter_key: 'Environment',
1352
+ parameter_value: 'qa'
1353
+ },
1354
+ {
1355
+ parameter_key: 'ReleaseVersion',
1356
+ parameter_value: "#{@projects['crash']['revision']}"
1357
+ },
1358
+ {
1359
+ parameter_key: 'TaskDesiredCount',
1360
+ parameter_value: '1'
1361
+ },
1362
+ {
1363
+ parameter_key: 'ECSClusterName',
1364
+ parameter_value: @ecs_cluster_name
1365
+ },
1366
+ {
1367
+ parameter_key: 'ALBShortName',
1368
+ parameter_value: "crash-qa-#{deploy_id}"[0..27]
1369
+ },
1370
+ {
1371
+ parameter_key: 'HostnamePattern',
1372
+ parameter_value: "crash-#{@dns_record_identifier}.qa.colaster.com"
1373
+ },
1374
+ {
1375
+ parameter_key: 'HostnamePatternPriority',
1376
+ parameter_value: (hostname_pattern_priority.to_i + 10).to_s
1377
+ },
1378
+ {
1379
+ parameter_key: "EnvHash",
1380
+ parameter_value: deploy_id
1381
+ },
1382
+ {
1383
+ parameter_key: "WsEndpoint",
1384
+ parameter_value: "wss://crash-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1385
+ },
1386
+ {
1387
+ parameter_key: "GraphqlEndpoint",
1388
+ parameter_value: "https://crash-#{@dns_record_identifier}.qa.colaster.com/graphql"
1389
+ },
1390
+ {
1391
+ parameter_key: "AuthDomain",
1392
+ parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1393
+ },
1394
+ ]
1395
+ if stack_exists?(stack_name_crash)
1396
+ cur_version = get_currently_deployed_version(stack_name_crash)
1397
+ update_stack(stack_name_crash, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["crash"]['revision'])
1398
+ else
1399
+ create_stack(stack_name_crash, stack_body, parameters, tags, @cf_role)
1400
+ end
1401
+ end
1402
+
1403
+ if deploy_starsky_hutch?
1404
+ stack_name_starsky = "ecs-task-starsky-qa-#{deploy_id}"
1405
+ git_checkout_version('starsky', @projects["starsky"]['revision'])
1406
+ stack_body = IO.read('projects/starsky/deploy/task.yml')
1407
+ parameters = [
1408
+ {
1409
+ parameter_key: "Environment",
1410
+ parameter_value: "qa"
1411
+ },
1412
+ {
1413
+ parameter_key: "ReleaseVersion",
1414
+ parameter_value: "#{@projects["starsky"]['revision']}"
1415
+ },
1416
+ {
1417
+ parameter_key: "TaskDesiredCount",
1418
+ parameter_value: "1"
1419
+ },
1420
+ {
1421
+ parameter_key: "ECSClusterName",
1422
+ parameter_value: @ecs_cluster_name
1423
+ },
1424
+ {
1425
+ parameter_key: "ALBShortName",
1426
+ parameter_value: "starsky-qa-#{deploy_id}"[0..27]
1427
+ },
1428
+ {
1429
+ parameter_key: "EnvHash",
1430
+ parameter_value: deploy_id
1431
+ },
1432
+ {
1433
+ parameter_key: "HostnamePattern",
1434
+ parameter_value: "starsky-#{@dns_record_identifier}.qa.colaster.com"
1435
+ },
1436
+ {
1437
+ parameter_key: "HostnamePatternPriority",
1438
+ parameter_value: (hostname_pattern_priority.to_i + 74).to_s
1439
+ }
1440
+ ]
1441
+ if stack_exists?(stack_name_starsky)
1442
+ cur_version = get_currently_deployed_version(stack_name_starsky)
1443
+ update_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["starsky"]['revision'])
1444
+ else
1445
+ create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
1446
+ end
1447
+ end
1448
+
1449
+ stack_name_activia = "ecs-task-activia-qa-#{deploy_id}"
1450
+ git_checkout_version('activia', @projects["activia"]['revision'])
1451
+ stack_body = File.read('projects/activia/deploy/task.yml')
1452
+ parameters = [
1453
+ {
1454
+ parameter_key: "Environment",
1455
+ parameter_value: "qa"
1456
+ },
1457
+ {
1458
+ parameter_key: "ReleaseVersion",
1459
+ parameter_value: "#{@projects["activia"]['revision']}"
1460
+ },
1461
+ {
1462
+ parameter_key: "ECSClusterName",
1463
+ parameter_value: @ecs_cluster_name
1464
+ },
1465
+ {
1466
+ parameter_key: "TaskDesiredCount",
1467
+ parameter_value: "1"
1468
+ },
1469
+ {
1470
+ parameter_key: "HostnamePattern",
1471
+ parameter_value: "activia-#{@dns_record_identifier}.qa.colaster.com"
1472
+ },
1473
+ {
1474
+ parameter_key: "HostnamePatternPriority",
1475
+ parameter_value: hostname_pattern_priority
1476
+ },
1477
+ {
1478
+ parameter_key: "WebHost",
1479
+ parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
1480
+ },
1481
+ {
1482
+ parameter_key: "PeanoHost",
1483
+ parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
1484
+ }
1485
+ ]
1486
+ if stack_exists?(stack_name_activia)
1487
+ cur_version = get_currently_deployed_version(stack_name_activia)
1488
+ update_stack(stack_name_activia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["activia"]['revision'])
1489
+ else
1490
+ create_stack(stack_name_activia, stack_body, parameters, tags, @cf_role)
1491
+ end
1492
+
1493
+ # Waiting for prima healthcheck dependencies
1494
+ wait_for_stack_ready(stack_name_skynet) unless stack_ready?(stack_name_skynet)
1495
+ wait_for_stack_ready(stack_name_urania) unless stack_ready?(stack_name_urania)
1496
+ wait_for_stack_ready(stack_name_bburago) unless stack_ready?(stack_name_bburago)
1497
+ wait_for_stack_ready(stack_name_hal9000) unless stack_ready?(stack_name_hal9000)
1498
+ wait_for_stack_ready(stack_name_assange) unless stack_ready?(stack_name_assange)
1499
+ wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1500
+ wait_for_stack_ready(stack_name_leftorium) unless stack_ready?(stack_name_leftorium)
1501
+ wait_for_stack_ready(stack_name_rachele) unless stack_ready?(stack_name_rachele)
1502
+
1503
+ stack_name_web = "ecs-task-web-qa-#{deploy_id}"
1504
+ git_checkout_version('prima', @projects["prima"]['revision'])
1505
+ stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
1506
+ parameters = [
1507
+ {
1508
+ parameter_key: "Environment",
1509
+ parameter_value: "qa"
1510
+ },
1511
+ {
1512
+ parameter_key: "ReleaseVersion",
1513
+ parameter_value: "#{@projects["prima"]['revision']}"
1514
+ },
1515
+ {
1516
+ parameter_key: "TaskDesiredCount",
1517
+ parameter_value: "1"
1518
+ },
1519
+ {
1520
+ parameter_key: "ECSClusterName",
1521
+ parameter_value: @ecs_cluster_name
1522
+ },
1523
+ {
1524
+ parameter_key: "ALBShortName",
1525
+ parameter_value: "web-qa-#{deploy_id}"[0..27]
1526
+ },
1527
+ {
1528
+ parameter_key: "WebQaBaseHostname",
1529
+ parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
1530
+ },
1531
+ {
1532
+ parameter_key: "HostnamePattern",
1533
+ parameter_value: "ww*-#{@dns_record_identifier}.qa.colaster.com"
1534
+ },
1535
+ {
1536
+ parameter_key: "HostnamePatternPriority",
1537
+ parameter_value: hostname_pattern_priority
1538
+ },
1539
+ {
1540
+ parameter_key: "HostnamePatternAggregatorPriority",
1541
+ parameter_value: (hostname_pattern_priority.to_i + 1).to_s
1542
+ },
1543
+ {
1544
+ parameter_key: "EnvHash",
1545
+ parameter_value: deploy_id
1546
+ },
1547
+ {
1548
+ parameter_key: "AssangeHostname",
1549
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1550
+ },
1551
+ {
1552
+ parameter_key: "BackofficeHostname",
1553
+ parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1554
+ },
1555
+ {
1556
+ parameter_key: "WebHostname",
1557
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1558
+ },
1559
+ {
1560
+ parameter_key: "FePrimaDomain",
1561
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1562
+ },
1563
+ {
1564
+ parameter_key: "HostnamePattern",
1565
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1566
+ }
1567
+ ]
1568
+ if stack_exists?(stack_name_web)
1569
+ cur_version = get_currently_deployed_version(stack_name_web)
1570
+ update_stack(stack_name_web, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1571
+ else
1572
+ create_stack(stack_name_web, stack_body, parameters, tags, @cf_role)
1573
+ end
1574
+
1575
+ stack_name_consumer = "ecs-task-consumer-qa-#{deploy_id}"
1576
+ git_checkout_version('prima', @projects["prima"]['revision'])
1577
+ stack_body = IO.read('projects/prima/app/cloudformation/tasks/consumer.yml')
1578
+ parameters = [
1579
+ {
1580
+ parameter_key: "Environment",
1581
+ parameter_value: "qa"
1582
+ },
1583
+ {
1584
+ parameter_key: "ReleaseVersion",
1585
+ parameter_value: "#{@projects["prima"]['revision']}"
1586
+ },
1587
+ {
1588
+ parameter_key: "ECSClusterName",
1589
+ parameter_value: @ecs_cluster_name
1590
+ },
1591
+ {
1592
+ parameter_key: "NginxHttpHost",
1593
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1594
+ },
1595
+ {
1596
+ parameter_key: "AssangeHostname",
1597
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1598
+ },
1599
+ {
1600
+ parameter_key: "BackofficeHostname",
1601
+ parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1602
+ },
1603
+ {
1604
+ parameter_key: "WebHostname",
1605
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1606
+ },
1607
+ {
1608
+ parameter_key: "FePrimaDomain",
1609
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1610
+ },
1611
+ {
1612
+ parameter_key: "HostnamePattern",
1613
+ parameter_value: "consumer-#{@dns_record_identifier}.qa.colaster.com"
1614
+ }
1615
+ ]
1616
+ if stack_exists?(stack_name_consumer)
1617
+ cur_version = get_currently_deployed_version(stack_name_consumer)
1618
+ update_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1619
+ else
1620
+ create_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role)
1621
+ end
1622
+
1623
+ stack_name_roger = "ecs-task-roger-qa-#{deploy_id}"
1624
+ git_checkout_version('roger', @projects["roger"]['revision'])
1625
+ stack_body = File.read('projects/roger/deploy/task.yml')
1626
+ parameters = [
1627
+ {
1628
+ parameter_key: "Environment",
1629
+ parameter_value: "qa"
1630
+ },
1631
+ {
1632
+ parameter_key: "ReleaseVersion",
1633
+ parameter_value: @projects["roger"]['revision']
1634
+ },
1635
+ {
1636
+ parameter_key: "TaskDesiredCount",
1637
+ parameter_value: "1"
1638
+ },
1639
+ {
1640
+ parameter_key: "ECSClusterName",
1641
+ parameter_value: @ecs_cluster_name
1642
+ },
1643
+ {
1644
+ parameter_key: "HostnamePattern",
1645
+ parameter_value: "roger-#{@dns_record_identifier}.qa.colaster.com"
1646
+ },
1647
+ {
1648
+ parameter_key: "HostnamePatternPriority",
1649
+ parameter_value: hostname_pattern_priority
1650
+ }
1651
+ ]
1652
+ if stack_exists?(stack_name_roger)
1653
+ cur_version = get_currently_deployed_version(stack_name_roger)
1654
+ update_stack(stack_name_roger, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["roger"]['revision'])
1655
+ else
1656
+ create_stack(stack_name_roger, stack_body, parameters, tags, @cf_role)
1657
+ end
1658
+
1659
+
1660
+ if deploy_starsky_hutch?
1661
+ wait_for_stack_ready(stack_name_starsky) unless stack_ready?(stack_name_starsky) unless !deploy_starsky_hutch?
1662
+
1663
+ stack_name_hutch = "ecs-task-hutch-qa-#{deploy_id}"
1664
+ git_checkout_version('hutch', @projects["hutch"]['revision'])
1665
+ stack_body = File.read('projects/hutch/deploy/task.yml')
1666
+ parameters = [
1667
+ {
1668
+ parameter_key: "Environment",
1669
+ parameter_value: "qa"
1670
+ },
1671
+ {
1672
+ parameter_key: "ReleaseVersion",
1673
+ parameter_value: "#{@projects["hutch"]['revision']}"
1674
+ },
1675
+ {
1676
+ parameter_key: "ALBShortName",
1677
+ parameter_value: "hutch-qa-#{deploy_id}"[0..27]
1678
+ },
1679
+ {
1680
+ parameter_key: "ECSClusterName",
1681
+ parameter_value: @ecs_cluster_name
1682
+ },
1683
+ {
1684
+ parameter_key: "EnvHash",
1685
+ parameter_value: deploy_id
1686
+ },
1687
+ {
1688
+ parameter_key: "HostnamePattern",
1689
+ parameter_value: "hutch-#{@dns_record_identifier}.qa.colaster.com"
1690
+ },
1691
+ {
1692
+ parameter_key: "HostnamePatternPriority",
1693
+ parameter_value: (hostname_pattern_priority.to_i + 254).to_s
1694
+ },
1695
+ {
1696
+ parameter_key: "ApiUrl",
1697
+ parameter_value: "https://#{get_route53_hostname('maia-intermediari')}"
1698
+ }
1699
+ ]
1700
+ if stack_exists?(stack_name_hutch)
1701
+ cur_version = get_currently_deployed_version(stack_name_hutch)
1702
+ update_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hutch"]['revision'])
1703
+ else
1704
+ create_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role)
1705
+ end
1706
+ end
1707
+
1708
+ stack_name_maia = "ecs-task-maia-qa-#{deploy_id}"
1709
+ git_checkout_version('maia', @projects["maia"]['revision'])
1710
+ stack_body = File.read('projects/maia/deploy/task.yml')
1711
+ parameters = [
1712
+ {
1713
+ parameter_key: "Environment",
1714
+ parameter_value: "qa"
1715
+ },
1716
+ {
1717
+ parameter_key: "ReleaseVersion",
1718
+ parameter_value: "#{@projects["maia"]['revision']}"
1719
+ },
1720
+ {
1721
+ parameter_key: "ALBShortName",
1722
+ parameter_value: "maia-qa-#{deploy_id}"[0..15]
1723
+ },
1724
+ {
1725
+ parameter_key: "ECSClusterName",
1726
+ parameter_value: @ecs_cluster_name
1727
+ },
1728
+ {
1729
+ parameter_key: "EnvHash",
1730
+ parameter_value: deploy_id
1731
+ },
1732
+ {
1733
+ parameter_key: "HostnamePatternPublic",
1734
+ parameter_value: "api*-#{@dns_record_identifier}.qa.colaster.com"
1735
+ },
1736
+ {
1737
+ parameter_key: "HostnamePatternPriority",
1738
+ parameter_value: (hostname_pattern_priority.to_i + 128).to_s
1739
+ },
1740
+ {
1741
+ parameter_key: "ProxyHostnameIntermediari",
1742
+ parameter_value: "api-intermediari-#{@dns_record_identifier}.qa.colaster.com"
1743
+ },
1744
+ {
1745
+ parameter_key: "ProxyHostnameApp",
1746
+ parameter_value: "api-#{@dns_record_identifier}.qa.colaster.com"
1747
+ }
1748
+ ]
1749
+ if stack_exists?(stack_name_maia)
1750
+ cur_version = get_currently_deployed_version(stack_name_maia)
1751
+ update_stack(stack_name_maia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["maia"]['revision'])
1752
+ else
1753
+ create_stack(stack_name_maia, stack_body, parameters, tags, @cf_role)
1754
+ end
1755
+
1756
+ wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
1757
+ wait_for_stack_ready(stack_name_consumer) unless stack_ready?(stack_name_consumer)
1758
+ wait_for_stack_ready(stack_name_ermes) unless stack_ready?(stack_name_ermes)
1759
+ wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1760
+ wait_for_stack_ready(stack_name_peano) unless stack_ready?(stack_name_peano)
1761
+ wait_for_stack_ready(stack_name_rogoreport) unless stack_ready?(stack_name_rogoreport)
1762
+ wait_for_stack_ready(stack_name_borat) unless stack_ready?(stack_name_borat)
1763
+ wait_for_stack_ready(stack_name_activia) unless stack_ready?(stack_name_activia)
1764
+ wait_for_stack_ready(stack_name_maia) unless stack_ready?(stack_name_maia)
1765
+ wait_for_stack_ready(stack_name_crash) unless stack_ready?(stack_name_crash) unless !deploy_crash?
1766
+ wait_for_stack_ready(stack_name_hutch) unless stack_ready?(stack_name_hutch) unless !deploy_starsky_hutch?
1767
+
1768
+
1769
+ update_service_defaults(stack_name_web)
1770
+ update_service_defaults(stack_name_consumer)
1771
+ update_service_defaults(stack_name_urania)
1772
+ update_service_defaults(stack_name_ermes)
1773
+ update_service_defaults(stack_name_bburago)
1774
+ update_service_defaults(stack_name_hal9000)
1775
+ update_service_defaults(stack_name_fidaty)
1776
+ update_service_defaults(stack_name_peano)
1777
+ update_service_defaults(stack_name_rogoreport)
1778
+ update_service_defaults(stack_name_assange)
1779
+ update_service_defaults(stack_name_borat)
1780
+ update_service_defaults(stack_name_activia)
1781
+ update_service_defaults(stack_name_skynet)
1782
+ update_service_defaults(stack_name_leftorium)
1783
+ update_service_defaults(stack_name_rachele)
1784
+ update_service_defaults(stack_name_maia)
1785
+ update_service_defaults(stack_name_starsky) unless !deploy_starsky_hutch?
1786
+ update_service_defaults(stack_name_hutch) unless !deploy_starsky_hutch?
1787
+ update_service_defaults(stack_name_crash) unless !deploy_crash?
1788
+
1789
+ activia_hostname = get_route53_hostname("activia")
1790
+ assange_hostname = get_route53_hostname("assange")
1791
+ bburago_hostname = get_route53_hostname("bburago")
1792
+ borat_hostname = get_route53_hostname("borat")
1793
+ ermes_hostname = get_route53_hostname("ermes")
1794
+ fidaty_hostname = get_route53_hostname("fidaty")
1795
+ hal9000_hostname = get_route53_hostname("hal9000")
1796
+ prima_hostname = get_route53_hostname("web")
1797
+ peano_hostname = get_route53_hostname("peano")
1798
+ skynet_hostname = get_route53_hostname("skynet")
1799
+ urania_hostname = get_route53_hostname("urania")
1800
+ roger_hostname = get_route53_hostname("roger")
1801
+ leftorium_hostname = get_route53_hostname("leftorium")
1802
+ rachele_hostname = get_route53_hostname("rachele")
1803
+ maia_app_hostname = get_route53_hostname("maia-app")
1804
+ maia_intermediari_hostname = get_route53_hostname("maia-intermediari")
1805
+ crash_hostname = get_route53_hostname("crash") unless !deploy_crash?
1806
+ starsky_hostname = get_route53_hostname("starsky") unless !deploy_starsky_hutch?
1807
+ hutch_hostname = get_route53_hostname("hutch") unless !deploy_starsky_hutch?
1808
+
1809
+
1810
+ # launch_marley ec2_ip_address(asg_stack_name), prima_hostname, borat_hostname
1811
+
1812
+ projects_text = "
1813
+ > Prima url: https://#{prima_hostname}
1814
+ > Backoffice (Borat) url: https://#{borat_hostname}
1815
+ > Urania url: http://#{urania_hostname}:81
1816
+ > Bburago url: http://#{bburago_hostname}:83
1817
+ > Ermes url: http://#{ermes_hostname}:10002
1818
+ > Hal9000 url: http://#{hal9000_hostname}:10031
1819
+ > Fidaty url: http://#{fidaty_hostname}:10021
1820
+ > Peano url: http://#{peano_hostname}:10039
1821
+ > Assange url: https://#{assange_hostname}
1822
+ > Activia url: http://#{activia_hostname}:10041
1823
+ > Skynet url: http://#{skynet_hostname}:8050
1824
+ > Roger url: http://#{roger_hostname}:10051
1825
+ > Leftorium url: http://#{leftorium_hostname}:10061
1826
+ > Rachele url: http://#{rachele_hostname}:10040
1827
+ > Maia App url: https://#{maia_app_hostname}
1828
+ > Maia Intermediari url: https://#{maia_intermediari_hostname}"
1829
+ projects_text.concat "
1830
+ > Crash url: https://#{crash_hostname}" if deploy_crash?
1831
+ projects_text.concat "
1832
+ > Starsky url: https://#{starsky_hostname}
1833
+ > Hutch url: https://#{hutch_hostname}" if deploy_starsky_hutch?
1834
+ projects_text.concat "
1835
+ > RabbitMQ url: http://#{ec2_ip_address(asg_stack_name)}:15672
1836
+ > Supervisor url: http://#{ec2_ip_address(asg_stack_name)}:9001
1837
+ > Elasticsearch url: http://#{ec2_ip_address(asg_stack_name)}:9200
1838
+ > SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
1839
+ output projects_text.cyan
1840
+ output "Deploy effettuato, everything is awesome!\n".green
1841
+
1842
+ if @projects['prima']['name'] != 'master' then
1843
+ output "Lancio il batch job per la visual regression..."
1844
+ @batch.submit_job({
1845
+ job_name: "bocelli-test-#{@dns_record_identifier}",
1846
+ job_queue: "tools-production",
1847
+ job_definition: describe_stack_resource('batch-job-bocelli-production', 'JobDefinition').stack_resource_detail.physical_resource_id,
1848
+ container_overrides: {
1849
+ environment: [
1850
+ {
1851
+ name: "BATCH_COMMAND",
1852
+ value: "test"
1853
+ },
1854
+ {
1855
+ name: "QA_HOSTNAME",
1856
+ value: prima_hostname
1857
+ },
1858
+ {
1859
+ name: "BRANCH_NAME",
1860
+ value: @projects['prima']['name']
1861
+ },
1862
+ {
1863
+ name: "COMMITTER_EMAIL",
1864
+ value: @projects['prima']['committer']
1865
+ }
1866
+ ]
1867
+ }
1868
+ })
1869
+
1870
+ output "Visual regression lanciata con successo!"
1871
+ end
1872
+
1873
+ qainit_write_output(projects_text, 'Indirizzi scritti su ')
1874
+ end
1875
+
1876
# Resolves the QA Route53 hostname for the given project identifier.
#
# Matching is substring-based: the first token found inside +project+ wins,
# so e.g. 'ecs-task-web-qa-notneeded' resolves through the 'web' token.
# Most projects map to "<token>-<id>.qa.colaster.com"; a few use a different
# subdomain prefix (web -> www, borat -> backoffice, maia-app -> api,
# maia-intermediari -> api-intermediari).
#
# Returns the hostname String, or nil when no token matches (same implicit
# nil fallthrough as the original case ladder).
def get_route53_hostname(project)
  # Ordered [token, subdomain prefix] pairs; order mirrors the original
  # case/when ladder because the first substring match wins.
  token_to_prefix = [
    %w[web www],
    %w[urania urania],
    %w[bburago bburago],
    %w[hal9000 hal9000],
    %w[fidaty fidaty],
    %w[peano peano],
    %w[assange assange],
    %w[borat backoffice],
    %w[crash crash],
    %w[ermes ermes],
    %w[activia activia],
    %w[skynet skynet],
    %w[roger roger],
    %w[leftorium leftorium],
    %w[rachele rachele],
    %w[starsky starsky],
    %w[hutch hutch],
    %w[maia-app api],
    %w[maia-intermediari api-intermediari]
  ]
  _token, prefix = token_to_prefix.find { |token, _| project.include?(token) }
  prefix && "#{prefix}-#{@dns_record_identifier}.qa.colaster.com"
end
1919
+
1920
# Returns the private IP address of the first EC2 instance belonging to the
# ECS auto-scaling group created by +asg_stack_name+.
#
# Resolution chain: CloudFormation stack resource ('ECSAutoScalingGroup')
# -> ASG physical id -> first instance id -> EC2 private_ip_address.
# NOTE(review): assumes the ASG has at least one running instance — no
# guard for an empty instances list, same as the original.
def ec2_ip_address(asg_stack_name)
  asg_resource = describe_stack_resource(asg_stack_name, 'ECSAutoScalingGroup')
  asg_physical_id = asg_resource.stack_resource_detail.physical_resource_id
  # Only one ASG is expected, hence the max_records of 1.
  groups = describe_auto_scaling_groups([asg_physical_id], 1)
  first_instance_id = groups.auto_scaling_groups[0].instances[0].instance_id
  reservations = describe_instances([first_instance_id])
  reservations.reservations[0].instances[0].private_ip_address
end
1927
+
611
1928
  def get_alb_host(stack_name)
612
1929
  case
613
1930
  when stack_name.include?('web')
@@ -656,6 +1973,664 @@ class Release
656
1973
  resp.load_balancers[0].dns_name
657
1974
  end
658
1975
 
1976
+ def update_service_defaults(stack_name)
1977
+ case
1978
+ when stack_name.include?('web')
1979
+ logical_resource_id = 'ECSServiceWebQA'
1980
+ when stack_name.include?('consumer')
1981
+ logical_resource_id = 'ECSServiceConsumerQa'
1982
+ when stack_name.include?('urania')
1983
+ logical_resource_id = 'ECSServiceUraniaQA'
1984
+ when stack_name.include?('backoffice')
1985
+ logical_resource_id = 'ECSServiceBackoffice'
1986
+ when stack_name.include?('ermes')
1987
+ logical_resource_id = 'ECSServiceErmesQA'
1988
+ when stack_name.include?('bburago')
1989
+ logical_resource_id = 'ECSServiceBburagoQA'
1990
+ when stack_name.include?('hal9000')
1991
+ logical_resource_id = 'ECSServiceHal9000QA'
1992
+ when stack_name.include?('fidaty')
1993
+ logical_resource_id = 'ECSServiceFidatyQA'
1994
+ when stack_name.include?('skynet')
1995
+ logical_resource_id = 'ECSServiceSkynetQA'
1996
+ when stack_name.include?('roger')
1997
+ logical_resource_id = 'ECSServiceRogerQA'
1998
+ when stack_name.include?('activia')
1999
+ logical_resource_id = 'ECSServiceActiviaQA'
2000
+ when stack_name.include?('peano')
2001
+ logical_resource_id = 'ECSServicePeanoQA'
2002
+ when stack_name.include?('rogoreport')
2003
+ logical_resource_id = 'ECSServiceRogoreport'
2004
+ when stack_name.include?('assange')
2005
+ logical_resource_id = 'ECSServiceAssangeQA'
2006
+ when stack_name.include?('borat')
2007
+ logical_resource_id = 'ECSServiceBorat'
2008
+ when stack_name.include?('leftorium')
2009
+ logical_resource_id = 'ECSServiceLeftoriumQA'
2010
+ when stack_name.include?('rachele')
2011
+ logical_resource_id = 'ECSServiceRacheleQA'
2012
+ when stack_name.include?('crash')
2013
+ logical_resource_id = 'ECSServiceCrashQA'
2014
+ when stack_name.include?('starsky')
2015
+ logical_resource_id = 'ECSServiceStarskyQA'
2016
+ when stack_name.include?('hutch')
2017
+ logical_resource_id = 'ECSServiceHutch'
2018
+ when stack_name.include?('maia')
2019
+ logical_resource_id = 'ECSServiceMaia'
2020
+ else
2021
+ raise "Service name non gestito per lo stack #{stack_name}"
2022
+ end
2023
+ resp = describe_stack_resource(stack_name, logical_resource_id)
2024
+ update_ecs_service(@ecs_cluster_name, resp.stack_resource_detail.physical_resource_id, {minimum_healthy_percent: 0, maximum_percent: 100})
2025
+ end
2026
+
2027
+ def create_activia_artifact(revision)
2028
+ output "Preparo l'artifact activia .zip\n".yellow
2029
+
2030
+ git_checkout_version('activia', revision)
2031
+
2032
+ Dir.chdir 'projects/activia'
2033
+
2034
+ decrypt_secrets()
2035
+
2036
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2037
+ exec_step 'prepare-docker-compose --directory activia && cp docker-compose-qainit.yml docker-compose.yml'
2038
+
2039
+ if File.exists? 'deploy/build_qa_artifact'
2040
+ execute_command "deploy/build_qa_artifact"
2041
+ else
2042
+ [
2043
+ "docker-compose build web",
2044
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2045
+ '-c' 'mix local.hex --force && mix hex.info && \
2046
+ mix deps.get && mix compile && mix deps.compile && \
2047
+ cd assets && \
2048
+ rm -rf node_modules && \
2049
+ yarn --cache-folder ~/.cache/yarn && \
2050
+ sysconfcpus -n 2 ./node_modules/.bin/webpack --mode production && \
2051
+ cd .. && \
2052
+ mix phx.digest && \
2053
+ rm -rf _build/qa/rel/ && \
2054
+ mix release --env=qa'"
2055
+ ].each do |cmd|
2056
+ execute_command cmd
2057
+ end
2058
+ end
2059
+
2060
+ cleanup_containers
2061
+
2062
+ artifact_path = Dir.glob("_build/qa/rel/activia/releases/*/activia.tar.gz").first
2063
+
2064
+ upload_artifact(artifact_path, "microservices/activia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2065
+
2066
+ Dir.chdir '../../'
2067
+ end
2068
+
2069
+ def create_assange_artifact(revision)
2070
+ output "Preparo l'artifact assange .zip\n".yellow
2071
+
2072
+ git_checkout_version('assange', revision)
2073
+
2074
+ Dir.chdir 'projects/assange'
2075
+
2076
+ decrypt_secrets()
2077
+
2078
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2079
+ exec_step 'prepare-docker-compose --directory assange && cp docker-compose-qainit.yml docker-compose.yml'
2080
+ exec_step 'deploy/build_qa_artifact'
2081
+
2082
+ cleanup_containers
2083
+
2084
+ artifact_path = Dir.glob("_build/qa/rel/assange/releases/*/assange.tar.gz").first
2085
+ upload_artifact(artifact_path, "microservices/assange/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2086
+
2087
+ Dir.chdir '../../'
2088
+ end
2089
+
2090
+ def create_bburago_artifact(revision)
2091
+ output "Preparo l'artifact bburago .zip\n".yellow
2092
+
2093
+ git_checkout_version('bburago', revision)
2094
+
2095
+ Dir.chdir 'projects/bburago'
2096
+
2097
+ decrypt_secrets()
2098
+
2099
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2100
+ exec_step 'prepare-docker-compose --directory bburago && cp docker-compose-qainit.yml docker-compose.yml'
2101
+ [
2102
+ "docker-compose build web",
2103
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint deploy/create_qa_artifact web"
2104
+ ].each do |cmd|
2105
+ execute_command cmd
2106
+ end
2107
+
2108
+ cleanup_containers
2109
+
2110
+ artifact_path = Dir.glob('_build/qa/rel/bburago/releases/*/bburago.tar.gz').first
2111
+ upload_artifact(artifact_path, "microservices/bburago/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2112
+
2113
+ Dir.chdir '../../'
2114
+ end
2115
+
2116
+ def create_borat_artifact(revision)
2117
+ output "Preparo l'artifact borat .zip\n".yellow
2118
+
2119
+ git_checkout_version('borat', revision)
2120
+
2121
+ Dir.chdir 'projects/borat'
2122
+
2123
+ decrypt_secrets()
2124
+
2125
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2126
+ exec_step 'prepare-docker-compose --directory borat && cp docker-compose-qainit.yml docker-compose.yml'
2127
+
2128
+ if File.exists? 'deploy/build_qa_artifact'
2129
+ execute_command "deploy/build_qa_artifact"
2130
+ else
2131
+ [
2132
+ "docker network create borat_network || true",
2133
+ "docker-compose build web",
2134
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2135
+ '-c' 'mix local.hex --force && mix hex.info && \
2136
+ mix deps.get && \
2137
+ cd assets && \
2138
+ yarn --cache-folder ~/.cache/yarn && \
2139
+ sysconfcpus -n 2 ./node_modules/.bin/webpack -p --config config/webpack.config.prod.js && \
2140
+ cd ../ && \
2141
+ mix phx.digest && \
2142
+ mix compile && mix deps.compile && \
2143
+ rm -rf _build/qa/rel/ && \
2144
+ mix distillery.release --env=qa'"
2145
+ ].each do |cmd|
2146
+ execute_command cmd
2147
+ end
2148
+ end
2149
+
2150
+ cleanup_containers
2151
+
2152
+ artifact_path = Dir.glob("_build/qa/rel/borat/releases/*/borat.tar.gz").first
2153
+ upload_artifact(artifact_path, "microservices/borat/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2154
+
2155
+ Dir.chdir '../../'
2156
+ end
2157
+
2158
+ def create_crash_artifact(revision, deploy_id)
2159
+ output "Preparo l'artifact crash .zip\n".yellow
2160
+
2161
+ git_checkout_version('crash', revision)
2162
+
2163
+ Dir.chdir 'projects/crash'
2164
+
2165
+ crash_qa_host = get_route53_hostname('ecs-task-crash-qa-notneeded')
2166
+
2167
+ decrypt_secrets()
2168
+
2169
+ `mv docker-compose-ci.yml docker-compose.yml`
2170
+ exec_step 'prepare-docker-compose --directory crash && cp docker-compose-qainit.yml docker-compose.yml'
2171
+
2172
+ execute_command "deploy/build_qa_artifact #{deploy_id}"
2173
+
2174
+ cleanup_containers
2175
+
2176
+ artifact_path = Dir.glob('_build/qa/rel/crash/releases/*/crash.tar.gz').first
2177
+ upload_artifact(artifact_path, "microservices/crash/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2178
+
2179
+ Dir.chdir '../../'
2180
+ end
2181
+
2182
+ def create_ermes_artifact(revision)
2183
+ output "Preparo l'artifact ermes .zip\n".yellow
2184
+
2185
+ git_checkout_version('ermes', revision)
2186
+
2187
+ Dir.chdir 'projects/ermes'
2188
+
2189
+ decrypt_secrets()
2190
+
2191
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2192
+ exec_step 'prepare-docker-compose --directory ermes && cp docker-compose-qainit.yml docker-compose.yml'
2193
+
2194
+ [
2195
+ "if echo `docker network ls` | grep crash_default; \
2196
+ then echo 'crash_default network already existing'; \
2197
+ else docker network create crash_default; fi",
2198
+ 'docker-compose build web'
2199
+ ].each do |cmd|
2200
+ execute_command cmd
2201
+ end
2202
+
2203
+ [ "docker-compose run --rm -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2204
+ '-c' 'mix local.hex --force && mix hex.info && \
2205
+ mix deps.get && mix compile && mix deps.compile && \
2206
+ mix phx.digest && \
2207
+ MIX_ENV=dev mix compile.sms && \
2208
+ MIX_ENV=dev mix compile.html && \
2209
+ MIX_ENV=dev mix compile.heml && \
2210
+ MIX_ENV=dev mix compile.app_notification && \
2211
+ rm -rf _build/qa/rel/ && \
2212
+ mix release --env=qa'"
2213
+ ].each do |cmd|
2214
+ execute_command cmd
2215
+ end
2216
+
2217
+ cleanup_containers
2218
+
2219
+ artifact_path = Dir.glob("_build/qa/rel/ermes/releases/*/ermes.tar.gz").first
2220
+ upload_artifact(artifact_path, "microservices/ermes/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2221
+
2222
+ Dir.chdir '../../'
2223
+ end
2224
+
2225
+ def create_fidaty_artifact(revision)
2226
+ output "Preparo l'artifact fidaty .zip\n".yellow
2227
+
2228
+ git_checkout_version('fidaty', revision)
2229
+
2230
+ Dir.chdir 'projects/fidaty'
2231
+
2232
+ decrypt_secrets()
2233
+
2234
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2235
+ exec_step 'prepare-docker-compose --directory fidaty && cp docker-compose-qainit.yml docker-compose.yml'
2236
+ [
2237
+ "docker-compose build web",
2238
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2239
+ '-c' 'mix local.hex --force && mix hex.info && \
2240
+ mix deps.get && mix compile && mix deps.compile && \
2241
+ mix phx.digest && \
2242
+ rm -rf _build/qa/rel/ && \
2243
+ mix release --env=qa'"
2244
+ ].each do |cmd|
2245
+ execute_command cmd
2246
+ end
2247
+
2248
+ cleanup_containers
2249
+
2250
+ artifact_path = Dir.glob("_build/qa/rel/fidaty/releases/*/fidaty.tar.gz").first
2251
+ upload_artifact(artifact_path, "microservices/fidaty/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2252
+
2253
+ Dir.chdir '../../'
2254
+ end
2255
+
2256
+ def create_hal9000_artifact(revision)
2257
+ output "Preparo l'artifact hal9000 .zip\n".yellow
2258
+
2259
+ git_checkout_version('hal9000', revision)
2260
+
2261
+ Dir.chdir 'projects/hal9000'
2262
+
2263
+ decrypt_secrets()
2264
+
2265
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2266
+ exec_step 'prepare-docker-compose --directory hal9000 && cp docker-compose-qainit.yml docker-compose.yml'
2267
+ [
2268
+ "docker-compose build web",
2269
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2270
+ '-c' 'mix local.hex --force && mix hex.info && \
2271
+ mix deps.get && mix compile && mix deps.compile && \
2272
+ mix phx.digest assets -o priv/static && \
2273
+ rm -rf _build/qa/rel/ && \
2274
+ mix release --env=qa'"
2275
+ ].each do |cmd|
2276
+ execute_command cmd
2277
+ end
2278
+
2279
+ cleanup_containers
2280
+
2281
+ artifact_path = Dir.glob("_build/qa/rel/hal9000/releases/*/hal9000.tar.gz").first
2282
+ upload_artifact(artifact_path, "microservices/hal9000/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2283
+
2284
+ Dir.chdir '../../'
2285
+ end
2286
+
2287
+ def create_hutch_artifact(revision)
2288
+ output "Preparo l'artifact hutch\n".yellow
2289
+
2290
+ git_checkout_version('hutch', revision)
2291
+
2292
+ Dir.chdir 'projects/hutch'
2293
+
2294
+ decrypt_secrets() unless File.exist?('config/secrets.yml')
2295
+
2296
+ exec_step 'cp docker-compose-ci.yml docker-compose.yml'
2297
+ exec_step 'prepare-docker-compose --directory hutch && cp docker-compose-qainit.yml docker-compose.yml'
2298
+
2299
+ execute_command "deploy/build_qa_artifact #{get_route53_hostname("maia-intermediari")}"
2300
+
2301
+ cleanup_containers
2302
+
2303
+ artifact_path = "./hutch.tar.gz"
2304
+ upload_artifact(artifact_path, "microservices/hutch/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2305
+
2306
+ Dir.chdir '../../'
2307
+ end
2308
+
2309
+ def create_leftorium_artifact(revision)
2310
+ output "Preparo l'artifact leftorium .zip\n".yellow
2311
+
2312
+ git_checkout_version('leftorium', revision)
2313
+
2314
+ Dir.chdir 'projects/leftorium'
2315
+
2316
+ decrypt_secrets()
2317
+
2318
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2319
+ exec_step 'prepare-docker-compose --directory leftorium && cp docker-compose-qainit.yml docker-compose.yml'
2320
+ [
2321
+ "docker-compose build web",
2322
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2323
+ '-c' 'mix local.hex --force && mix hex.info && \
2324
+ mix deps.get && mix compile && mix deps.compile && \
2325
+ rm -rf _build/qa/rel/ && \
2326
+ mix release --env=qa'"
2327
+ ].each do |cmd|
2328
+ execute_command cmd
2329
+ end
2330
+
2331
+ cleanup_containers
2332
+
2333
+ artifact_path = Dir.glob('_build/qa/rel/leftorium/releases/*/leftorium.tar.gz').first
2334
+ upload_artifact(artifact_path, "microservices/leftorium/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2335
+
2336
+ Dir.chdir '../../'
2337
+ end
2338
+
2339
+ def create_maia_artifact(revision)
2340
+ output "Preparo l'artifact maia .zip\n".yellow
2341
+
2342
+ git_checkout_version('maia', revision)
2343
+
2344
+ Dir.chdir 'projects/maia'
2345
+
2346
+ decrypt_secrets()
2347
+
2348
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2349
+ exec_step 'prepare-docker-compose --directory maia && cp docker-compose-qainit.yml docker-compose.yml'
2350
+
2351
+ execute_command 'deploy/build_qa_artifact'
2352
+
2353
+ cleanup_containers
2354
+
2355
+ artifact_path = Dir.glob('_build/qa/rel/maia/releases/*/maia.tar.gz').first
2356
+ upload_artifact(artifact_path, "microservices/maia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2357
+
2358
+ Dir.chdir '../../'
2359
+ end
2360
+
2361
+ def create_peano_artifact(revision)
2362
+ output "Preparo l'artifact peano .zip\n".yellow
2363
+
2364
+ git_checkout_version('peano', revision)
2365
+
2366
+ Dir.chdir 'projects/peano'
2367
+
2368
+ decrypt_secrets() unless File.exist?('config/secrets.yml')
2369
+
2370
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2371
+ exec_step 'prepare-docker-compose --directory peano && cp docker-compose-qainit.yml docker-compose.yml'
2372
+
2373
+ execute_command "deploy/build_qa_artifact"
2374
+
2375
+ cleanup_containers
2376
+
2377
+ artifact_path = Dir.glob("_build/qa/rel/peano/releases/*/peano.tar.gz").first
2378
+ upload_artifact(artifact_path, "microservices/peano/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2379
+
2380
+ Dir.chdir '../../'
2381
+ end
2382
+
2383
+ def create_prima_artifact(revision, branch_name, deploy_id)
2384
+ output "Preparo l'artifact prima .zip\n".yellow
2385
+
2386
+ git_checkout_version('prima', revision)
2387
+
2388
+ Dir.chdir 'projects/prima'
2389
+
2390
+ ['vendor'].each do |dir|
2391
+ unless File.directory?(dir)
2392
+ if File.directory?("../../../prima/#{dir}")
2393
+ exec_step "rsync -a ../../../prima/#{dir} ."
2394
+ end
2395
+ end
2396
+ end
2397
+
2398
+ exec_step 'mv docker-compose-ci.yml docker-compose.yml'
2399
+ exec_step 'prepare-docker-compose --directory prima'
2400
+ exec_step 'sed -i "s/\\/home\\/app/\\/root/g" docker-compose.yml'
2401
+ `sed -i 's/"@prima-assicurazioni/pyxis-npm": ".*",/"@prima-assicurazioni/pyxis-npm": "#{@pyxis_version}",/' package.json` if deploy_pyxis?
2402
+ [
2403
+ "bin/qa_build_artifact.sh #{branch_name} #{'update' if @deploy_update}"
2404
+ ].each do |cmd|
2405
+ execute_command cmd
2406
+ end
2407
+
2408
+ cleanup_containers
2409
+
2410
+ Dir.chdir "../../"
2411
+ end
2412
+
2413
+ def create_pyxis_artifact(revision, deploy_id)
2414
+ if (deploy_pyxis?)
2415
+ output "Preparo l'artifact pyxis\n".yellow
2416
+
2417
+ git_checkout_version('pyxis-npm', revision)
2418
+
2419
+ Dir.chdir 'projects/pyxis-npm'
2420
+
2421
+ decrypt_secrets()
2422
+
2423
+ exec_step 'mv .fakenpmrc .npmrc'
2424
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2425
+ exec_step 'prepare-docker-compose --directory pyxis-npm && cp docker-compose-qainit.yml docker-compose.yml'
2426
+ exec_step 'docker-compose build web'
2427
+
2428
+ exec_step "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
2429
+ '-c' 'npm view prima-assicurazioni/pyxis-npm versions --json > versions.json'" # posso recuperare le versioni pubblicate solo da dentro al container, scrivo su un file che leggo subito dopo
2430
+ published_versions = `cat versions.json`
2431
+ qa_versions = JSON.parse(published_versions).select{ |version| version.include? get_pyxis_version(deploy_id) }
2432
+
2433
+ @pyxis_version = "0.#{get_pyxis_version(deploy_id)}.#{qa_versions.size}"
2434
+
2435
+ `sed -i '3s/".*/"version": "#{@pyxis_version}",/' package.json`
2436
+ [
2437
+ "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
2438
+ '-c' 'yarn install && \
2439
+ yarn build:prod && \
2440
+ npm publish'"
2441
+ ].each do |cmd|
2442
+ execute_command cmd
2443
+ end
2444
+
2445
+ cleanup_containers
2446
+ Dir.chdir '../../'
2447
+ end
2448
+ end
2449
+
2450
+ def create_rachele_artifact(revision)
2451
+ output "Preparo l'artifact rachele .zip\n".yellow
2452
+
2453
+ git_checkout_version('rachele', revision)
2454
+
2455
+ Dir.chdir 'projects/rachele'
2456
+
2457
+ decrypt_secrets()
2458
+
2459
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2460
+ exec_step 'prepare-docker-compose --directory rachele && cp docker-compose-qainit.yml docker-compose.yml'
2461
+
2462
+ execute_command "docker-compose build web"
2463
+
2464
+ [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2465
+ '-c' 'mix local.hex --force && mix hex.info && \
2466
+ mix deps.get && mix compile && mix deps.compile && \
2467
+ rm -rf _build/qa/rel/ && \
2468
+ mix release --env=qa'"
2469
+ ].each do |cmd|
2470
+ execute_command cmd
2471
+ end
2472
+
2473
+ cleanup_containers
2474
+
2475
+ artifact_path = Dir.glob("_build/qa/rel/rachele/releases/*/rachele.tar.gz").first
2476
+ upload_artifact(artifact_path, "microservices/rachele/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2477
+
2478
+ Dir.chdir '../../'
2479
+ end
2480
+
2481
+ def create_roger_artifact(revision)
2482
+ output "Preparo l'artifact roger .zip\n".yellow
2483
+
2484
+ git_checkout_version('roger', revision)
2485
+
2486
+ Dir.chdir 'projects/roger'
2487
+
2488
+ decrypt_secrets()
2489
+
2490
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2491
+ exec_step 'prepare-docker-compose --directory roger && cp docker-compose-qainit.yml docker-compose.yml'
2492
+ [
2493
+ "docker-compose build web",
2494
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2495
+ '-c' 'mix local.hex --force && mix hex.info && \
2496
+ mix deps.get && mix compile && mix deps.compile && \
2497
+ mix phx.digest && \
2498
+ rm -rf _build/qa/rel/ && \
2499
+ mix distillery.release --env=qa'"
2500
+ ].each do |cmd|
2501
+ execute_command cmd
2502
+ end
2503
+
2504
+ cleanup_containers
2505
+
2506
+ artifact_path = Dir.glob("_build/qa/rel/roger/releases/*/roger.tar.gz").first
2507
+ upload_artifact(artifact_path, "microservices/roger/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2508
+
2509
+ Dir.chdir '../../'
2510
+ end
2511
+
2512
+ def create_rogoreport_artifact(revision)
2513
+ output "Preparo l'artifact rogoreport .zip\n".yellow
2514
+
2515
+ git_checkout_version('rogoreport', revision)
2516
+
2517
+ Dir.chdir 'projects/rogoreport'
2518
+
2519
+ decrypt_secrets() unless File.exist?('config/secrets.yml')
2520
+
2521
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2522
+ exec_step 'prepare-docker-compose --directory rogoreport && cp docker-compose-qainit.yml docker-compose.yml'
2523
+ [
2524
+ "docker-compose build web",
2525
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2526
+ '-c' 'mix local.hex --force && mix hex.info && \
2527
+ mix deps.get && mix compile && mix deps.compile && \
2528
+ rm -rf _build/qa/rel/ && \
2529
+ mix release --name=rogoreport --env=qa'"
2530
+ ].each do |cmd|
2531
+ execute_command cmd
2532
+ end
2533
+
2534
+ cleanup_containers
2535
+
2536
+ artifact_path = Dir.glob("_build/qa/rel/rogoreport/releases/*/rogoreport.tar.gz").first
2537
+ upload_artifact(artifact_path, "microservices/rogoreport/rogoreport-#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2538
+
2539
+ Dir.chdir '../../'
2540
+ end
2541
+
2542
+ def create_skynet_artifact(revision)
2543
+ output "Preparo l'artifact skynet\n".yellow
2544
+
2545
+ git_checkout_version('skynet', revision)
2546
+
2547
+ Dir.chdir 'projects/skynet'
2548
+
2549
+ version = `git rev-parse HEAD`
2550
+
2551
+ artifact_path = "/tmp/skynet-#{revision}-qa.tar.gz"
2552
+
2553
+ exec_step "tar cfz #{artifact_path} core config oracles pickles web tests logstash application_log configuration.py wsgi.py requirements.txt requirements_training.txt"
2554
+
2555
+ upload_artifact(artifact_path, "microservices/skynet/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2556
+
2557
+ Dir.chdir '../../'
2558
+ end
2559
+
2560
+ def create_starsky_artifact(revision)
2561
+ output "Preparo l'artifact starsky\n".yellow
2562
+
2563
+ git_checkout_version('starsky', revision)
2564
+
2565
+ Dir.chdir 'projects/starsky'
2566
+
2567
+ version = `git rev-parse HEAD`
2568
+
2569
+ #artifact_path = "/tmp/starsky-#{revision}-qa.tar.gz"
2570
+
2571
+ decrypt_secrets() unless File.exist?('config/secrets.yml')
2572
+
2573
+ `mv docker-compose-ci.yml docker-compose.yml`
2574
+ exec_step 'prepare-docker-compose --directory starsky && cp docker-compose-qainit.yml docker-compose.yml'
2575
+ exec_step "sed s/qa_deploy_id/#{get_deploy_id}/ .env.dist.qa"
2576
+ exec_step "cp .env.dist.qa .env"
2577
+
2578
+ [
2579
+ "sed -i 's/USER app/USER root/g' Dockerfile",
2580
+ "if echo `docker network ls` | grep peano_default; \
2581
+ then echo 'peano_default network already existing'; \
2582
+ else docker network create peano_default; fi",
2583
+ "docker-compose build web",
2584
+ "docker-compose run -w $PWD -e BUILD_ENV=qa -u root --entrypoint /bin/sh web \
2585
+ '-c' 'cargo build --release -vv --features=qa \
2586
+ && cargo build --bin migrate --release --features=qa \
2587
+ && cargo build --bin rabbit_worker --release --features=qa \
2588
+ && cp -p target/release/starsky . \
2589
+ && cp -p target/release/migrate . \
2590
+ && cp -p target/release/rabbit_worker . \
2591
+ && tar cfz #{revision}-qa.tar.gz config starsky migrate rabbit_worker .env.dist.*'"
2592
+ ].each do |cmd|
2593
+ execute_command cmd
2594
+ end
2595
+
2596
+ artifact_path = "./#{revision}-qa.tar.gz"
2597
+
2598
+ upload_artifact(artifact_path, "microservices/starsky/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2599
+
2600
+ Dir.chdir '../../'
2601
+ end
2602
+
2603
+ def create_urania_artifact(revision)
2604
+ output "Preparo l'artifact urania .zip\n".yellow
2605
+
2606
+ git_checkout_version('urania', revision)
2607
+
2608
+ Dir.chdir 'projects/urania'
2609
+
2610
+ decrypt_secrets()
2611
+
2612
+ exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2613
+ exec_step 'prepare-docker-compose --directory urania && cp docker-compose-qainit.yml docker-compose.yml'
2614
+
2615
+ execute_command "docker-compose build web"
2616
+
2617
+ [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2618
+ '-c' 'mix local.hex --force && mix hex.info && \
2619
+ mix deps.get && mix compile && mix deps.compile && \
2620
+ rm -rf _build/qa/rel/ && \
2621
+ mix release --env=qa'"
2622
+ ].each do |cmd|
2623
+ execute_command cmd
2624
+ end
2625
+
2626
+ cleanup_containers
2627
+
2628
+ artifact_path = Dir.glob("_build/qa/rel/urania/releases/*/urania.tar.gz").first
2629
+ upload_artifact(artifact_path, "microservices/urania/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2630
+
2631
+ Dir.chdir '../../'
2632
+ end
2633
+
659
2634
  def deploy_pyxis?
660
2635
  if defined? @deploy_pyxis
661
2636
  @deploy_pyxis
@@ -669,12 +2644,146 @@ class Release
669
2644
  end
670
2645
  end
671
2646
 
2647
# True when either crash or leftorium was selected on a real feature branch
# (non-master, non-default). Fix: added nil-guards matching the sibling
# deploy_starsky_hutch? — without them a missing 'crash'/'leftorium' key in
# @projects raised NoMethodError on nil.
def deploy_crash?
  crash_present = !@projects['crash'].nil? && !@projects['crash'].empty? && @projects['crash']['name'] != 'master' && !@projects['crash']['default_branch']
  leftorium_present = !@projects['leftorium'].nil? && !@projects['leftorium'].empty? && @projects['leftorium']['name'] != 'master' && !@projects['leftorium']['default_branch']
  crash_present || leftorium_present
end
2652
+
2653
# True when either starsky or hutch was selected on a real feature branch
# (present, non-empty, non-master and not the default branch).
def deploy_starsky_hutch?
  %w[starsky hutch].any? do |project_name|
    selected = @projects[project_name]
    !selected.nil? && !selected.empty? && selected['name'] != 'master' && !selected['default_branch']
  end
end
2658
+
2659
# Derives a numeric version fragment from a deploy id: strips lowercase
# letters, '0' and the bracket characters (tr-style set '[a-z0]'), then
# keeps at most the first 10 remaining characters.
def get_pyxis_version(deploy_id)
  digits_only = deploy_id.delete('[a-z0]')
  digits_only[0..9]
end
2662
+
2663
# Best-effort teardown of the compose environment and any exited containers.
# Failures are deliberately ignored (backticks, exit status unchecked).
def cleanup_containers
  `docker-compose kill && docker-compose down -v --remove-orphans`
  `docker rm $(docker ps -q -f status=exited)`
end
2667
+
2668
# Discards local edits in the project checkout, then checks out the
# requested revision. Restores the original working directory afterwards.
def git_checkout_version(project, revision)
  project_dir = "projects/#{project}"
  Dir.chdir project_dir
  exec_step "git checkout -- . && git checkout #{revision}"
  Dir.chdir "../../"
end
2673
+
2674
# Creates the QA ECS auto-scaling-group CloudFormation stack, parameterised
# with the current cluster name and AMI id, under the qainit service role.
def create_asg_stack(stack_name, tags = [])
  stack_body = IO.read('cloudformation/stacks/asg/ecs-asg-allinone.yml')
  parameter_values = {
    "Environment" => "qa",
    "InstanceType" => "t3.large",
    "ECSClusterName" => @ecs_cluster_name,
    "AMIID" => @ami_id
  }
  parameters = parameter_values.map do |key, value|
    { parameter_key: key, parameter_value: value }
  end
  create_stack(stack_name, stack_body, parameters, tags, @cf_role)
end
2696
+
2697
# Creates the ECS cluster CloudFormation stack (no parameters).
def create_cluster_stack(stack_name, tags = [])
  cluster_template = IO.read('cloudformation/stacks/ecs-cluster.yml')
  create_stack(stack_name, cluster_template, [], tags)
end
2701
+
672
2702
# Updates the ECS cluster CloudFormation stack in place (no parameters).
def update_cluster_stack(stack_name, tags = [])
  cluster_template = IO.read('cloudformation/stacks/ecs-cluster.yml')
  update_stack(stack_name, cluster_template, [], tags)
end
676
2706
 
2707
# Creates the public QA application-load-balancer stack for the given role
# and environment hash, under the qainit service role.
def create_alb_stack(stack_name, role, hash, environment = 'qa')
  stack_body = IO.read('cloudformation/stacks/elb/alb-public-qa.yml')
  parameter_values = {
    "Environment" => environment,
    "Role" => role,
    "EnvHash" => hash
  }
  parameters = parameter_values.map do |key, value|
    { parameter_key: key, parameter_value: value }
  end
  create_stack(stack_name, stack_body, parameters, [], @cf_role)
end
2725
+
2726
# Copies selected Redis key prefixes from staging into the QA instance,
# skipping keys that already exist on the QA side.
def import_redis_crash(qa_ip_address)
  output "Importo chiavi di Redis da staging\n".yellow

  redis_qa = Redis.new(:url => "redis://#{qa_ip_address}:6379/10")
  redis_staging = Redis.new(:url => 'redis://staging.cache-1.prima.it:6379/10')

  ['CODICI', 'fun_with_flags'].each do |prefix|
    redis_staging.keys("#{prefix}*").each do |key|
      next unless redis_qa.keys(key).empty?
      output "Importo #{key} dal Redis di staging\n".yellow
      staging_dump = redis_staging.dump key
      redis_qa.restore key, 0, staging_dump
    end
  end
end
2742
+
2743
# Launches the DB-restore ECS task against the given EC2 instance and
# returns the run_ecs_task response.
def import_dbs(ip_address)
  overrides = {
    container_overrides: [
      {
        name: 'dbrestore',
        environment: [
          { name: 'EC2_IP_ADDRESS', value: ip_address }
        ]
      }
    ]
  }
  run_ecs_task(@ecs_cluster_name, @import_db_task, overrides, 1)
end
2760
+
2761
# Polls the DB-import ECS task (roughly every 10 seconds, printing a dot per
# second) until it reports a stopped_at timestamp. Aborts if the task
# response comes back empty.
def wait_for_db_import(task)
  output "Attendo che i DB vengano importati...\n".yellow
  stopped_at = nil
  sleep 15 # otherwise the freshly started task is not found yet
  until stopped_at
    if task.tasks[0].nil?
      pp @ecs_cluster_name
      pp task
      stop_if true, "Task di import DB lanciato, ma risposta vuota!".red
    end
    task = describe_ecs_tasks(task.tasks[0].cluster_arn, [task.tasks[0].task_arn])
    stopped_at = task.tasks[0].stopped_at unless task.tasks[0].nil?
    10.times do
      break unless stopped_at.nil?
      print '.'.yellow; STDOUT.flush
      sleep 1
    end
  end
  print "\n"
end
2784
+
677
2785
  def choose_branch_to_deploy(project_name, select_master = false)
2786
+ return {'name' => 'master', 'revision' => '399653d555b8864', 'committer' => 'crash@prima.it', 'default_branch' => true} if project_name == 'crash' && select_master
678
2787
  Dir.chdir "projects/#{project_name}"
679
2788
  output "Recupero la lista dei branch del progetto #{project_name}..."
680
2789
  `git remote prune origin`
@@ -714,6 +2823,7 @@ class Release
714
2823
  name = branch_name.split(' ')[0]
715
2824
  revision = branch_name.split(' ')[1]
716
2825
  committer_email = branch_name.split(' ')[2].tr('<>', '')
2826
+ return { 'name' => 'crash', 'default_branch' => true } if project_name == 'crash' && branch_name == 'master' #rimuovere questa riga se mai nei qa servirà crash con un branch diverso da master
717
2827
  { 'name' => name, 'revision' => revision[0..14], 'committer' => committer_email, 'default_branch' => select_master }
718
2828
  end
719
2829
 
@@ -761,6 +2871,70 @@ class Release
761
2871
  (Time.now.to_i.to_s[-4..-1].to_i + Random.rand(40000)).to_s
762
2872
  end
763
2873
 
2874
# Submits the marley AWS Batch job against the QA environment, passing the
# prima/backoffice hostnames and the selected project branches as env vars.
def launch_marley(ip_address, prima_hostname, borat_hostname)
  job_definition = describe_stack_resource('batch-job-marley', 'JobDefinition')

  marley_environment = [
    { name: 'PRIMA_URL', value: "https://#{prima_hostname}/?superprima" },
    { name: 'PRIMA_IP', value: ip_address },
    { name: 'PROJECTS_JSON', value: @projects.to_json },
    { name: 'BACKOFFICE_URL', value: "https://#{borat_hostname}" }
  ]

  @batch.submit_job({
    job_name: "marley-#{@dns_record_identifier}", # required
    job_queue: "tools-production", # required
    job_definition: job_definition.stack_resource_detail.physical_resource_id, # required
    container_overrides: {
      environment: marley_environment
    }
  })

  output "Marley lanciato con successo!\n".green
end
2905
+
2906
# Returns the ReleaseVersion parameter value of the given stack, or nil when
# the parameter is absent.
def get_currently_deployed_version(stack_name)
  release_param = get_stack_parameters(stack_name).find do |parameter|
    parameter.parameter_key == "ReleaseVersion"
  end
  release_param ? release_param.parameter_value : nil
end
2916
+
2917
# Decrypts project secrets by running the biscuit helper image with the
# host's AWS credentials mounted and the current container's volumes shared.
def decrypt_secrets()
  docker_image = "prima/biscuit_populate_configs"
  execute_command "docker pull #{docker_image}"
  execute_command "docker run -t --rm -v $HOME/.aws:/root/.aws -w $PWD --volumes-from #{get_host_container_name} #{docker_image}"
end
2926
+
2927
# Memoizes and returns the name of the docker container this script runs in,
# resolved by matching the container hostname against `docker ps` output.
def get_host_container_name()
  return @host_container_name if @host_container_name

  hostname = `cat /etc/hostname`.gsub("\n", '')
  # Run once through execute_command for visibility/logging, then capture.
  execute_command "docker ps | grep #{hostname} | awk '{print $NF}'"
  @host_container_name = `docker ps | grep #{hostname} | awk '{print $NF}'`.gsub("\n", '')
end
2937
+
764
2938
  def select_branches(project_names = nil)
765
2939
  output "Deploy feature menu"
766
2940
  if project_names.nil?
@@ -774,6 +2948,14 @@ class Release
774
2948
  end
775
2949
  end
776
2950
  end
2951
+
2952
# Returns the AMIID parameter value of the given stack (nil-ish when absent).
def get_ami_id(stack_name)
  get_stack_parameters(stack_name).each do |param|
    return param.parameter_value if param.parameter_key == "AMIID"
  end
end
777
2959
  end
778
2960
 
779
2961
  def help_content