prima-twig 0.63.27 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/bin/twig-feature CHANGED
@@ -22,9 +22,18 @@ class Release
22
22
  exec "twig feature #{ARGV.join ' '}"
23
23
  end
24
24
  end
25
+ @batch = Aws::Batch::Client.new
26
+ @s3 = Aws::S3::Client.new
27
+ @s3_bucket = 'prima-artifacts'
28
+ @artifact_path = '/tmp/prima-artifact.zip'
29
+ @import_db_task = 'arn:aws:ecs:eu-west-1:001575623345:task-definition/ecs-task-db-restore2-TaskDefinitionDbRestore-4UBHMCZBE5WM:1'
30
+ @cf_role = 'arn:aws:iam::001575623345:role/qainit-service-role-cloudformat-CloudformationRole-18KBZQIS148R9'
25
31
  @dns_record_identifier = nil
26
32
  @ecs_cluster_name = nil
27
33
  @deploy_update = false
34
+ @qainit = false
35
+ @qainit_host_folder = "/var/ci/#{ENV['DRONE_REPO']}/#{ENV['DRONE_BRANCH']}/#{ENV['DRONE_BUILD_NUMBER']}"
36
+ @qainit_folder = "/drone/src/github.com/project/primait/qainit"
28
37
  @projects = {
29
38
  'prima' => {},
30
39
  'urania' => {},
@@ -33,7 +42,7 @@ class Release
33
42
  'hal9000' => {},
34
43
  'fidaty' => {},
35
44
  'peano' => {},
36
- # 'rogoreport' => {},
45
+ 'rogoreport' => {},
37
46
  'assange' => {},
38
47
  'borat' => {},
39
48
  'crash' => {},
@@ -65,6 +74,8 @@ class Release
65
74
  qainit_deploy_shutdown!
66
75
  elsif 'update' == args[1]
67
76
  qainit_deploy_update!
77
+ elsif 'read' == args[1]
78
+ qainit_read_config! args[2]
68
79
  else
69
80
  if args[1]
70
81
  select_branches(args[1..-1])
@@ -81,8 +92,19 @@ class Release
81
92
  end
82
93
  when 'deploy'
83
94
  abort('Non sei nella cartella di artemide') unless Dir.pwd.match 'artemide$'
84
- if 'lock' == args[1]
95
+ if ['terminate', 'stop', 'shutdown', 'halt', 'destroy'].include? args[1]
96
+ deploy_shutdown!
97
+ elsif 'update' == args[1]
98
+ deploy_update!
99
+ elsif 'lock' == args[1]
85
100
  deploy_lock!
101
+ else
102
+ if args[1]
103
+ select_branches(args[1])
104
+ else
105
+ select_branches
106
+ end
107
+ deploy_feature!
86
108
  end
87
109
  when 'aggregator'
88
110
  if 'enable' == args[1]
@@ -501,8 +523,7 @@ class Release
501
523
 
502
524
  update_drone_yml!
503
525
 
504
- `git add branch_names .drone.yml`
505
- `git commit -m 'update'`
526
+ `git commit -am 'update'`
506
527
  `git push && git checkout master`
507
528
  end
508
529
 
@@ -580,6 +601,10 @@ class Release
580
601
  output "Cancello il record DNS utilizzato da Lighthouse"
581
602
  delete_lighthouse_dns()
582
603
  output "Finito!".green
604
+
605
+ if @qainit
606
+ qainit_write_output("QA #{ENV["DRONE_BRANCH"]} spento.", 'Indirizzi scritti su ')
607
+ end
583
608
  end
584
609
 
585
610
  def qainit_write_output(file_message, output_message)
@@ -589,16 +614,51 @@ class Release
589
614
  output "#{output_message} #{qa_file_name}".green
590
615
  end
591
616
 
617
+ def qainit_read_config!(action)
618
+ projects = ''
619
+
620
+ File.open('branch_names', 'r') do |file|
621
+ file.each_line do |line|
622
+ projects = JSON.parse(line)
623
+ end
624
+ end
625
+
626
+ projects.each do |key, project|
627
+ @projects[key] = project
628
+ end
629
+
630
+ get_s3_config_files
631
+ @qainit = true
632
+ case action
633
+ when 'shutdown'
634
+ output 'Shutting down'.green
635
+ qainit_drone_shutdown!
636
+ else
637
+ output 'Starting standard deploy'.green
638
+ deploy_feature!
639
+ end
640
+ end
641
+
592
642
  def update_drone_yml!()
593
643
  drone_yml = File.read('.drone.yml')
594
644
  @projects.each do |key, project|
595
- drone_yml = drone_yml.gsub(/#{key}@.+\n/, "#{key}@#{project['revision']}\n")
645
+ drone_yml = drone_yml.gsub("#{key}_placeholder", project['name'])
596
646
  end
597
647
  File.open(".drone.yml", "w") do |f|
598
648
  f.write(drone_yml)
599
649
  end
600
650
  end
601
651
 
652
+ def get_s3_config_files
653
+ # manteniamo la struttura per lanciarlo facilmente anche da locale
654
+ `mkdir -p cloudformation/stacks/task cloudformation/stacks/route53 cloudformation/stacks/asg cloudformation/stacks/elb`
655
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/task/db.yml', response_target: 'cloudformation/stacks/task/db.yml'})
656
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/qa.yml', response_target: 'cloudformation/stacks/route53/qa.yml'})
657
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/asg/ecs-asg-allinone.yml', response_target: 'cloudformation/stacks/asg/ecs-asg-allinone.yml'})
658
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/ecs-cluster.yml', response_target: 'cloudformation/stacks/ecs-cluster.yml'})
659
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/elb/alb-public-qa.yml', response_target: 'cloudformation/stacks/elb/alb-public-qa.yml'})
660
+ end
661
+
602
662
  def get_deploy_id
603
663
  if @deploy_id
604
664
  @deploy_id
@@ -608,6 +668,1264 @@ class Release
608
668
  end
609
669
  end
610
670
 
671
+ def deploy_feature!
672
+ `git pull && git submodule init && git submodule update`
673
+ @ami_id = get_ami_id("ecs-fleet-allinone-staging")
674
+ deploy_id = get_deploy_id
675
+ stack_name_alb = 'ecs-alb-http-public-qa-' + deploy_id[0..5]
676
+ stack_name_alb_ws = 'ecs-alb-ws-public-qa-' + deploy_id[0..5]
677
+ unless @qainit
678
+ @projects.each_key do |project_key|
679
+ if @projects[project_key]['revision']
680
+ git_checkout_version(project_key, @projects[project_key]['revision'])
681
+ end
682
+ end
683
+ end
684
+ @dns_record_identifier = deploy_id
685
+ @git_branch = ENV['DRONE_BRANCH']
686
+ hostname_pattern_priority = hostname_pattern_priority()
687
+ tags = [
688
+ {
689
+ key: "qainit",
690
+ value: @git_branch.gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '')
691
+ },
692
+ {
693
+ key: "hostname_pattern_priority",
694
+ value: hostname_pattern_priority
695
+ }
696
+ ]
697
+ @projects.each do |key, value|
698
+ case key.to_s
699
+ when 'crash'
700
+ tags << { key: 'crash', value: @projects['crash']['name'].gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '') } if deploy_crash?
701
+ when 'starsky', 'hutch'
702
+ tags << { key: key.to_s, value: @projects[key.to_s]['name'].gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '') } if deploy_starsky_hutch?
703
+ else
704
+ tags << { key: key, value: value['name'].gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '') }
705
+ end
706
+ end
707
+
708
+ cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
709
+
710
+ if stack_exists?(cluster_stack_name)
711
+ tags = get_stack_tags(cluster_stack_name)
712
+ hostname_pattern_priority = tags.detect do |tag|
713
+ tag.key == 'hostname_pattern_priority'
714
+ end.value
715
+ end
716
+
717
+ create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
718
+ wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
719
+
720
+ create_alb_stack(stack_name_alb, "http", deploy_id) unless stack_exists?(stack_name_alb)
721
+ create_alb_stack(stack_name_alb_ws, "websocket", deploy_id) unless stack_exists?(stack_name_alb_ws)
722
+
723
+ resp = describe_stack_resource(cluster_stack_name, 'ECSCluster')
724
+ @ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
725
+
726
+ asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
727
+ create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
728
+
729
+ stack_name_db = "ecs-task-db-qa-#{deploy_id}"
730
+ stack_body = IO.read('cloudformation/stacks/task/db.yml')
731
+ parameters = [
732
+ {
733
+ parameter_key: "Environment",
734
+ parameter_value: "qa"
735
+ },
736
+ {
737
+ parameter_key: "ECSClusterName",
738
+ parameter_value: @ecs_cluster_name
739
+ }
740
+ ]
741
+ create_stack(stack_name_db, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_db) # creazione asincrona stack dei db (~4 min)
742
+
743
+ output "check pyxis \n".yellow
744
+
745
+ create_pyxis_artifact(@projects["pyxis-npm"]['revision'], deploy_id) unless @projects["pyxis-npm"].nil? # deve essere creato prima di quello di prima, per avere la versione
746
+ create_prima_artifact(@projects["prima"]['revision'], @projects["prima"]['name'], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"]['revision']}.tar.gz")
747
+ # l'artefatto di prima viene creato sempre (puntamenti all'ambiente compilati nel js) e richiede molto più di 4 minuti
748
+ wait_for_stack_ready(stack_name_db) unless stack_ready?(stack_name_db) # dovrebbe essere istantaneo
749
+ db_task = ''
750
+ db_task = import_dbs(ec2_ip_address(asg_stack_name)) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # import asincrono dei dati
751
+
752
+ create_crash_artifact(@projects['crash']['revision'], deploy_id) unless !deploy_crash? || artifact_exists?('prima-artifacts-encrypted', "microservices/crash/#{@projects['crash']['revision']}-qa.tar.gz")
753
+ create_urania_artifact(@projects["urania"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/urania/#{@projects["urania"]['revision']}-qa.tar.gz")
754
+ create_roger_artifact(@projects["roger"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/roger/#{@projects["roger"]['revision']}-qa.tar.gz")
755
+ create_ermes_artifact(@projects["ermes"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/ermes/#{@projects["ermes"]['revision']}-qa.tar.gz")
756
+ create_bburago_artifact(@projects["bburago"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/bburago/#{@projects["bburago"]['revision']}-qa.tar.gz")
757
+ create_hal9000_artifact(@projects["hal9000"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/hal9000/#{@projects["hal9000"]['revision']}-qa.tar.gz")
758
+ create_rachele_artifact(@projects["rachele"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rachele/#{@projects["rachele"]['revision']}-qa.tar.gz")
759
+ create_fidaty_artifact(@projects["fidaty"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/fidaty/#{@projects["fidaty"]['revision']}-qa.tar.gz")
760
+ create_peano_artifact(@projects["peano"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/peano/#{@projects["peano"]['revision']}-qa.tar.gz")
761
+ create_rogoreport_artifact(@projects["rogoreport"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rogoreport/rogoreport-#{@projects["rogoreport"]['revision']}-qa.tar.gz")
762
+ create_assange_artifact(@projects["assange"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/assange/#{@projects["assange"]['revision']}-qa.tar.gz")
763
+ create_borat_artifact(@projects["borat"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/borat/#{@projects["borat"]['revision']}-qa.tar.gz")
764
+ create_activia_artifact(@projects["activia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/activia/#{@projects["activia"]['revision']}-qa.tar.gz")
765
+ create_leftorium_artifact(@projects["leftorium"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/leftorium/#{@projects["leftorium"]['revision']}-qa.tar.gz")
766
+ create_skynet_artifact(@projects["skynet"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/skynet/#{@projects["skynet"]['revision']}-qa.tar.gz")
767
+ create_maia_artifact(@projects["maia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/maia/#{@projects["maia"]['revision']}-qa.tar.gz")
768
+ create_starsky_artifact(@projects["starsky"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/starsky/#{@projects["starsky"]['revision']}-qa.tar.gz")
769
+ create_hutch_artifact(@projects["hutch"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/hutch/#{@projects["hutch"]['revision']}-#{@dns_record_identifier[0..7]}-qa.tar.gz")
770
+
771
+
772
+ wait_for_db_import(db_task) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # dovrebbe essere istantaneo
773
+
774
+ import_redis_crash(ec2_ip_address(asg_stack_name)) if deploy_crash? && !stack_exists?("ecs-task-web-qa-#{deploy_id}")
775
+
776
+ wait_for_stack_ready(stack_name_alb) unless stack_ready?(stack_name_alb)
777
+ wait_for_stack_ready(stack_name_alb_ws) unless stack_ready?(stack_name_alb_ws)
778
+
779
+ stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
780
+ stack_body = IO.read('cloudformation/stacks/route53/qa.yml')
781
+ parameters = [
782
+ {
783
+ parameter_key: "DnsRecordIdentifier",
784
+ parameter_value: @dns_record_identifier
785
+ },
786
+ {
787
+ parameter_key: "PrimaElbHostname",
788
+ parameter_value: get_alb_host(stack_name_alb)
789
+ },
790
+ {
791
+ parameter_key: "UraniaIp",
792
+ parameter_value: ec2_ip_address(asg_stack_name)
793
+ },
794
+ {
795
+ parameter_key: "BburagoIp",
796
+ parameter_value: ec2_ip_address(asg_stack_name)
797
+ },
798
+ {
799
+ parameter_key: "Hal9000Ip",
800
+ parameter_value: ec2_ip_address(asg_stack_name)
801
+ },
802
+ {
803
+ parameter_key: "FidatyIp",
804
+ parameter_value: ec2_ip_address(asg_stack_name)
805
+ },
806
+ {
807
+ parameter_key: "PeanoIp",
808
+ parameter_value: ec2_ip_address(asg_stack_name)
809
+ },
810
+ {
811
+ parameter_key: "ErmesIp",
812
+ parameter_value: ec2_ip_address(asg_stack_name)
813
+ },
814
+ {
815
+ parameter_key: "ActiviaIp",
816
+ parameter_value: ec2_ip_address(asg_stack_name)
817
+ },
818
+ {
819
+ parameter_key: "SkynetIp",
820
+ parameter_value: ec2_ip_address(asg_stack_name)
821
+ },
822
+ {
823
+ parameter_key: "RogerIp",
824
+ parameter_value: ec2_ip_address(asg_stack_name)
825
+ },
826
+ {
827
+ parameter_key: "LeftoriumIp",
828
+ parameter_value: ec2_ip_address(asg_stack_name)
829
+ },
830
+ {
831
+ parameter_key: "RacheleIp",
832
+ parameter_value: ec2_ip_address(asg_stack_name)
833
+ },
834
+ {
835
+ parameter_key: "RedisIp",
836
+ parameter_value: ec2_ip_address(asg_stack_name)
837
+ },
838
+ {
839
+ parameter_key: "AssangeElbHostname",
840
+ parameter_value: get_alb_host(stack_name_alb)
841
+ },
842
+ {
843
+ parameter_key: "BoratElbHostname",
844
+ parameter_value: get_alb_host(stack_name_alb_ws)
845
+ },
846
+ {
847
+ parameter_key: 'CrashElbHostname',
848
+ parameter_value: get_alb_host(stack_name_alb_ws)
849
+ },
850
+ {
851
+ parameter_key: 'StarskyElbHostname',
852
+ parameter_value: get_alb_host(stack_name_alb)
853
+ },
854
+ {
855
+ parameter_key: 'HutchElbHostname',
856
+ parameter_value: get_alb_host(stack_name_alb)
857
+ },
858
+ {
859
+ parameter_key: 'MaiaElbHostname',
860
+ parameter_value: get_alb_host(stack_name_alb)
861
+ }
862
+ ]
863
+
864
+ create_stack(stack_name_route53, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_route53)
865
+ wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
866
+
867
+ stack_name_skynet = "ecs-task-skynet-qa-#{deploy_id}"
868
+ git_checkout_version('skynet', @projects["skynet"]['revision'])
869
+ stack_body = File.read('projects/skynet/deploy/task.yml')
870
+ parameters = [
871
+ {
872
+ parameter_key: "Environment",
873
+ parameter_value: "qa"
874
+ },
875
+ {
876
+ parameter_key: "ReleaseVersion",
877
+ parameter_value: @projects["skynet"]['revision']
878
+ },
879
+ {
880
+ parameter_key: "TaskDesiredCount",
881
+ parameter_value: "1"
882
+ },
883
+ {
884
+ parameter_key: "ECSClusterName",
885
+ parameter_value: @ecs_cluster_name
886
+ },
887
+ {
888
+ parameter_key: "HostnamePattern",
889
+ parameter_value: "skynet-#{@dns_record_identifier}.qa.colaster.com"
890
+ },
891
+ {
892
+ parameter_key: "HostnamePatternPriority",
893
+ parameter_value: hostname_pattern_priority
894
+ }
895
+ ]
896
+ if stack_exists?(stack_name_skynet)
897
+ cur_version = get_currently_deployed_version(stack_name_skynet)
898
+ update_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["skynet"]['revision'])
899
+ else
900
+ create_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role)
901
+ end
902
+
903
+ stack_name_urania = "ecs-task-urania-qa-#{deploy_id}"
904
+ git_checkout_version('urania', @projects["urania"]['revision'])
905
+ stack_body = File.read('projects/urania/deploy/task.yml')
906
+ parameters = [
907
+ {
908
+ parameter_key: "Environment",
909
+ parameter_value: "qa"
910
+ },
911
+ {
912
+ parameter_key: "ReleaseVersion",
913
+ parameter_value: @projects["urania"]['revision']
914
+ },
915
+ {
916
+ parameter_key: "TaskDesiredCount",
917
+ parameter_value: "1"
918
+ },
919
+ {
920
+ parameter_key: "ECSClusterName",
921
+ parameter_value: @ecs_cluster_name
922
+ },
923
+ {
924
+ parameter_key: "HostnamePattern",
925
+ parameter_value: "urania-#{@dns_record_identifier}.qa.colaster.com"
926
+ },
927
+ {
928
+ parameter_key: "HostnamePatternPriority",
929
+ parameter_value: hostname_pattern_priority
930
+ }
931
+ ]
932
+ if stack_exists?(stack_name_urania)
933
+ cur_version = get_currently_deployed_version(stack_name_urania)
934
+ update_stack(stack_name_urania, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["urania"]['revision'])
935
+ else
936
+ create_stack(stack_name_urania, stack_body, parameters, tags, @cf_role)
937
+ end
938
+
939
+ stack_name_ermes = "ecs-task-ermes-qa-#{deploy_id}"
940
+ git_checkout_version('ermes', @projects["ermes"]['revision'])
941
+ stack_body = File.read('projects/ermes/deploy/task.yml')
942
+ parameters = [
943
+ {
944
+ parameter_key: "Environment",
945
+ parameter_value: "qa"
946
+ },
947
+ {
948
+ parameter_key: "ReleaseVersion",
949
+ parameter_value: "#{@projects['ermes']['revision']}"
950
+ },
951
+ {
952
+ parameter_key: "TaskDesiredCount",
953
+ parameter_value: "1"
954
+ },
955
+ {
956
+ parameter_key: "ECSClusterName",
957
+ parameter_value: @ecs_cluster_name
958
+ },
959
+ {
960
+ parameter_key: "HostnamePattern",
961
+ parameter_value: "ermes-#{@dns_record_identifier}.qa.colaster.com"
962
+ },
963
+ {
964
+ parameter_key: "HostnamePatternPriority",
965
+ parameter_value: hostname_pattern_priority
966
+ },
967
+ {
968
+ parameter_key: "WebHost",
969
+ parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
970
+ },
971
+ {
972
+ parameter_key: "PeanoHost",
973
+ parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
974
+ }
975
+ ]
976
+ if stack_exists?(stack_name_ermes)
977
+ cur_version = get_currently_deployed_version(stack_name_ermes)
978
+ update_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["ermes"]['revision'])
979
+ else
980
+ create_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role)
981
+ end
982
+
983
+ stack_name_bburago = "ecs-task-bburago-qa-#{deploy_id}"
984
+ git_checkout_version('bburago', @projects["bburago"]['revision'])
985
+ stack_body = File.read('projects/bburago/deploy/task.yml')
986
+ parameters = [
987
+ {
988
+ parameter_key: "Environment",
989
+ parameter_value: "qa"
990
+ },
991
+ {
992
+ parameter_key: "ReleaseVersion",
993
+ parameter_value: @projects["bburago"]['revision']
994
+ },
995
+ {
996
+ parameter_key: "ECSClusterName",
997
+ parameter_value: @ecs_cluster_name
998
+ },
999
+ {
1000
+ parameter_key: "TaskDesiredCount",
1001
+ parameter_value: "1"
1002
+ },
1003
+ {
1004
+ parameter_key: "HostnamePattern",
1005
+ parameter_value: "bburago-#{@dns_record_identifier}.qa.colaster.com"
1006
+ },
1007
+ {
1008
+ parameter_key: "HostnamePatternPriority",
1009
+ parameter_value: hostname_pattern_priority
1010
+ }
1011
+ ]
1012
+ if stack_exists?(stack_name_bburago)
1013
+ cur_version = get_currently_deployed_version(stack_name_bburago)
1014
+ update_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["bburago"]['revision'])
1015
+ else
1016
+ create_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role)
1017
+ end
1018
+
1019
+ stack_name_hal9000 = "ecs-task-hal9000-qa-#{deploy_id}"
1020
+ git_checkout_version('hal9000', @projects["hal9000"]['revision'])
1021
+ stack_body = File.read('projects/hal9000/deploy/task.yml')
1022
+ parameters = [
1023
+ {
1024
+ parameter_key: "Environment",
1025
+ parameter_value: "qa"
1026
+ },
1027
+ {
1028
+ parameter_key: "ReleaseVersion",
1029
+ parameter_value: @projects["hal9000"]['revision']
1030
+ },
1031
+ {
1032
+ parameter_key: "ECSClusterName",
1033
+ parameter_value: @ecs_cluster_name
1034
+ },
1035
+ {
1036
+ parameter_key: "TaskDesiredCount",
1037
+ parameter_value: "1"
1038
+ },
1039
+ {
1040
+ parameter_key: "HostnamePattern",
1041
+ parameter_value: "hal9000-#{@dns_record_identifier}.qa.colaster.com"
1042
+ },
1043
+ {
1044
+ parameter_key: "HostnamePatternPriority",
1045
+ parameter_value: hostname_pattern_priority
1046
+ }
1047
+ ]
1048
+ if stack_exists?(stack_name_hal9000)
1049
+ cur_version = get_currently_deployed_version(stack_name_hal9000)
1050
+ update_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hal9000"]['revision'])
1051
+ else
1052
+ create_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role)
1053
+ end
1054
+
1055
+ stack_name_fidaty = "ecs-task-fidaty-qa-#{deploy_id}"
1056
+ git_checkout_version('fidaty', @projects["fidaty"]['revision'])
1057
+ stack_body = File.read('projects/fidaty/deploy/task.yml')
1058
+ parameters = [
1059
+ {
1060
+ parameter_key: "Environment",
1061
+ parameter_value: "qa"
1062
+ },
1063
+ {
1064
+ parameter_key: "ReleaseVersion",
1065
+ parameter_value: "#{@projects["fidaty"]['revision']}"
1066
+ },
1067
+ {
1068
+ parameter_key: "ECSClusterName",
1069
+ parameter_value: @ecs_cluster_name
1070
+ },
1071
+ {
1072
+ parameter_key: "TaskDesiredCount",
1073
+ parameter_value: "1"
1074
+ },
1075
+ {
1076
+ parameter_key: "HostnamePattern",
1077
+ parameter_value: "fidaty-#{@dns_record_identifier}.qa.colaster.com"
1078
+ },
1079
+ {
1080
+ parameter_key: "HostnamePatternPriority",
1081
+ parameter_value: hostname_pattern_priority
1082
+ },
1083
+ {
1084
+ parameter_key: "PeanoHost",
1085
+ parameter_value: get_route53_hostname("ecs-task-peano-qa-notneeded")
1086
+ }
1087
+ ]
1088
+ if stack_exists?(stack_name_fidaty)
1089
+ cur_version = get_currently_deployed_version(stack_name_fidaty)
1090
+ update_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["fidaty"]['revision'])
1091
+ else
1092
+ create_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role)
1093
+ end
1094
+
1095
+ stack_name_peano = "ecs-task-peano-qa-#{deploy_id}"
1096
+ git_checkout_version('peano', @projects["peano"]['revision'])
1097
+ stack_body = File.read('projects/peano/deploy/task.yml')
1098
+ parameters = [
1099
+ {
1100
+ parameter_key: "Environment",
1101
+ parameter_value: "qa"
1102
+ },
1103
+ {
1104
+ parameter_key: "ReleaseVersion",
1105
+ parameter_value: "#{@projects['peano']['revision']}"
1106
+ },
1107
+ {
1108
+ parameter_key: "ECSClusterName",
1109
+ parameter_value: @ecs_cluster_name
1110
+ },
1111
+ {
1112
+ parameter_key: "TaskDesiredCount",
1113
+ parameter_value: "1"
1114
+ },
1115
+ {
1116
+ parameter_key: "HostnamePattern",
1117
+ parameter_value: "peano-#{@dns_record_identifier}.qa.colaster.com"
1118
+ },
1119
+ {
1120
+ parameter_key: "HostnamePatternPriority",
1121
+ parameter_value: hostname_pattern_priority
1122
+ },
1123
+ {
1124
+ parameter_key: "WebHost",
1125
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1126
+ },
1127
+ {
1128
+ parameter_key: "AssangeHost",
1129
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1130
+ }
1131
+ ]
1132
+ if stack_exists?(stack_name_peano)
1133
+ cur_version = get_currently_deployed_version(stack_name_peano)
1134
+ update_stack(stack_name_peano, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["peano"]['revision'])
1135
+ else
1136
+ create_stack(stack_name_peano, stack_body, parameters, tags, @cf_role)
1137
+ end
1138
+
1139
+ stack_name_rogoreport = "ecs-task-rogoreport-qa-#{deploy_id}"
1140
+ git_checkout_version('rogoreport', @projects["rogoreport"]['revision'])
1141
+ stack_body = IO.read('projects/rogoreport/deploy/task.yml')
1142
+ parameters = [
1143
+ {
1144
+ parameter_key: "Environment",
1145
+ parameter_value: "qa"
1146
+ },
1147
+ {
1148
+ parameter_key: "ReleaseVersion",
1149
+ parameter_value: "#{@projects["rogoreport"]['revision']}"
1150
+ },
1151
+ {
1152
+ parameter_key: "ReleaseName",
1153
+ parameter_value: "rogoreport"
1154
+ },
1155
+ {
1156
+ parameter_key: "ECSClusterName",
1157
+ parameter_value: @ecs_cluster_name
1158
+ }
1159
+ ]
1160
+ if stack_exists?(stack_name_rogoreport)
1161
+ cur_version = get_currently_deployed_version(stack_name_rogoreport)
1162
+ update_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["rogoreport"]['revision'])
1163
+ else
1164
+ create_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role)
1165
+ end
1166
+
1167
+ stack_name_assange = "ecs-task-assange-qa-#{deploy_id}"
1168
+ git_checkout_version('assange', @projects["assange"]['revision'])
1169
+ stack_body = IO.read('projects/assange/deploy/task.yml')
1170
+ parameters = [
1171
+ {
1172
+ parameter_key: "Environment",
1173
+ parameter_value: "qa"
1174
+ },
1175
+ {
1176
+ parameter_key: "ReleaseVersion",
1177
+ parameter_value: "#{@projects["assange"]['revision']}"
1178
+ },
1179
+ {
1180
+ parameter_key: "ECSClusterName",
1181
+ parameter_value: @ecs_cluster_name
1182
+ },
1183
+ {
1184
+ parameter_key: "TaskDesiredCount",
1185
+ parameter_value: "1"
1186
+ },
1187
+ {
1188
+ parameter_key: "ALBShortName",
1189
+ parameter_value: "assange-qa-#{deploy_id}"[0..27]
1190
+ },
1191
+ {
1192
+ parameter_key: "HostnamePattern",
1193
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1194
+ },
1195
+ {
1196
+ parameter_key: "HostnamePatternPriority",
1197
+ parameter_value: (hostname_pattern_priority.to_i + 20).to_s
1198
+ },
1199
+ {
1200
+ parameter_key: "EnvHash",
1201
+ parameter_value: deploy_id
1202
+ },
1203
+ {
1204
+ parameter_key: "WebHost",
1205
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1206
+ },
1207
+ {
1208
+ parameter_key: "AssangeHost",
1209
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1210
+ }
1211
+ ]
1212
+ if stack_exists?(stack_name_assange)
1213
+ cur_version = get_currently_deployed_version(stack_name_assange)
1214
+ update_stack(stack_name_assange, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["assange"]['revision'])
1215
+ else
1216
+ create_stack(stack_name_assange, stack_body, parameters, tags, @cf_role)
1217
+ end
1218
+
1219
+ stack_name_leftorium = "ecs-task-leftorium-qa-#{deploy_id}"
1220
+ git_checkout_version('leftorium', @projects["leftorium"]['revision'])
1221
+ stack_body = File.read('projects/leftorium/deploy/task.yml')
1222
+ parameters = [
1223
+ {
1224
+ parameter_key: "Environment",
1225
+ parameter_value: "qa"
1226
+ },
1227
+ {
1228
+ parameter_key: "ReleaseVersion",
1229
+ parameter_value: "#{@projects["leftorium"]['revision']}"
1230
+ },
1231
+ {
1232
+ parameter_key: "ECSClusterName",
1233
+ parameter_value: @ecs_cluster_name
1234
+ },
1235
+ {
1236
+ parameter_key: "TaskDesiredCount",
1237
+ parameter_value: "1"
1238
+ },
1239
+ {
1240
+ parameter_key: "HostnamePattern",
1241
+ parameter_value: "leftorium-#{@dns_record_identifier}.qa.colaster.com"
1242
+ },
1243
+ {
1244
+ parameter_key: "HostnamePatternPriority",
1245
+ parameter_value: hostname_pattern_priority
1246
+ }
1247
+ ]
1248
+ if stack_exists?(stack_name_leftorium)
1249
+ cur_version = get_currently_deployed_version(stack_name_leftorium)
1250
+ update_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["leftorium"]['revision'])
1251
+ else
1252
+ create_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role)
1253
+ end
1254
+
1255
+ stack_name_rachele = "ecs-task-rachele-qa-#{deploy_id}"
1256
+ git_checkout_version('rachele', @projects["rachele"]['revision'])
1257
+ stack_body = File.read('projects/rachele/deploy/task.yml')
1258
+ parameters = [
1259
+ {
1260
+ parameter_key: "Environment",
1261
+ parameter_value: "qa"
1262
+ },
1263
+ {
1264
+ parameter_key: "ReleaseVersion",
1265
+ parameter_value: "#{@projects["rachele"]['revision']}"
1266
+ },
1267
+ {
1268
+ parameter_key: "ECSClusterName",
1269
+ parameter_value: @ecs_cluster_name
1270
+ },
1271
+ {
1272
+ parameter_key: "TaskDesiredCount",
1273
+ parameter_value: "1"
1274
+ },
1275
+ {
1276
+ parameter_key: "WebHost",
1277
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1278
+ },
1279
+ {
1280
+ parameter_key: "HostnamePattern",
1281
+ parameter_value: "rachele-#{@dns_record_identifier}.qa.colaster.com"
1282
+ },
1283
+ {
1284
+ parameter_key: "HostnamePatternPriority",
1285
+ parameter_value: hostname_pattern_priority
1286
+ }
1287
+ ]
1288
+ if stack_exists?(stack_name_rachele)
1289
+ cur_version = get_currently_deployed_version(stack_name_rachele)
1290
+ unless cur_version.include?(@projects["rachele"]['revision'])
1291
+ delete_stack(stack_name_rachele)
1292
+ wait_for_stack_removal(stack_name_rachele)
1293
+ create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1294
+ end
1295
+ else
1296
+ create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1297
+ end
1298
+
1299
+ stack_name_borat = "ecs-task-borat-qa-#{deploy_id}"
1300
+ git_checkout_version('borat', @projects["borat"]['revision'])
1301
+ stack_body = IO.read('projects/borat/deploy/task.yml')
1302
+ parameters = [
1303
+ {
1304
+ parameter_key: "Environment",
1305
+ parameter_value: "qa"
1306
+ },
1307
+ {
1308
+ parameter_key: "ReleaseVersion",
1309
+ parameter_value: "#{@projects["borat"]['revision']}"
1310
+ },
1311
+ {
1312
+ parameter_key: "ECSClusterName",
1313
+ parameter_value: @ecs_cluster_name
1314
+ },
1315
+ {
1316
+ parameter_key: "TaskDesiredCount",
1317
+ parameter_value: "1"
1318
+ },
1319
+ {
1320
+ parameter_key: "ALBShortName",
1321
+ parameter_value: "borat-qa-#{deploy_id}"[0..27]
1322
+ },
1323
+ {
1324
+ parameter_key: "HostnamePattern",
1325
+ parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1326
+ },
1327
+ {
1328
+ parameter_key: "HostnamePatternPriority",
1329
+ parameter_value: (hostname_pattern_priority.to_i + 30).to_s
1330
+ },
1331
+ {
1332
+ parameter_key: "EnvHash",
1333
+ parameter_value: deploy_id
1334
+ },
1335
+ {
1336
+ parameter_key: "WsEndpoint",
1337
+ parameter_value: "wss://backoffice-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1338
+ },
1339
+ {
1340
+ parameter_key: "GraphqlEndpoint",
1341
+ parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql"
1342
+ },
1343
+ {
1344
+ parameter_key: "GraphqlInsuranceEndpoint",
1345
+ parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql/insurance"
1346
+ },
1347
+ {
1348
+ parameter_key: "AuthEndpoint",
1349
+ parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/auth"
1350
+ },
1351
+ {
1352
+ parameter_key: "FrontendEndpoint",
1353
+ parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1354
+ }
1355
+ ]
1356
+ if stack_exists?(stack_name_borat)
1357
+ cur_version = get_currently_deployed_version(stack_name_borat)
1358
+ update_stack(stack_name_borat, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["borat"]['revision'])
1359
+ else
1360
+ create_stack(stack_name_borat, stack_body, parameters, tags, @cf_role)
1361
+ end
1362
+
1363
+ if deploy_crash?
1364
+ git_checkout_version('crash', @projects['crash']['revision'])
1365
+ stack_name_crash = "ecs-task-crash-qa-#{deploy_id}"
1366
+ stack_body = IO.read('projects/crash/deploy/task.yml')
1367
+ parameters = [
1368
+ {
1369
+ parameter_key: 'Environment',
1370
+ parameter_value: 'qa'
1371
+ },
1372
+ {
1373
+ parameter_key: 'ReleaseVersion',
1374
+ parameter_value: "#{@projects['crash']['revision']}"
1375
+ },
1376
+ {
1377
+ parameter_key: 'TaskDesiredCount',
1378
+ parameter_value: '1'
1379
+ },
1380
+ {
1381
+ parameter_key: 'ECSClusterName',
1382
+ parameter_value: @ecs_cluster_name
1383
+ },
1384
+ {
1385
+ parameter_key: 'ALBShortName',
1386
+ parameter_value: "crash-qa-#{deploy_id}"[0..27]
1387
+ },
1388
+ {
1389
+ parameter_key: 'HostnamePattern',
1390
+ parameter_value: "crash-#{@dns_record_identifier}.qa.colaster.com"
1391
+ },
1392
+ {
1393
+ parameter_key: 'HostnamePatternPriority',
1394
+ parameter_value: (hostname_pattern_priority.to_i + 10).to_s
1395
+ },
1396
+ {
1397
+ parameter_key: "EnvHash",
1398
+ parameter_value: deploy_id
1399
+ },
1400
+ {
1401
+ parameter_key: "WsEndpoint",
1402
+ parameter_value: "wss://crash-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1403
+ },
1404
+ {
1405
+ parameter_key: "GraphqlEndpoint",
1406
+ parameter_value: "https://crash-#{@dns_record_identifier}.qa.colaster.com/graphql"
1407
+ },
1408
+ {
1409
+ parameter_key: "AuthDomain",
1410
+ parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1411
+ },
1412
+ ]
1413
+ if stack_exists?(stack_name_crash)
1414
+ cur_version = get_currently_deployed_version(stack_name_crash)
1415
+ update_stack(stack_name_crash, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["crash"]['revision'])
1416
+ else
1417
+ create_stack(stack_name_crash, stack_body, parameters, tags, @cf_role)
1418
+ end
1419
+ end
1420
+
1421
+ if deploy_starsky_hutch?
1422
+ stack_name_starsky = "ecs-task-starsky-qa-#{deploy_id}"
1423
+ git_checkout_version('starsky', @projects["starsky"]['revision'])
1424
+ stack_body = IO.read('projects/starsky/deploy/task.yml')
1425
+ parameters = [
1426
+ {
1427
+ parameter_key: "Environment",
1428
+ parameter_value: "qa"
1429
+ },
1430
+ {
1431
+ parameter_key: "ReleaseVersion",
1432
+ parameter_value: "#{@projects["starsky"]['revision']}"
1433
+ },
1434
+ {
1435
+ parameter_key: "TaskDesiredCount",
1436
+ parameter_value: "1"
1437
+ },
1438
+ {
1439
+ parameter_key: "ECSClusterName",
1440
+ parameter_value: @ecs_cluster_name
1441
+ },
1442
+ {
1443
+ parameter_key: "ALBShortName",
1444
+ parameter_value: "starsky-qa-#{deploy_id}"[0..27]
1445
+ },
1446
+ {
1447
+ parameter_key: "EnvHash",
1448
+ parameter_value: deploy_id
1449
+ },
1450
+ {
1451
+ parameter_key: "HostnamePattern",
1452
+ parameter_value: "starsky-#{@dns_record_identifier}.qa.colaster.com"
1453
+ },
1454
+ {
1455
+ parameter_key: "HostnamePatternPriority",
1456
+ parameter_value: (hostname_pattern_priority.to_i + 74).to_s
1457
+ }
1458
+ ]
1459
+ if stack_exists?(stack_name_starsky)
1460
+ cur_version = get_currently_deployed_version(stack_name_starsky)
1461
+ unless cur_version.include?(@projects["starsky"]['revision'])
1462
+ delete_stack(stack_name_starsky)
1463
+ wait_for_stack_removal(stack_name_starsky)
1464
+ create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
1465
+ end
1466
+ else
1467
+ create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
1468
+ end
1469
+ end
1470
+
1471
+ stack_name_activia = "ecs-task-activia-qa-#{deploy_id}"
1472
+ git_checkout_version('activia', @projects["activia"]['revision'])
1473
+ stack_body = File.read('projects/activia/deploy/task.yml')
1474
+ parameters = [
1475
+ {
1476
+ parameter_key: "Environment",
1477
+ parameter_value: "qa"
1478
+ },
1479
+ {
1480
+ parameter_key: "ReleaseVersion",
1481
+ parameter_value: "#{@projects["activia"]['revision']}"
1482
+ },
1483
+ {
1484
+ parameter_key: "ECSClusterName",
1485
+ parameter_value: @ecs_cluster_name
1486
+ },
1487
+ {
1488
+ parameter_key: "TaskDesiredCount",
1489
+ parameter_value: "1"
1490
+ },
1491
+ {
1492
+ parameter_key: "HostnamePattern",
1493
+ parameter_value: "activia-#{@dns_record_identifier}.qa.colaster.com"
1494
+ },
1495
+ {
1496
+ parameter_key: "HostnamePatternPriority",
1497
+ parameter_value: hostname_pattern_priority
1498
+ },
1499
+ {
1500
+ parameter_key: "WebHost",
1501
+ parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
1502
+ },
1503
+ {
1504
+ parameter_key: "PeanoHost",
1505
+ parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
1506
+ }
1507
+ ]
1508
+ if stack_exists?(stack_name_activia)
1509
+ cur_version = get_currently_deployed_version(stack_name_activia)
1510
+ update_stack(stack_name_activia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["activia"]['revision'])
1511
+ else
1512
+ create_stack(stack_name_activia, stack_body, parameters, tags, @cf_role)
1513
+ end
1514
+
1515
+ # Waiting for prima healthcheck dependencies
1516
+ wait_for_stack_ready(stack_name_skynet) unless stack_ready?(stack_name_skynet)
1517
+ wait_for_stack_ready(stack_name_urania) unless stack_ready?(stack_name_urania)
1518
+ wait_for_stack_ready(stack_name_bburago) unless stack_ready?(stack_name_bburago)
1519
+ wait_for_stack_ready(stack_name_hal9000) unless stack_ready?(stack_name_hal9000)
1520
+ wait_for_stack_ready(stack_name_assange) unless stack_ready?(stack_name_assange)
1521
+ wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1522
+ wait_for_stack_ready(stack_name_leftorium) unless stack_ready?(stack_name_leftorium)
1523
+ wait_for_stack_ready(stack_name_rachele) unless stack_ready?(stack_name_rachele)
1524
+
1525
+ stack_name_web = "ecs-task-web-qa-#{deploy_id}"
1526
+ git_checkout_version('prima', @projects["prima"]['revision'])
1527
+ stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
1528
+ parameters = [
1529
+ {
1530
+ parameter_key: "Environment",
1531
+ parameter_value: "qa"
1532
+ },
1533
+ {
1534
+ parameter_key: "ReleaseVersion",
1535
+ parameter_value: "#{@projects["prima"]['revision']}"
1536
+ },
1537
+ {
1538
+ parameter_key: "TaskDesiredCount",
1539
+ parameter_value: "1"
1540
+ },
1541
+ {
1542
+ parameter_key: "ECSClusterName",
1543
+ parameter_value: @ecs_cluster_name
1544
+ },
1545
+ {
1546
+ parameter_key: "ALBShortName",
1547
+ parameter_value: "web-qa-#{deploy_id}"[0..27]
1548
+ },
1549
+ {
1550
+ parameter_key: "WebQaBaseHostname",
1551
+ parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
1552
+ },
1553
+ {
1554
+ parameter_key: "HostnamePatternPriority",
1555
+ parameter_value: hostname_pattern_priority
1556
+ },
1557
+ {
1558
+ parameter_key: "HostnamePatternAggregatorPriority",
1559
+ parameter_value: (hostname_pattern_priority.to_i + 1).to_s
1560
+ },
1561
+ {
1562
+ parameter_key: "EnvHash",
1563
+ parameter_value: deploy_id
1564
+ },
1565
+ {
1566
+ parameter_key: "AssangeHostname",
1567
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1568
+ },
1569
+ {
1570
+ parameter_key: "BackofficeHostname",
1571
+ parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1572
+ },
1573
+ {
1574
+ parameter_key: "WebHostname",
1575
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1576
+ },
1577
+ {
1578
+ parameter_key: "FePrimaDomain",
1579
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1580
+ },
1581
+ {
1582
+ parameter_key: "HostnamePattern",
1583
+ parameter_value: "www-#{@dns_record_identifier}.*"
1584
+ }
1585
+ ]
1586
+ if stack_exists?(stack_name_web)
1587
+ cur_version = get_currently_deployed_version(stack_name_web)
1588
+ update_stack(stack_name_web, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1589
+ else
1590
+ create_stack(stack_name_web, stack_body, parameters, tags, @cf_role)
1591
+ end
1592
+
1593
+ stack_name_consumer = "ecs-task-consumer-qa-#{deploy_id}"
1594
+ git_checkout_version('prima', @projects["prima"]['revision'])
1595
+ stack_body = IO.read('projects/prima/app/cloudformation/tasks/consumer.yml')
1596
+ parameters = [
1597
+ {
1598
+ parameter_key: "Environment",
1599
+ parameter_value: "qa"
1600
+ },
1601
+ {
1602
+ parameter_key: "ReleaseVersion",
1603
+ parameter_value: "#{@projects["prima"]['revision']}"
1604
+ },
1605
+ {
1606
+ parameter_key: "ECSClusterName",
1607
+ parameter_value: @ecs_cluster_name
1608
+ },
1609
+ {
1610
+ parameter_key: "NginxHttpHost",
1611
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1612
+ },
1613
+ {
1614
+ parameter_key: "AssangeHostname",
1615
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1616
+ },
1617
+ {
1618
+ parameter_key: "BackofficeHostname",
1619
+ parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1620
+ },
1621
+ {
1622
+ parameter_key: "WebHostname",
1623
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1624
+ },
1625
+ {
1626
+ parameter_key: "FePrimaDomain",
1627
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1628
+ },
1629
+ {
1630
+ parameter_key: "HostnamePattern",
1631
+ parameter_value: "consumer-#{@dns_record_identifier}.qa.colaster.com"
1632
+ },
1633
+ {
1634
+ parameter_key: "WebQaBaseHostname",
1635
+ parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
1636
+ }
1637
+ ]
1638
+ if stack_exists?(stack_name_consumer)
1639
+ cur_version = get_currently_deployed_version(stack_name_consumer)
1640
+ update_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1641
+ else
1642
+ create_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role)
1643
+ end
1644
+
1645
+ stack_name_roger = "ecs-task-roger-qa-#{deploy_id}"
1646
+ git_checkout_version('roger', @projects["roger"]['revision'])
1647
+ stack_body = File.read('projects/roger/deploy/task.yml')
1648
+ parameters = [
1649
+ {
1650
+ parameter_key: "Environment",
1651
+ parameter_value: "qa"
1652
+ },
1653
+ {
1654
+ parameter_key: "ReleaseVersion",
1655
+ parameter_value: @projects["roger"]['revision']
1656
+ },
1657
+ {
1658
+ parameter_key: "TaskDesiredCount",
1659
+ parameter_value: "1"
1660
+ },
1661
+ {
1662
+ parameter_key: "ECSClusterName",
1663
+ parameter_value: @ecs_cluster_name
1664
+ },
1665
+ {
1666
+ parameter_key: "HostnamePattern",
1667
+ parameter_value: "roger-#{@dns_record_identifier}.qa.colaster.com"
1668
+ },
1669
+ {
1670
+ parameter_key: "HostnamePatternPriority",
1671
+ parameter_value: hostname_pattern_priority
1672
+ }
1673
+ ]
1674
+ if stack_exists?(stack_name_roger)
1675
+ cur_version = get_currently_deployed_version(stack_name_roger)
1676
+ update_stack(stack_name_roger, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["roger"]['revision'])
1677
+ else
1678
+ create_stack(stack_name_roger, stack_body, parameters, tags, @cf_role)
1679
+ end
1680
+
1681
+
1682
+ if deploy_starsky_hutch?
1683
+ wait_for_stack_ready(stack_name_starsky) unless stack_ready?(stack_name_starsky) unless !deploy_starsky_hutch?
1684
+
1685
+ stack_name_hutch = "ecs-task-hutch-qa-#{deploy_id}"
1686
+ git_checkout_version('hutch', @projects["hutch"]['revision'])
1687
+ stack_body = File.read('projects/hutch/deploy/task.yml')
1688
+ parameters = [
1689
+ {
1690
+ parameter_key: "Environment",
1691
+ parameter_value: "qa"
1692
+ },
1693
+ {
1694
+ parameter_key: "ReleaseVersion",
1695
+ parameter_value: "#{@projects["hutch"]['revision']}-#{@dns_record_identifier[0..7]}"
1696
+ },
1697
+ {
1698
+ parameter_key: "ALBShortName",
1699
+ parameter_value: "hutch-qa-#{deploy_id}"[0..27]
1700
+ },
1701
+ {
1702
+ parameter_key: "ECSClusterName",
1703
+ parameter_value: @ecs_cluster_name
1704
+ },
1705
+ {
1706
+ parameter_key: "EnvHash",
1707
+ parameter_value: deploy_id
1708
+ },
1709
+ {
1710
+ parameter_key: "HostnamePattern",
1711
+ parameter_value: "hutch-#{@dns_record_identifier}.qa.colaster.com"
1712
+ },
1713
+ {
1714
+ parameter_key: "HostnamePatternPriority",
1715
+ parameter_value: (hostname_pattern_priority.to_i + 254).to_s
1716
+ },
1717
+ {
1718
+ parameter_key: "ApiUrl",
1719
+ parameter_value: "https://#{get_route53_hostname('maia-intermediari')}"
1720
+ }
1721
+ ]
1722
+ if stack_exists?(stack_name_hutch)
1723
+ cur_version = get_currently_deployed_version(stack_name_hutch)
1724
+ update_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hutch"]['revision'])
1725
+ else
1726
+ create_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role)
1727
+ end
1728
+ end
1729
+
1730
+ stack_name_maia = "ecs-task-maia-qa-#{deploy_id}"
1731
+ git_checkout_version('maia', @projects["maia"]['revision'])
1732
+ stack_body = File.read('projects/maia/deploy/task.yml')
1733
+ parameters = [
1734
+ {
1735
+ parameter_key: "Environment",
1736
+ parameter_value: "qa"
1737
+ },
1738
+ {
1739
+ parameter_key: "ReleaseVersion",
1740
+ parameter_value: "#{@projects["maia"]['revision']}"
1741
+ },
1742
+ {
1743
+ parameter_key: "ALBShortName",
1744
+ parameter_value: "maia-qa-#{deploy_id}"[0..15]
1745
+ },
1746
+ {
1747
+ parameter_key: "ECSClusterName",
1748
+ parameter_value: @ecs_cluster_name
1749
+ },
1750
+ {
1751
+ parameter_key: "EnvHash",
1752
+ parameter_value: deploy_id
1753
+ },
1754
+ {
1755
+ parameter_key: "HostnamePatternPublic",
1756
+ parameter_value: "api*-#{@dns_record_identifier}.qa.colaster.com"
1757
+ },
1758
+ {
1759
+ parameter_key: "HostnamePatternPriority",
1760
+ parameter_value: (hostname_pattern_priority.to_i + 128).to_s
1761
+ },
1762
+ {
1763
+ parameter_key: "ProxyHostnameIntermediari",
1764
+ parameter_value: "api-intermediari-#{@dns_record_identifier}.qa.colaster.com"
1765
+ },
1766
+ {
1767
+ parameter_key: "ProxyHostnameApp",
1768
+ parameter_value: "api-#{@dns_record_identifier}.qa.colaster.com"
1769
+ }
1770
+ ]
1771
+ if stack_exists?(stack_name_maia)
1772
+ cur_version = get_currently_deployed_version(stack_name_maia)
1773
+ update_stack(stack_name_maia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["maia"]['revision'])
1774
+ else
1775
+ create_stack(stack_name_maia, stack_body, parameters, tags, @cf_role)
1776
+ end
1777
+
1778
+ wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
1779
+ wait_for_stack_ready(stack_name_consumer) unless stack_ready?(stack_name_consumer)
1780
+ wait_for_stack_ready(stack_name_ermes) unless stack_ready?(stack_name_ermes)
1781
+ wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1782
+ wait_for_stack_ready(stack_name_peano) unless stack_ready?(stack_name_peano)
1783
+ wait_for_stack_ready(stack_name_rogoreport) unless stack_ready?(stack_name_rogoreport)
1784
+ wait_for_stack_ready(stack_name_borat) unless stack_ready?(stack_name_borat)
1785
+ wait_for_stack_ready(stack_name_activia) unless stack_ready?(stack_name_activia)
1786
+ wait_for_stack_ready(stack_name_maia) unless stack_ready?(stack_name_maia)
1787
+ wait_for_stack_ready(stack_name_crash) unless stack_ready?(stack_name_crash) unless !deploy_crash?
1788
+ wait_for_stack_ready(stack_name_hutch) unless stack_ready?(stack_name_hutch) unless !deploy_starsky_hutch?
1789
+
1790
+
1791
+ update_service_defaults(stack_name_web)
1792
+ update_service_defaults(stack_name_consumer)
1793
+ update_service_defaults(stack_name_urania)
1794
+ update_service_defaults(stack_name_ermes)
1795
+ update_service_defaults(stack_name_bburago)
1796
+ update_service_defaults(stack_name_hal9000)
1797
+ update_service_defaults(stack_name_fidaty)
1798
+ update_service_defaults(stack_name_peano)
1799
+ update_service_defaults(stack_name_rogoreport)
1800
+ update_service_defaults(stack_name_assange)
1801
+ update_service_defaults(stack_name_borat)
1802
+ update_service_defaults(stack_name_activia)
1803
+ update_service_defaults(stack_name_skynet)
1804
+ update_service_defaults(stack_name_leftorium)
1805
+ update_service_defaults(stack_name_rachele)
1806
+ update_service_defaults(stack_name_maia)
1807
+ update_service_defaults(stack_name_starsky) unless !deploy_starsky_hutch?
1808
+ update_service_defaults(stack_name_hutch) unless !deploy_starsky_hutch?
1809
+ update_service_defaults(stack_name_crash) unless !deploy_crash?
1810
+
1811
+ activia_hostname = get_route53_hostname("activia")
1812
+ assange_hostname = get_route53_hostname("assange")
1813
+ bburago_hostname = get_route53_hostname("bburago")
1814
+ borat_hostname = get_route53_hostname("borat")
1815
+ ermes_hostname = get_route53_hostname("ermes")
1816
+ fidaty_hostname = get_route53_hostname("fidaty")
1817
+ hal9000_hostname = get_route53_hostname("hal9000")
1818
+ prima_hostname = get_route53_hostname("web")
1819
+ peano_hostname = get_route53_hostname("peano")
1820
+ skynet_hostname = get_route53_hostname("skynet")
1821
+ urania_hostname = get_route53_hostname("urania")
1822
+ roger_hostname = get_route53_hostname("roger")
1823
+ leftorium_hostname = get_route53_hostname("leftorium")
1824
+ rachele_hostname = get_route53_hostname("rachele")
1825
+ maia_app_hostname = get_route53_hostname("maia-app")
1826
+ maia_intermediari_hostname = get_route53_hostname("maia-intermediari")
1827
+ crash_hostname = get_route53_hostname("crash") unless !deploy_crash?
1828
+ starsky_hostname = get_route53_hostname("starsky") unless !deploy_starsky_hutch?
1829
+ hutch_hostname = get_route53_hostname("hutch") unless !deploy_starsky_hutch?
1830
+
1831
+ launch_mimo(deploy_id) if deploy_starsky_hutch?
1832
+
1833
+ projects_text = "
1834
+ > Prima url: https://#{prima_hostname}
1835
+ > Backoffice (Borat) url: https://#{borat_hostname}
1836
+ > Urania url: http://#{urania_hostname}:81
1837
+ > Bburago url: http://#{bburago_hostname}:83
1838
+ > Ermes url: http://#{ermes_hostname}:10002
1839
+ > Hal9000 url: http://#{hal9000_hostname}:10031
1840
+ > Fidaty url: http://#{fidaty_hostname}:10021
1841
+ > Peano url: http://#{peano_hostname}:10039
1842
+ > Assange url: https://#{assange_hostname}
1843
+ > Activia url: http://#{activia_hostname}:10041
1844
+ > Skynet url: http://#{skynet_hostname}:8050
1845
+ > Roger url: http://#{roger_hostname}:10051
1846
+ > Leftorium url: http://#{leftorium_hostname}:10061
1847
+ > Rachele url: http://#{rachele_hostname}:10040
1848
+ > Maia App url: https://#{maia_app_hostname}
1849
+ > Maia Intermediari url: https://#{maia_intermediari_hostname}"
1850
+ projects_text.concat "
1851
+ > Crash url: https://#{crash_hostname}" if deploy_crash?
1852
+ projects_text.concat "
1853
+ > Starsky url: https://#{starsky_hostname}
1854
+ > Hutch url: https://#{hutch_hostname}" if deploy_starsky_hutch?
1855
+ projects_text.concat "
1856
+ > RabbitMQ url: http://#{ec2_ip_address(asg_stack_name)}:15672
1857
+ > Supervisor url: http://#{ec2_ip_address(asg_stack_name)}:9001
1858
+ > Elasticsearch url: http://#{ec2_ip_address(asg_stack_name)}:9200
1859
+ > SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
1860
+ output projects_text.cyan
1861
+ output "Deploy effettuato, everything is awesome!\n".green
1862
+
1863
+ if @projects['prima']['name'] != 'master' then
1864
+ # output "Lancio il batch job per la visual regression..."
1865
+ # launch_bocelli_test(prima_hostname)
1866
+ # output "Visual regression lanciata con successo!"
1867
+
1868
+ output "Lancio i test con Lighthouse..."
1869
+ launch_lighthouse_test(prima_hostname, "mobile")
1870
+ launch_lighthouse_test(prima_hostname, "desktop")
1871
+ output "Test con Lighthouse lanciati con successo..."
1872
+ end
1873
+
1874
+ qainit_write_output(projects_text, 'Indirizzi scritti su ')
1875
+ end
1876
+
1877
# Resolves the public QA hostname for a project.
#
# The project name is matched by substring against an ordered lookup table
# (first match wins, mirroring the original case/when chain — e.g.
# 'maia-app' must be tested before 'maia-intermediari' cannot shadow it).
# Most projects map to "<name>-<id>.qa.colaster.com"; a few use a different
# subdomain prefix (web -> www, borat -> backoffice, maia-app -> api,
# maia-intermediari -> api-intermediari).
#
# Returns the hostname String, or nil when the project is unknown
# (same as the original fall-through behaviour).
def get_route53_hostname(project)
  hostname_prefixes = [
    ['web', 'www'],
    ['urania', 'urania'],
    ['bburago', 'bburago'],
    ['hal9000', 'hal9000'],
    ['fidaty', 'fidaty'],
    ['peano', 'peano'],
    ['assange', 'assange'],
    ['borat', 'backoffice'],
    ['crash', 'crash'],
    ['ermes', 'ermes'],
    ['activia', 'activia'],
    ['skynet', 'skynet'],
    ['roger', 'roger'],
    ['leftorium', 'leftorium'],
    ['rachele', 'rachele'],
    ['starsky', 'starsky'],
    ['hutch', 'hutch'],
    ['maia-app', 'api'],
    ['maia-intermediari', 'api-intermediari']
  ]
  match = hostname_prefixes.find { |needle, _| project.include?(needle) }
  match && "#{match[1]}-#{@dns_record_identifier}.qa.colaster.com"
end
1920
+
1921
# Returns the private IP address of the first EC2 instance that belongs to
# the ECSAutoScalingGroup resource of the given CloudFormation stack.
def ec2_ip_address(asg_stack_name)
  asg_resource = describe_stack_resource(asg_stack_name, 'ECSAutoScalingGroup')
  asg_physical_id = asg_resource.stack_resource_detail.physical_resource_id
  first_instance = describe_auto_scaling_groups([asg_physical_id], 1).auto_scaling_groups[0].instances[0]
  described = describe_instances([first_instance.instance_id])
  described.reservations[0].instances[0].private_ip_address
end
1928
+
611
1929
  def get_alb_host(stack_name)
612
1930
  case
613
1931
  when stack_name.include?('web')
@@ -656,6 +1974,723 @@ class Release
656
1974
  resp.load_balancers[0].dns_name
657
1975
  end
658
1976
 
1977
# Resets the rollout defaults of the ECS service backing +stack_name+ so QA
# deploys replace tasks in place: minimum_healthy_percent 0 allows the old
# task to stop before the new one starts, maximum_percent 100 prevents
# running old and new tasks side by side on the small QA cluster.
#
# The stack-name substring -> ECS service logical id mapping is ordered and
# the first match wins, exactly like the case/when chain it replaces.
#
# Raises RuntimeError when the stack name matches no known service.
def update_service_defaults(stack_name)
  service_logical_ids = [
    ['web', 'ECSServiceWebQA'],
    ['consumer', 'ECSServiceConsumerApiQa'],
    ['urania', 'ECSServiceUraniaQA'],
    ['backoffice', 'ECSServiceBackoffice'],
    ['ermes', 'ECSServiceErmesQA'],
    ['bburago', 'ECSServiceBburagoQA'],
    ['hal9000', 'ECSServiceHal9000QA'],
    ['fidaty', 'ECSServiceFidatyQA'],
    ['skynet', 'ECSServiceSkynetQA'],
    ['roger', 'ECSServiceRogerQA'],
    ['activia', 'ECSServiceActiviaQA'],
    ['peano', 'ECSServicePeanoQA'],
    ['rogoreport', 'ECSServiceRogoreport'],
    ['assange', 'ECSServiceAssangeQA'],
    ['borat', 'ECSServiceBorat'],
    ['leftorium', 'ECSServiceLeftoriumQA'],
    ['rachele', 'ECSServiceRacheleQA'],
    ['crash', 'ECSServiceCrashQA'],
    ['starsky', 'ECSServiceStarskyQA'],
    ['hutch', 'ECSServiceHutch'],
    ['maia', 'ECSServiceMaia']
  ]
  _, logical_resource_id = service_logical_ids.find { |needle, _| stack_name.include?(needle) }
  raise "Service name non gestito per lo stack #{stack_name}" unless logical_resource_id

  resp = describe_stack_resource(stack_name, logical_resource_id)
  update_ecs_service(@ecs_cluster_name, resp.stack_resource_detail.physical_resource_id, {minimum_healthy_percent: 0, maximum_percent: 100})
end
2027
+
2028
# Runs a Lighthouse audit against the QA deploy for the given device
# profile (callers pass "mobile" or "desktop").
#
# Step 1: unless a lighthouse CNAME already exists (see
# get_lighthouse_dns), create a proxied "www-<id>" CNAME pointing at +url+
# on Cloudflare. The zone id is hard-coded; presumably the prima.it zone —
# TODO confirm. ttl: 1 — NOTE(review): looks like Cloudflare's "automatic"
# TTL sentinel — confirm against the API docs.
# Step 2: submit an AWS Batch job on the tools-production queue, using the
# JobDefinition exported by the batch-job-lighthouse-production stack.
# Branch name and committer e-mail are forwarded so the job can attribute
# its report to the author.
def launch_lighthouse_test(url, device)
  @cloudflare.post("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {type: 'CNAME', name: "www-#{@dns_record_identifier}", content: url, proxied: true, ttl: 1}) unless get_lighthouse_dns()

  @batch.submit_job({
    job_name: "lighthouse-#{device}-#{@dns_record_identifier}",
    job_queue: "tools-production",
    job_definition: describe_stack_resource('batch-job-lighthouse-production', 'JobDefinition').stack_resource_detail.physical_resource_id,
    container_overrides: {
      environment: [
        {
          name: "URL_TO_TEST",
          value: "https://www-#{@dns_record_identifier}.prima.it/?superprima"
        },
        {
          name: "DEVICE",
          value: device
        },
        {
          name: "BRANCH_NAME",
          value: @projects['prima']['name']
        },
        {
          name: "COMMITTER_EMAIL",
          value: @projects['prima']['committer']
        }
      ]
    }
  })
end
2057
+
2058
# Looks up the lighthouse CNAME record ("www-<id>.prima.it") on Cloudflare.
# Returns the record id when one exists, false otherwise.
def get_lighthouse_dns()
  response = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', name: "www-#{@dns_record_identifier}.prima.it"})
  payload = response.body
  payload[:result_info][:count] > 0 ? payload[:result][0][:id] : false
end
2065
+
2066
# Deletes the lighthouse CNAME record from Cloudflare, when one exists.
# No-op (returns nil) when get_lighthouse_dns reports no record.
def delete_lighthouse_dns()
  record_id = get_lighthouse_dns()
  return unless record_id
  @cloudflare.delete("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{record_id}")
end
2070
+
2071
# Submits the Bocelli visual-regression test as an AWS Batch job on the
# tools-production queue, pointed at the given QA hostname. Branch name and
# committer e-mail are forwarded so results can be attributed to the author.
def launch_bocelli_test(url)
  job_definition_arn = describe_stack_resource('batch-job-bocelli-production', 'JobDefinition').stack_resource_detail.physical_resource_id
  environment_overrides = [
    { name: "BATCH_COMMAND", value: "test" },
    { name: "QA_HOSTNAME", value: url },
    { name: "BRANCH_NAME", value: @projects['prima']['name'] },
    { name: "COMMITTER_EMAIL", value: @projects['prima']['committer'] }
  ]
  @batch.submit_job({
    job_name: "bocelli-test-#{@dns_record_identifier}",
    job_queue: "tools-production",
    job_definition: job_definition_arn,
    container_overrides: {
      environment: environment_overrides
    }
  })
end
2098
+
2099
# Builds the activia QA release tarball for +revision+ and uploads it to the
# encrypted artifacts bucket under microservices/activia/<revision>-qa.tar.gz.
#
# The build runs inside the project's docker-compose "web" container:
# fetch/compile deps, build webpack assets, digest them, and produce a
# distillery release. Leaves the process cwd back at the repo root.
def create_activia_artifact(revision)
  output "Preparo l'artifact activia .zip\n".yellow

  git_checkout_version('activia', revision)

  Dir.chdir 'projects/activia'

  decrypt_secrets()

  # Keep a pristine copy, then swap in the qainit-specific compose file.
  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory activia && cp docker-compose-qainit.yml docker-compose.yml'

  # execute_command "deploy/build_qa_artifact"

  [
    "docker-compose build web",
    # sysconfcpus -n 2 caps the CPUs webpack sees; presumably to bound
    # build memory/parallelism on the CI host — TODO confirm.
    "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
      '-c' 'mix local.hex --force && mix hex.info && \
      mix deps.get && mix compile && mix deps.compile && \
      cd assets && \
      rm -rf node_modules && \
      yarn --cache-folder ~/.cache/yarn && \
      sysconfcpus -n 2 ./node_modules/.bin/webpack --mode production && \
      cd .. && \
      mix phx.digest && \
      rm -rf _build/qa/rel/ && \
      mix distillery.release --env=qa'"
  ].each do |cmd|
    execute_command cmd
  end

  cleanup_containers

  # The release version directory is unknown in advance, hence the glob.
  artifact_path = Dir.glob("_build/qa/rel/activia/releases/*/activia.tar.gz").first

  upload_artifact(artifact_path, "microservices/activia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2138
+
2139
# Builds the assange QA release tarball for +revision+ via the project's own
# deploy/build_qa_artifact script and uploads it to the encrypted artifacts
# bucket under microservices/assange/<revision>-qa.tar.gz.
def create_assange_artifact(revision)
  output "Preparo l'artifact assange .zip\n".yellow

  git_checkout_version('assange', revision)
  Dir.chdir 'projects/assange'
  decrypt_secrets()

  [
    'cp docker-compose.yml docker-compose-ci.yml',
    'prepare-docker-compose --directory assange && cp docker-compose-qainit.yml docker-compose.yml',
    'deploy/build_qa_artifact'
  ].each { |step| exec_step step }

  cleanup_containers

  # The release version directory is unknown in advance, hence the glob.
  release_tarball = Dir.glob("_build/qa/rel/assange/releases/*/assange.tar.gz").first
  upload_artifact(release_tarball, "microservices/assange/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2159
+
2160
# Builds the bburago QA release tarball for +revision+ and uploads it to the
# encrypted artifacts bucket under microservices/bburago/<revision>-qa.tar.gz.
# The release itself is produced by the container's deploy/create_qa_artifact
# entrypoint.
def create_bburago_artifact(revision)
  output "Preparo l'artifact bburago .zip\n".yellow

  git_checkout_version('bburago', revision)
  Dir.chdir 'projects/bburago'
  decrypt_secrets()

  # Keep a pristine copy, then swap in the qainit-specific compose file.
  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory bburago && cp docker-compose-qainit.yml docker-compose.yml'

  execute_command "docker-compose build web"
  execute_command "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint deploy/create_qa_artifact web"

  cleanup_containers

  # The release version directory is unknown in advance, hence the glob.
  release_tarball = Dir.glob('_build/qa/rel/bburago/releases/*/bburago.tar.gz').first
  upload_artifact(release_tarball, "microservices/bburago/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2185
+
2186
# Builds the borat (backoffice) QA release tarball for +revision+ via the
# project's deploy/build_qa_artifact script and uploads it to the encrypted
# artifacts bucket under microservices/borat/<revision>-qa.tar.gz.
def create_borat_artifact(revision)
  output "Preparo l'artifact borat .zip\n".yellow

  git_checkout_version('borat', revision)
  Dir.chdir 'projects/borat'
  decrypt_secrets()

  # Keep a pristine copy, then swap in the qainit-specific compose file.
  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory borat && cp docker-compose-qainit.yml docker-compose.yml'

  execute_command "deploy/build_qa_artifact"

  cleanup_containers

  # The release version directory is unknown in advance, hence the glob.
  release_tarball = Dir.glob("_build/qa/rel/borat/releases/*/borat.tar.gz").first
  upload_artifact(release_tarball, "microservices/borat/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2207
+
2208
# Builds the crash QA release tarball for +revision+ and uploads it to the
# encrypted artifacts bucket under microservices/crash/<revision>-qa.tar.gz.
# The heavy lifting is delegated to the project's deploy/build_qa_artifact
# script, which receives the deploy id.
#
# Fix: dropped the unused local `crash_qa_host` (assigned from
# get_route53_hostname but never read).
def create_crash_artifact(revision, deploy_id)
  output "Preparo l'artifact crash .zip\n".yellow

  git_checkout_version('crash', revision)

  Dir.chdir 'projects/crash'

  decrypt_secrets()

  # NOTE(review): unlike the other builders this overwrites (not copies)
  # docker-compose.yml with the CI variant, via a bare shell-out.
  `mv docker-compose-ci.yml docker-compose.yml`
  exec_step 'prepare-docker-compose --directory crash && cp docker-compose-qainit.yml docker-compose.yml'

  execute_command "deploy/build_qa_artifact #{deploy_id}"

  cleanup_containers

  # The release version directory is unknown in advance, hence the glob.
  artifact_path = Dir.glob('_build/qa/rel/crash/releases/*/crash.tar.gz').first
  upload_artifact(artifact_path, "microservices/crash/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2231
+
2232
# Builds the ermes QA release tarball for +revision+ and uploads it to the
# encrypted artifacts bucket under microservices/ermes/<revision>-qa.tar.gz.
#
# Prefers the project-provided deploy/build_qa_artifact script; when the
# checked-out revision predates that script, falls back to building the
# release through docker-compose (first ensuring the crash_default docker
# network exists, since the compose run attaches to it).
#
# Fix: File.exists? is deprecated and was removed in Ruby 3.2; use the
# supported File.exist? instead.
def create_ermes_artifact(revision)
  output "Preparo l'artifact ermes .zip\n".yellow

  git_checkout_version('ermes', revision)

  Dir.chdir 'projects/ermes'

  decrypt_secrets()

  # Keep a pristine copy, then swap in the qainit-specific compose file.
  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory ermes && cp docker-compose-qainit.yml docker-compose.yml'

  if File.exist? 'deploy/build_qa_artifact'
    execute_command "deploy/build_qa_artifact"
  else
    [
      "if echo `docker network ls` | grep crash_default; \
      then echo 'crash_default network already existing'; \
      else docker network create crash_default; fi",
      'docker-compose build web',
      "docker-compose run --rm -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
      '-c' 'mix local.hex --force && mix hex.info && \
      mix deps.get && mix compile && mix deps.compile && \
      mix phx.digest && \
      MIX_ENV=dev mix compile.sms && \
      MIX_ENV=dev mix compile.html && \
      MIX_ENV=dev mix compile.heml && \
      MIX_ENV=dev mix compile.app_notification && \
      rm -rf _build/qa/rel/ && \
      mix release --env=qa'"
    ].each do |cmd|
      execute_command cmd
    end
  end

  cleanup_containers

  # The release version directory is unknown in advance, hence the glob.
  artifact_path = Dir.glob("_build/qa/rel/ermes/releases/*/ermes.tar.gz").first
  upload_artifact(artifact_path, "microservices/ermes/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2273
+
2274
  # Builds the fidaty QA artifact and uploads it to the encrypted bucket.
  # Uses deploy/build_qa_artifact when present, otherwise an inline
  # docker-compose Mix release build.
  def create_fidaty_artifact(revision)
    output "Preparo l'artifact fidaty .zip\n".yellow

    git_checkout_version('fidaty', revision)

    Dir.chdir 'projects/fidaty'

    decrypt_secrets()

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory fidaty && cp docker-compose-qainit.yml docker-compose.yml'

    # NOTE(review): File.exists? is deprecated; File.exist? is preferred.
    if File.exists? 'deploy/build_qa_artifact'
      execute_command "deploy/build_qa_artifact"
    else
      [
        "docker-compose build web",
        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
        '-c' 'mix local.hex --force && mix hex.info && \
        mix deps.get && mix compile && mix deps.compile && \
        mix phx.digest && \
        rm -rf _build/qa/rel/ && \
        mix release --env=qa'"
      ].each do |cmd|
        execute_command cmd
      end
    end

    cleanup_containers

    artifact_path = Dir.glob("_build/qa/rel/fidaty/releases/*/fidaty.tar.gz").first
    upload_artifact(artifact_path, "microservices/fidaty/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

    Dir.chdir '../../'
  end
2309
+
2310
  # Builds the hal9000 QA artifact via the project's build script and
  # uploads it to the encrypted bucket.
  def create_hal9000_artifact(revision)
    output "Preparo l'artifact hal9000 .zip\n".yellow

    git_checkout_version('hal9000', revision)

    Dir.chdir 'projects/hal9000'

    decrypt_secrets()

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory hal9000 && cp docker-compose-qainit.yml docker-compose.yml'
    # Dead code kept for reference: the former inline build, superseded
    # by deploy/build_qa_artifact below.
    # [
    # # "docker-compose build web",
    # # "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
    # # '-c' 'mix local.hex --force && mix hex.info && \
    # # mix deps.get && mix compile && mix deps.compile && \
    # # mix phx.digest assets -o priv/static && \
    # # rm -rf _build/qa/rel/ && \
    # # mix release --env=qa'"
    # ].each do |cmd|
    # execute_command cmd
    # end

    execute_command "deploy/build_qa_artifact"

    cleanup_containers

    artifact_path = Dir.glob("_build/qa/rel/hal9000/releases/*/hal9000.tar.gz").first
    upload_artifact(artifact_path, "microservices/hal9000/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

    Dir.chdir '../../'
  end
2342
+
2343
  # Builds the hutch QA artifact. The build script receives the
  # maia-intermediari QA hostname, and the uploaded key embeds the first
  # 8 chars of @dns_record_identifier so each QA env gets its own build.
  def create_hutch_artifact(revision)
    output "Preparo l'artifact hutch\n".yellow

    git_checkout_version('hutch', revision)

    Dir.chdir 'projects/hutch'

    # Skip decryption when secrets were already materialized in a
    # previous run of this checkout.
    decrypt_secrets() unless File.exist?('config/secrets.yml')

    exec_step 'cp docker-compose-ci.yml docker-compose.yml'
    exec_step 'prepare-docker-compose --directory hutch && cp docker-compose-qainit.yml docker-compose.yml'

    execute_command "deploy/build_qa_artifact #{get_route53_hostname("maia-intermediari")}"

    cleanup_containers

    # Unlike the Elixir projects, the tarball is produced in the project root.
    artifact_path = "./hutch.tar.gz"
    upload_artifact(artifact_path, "microservices/hutch/#{revision}-#{@dns_record_identifier[0..7]}-qa.tar.gz", "#{@s3_bucket}-encrypted")

    Dir.chdir '../../'
  end
2364
+
2365
  # Builds the leftorium QA artifact via an inline docker-compose Mix
  # release build and uploads it to the encrypted bucket.
  def create_leftorium_artifact(revision)
    output "Preparo l'artifact leftorium .zip\n".yellow

    git_checkout_version('leftorium', revision)

    Dir.chdir 'projects/leftorium'

    decrypt_secrets()

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory leftorium && cp docker-compose-qainit.yml docker-compose.yml'
    [
      "docker-compose build web",
      "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
      '-c' 'mix local.hex --force && mix hex.info && \
      mix deps.get && mix compile && mix deps.compile && \
      rm -rf _build/qa/rel/ && \
      mix release --env=qa'"
    ].each do |cmd|
      execute_command cmd
    end

    cleanup_containers

    artifact_path = Dir.glob('_build/qa/rel/leftorium/releases/*/leftorium.tar.gz').first
    upload_artifact(artifact_path, "microservices/leftorium/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

    Dir.chdir '../../'
  end
2394
+
2395
  # Builds the maia QA artifact via the project's build script and
  # uploads it to the encrypted bucket.
  def create_maia_artifact(revision)
    output "Preparo l'artifact maia .zip\n".yellow

    git_checkout_version('maia', revision)

    Dir.chdir 'projects/maia'

    decrypt_secrets()

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory maia && cp docker-compose-qainit.yml docker-compose.yml'

    execute_command 'deploy/build_qa_artifact'

    cleanup_containers

    artifact_path = Dir.glob('_build/qa/rel/maia/releases/*/maia.tar.gz').first
    upload_artifact(artifact_path, "microservices/maia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

    Dir.chdir '../../'
  end
2416
+
2417
  # Builds the peano QA artifact via the project's build script and
  # uploads it to the encrypted bucket.
  def create_peano_artifact(revision)
    output "Preparo l'artifact peano .zip\n".yellow

    git_checkout_version('peano', revision)

    Dir.chdir 'projects/peano'

    # Skip decryption when secrets are already present from a prior run.
    decrypt_secrets() unless File.exist?('config/secrets.yml')

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory peano && cp docker-compose-qainit.yml docker-compose.yml'

    execute_command "deploy/build_qa_artifact"

    cleanup_containers

    artifact_path = Dir.glob("_build/qa/rel/peano/releases/*/peano.tar.gz").first
    upload_artifact(artifact_path, "microservices/peano/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

    Dir.chdir '../../'
  end
2438
+
2439
+ def create_prima_artifact(revision, branch_name, deploy_id)
2440
+ output "Preparo l'artifact prima .zip\n".yellow
2441
+
2442
+ git_checkout_version('prima', revision)
2443
+
2444
+ Dir.chdir 'projects/prima'
2445
+
2446
+ ['vendor'].each do |dir|
2447
+ unless File.directory?(dir)
2448
+ if File.directory?("../../../prima/#{dir}")
2449
+ exec_step "rsync -a ../../../prima/#{dir} ."
2450
+ end
2451
+ end
2452
+ end
2453
+
2454
+ exec_step 'mv docker-compose-ci.yml docker-compose.yml'
2455
+ exec_step 'prepare-docker-compose --directory prima'
2456
+ exec_step 'sed -i "s/\\/home\\/app/\\/root/g" docker-compose.yml'
2457
+ `sed -i 's/"@prima-assicurazioni/pyxis-npm": ".*",/"@prima-assicurazioni/pyxis-npm": "#{@pyxis_version}",/' package.json` if deploy_pyxis?
2458
+ [
2459
+ "bin/qa_build_artifact.sh #{branch_name} #{'update' if @deploy_update}"
2460
+ ].each do |cmd|
2461
+ execute_command cmd
2462
+ end
2463
+
2464
+ cleanup_containers
2465
+
2466
+ Dir.chdir "../../"
2467
+ end
2468
+
2469
  # Publishes a QA build of the pyxis npm package (when pyxis is part of
  # this deploy): derives a unique "0.<deploy-digits>.<n>" version from
  # the deploy id and the count of already-published QA versions, patches
  # package.json, builds and publishes from inside the web container.
  # Side effect: sets @pyxis_version for create_prima_artifact to pin.
  def create_pyxis_artifact(revision, deploy_id)
    if (deploy_pyxis?)
      output "Preparo l'artifact pyxis\n".yellow

      git_checkout_version('pyxis-npm', revision)

      Dir.chdir 'projects/pyxis-npm'

      decrypt_secrets()

      exec_step 'mv .fakenpmrc .npmrc'
      exec_step 'cp docker-compose.yml docker-compose-ci.yml'
      exec_step 'prepare-docker-compose --directory pyxis-npm && cp docker-compose-qainit.yml docker-compose.yml'
      exec_step 'docker-compose build web'

      # Published versions can only be queried from inside the container,
      # so write them to a file and read it right after.
      # NOTE(review): the package is referenced elsewhere as
      # "@prima-assicurazioni/pyxis-npm"; here `npm view` gets it without
      # the leading "@" -- confirm this resolves to the same package.
      exec_step "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
      '-c' 'npm view prima-assicurazioni/pyxis-npm versions --json > versions.json'"
      published_versions = `cat versions.json`
      qa_versions = JSON.parse(published_versions).select{ |version| version.include? get_pyxis_version(deploy_id) }

      @pyxis_version = "0.#{get_pyxis_version(deploy_id)}.#{qa_versions.size}"

      # Rewrite the "version" field (assumed to sit on line 3 of package.json).
      `sed -i '3s/".*/"version": "#{@pyxis_version}",/' package.json`
      [
        "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
        '-c' 'yarn install && \
        yarn build:prod && \
        npm publish'"
      ].each do |cmd|
        execute_command cmd
      end

      cleanup_containers
      Dir.chdir '../../'
    end
  end
2505
+
2506
  # Builds the rachele QA artifact via an inline docker-compose Mix
  # release build and uploads it to the encrypted bucket.
  def create_rachele_artifact(revision)
    output "Preparo l'artifact rachele .zip\n".yellow

    git_checkout_version('rachele', revision)

    Dir.chdir 'projects/rachele'

    decrypt_secrets()

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory rachele && cp docker-compose-qainit.yml docker-compose.yml'

    execute_command "docker-compose build web"

    [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
    '-c' 'mix local.hex --force && mix hex.info && \
    mix deps.get && mix compile && mix deps.compile && \
    rm -rf _build/qa/rel/ && \
    mix release --env=qa'"
    ].each do |cmd|
      execute_command cmd
    end

    cleanup_containers

    artifact_path = Dir.glob("_build/qa/rel/rachele/releases/*/rachele.tar.gz").first
    upload_artifact(artifact_path, "microservices/rachele/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

    Dir.chdir '../../'
  end
2536
+
2537
  # Builds the roger QA artifact via an inline docker-compose build.
  # Note this project uses `mix distillery.release` (newer Distillery),
  # unlike the other Elixir projects which call `mix release`.
  def create_roger_artifact(revision)
    output "Preparo l'artifact roger .zip\n".yellow

    git_checkout_version('roger', revision)

    Dir.chdir 'projects/roger'

    decrypt_secrets()

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory roger && cp docker-compose-qainit.yml docker-compose.yml'
    [
      "docker-compose build web",
      "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
      '-c' 'mix local.hex --force && mix hex.info && \
      mix deps.get && mix compile && mix deps.compile && \
      mix phx.digest && \
      rm -rf _build/qa/rel/ && \
      mix distillery.release --env=qa'"
    ].each do |cmd|
      execute_command cmd
    end

    cleanup_containers

    artifact_path = Dir.glob("_build/qa/rel/roger/releases/*/roger.tar.gz").first
    upload_artifact(artifact_path, "microservices/roger/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

    Dir.chdir '../../'
  end
2567
+
2568
  # Builds the rogoreport QA artifact via an inline docker-compose Mix
  # release build. Note the upload key prefixes the filename with
  # "rogoreport-", unlike the other projects.
  def create_rogoreport_artifact(revision)
    output "Preparo l'artifact rogoreport .zip\n".yellow

    git_checkout_version('rogoreport', revision)

    Dir.chdir 'projects/rogoreport'

    # Skip decryption when secrets are already present from a prior run.
    decrypt_secrets() unless File.exist?('config/secrets.yml')

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory rogoreport && cp docker-compose-qainit.yml docker-compose.yml'
    [
      "docker-compose build web",
      "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
      '-c' 'mix local.hex --force && mix hex.info && \
      mix deps.get && mix compile && mix deps.compile && \
      rm -rf _build/qa/rel/ && \
      mix release --name=rogoreport --env=qa'"
    ].each do |cmd|
      execute_command cmd
    end

    cleanup_containers

    artifact_path = Dir.glob("_build/qa/rel/rogoreport/releases/*/rogoreport.tar.gz").first
    upload_artifact(artifact_path, "microservices/rogoreport/rogoreport-#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

    Dir.chdir '../../'
  end
2597
+
2598
+ def create_skynet_artifact(revision)
2599
+ output "Preparo l'artifact skynet\n".yellow
2600
+
2601
+ git_checkout_version('skynet', revision)
2602
+
2603
+ Dir.chdir 'projects/skynet'
2604
+
2605
+ version = `git rev-parse HEAD`
2606
+
2607
+ artifact_path = "/tmp/skynet-#{revision}-qa.tar.gz"
2608
+
2609
+ exec_step "tar cfz #{artifact_path} core config oracles pickles web tests logstash application_log configuration.py wsgi.py requirements.txt requirements_training.txt"
2610
+
2611
+ upload_artifact(artifact_path, "microservices/skynet/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2612
+
2613
+ Dir.chdir '../../'
2614
+ end
2615
+
2616
  # Builds the starsky (Rust) QA artifact: compiles the main binary plus
  # the migrate and rabbit_worker binaries with the `qa` feature inside
  # the web container, tars them with the configs, and uploads the result.
  def create_starsky_artifact(revision)
    output "Preparo l'artifact starsky\n".yellow

    git_checkout_version('starsky', revision)

    Dir.chdir 'projects/starsky'

    # NOTE(review): `version` is never used below -- dead assignment.
    version = `git rev-parse HEAD`

    #artifact_path = "/tmp/starsky-#{revision}-qa.tar.gz"

    decrypt_secrets() unless File.exist?('config/secrets.yml')

    `mv docker-compose-ci.yml docker-compose.yml`
    exec_step 'prepare-docker-compose --directory starsky && cp docker-compose-qainit.yml docker-compose.yml'
    # Bake the deploy id into the QA env file before using it as .env.
    exec_step "sed -i s/qa_deploy_id/#{get_deploy_id}/g .env.dist.qa"
    exec_step "cp .env.dist.qa .env"

    [
      "sed -i 's/USER app/USER root/g' Dockerfile",
      # starsky's containers attach to peano's compose network.
      "if echo `docker network ls` | grep peano_default; \
      then echo 'peano_default network already existing'; \
      else docker network create peano_default; fi",
      "docker-compose build web",
      "docker-compose run -w $PWD -e BUILD_ENV=qa -u root --entrypoint /bin/sh web \
      '-c' 'cargo build --release -vv --features=qa \
      && cargo build --bin migrate --release --features=qa \
      && cargo build --bin rabbit_worker --release --features=qa \
      && cp -p target/release/starsky . \
      && cp -p target/release/migrate . \
      && cp -p target/release/rabbit_worker . \
      && tar cfz #{revision}-qa.tar.gz config starsky migrate rabbit_worker .env.dist.*'"
    ].each do |cmd|
      execute_command cmd
    end

    artifact_path = "./#{revision}-qa.tar.gz"

    upload_artifact(artifact_path, "microservices/starsky/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

    Dir.chdir '../../'
  end
2658
+
2659
  # Builds the urania QA artifact, preferring the project's own
  # deploy/build_qa_artifact script over the inline fallback build.
  def create_urania_artifact(revision)
    output "Preparo l'artifact urania .zip\n".yellow

    git_checkout_version('urania', revision)

    Dir.chdir 'projects/urania'

    decrypt_secrets()

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory urania && cp docker-compose-qainit.yml docker-compose.yml'

    # NOTE(review): File.exists? is deprecated; File.exist? is preferred.
    if File.exists? 'deploy/build_qa_artifact'
      execute_command "deploy/build_qa_artifact"
    else
      [
        "docker-compose build web",
        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
        '-c' 'mix local.hex --force && mix hex.info && \
        mix deps.get && mix compile && mix deps.compile && \
        rm -rf _build/qa/rel/ && \
        mix release --env=qa'"
      ].each do |cmd|
        execute_command cmd
      end
    end

    cleanup_containers

    artifact_path = Dir.glob("_build/qa/rel/urania/releases/*/urania.tar.gz").first
    upload_artifact(artifact_path, "microservices/urania/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

    Dir.chdir '../../'
  end
2693
+
659
2694
  def deploy_pyxis?
660
2695
  if defined? @deploy_pyxis
661
2696
  @deploy_pyxis
@@ -669,11 +2704,146 @@ class Release
669
2704
  end
670
2705
  end
671
2706
 
2707
  # Whether the crash service must be deployed in this QA environment.
  # Currently hard-wired to true; the commented logic below is the
  # intended branch-based check, disabled until the leftorium<->crash
  # dependency is sorted out.
  def deploy_crash?
    true # until we decide how to handle leftorium needing to talk to crash
    # crash_present = !@projects['crash'].empty? && @projects['crash']['name'] != 'master' && !@projects['crash']['default_branch']
    # leftorium_present = !@projects['leftorium'].empty? && @projects['leftorium']['name'] != 'master' && !@projects['leftorium']['default_branch']
    # crash_present || leftorium_present
  end
2713
+
2714
  # Whether starsky/hutch must be deployed. Currently hard-wired to
  # true; the commented logic below is the intended branch-based check.
  def deploy_starsky_hutch?
    true
    #starsky_present = !@projects['starsky'].nil? && !@projects['starsky'].empty? && @projects['starsky']['name'] != 'master' && !@projects['starsky']['default_branch']
    #hutch_present = !@projects['hutch'].nil? && !@projects['hutch'].empty? && @projects['hutch']['name'] != 'master' && !@projects['hutch']['default_branch']
    #starsky_present || hutch_present
  end
2720
+
2721
+ def get_pyxis_version(deploy_id)
2722
+ (deploy_id.delete '[a-z0]')[0..9]
2723
+ end
2724
+
2725
  # Tears down the docker-compose environment of the current project
  # directory and removes any exited containers left behind by the build.
  # Failures are intentionally ignored (backticks, exit status unchecked).
  def cleanup_containers
    `docker-compose kill && docker-compose down -v --remove-orphans`
    `docker rm $(docker ps -q -f status=exited)`
  end
2729
+
2730
  # Checks out +revision+ inside projects/<project>, discarding any local
  # modifications first, then returns to the original working directory.
  def git_checkout_version(project, revision)
    Dir.chdir "projects/#{project}"
    exec_step "git checkout -- . && git checkout #{revision}"
    Dir.chdir "../../"
  end
2735
+
2736
+ def create_asg_stack(stack_name, tags = [])
2737
+ stack_body = IO.read('cloudformation/stacks/asg/ecs-asg-allinone.yml')
2738
+ parameters = [
2739
+ {
2740
+ parameter_key: "Environment",
2741
+ parameter_value: "qa"
2742
+ },
2743
+ {
2744
+ parameter_key: "InstanceType",
2745
+ parameter_value: "t3a.xlarge"
2746
+ },
2747
+ {
2748
+ parameter_key: "ECSClusterName",
2749
+ parameter_value: @ecs_cluster_name
2750
+ },
2751
+ {
2752
+ parameter_key: "AMIID",
2753
+ parameter_value: @ami_id
2754
+ }
2755
+ ]
2756
+ create_stack(stack_name, stack_body, parameters, tags, @cf_role)
2757
+ end
2758
+
2759
  # Creates the ECS cluster stack (no parameters).
  # NOTE(review): unlike the ASG/ALB stacks this does not pass @cf_role,
  # so it runs under the caller's default CloudFormation permissions --
  # confirm this is intentional.
  def create_cluster_stack(stack_name, tags = [])
    stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
    create_stack(stack_name, stack_body, [], tags)
  end
2763
+
672
2764
  def update_cluster_stack(stack_name, tags = [])
673
2765
  stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
674
2766
  update_stack(stack_name, stack_body, [], tags)
675
2767
  end
676
2768
 
2769
+ def create_alb_stack(stack_name, role, hash, environment = 'qa')
2770
+ stack_body = IO.read('cloudformation/stacks/elb/alb-public-qa.yml')
2771
+ parameters = [
2772
+ {
2773
+ parameter_key: "Environment",
2774
+ parameter_value: environment
2775
+ },
2776
+ {
2777
+ parameter_key: "Role",
2778
+ parameter_value: role
2779
+ },
2780
+ {
2781
+ parameter_key: "EnvHash",
2782
+ parameter_value: hash
2783
+ }
2784
+ ]
2785
+ create_stack(stack_name, stack_body, parameters, [], @cf_role)
2786
+ end
2787
+
2788
  # Copies selected key prefixes from the staging Redis (db 10) into the
  # QA Redis at +qa_ip_address+ using DUMP/RESTORE. Keys already present
  # on the QA side are left untouched.
  def import_redis_crash(qa_ip_address)
    output "Importo chiavi di Redis da staging\n".yellow

    prefixes = ['CODICI', 'fun_with_flags']
    redis_qa = Redis.new(:url => "redis://#{qa_ip_address}:6379/10")
    redis_staging = Redis.new(:url => 'redis://staging.cache-1.prima.it:6379/10')

    prefixes.each do |prefix|
      redis_staging.keys("#{prefix}*").each do |key|
        # Do not overwrite keys that already exist on QA.
        next unless redis_qa.keys(key).empty?
        output "Importo #{key} dal Redis di staging\n".yellow
        dump_staging = redis_staging.dump key
        redis_qa.restore key, 0, dump_staging # ttl 0 = no expiry
      end
    end
  end
2804
+
2805
+ def import_dbs(ip_address)
2806
+ overrides = {
2807
+ container_overrides: [
2808
+ {
2809
+ name: 'dbrestore',
2810
+ environment: [
2811
+ {
2812
+ name: 'EC2_IP_ADDRESS',
2813
+ value: ip_address
2814
+ }
2815
+ ]
2816
+ }
2817
+ ]
2818
+ }
2819
+ resp = run_ecs_task(@ecs_cluster_name, @import_db_task, overrides, 1)
2820
+ return resp
2821
+ end
2822
+
2823
+ def wait_for_db_import(task)
2824
+ output "Attendo che i DB vengano importati...\n".yellow
2825
+ stopped_at = nil
2826
+ sleep 15 # altrimenti non trova il task appena avviato...
2827
+ while stopped_at.nil?
2828
+ if task.tasks[0].nil?
2829
+ pp @ecs_cluster_name
2830
+ pp task
2831
+ stop_if true, "Task di import DB lanciato, ma risposta vuota!".red
2832
+ end
2833
+ task = describe_ecs_tasks(task.tasks[0].cluster_arn, [task.tasks[0].task_arn])
2834
+ stopped_at = task.tasks[0].stopped_at unless task.tasks[0].nil?
2835
+ sleep_seconds = 10
2836
+ seconds_elapsed = 0
2837
+ while true && stopped_at.nil?
2838
+ break if seconds_elapsed >= sleep_seconds
2839
+ print '.'.yellow; STDOUT.flush
2840
+ sleep 1
2841
+ seconds_elapsed += 1
2842
+ end
2843
+ end
2844
+ print "\n"
2845
+ end
2846
+
677
2847
  def choose_branch_to_deploy(project_name, select_master = false)
678
2848
  Dir.chdir "projects/#{project_name}"
679
2849
  output "Recupero la lista dei branch del progetto #{project_name}..."
@@ -761,6 +2931,78 @@ class Release
761
2931
  (Time.now.to_i.to_s[-4..-1].to_i + Random.rand(40000)).to_s
762
2932
  end
763
2933
 
2934
  # Submits a "mimo" (Cypress end-to-end suite) AWS Batch job against the
  # QA environment identified by +env_hash+, resolving the job definition
  # from the batch-job-mimo CloudFormation stack.
  def launch_mimo(env_hash)
    resp = describe_stack_resource('batch-job-mimo', 'JobDefinition')

    @batch.submit_job({
      job_name: "mimo-#{@dns_record_identifier}", # required
      job_queue: "tools-production", # required
      job_definition: resp.stack_resource_detail.physical_resource_id, # required
      container_overrides: {
        environment: [
          {
            name: 'ENV_HASH',
            value: env_hash
          },
          {
            name: 'APP_ENV',
            value: 'qa'
          },
          {
            name: 'CYPRESS_BASE_URL',
            value: "https://hutch-#{env_hash}.qa.colaster.com"
          },
          {
            name: 'CYPRESS_PEANO_BASE_URL',
            value: "http://peano-#{env_hash}.qa.colaster.com:10039/quotation"
          },
          {
            name: 'CYPRESS_API_BASE_URL',
            value: "https://#{get_route53_hostname("starsky")}/graphql"
          },
          {
            name: 'QA_NAME',
            value: @git_branch
          }
        ]
      }
    })

    output "Mimo lanciato con successo!\n".green
  end
2973
+
2974
+ def get_currently_deployed_version(stack_name)
2975
+ parameters = get_stack_parameters(stack_name)
2976
+ currently_deployed_version = nil
2977
+ parameters.each do |parameter|
2978
+ if parameter.parameter_key == "ReleaseVersion"
2979
+ currently_deployed_version = parameter.parameter_value
2980
+ end
2981
+ end
2982
+ currently_deployed_version
2983
+ end
2984
+
2985
  # Materializes the current project's decrypted config files by running
  # the biscuit_populate_configs image against the working directory,
  # sharing the host AWS credentials and this container's volumes.
  def decrypt_secrets()
    docker_image = "prima/biscuit_populate_configs"
    [
      "docker pull #{docker_image}",
      "docker run -t --rm -v $HOME/.aws:/root/.aws -w $PWD --volumes-from #{get_host_container_name} #{docker_image}"
    ].each do |cmd|
      execute_command cmd
    end
  end
2994
+
2995
  # Memoized lookup of the docker container name this process runs in,
  # derived by grepping `docker ps` for the current hostname.
  def get_host_container_name()
    if @host_container_name
      @host_container_name
    else
      hostname = `cat /etc/hostname`.gsub("\n", '')
      # NOTE(review): the grep pipeline runs twice -- once through
      # execute_command and once through backticks to capture the value.
      # Presumably the first call exists for its logged output; confirm
      # before deduplicating.
      execute_command "docker ps | grep #{hostname} | awk '{print $NF}'"
      @host_container_name = `docker ps | grep #{hostname} | awk '{print $NF}'`.gsub("\n", '')
      # @host_container_name = `docker ps | grep #{hostname} | sed -r "s/.+ ([^\s].+)$/\1/p"`
    end
  end
3005
+
764
3006
  def select_branches(project_names = nil)
765
3007
  output "Deploy feature menu"
766
3008
  if project_names.nil?
@@ -774,6 +3016,14 @@ class Release
774
3016
  end
775
3017
  end
776
3018
  end
3019
+
3020
+ def get_ami_id(stack_name)
3021
+ get_stack_parameters(stack_name).each do |param|
3022
+ if param.parameter_key == "AMIID"
3023
+ return param.parameter_value
3024
+ end
3025
+ end
3026
+ end
777
3027
  end
778
3028
 
779
3029
  def help_content