prima-twig 1.0.11 → 1.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. checksums.yaml +4 -4
  2. data/bin/twig-feature +1 -2252
  3. metadata +2 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: babafc568dfdd6d73386ff89419ca54895537d7f3b1c30cdb6d9a9c9a4f22528
- data.tar.gz: 01e565b508b0b6d762a2c0c969928a18706ba4ce134e2ca3603bb60ec3225974
+ metadata.gz: adf0530a6a9e2c48322ea3ae20939178c3e35cbfcc5f179ecff80947d256eb31
+ data.tar.gz: b5f3e843846dca8365e43340ee1f970d1e08e41bc594f903e0c54e5d198357b6
  SHA512:
- metadata.gz: 5e64d4a8c486dbb184eb1d3d46c89e0a4346d703ff3321f054b4055f4108815afa611af2196e28027bfde64303b05cd70983487fb6eac0cc6d1821dfd51c07c5
- data.tar.gz: fade16080025a145303e74c252721ca3573137d71e5dd6dc745f50b7c7d4c28cf48e755c61a7c5b563bea61571c48645e718efbc7b402ce76b13629d6a082008
+ metadata.gz: 47b86a2ce12a6250b32c81f342460039d739d42ae3fe8b3118b3b71bd4a1992cd5810225ec90667d0710a963d01b319bb81d76d346059324ccfec5008cdc6880
+ data.tar.gz: efdeaf108651ea7bfabcfd010423fdb03baa65ee343763a4ab0f6b2105956902dbadfc8d98784a3d8f59589a0eb42662f76e6db06fe3b558b34ca560c505952c
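
Aside (not part of the diff): the values above are the SHA256/SHA512 digests that RubyGems records in checksums.yaml for the two archives packed inside the .gem file. A minimal Ruby sketch to recompute them, assuming metadata.gz and data.tar.gz have been extracted from the downloaded gem (e.g. tar -xf prima-twig-1.0.12.gem):

  require 'digest'

  # Recompute the checksums stored in checksums.yaml for the extracted archives.
  %w[metadata.gz data.tar.gz].each do |file|
    puts "#{file} SHA256: #{Digest::SHA256.file(file).hexdigest}"
    puts "#{file} SHA512: #{Digest::SHA512.file(file).hexdigest}"
  end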
data/bin/twig-feature CHANGED
@@ -22,18 +22,9 @@ class Release
  exec "twig feature #{ARGV.join ' '}"
  end
  end
- @batch = Aws::Batch::Client.new
- @s3 = Aws::S3::Client.new
- @s3_bucket = 'prima-artifacts'
- @artifact_path = '/tmp/prima-artifact.zip'
- @import_db_task = 'arn:aws:ecs:eu-west-1:001575623345:task-definition/ecs-task-db-restore2-TaskDefinitionDbRestore-4UBHMCZBE5WM:1'
- @cf_role = 'arn:aws:iam::001575623345:role/qainit-service-role-cloudformat-CloudformationRole-18KBZQIS148R9'
  @dns_record_identifier = nil
  @ecs_cluster_name = nil
  @deploy_update = false
- @qainit = false
- @qainit_host_folder = "/var/ci/#{ENV['DRONE_REPO']}/#{ENV['DRONE_BRANCH']}/#{ENV['DRONE_BUILD_NUMBER']}"
- @qainit_folder = "/drone/src/github.com/project/primait/qainit"
  @projects = {
  'prima' => {},
  'urania' => {},
@@ -74,8 +65,6 @@ class Release
  qainit_deploy_shutdown!
  elsif 'update' == args[1]
  qainit_deploy_update!
- elsif 'read' == args[1]
- qainit_read_config! args[2]
  else
  if args[1]
  select_branches(args[1..-1])
@@ -92,19 +81,8 @@ class Release
  end
  when 'deploy'
  abort('Non sei nella cartella di artemide') unless Dir.pwd.match 'artemide$'
- if ['terminate', 'stop', 'shutdown', 'halt', 'destroy'].include? args[1]
- deploy_shutdown!
- elsif 'update' == args[1]
- deploy_update!
- elsif 'lock' == args[1]
+ if 'lock' == args[1]
  deploy_lock!
- else
- if args[1]
- select_branches(args[1])
- else
- select_branches
- end
- deploy_feature!
  end
  when 'aggregator'
  if 'enable' == args[1]
@@ -602,10 +580,6 @@ class Release
  output "Cancello il record DNS utilizzato da Lighthouse"
  delete_lighthouse_dns()
  output "Finito!".green
-
- if @qainit
- qainit_write_output("QA #{ENV["DRONE_BRANCH"]} spento.", 'Indirizzi scritti su ')
- end
  end

  def qainit_write_output(file_message, output_message)
@@ -615,31 +589,6 @@ class Release
  output "#{output_message} #{qa_file_name}".green
  end

- def qainit_read_config!(action)
- projects = ''
-
- File.open('branch_names', 'r') do |file|
- file.each_line do |line|
- projects = JSON.parse(line)
- end
- end
-
- projects.each do |key, project|
- @projects[key] = project
- end
-
- get_s3_config_files
- @qainit = true
- case action
- when 'shutdown'
- output 'Shutting down'.green
- qainit_drone_shutdown!
- else
- output 'Starting standard deploy'.green
- deploy_feature!
- end
- end
-
  def update_drone_yml!()
  drone_yml = File.read('.drone.yml')
  @projects.each do |key, project|
@@ -650,16 +599,6 @@ class Release
  end
  end

- def get_s3_config_files
- # keep this structure so it can easily be run locally as well
- `mkdir -p cloudformation/stacks/task cloudformation/stacks/route53 cloudformation/stacks/asg cloudformation/stacks/elb`
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/task/db.yml', response_target: 'cloudformation/stacks/task/db.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/qa.yml', response_target: 'cloudformation/stacks/route53/qa.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/asg/ecs-asg-allinone.yml', response_target: 'cloudformation/stacks/asg/ecs-asg-allinone.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/ecs-cluster.yml', response_target: 'cloudformation/stacks/ecs-cluster.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/elb/alb-public-qa.yml', response_target: 'cloudformation/stacks/elb/alb-public-qa.yml'})
- end
-
  def get_deploy_id
  if @deploy_id
  @deploy_id
@@ -669,1264 +608,6 @@ class Release
669
608
  end
670
609
  end
671
610
 
672
- def deploy_feature!
673
- `git pull && git submodule init && git submodule update`
674
- @ami_id = get_ami_id("ecs-fleet-allinone-staging")
675
- deploy_id = get_deploy_id
676
- stack_name_alb = 'ecs-alb-http-public-qa-' + deploy_id[0..5]
677
- stack_name_alb_ws = 'ecs-alb-ws-public-qa-' + deploy_id[0..5]
678
- unless @qainit
679
- @projects.each_key do |project_key|
680
- if @projects[project_key]['revision']
681
- git_checkout_version(project_key, @projects[project_key]['revision'])
682
- end
683
- end
684
- end
685
- @dns_record_identifier = deploy_id
686
- @git_branch = ENV['DRONE_BRANCH']
687
- hostname_pattern_priority = hostname_pattern_priority()
688
- tags = [
689
- {
690
- key: "qainit",
691
- value: @git_branch.gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '')
692
- },
693
- {
694
- key: "hostname_pattern_priority",
695
- value: hostname_pattern_priority
696
- }
697
- ]
698
- @projects.each do |key, value|
699
- case key.to_s
700
- when 'crash'
701
- tags << { key: 'crash', value: @projects['crash']['name'].gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '') } if deploy_crash?
702
- when 'starsky', 'hutch'
703
- tags << { key: key.to_s, value: @projects[key.to_s]['name'].gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '') } if deploy_starsky_hutch?
704
- else
705
- tags << { key: key, value: value['name'].gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '') }
706
- end
707
- end
708
-
709
- cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
710
-
711
- if stack_exists?(cluster_stack_name)
712
- tags = get_stack_tags(cluster_stack_name)
713
- hostname_pattern_priority = tags.detect do |tag|
714
- tag.key == 'hostname_pattern_priority'
715
- end.value
716
- end
717
-
718
- create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
719
- wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
720
-
721
- create_alb_stack(stack_name_alb, "http", deploy_id) unless stack_exists?(stack_name_alb)
722
- create_alb_stack(stack_name_alb_ws, "websocket", deploy_id) unless stack_exists?(stack_name_alb_ws)
723
-
724
- resp = describe_stack_resource(cluster_stack_name, 'ECSCluster')
725
- @ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
726
-
727
- asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
728
- create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
729
-
730
- stack_name_db = "ecs-task-db-qa-#{deploy_id}"
731
- stack_body = IO.read('cloudformation/stacks/task/db.yml')
732
- parameters = [
733
- {
734
- parameter_key: "Environment",
735
- parameter_value: "qa"
736
- },
737
- {
738
- parameter_key: "ECSClusterName",
739
- parameter_value: @ecs_cluster_name
740
- }
741
- ]
742
- create_stack(stack_name_db, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_db) # asynchronous creation of the db stack (~4 min)
743
-
744
- output "check pyxis \n".yellow
745
-
746
- create_pyxis_artifact(@projects["pyxis-npm"]['revision'], deploy_id) unless @projects["pyxis-npm"].nil? # must be built before prima's artifact, so the version is available
747
- create_prima_artifact(@projects["prima"]['revision'], @projects["prima"]['name'], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"]['revision']}.tar.gz")
748
- # prima's artifact is always built (environment endpoints are compiled into the js) and takes well over 4 minutes
749
- wait_for_stack_ready(stack_name_db) unless stack_ready?(stack_name_db) # should be instantaneous
750
- db_task = ''
751
- db_task = import_dbs(ec2_ip_address(asg_stack_name)) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # asynchronous data import
752
-
753
- create_crash_artifact(@projects['crash']['revision'], deploy_id) unless !deploy_crash? || artifact_exists?('prima-artifacts-encrypted', "microservices/crash/#{@projects['crash']['revision']}-qa.tar.gz")
754
- create_urania_artifact(@projects["urania"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/urania/#{@projects["urania"]['revision']}-qa.tar.gz")
755
- create_roger_artifact(@projects["roger"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/roger/#{@projects["roger"]['revision']}-qa.tar.gz")
756
- create_ermes_artifact(@projects["ermes"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/ermes/#{@projects["ermes"]['revision']}-qa.tar.gz")
757
- create_bburago_artifact(@projects["bburago"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/bburago/#{@projects["bburago"]['revision']}-qa.tar.gz")
758
- create_hal9000_artifact(@projects["hal9000"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/hal9000/#{@projects["hal9000"]['revision']}-qa.tar.gz")
759
- create_rachele_artifact(@projects["rachele"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rachele/#{@projects["rachele"]['revision']}-qa.tar.gz")
760
- create_fidaty_artifact(@projects["fidaty"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/fidaty/#{@projects["fidaty"]['revision']}-qa.tar.gz")
761
- create_peano_artifact(@projects["peano"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/peano/#{@projects["peano"]['revision']}-qa.tar.gz")
762
- create_rogoreport_artifact(@projects["rogoreport"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rogoreport/rogoreport-#{@projects["rogoreport"]['revision']}-qa.tar.gz")
763
- create_assange_artifact(@projects["assange"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/assange/#{@projects["assange"]['revision']}-qa.tar.gz")
764
- create_borat_artifact(@projects["borat"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/borat/#{@projects["borat"]['revision']}-qa.tar.gz")
765
- create_activia_artifact(@projects["activia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/activia/#{@projects["activia"]['revision']}-qa.tar.gz")
766
- create_leftorium_artifact(@projects["leftorium"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/leftorium/#{@projects["leftorium"]['revision']}-qa.tar.gz")
767
- create_skynet_artifact(@projects["skynet"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/skynet/#{@projects["skynet"]['revision']}-qa.tar.gz")
768
- create_maia_artifact(@projects["maia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/maia/#{@projects["maia"]['revision']}-qa.tar.gz")
769
- create_starsky_artifact(@projects["starsky"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/starsky/#{@projects["starsky"]['revision']}-qa.tar.gz")
770
- create_hutch_artifact(@projects["hutch"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/hutch/#{@projects["hutch"]['revision']}-#{@dns_record_identifier[0..7]}-qa.tar.gz")
771
-
772
-
773
- wait_for_db_import(db_task) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # should be instantaneous
774
-
775
- import_redis_crash(ec2_ip_address(asg_stack_name)) if deploy_crash? && !stack_exists?("ecs-task-web-qa-#{deploy_id}")
776
-
777
- wait_for_stack_ready(stack_name_alb) unless stack_ready?(stack_name_alb)
778
- wait_for_stack_ready(stack_name_alb_ws) unless stack_ready?(stack_name_alb_ws)
779
-
780
- stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
781
- stack_body = IO.read('cloudformation/stacks/route53/qa.yml')
782
- parameters = [
783
- {
784
- parameter_key: "DnsRecordIdentifier",
785
- parameter_value: @dns_record_identifier
786
- },
787
- {
788
- parameter_key: "PrimaElbHostname",
789
- parameter_value: get_alb_host(stack_name_alb)
790
- },
791
- {
792
- parameter_key: "UraniaIp",
793
- parameter_value: ec2_ip_address(asg_stack_name)
794
- },
795
- {
796
- parameter_key: "BburagoIp",
797
- parameter_value: ec2_ip_address(asg_stack_name)
798
- },
799
- {
800
- parameter_key: "Hal9000Ip",
801
- parameter_value: ec2_ip_address(asg_stack_name)
802
- },
803
- {
804
- parameter_key: "FidatyIp",
805
- parameter_value: ec2_ip_address(asg_stack_name)
806
- },
807
- {
808
- parameter_key: "PeanoIp",
809
- parameter_value: ec2_ip_address(asg_stack_name)
810
- },
811
- {
812
- parameter_key: "ErmesIp",
813
- parameter_value: ec2_ip_address(asg_stack_name)
814
- },
815
- {
816
- parameter_key: "ActiviaIp",
817
- parameter_value: ec2_ip_address(asg_stack_name)
818
- },
819
- {
820
- parameter_key: "SkynetIp",
821
- parameter_value: ec2_ip_address(asg_stack_name)
822
- },
823
- {
824
- parameter_key: "RogerIp",
825
- parameter_value: ec2_ip_address(asg_stack_name)
826
- },
827
- {
828
- parameter_key: "LeftoriumIp",
829
- parameter_value: ec2_ip_address(asg_stack_name)
830
- },
831
- {
832
- parameter_key: "RacheleIp",
833
- parameter_value: ec2_ip_address(asg_stack_name)
834
- },
835
- {
836
- parameter_key: "RedisIp",
837
- parameter_value: ec2_ip_address(asg_stack_name)
838
- },
839
- {
840
- parameter_key: "AssangeElbHostname",
841
- parameter_value: get_alb_host(stack_name_alb)
842
- },
843
- {
844
- parameter_key: "BoratElbHostname",
845
- parameter_value: get_alb_host(stack_name_alb_ws)
846
- },
847
- {
848
- parameter_key: 'CrashElbHostname',
849
- parameter_value: get_alb_host(stack_name_alb_ws)
850
- },
851
- {
852
- parameter_key: 'StarskyElbHostname',
853
- parameter_value: get_alb_host(stack_name_alb)
854
- },
855
- {
856
- parameter_key: 'HutchElbHostname',
857
- parameter_value: get_alb_host(stack_name_alb)
858
- },
859
- {
860
- parameter_key: 'MaiaElbHostname',
861
- parameter_value: get_alb_host(stack_name_alb)
862
- }
863
- ]
864
-
865
- create_stack(stack_name_route53, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_route53)
866
- wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
867
-
868
- stack_name_skynet = "ecs-task-skynet-qa-#{deploy_id}"
869
- git_checkout_version('skynet', @projects["skynet"]['revision'])
870
- stack_body = File.read('projects/skynet/deploy/task.yml')
871
- parameters = [
872
- {
873
- parameter_key: "Environment",
874
- parameter_value: "qa"
875
- },
876
- {
877
- parameter_key: "ReleaseVersion",
878
- parameter_value: @projects["skynet"]['revision']
879
- },
880
- {
881
- parameter_key: "TaskDesiredCount",
882
- parameter_value: "1"
883
- },
884
- {
885
- parameter_key: "ECSClusterName",
886
- parameter_value: @ecs_cluster_name
887
- },
888
- {
889
- parameter_key: "HostnamePattern",
890
- parameter_value: "skynet-#{@dns_record_identifier}.qa.colaster.com"
891
- },
892
- {
893
- parameter_key: "HostnamePatternPriority",
894
- parameter_value: hostname_pattern_priority
895
- }
896
- ]
897
- if stack_exists?(stack_name_skynet)
898
- cur_version = get_currently_deployed_version(stack_name_skynet)
899
- update_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["skynet"]['revision'])
900
- else
901
- create_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role)
902
- end
903
-
904
- stack_name_urania = "ecs-task-urania-qa-#{deploy_id}"
905
- git_checkout_version('urania', @projects["urania"]['revision'])
906
- stack_body = File.read('projects/urania/deploy/task.yml')
907
- parameters = [
908
- {
909
- parameter_key: "Environment",
910
- parameter_value: "qa"
911
- },
912
- {
913
- parameter_key: "ReleaseVersion",
914
- parameter_value: @projects["urania"]['revision']
915
- },
916
- {
917
- parameter_key: "TaskDesiredCount",
918
- parameter_value: "1"
919
- },
920
- {
921
- parameter_key: "ECSClusterName",
922
- parameter_value: @ecs_cluster_name
923
- },
924
- {
925
- parameter_key: "HostnamePattern",
926
- parameter_value: "urania-#{@dns_record_identifier}.qa.colaster.com"
927
- },
928
- {
929
- parameter_key: "HostnamePatternPriority",
930
- parameter_value: hostname_pattern_priority
931
- }
932
- ]
933
- if stack_exists?(stack_name_urania)
934
- cur_version = get_currently_deployed_version(stack_name_urania)
935
- update_stack(stack_name_urania, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["urania"]['revision'])
936
- else
937
- create_stack(stack_name_urania, stack_body, parameters, tags, @cf_role)
938
- end
939
-
940
- stack_name_ermes = "ecs-task-ermes-qa-#{deploy_id}"
941
- git_checkout_version('ermes', @projects["ermes"]['revision'])
942
- stack_body = File.read('projects/ermes/deploy/task.yml')
943
- parameters = [
944
- {
945
- parameter_key: "Environment",
946
- parameter_value: "qa"
947
- },
948
- {
949
- parameter_key: "ReleaseVersion",
950
- parameter_value: "#{@projects['ermes']['revision']}"
951
- },
952
- {
953
- parameter_key: "TaskDesiredCount",
954
- parameter_value: "1"
955
- },
956
- {
957
- parameter_key: "ECSClusterName",
958
- parameter_value: @ecs_cluster_name
959
- },
960
- {
961
- parameter_key: "HostnamePattern",
962
- parameter_value: "ermes-#{@dns_record_identifier}.qa.colaster.com"
963
- },
964
- {
965
- parameter_key: "HostnamePatternPriority",
966
- parameter_value: hostname_pattern_priority
967
- },
968
- {
969
- parameter_key: "WebHost",
970
- parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
971
- },
972
- {
973
- parameter_key: "PeanoHost",
974
- parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
975
- }
976
- ]
977
- if stack_exists?(stack_name_ermes)
978
- cur_version = get_currently_deployed_version(stack_name_ermes)
979
- update_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["ermes"]['revision'])
980
- else
981
- create_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role)
982
- end
983
-
984
- stack_name_bburago = "ecs-task-bburago-qa-#{deploy_id}"
985
- git_checkout_version('bburago', @projects["bburago"]['revision'])
986
- stack_body = File.read('projects/bburago/deploy/task.yml')
987
- parameters = [
988
- {
989
- parameter_key: "Environment",
990
- parameter_value: "qa"
991
- },
992
- {
993
- parameter_key: "ReleaseVersion",
994
- parameter_value: @projects["bburago"]['revision']
995
- },
996
- {
997
- parameter_key: "ECSClusterName",
998
- parameter_value: @ecs_cluster_name
999
- },
1000
- {
1001
- parameter_key: "TaskDesiredCount",
1002
- parameter_value: "1"
1003
- },
1004
- {
1005
- parameter_key: "HostnamePattern",
1006
- parameter_value: "bburago-#{@dns_record_identifier}.qa.colaster.com"
1007
- },
1008
- {
1009
- parameter_key: "HostnamePatternPriority",
1010
- parameter_value: hostname_pattern_priority
1011
- }
1012
- ]
1013
- if stack_exists?(stack_name_bburago)
1014
- cur_version = get_currently_deployed_version(stack_name_bburago)
1015
- update_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["bburago"]['revision'])
1016
- else
1017
- create_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role)
1018
- end
1019
-
1020
- stack_name_hal9000 = "ecs-task-hal9000-qa-#{deploy_id}"
1021
- git_checkout_version('hal9000', @projects["hal9000"]['revision'])
1022
- stack_body = File.read('projects/hal9000/deploy/task.yml')
1023
- parameters = [
1024
- {
1025
- parameter_key: "Environment",
1026
- parameter_value: "qa"
1027
- },
1028
- {
1029
- parameter_key: "ReleaseVersion",
1030
- parameter_value: @projects["hal9000"]['revision']
1031
- },
1032
- {
1033
- parameter_key: "ECSClusterName",
1034
- parameter_value: @ecs_cluster_name
1035
- },
1036
- {
1037
- parameter_key: "TaskDesiredCount",
1038
- parameter_value: "1"
1039
- },
1040
- {
1041
- parameter_key: "HostnamePattern",
1042
- parameter_value: "hal9000-#{@dns_record_identifier}.qa.colaster.com"
1043
- },
1044
- {
1045
- parameter_key: "HostnamePatternPriority",
1046
- parameter_value: hostname_pattern_priority
1047
- }
1048
- ]
1049
- if stack_exists?(stack_name_hal9000)
1050
- cur_version = get_currently_deployed_version(stack_name_hal9000)
1051
- update_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hal9000"]['revision'])
1052
- else
1053
- create_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role)
1054
- end
1055
-
1056
- stack_name_fidaty = "ecs-task-fidaty-qa-#{deploy_id}"
1057
- git_checkout_version('fidaty', @projects["fidaty"]['revision'])
1058
- stack_body = File.read('projects/fidaty/deploy/task.yml')
1059
- parameters = [
1060
- {
1061
- parameter_key: "Environment",
1062
- parameter_value: "qa"
1063
- },
1064
- {
1065
- parameter_key: "ReleaseVersion",
1066
- parameter_value: "#{@projects["fidaty"]['revision']}"
1067
- },
1068
- {
1069
- parameter_key: "ECSClusterName",
1070
- parameter_value: @ecs_cluster_name
1071
- },
1072
- {
1073
- parameter_key: "TaskDesiredCount",
1074
- parameter_value: "1"
1075
- },
1076
- {
1077
- parameter_key: "HostnamePattern",
1078
- parameter_value: "fidaty-#{@dns_record_identifier}.qa.colaster.com"
1079
- },
1080
- {
1081
- parameter_key: "HostnamePatternPriority",
1082
- parameter_value: hostname_pattern_priority
1083
- },
1084
- {
1085
- parameter_key: "PeanoHost",
1086
- parameter_value: get_route53_hostname("ecs-task-peano-qa-notneeded")
1087
- }
1088
- ]
1089
- if stack_exists?(stack_name_fidaty)
1090
- cur_version = get_currently_deployed_version(stack_name_fidaty)
1091
- update_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["fidaty"]['revision'])
1092
- else
1093
- create_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role)
1094
- end
1095
-
1096
- stack_name_peano = "ecs-task-peano-qa-#{deploy_id}"
1097
- git_checkout_version('peano', @projects["peano"]['revision'])
1098
- stack_body = File.read('projects/peano/deploy/task.yml')
1099
- parameters = [
1100
- {
1101
- parameter_key: "Environment",
1102
- parameter_value: "qa"
1103
- },
1104
- {
1105
- parameter_key: "ReleaseVersion",
1106
- parameter_value: "#{@projects['peano']['revision']}"
1107
- },
1108
- {
1109
- parameter_key: "ECSClusterName",
1110
- parameter_value: @ecs_cluster_name
1111
- },
1112
- {
1113
- parameter_key: "TaskDesiredCount",
1114
- parameter_value: "1"
1115
- },
1116
- {
1117
- parameter_key: "HostnamePattern",
1118
- parameter_value: "peano-#{@dns_record_identifier}.qa.colaster.com"
1119
- },
1120
- {
1121
- parameter_key: "HostnamePatternPriority",
1122
- parameter_value: hostname_pattern_priority
1123
- },
1124
- {
1125
- parameter_key: "WebHost",
1126
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1127
- },
1128
- {
1129
- parameter_key: "AssangeHost",
1130
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1131
- }
1132
- ]
1133
- if stack_exists?(stack_name_peano)
1134
- cur_version = get_currently_deployed_version(stack_name_peano)
1135
- update_stack(stack_name_peano, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["peano"]['revision'])
1136
- else
1137
- create_stack(stack_name_peano, stack_body, parameters, tags, @cf_role)
1138
- end
1139
-
1140
- stack_name_rogoreport = "ecs-task-rogoreport-qa-#{deploy_id}"
1141
- git_checkout_version('rogoreport', @projects["rogoreport"]['revision'])
1142
- stack_body = IO.read('projects/rogoreport/deploy/task.yml')
1143
- parameters = [
1144
- {
1145
- parameter_key: "Environment",
1146
- parameter_value: "qa"
1147
- },
1148
- {
1149
- parameter_key: "ReleaseVersion",
1150
- parameter_value: "#{@projects["rogoreport"]['revision']}"
1151
- },
1152
- {
1153
- parameter_key: "ReleaseName",
1154
- parameter_value: "rogoreport"
1155
- },
1156
- {
1157
- parameter_key: "ECSClusterName",
1158
- parameter_value: @ecs_cluster_name
1159
- }
1160
- ]
1161
- if stack_exists?(stack_name_rogoreport)
1162
- cur_version = get_currently_deployed_version(stack_name_rogoreport)
1163
- update_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["rogoreport"]['revision'])
1164
- else
1165
- create_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role)
1166
- end
1167
-
1168
- stack_name_assange = "ecs-task-assange-qa-#{deploy_id}"
1169
- git_checkout_version('assange', @projects["assange"]['revision'])
1170
- stack_body = IO.read('projects/assange/deploy/task.yml')
1171
- parameters = [
1172
- {
1173
- parameter_key: "Environment",
1174
- parameter_value: "qa"
1175
- },
1176
- {
1177
- parameter_key: "ReleaseVersion",
1178
- parameter_value: "#{@projects["assange"]['revision']}"
1179
- },
1180
- {
1181
- parameter_key: "ECSClusterName",
1182
- parameter_value: @ecs_cluster_name
1183
- },
1184
- {
1185
- parameter_key: "TaskDesiredCount",
1186
- parameter_value: "1"
1187
- },
1188
- {
1189
- parameter_key: "ALBShortName",
1190
- parameter_value: "assange-qa-#{deploy_id}"[0..27]
1191
- },
1192
- {
1193
- parameter_key: "HostnamePattern",
1194
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1195
- },
1196
- {
1197
- parameter_key: "HostnamePatternPriority",
1198
- parameter_value: (hostname_pattern_priority.to_i + 20).to_s
1199
- },
1200
- {
1201
- parameter_key: "EnvHash",
1202
- parameter_value: deploy_id
1203
- },
1204
- {
1205
- parameter_key: "WebHost",
1206
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1207
- },
1208
- {
1209
- parameter_key: "AssangeHost",
1210
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1211
- }
1212
- ]
1213
- if stack_exists?(stack_name_assange)
1214
- cur_version = get_currently_deployed_version(stack_name_assange)
1215
- update_stack(stack_name_assange, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["assange"]['revision'])
1216
- else
1217
- create_stack(stack_name_assange, stack_body, parameters, tags, @cf_role)
1218
- end
1219
-
1220
- stack_name_leftorium = "ecs-task-leftorium-qa-#{deploy_id}"
1221
- git_checkout_version('leftorium', @projects["leftorium"]['revision'])
1222
- stack_body = File.read('projects/leftorium/deploy/task.yml')
1223
- parameters = [
1224
- {
1225
- parameter_key: "Environment",
1226
- parameter_value: "qa"
1227
- },
1228
- {
1229
- parameter_key: "ReleaseVersion",
1230
- parameter_value: "#{@projects["leftorium"]['revision']}"
1231
- },
1232
- {
1233
- parameter_key: "ECSClusterName",
1234
- parameter_value: @ecs_cluster_name
1235
- },
1236
- {
1237
- parameter_key: "TaskDesiredCount",
1238
- parameter_value: "1"
1239
- },
1240
- {
1241
- parameter_key: "HostnamePattern",
1242
- parameter_value: "leftorium-#{@dns_record_identifier}.qa.colaster.com"
1243
- },
1244
- {
1245
- parameter_key: "HostnamePatternPriority",
1246
- parameter_value: hostname_pattern_priority
1247
- }
1248
- ]
1249
- if stack_exists?(stack_name_leftorium)
1250
- cur_version = get_currently_deployed_version(stack_name_leftorium)
1251
- update_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["leftorium"]['revision'])
1252
- else
1253
- create_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role)
1254
- end
1255
-
1256
- stack_name_rachele = "ecs-task-rachele-qa-#{deploy_id}"
1257
- git_checkout_version('rachele', @projects["rachele"]['revision'])
1258
- stack_body = File.read('projects/rachele/deploy/task.yml')
1259
- parameters = [
1260
- {
1261
- parameter_key: "Environment",
1262
- parameter_value: "qa"
1263
- },
1264
- {
1265
- parameter_key: "ReleaseVersion",
1266
- parameter_value: "#{@projects["rachele"]['revision']}"
1267
- },
1268
- {
1269
- parameter_key: "ECSClusterName",
1270
- parameter_value: @ecs_cluster_name
1271
- },
1272
- {
1273
- parameter_key: "TaskDesiredCount",
1274
- parameter_value: "1"
1275
- },
1276
- {
1277
- parameter_key: "WebHost",
1278
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1279
- },
1280
- {
1281
- parameter_key: "HostnamePattern",
1282
- parameter_value: "rachele-#{@dns_record_identifier}.qa.colaster.com"
1283
- },
1284
- {
1285
- parameter_key: "HostnamePatternPriority",
1286
- parameter_value: hostname_pattern_priority
1287
- }
1288
- ]
1289
- if stack_exists?(stack_name_rachele)
1290
- cur_version = get_currently_deployed_version(stack_name_rachele)
1291
- unless cur_version.include?(@projects["rachele"]['revision'])
1292
- delete_stack(stack_name_rachele)
1293
- wait_for_stack_removal(stack_name_rachele)
1294
- create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1295
- end
1296
- else
1297
- create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1298
- end
1299
-
1300
- stack_name_borat = "ecs-task-borat-qa-#{deploy_id}"
1301
- git_checkout_version('borat', @projects["borat"]['revision'])
1302
- stack_body = IO.read('projects/borat/deploy/task.yml')
1303
- parameters = [
1304
- {
1305
- parameter_key: "Environment",
1306
- parameter_value: "qa"
1307
- },
1308
- {
1309
- parameter_key: "ReleaseVersion",
1310
- parameter_value: "#{@projects["borat"]['revision']}"
1311
- },
1312
- {
1313
- parameter_key: "ECSClusterName",
1314
- parameter_value: @ecs_cluster_name
1315
- },
1316
- {
1317
- parameter_key: "TaskDesiredCount",
1318
- parameter_value: "1"
1319
- },
1320
- {
1321
- parameter_key: "ALBShortName",
1322
- parameter_value: "borat-qa-#{deploy_id}"[0..27]
1323
- },
1324
- {
1325
- parameter_key: "HostnamePattern",
1326
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1327
- },
1328
- {
1329
- parameter_key: "HostnamePatternPriority",
1330
- parameter_value: (hostname_pattern_priority.to_i + 30).to_s
1331
- },
1332
- {
1333
- parameter_key: "EnvHash",
1334
- parameter_value: deploy_id
1335
- },
1336
- {
1337
- parameter_key: "WsEndpoint",
1338
- parameter_value: "wss://backoffice-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1339
- },
1340
- {
1341
- parameter_key: "GraphqlEndpoint",
1342
- parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql"
1343
- },
1344
- {
1345
- parameter_key: "GraphqlInsuranceEndpoint",
1346
- parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql/insurance"
1347
- },
1348
- {
1349
- parameter_key: "AuthEndpoint",
1350
- parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/auth"
1351
- },
1352
- {
1353
- parameter_key: "FrontendEndpoint",
1354
- parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1355
- }
1356
- ]
1357
- if stack_exists?(stack_name_borat)
1358
- cur_version = get_currently_deployed_version(stack_name_borat)
1359
- update_stack(stack_name_borat, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["borat"]['revision'])
1360
- else
1361
- create_stack(stack_name_borat, stack_body, parameters, tags, @cf_role)
1362
- end
1363
-
1364
- if deploy_crash?
1365
- git_checkout_version('crash', @projects['crash']['revision'])
1366
- stack_name_crash = "ecs-task-crash-qa-#{deploy_id}"
1367
- stack_body = IO.read('projects/crash/deploy/task.yml')
1368
- parameters = [
1369
- {
1370
- parameter_key: 'Environment',
1371
- parameter_value: 'qa'
1372
- },
1373
- {
1374
- parameter_key: 'ReleaseVersion',
1375
- parameter_value: "#{@projects['crash']['revision']}"
1376
- },
1377
- {
1378
- parameter_key: 'TaskDesiredCount',
1379
- parameter_value: '1'
1380
- },
1381
- {
1382
- parameter_key: 'ECSClusterName',
1383
- parameter_value: @ecs_cluster_name
1384
- },
1385
- {
1386
- parameter_key: 'ALBShortName',
1387
- parameter_value: "crash-qa-#{deploy_id}"[0..27]
1388
- },
1389
- {
1390
- parameter_key: 'HostnamePattern',
1391
- parameter_value: "crash-#{@dns_record_identifier}.qa.colaster.com"
1392
- },
1393
- {
1394
- parameter_key: 'HostnamePatternPriority',
1395
- parameter_value: (hostname_pattern_priority.to_i + 10).to_s
1396
- },
1397
- {
1398
- parameter_key: "EnvHash",
1399
- parameter_value: deploy_id
1400
- },
1401
- {
1402
- parameter_key: "WsEndpoint",
1403
- parameter_value: "wss://crash-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1404
- },
1405
- {
1406
- parameter_key: "GraphqlEndpoint",
1407
- parameter_value: "https://crash-#{@dns_record_identifier}.qa.colaster.com/graphql"
1408
- },
1409
- {
1410
- parameter_key: "AuthDomain",
1411
- parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1412
- },
1413
- ]
1414
- if stack_exists?(stack_name_crash)
1415
- cur_version = get_currently_deployed_version(stack_name_crash)
1416
- update_stack(stack_name_crash, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["crash"]['revision'])
1417
- else
1418
- create_stack(stack_name_crash, stack_body, parameters, tags, @cf_role)
1419
- end
1420
- end
1421
-
1422
- if deploy_starsky_hutch?
1423
- stack_name_starsky = "ecs-task-starsky-qa-#{deploy_id}"
1424
- git_checkout_version('starsky', @projects["starsky"]['revision'])
1425
- stack_body = IO.read('projects/starsky/deploy/task.yml')
1426
- parameters = [
1427
- {
1428
- parameter_key: "Environment",
1429
- parameter_value: "qa"
1430
- },
1431
- {
1432
- parameter_key: "ReleaseVersion",
1433
- parameter_value: "#{@projects["starsky"]['revision']}"
1434
- },
1435
- {
1436
- parameter_key: "TaskDesiredCount",
1437
- parameter_value: "1"
1438
- },
1439
- {
1440
- parameter_key: "ECSClusterName",
1441
- parameter_value: @ecs_cluster_name
1442
- },
1443
- {
1444
- parameter_key: "ALBShortName",
1445
- parameter_value: "starsky-qa-#{deploy_id}"[0..27]
1446
- },
1447
- {
1448
- parameter_key: "EnvHash",
1449
- parameter_value: deploy_id
1450
- },
1451
- {
1452
- parameter_key: "HostnamePattern",
1453
- parameter_value: "starsky-#{@dns_record_identifier}.qa.colaster.com"
1454
- },
1455
- {
1456
- parameter_key: "HostnamePatternPriority",
1457
- parameter_value: (hostname_pattern_priority.to_i + 74).to_s
1458
- }
1459
- ]
1460
- if stack_exists?(stack_name_starsky)
1461
- cur_version = get_currently_deployed_version(stack_name_starsky)
1462
- unless cur_version.include?(@projects["starsky"]['revision'])
1463
- delete_stack(stack_name_starsky)
1464
- wait_for_stack_removal(stack_name_starsky)
1465
- create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
1466
- end
1467
- else
1468
- create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
1469
- end
1470
- end
1471
-
1472
- stack_name_activia = "ecs-task-activia-qa-#{deploy_id}"
1473
- git_checkout_version('activia', @projects["activia"]['revision'])
1474
- stack_body = File.read('projects/activia/deploy/task.yml')
1475
- parameters = [
1476
- {
1477
- parameter_key: "Environment",
1478
- parameter_value: "qa"
1479
- },
1480
- {
1481
- parameter_key: "ReleaseVersion",
1482
- parameter_value: "#{@projects["activia"]['revision']}"
1483
- },
1484
- {
1485
- parameter_key: "ECSClusterName",
1486
- parameter_value: @ecs_cluster_name
1487
- },
1488
- {
1489
- parameter_key: "TaskDesiredCount",
1490
- parameter_value: "1"
1491
- },
1492
- {
1493
- parameter_key: "HostnamePattern",
1494
- parameter_value: "activia-#{@dns_record_identifier}.qa.colaster.com"
1495
- },
1496
- {
1497
- parameter_key: "HostnamePatternPriority",
1498
- parameter_value: hostname_pattern_priority
1499
- },
1500
- {
1501
- parameter_key: "WebHost",
1502
- parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
1503
- },
1504
- {
1505
- parameter_key: "PeanoHost",
1506
- parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
1507
- }
1508
- ]
1509
- if stack_exists?(stack_name_activia)
1510
- cur_version = get_currently_deployed_version(stack_name_activia)
1511
- update_stack(stack_name_activia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["activia"]['revision'])
1512
- else
1513
- create_stack(stack_name_activia, stack_body, parameters, tags, @cf_role)
1514
- end
1515
-
1516
- # Waiting for prima healthcheck dependencies
1517
- wait_for_stack_ready(stack_name_skynet) unless stack_ready?(stack_name_skynet)
1518
- wait_for_stack_ready(stack_name_urania) unless stack_ready?(stack_name_urania)
1519
- wait_for_stack_ready(stack_name_bburago) unless stack_ready?(stack_name_bburago)
1520
- wait_for_stack_ready(stack_name_hal9000) unless stack_ready?(stack_name_hal9000)
1521
- wait_for_stack_ready(stack_name_assange) unless stack_ready?(stack_name_assange)
1522
- wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1523
- wait_for_stack_ready(stack_name_leftorium) unless stack_ready?(stack_name_leftorium)
1524
- wait_for_stack_ready(stack_name_rachele) unless stack_ready?(stack_name_rachele)
1525
-
1526
- stack_name_web = "ecs-task-web-qa-#{deploy_id}"
1527
- git_checkout_version('prima', @projects["prima"]['revision'])
1528
- stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
1529
- parameters = [
1530
- {
1531
- parameter_key: "Environment",
1532
- parameter_value: "qa"
1533
- },
1534
- {
1535
- parameter_key: "ReleaseVersion",
1536
- parameter_value: "#{@projects["prima"]['revision']}"
1537
- },
1538
- {
1539
- parameter_key: "TaskDesiredCount",
1540
- parameter_value: "1"
1541
- },
1542
- {
1543
- parameter_key: "ECSClusterName",
1544
- parameter_value: @ecs_cluster_name
1545
- },
1546
- {
1547
- parameter_key: "ALBShortName",
1548
- parameter_value: "web-qa-#{deploy_id}"[0..27]
1549
- },
1550
- {
1551
- parameter_key: "WebQaBaseHostname",
1552
- parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
1553
- },
1554
- {
1555
- parameter_key: "HostnamePatternPriority",
1556
- parameter_value: hostname_pattern_priority
1557
- },
1558
- {
1559
- parameter_key: "HostnamePatternAggregatorPriority",
1560
- parameter_value: (hostname_pattern_priority.to_i + 1).to_s
1561
- },
1562
- {
1563
- parameter_key: "EnvHash",
1564
- parameter_value: deploy_id
1565
- },
1566
- {
1567
- parameter_key: "AssangeHostname",
1568
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1569
- },
1570
- {
1571
- parameter_key: "BackofficeHostname",
1572
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1573
- },
1574
- {
1575
- parameter_key: "WebHostname",
1576
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1577
- },
1578
- {
1579
- parameter_key: "FePrimaDomain",
1580
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1581
- },
1582
- {
1583
- parameter_key: "HostnamePattern",
1584
- parameter_value: "www-#{@dns_record_identifier}.*"
1585
- }
1586
- ]
1587
- if stack_exists?(stack_name_web)
1588
- cur_version = get_currently_deployed_version(stack_name_web)
1589
- update_stack(stack_name_web, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1590
- else
1591
- create_stack(stack_name_web, stack_body, parameters, tags, @cf_role)
1592
- end
1593
-
1594
- stack_name_consumer = "ecs-task-consumer-qa-#{deploy_id}"
1595
- git_checkout_version('prima', @projects["prima"]['revision'])
1596
- stack_body = IO.read('projects/prima/app/cloudformation/tasks/consumer.yml')
1597
- parameters = [
1598
- {
1599
- parameter_key: "Environment",
1600
- parameter_value: "qa"
1601
- },
1602
- {
1603
- parameter_key: "ReleaseVersion",
1604
- parameter_value: "#{@projects["prima"]['revision']}"
1605
- },
1606
- {
1607
- parameter_key: "ECSClusterName",
1608
- parameter_value: @ecs_cluster_name
1609
- },
1610
- {
1611
- parameter_key: "NginxHttpHost",
1612
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1613
- },
1614
- {
1615
- parameter_key: "AssangeHostname",
1616
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1617
- },
1618
- {
1619
- parameter_key: "BackofficeHostname",
1620
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1621
- },
1622
- {
1623
- parameter_key: "WebHostname",
1624
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1625
- },
1626
- {
1627
- parameter_key: "FePrimaDomain",
1628
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1629
- },
1630
- {
1631
- parameter_key: "HostnamePattern",
1632
- parameter_value: "consumer-#{@dns_record_identifier}.qa.colaster.com"
1633
- },
1634
- {
1635
- parameter_key: "WebQaBaseHostname",
1636
- parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
1637
- }
1638
- ]
1639
- if stack_exists?(stack_name_consumer)
1640
- cur_version = get_currently_deployed_version(stack_name_consumer)
1641
- update_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1642
- else
1643
- create_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role)
1644
- end
1645
-
1646
- stack_name_roger = "ecs-task-roger-qa-#{deploy_id}"
1647
- git_checkout_version('roger', @projects["roger"]['revision'])
1648
- stack_body = File.read('projects/roger/deploy/task.yml')
1649
- parameters = [
1650
- {
1651
- parameter_key: "Environment",
1652
- parameter_value: "qa"
1653
- },
1654
- {
1655
- parameter_key: "ReleaseVersion",
1656
- parameter_value: @projects["roger"]['revision']
1657
- },
1658
- {
1659
- parameter_key: "TaskDesiredCount",
1660
- parameter_value: "1"
1661
- },
1662
- {
1663
- parameter_key: "ECSClusterName",
1664
- parameter_value: @ecs_cluster_name
1665
- },
1666
- {
1667
- parameter_key: "HostnamePattern",
1668
- parameter_value: "roger-#{@dns_record_identifier}.qa.colaster.com"
1669
- },
1670
- {
1671
- parameter_key: "HostnamePatternPriority",
1672
- parameter_value: hostname_pattern_priority
1673
- }
1674
- ]
1675
- if stack_exists?(stack_name_roger)
1676
- cur_version = get_currently_deployed_version(stack_name_roger)
1677
- update_stack(stack_name_roger, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["roger"]['revision'])
1678
- else
1679
- create_stack(stack_name_roger, stack_body, parameters, tags, @cf_role)
1680
- end
1681
-
1682
-
1683
- if deploy_starsky_hutch?
1684
- wait_for_stack_ready(stack_name_starsky) unless stack_ready?(stack_name_starsky) unless !deploy_starsky_hutch?
1685
-
1686
- stack_name_hutch = "ecs-task-hutch-qa-#{deploy_id}"
1687
- git_checkout_version('hutch', @projects["hutch"]['revision'])
1688
- stack_body = File.read('projects/hutch/deploy/task.yml')
1689
- parameters = [
1690
- {
1691
- parameter_key: "Environment",
1692
- parameter_value: "qa"
1693
- },
1694
- {
1695
- parameter_key: "ReleaseVersion",
1696
- parameter_value: "#{@projects["hutch"]['revision']}-#{@dns_record_identifier[0..7]}"
1697
- },
1698
- {
1699
- parameter_key: "ALBShortName",
1700
- parameter_value: "hutch-qa-#{deploy_id}"[0..27]
1701
- },
1702
- {
1703
- parameter_key: "ECSClusterName",
1704
- parameter_value: @ecs_cluster_name
1705
- },
1706
- {
1707
- parameter_key: "EnvHash",
1708
- parameter_value: deploy_id
1709
- },
1710
- {
1711
- parameter_key: "HostnamePattern",
1712
- parameter_value: "hutch-#{@dns_record_identifier}.qa.colaster.com"
1713
- },
1714
- {
1715
- parameter_key: "HostnamePatternPriority",
1716
- parameter_value: (hostname_pattern_priority.to_i + 254).to_s
1717
- },
1718
- {
1719
- parameter_key: "ApiUrl",
1720
- parameter_value: "https://#{get_route53_hostname('maia-intermediari')}"
1721
- }
1722
- ]
1723
- if stack_exists?(stack_name_hutch)
1724
- cur_version = get_currently_deployed_version(stack_name_hutch)
1725
- update_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hutch"]['revision'])
1726
- else
1727
- create_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role)
1728
- end
1729
- end
1730
-
1731
- stack_name_maia = "ecs-task-maia-qa-#{deploy_id}"
1732
- git_checkout_version('maia', @projects["maia"]['revision'])
1733
- stack_body = File.read('projects/maia/deploy/task.yml')
1734
- parameters = [
1735
- {
1736
- parameter_key: "Environment",
1737
- parameter_value: "qa"
1738
- },
1739
- {
1740
- parameter_key: "ReleaseVersion",
1741
- parameter_value: "#{@projects["maia"]['revision']}"
1742
- },
1743
- {
1744
- parameter_key: "ALBShortName",
1745
- parameter_value: "maia-qa-#{deploy_id}"[0..15]
1746
- },
1747
- {
1748
- parameter_key: "ECSClusterName",
1749
- parameter_value: @ecs_cluster_name
1750
- },
1751
- {
1752
- parameter_key: "EnvHash",
1753
- parameter_value: deploy_id
1754
- },
1755
- {
1756
- parameter_key: "HostnamePatternPublic",
1757
- parameter_value: "api*-#{@dns_record_identifier}.qa.colaster.com"
1758
- },
1759
- {
1760
- parameter_key: "HostnamePatternPriority",
1761
- parameter_value: (hostname_pattern_priority.to_i + 128).to_s
1762
- },
1763
- {
1764
- parameter_key: "ProxyHostnameIntermediari",
1765
- parameter_value: "api-intermediari-#{@dns_record_identifier}.qa.colaster.com"
1766
- },
1767
- {
1768
- parameter_key: "ProxyHostnameApp",
1769
- parameter_value: "api-#{@dns_record_identifier}.qa.colaster.com"
1770
- }
1771
- ]
1772
- if stack_exists?(stack_name_maia)
1773
- cur_version = get_currently_deployed_version(stack_name_maia)
1774
- update_stack(stack_name_maia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["maia"]['revision'])
1775
- else
1776
- create_stack(stack_name_maia, stack_body, parameters, tags, @cf_role)
1777
- end
1778
-
1779
- wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
1780
- wait_for_stack_ready(stack_name_consumer) unless stack_ready?(stack_name_consumer)
1781
- wait_for_stack_ready(stack_name_ermes) unless stack_ready?(stack_name_ermes)
1782
- wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1783
- wait_for_stack_ready(stack_name_peano) unless stack_ready?(stack_name_peano)
1784
- wait_for_stack_ready(stack_name_rogoreport) unless stack_ready?(stack_name_rogoreport)
1785
- wait_for_stack_ready(stack_name_borat) unless stack_ready?(stack_name_borat)
1786
- wait_for_stack_ready(stack_name_activia) unless stack_ready?(stack_name_activia)
1787
- wait_for_stack_ready(stack_name_maia) unless stack_ready?(stack_name_maia)
1788
- wait_for_stack_ready(stack_name_crash) unless stack_ready?(stack_name_crash) unless !deploy_crash?
1789
- wait_for_stack_ready(stack_name_hutch) unless stack_ready?(stack_name_hutch) unless !deploy_starsky_hutch?
1790
-
1791
-
1792
- update_service_defaults(stack_name_web)
1793
- update_service_defaults(stack_name_consumer)
1794
- update_service_defaults(stack_name_urania)
1795
- update_service_defaults(stack_name_ermes)
1796
- update_service_defaults(stack_name_bburago)
1797
- update_service_defaults(stack_name_hal9000)
1798
- update_service_defaults(stack_name_fidaty)
1799
- update_service_defaults(stack_name_peano)
1800
- update_service_defaults(stack_name_rogoreport)
1801
- update_service_defaults(stack_name_assange)
1802
- update_service_defaults(stack_name_borat)
1803
- update_service_defaults(stack_name_activia)
1804
- update_service_defaults(stack_name_skynet)
1805
- update_service_defaults(stack_name_leftorium)
1806
- update_service_defaults(stack_name_rachele)
1807
- update_service_defaults(stack_name_maia)
1808
- update_service_defaults(stack_name_starsky) unless !deploy_starsky_hutch?
1809
- update_service_defaults(stack_name_hutch) unless !deploy_starsky_hutch?
1810
- update_service_defaults(stack_name_crash) unless !deploy_crash?
1811
-
1812
- activia_hostname = get_route53_hostname("activia")
1813
- assange_hostname = get_route53_hostname("assange")
1814
- bburago_hostname = get_route53_hostname("bburago")
1815
- borat_hostname = get_route53_hostname("borat")
1816
- ermes_hostname = get_route53_hostname("ermes")
1817
- fidaty_hostname = get_route53_hostname("fidaty")
1818
- hal9000_hostname = get_route53_hostname("hal9000")
1819
- prima_hostname = get_route53_hostname("web")
1820
- peano_hostname = get_route53_hostname("peano")
1821
- skynet_hostname = get_route53_hostname("skynet")
1822
- urania_hostname = get_route53_hostname("urania")
1823
- roger_hostname = get_route53_hostname("roger")
1824
- leftorium_hostname = get_route53_hostname("leftorium")
1825
- rachele_hostname = get_route53_hostname("rachele")
1826
- maia_app_hostname = get_route53_hostname("maia-app")
1827
- maia_intermediari_hostname = get_route53_hostname("maia-intermediari")
1828
- crash_hostname = get_route53_hostname("crash") unless !deploy_crash?
1829
- starsky_hostname = get_route53_hostname("starsky") unless !deploy_starsky_hutch?
1830
- hutch_hostname = get_route53_hostname("hutch") unless !deploy_starsky_hutch?
1831
-
1832
- launch_mimo(deploy_id) if deploy_starsky_hutch?
1833
-
1834
- projects_text = "
1835
- > Prima url: https://#{prima_hostname}
1836
- > Backoffice (Borat) url: https://#{borat_hostname}
1837
- > Urania url: http://#{urania_hostname}:81
1838
- > Bburago url: http://#{bburago_hostname}:83
1839
- > Ermes url: http://#{ermes_hostname}:10002
1840
- > Hal9000 url: http://#{hal9000_hostname}:10031
1841
- > Fidaty url: http://#{fidaty_hostname}:10021
1842
- > Peano url: http://#{peano_hostname}:10039
1843
- > Assange url: https://#{assange_hostname}
1844
- > Activia url: http://#{activia_hostname}:10041
1845
- > Skynet url: http://#{skynet_hostname}:8050
1846
- > Roger url: http://#{roger_hostname}:10051
1847
- > Leftorium url: http://#{leftorium_hostname}:10061
1848
- > Rachele url: http://#{rachele_hostname}:10040
1849
- > Maia App url: https://#{maia_app_hostname}
1850
- > Maia Intermediari url: https://#{maia_intermediari_hostname}"
1851
- projects_text.concat "
1852
- > Crash url: https://#{crash_hostname}" if deploy_crash?
1853
- projects_text.concat "
1854
- > Starsky url: https://#{starsky_hostname}
1855
- > Hutch url: https://#{hutch_hostname}" if deploy_starsky_hutch?
1856
- projects_text.concat "
1857
- > RabbitMQ url: http://#{ec2_ip_address(asg_stack_name)}:15672
1858
- > Supervisor url: http://#{ec2_ip_address(asg_stack_name)}:9001
1859
- > Elasticsearch url: http://#{ec2_ip_address(asg_stack_name)}:9200
1860
- > SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
1861
- output projects_text.cyan
1862
- output "Deploy effettuato, everything is awesome!\n".green
1863
-
1864
- if @projects['prima']['name'] != 'master' then
1865
- # output "Lancio il batch job per la visual regression..."
1866
- # launch_bocelli_test(prima_hostname)
1867
- # output "Visual regression lanciata con successo!"
1868
-
1869
- output "Lancio i test con Lighthouse..."
1870
- launch_lighthouse_test(prima_hostname, "mobile")
1871
- launch_lighthouse_test(prima_hostname, "desktop")
1872
- output "Test con Lighthouse lanciati con successo..."
1873
- end
1874
-
1875
- qainit_write_output(projects_text, 'Indirizzi scritti su ')
1876
- end
1877
-
1878
- def get_route53_hostname(project)
1879
- case
1880
- when project.include?('web')
1881
- host = "www-#{@dns_record_identifier}.qa.colaster.com"
1882
- when project.include?('urania')
1883
- host = "urania-#{@dns_record_identifier}.qa.colaster.com"
1884
- when project.include?('bburago')
1885
- host = "bburago-#{@dns_record_identifier}.qa.colaster.com"
1886
- when project.include?('hal9000')
1887
- host = "hal9000-#{@dns_record_identifier}.qa.colaster.com"
1888
- when project.include?('fidaty')
1889
- host = "fidaty-#{@dns_record_identifier}.qa.colaster.com"
1890
- when project.include?('peano')
1891
- host = "peano-#{@dns_record_identifier}.qa.colaster.com"
1892
- when project.include?('assange')
1893
- host = "assange-#{@dns_record_identifier}.qa.colaster.com"
1894
- when project.include?('borat')
1895
- host = "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1896
- when project.include?('crash')
1897
- host = "crash-#{@dns_record_identifier}.qa.colaster.com"
1898
- when project.include?('ermes')
1899
- host = "ermes-#{@dns_record_identifier}.qa.colaster.com"
1900
- when project.include?('activia')
1901
- host = "activia-#{@dns_record_identifier}.qa.colaster.com"
1902
- when project.include?('skynet')
1903
- host = "skynet-#{@dns_record_identifier}.qa.colaster.com"
1904
- when project.include?('roger')
1905
- host = "roger-#{@dns_record_identifier}.qa.colaster.com"
1906
- when project.include?('leftorium')
1907
- host = "leftorium-#{@dns_record_identifier}.qa.colaster.com"
1908
- when project.include?('rachele')
1909
- host = "rachele-#{@dns_record_identifier}.qa.colaster.com"
1910
- when project.include?('starsky')
1911
- host = "starsky-#{@dns_record_identifier}.qa.colaster.com"
1912
- when project.include?('hutch')
1913
- host = "hutch-#{@dns_record_identifier}.qa.colaster.com"
1914
- when project.include?('maia-app')
1915
- host = "api-#{@dns_record_identifier}.qa.colaster.com"
1916
- when project.include?('maia-intermediari')
1917
- host = "api-intermediari-#{@dns_record_identifier}.qa.colaster.com"
1918
- end
1919
- host
1920
- end
1921
-
1922
- def ec2_ip_address(asg_stack_name)
1923
- resp = describe_stack_resource(asg_stack_name, 'ECSAutoScalingGroup')
1924
- resp = describe_auto_scaling_groups([resp.stack_resource_detail.physical_resource_id], 1)
1925
- instance_id = resp.auto_scaling_groups[0].instances[0].instance_id
1926
- resp = describe_instances([instance_id])
1927
- resp.reservations[0].instances[0].private_ip_address
1928
- end
1929
-
1930
611
  def get_alb_host(stack_name)
1931
612
  case
1932
613
  when stack_name.include?('web')
@@ -1975,723 +656,6 @@ class Release
1975
656
  resp.load_balancers[0].dns_name
1976
657
  end
1977
658
 
1978
- def update_service_defaults(stack_name)
1979
- case
1980
- when stack_name.include?('web')
1981
- logical_resource_id = 'ECSServiceWebQA'
1982
- when stack_name.include?('consumer')
1983
- logical_resource_id = 'ECSServiceConsumerApiQa'
1984
- when stack_name.include?('urania')
1985
- logical_resource_id = 'ECSServiceUraniaQA'
1986
- when stack_name.include?('backoffice')
1987
- logical_resource_id = 'ECSServiceBackoffice'
1988
- when stack_name.include?('ermes')
1989
- logical_resource_id = 'ECSServiceErmesQA'
1990
- when stack_name.include?('bburago')
1991
- logical_resource_id = 'ECSServiceBburagoQA'
1992
- when stack_name.include?('hal9000')
1993
- logical_resource_id = 'ECSServiceHal9000QA'
1994
- when stack_name.include?('fidaty')
1995
- logical_resource_id = 'ECSServiceFidatyQA'
1996
- when stack_name.include?('skynet')
1997
- logical_resource_id = 'ECSServiceSkynetQA'
1998
- when stack_name.include?('roger')
1999
- logical_resource_id = 'ECSServiceRogerQA'
2000
- when stack_name.include?('activia')
2001
- logical_resource_id = 'ECSServiceActiviaQA'
2002
- when stack_name.include?('peano')
2003
- logical_resource_id = 'ECSServicePeanoQA'
2004
- when stack_name.include?('rogoreport')
2005
- logical_resource_id = 'ECSServiceRogoreport'
2006
- when stack_name.include?('assange')
2007
- logical_resource_id = 'ECSServiceAssangeQA'
2008
- when stack_name.include?('borat')
2009
- logical_resource_id = 'ECSServiceBorat'
2010
- when stack_name.include?('leftorium')
2011
- logical_resource_id = 'ECSServiceLeftoriumQA'
2012
- when stack_name.include?('rachele')
2013
- logical_resource_id = 'ECSServiceRacheleQA'
2014
- when stack_name.include?('crash')
2015
- logical_resource_id = 'ECSServiceCrashQA'
2016
- when stack_name.include?('starsky')
2017
- logical_resource_id = 'ECSServiceStarskyQA'
2018
- when stack_name.include?('hutch')
2019
- logical_resource_id = 'ECSServiceHutch'
2020
- when stack_name.include?('maia')
2021
- logical_resource_id = 'ECSServiceMaia'
2022
- else
2023
- raise "Service name non gestito per lo stack #{stack_name}"
2024
- end
2025
- resp = describe_stack_resource(stack_name, logical_resource_id)
2026
- update_ecs_service(@ecs_cluster_name, resp.stack_resource_detail.physical_resource_id, {minimum_healthy_percent: 0, maximum_percent: 100})
2027
- end
2028
-
2029
- def launch_lighthouse_test(url, device)
2030
- @cloudflare.post("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {type: 'CNAME', name: "www-#{@dns_record_identifier}", content: url, proxied: true, ttl: 1}) unless get_lighthouse_dns()
2031
-
2032
- @batch.submit_job({
2033
- job_name: "lighthouse-#{device}-#{@dns_record_identifier}",
2034
- job_queue: "tools-production",
2035
- job_definition: describe_stack_resource('batch-job-lighthouse-production', 'JobDefinition').stack_resource_detail.physical_resource_id,
2036
- container_overrides: {
2037
- environment: [
2038
- {
2039
- name: "URL_TO_TEST",
2040
- value: "https://www-#{@dns_record_identifier}.prima.it/?superprima"
2041
- },
2042
- {
2043
- name: "DEVICE",
2044
- value: device
2045
- },
2046
- {
2047
- name: "BRANCH_NAME",
2048
- value: @projects['prima']['name']
2049
- },
2050
- {
2051
- name: "COMMITTER_EMAIL",
2052
- value: @projects['prima']['committer']
2053
- }
2054
- ]
2055
- }
2056
- })
2057
- end
2058
-
2059
- def get_lighthouse_dns()
2060
- dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', name: "www-#{@dns_record_identifier}.prima.it"})
2061
- if dns_records.body[:result_info][:count] > 0
2062
- return dns_records.body[:result][0][:id]
2063
- end
2064
- false
2065
- end
2066
-
2067
- def delete_lighthouse_dns()
2068
- dns_id = get_lighthouse_dns()
2069
- @cloudflare.delete("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns_id}") if dns_id
2070
- end
2071
-
2072
- def launch_bocelli_test(url)
2073
- @batch.submit_job({
2074
- job_name: "bocelli-test-#{@dns_record_identifier}",
2075
- job_queue: "tools-production",
2076
- job_definition: describe_stack_resource('batch-job-bocelli-production', 'JobDefinition').stack_resource_detail.physical_resource_id,
2077
- container_overrides: {
2078
- environment: [
2079
- {
2080
- name: "BATCH_COMMAND",
2081
- value: "test"
2082
- },
2083
- {
2084
- name: "QA_HOSTNAME",
2085
- value: url
2086
- },
2087
- {
2088
- name: "BRANCH_NAME",
2089
- value: @projects['prima']['name']
2090
- },
2091
- {
2092
- name: "COMMITTER_EMAIL",
2093
- value: @projects['prima']['committer']
2094
- }
2095
- ]
2096
- }
2097
- })
2098
- end
2099
-
2100
- def create_activia_artifact(revision)
2101
- output "Preparo l'artifact activia .zip\n".yellow
2102
-
2103
- git_checkout_version('activia', revision)
2104
-
2105
- Dir.chdir 'projects/activia'
2106
-
2107
- decrypt_secrets()
2108
-
2109
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2110
- exec_step 'prepare-docker-compose --directory activia && cp docker-compose-qainit.yml docker-compose.yml'
2111
-
2112
- # execute_command "deploy/build_qa_artifact"
2113
-
2114
- [
2115
- "docker-compose build web",
2116
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2117
- '-c' 'mix local.hex --force && mix hex.info && \
2118
- mix deps.get && mix compile && mix deps.compile && \
2119
- cd assets && \
2120
- rm -rf node_modules && \
2121
- yarn --cache-folder ~/.cache/yarn && \
2122
- sysconfcpus -n 2 ./node_modules/.bin/webpack --mode production && \
2123
- cd .. && \
2124
- mix phx.digest && \
2125
- rm -rf _build/qa/rel/ && \
2126
- mix distillery.release --env=qa'"
2127
- ].each do |cmd|
2128
- execute_command cmd
2129
- end
2130
-
2131
- cleanup_containers
2132
-
2133
- artifact_path = Dir.glob("_build/qa/rel/activia/releases/*/activia.tar.gz").first
2134
-
2135
- upload_artifact(artifact_path, "microservices/activia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2136
-
2137
- Dir.chdir '../../'
2138
- end
2139
-
2140
- def create_assange_artifact(revision)
2141
- output "Preparo l'artifact assange .zip\n".yellow
2142
-
2143
- git_checkout_version('assange', revision)
2144
-
2145
- Dir.chdir 'projects/assange'
2146
-
2147
- decrypt_secrets()
2148
-
2149
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2150
- exec_step 'prepare-docker-compose --directory assange && cp docker-compose-qainit.yml docker-compose.yml'
2151
- exec_step 'deploy/build_qa_artifact'
2152
-
2153
- cleanup_containers
2154
-
2155
- artifact_path = Dir.glob("_build/qa/rel/assange/releases/*/assange.tar.gz").first
2156
- upload_artifact(artifact_path, "microservices/assange/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2157
-
2158
- Dir.chdir '../../'
2159
- end
2160
-
2161
- def create_bburago_artifact(revision)
2162
- output "Preparo l'artifact bburago .zip\n".yellow
2163
-
2164
- git_checkout_version('bburago', revision)
2165
-
2166
- Dir.chdir 'projects/bburago'
2167
-
2168
- decrypt_secrets()
2169
-
2170
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2171
- exec_step 'prepare-docker-compose --directory bburago && cp docker-compose-qainit.yml docker-compose.yml'
2172
- [
2173
- "docker-compose build web",
2174
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint deploy/create_qa_artifact web"
2175
- ].each do |cmd|
2176
- execute_command cmd
2177
- end
2178
-
2179
- cleanup_containers
2180
-
2181
- artifact_path = Dir.glob('_build/qa/rel/bburago/releases/*/bburago.tar.gz').first
2182
- upload_artifact(artifact_path, "microservices/bburago/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2183
-
2184
- Dir.chdir '../../'
2185
- end
2186
-
2187
- def create_borat_artifact(revision)
2188
- output "Preparo l'artifact borat .zip\n".yellow
2189
-
2190
- git_checkout_version('borat', revision)
2191
-
2192
- Dir.chdir 'projects/borat'
2193
-
2194
- decrypt_secrets()
2195
-
2196
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2197
- exec_step 'prepare-docker-compose --directory borat && cp docker-compose-qainit.yml docker-compose.yml'
2198
-
2199
- execute_command "deploy/build_qa_artifact"
2200
-
2201
- cleanup_containers
2202
-
2203
- artifact_path = Dir.glob("_build/qa/rel/borat/releases/*/borat.tar.gz").first
2204
- upload_artifact(artifact_path, "microservices/borat/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2205
-
2206
- Dir.chdir '../../'
2207
- end
2208
-
2209
- def create_crash_artifact(revision, deploy_id)
2210
- output "Preparo l'artifact crash .zip\n".yellow
2211
-
2212
- git_checkout_version('crash', revision)
2213
-
2214
- Dir.chdir 'projects/crash'
2215
-
2216
- crash_qa_host = get_route53_hostname('ecs-task-crash-qa-notneeded')
2217
-
2218
- decrypt_secrets()
2219
-
2220
- `mv docker-compose-ci.yml docker-compose.yml`
2221
- exec_step 'prepare-docker-compose --directory crash && cp docker-compose-qainit.yml docker-compose.yml'
2222
-
2223
- execute_command "deploy/build_qa_artifact #{deploy_id}"
2224
-
2225
- cleanup_containers
2226
-
2227
- artifact_path = Dir.glob('_build/qa/rel/crash/releases/*/crash.tar.gz').first
2228
- upload_artifact(artifact_path, "microservices/crash/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2229
-
2230
- Dir.chdir '../../'
2231
- end
2232
-
2233
- def create_ermes_artifact(revision)
2234
- output "Preparo l'artifact ermes .zip\n".yellow
2235
-
2236
- git_checkout_version('ermes', revision)
2237
-
2238
- Dir.chdir 'projects/ermes'
2239
-
2240
- decrypt_secrets()
2241
-
2242
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2243
- exec_step 'prepare-docker-compose --directory ermes && cp docker-compose-qainit.yml docker-compose.yml'
2244
-
2245
- if File.exist? 'deploy/build_qa_artifact'
2246
- execute_command "deploy/build_qa_artifact"
2247
- else
2248
- [
2249
- "if echo `docker network ls` | grep crash_default; \
2250
- then echo 'crash_default network already existing'; \
2251
- else docker network create crash_default; fi",
2252
- 'docker-compose build web',"docker-compose run --rm -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2253
- '-c' 'mix local.hex --force && mix hex.info && \
2254
- mix deps.get && mix compile && mix deps.compile && \
2255
- mix phx.digest && \
2256
- MIX_ENV=dev mix compile.sms && \
2257
- MIX_ENV=dev mix compile.html && \
2258
- MIX_ENV=dev mix compile.heml && \
2259
- MIX_ENV=dev mix compile.app_notification && \
2260
- rm -rf _build/qa/rel/ && \
2261
- mix release --env=qa'"
2262
- ].each do |cmd|
2263
- execute_command cmd
2264
- end
2265
- end
2266
-
2267
- cleanup_containers
2268
-
2269
- artifact_path = Dir.glob("_build/qa/rel/ermes/releases/*/ermes.tar.gz").first
2270
- upload_artifact(artifact_path, "microservices/ermes/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2271
-
2272
- Dir.chdir '../../'
2273
- end
2274
-
2275
- def create_fidaty_artifact(revision)
2276
- output "Preparo l'artifact fidaty .zip\n".yellow
2277
-
2278
- git_checkout_version('fidaty', revision)
2279
-
2280
- Dir.chdir 'projects/fidaty'
2281
-
2282
- decrypt_secrets()
2283
-
2284
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2285
- exec_step 'prepare-docker-compose --directory fidaty && cp docker-compose-qainit.yml docker-compose.yml'
2286
-
2287
- if File.exist? 'deploy/build_qa_artifact'
2288
- execute_command "deploy/build_qa_artifact"
2289
- else
2290
- [
2291
- "docker-compose build web",
2292
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2293
- '-c' 'mix local.hex --force && mix hex.info && \
2294
- mix deps.get && mix compile && mix deps.compile && \
2295
- mix phx.digest && \
2296
- rm -rf _build/qa/rel/ && \
2297
- mix release --env=qa'"
2298
- ].each do |cmd|
2299
- execute_command cmd
2300
- end
2301
- end
2302
-
2303
- cleanup_containers
2304
-
2305
- artifact_path = Dir.glob("_build/qa/rel/fidaty/releases/*/fidaty.tar.gz").first
2306
- upload_artifact(artifact_path, "microservices/fidaty/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2307
-
2308
- Dir.chdir '../../'
2309
- end
2310
-
2311
- def create_hal9000_artifact(revision)
2312
- output "Preparo l'artifact hal9000 .zip\n".yellow
2313
-
2314
- git_checkout_version('hal9000', revision)
2315
-
2316
- Dir.chdir 'projects/hal9000'
2317
-
2318
- decrypt_secrets()
2319
-
2320
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2321
- exec_step 'prepare-docker-compose --directory hal9000 && cp docker-compose-qainit.yml docker-compose.yml'
2322
- # [
2323
- # # "docker-compose build web",
2324
- # # "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2325
- # # '-c' 'mix local.hex --force && mix hex.info && \
2326
- # # mix deps.get && mix compile && mix deps.compile && \
2327
- # # mix phx.digest assets -o priv/static && \
2328
- # # rm -rf _build/qa/rel/ && \
2329
- # # mix release --env=qa'"
2330
- # ].each do |cmd|
2331
- # execute_command cmd
2332
- # end
2333
-
2334
- execute_command "deploy/build_qa_artifact"
2335
-
2336
- cleanup_containers
2337
-
2338
- artifact_path = Dir.glob("_build/qa/rel/hal9000/releases/*/hal9000.tar.gz").first
2339
- upload_artifact(artifact_path, "microservices/hal9000/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2340
-
2341
- Dir.chdir '../../'
2342
- end
2343
-
2344
- def create_hutch_artifact(revision)
2345
- output "Preparo l'artifact hutch\n".yellow
2346
-
2347
- git_checkout_version('hutch', revision)
2348
-
2349
- Dir.chdir 'projects/hutch'
2350
-
2351
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2352
-
2353
- exec_step 'cp docker-compose-ci.yml docker-compose.yml'
2354
- exec_step 'prepare-docker-compose --directory hutch && cp docker-compose-qainit.yml docker-compose.yml'
2355
-
2356
- execute_command "deploy/build_qa_artifact #{get_route53_hostname("maia-intermediari")}"
2357
-
2358
- cleanup_containers
2359
-
2360
- artifact_path = "./hutch.tar.gz"
2361
- upload_artifact(artifact_path, "microservices/hutch/#{revision}-#{@dns_record_identifier[0..7]}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2362
-
2363
- Dir.chdir '../../'
2364
- end
2365
-
2366
- def create_leftorium_artifact(revision)
2367
- output "Preparo l'artifact leftorium .zip\n".yellow
2368
-
2369
- git_checkout_version('leftorium', revision)
2370
-
2371
- Dir.chdir 'projects/leftorium'
2372
-
2373
- decrypt_secrets()
2374
-
2375
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2376
- exec_step 'prepare-docker-compose --directory leftorium && cp docker-compose-qainit.yml docker-compose.yml'
2377
- [
2378
- "docker-compose build web",
2379
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2380
- '-c' 'mix local.hex --force && mix hex.info && \
2381
- mix deps.get && mix compile && mix deps.compile && \
2382
- rm -rf _build/qa/rel/ && \
2383
- mix release --env=qa'"
2384
- ].each do |cmd|
2385
- execute_command cmd
2386
- end
2387
-
2388
- cleanup_containers
2389
-
2390
- artifact_path = Dir.glob('_build/qa/rel/leftorium/releases/*/leftorium.tar.gz').first
2391
- upload_artifact(artifact_path, "microservices/leftorium/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2392
-
2393
- Dir.chdir '../../'
2394
- end
2395
-
2396
- def create_maia_artifact(revision)
2397
- output "Preparo l'artifact maia .zip\n".yellow
2398
-
2399
- git_checkout_version('maia', revision)
2400
-
2401
- Dir.chdir 'projects/maia'
2402
-
2403
- decrypt_secrets()
2404
-
2405
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2406
- exec_step 'prepare-docker-compose --directory maia && cp docker-compose-qainit.yml docker-compose.yml'
2407
-
2408
- execute_command 'deploy/build_qa_artifact'
2409
-
2410
- cleanup_containers
2411
-
2412
- artifact_path = Dir.glob('_build/qa/rel/maia/releases/*/maia.tar.gz').first
2413
- upload_artifact(artifact_path, "microservices/maia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2414
-
2415
- Dir.chdir '../../'
2416
- end
2417
-
2418
- def create_peano_artifact(revision)
2419
- output "Preparo l'artifact peano .zip\n".yellow
2420
-
2421
- git_checkout_version('peano', revision)
2422
-
2423
- Dir.chdir 'projects/peano'
2424
-
2425
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2426
-
2427
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2428
- exec_step 'prepare-docker-compose --directory peano && cp docker-compose-qainit.yml docker-compose.yml'
2429
-
2430
- execute_command "deploy/build_qa_artifact"
2431
-
2432
- cleanup_containers
2433
-
2434
- artifact_path = Dir.glob("_build/qa/rel/peano/releases/*/peano.tar.gz").first
2435
- upload_artifact(artifact_path, "microservices/peano/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2436
-
2437
- Dir.chdir '../../'
2438
- end
2439
-
2440
- def create_prima_artifact(revision, branch_name, deploy_id)
2441
- output "Preparo l'artifact prima .zip\n".yellow
2442
-
2443
- git_checkout_version('prima', revision)
2444
-
2445
- Dir.chdir 'projects/prima'
2446
-
2447
- ['vendor'].each do |dir|
2448
- unless File.directory?(dir)
2449
- if File.directory?("../../../prima/#{dir}")
2450
- exec_step "rsync -a ../../../prima/#{dir} ."
2451
- end
2452
- end
2453
- end
2454
-
2455
- exec_step 'mv docker-compose-ci.yml docker-compose.yml'
2456
- exec_step 'prepare-docker-compose --directory prima'
2457
- exec_step 'sed -i "s/\\/home\\/app/\\/root/g" docker-compose.yml'
2458
- `sed -i 's|"@prima-assicurazioni/pyxis-npm": ".*",|"@prima-assicurazioni/pyxis-npm": "#{@pyxis_version}",|' package.json` if deploy_pyxis?
2459
- [
2460
- "bin/qa_build_artifact.sh #{branch_name} #{'update' if @deploy_update}"
2461
- ].each do |cmd|
2462
- execute_command cmd
2463
- end
2464
-
2465
- cleanup_containers
2466
-
2467
- Dir.chdir "../../"
2468
- end
2469
-
2470
- def create_pyxis_artifact(revision, deploy_id)
2471
- if (deploy_pyxis?)
2472
- output "Preparo l'artifact pyxis\n".yellow
2473
-
2474
- git_checkout_version('pyxis-npm', revision)
2475
-
2476
- Dir.chdir 'projects/pyxis-npm'
2477
-
2478
- decrypt_secrets()
2479
-
2480
- exec_step 'mv .fakenpmrc .npmrc'
2481
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2482
- exec_step 'prepare-docker-compose --directory pyxis-npm && cp docker-compose-qainit.yml docker-compose.yml'
2483
- exec_step 'docker-compose build web'
2484
-
2485
- exec_step "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
2486
- '-c' 'npm view prima-assicurazioni/pyxis-npm versions --json > versions.json'" # the published versions can only be read from inside the container, so write them to a file and read it back right after
2487
- published_versions = `cat versions.json`
2488
- qa_versions = JSON.parse(published_versions).select{ |version| version.include? get_pyxis_version(deploy_id) }
2489
-
2490
- @pyxis_version = "0.#{get_pyxis_version(deploy_id)}.#{qa_versions.size}"
2491
-
2492
- `sed -i '3s/".*/"version": "#{@pyxis_version}",/' package.json`
2493
- [
2494
- "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
2495
- '-c' 'yarn install && \
2496
- yarn build:prod && \
2497
- npm publish'"
2498
- ].each do |cmd|
2499
- execute_command cmd
2500
- end
2501
-
2502
- cleanup_containers
2503
- Dir.chdir '../../'
2504
- end
2505
- end
2506
-
2507
- def create_rachele_artifact(revision)
2508
- output "Preparo l'artifact rachele .zip\n".yellow
2509
-
2510
- git_checkout_version('rachele', revision)
2511
-
2512
- Dir.chdir 'projects/rachele'
2513
-
2514
- decrypt_secrets()
2515
-
2516
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2517
- exec_step 'prepare-docker-compose --directory rachele && cp docker-compose-qainit.yml docker-compose.yml'
2518
-
2519
- execute_command "docker-compose build web"
2520
-
2521
- [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2522
- '-c' 'mix local.hex --force && mix hex.info && \
2523
- mix deps.get && mix compile && mix deps.compile && \
2524
- rm -rf _build/qa/rel/ && \
2525
- mix release --env=qa'"
2526
- ].each do |cmd|
2527
- execute_command cmd
2528
- end
2529
-
2530
- cleanup_containers
2531
-
2532
- artifact_path = Dir.glob("_build/qa/rel/rachele/releases/*/rachele.tar.gz").first
2533
- upload_artifact(artifact_path, "microservices/rachele/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2534
-
2535
- Dir.chdir '../../'
2536
- end
2537
-
2538
- def create_roger_artifact(revision)
2539
- output "Preparo l'artifact roger .zip\n".yellow
2540
-
2541
- git_checkout_version('roger', revision)
2542
-
2543
- Dir.chdir 'projects/roger'
2544
-
2545
- decrypt_secrets()
2546
-
2547
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2548
- exec_step 'prepare-docker-compose --directory roger && cp docker-compose-qainit.yml docker-compose.yml'
2549
- [
2550
- "docker-compose build web",
2551
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2552
- '-c' 'mix local.hex --force && mix hex.info && \
2553
- mix deps.get && mix compile && mix deps.compile && \
2554
- mix phx.digest && \
2555
- rm -rf _build/qa/rel/ && \
2556
- mix distillery.release --env=qa'"
2557
- ].each do |cmd|
2558
- execute_command cmd
2559
- end
2560
-
2561
- cleanup_containers
2562
-
2563
- artifact_path = Dir.glob("_build/qa/rel/roger/releases/*/roger.tar.gz").first
2564
- upload_artifact(artifact_path, "microservices/roger/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2565
-
2566
- Dir.chdir '../../'
2567
- end
2568
-
2569
- def create_rogoreport_artifact(revision)
2570
- output "Preparo l'artifact rogoreport .zip\n".yellow
2571
-
2572
- git_checkout_version('rogoreport', revision)
2573
-
2574
- Dir.chdir 'projects/rogoreport'
2575
-
2576
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2577
-
2578
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2579
- exec_step 'prepare-docker-compose --directory rogoreport && cp docker-compose-qainit.yml docker-compose.yml'
2580
- [
2581
- "docker-compose build web",
2582
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2583
- '-c' 'mix local.hex --force && mix hex.info && \
2584
- mix deps.get && mix compile && mix deps.compile && \
2585
- rm -rf _build/qa/rel/ && \
2586
- mix release --name=rogoreport --env=qa'"
2587
- ].each do |cmd|
2588
- execute_command cmd
2589
- end
2590
-
2591
- cleanup_containers
2592
-
2593
- artifact_path = Dir.glob("_build/qa/rel/rogoreport/releases/*/rogoreport.tar.gz").first
2594
- upload_artifact(artifact_path, "microservices/rogoreport/rogoreport-#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2595
-
2596
- Dir.chdir '../../'
2597
- end
2598
-
2599
- def create_skynet_artifact(revision)
2600
- output "Preparo l'artifact skynet\n".yellow
2601
-
2602
- git_checkout_version('skynet', revision)
2603
-
2604
- Dir.chdir 'projects/skynet'
2605
-
2606
- version = `git rev-parse HEAD`
2607
-
2608
- artifact_path = "/tmp/skynet-#{revision}-qa.tar.gz"
2609
-
2610
- exec_step "tar cfz #{artifact_path} core config oracles pickles web tests logstash application_log configuration.py wsgi.py requirements.txt requirements_training.txt"
2611
-
2612
- upload_artifact(artifact_path, "microservices/skynet/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2613
-
2614
- Dir.chdir '../../'
2615
- end
2616
-
2617
- def create_starsky_artifact(revision)
2618
- output "Preparo l'artifact starsky\n".yellow
2619
-
2620
- git_checkout_version('starsky', revision)
2621
-
2622
- Dir.chdir 'projects/starsky'
2623
-
2624
- version = `git rev-parse HEAD`
2625
-
2626
- #artifact_path = "/tmp/starsky-#{revision}-qa.tar.gz"
2627
-
2628
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2629
-
2630
- `mv docker-compose-ci.yml docker-compose.yml`
2631
- exec_step 'prepare-docker-compose --directory starsky && cp docker-compose-qainit.yml docker-compose.yml'
2632
- exec_step "sed -i s/qa_deploy_id/#{get_deploy_id}/g .env.dist.qa"
2633
- exec_step "cp .env.dist.qa .env"
2634
-
2635
- [
2636
- "sed -i 's/USER app/USER root/g' Dockerfile",
2637
- "if echo `docker network ls` | grep peano_default; \
2638
- then echo 'peano_default network already existing'; \
2639
- else docker network create peano_default; fi",
2640
- "docker-compose build web",
2641
- "docker-compose run -w $PWD -e BUILD_ENV=qa -u root --entrypoint /bin/sh web \
2642
- '-c' 'cargo build --release -vv --features=qa \
2643
- && cargo build --bin migrate --release --features=qa \
2644
- && cargo build --bin rabbit_worker --release --features=qa \
2645
- && cp -p target/release/starsky . \
2646
- && cp -p target/release/migrate . \
2647
- && cp -p target/release/rabbit_worker . \
2648
- && tar cfz #{revision}-qa.tar.gz config starsky migrate rabbit_worker .env.dist.*'"
2649
- ].each do |cmd|
2650
- execute_command cmd
2651
- end
2652
-
2653
- artifact_path = "./#{revision}-qa.tar.gz"
2654
-
2655
- upload_artifact(artifact_path, "microservices/starsky/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2656
-
2657
- Dir.chdir '../../'
2658
- end
2659
-
2660
- def create_urania_artifact(revision)
2661
- output "Preparo l'artifact urania .zip\n".yellow
2662
-
2663
- git_checkout_version('urania', revision)
2664
-
2665
- Dir.chdir 'projects/urania'
2666
-
2667
- decrypt_secrets()
2668
-
2669
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2670
- exec_step 'prepare-docker-compose --directory urania && cp docker-compose-qainit.yml docker-compose.yml'
2671
-
2672
- if File.exist? 'deploy/build_qa_artifact'
2673
- execute_command "deploy/build_qa_artifact"
2674
- else
2675
- [
2676
- "docker-compose build web",
2677
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2678
- '-c' 'mix local.hex --force && mix hex.info && \
2679
- mix deps.get && mix compile && mix deps.compile && \
2680
- rm -rf _build/qa/rel/ && \
2681
- mix release --env=qa'"
2682
- ].each do |cmd|
2683
- execute_command cmd
2684
- end
2685
- end
2686
-
2687
- cleanup_containers
2688
-
2689
- artifact_path = Dir.glob("_build/qa/rel/urania/releases/*/urania.tar.gz").first
2690
- upload_artifact(artifact_path, "microservices/urania/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2691
-
2692
- Dir.chdir '../../'
2693
- end
2694
-
2695
659
  def deploy_pyxis?
2696
660
  if defined? @deploy_pyxis
2697
661
  @deploy_pyxis
@@ -2705,146 +669,11 @@ class Release
2705
669
  end
2706
670
  end
2707
671
 
2708
- def deploy_crash?
2709
- true # until we agree on how to handle the fact that leftorium needs to communicate with crash
2710
- # crash_present = !@projects['crash'].empty? && @projects['crash']['name'] != 'master' && !@projects['crash']['default_branch']
2711
- # leftorium_present = !@projects['leftorium'].empty? && @projects['leftorium']['name'] != 'master' && !@projects['leftorium']['default_branch']
2712
- # crash_present || leftorium_present
2713
- end
2714
-
2715
- def deploy_starsky_hutch?
2716
- true
2717
- #starsky_present = !@projects['starsky'].nil? && !@projects['starsky'].empty? && @projects['starsky']['name'] != 'master' && !@projects['starsky']['default_branch']
2718
- #hutch_present = !@projects['hutch'].nil? && !@projects['hutch'].empty? && @projects['hutch']['name'] != 'master' && !@projects['hutch']['default_branch']
2719
- #starsky_present || hutch_present
2720
- end
2721
-
2722
- def get_pyxis_version(deploy_id)
2723
- (deploy_id.delete '[a-z0]')[0..9]
2724
- end
2725
-
2726
- def cleanup_containers
2727
- `docker-compose kill && docker-compose down -v --remove-orphans`
2728
- `docker rm $(docker ps -q -f status=exited)`
2729
- end
2730
-
2731
- def git_checkout_version(project, revision)
2732
- Dir.chdir "projects/#{project}"
2733
- exec_step "git checkout -- . && git checkout #{revision}"
2734
- Dir.chdir "../../"
2735
- end
2736
-
2737
- def create_asg_stack(stack_name, tags = [])
2738
- stack_body = IO.read('cloudformation/stacks/asg/ecs-asg-allinone.yml')
2739
- parameters = [
2740
- {
2741
- parameter_key: "Environment",
2742
- parameter_value: "qa"
2743
- },
2744
- {
2745
- parameter_key: "InstanceType",
2746
- parameter_value: "t3a.xlarge"
2747
- },
2748
- {
2749
- parameter_key: "ECSClusterName",
2750
- parameter_value: @ecs_cluster_name
2751
- },
2752
- {
2753
- parameter_key: "AMIID",
2754
- parameter_value: @ami_id
2755
- }
2756
- ]
2757
- create_stack(stack_name, stack_body, parameters, tags, @cf_role)
2758
- end
2759
-
2760
- def create_cluster_stack(stack_name, tags = [])
2761
- stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
2762
- create_stack(stack_name, stack_body, [], tags)
2763
- end
2764
-
2765
672
  def update_cluster_stack(stack_name, tags = [])
2766
673
  stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
2767
674
  update_stack(stack_name, stack_body, [], tags)
2768
675
  end
2769
676
 
2770
- def create_alb_stack(stack_name, role, hash, environment = 'qa')
2771
- stack_body = IO.read('cloudformation/stacks/elb/alb-public-qa.yml')
2772
- parameters = [
2773
- {
2774
- parameter_key: "Environment",
2775
- parameter_value: environment
2776
- },
2777
- {
2778
- parameter_key: "Role",
2779
- parameter_value: role
2780
- },
2781
- {
2782
- parameter_key: "EnvHash",
2783
- parameter_value: hash
2784
- }
2785
- ]
2786
- create_stack(stack_name, stack_body, parameters, [], @cf_role)
2787
- end
2788
-
2789
- def import_redis_crash(qa_ip_address)
2790
- output "Importo chiavi di Redis da staging\n".yellow
2791
-
2792
- prefixes = ['CODICI', 'fun_with_flags']
2793
- redis_qa = Redis.new(:url => "redis://#{qa_ip_address}:6379/10")
2794
- redis_staging = Redis.new(:url => 'redis://staging.cache-1.prima.it:6379/10')
2795
-
2796
- prefixes.each do |prefix|
2797
- redis_staging.keys("#{prefix}*").each do |key|
2798
- next unless redis_qa.keys(key).empty?
2799
- output "Importo #{key} dal Redis di staging\n".yellow
2800
- dump_staging = redis_staging.dump key
2801
- redis_qa.restore key, 0, dump_staging
2802
- end
2803
- end
2804
- end
2805
-
2806
- def import_dbs(ip_address)
2807
- overrides = {
2808
- container_overrides: [
2809
- {
2810
- name: 'dbrestore',
2811
- environment: [
2812
- {
2813
- name: 'EC2_IP_ADDRESS',
2814
- value: ip_address
2815
- }
2816
- ]
2817
- }
2818
- ]
2819
- }
2820
- resp = run_ecs_task(@ecs_cluster_name, @import_db_task, overrides, 1)
2821
- return resp
2822
- end
2823
-
2824
- def wait_for_db_import(task)
2825
- output "Attendo che i DB vengano importati...\n".yellow
2826
- stopped_at = nil
2827
- sleep 15 # otherwise the task that was just started is not found yet...
2828
- while stopped_at.nil?
2829
- if task.tasks[0].nil?
2830
- pp @ecs_cluster_name
2831
- pp task
2832
- stop_if true, "Task di import DB lanciato, ma risposta vuota!".red
2833
- end
2834
- task = describe_ecs_tasks(task.tasks[0].cluster_arn, [task.tasks[0].task_arn])
2835
- stopped_at = task.tasks[0].stopped_at unless task.tasks[0].nil?
2836
- sleep_seconds = 10
2837
- seconds_elapsed = 0
2838
- while true && stopped_at.nil?
2839
- break if seconds_elapsed >= sleep_seconds
2840
- print '.'.yellow; STDOUT.flush
2841
- sleep 1
2842
- seconds_elapsed += 1
2843
- end
2844
- end
2845
- print "\n"
2846
- end
2847
-
2848
677
  def choose_branch_to_deploy(project_name, select_master = false)
2849
678
  Dir.chdir "projects/#{project_name}"
2850
679
  output "Recupero la lista dei branch del progetto #{project_name}..."
@@ -2932,78 +761,6 @@ class Release
2932
761
  (Time.now.to_i.to_s[-4..-1].to_i + Random.rand(40000)).to_s
2933
762
  end
2934
763
 
2935
- def launch_mimo(env_hash)
2936
- resp = describe_stack_resource('batch-job-mimo', 'JobDefinition')
2937
-
2938
- @batch.submit_job({
2939
- job_name: "mimo-#{@dns_record_identifier}", # required
2940
- job_queue: "tools-production", # required
2941
- job_definition: resp.stack_resource_detail.physical_resource_id, # required
2942
- container_overrides: {
2943
- environment: [
2944
- {
2945
- name: 'ENV_HASH',
2946
- value: env_hash
2947
- },
2948
- {
2949
- name: 'APP_ENV',
2950
- value: 'qa'
2951
- },
2952
- {
2953
- name: 'CYPRESS_BASE_URL',
2954
- value: "https://hutch-#{env_hash}.qa.colaster.com"
2955
- },
2956
- {
2957
- name: 'CYPRESS_PEANO_BASE_URL',
2958
- value: "http://peano-#{env_hash}.qa.colaster.com:10039/quotation"
2959
- },
2960
- {
2961
- name: 'CYPRESS_API_BASE_URL',
2962
- value: "https://#{get_route53_hostname("starsky")}/graphql"
2963
- },
2964
- {
2965
- name: 'QA_NAME',
2966
- value: @git_branch
2967
- }
2968
- ]
2969
- }
2970
- })
2971
-
2972
- output "Mimo lanciato con successo!\n".green
2973
- end
2974
-
2975
- def get_currently_deployed_version(stack_name)
2976
- parameters = get_stack_parameters(stack_name)
2977
- currently_deployed_version = nil
2978
- parameters.each do |parameter|
2979
- if parameter.parameter_key == "ReleaseVersion"
2980
- currently_deployed_version = parameter.parameter_value
2981
- end
2982
- end
2983
- currently_deployed_version
2984
- end
2985
-
2986
- def decrypt_secrets()
2987
- docker_image = "prima/biscuit_populate_configs"
2988
- [
2989
- "docker pull #{docker_image}",
2990
- "docker run -t --rm -v $HOME/.aws:/root/.aws -w $PWD --volumes-from #{get_host_container_name} #{docker_image}"
2991
- ].each do |cmd|
2992
- execute_command cmd
2993
- end
2994
- end
2995
-
2996
- def get_host_container_name()
2997
- if @host_container_name
2998
- @host_container_name
2999
- else
3000
- hostname = `cat /etc/hostname`.gsub("\n", '')
3001
- execute_command "docker ps | grep #{hostname} | awk '{print $NF}'"
3002
- @host_container_name = `docker ps | grep #{hostname} | awk '{print $NF}'`.gsub("\n", '')
3003
- # @host_container_name = `docker ps | grep #{hostname} | sed -r "s/.+ ([^\s].+)$/\1/p"`
3004
- end
3005
- end
3006
-
3007
764
  def select_branches(project_names = nil)
3008
765
  output "Deploy feature menu"
3009
766
  if project_names.nil?
@@ -3017,14 +774,6 @@ class Release
3017
774
  end
3018
775
  end
3019
776
  end
3020
-
3021
- def get_ami_id(stack_name)
3022
- get_stack_parameters(stack_name).each do |param|
3023
- if param.parameter_key == "AMIID"
3024
- return param.parameter_value
3025
- end
3026
- end
3027
- end
3028
777
  end
3029
778
 
3030
779
  def help_content