prima-twig 0.62.17 → 0.62.100

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -22,18 +22,9 @@ class Release
22
22
  exec "twig feature #{ARGV.join ' '}"
23
23
  end
24
24
  end
25
- @batch = Aws::Batch::Client.new
26
- @s3 = Aws::S3::Client.new
27
- @s3_bucket = 'prima-artifacts'
28
- @artifact_path = '/tmp/prima-artifact.zip'
29
- @import_db_task = 'arn:aws:ecs:eu-west-1:001575623345:task-definition/ecs-task-db-restore2-TaskDefinitionDbRestore-4UBHMCZBE5WM:1'
30
- @cf_role = 'arn:aws:iam::001575623345:role/qainit-service-role-cloudformat-CloudformationRole-18KBZQIS148R9'
31
25
  @dns_record_identifier = nil
32
26
  @ecs_cluster_name = nil
33
27
  @deploy_update = false
34
- @qainit = false
35
- @qainit_host_folder = "/var/ci/#{ENV['DRONE_REPO']}/#{ENV['DRONE_BRANCH']}/#{ENV['DRONE_BUILD_NUMBER']}"
36
- @qainit_folder = "/drone/src/github.com/project/primait/qainit"
37
28
  @projects = {
38
29
  'prima' => {},
39
30
  'urania' => {},
@@ -42,7 +33,7 @@ class Release
42
33
  'hal9000' => {},
43
34
  'fidaty' => {},
44
35
  'peano' => {},
45
- 'rogoreport' => {},
36
+ # 'rogoreport' => {},
46
37
  'assange' => {},
47
38
  'borat' => {},
48
39
  'crash' => {},
@@ -74,8 +65,6 @@ class Release
74
65
  qainit_deploy_shutdown!
75
66
  elsif 'update' == args[1]
76
67
  qainit_deploy_update!
77
- elsif 'read' == args[1]
78
- qainit_read_config! args[2]
79
68
  else
80
69
  if args[1]
81
70
  select_branches(args[1..-1])
@@ -92,19 +81,8 @@ class Release
92
81
  end
93
82
  when 'deploy'
94
83
  abort('Non sei nella cartella di artemide') unless Dir.pwd.match 'artemide$'
95
- if ['terminate', 'stop', 'shutdown', 'halt', 'destroy'].include? args[1]
96
- deploy_shutdown!
97
- elsif 'update' == args[1]
98
- deploy_update!
99
- elsif 'lock' == args[1]
84
+ if 'lock' == args[1]
100
85
  deploy_lock!
101
- else
102
- if args[1]
103
- select_branches(args[1])
104
- else
105
- select_branches
106
- end
107
- deploy_feature!
108
86
  end
109
87
  when 'aggregator'
110
88
  if 'enable' == args[1]
@@ -474,8 +452,10 @@ class Release
474
452
 
475
453
  File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }
476
454
 
455
+ update_drone_yml!
456
+
477
457
  `git add projects && \
478
- git add branch_names && \
458
+ git add branch_names .drone.yml && \
479
459
  git commit -m '#{branch_name}' && \
480
460
  git push -f --set-upstream origin #{branch_name} && \
481
461
  git checkout master`
@@ -519,7 +499,10 @@ class Release
519
499
 
520
500
  File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }
521
501
 
522
- `git commit -am 'update'`
502
+ update_drone_yml!
503
+
504
+ `git add branch_names .drone.yml`
505
+ `git commit -m 'update'`
523
506
  `git push && git checkout master`
524
507
  end
525
508
 
@@ -597,10 +580,6 @@ class Release
597
580
  output "Cancello il record DNS utilizzato da Lighthouse"
598
581
  delete_lighthouse_dns()
599
582
  output "Finito!".green
600
-
601
- if @qainit
602
- qainit_write_output("QA #{ENV["DRONE_BRANCH"]} spento.", 'Indirizzi scritti su ')
603
- end
604
583
  end
605
584
 
606
585
  def qainit_write_output(file_message, output_message)
@@ -610,41 +589,16 @@ class Release
610
589
  output "#{output_message} #{qa_file_name}".green
611
590
  end
612
591
 
613
- def qainit_read_config!(action)
614
- projects = ''
615
-
616
- File.open('branch_names', 'r') do |file|
617
- file.each_line do |line|
618
- projects = JSON.parse(line)
619
- end
592
+ def update_drone_yml!()
593
+ drone_yml = File.read('.drone.yml')
594
+ @projects.each do |key, project|
595
+ drone_yml = drone_yml.gsub(/#{key}@.+\n/, "#{key}@#{project['revision']}\n")
620
596
  end
621
-
622
- projects.each do |key, project|
623
- @projects[key] = project
624
- end
625
-
626
- get_s3_config_files
627
- @qainit = true
628
- case action
629
- when 'shutdown'
630
- output 'Shutting down'.green
631
- qainit_drone_shutdown!
632
- else
633
- output 'Starting standard deploy'.green
634
- deploy_feature!
597
+ File.open(".drone.yml", "w") do |f|
598
+ f.write(drone_yml)
635
599
  end
636
600
  end
637
601
 
638
- def get_s3_config_files
639
- # manteniamo la struttura per lanciarlo facilmente anche da locale
640
- `mkdir -p cloudformation/stacks/task cloudformation/stacks/route53 cloudformation/stacks/asg cloudformation/stacks/elb`
641
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/task/db.yml', response_target: 'cloudformation/stacks/task/db.yml'})
642
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/qa.yml', response_target: 'cloudformation/stacks/route53/qa.yml'})
643
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/asg/ecs-asg-allinone.yml', response_target: 'cloudformation/stacks/asg/ecs-asg-allinone.yml'})
644
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/ecs-cluster.yml', response_target: 'cloudformation/stacks/ecs-cluster.yml'})
645
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/elb/alb-public-qa.yml', response_target: 'cloudformation/stacks/elb/alb-public-qa.yml'})
646
- end
647
-
648
602
  def get_deploy_id
649
603
  if @deploy_id
650
604
  @deploy_id
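
Note: the hunk above introduces update_drone_yml!, which pins each project reference in .drone.yml to the revision selected for the QA branch. Below is a minimal standalone sketch of that rewrite; only the gsub pattern comes from the diff, while the projects hash and the .drone.yml snippet are made-up illustrative data, not content of the package.

  # Illustrative sketch only — mirrors the gsub-based rewrite shown in the hunk above.
  # The hash and the YAML fragment are hypothetical examples.
  projects = {
    'prima'  => { 'revision' => 'abc1234' },
    'urania' => { 'revision' => 'def5678' }
  }

  drone_yml = "  - git clone prima@0000000\n  - git clone urania@1111111\n"
  projects.each do |key, project|
    # Replace whatever follows "name@" up to the end of the line with the pinned revision.
    drone_yml = drone_yml.gsub(/#{key}@.+\n/, "#{key}@#{project['revision']}\n")
  end

  puts drone_yml
  #   - git clone prima@abc1234
  #   - git clone urania@def5678
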
@@ -654,1264 +608,6 @@ class Release
654
608
  end
655
609
  end
656
610
 
657
- def deploy_feature!
658
- `git pull && git submodule init && git submodule update`
659
- @ami_id = get_ami_id("ecs-fleet-allinone-staging")
660
- deploy_id = get_deploy_id
661
- stack_name_alb = 'ecs-alb-http-public-qa-' + deploy_id[0..5]
662
- stack_name_alb_ws = 'ecs-alb-ws-public-qa-' + deploy_id[0..5]
663
- unless @qainit
664
- @projects.each_key do |project_key|
665
- if @projects[project_key]['revision']
666
- git_checkout_version(project_key, @projects[project_key]['revision'])
667
- end
668
- end
669
- end
670
- @dns_record_identifier = deploy_id
671
- @git_branch = ENV['DRONE_BRANCH']
672
- hostname_pattern_priority = hostname_pattern_priority()
673
- tags = [
674
- {
675
- key: "qainit",
676
- value: @git_branch
677
- },
678
- {
679
- key: "hostname_pattern_priority",
680
- value: hostname_pattern_priority
681
- }
682
- ]
683
- @projects.each do |key, value|
684
- case key.to_s
685
- when 'crash'
686
- tags << { key: 'crash', value: @projects['crash']['name'] } if deploy_crash?
687
- when 'starsky', 'hutch'
688
- tags << { key: key.to_s, value: @projects[key.to_s]['name'] } if deploy_starsky_hutch?
689
- else
690
- tags << { key: key, value: value['name'] }
691
- end
692
- end
693
-
694
- cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
695
-
696
- if stack_exists?(cluster_stack_name)
697
- tags = get_stack_tags(cluster_stack_name)
698
- hostname_pattern_priority = tags.detect do |tag|
699
- tag.key == 'hostname_pattern_priority'
700
- end.value
701
- end
702
-
703
- create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
704
- wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
705
-
706
- create_alb_stack(stack_name_alb, "http", deploy_id) unless stack_exists?(stack_name_alb)
707
- create_alb_stack(stack_name_alb_ws, "websocket", deploy_id) unless stack_exists?(stack_name_alb_ws)
708
-
709
- resp = describe_stack_resource(cluster_stack_name, 'ECSCluster')
710
- @ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
711
-
712
- asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
713
- create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
714
-
715
- stack_name_db = "ecs-task-db-qa-#{deploy_id}"
716
- stack_body = IO.read('cloudformation/stacks/task/db.yml')
717
- parameters = [
718
- {
719
- parameter_key: "Environment",
720
- parameter_value: "qa"
721
- },
722
- {
723
- parameter_key: "ECSClusterName",
724
- parameter_value: @ecs_cluster_name
725
- }
726
- ]
727
- create_stack(stack_name_db, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_db) # creazione asincrona stack dei db (~4 min)
728
-
729
- output "check pyxis \n".yellow
730
-
731
- create_pyxis_artifact(@projects["pyxis-npm"]['revision'], deploy_id) unless @projects["pyxis-npm"].nil? # deve essere creato prima di quello di prima, per avere la versione
732
- create_prima_artifact(@projects["prima"]['revision'], @projects["prima"]['name'], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"]['revision']}.tar.gz")
733
- # l'artefatto di prima viene creato sempre (puntamenti all'ambiente compilati nel js) e richiede molto più di 4 minuti
734
- wait_for_stack_ready(stack_name_db) unless stack_ready?(stack_name_db) # dovrebbe essere istantaneo
735
- db_task = ''
736
- db_task = import_dbs(ec2_ip_address(asg_stack_name)) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # import asincrono dei dati
737
-
738
- create_crash_artifact(@projects['crash']['revision'], deploy_id) unless !deploy_crash? || artifact_exists?('prima-artifacts-encrypted', "microservices/crash/#{@projects['crash']['revision']}-qa.tar.gz")
739
- create_urania_artifact(@projects["urania"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/urania/#{@projects["urania"]['revision']}-qa.tar.gz")
740
- create_roger_artifact(@projects["roger"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/roger/#{@projects["roger"]['revision']}-qa.tar.gz")
741
- create_ermes_artifact(@projects["ermes"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/ermes/#{@projects["ermes"]['revision']}-qa.tar.gz")
742
- create_bburago_artifact(@projects["bburago"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/bburago/#{@projects["bburago"]['revision']}-qa.tar.gz")
743
- create_hal9000_artifact(@projects["hal9000"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/hal9000/#{@projects["hal9000"]['revision']}-qa.tar.gz")
744
- create_rachele_artifact(@projects["rachele"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rachele/#{@projects["rachele"]['revision']}-qa.tar.gz")
745
- create_fidaty_artifact(@projects["fidaty"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/fidaty/#{@projects["fidaty"]['revision']}-qa.tar.gz")
746
- create_peano_artifact(@projects["peano"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/peano/#{@projects["peano"]['revision']}-qa.tar.gz")
747
- create_rogoreport_artifact(@projects["rogoreport"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rogoreport/rogoreport-#{@projects["rogoreport"]['revision']}-qa.tar.gz")
748
- create_assange_artifact(@projects["assange"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/assange/#{@projects["assange"]['revision']}-qa.tar.gz")
749
- create_borat_artifact(@projects["borat"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/borat/#{@projects["borat"]['revision']}-qa.tar.gz")
750
- create_activia_artifact(@projects["activia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/activia/#{@projects["activia"]['revision']}-qa.tar.gz")
751
- create_leftorium_artifact(@projects["leftorium"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/leftorium/#{@projects["leftorium"]['revision']}-qa.tar.gz")
752
- create_skynet_artifact(@projects["skynet"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/skynet/#{@projects["skynet"]['revision']}-qa.tar.gz")
753
- create_maia_artifact(@projects["maia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/maia/#{@projects["maia"]['revision']}-qa.tar.gz")
754
- create_starsky_artifact(@projects["starsky"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/starsky/#{@projects["starsky"]['revision']}-qa.tar.gz")
755
- create_hutch_artifact(@projects["hutch"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/hutch/#{@projects["hutch"]['revision']}-#{@dns_record_identifier[0..7]}-qa.tar.gz")
756
-
757
-
758
- wait_for_db_import(db_task) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # dovrebbe essere istantaneo
759
-
760
- import_redis_crash(ec2_ip_address(asg_stack_name)) if deploy_crash? && !stack_exists?("ecs-task-web-qa-#{deploy_id}")
761
-
762
- wait_for_stack_ready(stack_name_alb) unless stack_ready?(stack_name_alb)
763
- wait_for_stack_ready(stack_name_alb_ws) unless stack_ready?(stack_name_alb_ws)
764
-
765
- stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
766
- stack_body = IO.read('cloudformation/stacks/route53/qa.yml')
767
- parameters = [
768
- {
769
- parameter_key: "DnsRecordIdentifier",
770
- parameter_value: @dns_record_identifier
771
- },
772
- {
773
- parameter_key: "PrimaElbHostname",
774
- parameter_value: get_alb_host(stack_name_alb)
775
- },
776
- {
777
- parameter_key: "UraniaIp",
778
- parameter_value: ec2_ip_address(asg_stack_name)
779
- },
780
- {
781
- parameter_key: "BburagoIp",
782
- parameter_value: ec2_ip_address(asg_stack_name)
783
- },
784
- {
785
- parameter_key: "Hal9000Ip",
786
- parameter_value: ec2_ip_address(asg_stack_name)
787
- },
788
- {
789
- parameter_key: "FidatyIp",
790
- parameter_value: ec2_ip_address(asg_stack_name)
791
- },
792
- {
793
- parameter_key: "PeanoIp",
794
- parameter_value: ec2_ip_address(asg_stack_name)
795
- },
796
- {
797
- parameter_key: "ErmesIp",
798
- parameter_value: ec2_ip_address(asg_stack_name)
799
- },
800
- {
801
- parameter_key: "ActiviaIp",
802
- parameter_value: ec2_ip_address(asg_stack_name)
803
- },
804
- {
805
- parameter_key: "SkynetIp",
806
- parameter_value: ec2_ip_address(asg_stack_name)
807
- },
808
- {
809
- parameter_key: "RogerIp",
810
- parameter_value: ec2_ip_address(asg_stack_name)
811
- },
812
- {
813
- parameter_key: "LeftoriumIp",
814
- parameter_value: ec2_ip_address(asg_stack_name)
815
- },
816
- {
817
- parameter_key: "RacheleIp",
818
- parameter_value: ec2_ip_address(asg_stack_name)
819
- },
820
- {
821
- parameter_key: "RedisIp",
822
- parameter_value: ec2_ip_address(asg_stack_name)
823
- },
824
- {
825
- parameter_key: "AssangeElbHostname",
826
- parameter_value: get_alb_host(stack_name_alb)
827
- },
828
- {
829
- parameter_key: "BoratElbHostname",
830
- parameter_value: get_alb_host(stack_name_alb_ws)
831
- },
832
- {
833
- parameter_key: 'CrashElbHostname',
834
- parameter_value: get_alb_host(stack_name_alb_ws)
835
- },
836
- {
837
- parameter_key: 'StarskyElbHostname',
838
- parameter_value: get_alb_host(stack_name_alb)
839
- },
840
- {
841
- parameter_key: 'HutchElbHostname',
842
- parameter_value: get_alb_host(stack_name_alb)
843
- },
844
- {
845
- parameter_key: 'MaiaElbHostname',
846
- parameter_value: get_alb_host(stack_name_alb)
847
- }
848
- ]
849
-
850
- create_stack(stack_name_route53, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_route53)
851
- wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
852
-
853
- stack_name_skynet = "ecs-task-skynet-qa-#{deploy_id}"
854
- git_checkout_version('skynet', @projects["skynet"]['revision'])
855
- stack_body = File.read('projects/skynet/deploy/task.yml')
856
- parameters = [
857
- {
858
- parameter_key: "Environment",
859
- parameter_value: "qa"
860
- },
861
- {
862
- parameter_key: "ReleaseVersion",
863
- parameter_value: @projects["skynet"]['revision']
864
- },
865
- {
866
- parameter_key: "TaskDesiredCount",
867
- parameter_value: "1"
868
- },
869
- {
870
- parameter_key: "ECSClusterName",
871
- parameter_value: @ecs_cluster_name
872
- },
873
- {
874
- parameter_key: "HostnamePattern",
875
- parameter_value: "skynet-#{@dns_record_identifier}.qa.colaster.com"
876
- },
877
- {
878
- parameter_key: "HostnamePatternPriority",
879
- parameter_value: hostname_pattern_priority
880
- }
881
- ]
882
- if stack_exists?(stack_name_skynet)
883
- cur_version = get_currently_deployed_version(stack_name_skynet)
884
- update_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["skynet"]['revision'])
885
- else
886
- create_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role)
887
- end
888
-
889
- stack_name_urania = "ecs-task-urania-qa-#{deploy_id}"
890
- git_checkout_version('urania', @projects["urania"]['revision'])
891
- stack_body = File.read('projects/urania/deploy/task.yml')
892
- parameters = [
893
- {
894
- parameter_key: "Environment",
895
- parameter_value: "qa"
896
- },
897
- {
898
- parameter_key: "ReleaseVersion",
899
- parameter_value: @projects["urania"]['revision']
900
- },
901
- {
902
- parameter_key: "TaskDesiredCount",
903
- parameter_value: "1"
904
- },
905
- {
906
- parameter_key: "ECSClusterName",
907
- parameter_value: @ecs_cluster_name
908
- },
909
- {
910
- parameter_key: "HostnamePattern",
911
- parameter_value: "urania-#{@dns_record_identifier}.qa.colaster.com"
912
- },
913
- {
914
- parameter_key: "HostnamePatternPriority",
915
- parameter_value: hostname_pattern_priority
916
- }
917
- ]
918
- if stack_exists?(stack_name_urania)
919
- cur_version = get_currently_deployed_version(stack_name_urania)
920
- update_stack(stack_name_urania, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["urania"]['revision'])
921
- else
922
- create_stack(stack_name_urania, stack_body, parameters, tags, @cf_role)
923
- end
924
-
925
- stack_name_ermes = "ecs-task-ermes-qa-#{deploy_id}"
926
- git_checkout_version('ermes', @projects["ermes"]['revision'])
927
- stack_body = File.read('projects/ermes/deploy/task.yml')
928
- parameters = [
929
- {
930
- parameter_key: "Environment",
931
- parameter_value: "qa"
932
- },
933
- {
934
- parameter_key: "ReleaseVersion",
935
- parameter_value: "#{@projects['ermes']['revision']}"
936
- },
937
- {
938
- parameter_key: "TaskDesiredCount",
939
- parameter_value: "1"
940
- },
941
- {
942
- parameter_key: "ECSClusterName",
943
- parameter_value: @ecs_cluster_name
944
- },
945
- {
946
- parameter_key: "HostnamePattern",
947
- parameter_value: "ermes-#{@dns_record_identifier}.qa.colaster.com"
948
- },
949
- {
950
- parameter_key: "HostnamePatternPriority",
951
- parameter_value: hostname_pattern_priority
952
- },
953
- {
954
- parameter_key: "WebHost",
955
- parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
956
- },
957
- {
958
- parameter_key: "PeanoHost",
959
- parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
960
- }
961
- ]
962
- if stack_exists?(stack_name_ermes)
963
- cur_version = get_currently_deployed_version(stack_name_ermes)
964
- update_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["ermes"]['revision'])
965
- else
966
- create_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role)
967
- end
968
-
969
- stack_name_bburago = "ecs-task-bburago-qa-#{deploy_id}"
970
- git_checkout_version('bburago', @projects["bburago"]['revision'])
971
- stack_body = File.read('projects/bburago/deploy/task.yml')
972
- parameters = [
973
- {
974
- parameter_key: "Environment",
975
- parameter_value: "qa"
976
- },
977
- {
978
- parameter_key: "ReleaseVersion",
979
- parameter_value: @projects["bburago"]['revision']
980
- },
981
- {
982
- parameter_key: "ECSClusterName",
983
- parameter_value: @ecs_cluster_name
984
- },
985
- {
986
- parameter_key: "TaskDesiredCount",
987
- parameter_value: "1"
988
- },
989
- {
990
- parameter_key: "HostnamePattern",
991
- parameter_value: "bburago-#{@dns_record_identifier}.qa.colaster.com"
992
- },
993
- {
994
- parameter_key: "HostnamePatternPriority",
995
- parameter_value: hostname_pattern_priority
996
- }
997
- ]
998
- if stack_exists?(stack_name_bburago)
999
- cur_version = get_currently_deployed_version(stack_name_bburago)
1000
- update_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["bburago"]['revision'])
1001
- else
1002
- create_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role)
1003
- end
1004
-
1005
- stack_name_hal9000 = "ecs-task-hal9000-qa-#{deploy_id}"
1006
- git_checkout_version('hal9000', @projects["hal9000"]['revision'])
1007
- stack_body = File.read('projects/hal9000/deploy/task.yml')
1008
- parameters = [
1009
- {
1010
- parameter_key: "Environment",
1011
- parameter_value: "qa"
1012
- },
1013
- {
1014
- parameter_key: "ReleaseVersion",
1015
- parameter_value: @projects["hal9000"]['revision']
1016
- },
1017
- {
1018
- parameter_key: "ECSClusterName",
1019
- parameter_value: @ecs_cluster_name
1020
- },
1021
- {
1022
- parameter_key: "TaskDesiredCount",
1023
- parameter_value: "1"
1024
- },
1025
- {
1026
- parameter_key: "HostnamePattern",
1027
- parameter_value: "hal9000-#{@dns_record_identifier}.qa.colaster.com"
1028
- },
1029
- {
1030
- parameter_key: "HostnamePatternPriority",
1031
- parameter_value: hostname_pattern_priority
1032
- }
1033
- ]
1034
- if stack_exists?(stack_name_hal9000)
1035
- cur_version = get_currently_deployed_version(stack_name_hal9000)
1036
- update_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hal9000"]['revision'])
1037
- else
1038
- create_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role)
1039
- end
1040
-
1041
- stack_name_fidaty = "ecs-task-fidaty-qa-#{deploy_id}"
1042
- git_checkout_version('fidaty', @projects["fidaty"]['revision'])
1043
- stack_body = File.read('projects/fidaty/deploy/task.yml')
1044
- parameters = [
1045
- {
1046
- parameter_key: "Environment",
1047
- parameter_value: "qa"
1048
- },
1049
- {
1050
- parameter_key: "ReleaseVersion",
1051
- parameter_value: "#{@projects["fidaty"]['revision']}"
1052
- },
1053
- {
1054
- parameter_key: "ECSClusterName",
1055
- parameter_value: @ecs_cluster_name
1056
- },
1057
- {
1058
- parameter_key: "TaskDesiredCount",
1059
- parameter_value: "1"
1060
- },
1061
- {
1062
- parameter_key: "HostnamePattern",
1063
- parameter_value: "fidaty-#{@dns_record_identifier}.qa.colaster.com"
1064
- },
1065
- {
1066
- parameter_key: "HostnamePatternPriority",
1067
- parameter_value: hostname_pattern_priority
1068
- },
1069
- {
1070
- parameter_key: "PeanoHost",
1071
- parameter_value: get_route53_hostname("ecs-task-peano-qa-notneeded")
1072
- }
1073
- ]
1074
- if stack_exists?(stack_name_fidaty)
1075
- cur_version = get_currently_deployed_version(stack_name_fidaty)
1076
- update_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["fidaty"]['revision'])
1077
- else
1078
- create_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role)
1079
- end
1080
-
1081
- stack_name_peano = "ecs-task-peano-qa-#{deploy_id}"
1082
- git_checkout_version('peano', @projects["peano"]['revision'])
1083
- stack_body = File.read('projects/peano/deploy/task.yml')
1084
- parameters = [
1085
- {
1086
- parameter_key: "Environment",
1087
- parameter_value: "qa"
1088
- },
1089
- {
1090
- parameter_key: "ReleaseVersion",
1091
- parameter_value: "#{@projects['peano']['revision']}"
1092
- },
1093
- {
1094
- parameter_key: "ECSClusterName",
1095
- parameter_value: @ecs_cluster_name
1096
- },
1097
- {
1098
- parameter_key: "TaskDesiredCount",
1099
- parameter_value: "1"
1100
- },
1101
- {
1102
- parameter_key: "HostnamePattern",
1103
- parameter_value: "peano-#{@dns_record_identifier}.qa.colaster.com"
1104
- },
1105
- {
1106
- parameter_key: "HostnamePatternPriority",
1107
- parameter_value: hostname_pattern_priority
1108
- },
1109
- {
1110
- parameter_key: "WebHost",
1111
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1112
- },
1113
- {
1114
- parameter_key: "AssangeHost",
1115
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1116
- }
1117
- ]
1118
- if stack_exists?(stack_name_peano)
1119
- cur_version = get_currently_deployed_version(stack_name_peano)
1120
- update_stack(stack_name_peano, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["peano"]['revision'])
1121
- else
1122
- create_stack(stack_name_peano, stack_body, parameters, tags, @cf_role)
1123
- end
1124
-
1125
- stack_name_rogoreport = "ecs-task-rogoreport-qa-#{deploy_id}"
1126
- git_checkout_version('rogoreport', @projects["rogoreport"]['revision'])
1127
- stack_body = IO.read('projects/rogoreport/deploy/task.yml')
1128
- parameters = [
1129
- {
1130
- parameter_key: "Environment",
1131
- parameter_value: "qa"
1132
- },
1133
- {
1134
- parameter_key: "ReleaseVersion",
1135
- parameter_value: "#{@projects["rogoreport"]['revision']}"
1136
- },
1137
- {
1138
- parameter_key: "ReleaseName",
1139
- parameter_value: "rogoreport"
1140
- },
1141
- {
1142
- parameter_key: "ECSClusterName",
1143
- parameter_value: @ecs_cluster_name
1144
- }
1145
- ]
1146
- if stack_exists?(stack_name_rogoreport)
1147
- cur_version = get_currently_deployed_version(stack_name_rogoreport)
1148
- update_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["rogoreport"]['revision'])
1149
- else
1150
- create_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role)
1151
- end
1152
-
1153
- stack_name_assange = "ecs-task-assange-qa-#{deploy_id}"
1154
- git_checkout_version('assange', @projects["assange"]['revision'])
1155
- stack_body = IO.read('projects/assange/deploy/task.yml')
1156
- parameters = [
1157
- {
1158
- parameter_key: "Environment",
1159
- parameter_value: "qa"
1160
- },
1161
- {
1162
- parameter_key: "ReleaseVersion",
1163
- parameter_value: "#{@projects["assange"]['revision']}"
1164
- },
1165
- {
1166
- parameter_key: "ECSClusterName",
1167
- parameter_value: @ecs_cluster_name
1168
- },
1169
- {
1170
- parameter_key: "TaskDesiredCount",
1171
- parameter_value: "1"
1172
- },
1173
- {
1174
- parameter_key: "ALBShortName",
1175
- parameter_value: "assange-qa-#{deploy_id}"[0..27]
1176
- },
1177
- {
1178
- parameter_key: "HostnamePattern",
1179
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1180
- },
1181
- {
1182
- parameter_key: "HostnamePatternPriority",
1183
- parameter_value: (hostname_pattern_priority.to_i + 20).to_s
1184
- },
1185
- {
1186
- parameter_key: "EnvHash",
1187
- parameter_value: deploy_id
1188
- },
1189
- {
1190
- parameter_key: "WebHost",
1191
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1192
- },
1193
- {
1194
- parameter_key: "AssangeHost",
1195
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1196
- }
1197
- ]
1198
- if stack_exists?(stack_name_assange)
1199
- cur_version = get_currently_deployed_version(stack_name_assange)
1200
- update_stack(stack_name_assange, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["assange"]['revision'])
1201
- else
1202
- create_stack(stack_name_assange, stack_body, parameters, tags, @cf_role)
1203
- end
1204
-
1205
- stack_name_leftorium = "ecs-task-leftorium-qa-#{deploy_id}"
1206
- git_checkout_version('leftorium', @projects["leftorium"]['revision'])
1207
- stack_body = File.read('projects/leftorium/deploy/task.yml')
1208
- parameters = [
1209
- {
1210
- parameter_key: "Environment",
1211
- parameter_value: "qa"
1212
- },
1213
- {
1214
- parameter_key: "ReleaseVersion",
1215
- parameter_value: "#{@projects["leftorium"]['revision']}"
1216
- },
1217
- {
1218
- parameter_key: "ECSClusterName",
1219
- parameter_value: @ecs_cluster_name
1220
- },
1221
- {
1222
- parameter_key: "TaskDesiredCount",
1223
- parameter_value: "1"
1224
- },
1225
- {
1226
- parameter_key: "HostnamePattern",
1227
- parameter_value: "leftorium-#{@dns_record_identifier}.qa.colaster.com"
1228
- },
1229
- {
1230
- parameter_key: "HostnamePatternPriority",
1231
- parameter_value: hostname_pattern_priority
1232
- }
1233
- ]
1234
- if stack_exists?(stack_name_leftorium)
1235
- cur_version = get_currently_deployed_version(stack_name_leftorium)
1236
- update_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["leftorium"]['revision'])
1237
- else
1238
- create_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role)
1239
- end
1240
-
1241
- stack_name_rachele = "ecs-task-rachele-qa-#{deploy_id}"
1242
- git_checkout_version('rachele', @projects["rachele"]['revision'])
1243
- stack_body = File.read('projects/rachele/deploy/task.yml')
1244
- parameters = [
1245
- {
1246
- parameter_key: "Environment",
1247
- parameter_value: "qa"
1248
- },
1249
- {
1250
- parameter_key: "ReleaseVersion",
1251
- parameter_value: "#{@projects["rachele"]['revision']}"
1252
- },
1253
- {
1254
- parameter_key: "ECSClusterName",
1255
- parameter_value: @ecs_cluster_name
1256
- },
1257
- {
1258
- parameter_key: "TaskDesiredCount",
1259
- parameter_value: "1"
1260
- },
1261
- {
1262
- parameter_key: "WebHost",
1263
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1264
- },
1265
- {
1266
- parameter_key: "HostnamePattern",
1267
- parameter_value: "rachele-#{@dns_record_identifier}.qa.colaster.com"
1268
- },
1269
- {
1270
- parameter_key: "HostnamePatternPriority",
1271
- parameter_value: hostname_pattern_priority
1272
- }
1273
- ]
1274
- if stack_exists?(stack_name_rachele)
1275
- cur_version = get_currently_deployed_version(stack_name_rachele)
1276
- unless cur_version.include?(@projects["rachele"]['revision'])
1277
- delete_stack(stack_name_rachele)
1278
- wait_for_stack_removal(stack_name_rachele)
1279
- create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1280
- end
1281
- else
1282
- create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1283
- end
1284
-
1285
- stack_name_borat = "ecs-task-borat-qa-#{deploy_id}"
1286
- git_checkout_version('borat', @projects["borat"]['revision'])
1287
- stack_body = IO.read('projects/borat/deploy/task.yml')
1288
- parameters = [
1289
- {
1290
- parameter_key: "Environment",
1291
- parameter_value: "qa"
1292
- },
1293
- {
1294
- parameter_key: "ReleaseVersion",
1295
- parameter_value: "#{@projects["borat"]['revision']}"
1296
- },
1297
- {
1298
- parameter_key: "ECSClusterName",
1299
- parameter_value: @ecs_cluster_name
1300
- },
1301
- {
1302
- parameter_key: "TaskDesiredCount",
1303
- parameter_value: "1"
1304
- },
1305
- {
1306
- parameter_key: "ALBShortName",
1307
- parameter_value: "borat-qa-#{deploy_id}"[0..27]
1308
- },
1309
- {
1310
- parameter_key: "HostnamePattern",
1311
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1312
- },
1313
- {
1314
- parameter_key: "HostnamePatternPriority",
1315
- parameter_value: (hostname_pattern_priority.to_i + 30).to_s
1316
- },
1317
- {
1318
- parameter_key: "EnvHash",
1319
- parameter_value: deploy_id
1320
- },
1321
- {
1322
- parameter_key: "WsEndpoint",
1323
- parameter_value: "wss://backoffice-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1324
- },
1325
- {
1326
- parameter_key: "GraphqlEndpoint",
1327
- parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql"
1328
- },
1329
- {
1330
- parameter_key: "GraphqlInsuranceEndpoint",
1331
- parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql/insurance"
1332
- },
1333
- {
1334
- parameter_key: "AuthEndpoint",
1335
- parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/auth"
1336
- },
1337
- {
1338
- parameter_key: "FrontendEndpoint",
1339
- parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1340
- }
1341
- ]
1342
- if stack_exists?(stack_name_borat)
1343
- cur_version = get_currently_deployed_version(stack_name_borat)
1344
- update_stack(stack_name_borat, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["borat"]['revision'])
1345
- else
1346
- create_stack(stack_name_borat, stack_body, parameters, tags, @cf_role)
1347
- end
1348
-
1349
- if deploy_crash?
1350
- git_checkout_version('crash', @projects['crash']['revision'])
1351
- stack_name_crash = "ecs-task-crash-qa-#{deploy_id}"
1352
- stack_body = IO.read('projects/crash/deploy/task.yml')
1353
- parameters = [
1354
- {
1355
- parameter_key: 'Environment',
1356
- parameter_value: 'qa'
1357
- },
1358
- {
1359
- parameter_key: 'ReleaseVersion',
1360
- parameter_value: "#{@projects['crash']['revision']}"
1361
- },
1362
- {
1363
- parameter_key: 'TaskDesiredCount',
1364
- parameter_value: '1'
1365
- },
1366
- {
1367
- parameter_key: 'ECSClusterName',
1368
- parameter_value: @ecs_cluster_name
1369
- },
1370
- {
1371
- parameter_key: 'ALBShortName',
1372
- parameter_value: "crash-qa-#{deploy_id}"[0..27]
1373
- },
1374
- {
1375
- parameter_key: 'HostnamePattern',
1376
- parameter_value: "crash-#{@dns_record_identifier}.qa.colaster.com"
1377
- },
1378
- {
1379
- parameter_key: 'HostnamePatternPriority',
1380
- parameter_value: (hostname_pattern_priority.to_i + 10).to_s
1381
- },
1382
- {
1383
- parameter_key: "EnvHash",
1384
- parameter_value: deploy_id
1385
- },
1386
- {
1387
- parameter_key: "WsEndpoint",
1388
- parameter_value: "wss://crash-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1389
- },
1390
- {
1391
- parameter_key: "GraphqlEndpoint",
1392
- parameter_value: "https://crash-#{@dns_record_identifier}.qa.colaster.com/graphql"
1393
- },
1394
- {
1395
- parameter_key: "AuthDomain",
1396
- parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1397
- },
1398
- ]
1399
- if stack_exists?(stack_name_crash)
1400
- cur_version = get_currently_deployed_version(stack_name_crash)
1401
- update_stack(stack_name_crash, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["crash"]['revision'])
1402
- else
1403
- create_stack(stack_name_crash, stack_body, parameters, tags, @cf_role)
1404
- end
1405
- end
1406
-
1407
- if deploy_starsky_hutch?
1408
- stack_name_starsky = "ecs-task-starsky-qa-#{deploy_id}"
1409
- git_checkout_version('starsky', @projects["starsky"]['revision'])
1410
- stack_body = IO.read('projects/starsky/deploy/task.yml')
1411
- parameters = [
1412
- {
1413
- parameter_key: "Environment",
1414
- parameter_value: "qa"
1415
- },
1416
- {
1417
- parameter_key: "ReleaseVersion",
1418
- parameter_value: "#{@projects["starsky"]['revision']}"
1419
- },
1420
- {
1421
- parameter_key: "TaskDesiredCount",
1422
- parameter_value: "1"
1423
- },
1424
- {
1425
- parameter_key: "ECSClusterName",
1426
- parameter_value: @ecs_cluster_name
1427
- },
1428
- {
1429
- parameter_key: "ALBShortName",
1430
- parameter_value: "starsky-qa-#{deploy_id}"[0..27]
1431
- },
1432
- {
1433
- parameter_key: "EnvHash",
1434
- parameter_value: deploy_id
1435
- },
1436
- {
1437
- parameter_key: "HostnamePattern",
1438
- parameter_value: "starsky-#{@dns_record_identifier}.qa.colaster.com"
1439
- },
1440
- {
1441
- parameter_key: "HostnamePatternPriority",
1442
- parameter_value: (hostname_pattern_priority.to_i + 74).to_s
1443
- }
1444
- ]
1445
- if stack_exists?(stack_name_starsky)
1446
- cur_version = get_currently_deployed_version(stack_name_starsky)
1447
- unless cur_version.include?(@projects["starsky"]['revision'])
1448
- delete_stack(stack_name_starsky)
1449
- wait_for_stack_removal(stack_name_starsky)
1450
- create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
1451
- end
1452
- else
1453
- create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
1454
- end
1455
- end
1456
-
1457
- stack_name_activia = "ecs-task-activia-qa-#{deploy_id}"
1458
- git_checkout_version('activia', @projects["activia"]['revision'])
1459
- stack_body = File.read('projects/activia/deploy/task.yml')
1460
- parameters = [
1461
- {
1462
- parameter_key: "Environment",
1463
- parameter_value: "qa"
1464
- },
1465
- {
1466
- parameter_key: "ReleaseVersion",
1467
- parameter_value: "#{@projects["activia"]['revision']}"
1468
- },
1469
- {
1470
- parameter_key: "ECSClusterName",
1471
- parameter_value: @ecs_cluster_name
1472
- },
1473
- {
1474
- parameter_key: "TaskDesiredCount",
1475
- parameter_value: "1"
1476
- },
1477
- {
1478
- parameter_key: "HostnamePattern",
1479
- parameter_value: "activia-#{@dns_record_identifier}.qa.colaster.com"
1480
- },
1481
- {
1482
- parameter_key: "HostnamePatternPriority",
1483
- parameter_value: hostname_pattern_priority
1484
- },
1485
- {
1486
- parameter_key: "WebHost",
1487
- parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
1488
- },
1489
- {
1490
- parameter_key: "PeanoHost",
1491
- parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
1492
- }
1493
- ]
1494
- if stack_exists?(stack_name_activia)
1495
- cur_version = get_currently_deployed_version(stack_name_activia)
1496
- update_stack(stack_name_activia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["activia"]['revision'])
1497
- else
1498
- create_stack(stack_name_activia, stack_body, parameters, tags, @cf_role)
1499
- end
1500
-
1501
- # Waiting for prima healtcheck dependencies
1502
- wait_for_stack_ready(stack_name_skynet) unless stack_ready?(stack_name_skynet)
1503
- wait_for_stack_ready(stack_name_urania) unless stack_ready?(stack_name_urania)
1504
- wait_for_stack_ready(stack_name_bburago) unless stack_ready?(stack_name_bburago)
1505
- wait_for_stack_ready(stack_name_hal9000) unless stack_ready?(stack_name_hal9000)
1506
- wait_for_stack_ready(stack_name_assange) unless stack_ready?(stack_name_assange)
1507
- wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1508
- wait_for_stack_ready(stack_name_leftorium) unless stack_ready?(stack_name_leftorium)
1509
- wait_for_stack_ready(stack_name_rachele) unless stack_ready?(stack_name_rachele)
1510
-
1511
- stack_name_web = "ecs-task-web-qa-#{deploy_id}"
1512
- git_checkout_version('prima', @projects["prima"]['revision'])
1513
- stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
1514
- parameters = [
1515
- {
1516
- parameter_key: "Environment",
1517
- parameter_value: "qa"
1518
- },
1519
- {
1520
- parameter_key: "ReleaseVersion",
1521
- parameter_value: "#{@projects["prima"]['revision']}"
1522
- },
1523
- {
1524
- parameter_key: "TaskDesiredCount",
1525
- parameter_value: "1"
1526
- },
1527
- {
1528
- parameter_key: "ECSClusterName",
1529
- parameter_value: @ecs_cluster_name
1530
- },
1531
- {
1532
- parameter_key: "ALBShortName",
1533
- parameter_value: "web-qa-#{deploy_id}"[0..27]
1534
- },
1535
- {
1536
- parameter_key: "WebQaBaseHostname",
1537
- parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
1538
- },
1539
- {
1540
- parameter_key: "HostnamePatternPriority",
1541
- parameter_value: hostname_pattern_priority
1542
- },
1543
- {
1544
- parameter_key: "HostnamePatternAggregatorPriority",
1545
- parameter_value: (hostname_pattern_priority.to_i + 1).to_s
1546
- },
1547
- {
1548
- parameter_key: "EnvHash",
1549
- parameter_value: deploy_id
1550
- },
1551
- {
1552
- parameter_key: "AssangeHostname",
1553
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1554
- },
1555
- {
1556
- parameter_key: "BackofficeHostname",
1557
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1558
- },
1559
- {
1560
- parameter_key: "WebHostname",
1561
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1562
- },
1563
- {
1564
- parameter_key: "FePrimaDomain",
1565
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1566
- },
1567
- {
1568
- parameter_key: "HostnamePattern",
1569
- parameter_value: "www-#{@dns_record_identifier}.*"
1570
- }
1571
- ]
1572
- if stack_exists?(stack_name_web)
1573
- cur_version = get_currently_deployed_version(stack_name_web)
1574
- update_stack(stack_name_web, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1575
- else
1576
- create_stack(stack_name_web, stack_body, parameters, tags, @cf_role)
1577
- end
1578
-
1579
- stack_name_consumer = "ecs-task-consumer-qa-#{deploy_id}"
1580
- git_checkout_version('prima', @projects["prima"]['revision'])
1581
- stack_body = IO.read('projects/prima/app/cloudformation/tasks/consumer.yml')
1582
- parameters = [
1583
- {
1584
- parameter_key: "Environment",
1585
- parameter_value: "qa"
1586
- },
1587
- {
1588
- parameter_key: "ReleaseVersion",
1589
- parameter_value: "#{@projects["prima"]['revision']}"
1590
- },
1591
- {
1592
- parameter_key: "ECSClusterName",
1593
- parameter_value: @ecs_cluster_name
1594
- },
1595
- {
1596
- parameter_key: "NginxHttpHost",
1597
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1598
- },
1599
- {
1600
- parameter_key: "AssangeHostname",
1601
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1602
- },
1603
- {
1604
- parameter_key: "BackofficeHostname",
1605
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1606
- },
1607
- {
1608
- parameter_key: "WebHostname",
1609
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1610
- },
1611
- {
1612
- parameter_key: "FePrimaDomain",
1613
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1614
- },
1615
- {
1616
- parameter_key: "HostnamePattern",
1617
- parameter_value: "consumer-#{@dns_record_identifier}.qa.colaster.com"
1618
- },
1619
- {
1620
- parameter_key: "WebQaBaseHostname",
1621
- parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
1622
- }
1623
- ]
1624
- if stack_exists?(stack_name_consumer)
1625
- cur_version = get_currently_deployed_version(stack_name_consumer)
1626
- update_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1627
- else
1628
- create_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role)
1629
- end
1630
-
1631
- stack_name_roger = "ecs-task-roger-qa-#{deploy_id}"
1632
- git_checkout_version('roger', @projects["roger"]['revision'])
1633
- stack_body = File.read('projects/roger/deploy/task.yml')
1634
- parameters = [
1635
- {
1636
- parameter_key: "Environment",
1637
- parameter_value: "qa"
1638
- },
1639
- {
1640
- parameter_key: "ReleaseVersion",
1641
- parameter_value: @projects["roger"]['revision']
1642
- },
1643
- {
1644
- parameter_key: "TaskDesiredCount",
1645
- parameter_value: "1"
1646
- },
1647
- {
1648
- parameter_key: "ECSClusterName",
1649
- parameter_value: @ecs_cluster_name
1650
- },
1651
- {
1652
- parameter_key: "HostnamePattern",
1653
- parameter_value: "roger-#{@dns_record_identifier}.qa.colaster.com"
1654
- },
1655
- {
1656
- parameter_key: "HostnamePatternPriority",
1657
- parameter_value: hostname_pattern_priority
1658
- }
1659
- ]
1660
- if stack_exists?(stack_name_roger)
1661
- cur_version = get_currently_deployed_version(stack_name_roger)
1662
- update_stack(stack_name_roger, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["roger"]['revision'])
1663
- else
1664
- create_stack(stack_name_roger, stack_body, parameters, tags, @cf_role)
1665
- end
1666
-
1667
-
1668
- if deploy_starsky_hutch?
1669
- wait_for_stack_ready(stack_name_starsky) unless stack_ready?(stack_name_starsky) unless !deploy_starsky_hutch?
1670
-
1671
- stack_name_hutch = "ecs-task-hutch-qa-#{deploy_id}"
1672
- git_checkout_version('hutch', @projects["hutch"]['revision'])
1673
- stack_body = File.read('projects/hutch/deploy/task.yml')
1674
- parameters = [
1675
- {
1676
- parameter_key: "Environment",
1677
- parameter_value: "qa"
1678
- },
1679
- {
1680
- parameter_key: "ReleaseVersion",
1681
- parameter_value: "#{@projects["hutch"]['revision']}-#{@dns_record_identifier[0..7]}"
1682
- },
1683
- {
1684
- parameter_key: "ALBShortName",
1685
- parameter_value: "hutch-qa-#{deploy_id}"[0..27]
1686
- },
1687
- {
1688
- parameter_key: "ECSClusterName",
1689
- parameter_value: @ecs_cluster_name
1690
- },
1691
- {
1692
- parameter_key: "EnvHash",
1693
- parameter_value: deploy_id
1694
- },
1695
- {
1696
- parameter_key: "HostnamePattern",
1697
- parameter_value: "hutch-#{@dns_record_identifier}.qa.colaster.com"
1698
- },
1699
- {
1700
- parameter_key: "HostnamePatternPriority",
1701
- parameter_value: (hostname_pattern_priority.to_i + 254).to_s
1702
- },
1703
- {
1704
- parameter_key: "ApiUrl",
1705
- parameter_value: "https://#{get_route53_hostname('maia-intermediari')}"
1706
- }
1707
- ]
1708
- if stack_exists?(stack_name_hutch)
1709
- cur_version = get_currently_deployed_version(stack_name_hutch)
1710
- update_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hutch"]['revision'])
1711
- else
1712
- create_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role)
1713
- end
1714
- end
1715
-
1716
- stack_name_maia = "ecs-task-maia-qa-#{deploy_id}"
1717
- git_checkout_version('maia', @projects["maia"]['revision'])
1718
- stack_body = File.read('projects/maia/deploy/task.yml')
1719
- parameters = [
1720
- {
1721
- parameter_key: "Environment",
1722
- parameter_value: "qa"
1723
- },
1724
- {
1725
- parameter_key: "ReleaseVersion",
1726
- parameter_value: "#{@projects["maia"]['revision']}"
1727
- },
1728
- {
1729
- parameter_key: "ALBShortName",
1730
- parameter_value: "maia-qa-#{deploy_id}"[0..15]
1731
- },
1732
- {
1733
- parameter_key: "ECSClusterName",
1734
- parameter_value: @ecs_cluster_name
1735
- },
1736
- {
1737
- parameter_key: "EnvHash",
1738
- parameter_value: deploy_id
1739
- },
1740
- {
1741
- parameter_key: "HostnamePatternPublic",
1742
- parameter_value: "api*-#{@dns_record_identifier}.qa.colaster.com"
1743
- },
1744
- {
1745
- parameter_key: "HostnamePatternPriority",
1746
- parameter_value: (hostname_pattern_priority.to_i + 128).to_s
1747
- },
1748
- {
1749
- parameter_key: "ProxyHostnameIntermediari",
1750
- parameter_value: "api-intermediari-#{@dns_record_identifier}.qa.colaster.com"
1751
- },
1752
- {
1753
- parameter_key: "ProxyHostnameApp",
1754
- parameter_value: "api-#{@dns_record_identifier}.qa.colaster.com"
1755
- }
1756
- ]
1757
- if stack_exists?(stack_name_maia)
1758
- cur_version = get_currently_deployed_version(stack_name_maia)
1759
- update_stack(stack_name_maia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["maia"]['revision'])
1760
- else
1761
- create_stack(stack_name_maia, stack_body, parameters, tags, @cf_role)
1762
- end
1763
-
1764
- wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
1765
- wait_for_stack_ready(stack_name_consumer) unless stack_ready?(stack_name_consumer)
1766
- wait_for_stack_ready(stack_name_ermes) unless stack_ready?(stack_name_ermes)
1767
- wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1768
- wait_for_stack_ready(stack_name_peano) unless stack_ready?(stack_name_peano)
1769
- wait_for_stack_ready(stack_name_rogoreport) unless stack_ready?(stack_name_rogoreport)
1770
- wait_for_stack_ready(stack_name_borat) unless stack_ready?(stack_name_borat)
1771
- wait_for_stack_ready(stack_name_activia) unless stack_ready?(stack_name_activia)
1772
- wait_for_stack_ready(stack_name_maia) unless stack_ready?(stack_name_maia)
1773
- wait_for_stack_ready(stack_name_crash) unless stack_ready?(stack_name_crash) unless !deploy_crash?
1774
- wait_for_stack_ready(stack_name_hutch) unless stack_ready?(stack_name_hutch) unless !deploy_starsky_hutch?
1775
-
1776
-
1777
- update_service_defaults(stack_name_web)
1778
- update_service_defaults(stack_name_consumer)
1779
- update_service_defaults(stack_name_urania)
1780
- update_service_defaults(stack_name_ermes)
1781
- update_service_defaults(stack_name_bburago)
1782
- update_service_defaults(stack_name_hal9000)
1783
- update_service_defaults(stack_name_fidaty)
1784
- update_service_defaults(stack_name_peano)
1785
- update_service_defaults(stack_name_rogoreport)
1786
- update_service_defaults(stack_name_assange)
1787
- update_service_defaults(stack_name_borat)
1788
- update_service_defaults(stack_name_activia)
1789
- update_service_defaults(stack_name_skynet)
1790
- update_service_defaults(stack_name_leftorium)
1791
- update_service_defaults(stack_name_rachele)
1792
- update_service_defaults(stack_name_maia)
1793
- update_service_defaults(stack_name_starsky) unless !deploy_starsky_hutch?
1794
- update_service_defaults(stack_name_hutch) unless !deploy_starsky_hutch?
1795
- update_service_defaults(stack_name_crash) unless !deploy_crash?
1796
-
1797
- activia_hostname = get_route53_hostname("activia")
1798
- assange_hostname = get_route53_hostname("assange")
1799
- bburago_hostname = get_route53_hostname("bburago")
1800
- borat_hostname = get_route53_hostname("borat")
1801
- ermes_hostname = get_route53_hostname("ermes")
1802
- fidaty_hostname = get_route53_hostname("fidaty")
1803
- hal9000_hostname = get_route53_hostname("hal9000")
1804
- prima_hostname = get_route53_hostname("web")
1805
- peano_hostname = get_route53_hostname("peano")
1806
- skynet_hostname = get_route53_hostname("skynet")
1807
- urania_hostname = get_route53_hostname("urania")
1808
- roger_hostname = get_route53_hostname("roger")
1809
- leftorium_hostname = get_route53_hostname("leftorium")
1810
- rachele_hostname = get_route53_hostname("rachele")
1811
- maia_app_hostname = get_route53_hostname("maia-app")
1812
- maia_intermediari_hostname = get_route53_hostname("maia-intermediari")
1813
- crash_hostname = get_route53_hostname("crash") unless !deploy_crash?
1814
- starsky_hostname = get_route53_hostname("starsky") unless !deploy_starsky_hutch?
1815
- hutch_hostname = get_route53_hostname("hutch") unless !deploy_starsky_hutch?
1816
-
1817
- launch_mimo(deploy_id) if deploy_starsky_hutch?
1818
-
1819
- projects_text = "
1820
- > Prima url: https://#{prima_hostname}
1821
- > Backoffice (Borat) url: https://#{borat_hostname}
1822
- > Urania url: http://#{urania_hostname}:81
1823
- > Bburago url: http://#{bburago_hostname}:83
1824
- > Ermes url: http://#{ermes_hostname}:10002
1825
- > Hal9000 url: http://#{hal9000_hostname}:10031
1826
- > Fidaty url: http://#{fidaty_hostname}:10021
1827
- > Peano url: http://#{peano_hostname}:10039
1828
- > Assange url: https://#{assange_hostname}
1829
- > Activia url: http://#{activia_hostname}:10041
1830
- > Skynet url: http://#{skynet_hostname}:8050
1831
- > Roger url: http://#{roger_hostname}:10051
1832
- > Leftorium url: http://#{leftorium_hostname}:10061
1833
- > Rachele url: http://#{rachele_hostname}:10040
1834
- > Maia App url: https://#{maia_app_hostname}
1835
- > Maia Intermediari url: https://#{maia_intermediari_hostname}"
1836
- projects_text.concat "
1837
- > Crash url: https://#{crash_hostname}" if deploy_crash?
1838
- projects_text.concat "
1839
- > Starsky url: https://#{starsky_hostname}
1840
- > Hutch url: https://#{hutch_hostname}" if deploy_starsky_hutch?
1841
- projects_text.concat "
1842
- > RabbitMQ url: http://#{ec2_ip_address(asg_stack_name)}:15672
1843
- > Supervisor url: http://#{ec2_ip_address(asg_stack_name)}:9001
1844
- > Elasticsearch url: http://#{ec2_ip_address(asg_stack_name)}:9200
1845
- > SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
1846
- output projects_text.cyan
1847
- output "Deploy effettuato, everything is awesome!\n".green
1848
-
1849
- if @projects['prima']['name'] != 'master' then
1850
- output "Lancio il batch job per la visual regression..."
1851
- launch_bocelli_test(prima_hostname)
1852
- output "Visual regression lanciata con successo!"
1853
-
1854
- output "Lancio i test con Lighthouse..."
1855
- launch_lighthouse_test(prima_hostname, "mobile")
1856
- launch_lighthouse_test(prima_hostname, "desktop")
1857
- output "Test con Lighthouse lanciati con successo..."
1858
- end
1859
-
1860
- qainit_write_output(projects_text, 'Indirizzi scritti su ')
1861
- end
1862
-
1863
- def get_route53_hostname(project)
1864
- case
1865
- when project.include?('web')
1866
- host = "www-#{@dns_record_identifier}.qa.colaster.com"
1867
- when project.include?('urania')
1868
- host = "urania-#{@dns_record_identifier}.qa.colaster.com"
1869
- when project.include?('bburago')
1870
- host = "bburago-#{@dns_record_identifier}.qa.colaster.com"
1871
- when project.include?('hal9000')
1872
- host = "hal9000-#{@dns_record_identifier}.qa.colaster.com"
1873
- when project.include?('fidaty')
1874
- host = "fidaty-#{@dns_record_identifier}.qa.colaster.com"
1875
- when project.include?('peano')
1876
- host = "peano-#{@dns_record_identifier}.qa.colaster.com"
1877
- when project.include?('assange')
1878
- host = "assange-#{@dns_record_identifier}.qa.colaster.com"
1879
- when project.include?('borat')
1880
- host = "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1881
- when project.include?('crash')
1882
- host = "crash-#{@dns_record_identifier}.qa.colaster.com"
1883
- when project.include?('ermes')
1884
- host = "ermes-#{@dns_record_identifier}.qa.colaster.com"
1885
- when project.include?('activia')
1886
- host = "activia-#{@dns_record_identifier}.qa.colaster.com"
1887
- when project.include?('skynet')
1888
- host = "skynet-#{@dns_record_identifier}.qa.colaster.com"
1889
- when project.include?('roger')
1890
- host = "roger-#{@dns_record_identifier}.qa.colaster.com"
1891
- when project.include?('leftorium')
1892
- host = "leftorium-#{@dns_record_identifier}.qa.colaster.com"
1893
- when project.include?('rachele')
1894
- host = "rachele-#{@dns_record_identifier}.qa.colaster.com"
1895
- when project.include?('starsky')
1896
- host = "starsky-#{@dns_record_identifier}.qa.colaster.com"
1897
- when project.include?('hutch')
1898
- host = "hutch-#{@dns_record_identifier}.qa.colaster.com"
1899
- when project.include?('maia-app')
1900
- host = "api-#{@dns_record_identifier}.qa.colaster.com"
1901
- when project.include?('maia-intermediari')
1902
- host = "api-intermediari-#{@dns_record_identifier}.qa.colaster.com"
1903
- end
1904
- host
1905
- end
1906
-
1907
- def ec2_ip_address(asg_stack_name)
1908
- resp = describe_stack_resource(asg_stack_name, 'ECSAutoScalingGroup')
1909
- resp = describe_auto_scaling_groups([resp.stack_resource_detail.physical_resource_id], 1)
1910
- instance_id = resp.auto_scaling_groups[0].instances[0].instance_id
1911
- resp = describe_instances([instance_id])
1912
- resp.reservations[0].instances[0].private_ip_address
1913
- end
1914
-
1915
611
  def get_alb_host(stack_name)
1916
612
  case
1917
613
  when stack_name.include?('web')
@@ -1960,695 +656,6 @@ class Release
1960
656
  resp.load_balancers[0].dns_name
1961
657
  end
1962
658
 
1963
- def update_service_defaults(stack_name)
1964
- case
1965
- when stack_name.include?('web')
1966
- logical_resource_id = 'ECSServiceWebQA'
1967
- when stack_name.include?('consumer')
1968
- logical_resource_id = 'ECSServiceConsumerApiQa'
1969
- when stack_name.include?('urania')
1970
- logical_resource_id = 'ECSServiceUraniaQA'
1971
- when stack_name.include?('backoffice')
1972
- logical_resource_id = 'ECSServiceBackoffice'
1973
- when stack_name.include?('ermes')
1974
- logical_resource_id = 'ECSServiceErmesQA'
1975
- when stack_name.include?('bburago')
1976
- logical_resource_id = 'ECSServiceBburagoQA'
1977
- when stack_name.include?('hal9000')
1978
- logical_resource_id = 'ECSServiceHal9000QA'
1979
- when stack_name.include?('fidaty')
1980
- logical_resource_id = 'ECSServiceFidatyQA'
1981
- when stack_name.include?('skynet')
1982
- logical_resource_id = 'ECSServiceSkynetQA'
1983
- when stack_name.include?('roger')
1984
- logical_resource_id = 'ECSServiceRogerQA'
1985
- when stack_name.include?('activia')
1986
- logical_resource_id = 'ECSServiceActiviaQA'
1987
- when stack_name.include?('peano')
1988
- logical_resource_id = 'ECSServicePeanoQA'
1989
- when stack_name.include?('rogoreport')
1990
- logical_resource_id = 'ECSServiceRogoreport'
1991
- when stack_name.include?('assange')
1992
- logical_resource_id = 'ECSServiceAssangeQA'
1993
- when stack_name.include?('borat')
1994
- logical_resource_id = 'ECSServiceBorat'
1995
- when stack_name.include?('leftorium')
1996
- logical_resource_id = 'ECSServiceLeftoriumQA'
1997
- when stack_name.include?('rachele')
1998
- logical_resource_id = 'ECSServiceRacheleQA'
1999
- when stack_name.include?('crash')
2000
- logical_resource_id = 'ECSServiceCrashQA'
2001
- when stack_name.include?('starsky')
2002
- logical_resource_id = 'ECSServiceStarskyQA'
2003
- when stack_name.include?('hutch')
2004
- logical_resource_id = 'ECSServiceHutch'
2005
- when stack_name.include?('maia')
2006
- logical_resource_id = 'ECSServiceMaia'
2007
- else
2008
- raise "Service name non gestito per lo stack #{stack_name}"
2009
- end
2010
- resp = describe_stack_resource(stack_name, logical_resource_id)
2011
- update_ecs_service(@ecs_cluster_name, resp.stack_resource_detail.physical_resource_id, {minimum_healthy_percent: 0, maximum_percent: 100})
2012
- end
2013
-
2014
- def launch_lighthouse_test(url, device)
2015
- @cloudflare.post("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {type: 'CNAME', name: "www-#{@dns_record_identifier}", content: url, proxied: true, ttl: 1}) unless get_lighthouse_dns()
2016
-
2017
- @batch.submit_job({
2018
- job_name: "lighthouse-#{device}-#{@dns_record_identifier}",
2019
- job_queue: "tools-production",
2020
- job_definition: describe_stack_resource('batch-job-lighthouse-production', 'JobDefinition').stack_resource_detail.physical_resource_id,
2021
- container_overrides: {
2022
- environment: [
2023
- {
2024
- name: "URL_TO_TEST",
2025
- value: "https://www-#{@dns_record_identifier}.prima.it/?superprima"
2026
- },
2027
- {
2028
- name: "DEVICE",
2029
- value: device
2030
- },
2031
- {
2032
- name: "BRANCH_NAME",
2033
- value: @projects['prima']['name']
2034
- },
2035
- {
2036
- name: "COMMITTER_EMAIL",
2037
- value: @projects['prima']['committer']
2038
- }
2039
- ]
2040
- }
2041
- })
2042
- end
2043
-
2044
- def get_lighthouse_dns()
2045
- dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', name: "www-#{@dns_record_identifier}.prima.it"})
2046
- if dns_records.body[:result_info][:count] > 0
2047
- return dns_records.body[:result][0][:id]
2048
- end
2049
- false
2050
- end
2051
-
2052
- def delete_lighthouse_dns()
2053
- dns_id = get_lighthouse_dns()
2054
- @cloudflare.delete("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns_id}") if dns_id
2055
- end
2056
-
2057
- def launch_bocelli_test(url)
2058
- @batch.submit_job({
2059
- job_name: "bocelli-test-#{@dns_record_identifier}",
2060
- job_queue: "tools-production",
2061
- job_definition: describe_stack_resource('batch-job-bocelli-production', 'JobDefinition').stack_resource_detail.physical_resource_id,
2062
- container_overrides: {
2063
- environment: [
2064
- {
2065
- name: "BATCH_COMMAND",
2066
- value: "test"
2067
- },
2068
- {
2069
- name: "QA_HOSTNAME",
2070
- value: url
2071
- },
2072
- {
2073
- name: "BRANCH_NAME",
2074
- value: @projects['prima']['name']
2075
- },
2076
- {
2077
- name: "COMMITTER_EMAIL",
2078
- value: @projects['prima']['committer']
2079
- }
2080
- ]
2081
- }
2082
- })
2083
- end
2084
-
2085
- def create_activia_artifact(revision)
2086
- output "Preparo l'artifact activia .zip\n".yellow
2087
-
2088
- git_checkout_version('activia', revision)
2089
-
2090
- Dir.chdir 'projects/activia'
2091
-
2092
- decrypt_secrets()
2093
-
2094
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2095
- exec_step 'prepare-docker-compose --directory activia && cp docker-compose-qainit.yml docker-compose.yml'
2096
-
2097
- execute_command "deploy/build_qa_artifact"
2098
-
2099
- cleanup_containers
2100
-
2101
- artifact_path = Dir.glob("_build/qa/rel/activia/releases/*/activia.tar.gz").first
2102
-
2103
- upload_artifact(artifact_path, "microservices/activia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2104
-
2105
- Dir.chdir '../../'
2106
- end
2107
-
2108
- def create_assange_artifact(revision)
2109
- output "Preparo l'artifact assange .zip\n".yellow
2110
-
2111
- git_checkout_version('assange', revision)
2112
-
2113
- Dir.chdir 'projects/assange'
2114
-
2115
- decrypt_secrets()
2116
-
2117
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2118
- exec_step 'prepare-docker-compose --directory assange && cp docker-compose-qainit.yml docker-compose.yml'
2119
- exec_step 'deploy/build_qa_artifact'
2120
-
2121
- cleanup_containers
2122
-
2123
- artifact_path = Dir.glob("_build/qa/rel/assange/releases/*/assange.tar.gz").first
2124
- upload_artifact(artifact_path, "microservices/assange/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2125
-
2126
- Dir.chdir '../../'
2127
- end
2128
-
2129
- def create_bburago_artifact(revision)
2130
- output "Preparo l'artifact bburago .zip\n".yellow
2131
-
2132
- git_checkout_version('bburago', revision)
2133
-
2134
- Dir.chdir 'projects/bburago'
2135
-
2136
- decrypt_secrets()
2137
-
2138
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2139
- exec_step 'prepare-docker-compose --directory bburago && cp docker-compose-qainit.yml docker-compose.yml'
2140
- [
2141
- "docker-compose build web",
2142
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint deploy/create_qa_artifact web"
2143
- ].each do |cmd|
2144
- execute_command cmd
2145
- end
2146
-
2147
- cleanup_containers
2148
-
2149
- artifact_path = Dir.glob('_build/qa/rel/bburago/releases/*/bburago.tar.gz').first
2150
- upload_artifact(artifact_path, "microservices/bburago/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2151
-
2152
- Dir.chdir '../../'
2153
- end
2154
-
2155
- def create_borat_artifact(revision)
2156
- output "Preparo l'artifact borat .zip\n".yellow
2157
-
2158
- git_checkout_version('borat', revision)
2159
-
2160
- Dir.chdir 'projects/borat'
2161
-
2162
- decrypt_secrets()
2163
-
2164
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2165
- exec_step 'prepare-docker-compose --directory borat && cp docker-compose-qainit.yml docker-compose.yml'
2166
-
2167
- execute_command "deploy/build_qa_artifact"
2168
-
2169
- cleanup_containers
2170
-
2171
- artifact_path = Dir.glob("_build/qa/rel/borat/releases/*/borat.tar.gz").first
2172
- upload_artifact(artifact_path, "microservices/borat/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2173
-
2174
- Dir.chdir '../../'
2175
- end
2176
-
2177
- def create_crash_artifact(revision, deploy_id)
2178
- output "Preparo l'artifact crash .zip\n".yellow
2179
-
2180
- git_checkout_version('crash', revision)
2181
-
2182
- Dir.chdir 'projects/crash'
2183
-
2184
- crash_qa_host = get_route53_hostname('ecs-task-crash-qa-notneeded')
2185
-
2186
- decrypt_secrets()
2187
-
2188
- `mv docker-compose-ci.yml docker-compose.yml`
2189
- exec_step 'prepare-docker-compose --directory crash && cp docker-compose-qainit.yml docker-compose.yml'
2190
-
2191
- execute_command "deploy/build_qa_artifact #{deploy_id}"
2192
-
2193
- cleanup_containers
2194
-
2195
- artifact_path = Dir.glob('_build/qa/rel/crash/releases/*/crash.tar.gz').first
2196
- upload_artifact(artifact_path, "microservices/crash/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2197
-
2198
- Dir.chdir '../../'
2199
- end
2200
-
2201
- def create_ermes_artifact(revision)
2202
- output "Preparo l'artifact ermes .zip\n".yellow
2203
-
2204
- git_checkout_version('ermes', revision)
2205
-
2206
- Dir.chdir 'projects/ermes'
2207
-
2208
- decrypt_secrets()
2209
-
2210
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2211
- exec_step 'prepare-docker-compose --directory ermes && cp docker-compose-qainit.yml docker-compose.yml'
2212
-
2213
- if File.exists? 'deploy/build_qa_artifact'
2214
- execute_command "deploy/build_qa_artifact"
2215
- else
2216
- [
2217
- "if echo `docker network ls` | grep crash_default; \
2218
- then echo 'crash_default network already existing'; \
2219
- else docker network create crash_default; fi",
2220
- 'docker-compose build web',"docker-compose run --rm -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2221
- '-c' 'mix local.hex --force && mix hex.info && \
2222
- mix deps.get && mix compile && mix deps.compile && \
2223
- mix phx.digest && \
2224
- MIX_ENV=dev mix compile.sms && \
2225
- MIX_ENV=dev mix compile.html && \
2226
- MIX_ENV=dev mix compile.heml && \
2227
- MIX_ENV=dev mix compile.app_notification && \
2228
- rm -rf _build/qa/rel/ && \
2229
- mix release --env=qa'"
2230
- ].each do |cmd|
2231
- execute_command cmd
2232
- end
2233
- end
2234
-
2235
- cleanup_containers
2236
-
2237
- artifact_path = Dir.glob("_build/qa/rel/ermes/releases/*/ermes.tar.gz").first
2238
- upload_artifact(artifact_path, "microservices/ermes/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2239
-
2240
- Dir.chdir '../../'
2241
- end
2242
-
2243
- def create_fidaty_artifact(revision)
2244
- output "Preparo l'artifact fidaty .zip\n".yellow
2245
-
2246
- git_checkout_version('fidaty', revision)
2247
-
2248
- Dir.chdir 'projects/fidaty'
2249
-
2250
- decrypt_secrets()
2251
-
2252
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2253
- exec_step 'prepare-docker-compose --directory fidaty && cp docker-compose-qainit.yml docker-compose.yml'
2254
- [
2255
- "docker-compose build web",
2256
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2257
- '-c' 'mix local.hex --force && mix hex.info && \
2258
- mix deps.get && mix compile && mix deps.compile && \
2259
- mix phx.digest && \
2260
- rm -rf _build/qa/rel/ && \
2261
- mix release --env=qa'"
2262
- ].each do |cmd|
2263
- execute_command cmd
2264
- end
2265
-
2266
- cleanup_containers
2267
-
2268
- artifact_path = Dir.glob("_build/qa/rel/fidaty/releases/*/fidaty.tar.gz").first
2269
- upload_artifact(artifact_path, "microservices/fidaty/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2270
-
2271
- Dir.chdir '../../'
2272
- end
2273
-
2274
- def create_hal9000_artifact(revision)
2275
- output "Preparo l'artifact hal9000 .zip\n".yellow
2276
-
2277
- git_checkout_version('hal9000', revision)
2278
-
2279
- Dir.chdir 'projects/hal9000'
2280
-
2281
- decrypt_secrets()
2282
-
2283
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2284
- exec_step 'prepare-docker-compose --directory hal9000 && cp docker-compose-qainit.yml docker-compose.yml'
2285
- [
2286
- "docker-compose build web",
2287
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2288
- '-c' 'mix local.hex --force && mix hex.info && \
2289
- mix deps.get && mix compile && mix deps.compile && \
2290
- mix phx.digest assets -o priv/static && \
2291
- rm -rf _build/qa/rel/ && \
2292
- mix release --env=qa'"
2293
- ].each do |cmd|
2294
- execute_command cmd
2295
- end
2296
-
2297
- cleanup_containers
2298
-
2299
- artifact_path = Dir.glob("_build/qa/rel/hal9000/releases/*/hal9000.tar.gz").first
2300
- upload_artifact(artifact_path, "microservices/hal9000/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2301
-
2302
- Dir.chdir '../../'
2303
- end
2304
-
2305
- def create_hutch_artifact(revision)
2306
- output "Preparo l'artifact hutch\n".yellow
2307
-
2308
- git_checkout_version('hutch', revision)
2309
-
2310
- Dir.chdir 'projects/hutch'
2311
-
2312
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2313
-
2314
- exec_step 'cp docker-compose-ci.yml docker-compose.yml'
2315
- exec_step 'prepare-docker-compose --directory hutch && cp docker-compose-qainit.yml docker-compose.yml'
2316
-
2317
- execute_command "deploy/build_qa_artifact #{get_route53_hostname("maia-intermediari")}"
2318
-
2319
- cleanup_containers
2320
-
2321
- artifact_path = "./hutch.tar.gz"
2322
- upload_artifact(artifact_path, "microservices/hutch/#{revision}-#{@dns_record_identifier[0..7]}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2323
-
2324
- Dir.chdir '../../'
2325
- end
2326
-
2327
- def create_leftorium_artifact(revision)
2328
- output "Preparo l'artifact leftorium .zip\n".yellow
2329
-
2330
- git_checkout_version('leftorium', revision)
2331
-
2332
- Dir.chdir 'projects/leftorium'
2333
-
2334
- decrypt_secrets()
2335
-
2336
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2337
- exec_step 'prepare-docker-compose --directory leftorium && cp docker-compose-qainit.yml docker-compose.yml'
2338
- [
2339
- "docker-compose build web",
2340
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2341
- '-c' 'mix local.hex --force && mix hex.info && \
2342
- mix deps.get && mix compile && mix deps.compile && \
2343
- rm -rf _build/qa/rel/ && \
2344
- mix release --env=qa'"
2345
- ].each do |cmd|
2346
- execute_command cmd
2347
- end
2348
-
2349
- cleanup_containers
2350
-
2351
- artifact_path = Dir.glob('_build/qa/rel/leftorium/releases/*/leftorium.tar.gz').first
2352
- upload_artifact(artifact_path, "microservices/leftorium/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2353
-
2354
- Dir.chdir '../../'
2355
- end
2356
-
2357
- def create_maia_artifact(revision)
2358
- output "Preparo l'artifact maia .zip\n".yellow
2359
-
2360
- git_checkout_version('maia', revision)
2361
-
2362
- Dir.chdir 'projects/maia'
2363
-
2364
- decrypt_secrets()
2365
-
2366
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2367
- exec_step 'prepare-docker-compose --directory maia && cp docker-compose-qainit.yml docker-compose.yml'
2368
-
2369
- execute_command 'deploy/build_qa_artifact'
2370
-
2371
- cleanup_containers
2372
-
2373
- artifact_path = Dir.glob('_build/qa/rel/maia/releases/*/maia.tar.gz').first
2374
- upload_artifact(artifact_path, "microservices/maia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2375
-
2376
- Dir.chdir '../../'
2377
- end
2378
-
2379
- def create_peano_artifact(revision)
2380
- output "Preparo l'artifact peano .zip\n".yellow
2381
-
2382
- git_checkout_version('peano', revision)
2383
-
2384
- Dir.chdir 'projects/peano'
2385
-
2386
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2387
-
2388
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2389
- exec_step 'prepare-docker-compose --directory peano && cp docker-compose-qainit.yml docker-compose.yml'
2390
-
2391
- execute_command "deploy/build_qa_artifact"
2392
-
2393
- cleanup_containers
2394
-
2395
- artifact_path = Dir.glob("_build/qa/rel/peano/releases/*/peano.tar.gz").first
2396
- upload_artifact(artifact_path, "microservices/peano/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2397
-
2398
- Dir.chdir '../../'
2399
- end
2400
-
2401
- def create_prima_artifact(revision, branch_name, deploy_id)
2402
- output "Preparo l'artifact prima .zip\n".yellow
2403
-
2404
- git_checkout_version('prima', revision)
2405
-
2406
- Dir.chdir 'projects/prima'
2407
-
2408
- ['vendor'].each do |dir|
2409
- unless File.directory?(dir)
2410
- if File.directory?("../../../prima/#{dir}")
2411
- exec_step "rsync -a ../../../prima/#{dir} ."
2412
- end
2413
- end
2414
- end
2415
-
2416
- exec_step 'mv docker-compose-ci.yml docker-compose.yml'
2417
- exec_step 'prepare-docker-compose --directory prima'
2418
- exec_step 'sed -i "s/\\/home\\/app/\\/root/g" docker-compose.yml'
2419
- `sed -i 's/"@prima-assicurazioni/pyxis-npm": ".*",/"@prima-assicurazioni/pyxis-npm": "#{@pyxis_version}",/' package.json` if deploy_pyxis?
2420
- [
2421
- "bin/qa_build_artifact.sh #{branch_name} #{'update' if @deploy_update}"
2422
- ].each do |cmd|
2423
- execute_command cmd
2424
- end
2425
-
2426
- cleanup_containers
2427
-
2428
- Dir.chdir "../../"
2429
- end
2430
-
2431
- def create_pyxis_artifact(revision, deploy_id)
2432
- if (deploy_pyxis?)
2433
- output "Preparo l'artifact pyxis\n".yellow
2434
-
2435
- git_checkout_version('pyxis-npm', revision)
2436
-
2437
- Dir.chdir 'projects/pyxis-npm'
2438
-
2439
- decrypt_secrets()
2440
-
2441
- exec_step 'mv .fakenpmrc .npmrc'
2442
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2443
- exec_step 'prepare-docker-compose --directory pyxis-npm && cp docker-compose-qainit.yml docker-compose.yml'
2444
- exec_step 'docker-compose build web'
2445
-
2446
- exec_step "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
2447
- '-c' 'npm view prima-assicurazioni/pyxis-npm versions --json > versions.json'" # the published versions can only be read from inside the container, so they are written to a file and read back right afterwards
2448
- published_versions = `cat versions.json`
2449
- qa_versions = JSON.parse(published_versions).select{ |version| version.include? get_pyxis_version(deploy_id) }
2450
-
2451
- @pyxis_version = "0.#{get_pyxis_version(deploy_id)}.#{qa_versions.size}"
2452
-
2453
- `sed -i '3s/".*/"version": "#{@pyxis_version}",/' package.json`
2454
- [
2455
- "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
2456
- '-c' 'yarn install && \
2457
- yarn build:prod && \
2458
- npm publish'"
2459
- ].each do |cmd|
2460
- execute_command cmd
2461
- end
2462
-
2463
- cleanup_containers
2464
- Dir.chdir '../../'
2465
- end
2466
- end
2467
-
2468
- def create_rachele_artifact(revision)
2469
- output "Preparo l'artifact rachele .zip\n".yellow
2470
-
2471
- git_checkout_version('rachele', revision)
2472
-
2473
- Dir.chdir 'projects/rachele'
2474
-
2475
- decrypt_secrets()
2476
-
2477
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2478
- exec_step 'prepare-docker-compose --directory rachele && cp docker-compose-qainit.yml docker-compose.yml'
2479
-
2480
- execute_command "docker-compose build web"
2481
-
2482
- [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2483
- '-c' 'mix local.hex --force && mix hex.info && \
2484
- mix deps.get && mix compile && mix deps.compile && \
2485
- rm -rf _build/qa/rel/ && \
2486
- mix release --env=qa'"
2487
- ].each do |cmd|
2488
- execute_command cmd
2489
- end
2490
-
2491
- cleanup_containers
2492
-
2493
- artifact_path = Dir.glob("_build/qa/rel/rachele/releases/*/rachele.tar.gz").first
2494
- upload_artifact(artifact_path, "microservices/rachele/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2495
-
2496
- Dir.chdir '../../'
2497
- end
2498
-
2499
- def create_roger_artifact(revision)
2500
- output "Preparo l'artifact roger .zip\n".yellow
2501
-
2502
- git_checkout_version('roger', revision)
2503
-
2504
- Dir.chdir 'projects/roger'
2505
-
2506
- decrypt_secrets()
2507
-
2508
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2509
- exec_step 'prepare-docker-compose --directory roger && cp docker-compose-qainit.yml docker-compose.yml'
2510
- [
2511
- "docker-compose build web",
2512
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2513
- '-c' 'mix local.hex --force && mix hex.info && \
2514
- mix deps.get && mix compile && mix deps.compile && \
2515
- mix phx.digest && \
2516
- rm -rf _build/qa/rel/ && \
2517
- mix distillery.release --env=qa'"
2518
- ].each do |cmd|
2519
- execute_command cmd
2520
- end
2521
-
2522
- cleanup_containers
2523
-
2524
- artifact_path = Dir.glob("_build/qa/rel/roger/releases/*/roger.tar.gz").first
2525
- upload_artifact(artifact_path, "microservices/roger/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2526
-
2527
- Dir.chdir '../../'
2528
- end
2529
-
2530
- def create_rogoreport_artifact(revision)
2531
- output "Preparo l'artifact rogoreport .zip\n".yellow
2532
-
2533
- git_checkout_version('rogoreport', revision)
2534
-
2535
- Dir.chdir 'projects/rogoreport'
2536
-
2537
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2538
-
2539
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2540
- exec_step 'prepare-docker-compose --directory rogoreport && cp docker-compose-qainit.yml docker-compose.yml'
2541
- [
2542
- "docker-compose build web",
2543
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2544
- '-c' 'mix local.hex --force && mix hex.info && \
2545
- mix deps.get && mix compile && mix deps.compile && \
2546
- rm -rf _build/qa/rel/ && \
2547
- mix release --name=rogoreport --env=qa'"
2548
- ].each do |cmd|
2549
- execute_command cmd
2550
- end
2551
-
2552
- cleanup_containers
2553
-
2554
- artifact_path = Dir.glob("_build/qa/rel/rogoreport/releases/*/rogoreport.tar.gz").first
2555
- upload_artifact(artifact_path, "microservices/rogoreport/rogoreport-#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2556
-
2557
- Dir.chdir '../../'
2558
- end
2559
-
2560
- def create_skynet_artifact(revision)
2561
- output "Preparo l'artifact skynet\n".yellow
2562
-
2563
- git_checkout_version('skynet', revision)
2564
-
2565
- Dir.chdir 'projects/skynet'
2566
-
2567
- version = `git rev-parse HEAD`
2568
-
2569
- artifact_path = "/tmp/skynet-#{revision}-qa.tar.gz"
2570
-
2571
- exec_step "tar cfz #{artifact_path} core config oracles pickles web tests logstash application_log configuration.py wsgi.py requirements.txt requirements_training.txt"
2572
-
2573
- upload_artifact(artifact_path, "microservices/skynet/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2574
-
2575
- Dir.chdir '../../'
2576
- end
2577
-
2578
- def create_starsky_artifact(revision)
2579
- output "Preparo l'artifact starsky\n".yellow
2580
-
2581
- git_checkout_version('starsky', revision)
2582
-
2583
- Dir.chdir 'projects/starsky'
2584
-
2585
- version = `git rev-parse HEAD`
2586
-
2587
- #artifact_path = "/tmp/starsky-#{revision}-qa.tar.gz"
2588
-
2589
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2590
-
2591
- `mv docker-compose-ci.yml docker-compose.yml`
2592
- exec_step 'prepare-docker-compose --directory starsky && cp docker-compose-qainit.yml docker-compose.yml'
2593
- exec_step "sed -i s/qa_deploy_id/#{get_deploy_id}/g .env.dist.qa"
2594
- exec_step "cp .env.dist.qa .env"
2595
-
2596
- [
2597
- "sed -i 's/USER app/USER root/g' Dockerfile",
2598
- "if echo `docker network ls` | grep peano_default; \
2599
- then echo 'peano_default network already existing'; \
2600
- else docker network create peano_default; fi",
2601
- "docker-compose build web",
2602
- "docker-compose run -w $PWD -e BUILD_ENV=qa -u root --entrypoint /bin/sh web \
2603
- '-c' 'cargo build --release -vv --features=qa \
2604
- && cargo build --bin migrate --release --features=qa \
2605
- && cargo build --bin rabbit_worker --release --features=qa \
2606
- && cp -p target/release/starsky . \
2607
- && cp -p target/release/migrate . \
2608
- && cp -p target/release/rabbit_worker . \
2609
- && tar cfz #{revision}-qa.tar.gz config starsky migrate rabbit_worker .env.dist.*'"
2610
- ].each do |cmd|
2611
- execute_command cmd
2612
- end
2613
-
2614
- artifact_path = "./#{revision}-qa.tar.gz"
2615
-
2616
- upload_artifact(artifact_path, "microservices/starsky/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2617
-
2618
- Dir.chdir '../../'
2619
- end
2620
-
2621
- def create_urania_artifact(revision)
2622
- output "Preparo l'artifact urania .zip\n".yellow
2623
-
2624
- git_checkout_version('urania', revision)
2625
-
2626
- Dir.chdir 'projects/urania'
2627
-
2628
- decrypt_secrets()
2629
-
2630
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2631
- exec_step 'prepare-docker-compose --directory urania && cp docker-compose-qainit.yml docker-compose.yml'
2632
-
2633
- execute_command "docker-compose build web"
2634
-
2635
- [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2636
- '-c' 'mix local.hex --force && mix hex.info && \
2637
- mix deps.get && mix compile && mix deps.compile && \
2638
- rm -rf _build/qa/rel/ && \
2639
- mix release --env=qa'"
2640
- ].each do |cmd|
2641
- execute_command cmd
2642
- end
2643
-
2644
- cleanup_containers
2645
-
2646
- artifact_path = Dir.glob("_build/qa/rel/urania/releases/*/urania.tar.gz").first
2647
- upload_artifact(artifact_path, "microservices/urania/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2648
-
2649
- Dir.chdir '../../'
2650
- end
2651
-
2652
659
  def deploy_pyxis?
2653
660
  if defined? @deploy_pyxis
2654
661
  @deploy_pyxis
@@ -2662,146 +669,11 @@ class Release
2662
669
  end
2663
670
  end
2664
671
 
2665
- def deploy_crash?
2666
- true # until we agree on how to handle the fact that leftorium needs to talk to crash
2667
- # crash_present = !@projects['crash'].empty? && @projects['crash']['name'] != 'master' && !@projects['crash']['default_branch']
2668
- # leftorium_present = !@projects['leftorium'].empty? && @projects['leftorium']['name'] != 'master' && !@projects['leftorium']['default_branch']
2669
- # crash_present || leftorium_present
2670
- end
2671
-
2672
- def deploy_starsky_hutch?
2673
- true
2674
- #starsky_present = !@projects['starsky'].nil? && !@projects['starsky'].empty? && @projects['starsky']['name'] != 'master' && !@projects['starsky']['default_branch']
2675
- #hutch_present = !@projects['hutch'].nil? && !@projects['hutch'].empty? && @projects['hutch']['name'] != 'master' && !@projects['hutch']['default_branch']
2676
- #starsky_present || hutch_present
2677
- end
2678
-
2679
- def get_pyxis_version(deploy_id)
2680
- (deploy_id.delete '[a-z0]')[0..9]
2681
- end
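For orientation, a small hypothetical sketch (not part of the gem) of how the removed helpers fit together: get_pyxis_version above strips letters and zeros from a deploy id, and the removed create_pyxis_artifact earlier in this diff combined that value with the count of QA versions already published on npm to build the version written into package.json. The deploy_id and qa_versions values below are invented for illustration.

  deploy_id     = 'a1b2c3d4e5f6a7b8c9d0'             # invented example id
  digits        = (deploy_id.delete '[a-z0]')[0..9]  # drops a-z and '0', keeps at most 10 chars => "123456789"
  qa_versions   = 3                                  # e.g. three QA builds already published for this id
  pyxis_version = "0.#{digits}.#{qa_versions}"       # => "0.123456789.3"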
2682
-
2683
- def cleanup_containers
2684
- `docker-compose kill && docker-compose down -v --remove-orphans`
2685
- `docker rm $(docker ps -q -f status=exited)`
2686
- end
2687
-
2688
- def git_checkout_version(project, revision)
2689
- Dir.chdir "projects/#{project}"
2690
- exec_step "git checkout -- . && git checkout #{revision}"
2691
- Dir.chdir "../../"
2692
- end
2693
-
2694
- def create_asg_stack(stack_name, tags = [])
2695
- stack_body = IO.read('cloudformation/stacks/asg/ecs-asg-allinone.yml')
2696
- parameters = [
2697
- {
2698
- parameter_key: "Environment",
2699
- parameter_value: "qa"
2700
- },
2701
- {
2702
- parameter_key: "InstanceType",
2703
- parameter_value: "t3.large"
2704
- },
2705
- {
2706
- parameter_key: "ECSClusterName",
2707
- parameter_value: @ecs_cluster_name
2708
- },
2709
- {
2710
- parameter_key: "AMIID",
2711
- parameter_value: @ami_id
2712
- }
2713
- ]
2714
- create_stack(stack_name, stack_body, parameters, tags, @cf_role)
2715
- end
2716
-
2717
- def create_cluster_stack(stack_name, tags = [])
2718
- stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
2719
- create_stack(stack_name, stack_body, [], tags)
2720
- end
2721
-
2722
672
  def update_cluster_stack(stack_name, tags = [])
2723
673
  stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
2724
674
  update_stack(stack_name, stack_body, [], tags)
2725
675
  end
2726
676
 
2727
- def create_alb_stack(stack_name, role, hash, environment = 'qa')
2728
- stack_body = IO.read('cloudformation/stacks/elb/alb-public-qa.yml')
2729
- parameters = [
2730
- {
2731
- parameter_key: "Environment",
2732
- parameter_value: environment
2733
- },
2734
- {
2735
- parameter_key: "Role",
2736
- parameter_value: role
2737
- },
2738
- {
2739
- parameter_key: "EnvHash",
2740
- parameter_value: hash
2741
- }
2742
- ]
2743
- create_stack(stack_name, stack_body, parameters, [], @cf_role)
2744
- end
2745
-
2746
- def import_redis_crash(qa_ip_address)
2747
- output "Importo chiavi di Redis da staging\n".yellow
2748
-
2749
- prefixes = ['CODICI', 'fun_with_flags']
2750
- redis_qa = Redis.new(:url => "redis://#{qa_ip_address}:6379/10")
2751
- redis_staging = Redis.new(:url => 'redis://staging.cache-1.prima.it:6379/10')
2752
-
2753
- prefixes.each do |prefix|
2754
- redis_staging.keys("#{prefix}*").each do |key|
2755
- next unless redis_qa.keys(key).empty?
2756
- output "Importo #{key} dal Redis di staging\n".yellow
2757
- dump_staging = redis_staging.dump key
2758
- redis_qa.restore key, 0, dump_staging
2759
- end
2760
- end
2761
- end
2762
-
2763
- def import_dbs(ip_address)
2764
- overrides = {
2765
- container_overrides: [
2766
- {
2767
- name: 'dbrestore',
2768
- environment: [
2769
- {
2770
- name: 'EC2_IP_ADDRESS',
2771
- value: ip_address
2772
- }
2773
- ]
2774
- }
2775
- ]
2776
- }
2777
- resp = run_ecs_task(@ecs_cluster_name, @import_db_task, overrides, 1)
2778
- return resp
2779
- end
2780
-
2781
- def wait_for_db_import(task)
2782
- output "Attendo che i DB vengano importati...\n".yellow
2783
- stopped_at = nil
2784
- sleep 15 # otherwise the just-launched task is not found yet...
2785
- while stopped_at.nil?
2786
- if task.tasks[0].nil?
2787
- pp @ecs_cluster_name
2788
- pp task
2789
- stop_if true, "Task di import DB lanciato, ma risposta vuota!".red
2790
- end
2791
- task = describe_ecs_tasks(task.tasks[0].cluster_arn, [task.tasks[0].task_arn])
2792
- stopped_at = task.tasks[0].stopped_at unless task.tasks[0].nil?
2793
- sleep_seconds = 10
2794
- seconds_elapsed = 0
2795
- while true && stopped_at.nil?
2796
- break if seconds_elapsed >= sleep_seconds
2797
- print '.'.yellow; STDOUT.flush
2798
- sleep 1
2799
- seconds_elapsed += 1
2800
- end
2801
- end
2802
- print "\n"
2803
- end
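The removed wait_for_db_import above keeps polling ECS until the import task exposes a stopped_at timestamp. A minimal sketch of the same polling idea, with the nested loops flattened, might look as follows; it is illustrative only and leans on the gem's own helpers (import_dbs, describe_ecs_tasks, stop_if) shown elsewhere in this diff.

  task = import_dbs(ip_address)   # launch the DB restore task (removed helper above)
  sleep 15                        # give ECS a moment to register the new task
  loop do
    stop_if task.tasks[0].nil?, "DB import task launched, but the response was empty!".red
    break unless task.tasks[0].stopped_at.nil?   # the import has finished
    print '.'.yellow; STDOUT.flush
    sleep 10
    task = describe_ecs_tasks(task.tasks[0].cluster_arn, [task.tasks[0].task_arn])
  end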
2804
-
2805
677
  def choose_branch_to_deploy(project_name, select_master = false)
2806
678
  Dir.chdir "projects/#{project_name}"
2807
679
  output "Recupero la lista dei branch del progetto #{project_name}..."
@@ -2889,74 +761,6 @@ class Release
2889
761
  (Time.now.to_i.to_s[-4..-1].to_i + Random.rand(40000)).to_s
2890
762
  end
2891
763
 
2892
- def launch_mimo(env_hash)
2893
- resp = describe_stack_resource('batch-job-mimo', 'JobDefinition')
2894
-
2895
- @batch.submit_job({
2896
- job_name: "mimo-#{@dns_record_identifier}", # required
2897
- job_queue: "tools-production", # required
2898
- job_definition: resp.stack_resource_detail.physical_resource_id, # required
2899
- container_overrides: {
2900
- environment: [
2901
- {
2902
- name: 'ENV_HASH',
2903
- value: env_hash
2904
- },
2905
- {
2906
- name: 'APP_ENV',
2907
- value: 'qa'
2908
- },
2909
- {
2910
- name: 'CYPRESS_BASE_URL',
2911
- value: "https://hutch-#{env_hash}.qa.colaster.com"
2912
- },
2913
- {
2914
- name: 'CYPRESS_PEANO_BASE_URL',
2915
- value: "http://peano-#{env_hash}.qa.colaster.com:10039/quotation"
2916
- },
2917
- {
2918
- name: 'QA_NAME',
2919
- value: @git_branch
2920
- }
2921
- ]
2922
- }
2923
- })
2924
-
2925
- output "Mimo lanciato con successo!\n".green
2926
- end
2927
-
2928
- def get_currently_deployed_version(stack_name)
2929
- parameters = get_stack_parameters(stack_name)
2930
- currently_deployed_version = nil
2931
- parameters.each do |parameter|
2932
- if parameter.parameter_key == "ReleaseVersion"
2933
- currently_deployed_version = parameter.parameter_value
2934
- end
2935
- end
2936
- currently_deployed_version
2937
- end
2938
-
2939
- def decrypt_secrets()
2940
- docker_image = "prima/biscuit_populate_configs"
2941
- [
2942
- "docker pull #{docker_image}",
2943
- "docker run -t --rm -v $HOME/.aws:/root/.aws -w $PWD --volumes-from #{get_host_container_name} #{docker_image}"
2944
- ].each do |cmd|
2945
- execute_command cmd
2946
- end
2947
- end
2948
-
2949
- def get_host_container_name()
2950
- if @host_container_name
2951
- @host_container_name
2952
- else
2953
- hostname = `cat /etc/hostname`.gsub("\n", '')
2954
- execute_command "docker ps | grep #{hostname} | awk '{print $NF}'"
2955
- @host_container_name = `docker ps | grep #{hostname} | awk '{print $NF}'`.gsub("\n", '')
2956
- # @host_container_name = `docker ps | grep #{hostname} | sed -r "s/.+ ([^\s].+)$/\1/p"`
2957
- end
2958
- end
2959
-
2960
764
  def select_branches(project_names = nil)
2961
765
  output "Deploy feature menu"
2962
766
  if project_names.nil?
@@ -2970,14 +774,6 @@ class Release
2970
774
  end
2971
775
  end
2972
776
  end
2973
-
2974
- def get_ami_id(stack_name)
2975
- get_stack_parameters(stack_name).each do |param|
2976
- if param.parameter_key == "AMIID"
2977
- return param.parameter_value
2978
- end
2979
- end
2980
- end
2981
777
  end
2982
778
 
2983
779
  def help_content